@salesforce/storefront-next-dev 0.1.1 → 0.2.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +45 -36
- package/bin/run.js +12 -0
- package/dist/bundle.js +83 -0
- package/dist/cartridge-services/index.d.ts +2 -26
- package/dist/cartridge-services/index.d.ts.map +1 -1
- package/dist/cartridge-services/index.js +3 -336
- package/dist/cartridge-services/index.js.map +1 -1
- package/dist/commands/create-bundle.js +107 -0
- package/dist/commands/create-instructions.js +174 -0
- package/dist/commands/create-storefront.js +210 -0
- package/dist/commands/deploy-cartridge.js +52 -0
- package/dist/commands/dev.js +122 -0
- package/dist/commands/extensions/create.js +38 -0
- package/dist/commands/extensions/install.js +44 -0
- package/dist/commands/extensions/list.js +21 -0
- package/dist/commands/extensions/remove.js +38 -0
- package/dist/commands/generate-cartridge.js +35 -0
- package/dist/commands/prepare-local.js +30 -0
- package/dist/commands/preview.js +101 -0
- package/dist/commands/push.js +139 -0
- package/dist/config.js +87 -0
- package/dist/configs/react-router.config.js +3 -1
- package/dist/configs/react-router.config.js.map +1 -1
- package/dist/dependency-utils.js +314 -0
- package/dist/entry/client.d.ts +1 -0
- package/dist/entry/client.js +28 -0
- package/dist/entry/client.js.map +1 -0
- package/dist/entry/server.d.ts +15 -0
- package/dist/entry/server.d.ts.map +1 -0
- package/dist/entry/server.js +35 -0
- package/dist/entry/server.js.map +1 -0
- package/dist/flags.js +11 -0
- package/dist/generate-cartridge.js +620 -0
- package/dist/hooks/init.js +47 -0
- package/dist/index.d.ts +9 -29
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +413 -621
- package/dist/index.js.map +1 -1
- package/dist/local-dev-setup.js +176 -0
- package/dist/logger.js +105 -0
- package/dist/manage-extensions.js +329 -0
- package/dist/mrt/ssr.mjs +21 -21
- package/dist/mrt/ssr.mjs.map +1 -1
- package/dist/mrt/streamingHandler.mjs +28 -28
- package/dist/mrt/streamingHandler.mjs.map +1 -1
- package/dist/server.js +425 -0
- package/dist/utils.js +126 -0
- package/package.json +44 -9
- package/dist/cli.js +0 -3393
- /package/{LICENSE.txt → LICENSE} +0 -0
|
@@ -0,0 +1,620 @@
|
|
|
1
|
+
import { existsSync, readFileSync, unlinkSync } from "node:fs";
|
|
2
|
+
import { basename, extname, join, resolve } from "node:path";
|
|
3
|
+
import { access, mkdir, readFile, readdir, rm, writeFile } from "node:fs/promises";
|
|
4
|
+
import { execSync } from "node:child_process";
|
|
5
|
+
import { Node, Project } from "ts-morph";
|
|
6
|
+
import { tmpdir } from "node:os";
|
|
7
|
+
import { randomUUID } from "node:crypto";
|
|
8
|
+
import { npmRunPathEnv } from "npm-run-path";
|
|
9
|
+
|
|
10
|
+
//#region src/cartridge-services/react-router-config.ts
|
|
11
|
+
// Memoized result of probing for the react-router CLI:
// null = not yet checked; true/false = cached availability for this process.
let isCliAvailable = null;
|
|
12
|
+
/**
 * Check (once per process) whether the `react-router` CLI can be executed
 * from the given project directory. The result is cached in the module-level
 * `isCliAvailable` flag so the probe runs at most once.
 * @param projectDirectory - Directory used as cwd for the probe
 * @returns true if `react-router --version` runs successfully
 */
function checkReactRouterCli(projectDirectory) {
  if (isCliAvailable === null) {
    try {
      // npmRunPathEnv() puts node_modules/.bin on PATH so locally installed
      // CLIs resolve without a global install.
      execSync("react-router --version", {
        cwd: projectDirectory,
        env: npmRunPathEnv(),
        stdio: "pipe"
      });
      isCliAvailable = true;
    } catch {
      isCliAvailable = false;
    }
  }
  return isCliAvailable;
}
|
|
26
|
+
/**
 * Get the fully resolved routes from React Router by invoking its CLI.
 * This ensures we get the exact same route resolution as React Router uses internally,
 * including all presets, file-system routes, and custom route configurations.
 * @param projectDirectory - The project root directory
 * @returns Array of resolved route config entries
 * @throws Error when the CLI is unavailable or its output cannot be parsed
 * @example
 * const routes = getReactRouterRoutes('/path/to/project');
 * // Returns the same structure as `react-router routes --json`
 */
function getReactRouterRoutes(projectDirectory) {
  if (!checkReactRouterCli(projectDirectory)) throw new Error("React Router CLI is not available. Please make sure @react-router/dev is installed and accessible.");
  try {
    // Capture stdout directly instead of shell-redirecting into a temp file:
    // this avoids shell-quoting pitfalls, a temp-file race, and the cleanup
    // path entirely. execSync returns the command's stdout when `encoding`
    // is set.
    const output = execSync("react-router routes --json", {
      cwd: projectDirectory,
      env: npmRunPathEnv(),
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
      // Large route trees can exceed execSync's 1 MiB default buffer.
      maxBuffer: 16 * 1024 * 1024
    });
    return JSON.parse(output);
  } catch (error) {
    throw new Error(`Failed to get routes from React Router CLI: ${error.message}`);
  }
}
|
|
60
|
+
/**
 * Convert a file path to its corresponding route path using React Router's CLI.
 * This ensures we get the exact same route resolution as React Router uses internally.
 * @param filePath - Absolute path to the route file
 * @param projectRoot - The project root directory
 * @returns The route path (e.g., '/cart', '/product/:productId'), or '/unknown'
 *          with a warning when no route references the file
 * @example
 * const route = filePathToRoute('/path/to/project/src/routes/_app.cart.tsx', '/path/to/project');
 * // Returns: '/cart'
 */
function filePathToRoute(filePath, projectRoot) {
  const filePathPosix = filePath.replace(/\\/g, "/");
  const flatRoutes = flattenRoutes(getReactRouterRoutes(projectRoot));
  for (const route of flatRoutes) {
    // Normalize the route's file reference: POSIX separators, no "./" prefix.
    const routeFilePosix = route.file.replace(/\\/g, "/").replace(/^\.\//, "");
    // Bug fix: match only on an exact path or a path-segment boundary.
    // A bare endsWith(routeFilePosix) would false-positive when the file name
    // merely shares a suffix (e.g. "barcart.tsx" vs route file "cart.tsx").
    if (filePathPosix === routeFilePosix || filePathPosix.endsWith(`/${routeFilePosix}`)) return route.path;
  }
  console.warn(`Warning: Could not find route for file: ${filePath}`);
  return "/unknown";
}
|
|
82
|
+
/**
 * Flatten a nested React Router route tree into a flat array, computing each
 * entry's absolute path from its ancestors' path segments.
 * @param routes - The nested route config entries
 * @param parentPath - Accumulated path prefix (used internally for recursion)
 * @returns Flat array of { id, path, file, index } records for routes with an id
 */
function flattenRoutes(routes, parentPath = "") {
  const flattened = [];
  for (const entry of routes) {
    let absolutePath;
    if (entry.index) {
      // Index routes render at their parent's path.
      absolutePath = parentPath || "/";
    } else if (entry.path) {
      const segment = entry.path.startsWith("/") ? entry.path : `/${entry.path}`;
      // Collapse any accidental "//" produced by joining parent + segment.
      absolutePath = parentPath ? `${parentPath}${segment}`.replace(/\/+/g, "/") : segment;
    } else {
      // Pathless layout routes inherit the parent's path.
      absolutePath = parentPath || "/";
    }
    if (entry.id) {
      flattened.push({
        id: entry.id,
        path: absolutePath,
        file: entry.file,
        index: entry.index
      });
    }
    if (entry.children && entry.children.length > 0) {
      // Pathless parents do not extend the prefix for their children.
      flattened.push(...flattenRoutes(entry.children, entry.path ? absolutePath : parentPath));
    }
  }
  return flattened;
}
|
|
111
|
+
|
|
112
|
+
//#endregion
|
|
113
|
+
//#region src/cartridge-services/generate-cartridge.ts
|
|
114
|
+
// Directory names never descended into when recursively scanning the
// project source tree for decorated files.
const SKIP_DIRECTORIES = [
  "build",
  "dist",
  "node_modules",
  ".git",
  ".next",
  "coverage"
];
// Component group used when a @Component decorator does not specify one.
const DEFAULT_COMPONENT_GROUP = "odyssey_base";
// arch_type value stamped into every generated cartridge JSON file.
const ARCH_TYPE_HEADLESS = "headless";
// Attribute types accepted in generated metadata; any other value supplied
// via a decorator is treated as a fatal error (see resolveAttributeType).
const VALID_ATTRIBUTE_TYPES = [
  "string",
  "text",
  "markup",
  "integer",
  "boolean",
  "product",
  "category",
  "file",
  "page",
  "image",
  "url",
  "enum",
  "custom",
  "cms_record"
];
// Maps TypeScript type names (as extracted by ts-morph) to attribute types,
// used when a decorator omits an explicit `type`.
const TYPE_MAPPING = {
  String: "string",
  string: "string",
  Number: "integer",
  number: "integer",
  Boolean: "boolean",
  boolean: "boolean",
  Date: "string",
  URL: "url",
  CMSRecord: "cms_record"
};
|
|
151
|
+
/**
 * Resolve the final attribute type for a field.
 * Precedence: explicit decorator type (validated; invalid values are fatal) →
 * TYPE_MAPPING lookup on the TypeScript type → "string" fallback.
 * @param decoratorType - Type given explicitly in the decorator, if any
 * @param tsMorphType - Type name extracted from the TypeScript source
 * @param fieldName - Field name, used only in the error message
 * @returns One of VALID_ATTRIBUTE_TYPES
 */
function resolveAttributeType(decoratorType, tsMorphType, fieldName) {
  if (decoratorType) {
    if (!VALID_ATTRIBUTE_TYPES.includes(decoratorType)) {
      console.error(`Error: Invalid attribute type '${decoratorType}' for field '${fieldName || "unknown"}'. Valid types are: ${VALID_ATTRIBUTE_TYPES.join(", ")}`);
      process.exit(1);
    }
    return decoratorType;
  }
  // Mapping values are non-empty strings, so `||` is a safe fallback here.
  return (tsMorphType && TYPE_MAPPING[tsMorphType]) || "string";
}
|
|
162
|
+
/**
 * Convert a camelCase identifier into a human-readable label,
 * e.g. "heroTitle" -> "Hero Title".
 */
function toHumanReadableName(fieldName) {
  const spaced = fieldName.replace(/([A-Z])/g, " $1");
  const capitalized = spaced.replace(/^./, (first) => first.toUpperCase());
  return capitalized.trim();
}
|
|
165
|
+
/**
 * Convert a space- or hyphen-separated name into a camelCase file name,
 * e.g. "hero-banner" -> "heroBanner". Names with no separators are
 * returned unchanged (including their original casing).
 */
function toCamelCaseFileName(name) {
  if (!/[\s-]/.test(name)) return name;
  const words = name.split(/[\s-]+/);
  return words
    .map((word, i) =>
      i === 0
        ? word.toLowerCase()
        : word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()
    )
    .join("");
}
|
|
172
|
+
/**
 * Extract a simple type name from a ts-morph property declaration.
 * Only the first member of a union/intersection is kept; any failure
 * (or a missing type annotation) falls back to "string".
 * @param property - ts-morph PropertyDeclaration-like object
 * @param _sourceFile - Unused; kept for signature compatibility
 */
function getTypeFromTsMorph(property, _sourceFile) {
  try {
    const typeNode = property.getTypeNode();
    if (typeNode) {
      const [firstUnionMember] = typeNode.getText().split("|");
      return firstUnionMember.split("&")[0].trim();
    }
  } catch {
    // Fall through to the default below.
  }
  return "string";
}
|
|
179
|
+
/**
 * Convert a ts-morph expression node into a plain JavaScript value.
 * Literals become their values, object/array literals recurse, and any
 * other expression degrades to its raw source text.
 */
function parseExpression(expression) {
  if (Node.isStringLiteral(expression)) return expression.getLiteralValue();
  if (Node.isNumericLiteral(expression)) return expression.getLiteralValue();
  if (Node.isTrueLiteral(expression)) return true;
  if (Node.isFalseLiteral(expression)) return false;
  if (Node.isObjectLiteralExpression(expression)) return parseNestedObject(expression);
  if (Node.isArrayLiteralExpression(expression)) return parseArrayLiteral(expression);
  return expression.getText();
}
|
|
188
|
+
/**
 * Convert a ts-morph object-literal expression into a plain object.
 * Only property assignments with an initializer are kept; shorthand/spread
 * members are skipped. On failure, whatever was parsed so far is returned.
 */
function parseNestedObject(objectLiteral) {
  const parsed = {};
  try {
    for (const member of objectLiteral.getProperties()) {
      if (!Node.isPropertyAssignment(member)) continue;
      const initializer = member.getInitializer();
      if (initializer) parsed[member.getName()] = parseExpression(initializer);
    }
  } catch (error) {
    console.warn(`Warning: Could not parse nested object: ${error.message}`);
  }
  return parsed;
}
|
|
203
|
+
/**
 * Convert a ts-morph array-literal expression into a plain array,
 * parsing each element recursively. On failure, returns whatever
 * elements were parsed before the error.
 */
function parseArrayLiteral(arrayLiteral) {
  const values = [];
  try {
    for (const element of arrayLiteral.getElements()) {
      values.push(parseExpression(element));
    }
  } catch (error) {
    console.warn(`Warning: Could not parse array literal: ${error.message}`);
  }
  return values;
}
|
|
213
|
+
/**
 * Parse a decorator's arguments into a plain config object.
 * Supports two call shapes:
 *   @Decorator({ id: "x", ... })          — object literal only
 *   @Decorator("x", { ... })              — id string plus optional options object
 * Any failure is logged and whatever was parsed so far is returned.
 * @param decorator - ts-morph Decorator (or any object with getArguments())
 * @returns Plain object of parsed key/value pairs (possibly with `id`)
 */
function parseDecoratorArgs(decorator) {
  const result = {};
  // Shared extractor for the object-literal → result copy that the two call
  // shapes both need (previously duplicated inline).
  const copyObjectLiteralProps = (objectLiteral) => {
    for (const property of objectLiteral.getProperties()) {
      if (!Node.isPropertyAssignment(property)) continue;
      const initializer = property.getInitializer();
      if (initializer) result[property.getName()] = parseExpression(initializer);
    }
  };
  try {
    const args = decorator.getArguments();
    if (args.length === 0) return result;
    const firstArg = args[0];
    if (Node.isObjectLiteralExpression(firstArg)) {
      copyObjectLiteralProps(firstArg);
    } else if (Node.isStringLiteral(firstArg)) {
      result.id = parseExpression(firstArg);
      // Optional second argument: an options object merged on top.
      if (args.length > 1 && Node.isObjectLiteralExpression(args[1])) {
        copyObjectLiteralProps(args[1]);
      }
    }
    return result;
  } catch (error) {
    console.warn(`Warning: Could not parse decorator arguments: ${error.message}`);
    return result;
  }
}
|
|
246
|
+
/**
 * Extract attribute metadata from every @AttributeDefinition-decorated
 * property of the named class.
 * @param sourceFile - ts-morph SourceFile containing the class
 * @param className - Name of the class to inspect
 * @returns Array of attribute records ({ id, name, type, required,
 *          description, and optionally values / default_value }); empty
 *          if the class is missing or extraction fails
 */
function extractAttributesFromSource(sourceFile, className) {
  const attributes = [];
  try {
    const classDeclaration = sourceFile.getClass(className);
    if (!classDeclaration) return attributes;
    const properties = classDeclaration.getProperties();
    for (const property of properties) {
      // Only properties carrying the @AttributeDefinition decorator count.
      const attributeDecorator = property.getDecorator("AttributeDefinition");
      if (!attributeDecorator) continue;
      const fieldName = property.getName();
      const config = parseDecoratorArgs(attributeDecorator);
      // An optional property (`foo?: T`) is treated as not required by default.
      const isRequired = !property.hasQuestionToken();
      // Decorator-supplied type wins; otherwise infer from the TS annotation.
      const inferredType = config.type || getTypeFromTsMorph(property, sourceFile);
      const attribute = {
        id: config.id || fieldName,
        name: config.name || toHumanReadableName(fieldName),
        type: resolveAttributeType(config.type, inferredType, fieldName),
        // Explicit decorator `required` overrides the optionality inference.
        required: config.required !== void 0 ? config.required : isRequired,
        description: config.description || `Field: ${fieldName}`
      };
      // Optional extras copied through only when present.
      if (config.values) attribute.values = config.values;
      if (config.defaultValue !== void 0) attribute.default_value = config.defaultValue;
      attributes.push(attribute);
    }
  } catch (error) {
    console.warn(`Warning: Could not extract attributes from class ${className}: ${error.message}`);
  }
  return attributes;
}
|
|
275
|
+
/**
 * Extract region definitions from a class-level @RegionDefinition decorator.
 * The decorator's first argument is expected to be an array of object
 * literals; each becomes one region record with snake_case keys.
 * @param sourceFile - ts-morph SourceFile containing the class
 * @param className - Name of the class to inspect
 * @returns Array of region definition records; empty when the class or
 *          decorator is absent or extraction fails
 */
function extractRegionDefinitionsFromSource(sourceFile, className) {
  const regionDefinitions = [];
  try {
    const classDeclaration = sourceFile.getClass(className);
    const decorator = classDeclaration?.getDecorator("RegionDefinition");
    const firstArg = decorator?.getArguments()[0];
    if (!firstArg || !Node.isArrayLiteralExpression(firstArg)) return regionDefinitions;
    for (const element of firstArg.getElements()) {
      if (!Node.isObjectLiteralExpression(element)) continue;
      // Reuse the decorator-argument parser by wrapping the element in a
      // minimal decorator-shaped object.
      const regionConfig = parseDecoratorArgs({ getArguments: () => [element] });
      const regionDefinition = {
        id: regionConfig.id || "region",
        name: regionConfig.name || "Region"
      };
      if (regionConfig.componentTypes) regionDefinition.component_types = regionConfig.componentTypes;
      if (Array.isArray(regionConfig.componentTypeInclusions)) regionDefinition.component_type_inclusions = regionConfig.componentTypeInclusions.map((typeId) => ({ type_id: typeId }));
      if (Array.isArray(regionConfig.componentTypeExclusions)) regionDefinition.component_type_exclusions = regionConfig.componentTypeExclusions.map((typeId) => ({ type_id: typeId }));
      if (regionConfig.maxComponents !== void 0) regionDefinition.max_components = regionConfig.maxComponents;
      if (regionConfig.minComponents !== void 0) regionDefinition.min_components = regionConfig.minComponents;
      if (regionConfig.allowMultiple !== void 0) regionDefinition.allow_multiple = regionConfig.allowMultiple;
      if (regionConfig.defaultComponentConstructors) regionDefinition.default_component_constructors = regionConfig.defaultComponentConstructors;
      regionDefinitions.push(regionDefinition);
    }
  } catch (error) {
    console.warn(`Warning: Could not extract region definitions from class ${className}: ${error.message}`);
  }
  return regionDefinitions;
}
|
|
310
|
+
/**
 * Scan one source file for @Component-decorated classes and build component
 * metadata for each. Non-fatal: any read or parse failure is logged and
 * yields an empty (or partial) result.
 * @param filePath - Absolute path to the candidate .ts/.tsx file
 * @param _projectRoot - Unused; kept for signature parity with processPageTypeFile
 * @returns Array of component metadata records
 */
async function processComponentFile(filePath, _projectRoot) {
  try {
    const content = await readFile(filePath, "utf-8");
    const components = [];
    // Cheap substring pre-filter before paying for a ts-morph parse.
    if (!content.includes("@Component")) return components;
    try {
      // A fresh in-memory Project per file keeps parses isolated from the
      // real tsconfig/filesystem. NOTE(review): one Project per file is
      // O(files) parser setups — acceptable here, but a shared Project
      // would be faster for large trees.
      const sourceFile = new Project({
        useInMemoryFileSystem: true,
        skipAddingFilesFromTsConfig: true
      }).createSourceFile(filePath, content);
      const classes = sourceFile.getClasses();
      for (const classDeclaration of classes) {
        const componentDecorator = classDeclaration.getDecorator("Component");
        if (!componentDecorator) continue;
        // Anonymous classes cannot be turned into metadata — skip them.
        const className = classDeclaration.getName();
        if (!className) continue;
        const componentConfig = parseDecoratorArgs(componentDecorator);
        const attributes = extractAttributesFromSource(sourceFile, className);
        const regionDefinitions = extractRegionDefinitionsFromSource(sourceFile, className);
        // Decorator config wins; class-name-derived defaults fill the gaps.
        const componentMetadata = {
          typeId: componentConfig.id || className.toLowerCase(),
          name: componentConfig.name || toHumanReadableName(className),
          group: componentConfig.group || DEFAULT_COMPONENT_GROUP,
          description: componentConfig.description || `Custom component: ${className}`,
          regionDefinitions,
          attributes
        };
        components.push(componentMetadata);
      }
    } catch (error) {
      console.warn(`Warning: Could not process file ${filePath}:`, error.message);
    }
    return components;
  } catch (error) {
    console.warn(`Warning: Could not read file ${filePath}:`, error.message);
    return [];
  }
}
|
|
348
|
+
/**
 * Scan one source file for @PageType-decorated classes and build page-type
 * metadata for each, including the React Router route the file maps to.
 * Non-fatal: any read or parse failure is logged and yields an empty
 * (or partial) result.
 * @param filePath - Absolute path to the candidate .ts/.tsx file
 * @param projectRoot - Project root, needed for route resolution
 * @returns Array of page type metadata records
 */
async function processPageTypeFile(filePath, projectRoot) {
  try {
    const content = await readFile(filePath, "utf-8");
    const pageTypes = [];
    // Cheap substring pre-filter before paying for a ts-morph parse.
    if (!content.includes("@PageType")) return pageTypes;
    try {
      // Fresh in-memory Project per file — isolated from tsconfig/filesystem.
      const sourceFile = new Project({
        useInMemoryFileSystem: true,
        skipAddingFilesFromTsConfig: true
      }).createSourceFile(filePath, content);
      const classes = sourceFile.getClasses();
      for (const classDeclaration of classes) {
        const pageTypeDecorator = classDeclaration.getDecorator("PageType");
        if (!pageTypeDecorator) continue;
        // Anonymous classes cannot be turned into metadata — skip them.
        const className = classDeclaration.getName();
        if (!className) continue;
        const pageTypeConfig = parseDecoratorArgs(pageTypeDecorator);
        const attributes = extractAttributesFromSource(sourceFile, className);
        const regionDefinitions = extractRegionDefinitionsFromSource(sourceFile, className);
        // Resolve the URL route this file serves via the React Router CLI
        // (may invoke the CLI; returns "/unknown" if no route matches).
        const route = filePathToRoute(filePath, projectRoot);
        const pageTypeMetadata = {
          typeId: pageTypeConfig.id || className.toLowerCase(),
          name: pageTypeConfig.name || toHumanReadableName(className),
          description: pageTypeConfig.description || `Custom page type: ${className}`,
          regionDefinitions,
          supportedAspectTypes: pageTypeConfig.supportedAspectTypes || [],
          attributes,
          route
        };
        pageTypes.push(pageTypeMetadata);
      }
    } catch (error) {
      console.warn(`Warning: Could not process file ${filePath}:`, error.message);
    }
    return pageTypes;
  } catch (error) {
    console.warn(`Warning: Could not read file ${filePath}:`, error.message);
    return [];
  }
}
|
|
388
|
+
/**
 * Read a candidate aspect JSON file and convert it into aspect metadata.
 * Files that are not .json, do not start with "{", do not live under an
 * `aspects/` directory, or lack the required `name`/`attribute_definitions`
 * keys yield an empty array. Read/parse failures are logged, never thrown.
 * @param filePath - Absolute path to the candidate file
 * @param _projectRoot - Unused; kept for signature parity with the other processors
 * @returns Array with zero or one aspect metadata records
 */
async function processAspectFile(filePath, _projectRoot) {
  try {
    const content = await readFile(filePath, "utf-8");
    const aspects = [];
    const looksLikeJsonObject = filePath.endsWith(".json") && content.trim().startsWith("{");
    if (!looksLikeJsonObject) return aspects;
    const inAspectsDir = filePath.includes("/aspects/") || filePath.includes("\\aspects\\");
    if (!inAspectsDir) return aspects;
    try {
      const aspectData = JSON.parse(content);
      if (aspectData.name && aspectData.attribute_definitions) {
        aspects.push({
          // The file name (sans extension) doubles as the aspect id.
          id: basename(filePath, ".json"),
          name: aspectData.name,
          description: aspectData.description || `Aspect type: ${aspectData.name}`,
          attributeDefinitions: aspectData.attribute_definitions || [],
          supportedObjectTypes: aspectData.supported_object_types || []
        });
      }
    } catch (parseError) {
      console.warn(`Warning: Could not parse JSON in file ${filePath}:`, parseError.message);
    }
    return aspects;
  } catch (error) {
    console.warn(`Warning: Could not read file ${filePath}:`, error.message);
    return [];
  }
}
|
|
415
|
+
/**
 * Write one component cartridge JSON file under the component's group
 * directory, then log a one-line summary. In dry-run mode nothing is
 * written; only the summary line (with a [DRY RUN] marker) is printed.
 * @param component - Component metadata from processComponentFile
 * @param outputDir - Root output directory for component cartridges
 * @param dryRun - When true, skip all filesystem writes
 */
async function generateComponentCartridge(component, outputDir, dryRun = false) {
  const fileName = toCamelCaseFileName(component.typeId);
  const groupDir = join(outputDir, component.group);
  const outputPath = join(groupDir, `${fileName}.json`);
  if (!dryRun) {
    try {
      await mkdir(groupDir, { recursive: true });
    } catch {
      // Directory already exists (or mkdir raced) — nothing to do.
    }
    const cartridgeData = {
      name: component.name,
      description: component.description,
      group: component.group,
      arch_type: ARCH_TYPE_HEADLESS,
      region_definitions: component.regionDefinitions || [],
      // One attribute group per component, mirroring the component identity.
      attribute_definition_groups: [{
        id: component.typeId,
        name: component.name,
        description: component.description,
        attribute_definitions: component.attributes
      }]
    };
    await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));
  }
  const prefix = dryRun ? " - [DRY RUN]" : " -";
  console.log(`${prefix} ${String(component.typeId)}: ${String(component.name)} (${String(component.attributes.length)} attributes) → ${fileName}.json`);
}
|
|
442
|
+
/**
 * Write one page-type cartridge JSON file, then log a one-line summary.
 * Optional sections (attribute groups, supported aspects, route) are only
 * emitted when present on the metadata. In dry-run mode nothing is written.
 * @param pageType - Page type metadata from processPageTypeFile
 * @param outputDir - Output directory for page cartridges
 * @param dryRun - When true, skip all filesystem writes
 */
async function generatePageTypeCartridge(pageType, outputDir, dryRun = false) {
  const fileName = toCamelCaseFileName(pageType.name);
  const outputPath = join(outputDir, `${fileName}.json`);
  if (!dryRun) {
    const cartridgeData = {
      name: pageType.name,
      description: pageType.description,
      arch_type: ARCH_TYPE_HEADLESS,
      region_definitions: pageType.regionDefinitions || []
    };
    const hasAttributes = pageType.attributes && pageType.attributes.length > 0;
    if (hasAttributes) {
      cartridgeData.attribute_definition_groups = [{
        id: pageType.typeId || fileName,
        name: pageType.name,
        description: pageType.description,
        attribute_definitions: pageType.attributes
      }];
    }
    if (pageType.supportedAspectTypes) cartridgeData.supported_aspect_types = pageType.supportedAspectTypes;
    if (pageType.route) cartridgeData.route = pageType.route;
    await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));
  }
  const prefix = dryRun ? " - [DRY RUN]" : " -";
  console.log(`${prefix} ${String(pageType.name)}: ${String(pageType.description)} (${String(pageType.attributes.length)} attributes) → ${fileName}.json`);
}
|
|
465
|
+
/**
 * Write one aspect cartridge JSON file, then log a one-line summary.
 * In dry-run mode nothing is written; only the summary line is printed.
 * @param aspect - Aspect metadata from processAspectFile
 * @param outputDir - Output directory for aspect cartridges
 * @param dryRun - When true, skip all filesystem writes
 */
async function generateAspectCartridge(aspect, outputDir, dryRun = false) {
  const fileName = toCamelCaseFileName(aspect.id);
  const outputPath = join(outputDir, `${fileName}.json`);
  if (!dryRun) {
    const cartridgeData = {
      name: aspect.name,
      description: aspect.description,
      arch_type: ARCH_TYPE_HEADLESS,
      attribute_definitions: aspect.attributeDefinitions || []
    };
    if (aspect.supportedObjectTypes) cartridgeData.supported_object_types = aspect.supportedObjectTypes;
    await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));
  }
  const prefix = dryRun ? " - [DRY RUN]" : " -";
  console.log(`${prefix} ${String(aspect.name)}: ${String(aspect.description)} (${String(aspect.attributeDefinitions.length)} attributes) → ${fileName}.json`);
}
|
|
481
|
+
/**
 * Runs ESLint with --fix on the specified directory to format JSON files.
 * This ensures generated JSON files match the project's Prettier/ESLint configuration.
 * Best-effort: failures are downgraded to warnings, never thrown.
 * @param metadataDir - Directory containing the generated JSON files
 * @param projectRoot - Project root used as cwd so the project's ESLint config applies
 */
function lintGeneratedFiles(metadataDir, projectRoot) {
  try {
    console.log("🔧 Running ESLint --fix on generated JSON files...");
    execSync(`npx eslint "${metadataDir}/**/*.json" --fix --no-error-on-unmatched-pattern`, {
      cwd: projectRoot,
      stdio: "pipe",
      encoding: "utf-8"
    });
    console.log("✅ JSON files formatted successfully");
  } catch (error) {
    const execError = error;
    // Exit code 2 means ESLint itself failed to run (usage/config error).
    if (execError.status === 2) {
      const errMsg = execError.stderr || execError.stdout || "Unknown error";
      console.warn(`⚠️ Warning: Could not run ESLint --fix: ${errMsg}`);
      return;
    }
    // Exit code 1 with errors in stderr: fixes applied where possible,
    // but some issues remain for manual review.
    if (execError.stderr && execError.stderr.includes("error")) {
      console.warn(`⚠️ Warning: Some linting issues could not be auto-fixed. Run ESLint manually to review.`);
      return;
    }
    console.log("✅ JSON files formatted successfully");
  }
}
|
|
503
|
+
/**
 * Top-level driver: scan a project for decorated components, page types, and
 * aspect JSON files, and generate cartridge metadata JSON under
 * `metadataDirectory` (subdirs: components/, pages/, aspects/).
 *
 * Modes:
 *  - full (default): cleans the three output dirs, rescans all of src/
 *  - incremental (options.filePaths non-empty): processes only those files,
 *    preserving/overwriting existing cartridge output
 *  - dry-run (options.dryRun): no filesystem writes, logs what would happen
 *
 * @param projectDirectory - Project root (src/ is scanned beneath it)
 * @param metadataDirectory - Root directory for generated metadata
 * @param options - Optional { filePaths, dryRun, lintFix }
 * @returns Summary counts { componentsGenerated, pageTypesGenerated,
 *          aspectsGenerated, totalFiles }
 *
 * NOTE: on any unexpected error this logs and calls process.exit(1) —
 * it is written to be a CLI entry point, not a reusable library function.
 */
async function generateMetadata(projectDirectory, metadataDirectory, options) {
  try {
    const filePaths = options?.filePaths;
    const isIncrementalMode = filePaths && filePaths.length > 0;
    const dryRun = options?.dryRun || false;
    // Announce the selected mode up front.
    if (dryRun) console.log("🔍 [DRY RUN] Scanning for decorated components and page types...");
    else if (isIncrementalMode) console.log(`🔍 Generating metadata for ${filePaths.length} specified file(s)...`);
    else console.log("🔍 Generating metadata for decorated components and page types...");
    const projectRoot = resolve(projectDirectory);
    const srcDir = join(projectRoot, "src");
    const metadataDir = resolve(metadataDirectory);
    const componentsOutputDir = join(metadataDir, "components");
    const pagesOutputDir = join(metadataDir, "pages");
    const aspectsOutputDir = join(metadataDir, "aspects");
    if (!dryRun) {
      if (!isIncrementalMode) {
        // Full mode: wipe previous output so stale cartridges don't linger.
        console.log("🗑️ Cleaning existing output directories...");
        for (const outputDir of [
          componentsOutputDir,
          pagesOutputDir,
          aspectsOutputDir
        ]) try {
          await rm(outputDir, {
            recursive: true,
            force: true
          });
          console.log(` - Deleted: ${outputDir}`);
        } catch {
          console.log(` - Directory not found (skipping): ${outputDir}`);
        }
      } else console.log("📝 Incremental mode: existing cartridge files will be preserved/overwritten");
      console.log("📁 Creating output directories...");
      for (const outputDir of [
        componentsOutputDir,
        pagesOutputDir,
        aspectsOutputDir
      ]) try {
        await mkdir(outputDir, { recursive: true });
      } catch (error) {
        // mkdir failed — tolerate it only if the directory actually exists.
        try {
          await access(outputDir);
        } catch {
          console.error(`❌ Error: Failed to create output directory ${outputDir}: ${error.message}`);
          process.exit(1);
        }
      }
    } else if (isIncrementalMode) console.log(`📝 [DRY RUN] Would process ${filePaths.length} specific file(s)`);
    else console.log("📝 [DRY RUN] Would clean and regenerate all metadata files");
    // Build the list of candidate files: either the caller-supplied set,
    // or a recursive scan of src/ for .ts/.tsx/.json (skipping build dirs).
    let files = [];
    if (isIncrementalMode && filePaths) {
      files = filePaths.map((fp) => resolve(projectRoot, fp));
      console.log(`📂 Processing ${files.length} specified file(s)...`);
    } else {
      const scanDirectory = async (dir) => {
        const entries = await readdir(dir, { withFileTypes: true });
        for (const entry of entries) {
          const fullPath = join(dir, entry.name);
          if (entry.isDirectory()) {
            if (!SKIP_DIRECTORIES.includes(entry.name)) await scanDirectory(fullPath);
          } else if (entry.isFile() && (extname(entry.name) === ".ts" || extname(entry.name) === ".tsx" || extname(entry.name) === ".json")) files.push(fullPath);
        }
      };
      await scanDirectory(srcDir);
    }
    // Each file may contribute components, page types, and/or aspects.
    const allComponents = [];
    const allPageTypes = [];
    const allAspects = [];
    for (const file of files) {
      const components = await processComponentFile(file, projectRoot);
      allComponents.push(...components);
      const pageTypes = await processPageTypeFile(file, projectRoot);
      allPageTypes.push(...pageTypes);
      const aspects = await processAspectFile(file, projectRoot);
      allAspects.push(...aspects);
    }
    // Nothing found: report and return zero counts (not an error).
    if (allComponents.length === 0 && allPageTypes.length === 0 && allAspects.length === 0) {
      console.log("⚠️ No decorated components, page types, or aspect files found.");
      return {
        componentsGenerated: 0,
        pageTypesGenerated: 0,
        aspectsGenerated: 0,
        totalFiles: 0
      };
    }
    // Emit cartridges per category (the generate* helpers honor dryRun).
    if (allComponents.length > 0) {
      console.log(`✅ Found ${allComponents.length} decorated component(s):`);
      for (const component of allComponents) await generateComponentCartridge(component, componentsOutputDir, dryRun);
      if (dryRun) console.log(`📄 [DRY RUN] Would generate ${allComponents.length} component metadata file(s) in: ${componentsOutputDir}`);
      else console.log(`📄 Generated ${allComponents.length} component metadata file(s) in: ${componentsOutputDir}`);
    }
    if (allPageTypes.length > 0) {
      console.log(`✅ Found ${allPageTypes.length} decorated page type(s):`);
      for (const pageType of allPageTypes) await generatePageTypeCartridge(pageType, pagesOutputDir, dryRun);
      if (dryRun) console.log(`📄 [DRY RUN] Would generate ${allPageTypes.length} page type metadata file(s) in: ${pagesOutputDir}`);
      else console.log(`📄 Generated ${allPageTypes.length} page type metadata file(s) in: ${pagesOutputDir}`);
    }
    if (allAspects.length > 0) {
      console.log(`✅ Found ${allAspects.length} decorated aspect(s):`);
      for (const aspect of allAspects) await generateAspectCartridge(aspect, aspectsOutputDir, dryRun);
      if (dryRun) console.log(`📄 [DRY RUN] Would generate ${allAspects.length} aspect metadata file(s) in: ${aspectsOutputDir}`);
      else console.log(`📄 Generated ${allAspects.length} aspect metadata file(s) in: ${aspectsOutputDir}`);
    }
    // Lint-fix generated files unless explicitly disabled or in dry-run.
    const shouldLintFix = options?.lintFix !== false;
    if (!dryRun && shouldLintFix && (allComponents.length > 0 || allPageTypes.length > 0 || allAspects.length > 0)) lintGeneratedFiles(metadataDir, projectRoot);
    return {
      componentsGenerated: allComponents.length,
      pageTypesGenerated: allPageTypes.length,
      aspectsGenerated: allAspects.length,
      totalFiles: allComponents.length + allPageTypes.length + allAspects.length
    };
  } catch (error) {
    console.error("❌ Error:", error.message);
    process.exit(1);
  }
}
|
|
618
|
+
|
|
619
|
+
//#endregion
|
|
620
|
+
export { generateMetadata as t };
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { B2CPluginManager } from "@salesforce/b2c-tooling-sdk/plugins";
|
|
2
|
+
import { getLogger } from "@salesforce/b2c-tooling-sdk/logging";
|
|
3
|
+
|
|
4
|
+
//#region src/cli-plugins.ts
|
|
5
|
+
// Singleton plugin manager; undefined until initializePlugins() succeeds,
// and reset to undefined if initialization fails.
let manager;
// Guards against running plugin discovery more than once per process.
let initialized = false;
|
|
7
|
+
/**
 * Initializes the b2c-cli plugin system.
 *
 * Discovers plugins installed via `b2c plugins:install`, invokes their hooks,
 * and registers middleware and config sources with the global registries.
 * All failures are non-fatal — the CLI continues to work without plugin support.
 */
async function initializePlugins() {
  // Idempotent: the flag is set before any async work so concurrent callers
  // after the first invocation bail out immediately.
  if (initialized) return;
  initialized = true;
  try {
    const logger = getLogger();
    manager = new B2CPluginManager({ logger });
    await manager.initialize();
    manager.applyMiddleware();
    const loadedCount = manager.pluginNames.length;
    if (loadedCount > 0) {
      logger.info(`Loaded ${loadedCount} plugin(s): ${manager.pluginNames.join(", ")}`);
    }
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    try {
      getLogger().warn(`Plugin initialization failed: ${message}`);
    } catch {
      // Logger itself unavailable — stay silent; plugins are best-effort.
    }
    manager = void 0;
  }
}
|
|
31
|
+
|
|
32
|
+
//#endregion
|
|
33
|
+
//#region src/hooks/init.ts
|
|
34
|
+
/**
 * Oclif init hook — runs before any command executes.
 *
 * Discovers b2c-cli plugins (installed via `b2c plugins:install`) and registers
 * their middleware and config sources with the global registries. This ensures
 * all sfnext commands automatically benefit from installed b2c-cli plugins.
 */
const hook = async () => {
  await initializePlugins();
};
var init_default = hook;
|
|
45
|
+
|
|
46
|
+
//#endregion
|
|
47
|
+
export { init_default as default };
|