@salesforce/storefront-next-dev 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +181 -0
- package/README.md +302 -0
- package/dist/cartridge-services/index.d.ts +60 -0
- package/dist/cartridge-services/index.d.ts.map +1 -0
- package/dist/cartridge-services/index.js +954 -0
- package/dist/cartridge-services/index.js.map +1 -0
- package/dist/cli.js +3373 -0
- package/dist/configs/react-router.config.d.ts +13 -0
- package/dist/configs/react-router.config.d.ts.map +1 -0
- package/dist/configs/react-router.config.js +36 -0
- package/dist/configs/react-router.config.js.map +1 -0
- package/dist/extensibility/templates/install-instructions.mdc.hbs +192 -0
- package/dist/extensibility/templates/uninstall-instructions.mdc.hbs +137 -0
- package/dist/index.d.ts +327 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2606 -0
- package/dist/index.js.map +1 -0
- package/dist/mrt/sfnext-server-chunk-DUt5XHAg.mjs +1 -0
- package/dist/mrt/sfnext-server-jiti-DjnmHo-6.mjs +10 -0
- package/dist/mrt/sfnext-server-jiti-DjnmHo-6.mjs.map +1 -0
- package/dist/mrt/ssr.d.ts +19 -0
- package/dist/mrt/ssr.d.ts.map +1 -0
- package/dist/mrt/ssr.mjs +246 -0
- package/dist/mrt/ssr.mjs.map +1 -0
- package/dist/mrt/streamingHandler.d.ts +11 -0
- package/dist/mrt/streamingHandler.d.ts.map +1 -0
- package/dist/mrt/streamingHandler.mjs +255 -0
- package/dist/mrt/streamingHandler.mjs.map +1 -0
- package/dist/react-router/Scripts.d.ts +36 -0
- package/dist/react-router/Scripts.d.ts.map +1 -0
- package/dist/react-router/Scripts.js +68 -0
- package/dist/react-router/Scripts.js.map +1 -0
- package/package.json +157 -0
package/dist/cli.js
ADDED
|
@@ -0,0 +1,3373 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { Command } from "commander";
|
|
3
|
+
import fs from "fs-extra";
|
|
4
|
+
import path, { dirname, extname } from "path";
|
|
5
|
+
import os from "os";
|
|
6
|
+
import archiver from "archiver";
|
|
7
|
+
import { Minimatch, minimatch } from "minimatch";
|
|
8
|
+
import { execSync } from "child_process";
|
|
9
|
+
import dotenv from "dotenv";
|
|
10
|
+
import chalk from "chalk";
|
|
11
|
+
import { createRequire } from "module";
|
|
12
|
+
import { URL as URL$1, fileURLToPath, pathToFileURL } from "url";
|
|
13
|
+
import zlib from "zlib";
|
|
14
|
+
import { promisify } from "util";
|
|
15
|
+
import { createServer } from "vite";
|
|
16
|
+
import express from "express";
|
|
17
|
+
import { createRequestHandler } from "@react-router/express";
|
|
18
|
+
import { existsSync, readFileSync, unlinkSync } from "node:fs";
|
|
19
|
+
import { basename, extname as extname$1, join, resolve } from "node:path";
|
|
20
|
+
import { createProxyMiddleware } from "http-proxy-middleware";
|
|
21
|
+
import compression from "compression";
|
|
22
|
+
import zlib$1 from "node:zlib";
|
|
23
|
+
import morgan from "morgan";
|
|
24
|
+
import fs$1 from "fs";
|
|
25
|
+
import Handlebars from "handlebars";
|
|
26
|
+
import { access, mkdir, readFile, readdir, rm, writeFile } from "node:fs/promises";
|
|
27
|
+
import { execSync as execSync$1 } from "node:child_process";
|
|
28
|
+
import { Node, Project } from "ts-morph";
|
|
29
|
+
import { tmpdir } from "node:os";
|
|
30
|
+
import { randomUUID } from "node:crypto";
|
|
31
|
+
import { npmRunPathEnv } from "npm-run-path";
|
|
32
|
+
import prompts from "prompts";
|
|
33
|
+
import { z } from "zod";
|
|
34
|
+
|
|
35
|
+
//#region package.json
|
|
36
|
+
var version = "0.1.0";
|
|
37
|
+
|
|
38
|
+
//#endregion
|
|
39
|
+
//#region src/utils/logger.ts
|
|
40
|
+
/**
 * Get the local network IPv4 address.
 *
 * Scans every network interface and returns the first external
 * (non-internal) IPv4 address, or undefined when none is available
 * (e.g. an offline machine).
 */
function getNetworkAddress() {
  for (const aliases of Object.values(os.networkInterfaces())) {
    if (!aliases) continue;
    const external = aliases.find((alias) => alias.family === "IPv4" && !alias.internal);
    if (external) return external.address;
  }
}
|
|
51
|
+
/**
 * Get the version of a package from the project's package.json.
 *
 * Resolves `<packageName>/package.json` relative to projectDir and reads
 * its "version" field; returns "unknown" when resolution or reading fails.
 */
function getPackageVersion(packageName, projectDir) {
  try {
    const require = createRequire(import.meta.url);
    const pkgJsonPath = require.resolve(`${packageName}/package.json`, { paths: [projectDir] });
    const { version: resolvedVersion } = require(pkgJsonPath);
    return resolvedVersion;
  } catch {
    return "unknown";
  }
}
|
|
62
|
+
/**
 * Logger utilities: level-to-color mapping plus colorized console helpers.
 */
const colors = {
  warn: "yellow",
  error: "red",
  success: "cyan",
  info: "green",
  debug: "gray"
};
// Print a message prefixed with its colorized level name.
const fancyLog = (level, msg) => {
  console.log(`${chalk[colors[level]](level)}: ${msg}`);
};
const info = (msg) => fancyLog("info", msg);
const success = (msg) => fancyLog("success", msg);
const warn = (msg) => fancyLog("warn", msg);
const error = (msg) => fancyLog("error", msg);
// Debug output is suppressed in production unless DEBUG is set.
const debug = (msg, data) => {
  if (!process.env.DEBUG && process.env.NODE_ENV === "production") return;
  fancyLog("debug", msg);
  if (data) console.log(data);
};
|
|
86
|
+
/**
 * Print the server information banner with URLs and versions
 *
 * @param mode - "development" prints "Development Mode"; anything else prints "Preview Mode"
 * @param port - server port (not used by the banner itself)
 * @param startTime - Date.now() timestamp taken at startup; used for the "ready in" time
 * @param projectDir - project root used to resolve react / react-router versions
 */
function printServerInfo(mode, port, startTime, projectDir) {
  const elapsed = Date.now() - startTime;
  const sfnextVersion = version;
  const reactVersion = getPackageVersion("react", projectDir);
  const reactRouterVersion = getPackageVersion("react-router", projectDir);
  const modeLabel = mode === "development" ? "Development Mode" : "Preview Mode";
  console.log();
  console.log(` ${chalk.cyan.bold("⚡ SFCC Storefront Next")} ${chalk.dim(`v${sfnextVersion}`)}`);
  console.log(` ${chalk.green.bold(modeLabel)}`);
  console.log();
  console.log(` ${chalk.dim("react")} ${chalk.green(`v${reactVersion}`)} ${chalk.dim("│")} ${chalk.dim("react-router")} ${chalk.green(`v${reactRouterVersion}`)} ${chalk.dim("│")} ${chalk.green(`ready in ${elapsed}ms`)}`);
  console.log();
}
|
|
102
|
+
/**
 * Print server configuration details (proxy, static, etc.)
 *
 * Shows proxy routing (only when fully configured), static serving and
 * compression flags, then local and LAN URLs and the stop hint.
 * @param config - object with port, enableProxy, enableStaticServing,
 *   enableCompression, proxyPath, proxyTarget, shortCode, organizationId,
 *   clientId, siteId
 */
function printServerConfig(config) {
  const { port, enableProxy, enableStaticServing, enableCompression, proxyPath, proxyTarget, shortCode, organizationId, clientId, siteId } = config;
  console.log(` ${chalk.bold("Environment Configuration:")}`);
  // Proxy info is only printed when every required piece is present.
  if (enableProxy && proxyPath && proxyTarget && shortCode) {
    console.log(` ${chalk.green("✓")} ${chalk.bold("Proxy:")} ${chalk.cyan(`localhost:${port}${proxyPath}`)} ${chalk.dim("→")} ${chalk.cyan(proxyTarget)}`);
    console.log(` ${chalk.dim("Short Code: ")} ${chalk.dim(shortCode)}`);
    if (organizationId) console.log(` ${chalk.dim("Organization ID:")} ${chalk.dim(organizationId)}`);
    if (clientId) console.log(` ${chalk.dim("Client ID: ")} ${chalk.dim(clientId)}`);
    if (siteId) console.log(` ${chalk.dim("Site ID: ")} ${chalk.dim(siteId)}`);
  } else console.log(` ${chalk.gray("○")} ${chalk.bold("Proxy: ")} ${chalk.dim("disabled")}`);
  if (enableStaticServing) console.log(` ${chalk.green("✓")} ${chalk.bold("Static: ")} ${chalk.dim("enabled")}`);
  if (enableCompression) console.log(` ${chalk.green("✓")} ${chalk.bold("Compression: ")} ${chalk.dim("enabled")}`);
  const localUrl = `http://localhost:${port}`;
  // Network URL is omitted when no external IPv4 address is available.
  const networkAddress = getNetworkAddress();
  const networkUrl = networkAddress ? `http://${networkAddress}:${port}` : null;
  console.log();
  console.log(` ${chalk.green("➜")} ${chalk.bold("Local: ")} ${chalk.cyan(localUrl)}`);
  if (networkUrl) console.log(` ${chalk.green("➜")} ${chalk.bold("Network:")} ${chalk.cyan(networkUrl)}`);
  console.log();
  console.log(` ${chalk.dim("Press")} ${chalk.bold("Ctrl+C")} ${chalk.dim("to stop the server")}`);
  console.log();
}
|
|
127
|
+
/**
 * Print a dimmed shutdown notice when the server is stopping.
 */
function printShutdownMessage() {
  const bolt = chalk.yellow("⚡");
  const text = chalk.dim("Server shutting down...");
  console.log(`\n ${bolt} ${text}\n`);
}
|
|
133
|
+
|
|
134
|
+
//#endregion
|
|
135
|
+
//#region src/utils.ts
|
|
136
|
+
// Default Managed Runtime origin used when no cloud origin is supplied.
const DEFAULT_CLOUD_ORIGIN = "https://cloud.mobify.com";
// Conventional build output directory: <project>/build.
const getDefaultBuildDir = (targetDir) => path.join(targetDir, "build");
// Effective runtime environment; defaults to "development".
const NODE_ENV = process.env.NODE_ENV || "development";
|
|
139
|
+
/**
 * Get the credentials file path for a cloud origin.
 *
 * An explicitly provided credentialsFile always wins. Otherwise the file
 * is ~/.mobify for the default origin, or ~/.mobify--<host> for any
 * other origin.
 */
const getCredentialsFile = (cloudOrigin, credentialsFile) => {
  if (credentialsFile) return credentialsFile;
  const { host } = new URL(cloudOrigin);
  const suffix = host === "cloud.mobify.com" ? "" : `--${host}`;
  return path.join(os.homedir(), `.mobify${suffix}`);
};
|
|
148
|
+
/**
 * Read Managed Runtime credentials from a JSON file.
 *
 * @param filepath - path to the credentials file
 * @returns { username, api_key }
 * @throws when the file is missing or unreadable, with guidance on how
 *         to authorize the machine for bundle pushes
 */
const readCredentials = async (filepath) => {
  let data;
  try {
    data = await fs.readJSON(filepath);
  } catch {
    throw new Error(`Credentials file "${filepath}" not found.\nVisit https://runtime.commercecloud.com/account/settings for steps on authorizing your computer to push bundles.`);
  }
  const { username, api_key } = data;
  return { username, api_key };
};
|
|
162
|
+
/**
 * Read and parse the project's package.json.
 *
 * @param projectDir - project root directory
 * @throws when the file cannot be read or parsed
 */
const getProjectPkg = (projectDir) => {
  const packagePath = path.join(projectDir, "package.json");
  let pkg;
  try {
    pkg = fs.readJSONSync(packagePath);
  } catch {
    throw new Error(`Could not read project package at "${packagePath}"`);
  }
  return pkg;
};
|
|
173
|
+
/**
 * Load environment variables from the project's .env file, if present.
 * Logs a warning when no .env file exists.
 */
const loadEnvFile = (projectDir) => {
  const envPath = path.join(projectDir, ".env");
  if (!fs.existsSync(envPath)) {
    warn("No .env file found");
    return;
  }
  dotenv.config({ path: envPath });
};
|
|
181
|
+
/**
 * Get MRT configuration with priority logic: .env -> package.json -> defaults.
 *
 * Loads the project's .env, then resolves the MRT project slug from
 * MRT_PROJECT or the package.json "name", and the target from MRT_TARGET.
 * @throws when no project name can be determined
 */
const getMrtConfig = (projectDir) => {
  loadEnvFile(projectDir);
  const pkg = getProjectPkg(projectDir);
  const defaultMrtProject = process.env.MRT_PROJECT ?? pkg.name;
  if (!defaultMrtProject || defaultMrtProject.trim() === "") {
    throw new Error("Project name couldn't be determined. Do one of these options:\n 1. Set MRT_PROJECT in your .env file, or\n 2. Ensure package.json has a valid \"name\" field.");
  }
  const defaultMrtTarget = process.env.MRT_TARGET ?? void 0;
  debug("MRT configuration resolved", {
    projectDir,
    envMrtProject: process.env.MRT_PROJECT,
    envMrtTarget: process.env.MRT_TARGET,
    packageName: pkg.name,
    resolvedProject: defaultMrtProject,
    resolvedTarget: defaultMrtTarget
  });
  return { defaultMrtProject, defaultMrtTarget };
};
|
|
203
|
+
/**
 * Get the project's dependency tree via `npm ls --all --json`.
 *
 * Captures stdout directly instead of shell-redirecting into a temp file;
 * the previous temp-file approach could leak the file when reading or
 * parsing failed after a successful `npm ls` run.
 *
 * @param projectDir - directory to run `npm ls` in
 * @returns the parsed dependency tree, or null when npm fails (e.g. the
 *          directory does not exist or node_modules is missing) or the
 *          output cannot be parsed
 */
const getProjectDependencyTree = (projectDir) => {
  try {
    const output = execSync("npm ls --all --json", {
      cwd: projectDir,
      encoding: "utf8",
      // Capture stdout; discard stdin/stderr like the previous version did.
      stdio: ["ignore", "pipe", "ignore"],
      // Dependency trees for large projects can be several MB.
      maxBuffer: 64 * 1024 * 1024
    });
    return JSON.parse(output);
  } catch {
    return null;
  }
};
|
|
220
|
+
/**
 * Get PWA Kit dependencies from a dependency tree.
 *
 * Recursively walks the npm dependency tree collecting versions of known
 * PWA Kit packages; deeper occurrences overwrite shallower ones.
 * @returns map of package name -> version ("unknown" when unset)
 */
const getPwaKitDependencies = (dependencyTree) => {
  if (!dependencyTree) return {};
  const trackedPackages = new Set(["@salesforce/storefront-next-dev"]);
  const result = {};
  const visit = (deps) => {
    if (!deps) return;
    for (const [name, dep] of Object.entries(deps)) {
      if (trackedPackages.has(name)) result[name] = dep.version || "unknown";
      visit(dep.dependencies);
    }
  };
  visit(dependencyTree.dependencies);
  return result;
};
|
|
236
|
+
/**
 * Get the default bundle message from git: "<branch>: <short-sha>".
 * Falls back to "PWA Kit Bundle" when git fails (e.g. not a repository).
 */
const getDefaultMessage = (projectDir) => {
  const git = (cmd) => execSync(cmd, {
    encoding: "utf8",
    cwd: projectDir
  }).trim();
  try {
    const branch = git("git rev-parse --abbrev-ref HEAD");
    const shortSha = git("git rev-parse --short HEAD");
    return `${branch}: ${shortSha}`;
  } catch {
    debug("Using default bundle message as no message was provided and not in a Git repo.");
    return "PWA Kit Bundle";
  }
};
|
|
253
|
+
/**
 * Given a project directory and a record of config overrides, generate a new
 * .env file with the overrides based on the .env.default file.
 *
 * Comment lines and lines without "=" are copied through unchanged; for
 * "KEY=VALUE" lines the value is replaced when configOverrides has KEY
 * (a null/undefined override keeps the original value).
 * @param projectDir - project root containing .env.default
 * @param configOverrides - map of env keys to replacement values
 */
const generateEnvFile = (projectDir, configOverrides) => {
  const envDefaultPath = path.join(projectDir, ".env.default");
  const envPath = path.join(projectDir, ".env");
  if (!fs.existsSync(envDefaultPath)) {
    // Use the module's shared logger for consistency with loadEnvFile
    // (previously console.warn, bypassing the colorized logger).
    warn(`${envDefaultPath} not found`);
    return;
  }
  const envOutputLines = fs.readFileSync(envDefaultPath, "utf8").split("\n").map((line) => {
    if (!line || line.trim().startsWith("#")) return line;
    const eqIndex = line.indexOf("=");
    if (eqIndex === -1) return line;
    const key = line.slice(0, eqIndex);
    const originalValue = line.slice(eqIndex + 1);
    // hasOwnProperty guards against inherited keys on the overrides object.
    const override = Object.prototype.hasOwnProperty.call(configOverrides, key) ? configOverrides[key] : void 0;
    return `${key}=${override ?? originalValue}`;
  });
  fs.writeFileSync(envPath, envOutputLines.join("\n"));
};
|
|
275
|
+
|
|
276
|
+
//#endregion
|
|
277
|
+
//#region src/bundle.ts
|
|
278
|
+
/**
 * Create a bundle from the build directory
 *
 * Tars the build directory re-rooted under "<projectSlug>/bld/",
 * base64-encodes the tarball, and classifies every archived file against
 * the ssr_only / ssr_shared glob patterns.
 *
 * @param options.message - bundle message shown in Managed Runtime
 * @param options.ssr_parameters - SSR runtime parameters passed through as-is
 * @param options.ssr_only - glob patterns for server-only files (required, non-empty)
 * @param options.ssr_shared - glob patterns for shared files (required, non-empty)
 * @param options.buildDirectory - directory whose contents are archived
 * @param options.projectDirectory - project root, used for dependency metadata
 * @param options.projectSlug - MRT project slug; becomes the archive root folder
 * @returns promise resolving to the upload-ready bundle payload
 * @throws when ssr_only or ssr_shared is missing or empty
 */
const createBundle = async (options) => {
  const { message, ssr_parameters, ssr_only, ssr_shared, buildDirectory, projectDirectory, projectSlug } = options;
  // Scratch directory holding the intermediate tar file.
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "storefront-next-dev-push-"));
  const destination = path.join(tmpDir, "build.tar");
  // Relative names of every regular file added to the archive.
  const filesInArchive = [];
  if (!ssr_only || ssr_only.length === 0 || !ssr_shared || ssr_shared.length === 0) throw new Error("no ssrOnly or ssrShared files are defined");
  return new Promise((resolve$1, reject) => {
    const output = fs.createWriteStream(destination);
    const archive = archiver("tar");
    archive.pipe(output);
    // All entries are prefixed so the archive unpacks to <slug>/bld/...
    const newRoot = path.join(projectSlug, "bld", "");
    // Storybook/test artifacts are excluded from the archive entirely.
    const storybookExclusionMatchers = [
      "**/*.stories.tsx",
      "**/*.stories.ts",
      "**/*-snapshot.tsx",
      ".storybook/**/*",
      "storybook-static/**/*",
      "**/__mocks__/**/*",
      "**/__snapshots__/**/*"
    ].map((pattern) => new Minimatch(pattern, { nocomment: true }));
    archive.directory(buildDirectory, "", (entry) => {
      // Returning false drops the entry from the archive.
      if (entry.name && storybookExclusionMatchers.some((matcher) => matcher.match(entry.name))) return false;
      if (entry.stats?.isFile() && entry.name) filesInArchive.push(entry.name);
      entry.prefix = newRoot;
      return entry;
    });
    archive.on("error", reject);
    // "finish" on the write stream fires once the tar is fully on disk.
    output.on("finish", () => {
      try {
        // Merge project deps with any PWA Kit deps found in the npm tree.
        const { dependencies = {}, devDependencies = {} } = getProjectPkg(projectDirectory);
        const dependencyTree = getProjectDependencyTree(projectDirectory);
        const pwaKitDeps = dependencyTree ? getPwaKitDependencies(dependencyTree) : {};
        const bundle_metadata = { dependencies: {
          ...dependencies,
          ...devDependencies,
          ...pwaKitDeps
        } };
        const data = fs.readFileSync(destination);
        const encoding = "base64";
        // The tar is now in memory; the scratch directory can be removed.
        fs.rmSync(tmpDir, { recursive: true });
        // Build a predicate honoring both positive and negated ("!") globs.
        const createGlobMatcher = (patterns) => {
          const allPatterns = patterns.map((pattern) => new Minimatch(pattern, { nocomment: true })).filter((pattern) => !pattern.empty);
          const positivePatterns = allPatterns.filter((pattern) => !pattern.negate);
          const negativePatterns = allPatterns.filter((pattern) => pattern.negate);
          return (filePath) => {
            if (filePath) {
              const positive = positivePatterns.some((pattern) => pattern.match(filePath));
              // NOTE: a negated Minimatch's match() returns false when the
              // path matches the excluded pattern body, so !match flags
              // exclusion — confirm against minimatch's negate semantics.
              const negative = negativePatterns.some((pattern) => !pattern.match(filePath));
              return positive && !negative;
            }
            return false;
          };
        };
        resolve$1({
          message,
          encoding,
          data: data.toString(encoding),
          ssr_parameters,
          ssr_only: filesInArchive.filter(createGlobMatcher(ssr_only)),
          ssr_shared: filesInArchive.filter(createGlobMatcher(ssr_shared)),
          bundle_metadata
        });
      } catch (err) {
        reject(err);
      }
    });
    archive.finalize().catch(reject);
  });
};
|
|
350
|
+
|
|
351
|
+
//#endregion
|
|
352
|
+
//#region src/cloud-api.ts
|
|
353
|
+
/**
 * Minimal HTTP client for the Managed Runtime (Cloud) API.
 * Handles Basic-auth headers, bundle uploads, and deployment polling.
 */
var CloudAPIClient = class {
  // Credentials object ({ username, api_key }) used for Basic auth.
  credentials;
  // Cloud API origin, e.g. "https://cloud.mobify.com".
  origin;
  constructor({ credentials, origin }) {
    this.credentials = credentials;
    this.origin = origin;
  }
  // Build the Basic Authorization header from "username:api_key".
  getAuthHeader() {
    const { username, api_key } = this.credentials;
    return { Authorization: `Basic ${Buffer.from(`${username}:${api_key}`, "binary").toString("base64")}` };
  }
  // Common headers for every request: tool User-Agent plus auth.
  getHeaders() {
    return {
      "User-Agent": `storefront-next-dev@${version}`,
      ...this.getAuthHeader()
    };
  }
  /**
   * Push bundle to Managed Runtime
   *
   * POSTs the JSON-serialized bundle to api/projects/<slug>/builds/
   * (suffixed with the target environment when given).
   * @throws Error with the server-provided message on any status >= 400
   */
  async push(bundle, projectSlug, target) {
    const base = `api/projects/${projectSlug}/builds/`;
    const pathname = target ? `${base}${target}/` : base;
    const url = new URL$1(this.origin);
    url.pathname = pathname;
    const body = Buffer.from(JSON.stringify(bundle));
    const headers = {
      ...this.getHeaders(),
      "Content-Length": body.length.toString()
    };
    const res = await fetch(url.toString(), {
      body,
      method: "POST",
      headers
    });
    if (res.status >= 400) {
      const bodyText = await res.text();
      let errorData;
      // The API may return JSON or plain text on error; handle both.
      try {
        errorData = JSON.parse(bodyText);
      } catch {
        errorData = { message: bodyText };
      }
      throw new Error(`HTTP ${res.status}: ${errorData.message || bodyText}\nFor more information visit https://developer.salesforce.com/docs/commerce/pwa-kit-managed-runtime/guide/pushing-and-deploying-bundles.html`);
    }
    return await res.json();
  }
  /**
   * Wait for deployment to complete
   *
   * Polls the target's state every 30 seconds (the first poll is also
   * delayed 30 seconds) until it becomes ACTIVE; rejects on failed or
   * unknown states.
   */
  async waitForDeploy(project, environment) {
    return new Promise((resolve$1, reject) => {
      const delay = 3e4;
      const check = async () => {
        const url = new URL$1(`/api/projects/${project}/target/${environment}`, this.origin);
        const res = await fetch(url, { headers: this.getHeaders() });
        if (!res.ok) {
          const text = await res.text();
          let json;
          // Best-effort parse: the error body may not be JSON.
          try {
            if (text) json = JSON.parse(text);
          } catch {}
          const message = json?.detail ?? text;
          const detail = message ? `: ${message}` : "";
          throw new Error(`${res.status} ${res.statusText}${detail}`);
        }
        const data = await res.json();
        if (typeof data.state !== "string") return reject(/* @__PURE__ */ new Error("An unknown state occurred when polling the deployment."));
        switch (data.state) {
          case "CREATE_IN_PROGRESS":
          case "PUBLISH_IN_PROGRESS":
            // Still deploying: schedule another poll.
            setTimeout(() => {
              check().catch(reject);
            }, delay);
            return;
          case "CREATE_FAILED":
          case "PUBLISH_FAILED": return reject(/* @__PURE__ */ new Error("Deployment failed."));
          case "ACTIVE": return resolve$1();
          default: return reject(/* @__PURE__ */ new Error(`Unknown deployment state "${data.state}".`));
        }
      };
      setTimeout(() => {
        check().catch(reject);
      }, delay);
    });
  }
};
|
|
440
|
+
|
|
441
|
+
//#endregion
|
|
442
|
+
//#region src/mrt/utils.ts
|
|
443
|
+
// Bundle type value (and entry file name) for the classic SSR handler.
const MRT_BUNDLE_TYPE_SSR = "ssr";
// Entry file name used for the streaming handler bundle.
const MRT_STREAMING_ENTRY_FILE = "streamingHandler";
/**
 * Gets the MRT entry file for the given mode
 *
 * Production uses the streaming handler unless MRT_BUNDLE_TYPE is set to
 * "ssr"; every other mode uses the SSR entry.
 * @param mode - The mode to get the MRT entry file for
 * @returns The MRT entry file for the given mode
 */
const getMrtEntryFile = (mode) => {
  const wantsSsr = process.env.MRT_BUNDLE_TYPE === MRT_BUNDLE_TYPE_SSR;
  if (mode === "production" && !wantsSsr) return MRT_STREAMING_ENTRY_FILE;
  return MRT_BUNDLE_TYPE_SSR;
};
|
|
453
|
+
|
|
454
|
+
//#endregion
|
|
455
|
+
//#region src/config.ts
|
|
456
|
+
// Base directory where cartridges live in the project.
const CARTRIDGES_BASE_DIR = "cartridges";
// Name of the base storefront-next cartridge.
const SFNEXT_BASE_CARTRIDGE_NAME = "app_storefrontnext_base";
// Output directory for generated Page Designer experience metadata.
const SFNEXT_BASE_CARTRIDGE_OUTPUT_DIR = `${SFNEXT_BASE_CARTRIDGE_NAME}/cartridge/experience`;
/**
 * When enabled, automatically generates and deploys cartridge metadata before an MRT push.
 * This is useful for keeping Page Designer metadata in sync with component changes.
 *
 * When enabled:
 * 1. Generates cartridge metadata from decorated components
 * 2. Deploys the cartridge to Commerce Cloud (requires dw.json configuration)
 * 3. Proceeds with the MRT push
 *
 * To enable: Set this to `true` in your local config.ts
 * Default: false (manual cartridge generation/deployment via `sfnext generate-cartridge` and `sfnext deploy-cartridge`)
 */
const GENERATE_AND_DEPLOY_CARTRIDGE_ON_MRT_PUSH = false;
// Negated globs excluding Storybook/test artifacts from both file sets.
const STORYBOOK_EXCLUSION_GLOBS = [
  "!**/*.stories.tsx",
  "!**/*.stories.ts",
  "!**/*-snapshot.tsx",
  "!.storybook/**/*",
  "!storybook-static/**/*",
  "!**/__mocks__/**/*",
  "!**/__snapshots__/**/*"
];
/**
 * Build MRT SSR configuration for bundle deployment
 *
 * Defines which files should be:
 * - Server-only (ssrOnly): Deployed only to Lambda functions
 * - Shared (ssrShared): Deployed to both Lambda and CDN
 *
 * @param _buildDirectory - Path to the build output directory (reserved for future use)
 * @param _projectDirectory - Path to the project root (reserved for future use)
 * @returns MRT SSR configuration with glob patterns
 */
const buildMrtConfig = (_buildDirectory, _projectDirectory) => {
  const ssrEntryPoint = getMrtEntryFile("production");
  return {
    ssrOnly: [
      "server/**/*",
      "loader.js",
      `${ssrEntryPoint}.{js,mjs,cjs}`,
      `${ssrEntryPoint}.{js,mjs,cjs}.map`,
      "!static/**/*",
      "sfnext-server-*.mjs",
      ...STORYBOOK_EXCLUSION_GLOBS
    ],
    ssrShared: [
      "client/**/*",
      "static/**/*",
      "**/*.css",
      "**/*.png",
      "**/*.jpg",
      "**/*.jpeg",
      "**/*.gif",
      "**/*.svg",
      "**/*.ico",
      "**/*.woff",
      "**/*.woff2",
      "**/*.ttf",
      "**/*.eot",
      ...STORYBOOK_EXCLUSION_GLOBS
    ],
    ssrParameters: { ssrFunctionNodeVersion: "24.x" }
  };
};
|
|
526
|
+
|
|
527
|
+
//#endregion
|
|
528
|
+
//#region src/commands/push.ts
|
|
529
|
+
/**
 * Main function to push bundle to Managed Runtime
 *
 * Resolution order: CLI flags win over .env (MRT_PROJECT / MRT_TARGET),
 * which wins over the package.json "name" (project slug only).
 * Validates options, creates the bundle, uploads it via CloudAPIClient,
 * and optionally waits for the deployment to finish (--wait).
 * @throws on invalid option combinations, missing directories, or upload failure
 */
async function push(options) {
  const mrtConfig = getMrtConfig(options.projectDirectory);
  const resolvedTarget = options.target ?? mrtConfig.defaultMrtTarget;
  // --wait needs a concrete target environment to poll.
  if (options.wait && !resolvedTarget) throw new Error("You must provide a target to deploy to when using --wait (via --target flag or .env MRT_TARGET)");
  // Credentials must be given as a pair or not at all.
  if (options.user && !options.key || !options.user && options.key) throw new Error("You must provide both --user and --key together, or neither");
  if (!fs.existsSync(options.projectDirectory)) throw new Error(`Project directory "${options.projectDirectory}" does not exist!`);
  const projectSlug = options.projectSlug ?? mrtConfig.defaultMrtProject;
  if (!projectSlug || projectSlug.trim() === "") throw new Error("Project slug could not be determined from CLI, .env, or package.json");
  const target = resolvedTarget;
  const buildDirectory = options.buildDirectory ?? getDefaultBuildDir(options.projectDirectory);
  if (!fs.existsSync(buildDirectory)) throw new Error(`Build directory "${buildDirectory}" does not exist!`);
  try {
    if (target) process.env.DEPLOY_TARGET = target;
    let credentials;
    // Explicit CLI credentials take precedence over the credentials file.
    if (options.user && options.key) credentials = {
      username: options.user,
      api_key: options.key
    };
    else credentials = await readCredentials(getCredentialsFile(options.cloudOrigin ?? DEFAULT_CLOUD_ORIGIN, options.credentialsFile));
    const config = buildMrtConfig(buildDirectory, options.projectDirectory);
    const message = options.message ?? getDefaultMessage(options.projectDirectory);
    info(`Creating bundle for project: ${projectSlug}`);
    if (options.projectSlug) debug("Using project slug from CLI argument");
    else if (process.env.MRT_PROJECT) debug("Using project slug from .env MRT_PROJECT");
    else debug("Using project slug from package.json name");
    if (target) {
      info(`Target environment: ${target}`);
      if (options.target) debug("Using target from CLI argument");
      else debug("Using target from .env");
    }
    debug("SSR shared files", config.ssrShared);
    debug("SSR only files", config.ssrOnly);
    const bundle = await createBundle({
      message,
      ssr_parameters: config.ssrParameters,
      ssr_only: config.ssrOnly,
      ssr_shared: config.ssrShared,
      buildDirectory,
      projectDirectory: options.projectDirectory,
      projectSlug
    });
    const client = new CloudAPIClient({
      credentials,
      origin: options.cloudOrigin ?? DEFAULT_CLOUD_ORIGIN
    });
    info(`Beginning upload to ${options.cloudOrigin ?? DEFAULT_CLOUD_ORIGIN}`);
    const data = await client.push(bundle, projectSlug, target);
    debug("API response", data);
    // Surface any server-side warnings to the user.
    (data.warnings || []).forEach(warn);
    if (options.wait && target) {
      success("Bundle uploaded - waiting for deployment to complete");
      await client.waitForDeploy(projectSlug, target);
      success("Deployment complete!");
    } else success("Bundle uploaded successfully!");
    if (data.url) info(`Bundle URL: ${data.url}`);
  } catch (err) {
    // Log the colorized error for CLI users, then rethrow for the caller.
    error(err.message || err?.toString() || "Unknown error");
    throw err;
  }
}
|
|
592
|
+
|
|
593
|
+
//#endregion
|
|
594
|
+
//#region src/commands/create-bundle.ts
|
|
595
|
+
// Promisified gzip used to compress the bundle tarball into bundle.tgz.
const gzip = promisify(zlib.gzip);
/**
 * Create a bundle and save it to disk without pushing to Managed Runtime
 *
 * Writes two artifacts to the output directory (default <project>/.bundle):
 * - bundle.tgz: the gzipped bundle tarball
 * - bundle.json: bundle metadata (without the base64 payload)
 * @throws when the project or build directory does not exist
 */
async function createBundleCommand(options) {
  if (!fs.existsSync(options.projectDirectory)) throw new Error(`Project directory "${options.projectDirectory}" does not exist!`);
  const mrtConfig = getMrtConfig(options.projectDirectory);
  const projectSlug = options.projectSlug ?? mrtConfig.defaultMrtProject;
  if (!projectSlug || projectSlug.trim() === "") throw new Error("Project slug could not be determined from CLI, .env, or package.json");
  const buildDirectory = options.buildDirectory ?? getDefaultBuildDir(options.projectDirectory);
  if (!fs.existsSync(buildDirectory)) throw new Error(`Build directory "${buildDirectory}" does not exist!`);
  const outputDirectory = options.outputDirectory ?? path.join(options.projectDirectory, ".bundle");
  await fs.ensureDir(outputDirectory);
  const message = options.message ?? getDefaultMessage(options.projectDirectory);
  const config = buildMrtConfig(buildDirectory, options.projectDirectory);
  info(`Creating bundle for project: ${projectSlug}`);
  info(`Build directory: ${buildDirectory}`);
  info(`Output directory: ${outputDirectory}`);
  const bundle = await createBundle({
    message,
    ssr_parameters: config.ssrParameters,
    ssr_only: config.ssrOnly,
    ssr_shared: config.ssrShared,
    buildDirectory,
    projectDirectory: options.projectDirectory,
    projectSlug
  });
  const bundleTgzPath = path.join(outputDirectory, "bundle.tgz");
  const bundleJsonPath = path.join(outputDirectory, "bundle.json");
  // The bundle payload is base64-encoded tar; decode before gzipping.
  const bundleData = Buffer.from(bundle.data, "base64");
  const compressedData = await gzip(bundleData);
  await fs.writeFile(bundleTgzPath, compressedData);
  // Metadata mirrors the upload payload minus the (large) data field.
  const bundleMetadata = {
    message: bundle.message,
    encoding: bundle.encoding,
    ssr_parameters: bundle.ssr_parameters,
    ssr_only: bundle.ssr_only,
    ssr_shared: bundle.ssr_shared,
    bundle_metadata: bundle.bundle_metadata,
    data_size: bundleData.length
  };
  await fs.writeJson(bundleJsonPath, bundleMetadata, { spaces: 2 });
  success(`Bundle created successfully!`);
  info(`Bundle tgz file: ${bundleTgzPath}`);
  info(`Bundle metadata: ${bundleJsonPath}`);
  info(`Uncompressed size: ${(bundleData.length / 1024 / 1024).toFixed(2)} MB`);
  info(`Compressed size: ${(compressedData.length / 1024 / 1024).toFixed(2)} MB`);
}
|
|
643
|
+
|
|
644
|
+
//#endregion
|
|
645
|
+
//#region src/server/ts-import.ts
|
|
646
|
+
/**
 * Parse TypeScript path mappings from a tsconfig.json file and convert them
 * into the alias format jiti expects.
 *
 * Only the first target of each mapping is used; trailing "/*" wildcards are
 * normalized to "/" and targets are resolved against baseUrl (default ".").
 *
 * @param tsconfigPath - Path to tsconfig.json
 * @param projectDirectory - Project root directory for resolving relative paths
 * @returns Record of alias mappings for jiti (empty when the file is missing or unparseable)
 *
 * @example
 * // tsconfig.json: { "compilerOptions": { "paths": { "@/*": ["./src/*"] } } }
 * // Returns: { "@/": "/absolute/path/to/src" }
 */
function parseTsconfigPaths(tsconfigPath, projectDirectory) {
	const aliases = {};
	if (!existsSync(tsconfigPath)) return aliases;
	try {
		const parsed = JSON.parse(readFileSync(tsconfigPath, "utf-8"));
		const pathMappings = parsed.compilerOptions?.paths;
		const baseUrl = parsed.compilerOptions?.baseUrl || ".";
		if (pathMappings) {
			for (const [pattern, targets] of Object.entries(pathMappings)) {
				if (!targets || targets.length === 0) continue;
				const [firstTarget] = targets;
				const prefix = pattern.replace(/\/\*$/, "/");
				const targetPath = firstTarget.replace(/\/\*$/, "/").replace(/^\.\//, "");
				aliases[prefix] = resolve(projectDirectory, baseUrl, targetPath);
			}
		}
	} catch {
		// Malformed tsconfig (e.g. JSONC comments / trailing commas JSON.parse
		// cannot handle): fall back to no aliases rather than failing the import.
	}
	return aliases;
}
|
|
674
|
+
/**
 * Import a TypeScript file using jiti with proper path alias resolution.
 * This is a cross-platform alternative to tsx that works on Windows.
 *
 * @param filePath - Absolute path to the TypeScript file to import
 * @param options - Import options: projectDirectory (required) and an optional
 *   tsconfigPath (defaults to `<projectDirectory>/tsconfig.json`)
 * @returns The imported module
 */
async function importTypescript(filePath, options) {
	const { projectDirectory, tsconfigPath = resolve(projectDirectory, "tsconfig.json") } = options;
	// jiti is loaded lazily so the cost is only paid when TS imports are needed.
	const { createJiti } = await import("jiti");
	const jiti = createJiti(import.meta.url, {
		fsCache: false,
		interopDefault: true,
		alias: parseTsconfigPaths(tsconfigPath, projectDirectory)
	});
	return jiti.import(filePath);
}
|
|
692
|
+
|
|
693
|
+
//#endregion
|
|
694
|
+
//#region src/server/config.ts
|
|
695
|
+
/**
 * Load the commerce API configuration from PUBLIC__app__commerce__api__*
 * environment variables.
 *
 * This is a temporary function before we move the config implementation from
 * template-retail-rsc-app to the SDK.
 *
 * TODO: Remove this function after we move the config implementation from
 * template-retail-rsc-app to the SDK.
 *
 * @returns Config object shaped as { commerce: { api: { shortCode, organizationId, clientId, siteId, proxy } } }
 * @throws {Error} If any of the required variables (shortCode, organizationId,
 *   clientId, siteId) is missing or empty. `proxy` falls back to "/mobify/proxy/api".
 */
function loadConfigFromEnv() {
	const ENV_PREFIX = "PUBLIC__app__commerce__api__";
	// Read a required variable, throwing the same message the CLI has always used.
	const readRequired = (key) => {
		const value = process.env[ENV_PREFIX + key];
		if (!value) throw new Error(`Missing ${ENV_PREFIX}${key} environment variable.\nPlease set it in your .env file or environment.`);
		return value;
	};
	const shortCode = readRequired("shortCode");
	const organizationId = readRequired("organizationId");
	const clientId = readRequired("clientId");
	const siteId = readRequired("siteId");
	const proxy = process.env[ENV_PREFIX + "proxy"] || "/mobify/proxy/api";
	return { commerce: { api: {
		shortCode,
		organizationId,
		clientId,
		siteId,
		proxy
	} } };
}
|
|
721
|
+
/**
 * Load storefront-next project configuration from config.server.ts.
 * Requires projectDirectory to be provided.
 *
 * @param projectDirectory - Project directory to load config.server.ts from
 * @returns Config object shaped as { commerce: { api: { ... } } }
 * @throws {Error} If config.server.ts is not found, lacks app.commerce.api, or
 *   is missing any required field (shortCode, organizationId, clientId, siteId)
 */
async function loadProjectConfig(projectDirectory) {
	const configPath = resolve(projectDirectory, "config.server.ts");
	const tsconfigPath = resolve(projectDirectory, "tsconfig.json");
	if (!existsSync(configPath)) throw new Error(`config.server.ts not found at ${configPath}.\nPlease ensure config.server.ts exists in your project root.`);
	const loadedModule = await importTypescript(configPath, {
		projectDirectory,
		tsconfigPath
	});
	const config = loadedModule.default;
	if (!config?.app?.commerce?.api) throw new Error("Invalid config.server.ts: missing app.commerce.api configuration.\nPlease ensure your config.server.ts has the commerce API configuration.");
	const api = config.app.commerce.api;
	// Each required field is validated individually so the error names the
	// first missing one (same messages and order as before).
	for (const field of ["shortCode", "organizationId", "clientId", "siteId"]) {
		if (!api[field]) throw new Error(`Missing ${field} in config.server.ts commerce.api configuration`);
	}
	return { commerce: { api: {
		shortCode: api.shortCode,
		organizationId: api.organizationId,
		clientId: api.clientId,
		siteId: api.siteId,
		proxy: api.proxy || "/mobify/proxy/api"
	} } };
}
|
|
750
|
+
|
|
751
|
+
//#endregion
|
|
752
|
+
//#region src/utils/paths.ts
|
|
753
|
+
/**
|
|
754
|
+
* Copyright 2026 Salesforce, Inc.
|
|
755
|
+
*
|
|
756
|
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
757
|
+
* you may not use this file except in compliance with the License.
|
|
758
|
+
* You may obtain a copy of the License at
|
|
759
|
+
*
|
|
760
|
+
* http://www.apache.org/licenses/LICENSE-2.0
|
|
761
|
+
*
|
|
762
|
+
* Unless required by applicable law or agreed to in writing, software
|
|
763
|
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
764
|
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
765
|
+
* See the License for the specific language governing permissions and
|
|
766
|
+
* limitations under the License.
|
|
767
|
+
*/
|
|
768
|
+
/**
 * Get the Commerce Cloud API base URL for a given tenant short code.
 *
 * @param shortCode - Commerce Cloud tenant short code
 * @returns URL of the form https://<shortCode>.api.commercecloud.salesforce.com
 */
function getCommerceCloudApiUrl(shortCode) {
	const API_DOMAIN = "api.commercecloud.salesforce.com";
	return `https://${shortCode}.${API_DOMAIN}`;
}
|
|
774
|
+
/**
 * Get the URL path under which a bundle's static client assets are served.
 *
 * @param bundleId - Bundle identifier (e.g. "local" or an MRT bundle id)
 * @returns Path of the form /mobify/bundle/<bundleId>/client/ (trailing slash included)
 */
function getBundlePath(bundleId) {
	const BUNDLE_ROOT = "/mobify/bundle";
	return `${BUNDLE_ROOT}/${bundleId}/client/`;
}
|
|
780
|
+
|
|
781
|
+
//#endregion
|
|
782
|
+
//#region src/server/middleware/proxy.ts
|
|
783
|
+
/**
 * Create proxy middleware for the Commerce Cloud API.
 * Proxies requests from the configured proxy path (e.g. /mobify/proxy/api)
 * to the tenant's Commerce Cloud API host.
 *
 * @param config - Config containing commerce.api.shortCode
 * @returns An http-proxy-middleware instance with changeOrigin enabled
 */
function createCommerceProxyMiddleware(config) {
	const { shortCode } = config.commerce.api;
	return createProxyMiddleware({
		target: getCommerceCloudApiUrl(shortCode),
		changeOrigin: true
	});
}
|
|
793
|
+
|
|
794
|
+
//#endregion
|
|
795
|
+
//#region src/server/middleware/static.ts
|
|
796
|
+
/**
 * Create static file serving middleware for client assets.
 * Serves files from <projectDirectory>/build/client at /mobify/bundle/{bundleId}/client/.
 *
 * Responses get a one-year immutable Cache-Control header plus a marker header
 * (x-local-static-cache-control) identifying locally served static assets.
 *
 * @param bundleId - Bundle id used to derive the public bundle path (for logging)
 * @param projectDirectory - Project root containing the build output
 * @returns Express static middleware
 */
function createStaticMiddleware(bundleId, projectDirectory) {
	const bundlePath = getBundlePath(bundleId);
	const clientBuildDir = path.join(projectDirectory, "build", "client");
	info(`Serving static assets from ${clientBuildDir} at ${bundlePath}`);
	const setHeaders = (res) => {
		res.setHeader("Cache-Control", "public, max-age=31536000, immutable");
		res.setHeader("x-local-static-cache-control", "1");
	};
	return express.static(clientBuildDir, { setHeaders });
}
|
|
809
|
+
|
|
810
|
+
//#endregion
|
|
811
|
+
//#region src/server/middleware/compression.ts
|
|
812
|
+
/**
 * Parse and validate the COMPRESSION_LEVEL environment variable.
 *
 * Accepts integers 0-9; anything else (including blank values) falls back to
 * zlib's Z_DEFAULT_COMPRESSION, with a warning for values that were set but
 * invalid.
 *
 * @returns Valid compression level (0-9) or the zlib default compression level
 */
function getCompressionLevel() {
	const fallback = zlib$1.constants.Z_DEFAULT_COMPRESSION;
	const raw = process.env.COMPRESSION_LEVEL;
	// Unset or blank: silently use the default.
	if (raw == null || raw.trim() === "") return fallback;
	const parsed = Number(raw);
	const isValidLevel = Number.isInteger(parsed) && parsed >= 0 && parsed <= 9;
	if (isValidLevel) return parsed;
	warn(`[compression] Invalid COMPRESSION_LEVEL="${raw}". Using default (${fallback}).`);
	return fallback;
}
|
|
827
|
+
/**
 * Create compression middleware for gzip/brotli compression.
 * Used in preview mode to optimize response sizes.
 *
 * Requests carrying an `x-no-compression` header bypass compression; all
 * others defer to the compression library's default filter.
 *
 * @returns Express compression middleware configured with getCompressionLevel()
 */
function createCompressionMiddleware() {
	const shouldCompress = (req, res) => {
		if (req.headers["x-no-compression"]) return false;
		return compression.filter(req, res);
	};
	return compression({
		filter: shouldCompress,
		level: getCompressionLevel()
	});
}
|
|
840
|
+
|
|
841
|
+
//#endregion
|
|
842
|
+
//#region src/server/middleware/logging.ts
|
|
843
|
+
/**
 * Patterns for URLs to skip logging (static assets and Vite internals).
 * Matched with minimatch (dot: true) against req.url in the logging middleware.
 */
const SKIP_PATTERNS = [
	// Vite dev-server internal endpoints
	"/@vite/**",
	"/@id/**",
	"/@fs/**",
	// React Router dev endpoints
	"/@react-router/**",
	// Source files and dependencies served directly during development
	"/src/**",
	"/node_modules/**",
	// Static asset extensions and their source maps
	"**/*.js",
	"**/*.css",
	"**/*.ts",
	"**/*.tsx",
	"**/*.js.map",
	"**/*.css.map"
];
|
|
860
|
+
/**
 * Create request logging middleware.
 * Used in dev and preview modes for request visibility.
 *
 * Registers two custom morgan tokens (status-colored, method-colored) that
 * colorize output with chalk, and skips any URL matching SKIP_PATTERNS.
 *
 * @returns Morgan middleware producing lines like "[GET] /path - 200 (12ms)"
 */
function createLoggingMiddleware() {
	// Status code colored by class: 5xx red, 4xx yellow, 3xx cyan, otherwise green.
	morgan.token("status-colored", (req, res) => {
		const status = res.statusCode;
		const colorize = status >= 500 ? chalk.red : status >= 400 ? chalk.yellow : status >= 300 ? chalk.cyan : chalk.green;
		return colorize(String(status));
	});
	// HTTP method colored per verb; unknown verbs render white.
	morgan.token("method-colored", (req) => {
		const method = req.method;
		const methodColors = {
			GET: chalk.green,
			POST: chalk.blue,
			PUT: chalk.yellow,
			DELETE: chalk.red,
			PATCH: chalk.magenta
		};
		const colorize = (method && methodColors[method]) || chalk.white;
		return colorize(method);
	});
	const formatLine = (tokens, req, res) => [
		chalk.gray("["),
		tokens["method-colored"](req, res),
		chalk.gray("]"),
		tokens.url(req, res),
		"-",
		tokens["status-colored"](req, res),
		chalk.gray(`(${tokens["response-time"](req, res)}ms)`)
	].join(" ");
	const skip = (req) => SKIP_PATTERNS.some((pattern) => minimatch(req.url, pattern, { dot: true }));
	return morgan(formatLine, { skip });
}
|
|
898
|
+
|
|
899
|
+
//#endregion
|
|
900
|
+
//#region src/server/middleware/host-header.ts
|
|
901
|
+
/**
 * Normalizes the X-Forwarded-Host header to support React Router's CSRF
 * validation features.
 *
 * NOTE: This middleware performs header manipulation as a temporary, internal
 * solution for MRT/Lambda environments. It may be updated or removed if React
 * Router introduces a first-class configuration for validating against
 * forwarded headers.
 *
 * React Router v7.12+ uses the X-Forwarded-Host header (preferring it over
 * Host) to validate request origins for security. In Managed Runtime (MRT)
 * with a vanity domain, the eCDN automatically sets X-Forwarded-Host to the
 * vanity domain; React Router prioritizes the first entry when the header
 * contains multiple comma-separated values.
 *
 * This middleware only fills in X-Forwarded-Host when it is absent (e.g.
 * local development), falling back to the EXTERNAL_DOMAIN_NAME environment
 * variable. The internal 'Host' header is intentionally left untouched, as it
 * is required for environment-specific routing logic (e.g. Hybrid Proxy).
 *
 * Priority order:
 * 1. X-Forwarded-Host: automatically set by eCDN for vanity domains.
 * 2. EXTERNAL_DOMAIN_NAME: fallback public domain when no forwarded headers
 *    are present (e.g. local development).
 */
function createHostHeaderMiddleware() {
	return (req, _res, next) => {
		const forwardedHost = req.get("x-forwarded-host");
		const fallbackHost = process.env.EXTERNAL_DOMAIN_NAME;
		if (!forwardedHost && fallbackHost) {
			req.headers["x-forwarded-host"] = fallbackHost;
		}
		next();
	};
}
|
|
931
|
+
|
|
932
|
+
//#endregion
|
|
933
|
+
//#region src/server/utils.ts
|
|
934
|
+
/**
 * Patch a React Router build to rewrite asset URLs with the correct bundle path.
 * This is needed because the build output uses /assets/ but previews serve from
 * /mobify/bundle/{BUNDLE_ID}/client/assets/.
 *
 * The rewrite is done by round-tripping the assets manifest through JSON so
 * every occurrence of "/assets/ in string values is replaced in one pass.
 *
 * @param build - React Router server build object
 * @param bundleId - Bundle id used to derive the public bundle path
 * @returns A shallow copy of the build with publicPath and assets replaced
 */
function patchReactRouterBuild(build, bundleId) {
	const bundlePath = getBundlePath(bundleId);
	const rewrittenAssets = JSON.parse(
		JSON.stringify(build.assets).replace(/"\/assets\//g, `"${bundlePath}assets/`)
	);
	return Object.assign({}, build, {
		publicPath: bundlePath,
		assets: rewrittenAssets
	});
}
|
|
947
|
+
|
|
948
|
+
//#endregion
|
|
949
|
+
//#region src/server/modes.ts
|
|
950
|
+
/**
 * Default feature configuration for each server mode.
 * These values supply the defaults destructured in createServer; each flag can
 * be overridden per call via the corresponding createServer option.
 */
const ServerModeFeatureMap = {
	// Development: proxy + logging only; static serving, compression, and
	// asset-URL patching are off (Vite middleware is attached in this mode).
	development: {
		enableProxy: true,
		enableStaticServing: false,
		enableCompression: false,
		enableLogging: true,
		enableAssetUrlPatching: false
	},
	// Preview: everything on — serves the built client assets locally.
	preview: {
		enableProxy: true,
		enableStaticServing: true,
		enableCompression: true,
		enableLogging: true,
		enableAssetUrlPatching: true
	},
	// Production: no local proxy or static serving (presumably handled by the
	// hosting environment — confirm); compression, logging, and patching stay on.
	production: {
		enableProxy: false,
		enableStaticServing: false,
		enableCompression: true,
		enableLogging: true,
		enableAssetUrlPatching: true
	}
};
|
|
976
|
+
|
|
977
|
+
//#endregion
|
|
978
|
+
//#region src/server/index.ts
|
|
979
|
+
/**
 * Create a unified Express server for development, preview, or production mode.
 *
 * Feature flags default to the mode's entry in ServerModeFeatureMap and can be
 * overridden individually via options. Middleware registration order matters:
 * logging -> compression -> bundle static assets -> project middleware
 * registry -> Vite middleware (dev only) -> Commerce API proxy -> host-header
 * normalization -> catch-all SSR handler.
 *
 * @param options - mode, projectDirectory, config, vite (required in dev),
 *   build (required in preview/production), streaming flag, and optional
 *   enable* feature overrides.
 * @returns The configured Express application (not yet listening).
 * @throws {Error} When development mode lacks a Vite instance, or
 *   preview/production lacks a React Router server build.
 */
async function createServer$1(options) {
	const { mode, projectDirectory = process.cwd(), config: providedConfig, vite, build, streaming = false, enableProxy = ServerModeFeatureMap[mode].enableProxy, enableStaticServing = ServerModeFeatureMap[mode].enableStaticServing, enableCompression = ServerModeFeatureMap[mode].enableCompression, enableLogging = ServerModeFeatureMap[mode].enableLogging, enableAssetUrlPatching = ServerModeFeatureMap[mode].enableAssetUrlPatching } = options;
	if (mode === "development" && !vite) throw new Error("Vite dev server instance is required for development mode");
	if ((mode === "preview" || mode === "production") && !build) throw new Error("React Router server build is required for preview/production mode");
	// Fall back to env-var based config when the caller does not provide one.
	const config = providedConfig ?? loadConfigFromEnv();
	const bundleId = process.env.BUNDLE_ID ?? "local";
	const app = express();
	app.disable("x-powered-by");
	if (enableLogging) app.use(createLoggingMiddleware());
	// Compression is skipped when streaming is enabled — presumably to avoid
	// interfering with streamed responses (confirm).
	if (enableCompression && !streaming) app.use(createCompressionMiddleware());
	// Serve built client assets at the bundle path (preview mode).
	if (enableStaticServing && build) {
		const bundlePath = getBundlePath(bundleId);
		app.use(bundlePath, createStaticMiddleware(bundleId, projectDirectory));
	}
	// Optionally register project-defined Express middlewares exported as
	// `customMiddlewares` from src/server/middleware-registry.ts.
	const middlewareRegistryPath = resolve(projectDirectory, "src/server/middleware-registry.ts");
	if (existsSync(middlewareRegistryPath)) {
		const registry = await importTypescript(middlewareRegistryPath, { projectDirectory });
		if (registry.customMiddlewares && Array.isArray(registry.customMiddlewares)) registry.customMiddlewares.forEach((middleware) => {
			app.use(middleware);
		});
	}
	if (mode === "development" && vite) app.use(vite.middlewares);
	if (enableProxy) app.use(config.commerce.api.proxy, createCommerceProxyMiddleware(config));
	app.use(createHostHeaderMiddleware());
	// Catch-all: every remaining request goes through server-side rendering.
	app.all("*", await createSSRHandler(mode, bundleId, vite, build, enableAssetUrlPatching));
	return app;
}
|
|
1009
|
+
/**
 * Create the SSR request handler based on mode.
 *
 * - development (with vite): returns an Express handler that loads the server
 *   build through Vite's SSR module runner on each request and delegates to
 *   React Router's request handler; errors get their stack traces remapped by
 *   Vite before propagating.
 * - preview/production (with build): returns a single React Router request
 *   handler built once, optionally with asset URLs patched to the bundle path.
 *
 * @param mode - "development", "preview", or "production".
 * @param bundleId - Bundle id used when patching asset URLs.
 * @param vite - Vite dev server instance (development mode only).
 * @param build - React Router server build (preview/production).
 * @param enableAssetUrlPatching - When true, rewrite /assets/ URLs via patchReactRouterBuild.
 * @throws {Error} When neither a usable Vite instance nor a build is provided.
 */
async function createSSRHandler(mode, bundleId, vite, build, enableAssetUrlPatching) {
	if (mode === "development" && vite) {
		const { isRunnableDevEnvironment } = await import("vite");
		return async (req, res, next) => {
			try {
				const ssrEnvironment = vite.environments.ssr;
				// Guard: the SSR environment must be runnable (requires the
				// storefront-next-dev plugin and preset).
				if (!isRunnableDevEnvironment(ssrEnvironment)) {
					next(/* @__PURE__ */ new Error("SSR environment is not runnable. Please ensure:\n 1. \"@salesforce/storefront-next-dev\" plugin is added to vite.config.ts\n 2. React Router config uses the Storefront Next preset"));
					return;
				}
				// The server build is re-imported through the module runner on
				// every request in development.
				await createRequestHandler({
					build: await ssrEnvironment.runner.import("virtual:react-router/server-build"),
					mode: process.env.NODE_ENV
				})(req, res, next);
			} catch (error$1) {
				// Remap the error's stack trace to original source files.
				vite.ssrFixStacktrace(error$1);
				next(error$1);
			}
		};
	} else if (build) {
		let patchedBuild = build;
		if (enableAssetUrlPatching) patchedBuild = patchReactRouterBuild(build, bundleId);
		return createRequestHandler({
			build: patchedBuild,
			mode: process.env.NODE_ENV
		});
	} else throw new Error("Invalid server configuration: no vite or build provided");
}
|
|
1040
|
+
|
|
1041
|
+
//#endregion
|
|
1042
|
+
//#region src/commands/dev.ts
|
|
1043
|
+
/**
 * Start the development server with Vite in middleware mode.
 *
 * Sets NODE_ENV/EXTERNAL_DOMAIN_NAME defaults (existing values win), loads the
 * project's .env file and config.server.ts, creates a Vite dev server in
 * middleware mode, wraps it in the unified Express server, and listens on the
 * chosen port. SIGTERM/SIGINT close the HTTP server and then the Vite instance.
 *
 * @param {object} [options] - Optional settings.
 * @param {string} [options.projectDirectory] - Project root (defaults to cwd).
 * @param {number} [options.port] - Port to listen on (defaults to 5173).
 */
async function dev(options = {}) {
	const startTime = Date.now();
	const projectDir = path.resolve(options.projectDirectory || process.cwd());
	const port = options.port || 5173;
	// Defaults only apply when the variables are not already set.
	process.env.NODE_ENV = process.env.NODE_ENV ?? "development";
	process.env.EXTERNAL_DOMAIN_NAME = process.env.EXTERNAL_DOMAIN_NAME ?? `localhost:${port}`;
	loadEnvFile(projectDir);
	const config = await loadProjectConfig(projectDir);
	// Vite runs in middleware mode so its middlewares can be mounted on Express.
	const vite = await createServer({
		root: projectDir,
		server: { middlewareMode: true }
	});
	const server = (await createServer$1({
		mode: "development",
		projectDirectory: projectDir,
		config,
		port,
		vite
	})).listen(port, () => {
		printServerInfo("development", port, startTime, projectDir);
		printServerConfig({
			mode: "development",
			port,
			enableProxy: true,
			enableStaticServing: false,
			enableCompression: false,
			proxyPath: config.commerce.api.proxy,
			proxyTarget: getCommerceCloudApiUrl(config.commerce.api.shortCode),
			shortCode: config.commerce.api.shortCode,
			organizationId: config.commerce.api.organizationId,
			clientId: config.commerce.api.clientId,
			siteId: config.commerce.api.siteId
		});
	});
	// Graceful shutdown: close the HTTP server first, then the Vite dev server.
	["SIGTERM", "SIGINT"].forEach((signal) => {
		process.once(signal, () => {
			printShutdownMessage();
			server?.close(() => {
				vite.close();
				process.exit(0);
			});
		});
	});
}
|
|
1090
|
+
|
|
1091
|
+
//#endregion
|
|
1092
|
+
//#region src/commands/preview.ts
|
|
1093
|
+
/**
|
|
1094
|
+
* Start the preview server with production build
|
|
1095
|
+
*/
|
|
1096
|
+
async function preview(options = {}) {
|
|
1097
|
+
const startTime = Date.now();
|
|
1098
|
+
const projectDir = path.resolve(options.projectDirectory || process.cwd());
|
|
1099
|
+
const port = options.port || 3e3;
|
|
1100
|
+
process.env.NODE_ENV = process.env.NODE_ENV ?? "production";
|
|
1101
|
+
process.env.EXTERNAL_DOMAIN_NAME = process.env.EXTERNAL_DOMAIN_NAME ?? `localhost:${port}`;
|
|
1102
|
+
loadEnvFile(projectDir);
|
|
1103
|
+
const buildPath = path.join(projectDir, "build", "server", "index.js");
|
|
1104
|
+
if (!fs$1.existsSync(buildPath)) {
|
|
1105
|
+
warn("Production build not found. Building project...");
|
|
1106
|
+
info("Running: pnpm build");
|
|
1107
|
+
try {
|
|
1108
|
+
execSync("pnpm build", {
|
|
1109
|
+
cwd: projectDir,
|
|
1110
|
+
stdio: "inherit"
|
|
1111
|
+
});
|
|
1112
|
+
info("Build completed successfully");
|
|
1113
|
+
} catch (err) {
|
|
1114
|
+
error(`Build failed: ${err instanceof Error ? err.message : String(err)}`);
|
|
1115
|
+
process.exit(1);
|
|
1116
|
+
}
|
|
1117
|
+
if (!fs$1.existsSync(buildPath)) {
|
|
1118
|
+
error(`Build still not found at ${buildPath} after running build command`);
|
|
1119
|
+
process.exit(1);
|
|
1120
|
+
}
|
|
1121
|
+
}
|
|
1122
|
+
info(`Loading production build from ${buildPath}`);
|
|
1123
|
+
const build = (await import(pathToFileURL(buildPath).href)).default;
|
|
1124
|
+
const config = await loadProjectConfig(projectDir);
|
|
1125
|
+
const server = (await createServer$1({
|
|
1126
|
+
mode: "preview",
|
|
1127
|
+
projectDirectory: projectDir,
|
|
1128
|
+
config,
|
|
1129
|
+
port,
|
|
1130
|
+
build
|
|
1131
|
+
})).listen(port, () => {
|
|
1132
|
+
printServerInfo("preview", port, startTime, projectDir);
|
|
1133
|
+
printServerConfig({
|
|
1134
|
+
mode: "preview",
|
|
1135
|
+
port,
|
|
1136
|
+
enableProxy: true,
|
|
1137
|
+
enableStaticServing: true,
|
|
1138
|
+
enableCompression: true,
|
|
1139
|
+
proxyPath: config.commerce.api.proxy,
|
|
1140
|
+
proxyTarget: getCommerceCloudApiUrl(config.commerce.api.shortCode),
|
|
1141
|
+
shortCode: config.commerce.api.shortCode,
|
|
1142
|
+
organizationId: config.commerce.api.organizationId,
|
|
1143
|
+
clientId: config.commerce.api.clientId,
|
|
1144
|
+
siteId: config.commerce.api.siteId
|
|
1145
|
+
});
|
|
1146
|
+
});
|
|
1147
|
+
["SIGTERM", "SIGINT"].forEach((signal) => {
|
|
1148
|
+
process.once(signal, () => {
|
|
1149
|
+
printShutdownMessage();
|
|
1150
|
+
server?.close(() => {
|
|
1151
|
+
process.exit(0);
|
|
1152
|
+
});
|
|
1153
|
+
});
|
|
1154
|
+
});
|
|
1155
|
+
}
|
|
1156
|
+
|
|
1157
|
+
//#endregion
|
|
1158
|
+
//#region src/extensibility/create-instructions.ts
|
|
1159
|
+
// Directory names excluded when recursively scanning a project for extension
// marker comments (see findMarkedFiles).
const SKIP_DIRS = [
	"node_modules",
	"dist",
	"build"
];
// Handlebars template filenames, resolved against the caller-provided template
// directory when generating MDC instruction files.
const INSTALL_INSTRUCTIONS_TEMPLATE = "install-instructions.mdc.hbs";
const UNINSTALL_INSTRUCTIONS_TEMPLATE = "uninstall-instructions.mdc.hbs";
|
|
1166
|
+
/**
 * Build the context for the instructions template.
 *
 * Reads the extension config JSON, validates the requested extension and all
 * caller-supplied files, scans the project for marker comments, and assembles
 * the data consumed by the Handlebars templates.
 *
 * @param {string} projectRoot - Project root directory to scan.
 * @param {string} markerValue - Extension key used in marker comments and config.
 * @param {string} [pwaRepo] - Git repository URL embedded in the instructions.
 * @param {string} [branch] - Git branch embedded in the instructions.
 * @param {string[]} [filesToCopy] - Extra files/directories (relative to projectRoot) to copy. Not mutated.
 * @param {string} [extensionConfigPath] - Path to the extension config JSON file.
 * @returns Template context: extension name, repo/branch, marker value, merge/new file lists, copy entries, dependencies.
 * @throws {Error} If the extension key is missing from the config or a listed file does not exist.
 */
function getContext(projectRoot, markerValue, pwaRepo = "https://github.com/SalesforceCommerceCloud/storefront-next-template.git", branch = "main", filesToCopy = [], extensionConfigPath = "") {
	const extensionConfig = JSON.parse(fs$1.readFileSync(extensionConfigPath, "utf8"));
	if (!extensionConfig.extensions[markerValue]) throw new Error(`Extension ${markerValue} not found in extension config`);
	// Validate caller-supplied entries early, before the project scan.
	filesToCopy.forEach((file) => {
		const fullPath = path.join(projectRoot, file);
		if (!fs$1.existsSync(fullPath)) throw new Error(`File or directory ${fullPath} not found`);
	});
	const { mergeFiles, newFiles } = findMarkedFiles(projectRoot, markerValue);
	// Build a new array instead of pushing into the parameter: the previous
	// implementation mutated the caller-owned `filesToCopy` argument.
	const allFilesToCopy = [...filesToCopy, ...newFiles];
	const extensionMeta = extensionConfig.extensions[markerValue];
	// Resolve dependency keys to display names, falling back to the key itself
	// when a dependency has no config entry.
	const dependencies = (extensionMeta.dependencies || []).map((depKey) => ({
		key: depKey,
		name: extensionConfig.extensions[depKey]?.name || depKey
	}));
	return {
		extensionName: extensionMeta.name,
		pwaRepo,
		branch,
		markerValue,
		mergeFiles,
		newFiles,
		copy: getFilesToCopyContext(projectRoot, allFilesToCopy),
		dependencies
	};
}
|
|
1194
|
+
/**
 * Get the template context entries for the files to copy.
 *
 * Validates that each entry exists under projectRoot, then maps every file to
 * a { src, dest, isDirectory } record (src and dest are identical).
 *
 * @param {string} projectRoot - Project root directory.
 * @param {string[]} filesToCopy - Relative paths of files/directories to copy.
 * @throws {Error} If any listed file or directory does not exist.
 */
const getFilesToCopyContext = (projectRoot, filesToCopy) => {
	for (const file of filesToCopy) {
		const fullPath = path.join(projectRoot, file);
		if (!fs$1.existsSync(fullPath)) throw new Error(`File or directory ${fullPath} not found`);
	}
	return filesToCopy.map((file) => {
		const isDirectory = fs$1.statSync(path.join(projectRoot, file)).isDirectory();
		return {
			src: file,
			dest: file,
			isDirectory
		};
	});
};
|
|
1208
|
+
/**
 * Find all the files that contain the marker value in the project folder.
 *
 * Recursively walks projectRoot (skipping SKIP_DIRS), inspecting .jsx/.tsx/
 * .ts/.js files. Files containing line or block markers become "merge" files;
 * files containing a file-level marker become "new" files. Results are logged
 * and returned as project-relative paths.
 *
 * @param {string} projectRoot - Directory to scan.
 * @param {string} markerValue - Extension key expected after each marker tag.
 * @returns {{mergeFiles: string[], newFiles: string[]}} Marked file lists.
 */
const findMarkedFiles = (projectRoot, markerValue) => {
	const SOURCE_EXTENSIONS = [
		".jsx",
		".tsx",
		".ts",
		".js"
	];
	const mergeFiles = [];
	const newFiles = [];
	// Note: markerValue is interpolated unescaped into the patterns — assumed
	// to be a plain identifier from the extension config (confirm).
	const markerPattern = (kind) => new RegExp(`@sfdc-extension-${kind}\\s+${markerValue}`);
	const lineRegex = markerPattern("line");
	const blockStartRegex = markerPattern("block-start");
	const blockEndRegex = markerPattern("block-end");
	const fileRegex = markerPattern("file");
	const walk = (dir) => {
		for (const entry of fs$1.readdirSync(dir, { withFileTypes: true })) {
			const fullPath = path.join(dir, entry.name);
			if (entry.isDirectory()) {
				if (!SKIP_DIRS.includes(entry.name)) walk(fullPath);
				continue;
			}
			if (!entry.isFile() || !SOURCE_EXTENSIONS.some((ext) => fullPath.endsWith(ext))) continue;
			const content = fs$1.readFileSync(fullPath, "utf8");
			const relativePath = path.relative(projectRoot, fullPath);
			// Line/block markers take precedence over a file-level marker.
			if (lineRegex.test(content) || blockStartRegex.test(content) || blockEndRegex.test(content)) mergeFiles.push(relativePath);
			else if (fileRegex.test(content)) newFiles.push(relativePath);
		}
	};
	walk(projectRoot);
	console.log(`Found ${mergeFiles.length} files to merge for marker value ${markerValue}:`);
	console.log(mergeFiles.join("\n"));
	console.log(`Found ${newFiles.length} files to add for marker value ${markerValue}:`);
	console.log(newFiles.join("\n"));
	return {
		mergeFiles,
		newFiles
	};
};
|
|
1248
|
+
/**
 * Generate the MDC instructions files (install + uninstall) based on user inputs.
 *
 * Builds the template context, ensures the output directory exists, and writes
 * one install and one uninstall .mdc file named after the extension.
 *
 * @param {string} projectRoot - Project root directory.
 * @param {string} markerValue - Extension key used in marker comments and config.
 * @param {string} outputDir - Output directory relative to projectRoot (defaults to "instructions").
 * @param {string} pwaRepo - Git repository URL embedded in the instructions.
 * @param {string} branch - Git branch embedded in the instructions.
 * @param {string[]} filesToCopy - Extra files/directories to copy.
 * @param {string} [extensionConfig] - Path to the extension config JSON file.
 * @param {string} [templateDir] - Directory containing the Handlebars templates.
 */
const generateInstructions = (projectRoot, markerValue, outputDir, pwaRepo, branch, filesToCopy, extensionConfig = "", templateDir = "") => {
	const context = getContext(projectRoot, markerValue, pwaRepo, branch, filesToCopy, extensionConfig);
	const instructionsDir = path.join(projectRoot, outputDir || "instructions");
	// recursive: true also creates missing parent directories (the previous
	// non-recursive mkdirSync failed for nested outputDir values) and is a
	// no-op when the directory already exists.
	if (!fs$1.existsSync(instructionsDir)) fs$1.mkdirSync(instructionsDir, { recursive: true });
	// Kebab-case file slug derived from the extension's display name.
	const slug = context.extensionName.toLowerCase().replace(/ /g, "-");
	genertaeAndWriteInstructions(path.join(templateDir, INSTALL_INSTRUCTIONS_TEMPLATE), context, path.join(instructionsDir, `install-${slug}.mdc`));
	genertaeAndWriteInstructions(path.join(templateDir, UNINSTALL_INSTRUCTIONS_TEMPLATE), context, path.join(instructionsDir, `uninstall-${slug}.mdc`));
};
|
|
1258
|
+
/**
 * Generate an MDC instructions file by rendering a Handlebars template with
 * the given context and writing the result to disk.
 *
 * NOTE: the function name contains a typo ("genertae"); it is kept as-is
 * because callers in this module reference it by this name.
 *
 * @param {string} templateFile - Path to the .hbs template.
 * @param {object} context - Template context (see getContext).
 * @param {string} outputFile - Destination path for the rendered .mdc file.
 */
const genertaeAndWriteInstructions = (templateFile, context, outputFile) => {
	const render = Handlebars.compile(fs$1.readFileSync(templateFile, "utf8"));
	fs$1.writeFileSync(outputFile, render(context), "utf8");
	console.log(`MDC instructions written to ${outputFile}`);
};
|
|
1267
|
+
|
|
1268
|
+
//#endregion
|
|
1269
|
+
//#region src/cartridge-services/react-router-config.ts
|
|
1270
|
+
// Cached result of the CLI availability probe (null = not yet checked).
let isCliAvailable = null;
/**
 * Check whether the `react-router` CLI can be executed from the project
 * directory (using the npm-run-path environment so locally installed binaries
 * are found). The result is cached for the lifetime of the process.
 *
 * @param {string} projectDirectory - Directory used as cwd for the probe.
 * @returns {boolean} True when `react-router --version` runs successfully.
 */
function checkReactRouterCli(projectDirectory) {
	if (isCliAvailable === null) {
		try {
			execSync$1("react-router --version", {
				cwd: projectDirectory,
				env: npmRunPathEnv(),
				stdio: "pipe"
			});
			isCliAvailable = true;
		} catch {
			// Any spawn/exit failure means the CLI is unusable.
			isCliAvailable = false;
		}
	}
	return isCliAvailable;
}
|
|
1285
|
+
/**
 * Get the fully resolved routes from React Router by invoking its CLI.
 * This ensures we get the exact same route resolution as React Router uses internally,
 * including all presets, file-system routes, and custom route configurations.
 *
 * The route JSON is captured via shell redirection into a temp file, which is
 * removed in all cases (best effort).
 *
 * @param projectDirectory - The project root directory
 * @returns Array of resolved route config entries
 * @throws {Error} If the CLI is unavailable, fails, or produces unparseable
 *   output; the underlying error is attached as `cause`.
 * @example
 * const routes = getReactRouterRoutes('/path/to/project');
 * // Returns the same structure as `react-router routes --json`
 */
function getReactRouterRoutes(projectDirectory) {
	if (!checkReactRouterCli(projectDirectory)) throw new Error("React Router CLI is not available. Please make sure @react-router/dev is installed and accessible.");
	// Unique temp file per invocation avoids collisions between concurrent runs.
	const tempFile = join(tmpdir(), `react-router-routes-${randomUUID()}.json`);
	try {
		execSync$1(`react-router routes --json > "${tempFile}"`, {
			cwd: projectDirectory,
			env: npmRunPathEnv(),
			encoding: "utf-8",
			stdio: [
				"pipe",
				"pipe",
				"pipe"
			]
		});
		return JSON.parse(readFileSync(tempFile, "utf-8"));
	} catch (error$1) {
		// Preserve the original error as `cause` so the CLI failure details
		// (stack, exit status) are not lost when rethrowing.
		throw new Error(`Failed to get routes from React Router CLI: ${error$1.message}`, { cause: error$1 });
	} finally {
		// Best-effort cleanup; ignore failures (e.g. the file was never created).
		try {
			if (existsSync(tempFile)) unlinkSync(tempFile);
		} catch {}
	}
}
|
|
1319
|
+
/**
 * Convert a file path to its corresponding route path using React Router's CLI.
 * This ensures we get the exact same route resolution as React Router uses internally.
 * @param filePath - Absolute path to the route file
 * @param projectRoot - The project root directory
 * @returns The route path (e.g. '/cart', '/product/:productId'), or '/unknown'
 *          with a warning when no resolved route matches the file
 * @example
 * const route = filePathToRoute('/path/to/project/src/routes/_app.cart.tsx', '/path/to/project');
 * // Returns: '/cart'
 */
function filePathToRoute(filePath, projectRoot) {
  // Normalize Windows separators so suffix matching works cross-platform.
  const normalizedTarget = filePath.replace(/\\/g, "/");
  for (const entry of flattenRoutes(getReactRouterRoutes(projectRoot))) {
    const entryFile = entry.file.replace(/\\/g, "/");
    const withoutDotSlash = entryFile.replace(/^\.\//, "");
    // Match the route's file (with and without a leading "./") as a suffix
    // of the absolute path, optionally anchored at a path separator.
    const suffixes = [
      entryFile,
      `/${entryFile}`,
      withoutDotSlash,
      `/${withoutDotSlash}`
    ];
    if (suffixes.some((suffix) => normalizedTarget.endsWith(suffix))) return entry.path;
  }
  console.warn(`Warning: Could not find route for file: ${filePath}`);
  return "/unknown";
}
|
|
1341
|
+
/**
 * Flatten a nested route tree into a flat array with computed paths.
 * Each route's full path is derived from its ancestors' paths.
 * @param routes - The nested route config entries
 * @param parentPath - The parent path prefix (used internally for recursion)
 * @returns Flat array of `{ id, path, file, index }` entries
 */
function flattenRoutes(routes, parentPath = "") {
  const flattened = [];
  for (const entry of routes) {
    let fullPath;
    if (entry.index) {
      // Index routes render at their parent's path.
      fullPath = parentPath || "/";
    } else if (entry.path) {
      const segment = entry.path.startsWith("/") ? entry.path : `/${entry.path}`;
      // Collapse any duplicated slashes introduced by the join.
      fullPath = parentPath ? `${parentPath}${segment}`.replace(/\/+/g, "/") : segment;
    } else {
      // Pathless (layout) routes inherit the parent path.
      fullPath = parentPath || "/";
    }
    if (entry.id) {
      flattened.push({
        id: entry.id,
        path: fullPath,
        file: entry.file,
        index: entry.index
      });
    }
    if (entry.children?.length) {
      // Pathless routes do not extend the prefix for their children.
      flattened.push(...flattenRoutes(entry.children, entry.path ? fullPath : parentPath));
    }
  }
  return flattened;
}
|
|
1370
|
+
|
|
1371
|
+
//#endregion
|
|
1372
|
+
//#region src/cartridge-services/generate-cartridge.ts
|
|
1373
|
+
// Directory names that are never descended into when scanning for decorated
// source files.
const SKIP_DIRECTORIES = [
  "build",
  "dist",
  "node_modules",
  ".git",
  ".next",
  "coverage"
];
// Component group used when a @Component decorator does not specify one.
const DEFAULT_COMPONENT_GROUP = "odyssey_base";
// Architecture type stamped on every generated cartridge definition.
const ARCH_TYPE_HEADLESS = "headless";
// Attribute types accepted by resolveAttributeType; an explicit decorator
// type outside this list is a fatal error.
const VALID_ATTRIBUTE_TYPES = [
  "string",
  "text",
  "markup",
  "integer",
  "boolean",
  "product",
  "category",
  "file",
  "page",
  "image",
  "url",
  "enum",
  "custom",
  "cms_record"
];
// Maps TypeScript type names (as written in source) to attribute types.
// Unmapped types fall back to "string" in resolveAttributeType.
const TYPE_MAPPING = {
  String: "string",
  string: "string",
  Number: "integer",
  number: "integer",
  Boolean: "boolean",
  boolean: "boolean",
  Date: "string",
  URL: "url",
  CMSRecord: "cms_record"
};
|
|
1410
|
+
/**
 * Resolve the attribute type for a field.
 * Precedence: explicit decorator type (validated) → mapped TypeScript type → "string".
 * Exits the process when the decorator declares an invalid type, so bad
 * metadata never reaches a generated cartridge.
 * @param decoratorType - Type explicitly set in the decorator, if any
 * @param tsMorphType - Type name inferred from the TypeScript property declaration
 * @param fieldName - Field name, used only in the error message
 * @returns One of VALID_ATTRIBUTE_TYPES
 */
function resolveAttributeType(decoratorType, tsMorphType, fieldName) {
  if (decoratorType) {
    if (!VALID_ATTRIBUTE_TYPES.includes(decoratorType)) {
      console.error(`Error: Invalid attribute type '${decoratorType}' for field '${fieldName || "unknown"}'. Valid types are: ${VALID_ATTRIBUTE_TYPES.join(", ")}`);
      process.exit(1);
    }
    return decoratorType;
  }
  // Fix: use an own-property check instead of the bare truthy lookup
  // `TYPE_MAPPING[tsMorphType]`. The bare lookup also hit inherited
  // Object.prototype members, so a field typed e.g. `toString` or
  // `constructor` returned a function instead of an attribute-type string.
  if (tsMorphType && Object.prototype.hasOwnProperty.call(TYPE_MAPPING, tsMorphType)) {
    return TYPE_MAPPING[tsMorphType];
  }
  return "string";
}
|
|
1421
|
+
/**
 * Turn a camelCase identifier into a human-readable title,
 * e.g. "productId" → "Product Id".
 * @param fieldName - camelCase identifier
 * @returns Space-separated, capitalized name
 */
function toHumanReadableName(fieldName) {
  // Insert a space before every uppercase letter, then capitalize the
  // first character and strip any leading space that was introduced.
  const spaced = fieldName.replace(/([A-Z])/g, " $1");
  const capitalized = spaced.replace(/^./, (first) => first.toUpperCase());
  return capitalized.trim();
}
|
|
1424
|
+
/**
 * Convert a space- or hyphen-separated name into a camelCase file name,
 * e.g. "hello-world test" → "helloWorldTest". Names without spaces or
 * hyphens are returned unchanged.
 * @param name - Raw name
 * @returns camelCase file name (without extension)
 */
function toCamelCaseFileName(name) {
  if (!/[\s-]/.test(name)) return name;
  const [head, ...tail] = name.split(/[\s-]+/);
  const camelTail = tail.map(
    (word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()
  );
  return [head.toLowerCase(), ...camelTail].join("");
}
|
|
1431
|
+
/**
 * Read a property's declared type text via ts-morph, keeping only the first
 * constituent of a union/intersection. Falls back to "string" when the
 * property has no type node or ts-morph throws.
 * @param property - ts-morph PropertyDeclaration (anything with getTypeNode())
 * @param _sourceFile - Unused; kept for signature compatibility
 * @returns The first declared type name, or "string"
 */
function getTypeFromTsMorph(property, _sourceFile) {
  let resolved = "string";
  try {
    const typeNode = property.getTypeNode();
    if (typeNode) {
      const [firstUnionPart] = typeNode.getText().split("|");
      const [firstIntersectionPart] = firstUnionPart.split("&");
      resolved = firstIntersectionPart.trim();
    }
  } catch {
    // Keep the "string" fallback on any ts-morph failure.
  }
  return resolved;
}
|
|
1438
|
+
/**
 * Convert a ts-morph expression node into a plain JavaScript value.
 * Literals become their values, object/array literals recurse, and any
 * other expression falls back to its source text.
 * @param expression - ts-morph expression node
 * @returns string | number | boolean | object | array
 */
function parseExpression(expression) {
  if (Node.isStringLiteral(expression) || Node.isNumericLiteral(expression)) {
    return expression.getLiteralValue();
  }
  if (Node.isTrueLiteral(expression)) return true;
  if (Node.isFalseLiteral(expression)) return false;
  if (Node.isObjectLiteralExpression(expression)) return parseNestedObject(expression);
  if (Node.isArrayLiteralExpression(expression)) return parseArrayLiteral(expression);
  // Non-literal expressions (identifiers, calls, …) keep their raw text.
  return expression.getText();
}
|
|
1447
|
+
/**
 * Convert a ts-morph object-literal node into a plain object, recursively
 * parsing each property assignment's initializer. Parse failures are logged
 * and whatever was parsed so far is returned.
 * @param objectLiteral - ts-morph ObjectLiteralExpression
 * @returns Plain object of parsed key/value pairs
 */
function parseNestedObject(objectLiteral) {
  const parsed = {};
  try {
    for (const member of objectLiteral.getProperties()) {
      // Only `key: value` assignments are supported (no spreads/shorthand).
      if (!Node.isPropertyAssignment(member)) continue;
      const key = member.getName();
      const initializer = member.getInitializer();
      if (initializer) parsed[key] = parseExpression(initializer);
    }
  } catch (error$1) {
    console.warn(`Warning: Could not parse nested object: ${error$1.message}`);
  }
  return parsed;
}
|
|
1462
|
+
/**
 * Convert a ts-morph array-literal node into a plain array, recursively
 * parsing each element. Failures are logged and the elements parsed so far
 * are returned.
 * @param arrayLiteral - ts-morph ArrayLiteralExpression
 * @returns Array of parsed values
 */
function parseArrayLiteral(arrayLiteral) {
  const values = [];
  try {
    for (const element of arrayLiteral.getElements()) {
      values.push(parseExpression(element));
    }
  } catch (error$1) {
    console.warn(`Warning: Could not parse array literal: ${error$1.message}`);
  }
  return values;
}
|
|
1472
|
+
/**
 * Parse the arguments of a decorator into a plain config object.
 *
 * Two call shapes are supported:
 *   - `@Decorator({ id: "x", name: "y" })`  → every key of the object is copied
 *   - `@Decorator("x", { name: "y" })`      → the first string becomes `id`,
 *     and the keys of the optional second object literal are copied on top
 *
 * @param decorator - ts-morph decorator node (anything exposing getArguments())
 * @returns Parsed key/value config; empty object when nothing can be parsed
 */
function parseDecoratorArgs(decorator) {
  const result = {};
  // Copy every `key: value` property assignment of an object literal into
  // `result`. Previously this loop was duplicated verbatim for both call
  // shapes; extracting it removes the duplication without changing behavior.
  const mergeObjectLiteral = (objectLiteral) => {
    for (const property of objectLiteral.getProperties()) {
      if (!Node.isPropertyAssignment(property)) continue;
      const name = property.getName();
      const initializer = property.getInitializer();
      if (initializer) result[name] = parseExpression(initializer);
    }
  };
  try {
    const args = decorator.getArguments();
    if (args.length === 0) return result;
    const firstArg = args[0];
    if (Node.isObjectLiteralExpression(firstArg)) {
      mergeObjectLiteral(firstArg);
    } else if (Node.isStringLiteral(firstArg)) {
      result.id = parseExpression(firstArg);
      if (args.length > 1 && Node.isObjectLiteralExpression(args[1])) {
        mergeObjectLiteral(args[1]);
      }
    }
    return result;
  } catch (error$1) {
    console.warn(`Warning: Could not parse decorator arguments: ${error$1.message}`);
    return result;
  }
}
|
|
1505
|
+
/**
 * Collect attribute definitions from every class property decorated with
 * `@AttributeDefinition` in the named class.
 * @param sourceFile - ts-morph SourceFile to inspect
 * @param className - Name of the class to read properties from
 * @returns Array of attribute definition objects (empty when the class is
 *          missing or extraction fails)
 */
function extractAttributesFromSource(sourceFile, className) {
  const extracted = [];
  try {
    const targetClass = sourceFile.getClass(className);
    if (!targetClass) return extracted;
    for (const member of targetClass.getProperties()) {
      const decorator = member.getDecorator("AttributeDefinition");
      if (!decorator) continue;
      const fieldName = member.getName();
      const config = parseDecoratorArgs(decorator);
      // A property without `?` is required unless the decorator overrides it.
      const requiredByDefault = !member.hasQuestionToken();
      const inferredType = config.type || getTypeFromTsMorph(member, sourceFile);
      const definition = {
        id: config.id || fieldName,
        name: config.name || toHumanReadableName(fieldName),
        type: resolveAttributeType(config.type, inferredType, fieldName),
        required: config.required !== undefined ? config.required : requiredByDefault,
        description: config.description || `Field: ${fieldName}`
      };
      if (config.values) definition.values = config.values;
      if (config.defaultValue !== undefined) definition.default_value = config.defaultValue;
      extracted.push(definition);
    }
  } catch (error$1) {
    console.warn(`Warning: Could not extract attributes from class ${className}: ${error$1.message}`);
  }
  return extracted;
}
|
|
1534
|
+
/**
 * Collect region definitions from a class-level `@RegionDefinition([...])`
 * decorator on the named class. Each object literal in the decorator's array
 * argument becomes one region definition.
 * @param sourceFile - ts-morph SourceFile to inspect
 * @param className - Name of the class carrying the decorator
 * @returns Array of region definition objects (empty when absent or on error)
 */
function extractRegionDefinitionsFromSource(sourceFile, className) {
  const regionDefinitions = [];
  try {
    const targetClass = sourceFile.getClass(className);
    const decorator = targetClass ? targetClass.getDecorator("RegionDefinition") : null;
    const args = decorator ? decorator.getArguments() : [];
    const firstArg = args[0];
    if (!firstArg || !Node.isArrayLiteralExpression(firstArg)) return regionDefinitions;
    for (const element of firstArg.getElements()) {
      if (!Node.isObjectLiteralExpression(element)) continue;
      // Reuse parseDecoratorArgs by wrapping the element in a decorator-shaped
      // object whose single argument is this object literal.
      const regionConfig = parseDecoratorArgs({ getArguments: () => [element] });
      const definition = {
        id: regionConfig.id || "region",
        name: regionConfig.name || "Region"
      };
      if (regionConfig.componentTypes) definition.component_types = regionConfig.componentTypes;
      if (Array.isArray(regionConfig.componentTypeInclusions)) {
        definition.component_type_inclusions = regionConfig.componentTypeInclusions.map((typeId) => ({ type_id: typeId }));
      }
      if (Array.isArray(regionConfig.componentTypeExclusions)) {
        definition.component_type_exclusions = regionConfig.componentTypeExclusions.map((typeId) => ({ type_id: typeId }));
      }
      if (regionConfig.maxComponents !== undefined) definition.max_components = regionConfig.maxComponents;
      if (regionConfig.minComponents !== undefined) definition.min_components = regionConfig.minComponents;
      if (regionConfig.allowMultiple !== undefined) definition.allow_multiple = regionConfig.allowMultiple;
      if (regionConfig.defaultComponentConstructors) {
        definition.default_component_constructors = regionConfig.defaultComponentConstructors;
      }
      regionDefinitions.push(definition);
    }
  } catch (error$1) {
    console.warn(`Warning: Could not extract region definitions from class ${className}: ${error$1.message}`);
  }
  return regionDefinitions;
}
|
|
1569
|
+
/**
 * Scan a single source file for classes decorated with `@Component` and
 * build cartridge metadata (type id, name, group, attributes, regions)
 * for each one.
 * @param filePath - Absolute path of the file to scan
 * @param _projectRoot - Unused; kept for signature parity with the other
 *        process* functions
 * @returns Array of component metadata objects; empty on read/parse failure
 */
async function processComponentFile(filePath, _projectRoot) {
  try {
    const content = await readFile(filePath, "utf-8");
    const components = [];
    // Cheap substring pre-check avoids spinning up ts-morph for files that
    // cannot contain a decorated component.
    if (!content.includes("@Component")) return components;
    try {
      // In-memory project: parse only this file, ignore any tsconfig.
      const sourceFile = new Project({
        useInMemoryFileSystem: true,
        skipAddingFilesFromTsConfig: true
      }).createSourceFile(filePath, content);
      const classes = sourceFile.getClasses();
      for (const classDeclaration of classes) {
        const componentDecorator = classDeclaration.getDecorator("Component");
        if (!componentDecorator) continue;
        // Anonymous classes cannot be turned into component metadata.
        const className = classDeclaration.getName();
        if (!className) continue;
        const componentConfig = parseDecoratorArgs(componentDecorator);
        const attributes = extractAttributesFromSource(sourceFile, className);
        const regionDefinitions = extractRegionDefinitionsFromSource(sourceFile, className);
        // Every decorator field falls back to a value derived from the class name.
        const componentMetadata = {
          typeId: componentConfig.id || className.toLowerCase(),
          name: componentConfig.name || toHumanReadableName(className),
          group: componentConfig.group || DEFAULT_COMPONENT_GROUP,
          description: componentConfig.description || `Custom component: ${className}`,
          regionDefinitions,
          attributes
        };
        components.push(componentMetadata);
      }
    } catch (error$1) {
      // Parse errors are non-fatal: skip the file, keep scanning others.
      console.warn(`Warning: Could not process file ${filePath}:`, error$1.message);
    }
    return components;
  } catch (error$1) {
    // Read errors are also non-fatal for the overall scan.
    console.warn(`Warning: Could not read file ${filePath}:`, error$1.message);
    return [];
  }
}
|
|
1607
|
+
/**
 * Scan a single source file for classes decorated with `@PageType` and
 * build cartridge metadata for each one, including the resolved React
 * Router route for the file.
 * @param filePath - Absolute path of the file to scan
 * @param projectRoot - Project root, needed to resolve the file's route
 * @returns Array of page-type metadata objects; empty on read/parse failure
 */
async function processPageTypeFile(filePath, projectRoot) {
  try {
    const content = await readFile(filePath, "utf-8");
    const pageTypes = [];
    // Cheap substring pre-check before involving ts-morph.
    if (!content.includes("@PageType")) return pageTypes;
    try {
      // In-memory project: parse only this file, ignore any tsconfig.
      const sourceFile = new Project({
        useInMemoryFileSystem: true,
        skipAddingFilesFromTsConfig: true
      }).createSourceFile(filePath, content);
      const classes = sourceFile.getClasses();
      for (const classDeclaration of classes) {
        const pageTypeDecorator = classDeclaration.getDecorator("PageType");
        if (!pageTypeDecorator) continue;
        // Anonymous classes cannot be turned into page-type metadata.
        const className = classDeclaration.getName();
        if (!className) continue;
        const pageTypeConfig = parseDecoratorArgs(pageTypeDecorator);
        const attributes = extractAttributesFromSource(sourceFile, className);
        const regionDefinitions = extractRegionDefinitionsFromSource(sourceFile, className);
        // Resolve the route via the React Router CLI ('/unknown' if unmatched).
        const route = filePathToRoute(filePath, projectRoot);
        // Every decorator field falls back to a value derived from the class name.
        const pageTypeMetadata = {
          typeId: pageTypeConfig.id || className.toLowerCase(),
          name: pageTypeConfig.name || toHumanReadableName(className),
          description: pageTypeConfig.description || `Custom page type: ${className}`,
          regionDefinitions,
          supportedAspectTypes: pageTypeConfig.supportedAspectTypes || [],
          attributes,
          route
        };
        pageTypes.push(pageTypeMetadata);
      }
    } catch (error$1) {
      // Parse errors are non-fatal: skip the file, keep scanning others.
      console.warn(`Warning: Could not process file ${filePath}:`, error$1.message);
    }
    return pageTypes;
  } catch (error$1) {
    // Read errors are also non-fatal for the overall scan.
    console.warn(`Warning: Could not read file ${filePath}:`, error$1.message);
    return [];
  }
}
|
|
1647
|
+
/**
 * Read a potential aspect definition file (a JSON object inside an
 * `aspects/` directory) and convert it into aspect metadata. The file must
 * declare both `name` and `attribute_definitions` to be accepted.
 * @param filePath - Absolute path of the candidate file
 * @param _projectRoot - Unused; kept for signature parity
 * @returns Array with zero or one aspect metadata objects; empty on failure
 */
async function processAspectFile(filePath, _projectRoot) {
  try {
    const content = await readFile(filePath, "utf-8");
    const aspects = [];
    const looksLikeJsonObject = filePath.endsWith(".json") && content.trim().startsWith("{");
    if (!looksLikeJsonObject) return aspects;
    // Only files living under an "aspects" directory qualify (either separator).
    const inAspectsDir = filePath.includes("/aspects/") || filePath.includes("\\aspects\\");
    if (!inAspectsDir) return aspects;
    try {
      const parsed = JSON.parse(content);
      // The file name (without .json) becomes the aspect id.
      const aspectId = basename(filePath, ".json");
      if (parsed.name && parsed.attribute_definitions) {
        aspects.push({
          id: aspectId,
          name: parsed.name,
          description: parsed.description || `Aspect type: ${parsed.name}`,
          attributeDefinitions: parsed.attribute_definitions || [],
          supportedObjectTypes: parsed.supported_object_types || []
        });
      }
    } catch (parseError) {
      console.warn(`Warning: Could not parse JSON in file ${filePath}:`, parseError.message);
    }
    return aspects;
  } catch (error$1) {
    console.warn(`Warning: Could not read file ${filePath}:`, error$1.message);
    return [];
  }
}
|
|
1674
|
+
/**
 * Write one component cartridge JSON file into `<outputDir>/<group>/`.
 * In dry-run mode nothing is written; only the summary line is printed.
 * @param component - Component metadata produced by processComponentFile
 * @param outputDir - Root directory for component cartridges
 * @param dryRun - When true, skip all filesystem writes
 */
async function generateComponentCartridge(component, outputDir, dryRun = false) {
  const fileName = toCamelCaseFileName(component.typeId);
  const groupDir = join(outputDir, component.group);
  const outputPath = join(groupDir, `${fileName}.json`);
  if (!dryRun) {
    try {
      await mkdir(groupDir, { recursive: true });
    } catch {
      // Directory creation failures are ignored; writeFile will surface real problems.
    }
    const cartridgeData = {
      name: component.name,
      description: component.description,
      group: component.group,
      arch_type: ARCH_TYPE_HEADLESS,
      region_definitions: component.regionDefinitions || [],
      // Single attribute group keyed by the component's type id.
      attribute_definition_groups: [
        {
          id: component.typeId,
          name: component.name,
          description: component.description,
          attribute_definitions: component.attributes
        }
      ]
    };
    await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));
  }
  const prefix = dryRun ? " - [DRY RUN]" : " -";
  console.log(`${prefix} ${String(component.typeId)}: ${String(component.name)} (${String(component.attributes.length)} attributes) → ${fileName}.json`);
}
|
|
1701
|
+
/**
 * Write one page-type cartridge JSON file into `outputDir`.
 * Attribute groups, supported aspect types, and the route are only emitted
 * when present on the metadata. In dry-run mode nothing is written.
 * @param pageType - Page-type metadata produced by processPageTypeFile
 * @param outputDir - Directory for page-type cartridges
 * @param dryRun - When true, skip all filesystem writes
 */
async function generatePageTypeCartridge(pageType, outputDir, dryRun = false) {
  const fileName = toCamelCaseFileName(pageType.name);
  const outputPath = join(outputDir, `${fileName}.json`);
  if (!dryRun) {
    const cartridgeData = {
      name: pageType.name,
      description: pageType.description,
      arch_type: ARCH_TYPE_HEADLESS,
      region_definitions: pageType.regionDefinitions || []
    };
    const hasAttributes = pageType.attributes && pageType.attributes.length > 0;
    if (hasAttributes) {
      cartridgeData.attribute_definition_groups = [
        {
          id: pageType.typeId || fileName,
          name: pageType.name,
          description: pageType.description,
          attribute_definitions: pageType.attributes
        }
      ];
    }
    if (pageType.supportedAspectTypes) cartridgeData.supported_aspect_types = pageType.supportedAspectTypes;
    if (pageType.route) cartridgeData.route = pageType.route;
    await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));
  }
  const prefix = dryRun ? " - [DRY RUN]" : " -";
  console.log(`${prefix} ${String(pageType.name)}: ${String(pageType.description)} (${String(pageType.attributes.length)} attributes) → ${fileName}.json`);
}
|
|
1724
|
+
/**
 * Write one aspect cartridge JSON file into `outputDir`.
 * In dry-run mode nothing is written; only the summary line is printed.
 * @param aspect - Aspect metadata produced by processAspectFile
 * @param outputDir - Directory for aspect cartridges
 * @param dryRun - When true, skip all filesystem writes
 */
async function generateAspectCartridge(aspect, outputDir, dryRun = false) {
  const fileName = toCamelCaseFileName(aspect.id);
  const outputPath = join(outputDir, `${fileName}.json`);
  if (!dryRun) {
    const cartridgeData = {
      name: aspect.name,
      description: aspect.description,
      arch_type: ARCH_TYPE_HEADLESS,
      attribute_definitions: aspect.attributeDefinitions || []
    };
    if (aspect.supportedObjectTypes) {
      cartridgeData.supported_object_types = aspect.supportedObjectTypes;
    }
    await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));
  }
  const prefix = dryRun ? " - [DRY RUN]" : " -";
  console.log(`${prefix} ${String(aspect.name)}: ${String(aspect.description)} (${String(aspect.attributeDefinitions.length)} attributes) → ${fileName}.json`);
}
|
|
1740
|
+
/**
 * Runs ESLint with --fix on the specified directory to format JSON files.
 * This ensures generated JSON files match the project's Prettier/ESLint
 * configuration. Failures are reported as warnings and never abort generation.
 * @param metadataDir - Directory containing the generated JSON files
 * @param projectRoot - Project root used as the cwd for ESLint
 */
function lintGeneratedFiles(metadataDir, projectRoot) {
  try {
    console.log("🔧 Running ESLint --fix on generated JSON files...");
    execSync$1(`npx eslint "${metadataDir}/**/*.json" --fix --no-error-on-unmatched-pattern`, {
      cwd: projectRoot,
      stdio: "pipe",
      encoding: "utf-8"
    });
    console.log("✅ JSON files formatted successfully");
    return;
  } catch (error$1) {
    const execError = error$1;
    // Exit code 2: ESLint itself could not run (per ESLint CLI exit codes).
    if (execError.status === 2) {
      const errMsg = execError.stderr || execError.stdout || "Unknown error";
      console.warn(`⚠️ Warning: Could not run ESLint --fix: ${errMsg}`);
      return;
    }
    // Non-zero exit with "error" in stderr: some issues were not auto-fixable.
    if (execError.stderr && execError.stderr.includes("error")) {
      console.warn(`⚠️ Warning: Some linting issues could not be auto-fixed. Run ESLint manually to review.`);
      return;
    }
    // Anything else (e.g. fixable warnings) is treated as success.
    console.log("✅ JSON files formatted successfully");
  }
}
|
|
1762
|
+
/**
 * Orchestrate cartridge metadata generation for a project.
 *
 * Scans the project's `src/` tree (or an explicit list of files in
 * incremental mode) for decorated components, page types, and aspect JSON
 * files, then writes one cartridge JSON file per item under
 * `<metadataDirectory>/{components,pages,aspects}`.
 *
 * @param projectDirectory - Project root to scan
 * @param metadataDirectory - Root output directory for generated metadata
 * @param options - Optional flags:
 *        `filePaths` (incremental mode: only these files are processed and
 *        existing output is preserved), `dryRun` (no filesystem changes),
 *        `lintFix` (run ESLint --fix on the output; defaults to enabled)
 * @returns Summary counts `{ componentsGenerated, pageTypesGenerated,
 *          aspectsGenerated, totalFiles }`
 *          Note: exits the process (exit code 1) on unrecoverable errors
 *          instead of throwing.
 */
async function generateMetadata(projectDirectory, metadataDirectory, options) {
  try {
    const filePaths = options?.filePaths;
    const isIncrementalMode = filePaths && filePaths.length > 0;
    const dryRun = options?.dryRun || false;
    if (dryRun) console.log("🔍 [DRY RUN] Scanning for decorated components and page types...");
    else if (isIncrementalMode) console.log(`🔍 Generating metadata for ${filePaths.length} specified file(s)...`);
    else console.log("🔍 Generating metadata for decorated components and page types...");
    const projectRoot = resolve(projectDirectory);
    const srcDir = join(projectRoot, "src");
    const metadataDir = resolve(metadataDirectory);
    // One output subdirectory per metadata kind.
    const componentsOutputDir = join(metadataDir, "components");
    const pagesOutputDir = join(metadataDir, "pages");
    const aspectsOutputDir = join(metadataDir, "aspects");
    if (!dryRun) {
      if (!isIncrementalMode) {
        // Full regeneration: wipe all previous output first.
        console.log("🗑️ Cleaning existing output directories...");
        for (const outputDir of [
          componentsOutputDir,
          pagesOutputDir,
          aspectsOutputDir
        ]) try {
          await rm(outputDir, {
            recursive: true,
            force: true
          });
          console.log(` - Deleted: ${outputDir}`);
        } catch {
          console.log(` - Directory not found (skipping): ${outputDir}`);
        }
      } else console.log("📝 Incremental mode: existing cartridge files will be preserved/overwritten");
      console.log("📁 Creating output directories...");
      for (const outputDir of [
        componentsOutputDir,
        pagesOutputDir,
        aspectsOutputDir
      ]) try {
        await mkdir(outputDir, { recursive: true });
      } catch (error$1) {
        // mkdir failed: only fatal if the directory truly doesn't exist.
        try {
          await access(outputDir);
        } catch {
          console.error(`❌ Error: Failed to create output directory ${outputDir}: ${error$1.message}`);
          process.exit(1);
        }
      }
    } else if (isIncrementalMode) console.log(`📝 [DRY RUN] Would process ${filePaths.length} specific file(s)`);
    else console.log("📝 [DRY RUN] Would clean and regenerate all metadata files");
    // Build the list of candidate files to inspect.
    let files = [];
    if (isIncrementalMode && filePaths) {
      files = filePaths.map((fp) => resolve(projectRoot, fp));
      console.log(`📂 Processing ${files.length} specified file(s)...`);
    } else {
      // Recursive walk of src/, skipping build artifacts and keeping only
      // .ts/.tsx (decorator sources) and .json (aspect definitions).
      const scanDirectory = async (dir) => {
        const entries = await readdir(dir, { withFileTypes: true });
        for (const entry of entries) {
          const fullPath = join(dir, entry.name);
          if (entry.isDirectory()) {
            if (!SKIP_DIRECTORIES.includes(entry.name)) await scanDirectory(fullPath);
          } else if (entry.isFile() && (extname$1(entry.name) === ".ts" || extname$1(entry.name) === ".tsx" || extname$1(entry.name) === ".json")) files.push(fullPath);
        }
      };
      await scanDirectory(srcDir);
    }
    // Each file may yield any combination of the three metadata kinds.
    const allComponents = [];
    const allPageTypes = [];
    const allAspects = [];
    for (const file of files) {
      const components = await processComponentFile(file, projectRoot);
      allComponents.push(...components);
      const pageTypes = await processPageTypeFile(file, projectRoot);
      allPageTypes.push(...pageTypes);
      const aspects = await processAspectFile(file, projectRoot);
      allAspects.push(...aspects);
    }
    if (allComponents.length === 0 && allPageTypes.length === 0 && allAspects.length === 0) {
      console.log("⚠️ No decorated components, page types, or aspect files found.");
      return {
        componentsGenerated: 0,
        pageTypesGenerated: 0,
        aspectsGenerated: 0,
        totalFiles: 0
      };
    }
    if (allComponents.length > 0) {
      console.log(`✅ Found ${allComponents.length} decorated component(s):`);
      for (const component of allComponents) await generateComponentCartridge(component, componentsOutputDir, dryRun);
      if (dryRun) console.log(`📄 [DRY RUN] Would generate ${allComponents.length} component metadata file(s) in: ${componentsOutputDir}`);
      else console.log(`📄 Generated ${allComponents.length} component metadata file(s) in: ${componentsOutputDir}`);
    }
    if (allPageTypes.length > 0) {
      console.log(`✅ Found ${allPageTypes.length} decorated page type(s):`);
      for (const pageType of allPageTypes) await generatePageTypeCartridge(pageType, pagesOutputDir, dryRun);
      if (dryRun) console.log(`📄 [DRY RUN] Would generate ${allPageTypes.length} page type metadata file(s) in: ${pagesOutputDir}`);
      else console.log(`📄 Generated ${allPageTypes.length} page type metadata file(s) in: ${pagesOutputDir}`);
    }
    if (allAspects.length > 0) {
      console.log(`✅ Found ${allAspects.length} decorated aspect(s):`);
      for (const aspect of allAspects) await generateAspectCartridge(aspect, aspectsOutputDir, dryRun);
      if (dryRun) console.log(`📄 [DRY RUN] Would generate ${allAspects.length} aspect metadata file(s) in: ${aspectsOutputDir}`);
      else console.log(`📄 Generated ${allAspects.length} aspect metadata file(s) in: ${aspectsOutputDir}`);
    }
    // lintFix defaults to on; only an explicit `false` disables it.
    const shouldLintFix = options?.lintFix !== false;
    if (!dryRun && shouldLintFix && (allComponents.length > 0 || allPageTypes.length > 0 || allAspects.length > 0)) lintGeneratedFiles(metadataDir, projectRoot);
    return {
      componentsGenerated: allComponents.length,
      pageTypesGenerated: allPageTypes.length,
      aspectsGenerated: allAspects.length,
      totalFiles: allComponents.length + allPageTypes.length + allAspects.length
    };
  } catch (error$1) {
    console.error("❌ Error:", error$1.message);
    process.exit(1);
  }
}
|
|
1877
|
+
|
|
1878
|
+
//#endregion
|
|
1879
|
+
//#region src/cartridge-services/types.ts
|
|
1880
|
+
// Root WebDAV servlet path on a Commerce Cloud instance; WebDAV request
// URIs are built relative to this base.
const WEBDAV_BASE = "/on/demandware.servlet/webdav/Sites";
// WebDAV folder that holds uploaded cartridges.
const CARTRIDGES_PATH = "Cartridges";
// HTTP methods used by the cartridge upload/removal requests.
const HTTP_METHODS = {
  PUT: "PUT",
  POST: "POST",
  DELETE: "DELETE"
};
// Content-Type header values used by the SFCC client.
const CONTENT_TYPES = {
  APPLICATION_ZIP: "application/zip",
  APPLICATION_FORM_URLENCODED: "application/x-www-form-urlencoded",
  APPLICATION_JSON: "application/json"
};
// Form-field values for WebDAV operations (e.g. unzipping an uploaded archive).
const WEBDAV_OPERATIONS = {
  UNZIP: "UNZIP",
  TARGET_CARTRIDGES: "cartridges"
};
|
|
1896
|
+
|
|
1897
|
+
//#endregion
|
|
1898
|
+
//#region src/cartridge-services/sfcc-client.ts
|
|
1899
|
+
/**
 * Create HTTP request options for WebDAV operations (file upload/download).
 * @param instance - The Commerce Cloud instance hostname
 * @param path$1 - The WebDAV path (e.g. '/cartridges')
 * @param basicAuth - Base64 encoded basic authentication credentials (required)
 * @param method - HTTP method (PUT, DELETE, UNZIP, etc.)
 * @param formData - Optional form data for the request
 * @returns Configured HTTP request options for WebDAV operations
 */
function getWebdavOptions(instance, path$1, basicAuth, method, formData) {
  const options = {
    baseUrl: `https://${instance}`,
    uri: `${WEBDAV_BASE}/${path$1}`,
    auth: { basic: basicAuth },
    method
  };
  // The `form` key is only present when form data was supplied.
  if (formData) options.form = formData;
  return options;
}
|
|
1919
|
+
/**
|
|
1920
|
+
* Check if an HTTP response indicates an authentication error and throw if so
|
|
1921
|
+
*
|
|
1922
|
+
* @param response - The HTTP response to check
|
|
1923
|
+
* @throws Error with authentication message if status code is 401
|
|
1924
|
+
*/
|
|
1925
|
+
/**
 * Throw when an HTTP response indicates an authentication failure.
 *
 * @param response - Response summary (as returned by makeRequest) to inspect
 * @throws Error when the status code is 401 Unauthorized
 */
function checkAuthenticationError(response) {
  const unauthorized = response.statusCode === 401;
  if (unauthorized) {
    throw new Error("Authentication failed. Please login again.");
  }
}
|
|
1928
|
+
/**
|
|
1929
|
+
* Execute an HTTP request using the native fetch API with default SSL validation
|
|
1930
|
+
*
|
|
1931
|
+
* This function handles general HTTP requests and does not automatically set Content-Type headers.
|
|
1932
|
+
* Callers must set the appropriate Content-Type header in opts.headers based on their body type
|
|
1933
|
+
*
|
|
1934
|
+
* @param opts - HTTP request configuration including URL, method, headers, and body
|
|
1935
|
+
* @returns Promise resolving to an object containing the HTTP response and parsed body
|
|
1936
|
+
* @throws Error if the HTTP request fails or cannot be completed
|
|
1937
|
+
*/
|
|
1938
|
+
/**
 * Execute an HTTP request using the native fetch API with default SSL validation.
 *
 * Does not set Content-Type automatically except for `opts.form` payloads;
 * callers must supply the header for other body types.
 *
 * @param opts - Request config: `uri`, `method`, `headers`, `auth.basic`,
 *   optional `form` (URL-encoded key/value pairs) and `body`
 * @returns Promise of `{ response: { statusCode, statusMessage, headers }, body }`
 *   where body is parsed JSON when the response Content-Type says so, else text
 * @throws Error when the underlying fetch fails
 */
async function makeRequest(opts) {
  const requestInit = {
    ...opts,
    headers: {
      Authorization: `Basic ${opts.auth.basic}`,
      ...opts.headers
    }
  };
  if (opts.form) {
    // Encode form fields and force the matching Content-Type.
    const encoded = new URLSearchParams();
    for (const [key, value] of Object.entries(opts.form)) {
      encoded.append(key, String(value));
    }
    requestInit.body = encoded;
    requestInit.headers = {
      ...requestInit.headers,
      "Content-Type": CONTENT_TYPES.APPLICATION_FORM_URLENCODED
    };
  }
  try {
    const response = await fetch(opts.uri, requestInit);
    const isJson = response.headers.get("content-type")?.includes(CONTENT_TYPES.APPLICATION_JSON);
    const body = isJson ? await response.json() : await response.text();
    // Flatten the Headers object into a plain key/value record.
    const headers = {};
    response.headers.forEach((value, key) => {
      headers[key] = value;
    });
    return {
      response: {
        statusCode: response.status,
        statusMessage: response.statusText,
        headers
      },
      body
    };
  } catch (cause) {
    throw new Error(`HTTP request failed: ${cause instanceof Error ? cause.message : String(cause)}`);
  }
}
|
|
1977
|
+
|
|
1978
|
+
//#endregion
|
|
1979
|
+
//#region src/cartridge-services/validation.ts
|
|
1980
|
+
/**
|
|
1981
|
+
* Validation error class for cartridge service parameter validation
|
|
1982
|
+
*/
|
|
1983
|
+
/**
 * Validation error class for cartridge service parameter validation.
 * Thrown by the validate* helpers below when a caller-supplied argument
 * fails a precondition; `name` is set so callers can distinguish validation
 * failures from other Error instances.
 */
var ValidationError = class extends Error {
  constructor(message) {
    super(message);
    this.name = "ValidationError";
  }
};
|
|
1989
|
+
/**
|
|
1990
|
+
* Validate Commerce Cloud instance hostname
|
|
1991
|
+
*
|
|
1992
|
+
* @param instance - The instance hostname to validate
|
|
1993
|
+
* @throws ValidationError if instance is invalid
|
|
1994
|
+
*/
|
|
1995
|
+
/**
 * Validate a Commerce Cloud instance hostname.
 *
 * @param instance - The instance hostname to validate
 * @throws ValidationError when missing, blank, or not a dotted domain name
 */
function validateInstance(instance) {
  if (!instance || typeof instance !== "string") {
    throw new ValidationError("Instance parameter is required and must be a string");
  }
  if (instance.trim().length === 0) {
    throw new ValidationError("Instance parameter cannot be empty");
  }
  // A minimal sanity check: a hostname must contain at least one dot.
  if (!instance.includes(".")) {
    throw new ValidationError("Parameter instance must be a valid domain name");
  }
}
|
|
2000
|
+
/**
|
|
2001
|
+
* Validate cartridge file (must be a ZIP file)
|
|
2002
|
+
*
|
|
2003
|
+
* @param cartridgePath - The cartridge file path to validate
|
|
2004
|
+
* @throws ValidationError if cartridge is invalid
|
|
2005
|
+
*/
|
|
2006
|
+
/**
 * Validate a cartridge source path. The path must be extension-less
 * (i.e. a directory, not an archive or file path).
 *
 * @param cartridgePath - The cartridge directory path to validate
 * @throws ValidationError when missing, blank, or carrying a file extension
 */
function validateCartridgePath(cartridgePath) {
  if (!cartridgePath || typeof cartridgePath !== "string") {
    throw new ValidationError("cartridge parameter is required and must be a string");
  }
  if (cartridgePath.trim().length === 0) {
    throw new ValidationError("cartridge parameter cannot be empty");
  }
  const ext = extname(cartridgePath).toLowerCase();
  if (ext !== "") {
    throw new ValidationError(`cartridge must be a directory, got: ${ext}`);
  }
}
|
|
2012
|
+
/**
|
|
2013
|
+
* Validate Basic Auth credentials
|
|
2014
|
+
*
|
|
2015
|
+
* @param basicAuth - The base64 encoded basic auth credentials to validate
|
|
2016
|
+
* @throws ValidationError if credentials are invalid
|
|
2017
|
+
*/
|
|
2018
|
+
/**
 * Validate Basic Auth credentials.
 *
 * @param basicAuth - Base64 encoded basic auth credentials
 * @throws ValidationError when missing, blank, or implausibly short
 */
function validateBasicAuth(basicAuth) {
  if (!basicAuth || typeof basicAuth !== "string") {
    throw new ValidationError("Basic auth credentials parameter is required and must be a string");
  }
  if (basicAuth.trim().length === 0) {
    throw new ValidationError("Basic auth credentials parameter cannot be empty");
  }
  // Heuristic: a base64 "user:pass" pair is never this short.
  if (basicAuth.length < 10) {
    throw new ValidationError("Basic auth credentials appear to be too short to be valid");
  }
}
|
|
2023
|
+
/**
|
|
2024
|
+
* Validate code version name
|
|
2025
|
+
*
|
|
2026
|
+
* @param version - The code version name to validate
|
|
2027
|
+
* @throws ValidationError if version is invalid
|
|
2028
|
+
*/
|
|
2029
|
+
/**
 * Validate a code version name.
 *
 * @param version$1 - The code version name to validate
 * @throws ValidationError when missing, blank, or containing characters
 *   outside [a-zA-Z0-9._-]
 */
function validateVersion(version$1) {
  if (!version$1 || typeof version$1 !== "string") {
    throw new ValidationError("Version parameter is required and must be a string");
  }
  if (version$1.trim().length === 0) {
    throw new ValidationError("Version parameter cannot be empty");
  }
  const allowedChars = /^[a-zA-Z0-9._-]+$/;
  if (!allowedChars.test(version$1)) {
    throw new ValidationError("Version parameter contains invalid characters. Only alphanumeric, dots, hyphens, and underscores are allowed");
  }
}
|
|
2034
|
+
/**
|
|
2035
|
+
* Validate WebDAV path
|
|
2036
|
+
*
|
|
2037
|
+
* @param webdavPath - The WebDAV path to validate
|
|
2038
|
+
* @throws ValidationError if path is invalid
|
|
2039
|
+
*/
|
|
2040
|
+
/**
 * Validate a WebDAV path.
 *
 * @param webdavPath - The WebDAV path to validate
 * @throws ValidationError when missing or not starting with '/'
 */
function validateWebdavPath(webdavPath) {
  if (!webdavPath || typeof webdavPath !== "string") {
    throw new ValidationError("WebDAV path parameter is required and must be a string");
  }
  const isAbsolute = webdavPath.startsWith("/");
  if (!isAbsolute) {
    throw new ValidationError("WebDAV path must start with a forward slash");
  }
}
|
|
2044
|
+
/**
|
|
2045
|
+
* Validate all parameters for deployCode function
|
|
2046
|
+
*
|
|
2047
|
+
* @param instance - Commerce Cloud instance hostname
|
|
2048
|
+
* @param codeVersionName - Target code version name
|
|
2049
|
+
* @param cartridgeDirectoryPath - Path to the source directory
|
|
2050
|
+
* @param basicAuth - Base64 encoded basic auth credentials
|
|
2051
|
+
* @param cartridgeWebDevPath - WebDAV path for cartridge deployment
|
|
2052
|
+
* @throws ValidationError if any parameter is invalid
|
|
2053
|
+
*/
|
|
2054
|
+
/**
 * Validate all parameters for deployCode in one pass by delegating to the
 * individual validators above. Fails fast: the first invalid parameter throws.
 *
 * @param instance - Commerce Cloud instance hostname
 * @param codeVersionName - Target code version name
 * @param cartridgeDirectoryPath - Path to the source directory (extension-less)
 * @param basicAuth - Base64 encoded basic auth credentials
 * @param cartridgeWebDevPath - WebDAV path for cartridge deployment (must start with '/')
 * @throws ValidationError if any parameter is invalid
 */
function validateDeployCodeParams(instance, codeVersionName, cartridgeDirectoryPath, basicAuth, cartridgeWebDevPath) {
  validateInstance(instance);
  validateVersion(codeVersionName);
  validateCartridgePath(cartridgeDirectoryPath);
  validateBasicAuth(basicAuth);
  validateWebdavPath(cartridgeWebDevPath);
}
|
|
2061
|
+
|
|
2062
|
+
//#endregion
|
|
2063
|
+
//#region src/cartridge-services/deploy-cartridge.ts
|
|
2064
|
+
/**
|
|
2065
|
+
* Extract the filename (including extension) from a file path
|
|
2066
|
+
*
|
|
2067
|
+
* @param filePath - The full path to the file
|
|
2068
|
+
* @returns The filename portion of the path (e.g., 'archive.zip' from '/path/to/archive.zip')
|
|
2069
|
+
*/
|
|
2070
|
+
/**
 * Extract the filename (including extension) from a file path.
 *
 * @param filePath - The full path to the file
 * @returns The filename portion (e.g., 'archive.zip' from '/path/to/archive.zip')
 */
function getFilename(filePath) {
  const fileName = path.basename(filePath);
  return fileName;
}
|
|
2073
|
+
/**
|
|
2074
|
+
* Create a ZIP cartridge from a directory
|
|
2075
|
+
*
|
|
2076
|
+
* @param sourceDir - The directory to zip
|
|
2077
|
+
* @param outputPath - The output ZIP file path (can be same as sourceDir)
|
|
2078
|
+
* @returns Promise resolving when the ZIP file is created
|
|
2079
|
+
*/
|
|
2080
|
+
/**
 * Create a ZIP archive from a directory.
 *
 * @param sourceDir - The directory to zip (its contents go at the archive root)
 * @param outputPath - The output ZIP file path
 * @returns Promise resolving when the ZIP file is fully written to disk
 * @throws Error if archiving or writing fails
 */
async function zipCartridge(sourceDir, outputPath) {
  const archive = archiver("zip", { zlib: { level: 9 } });
  const output = fs$1.createWriteStream(outputPath);
  // `archive.finalize()` resolves when archiver has queued all data, which
  // can happen BEFORE the underlying write stream has flushed and closed.
  // Wait for the stream's 'close' event as well, and surface stream/archiver
  // errors that would otherwise be silently dropped.
  const written = new Promise((resolve, reject) => {
    output.on("close", resolve);
    output.on("error", reject);
    archive.on("error", reject);
  });
  archive.pipe(output);
  archive.directory(sourceDir, false);
  await archive.finalize();
  await written;
}
|
|
2087
|
+
/**
|
|
2088
|
+
* Build the WebDAV endpoint URL for a file
|
|
2089
|
+
*
|
|
2090
|
+
* @param instance - The Commerce Cloud instance hostname
|
|
2091
|
+
* @param path - The WebDAV path (e.g., 'Cartridges/local_metadata')
|
|
2092
|
+
* @param file - The local file path (filename will be extracted)
|
|
2093
|
+
* @returns The complete WebDAV endpoint URL
|
|
2094
|
+
*/
|
|
2095
|
+
/**
 * Build the absolute WebDAV endpoint URL for a file.
 *
 * @param instance - The Commerce Cloud instance hostname
 * @param webdavPath - The WebDAV path (e.g., 'Cartridges/local_metadata')
 * @param file - The local file path; only its basename is used remotely
 * @returns The complete WebDAV endpoint URL
 */
function buildWebdavEndpoint(instance, webdavPath, file) {
  const remoteName = getFilename(file);
  return `https://${instance}${WEBDAV_BASE}/${webdavPath}/${remoteName}`;
}
|
|
2098
|
+
/**
|
|
2099
|
+
* Unzip an uploaded archive file on Commerce Cloud via WebDAV
|
|
2100
|
+
*
|
|
2101
|
+
* @param instance - The Commerce Cloud instance hostname
|
|
2102
|
+
* @param path - The WebDAV path where the file was uploaded
|
|
2103
|
+
* @param file - The local file path (used to determine the remote filename)
|
|
2104
|
+
* @param basicAuth - Base64 encoded basic authentication credentials
|
|
2105
|
+
* @returns Promise resolving to HTTP response and body from the unzip operation
|
|
2106
|
+
*/
|
|
2107
|
+
/**
 * Unzip a previously uploaded archive on Commerce Cloud via WebDAV
 * (POST with form fields method=UNZIP, target=cartridges).
 *
 * @param instance - The Commerce Cloud instance hostname
 * @param webdavPath - The WebDAV path where the file was uploaded
 * @param file - The local file path (used to determine the remote filename)
 * @param basicAuth - Base64 encoded basic authentication credentials
 * @returns Promise resolving to HTTP response and body from the unzip operation
 * @throws Error on 401 responses (via checkAuthenticationError)
 */
async function unzip(instance, webdavPath, file, basicAuth) {
  const opts = getWebdavOptions(instance, webdavPath, basicAuth, HTTP_METHODS.POST, {
    method: WEBDAV_OPERATIONS.UNZIP,
    target: WEBDAV_OPERATIONS.TARGET_CARTRIDGES
  });
  // Point the request at the uploaded archive itself rather than the folder.
  opts.uri = buildWebdavEndpoint(instance, webdavPath, file);
  const result = await makeRequest(opts);
  checkAuthenticationError(result.response);
  return result;
}
|
|
2118
|
+
/**
|
|
2119
|
+
* Delete a file from Commerce Cloud via WebDAV
|
|
2120
|
+
*
|
|
2121
|
+
* @param instance - The Commerce Cloud instance hostname
|
|
2122
|
+
* @param path - The WebDAV path where the file is located
|
|
2123
|
+
* @param file - The local file path (used to determine the remote filename)
|
|
2124
|
+
* @param basicAuth - Base64 encoded basic authentication credentials
|
|
2125
|
+
* @returns Promise resolving to HTTP response and body from the delete operation
|
|
2126
|
+
*/
|
|
2127
|
+
/**
 * Delete a file from Commerce Cloud via WebDAV.
 *
 * @param instance - The Commerce Cloud instance hostname
 * @param webdavPath - The WebDAV path where the file is located
 * @param file - The local file path (used to determine the remote filename)
 * @param basicAuth - Base64 encoded basic authentication credentials
 * @returns Promise resolving to HTTP response and body from the delete operation
 * @throws Error on 401 responses (via checkAuthenticationError)
 */
async function deleteFile(instance, webdavPath, file, basicAuth) {
  const opts = getWebdavOptions(instance, webdavPath, basicAuth, HTTP_METHODS.DELETE);
  opts.uri = buildWebdavEndpoint(instance, webdavPath, file);
  const result = await makeRequest(opts);
  checkAuthenticationError(result.response);
  return result;
}
|
|
2135
|
+
/**
|
|
2136
|
+
* Upload a file to a specific cartridge version on Commerce Cloud via WebDAV (internal function)
|
|
2137
|
+
*
|
|
2138
|
+
* @param instance - The Commerce Cloud instance hostname
|
|
2139
|
+
* @param codeVersionName - The target code version name
|
|
2140
|
+
* @param filePath - The local file path to upload
|
|
2141
|
+
* @param basicAuth - Base64 encoded basic authentication credentials
|
|
2142
|
+
* @returns Promise resolving to HTTP response and body from the upload operation
|
|
2143
|
+
*/
|
|
2144
|
+
/**
 * Upload a file to a specific code version on Commerce Cloud via WebDAV PUT.
 *
 * @param instance - The Commerce Cloud instance hostname
 * @param codeVersionName - The target code version name
 * @param filePath - The local file path to upload (streamed, sent as application/zip)
 * @param basicAuth - Base64 encoded basic authentication credentials
 * @returns Promise resolving to HTTP response and body from the upload operation
 * @throws Error when the request fails or the server returns a non-2xx status
 */
async function postFile(instance, codeVersionName, filePath, basicAuth) {
  const targetPath = `${CARTRIDGES_PATH}/${codeVersionName}`;
  let result;
  try {
    const endpoint = buildWebdavEndpoint(instance, targetPath, filePath);
    const opts = getWebdavOptions(instance, targetPath, basicAuth, HTTP_METHODS.PUT);
    opts.uri = endpoint;
    opts.body = fs$1.createReadStream(filePath);
    // fetch requires duplex:'half' when the request body is a stream.
    opts.duplex = "half";
    opts.headers = {
      ...opts.headers,
      "Content-Type": CONTENT_TYPES.APPLICATION_ZIP
    };
    result = await makeRequest(opts);
    checkAuthenticationError(result.response);
  } catch (error$1) {
    throw new Error(`Post file "${filePath}" failed: ${error$1 instanceof Error ? error$1.message : String(error$1)}`);
  }
  // Status check lives OUTSIDE the try: in the original code this Error was
  // thrown inside the try and re-wrapped by its own catch, producing a
  // double-prefixed message ('Post file ... failed: Post file ... failed: ...').
  if (![200, 201, 204].includes(result.response.statusCode)) {
    throw new Error(`Post file "${filePath}" failed: ${result.response.statusCode} (${result.response.statusMessage})`);
  }
  return result;
}
|
|
2168
|
+
/**
|
|
2169
|
+
* Deploy code to Commerce Cloud by uploading, unzipping, and cleaning up
|
|
2170
|
+
*
|
|
2171
|
+
* This function performs a complete code deployment workflow:
|
|
2172
|
+
* 1. Uploads the archive file via WebDAV to the specified cartridge version
|
|
2173
|
+
* 2. Unzips the archive on the server
|
|
2174
|
+
* 3. Deletes the uploaded archive file
|
|
2175
|
+
* 4. Returns the deployed version name
|
|
2176
|
+
*
|
|
2177
|
+
* @param instance - The Commerce Cloud instance hostname
|
|
2178
|
+
* @param codeVersionName - The target code version name
|
|
2179
|
+
* @param sourceDir - The local directory containing the source files to deploy
|
|
2180
|
+
* @param basicAuth - Base64 encoded basic authentication credentials
|
|
2181
|
+
* @returns Promise resolving to deployment result with the version name
|
|
2182
|
+
* @throws Error if any step of the deployment process fails
|
|
2183
|
+
*/
|
|
2184
|
+
/**
 * Deploy code to Commerce Cloud: zip the source directory, upload via WebDAV,
 * unzip on the server, then delete the uploaded archive.
 *
 * Workflow:
 * 1. Validate all parameters (throws ValidationError on bad input)
 * 2. Zip sourceDir to a temp file next to it (metadata-<timestamp>.zip)
 * 3. PUT the archive into Cartridges/<codeVersionName>
 * 4. Server-side UNZIP, then DELETE of the uploaded archive
 * 5. The local temp ZIP is always removed in the finally block
 *
 * @param instance - The Commerce Cloud instance hostname
 * @param codeVersionName - The target code version name
 * @param sourceDir - The local directory containing the source files to deploy
 * @param basicAuth - Base64 encoded basic authentication credentials
 * @returns Promise resolving to { version } — NOTE: this is the temp archive's
 *   basename without '.zip' (e.g. 'metadata-1700000000000'), not codeVersionName
 * @throws Error if any step of the deployment process fails
 */
async function deployCode(instance, codeVersionName, sourceDir, basicAuth) {
  validateDeployCodeParams(instance, codeVersionName, sourceDir, basicAuth, `/${CARTRIDGES_PATH}/${codeVersionName}/cartridges`);
  // Temp archive is created as a sibling of sourceDir with a unique timestamp name.
  const tempZipPath = path.join(path.dirname(sourceDir), `metadata-${Date.now()}.zip`);
  try {
    await zipCartridge(sourceDir, tempZipPath);
    const file = path.basename(tempZipPath);
    await postFile(instance, codeVersionName, tempZipPath, basicAuth);
    const unzipResult = await unzip(instance, `${CARTRIDGES_PATH}/${codeVersionName}`, file, basicAuth);
    // Unzip accepts 202 (async processing) in addition to 200/201.
    if (![200, 201, 202].includes(unzipResult.response.statusCode)) throw new Error(`Deploy code ${file} failed (unzip step): ${unzipResult.response.statusCode} (${unzipResult.response.statusMessage})`);
    const deleteResult = await deleteFile(instance, `${CARTRIDGES_PATH}/${codeVersionName}`, file, basicAuth);
    if (![200, 204].includes(deleteResult.response.statusCode)) throw new Error(`Delete ZIP file ${file} after deployment failed (deleteFile step): ${deleteResult.response.statusCode} (${deleteResult.response.statusMessage})`);
    return { version: getFilename(file).replace(".zip", "") };
  } catch (error$1) {
    // Preserve original Error instances; only wrap non-Error throwables.
    if (error$1 instanceof Error) throw error$1;
    throw new Error(`Deploy code ${sourceDir} failed: ${String(error$1)}`);
  } finally {
    // Best-effort local cleanup of the temp archive regardless of outcome.
    if (fs$1.existsSync(tempZipPath)) fs$1.unlinkSync(tempZipPath);
  }
}
|
|
2207
|
+
|
|
2208
|
+
//#endregion
|
|
2209
|
+
//#region src/extensibility/path-util.ts
|
|
2210
|
+
// Source-file extensions the extension-trimming pass will process.
const FILE_EXTENSIONS = [".tsx", ".ts", ".d.ts"];
/**
 * Check whether a file name ends with one of the supported TypeScript
 * extensions (.tsx, .ts, .d.ts).
 *
 * @param fileName - The file name (or path) to check
 * @returns true when the name ends with a supported extension
 */
function isSupportedFileExtension(fileName) {
  for (const extension of FILE_EXTENSIONS) {
    if (fileName.endsWith(extension)) {
      return true;
    }
  }
  return false;
}
|
|
2218
|
+
|
|
2219
|
+
//#endregion
|
|
2220
|
+
//#region src/extensibility/trim-extensions.ts
|
|
2221
|
+
// Markers that template authors place in source files to tag extension-specific
// code. trimExtensions() strips marked lines / blocks / whole files for
// extensions the user did not select.
const SINGLE_LINE_MARKER = "@sfdc-extension-line";
const BLOCK_MARKER_START = "@sfdc-extension-block-start";
const BLOCK_MARKER_END = "@sfdc-extension-block-end";
const FILE_MARKER = "@sfdc-extension-file";
// Module-level verbosity flag; set by trimExtensions from its verboseOverride
// parameter and read by processFile/deleteExtensionFolders below.
let verbose = false;
|
|
2226
|
+
/**
 * Walk a project directory and remove extension-specific code for extensions
 * the user did not select: trims marked lines/blocks/files, deletes disabled
 * extension folders, and rewrites src/extensions/config.json.
 *
 * @param directory - Project root to process
 * @param selectedExtensions - Map of extension key -> selected flag
 * @param extensionConfig - Extension configuration ({ extensions: {...} })
 * @param verboseOverride - When true, log progress to the console
 */
function trimExtensions(directory, selectedExtensions, extensionConfig, verboseOverride = false) {
  const startTime = Date.now();
  // Updates the module-level `verbose` flag shared with the helpers below.
  verbose = verboseOverride ?? false;
  const configuredExtensions = extensionConfig?.extensions || {};
  // Build a key -> boolean map covering every configured extension; extensions
  // absent from selectedExtensions are treated as deselected (false).
  const extensions = {};
  Object.keys(configuredExtensions).forEach((pluginKey) => {
    extensions[pluginKey] = Boolean(selectedExtensions?.[pluginKey]) || false;
  });
  if (Object.keys(extensions).length === 0) {
    if (verbose) console.log("No plugins found, skipping trim");
    return;
  }
  // Recursive walk; node_modules subtrees are skipped entirely.
  const processDirectory = (dir) => {
    fs$1.readdirSync(dir).forEach((file) => {
      const filePath = path.join(dir, file);
      const stats = fs$1.statSync(filePath);
      if (!filePath.includes("node_modules")) {
        if (stats.isDirectory()) processDirectory(filePath);
        else if (isSupportedFileExtension(file)) processFile(filePath, extensions);
      }
    });
  };
  processDirectory(directory);
  if (extensionConfig?.extensions) {
    deleteExtensionFolders(directory, extensions, extensionConfig);
    updateExtensionConfig(directory, extensions);
  }
  const endTime = Date.now();
  if (verbose) console.log(`Trim extensions took ${endTime - startTime}ms`);
}
|
|
2256
|
+
/**
|
|
2257
|
+
* Update the extension config file to only include the selected extensions.
|
|
2258
|
+
* @param projectDirectory - The project directory
|
|
2259
|
+
* @param extensionSelections - The selected extensions
|
|
2260
|
+
*/
|
|
2261
|
+
/**
 * Rewrite src/extensions/config.json so it only lists the selected extensions.
 *
 * @param projectDirectory - The project directory
 * @param extensionSelections - Map of extension key -> selected flag;
 *   entries with a falsy selection are dropped from the config
 */
function updateExtensionConfig(projectDirectory, extensionSelections) {
  const configPath = path.join(projectDirectory, "src", "extensions", "config.json");
  const config = JSON.parse(fs$1.readFileSync(configPath, "utf8"));
  const remaining = {};
  for (const [extensionKey, meta] of Object.entries(config.extensions)) {
    if (extensionSelections[extensionKey]) {
      remaining[extensionKey] = meta;
    }
  }
  fs$1.writeFileSync(configPath, JSON.stringify({ extensions: remaining }, null, 4), "utf8");
}
|
|
2269
|
+
/**
|
|
2270
|
+
* Process a file to trim extension-specific code based on markers.
|
|
2271
|
+
* @param filePath - The file path to process
|
|
2272
|
+
* @param extensions - The extension selections
|
|
2273
|
+
*/
|
|
2274
|
+
/**
 * Process one source file, trimming extension-specific code based on markers.
 *
 * Handling order:
 * 1. FILE_MARKER: if the file is tagged for a deselected extension, the whole
 *    file is deleted and processing stops.
 * 2. If any extension key appears in the file, it is rescanned line by line:
 *    - SINGLE_LINE_MARKER for a deselected extension removes the marker line
 *      AND the line after it (i += 2).
 *    - BLOCK_MARKER_START / BLOCK_MARKER_END pairs for a deselected extension
 *      remove everything between them (markers included).
 * 3. The file is rewritten only when its content actually changed.
 *
 * @param filePath - The file path to process
 * @param extensions - Map of extension key -> selected flag
 * @throws Error on mismatched/unclosed block markers, or on file I/O failures
 */
function processFile(filePath, extensions) {
  const source = fs$1.readFileSync(filePath, "utf-8");
  if (source.includes(FILE_MARKER)) {
    // Only the FIRST line carrying the file marker is inspected.
    const markerLine = source.split("\n").find((line) => line.includes(FILE_MARKER));
    const extMatch = Object.keys(extensions).find((ext) => markerLine.includes(ext));
    if (!extMatch) {
      if (verbose) console.warn(`File ${filePath} is marked with ${markerLine} but it does not match any known extensions`);
    } else if (extensions[extMatch] === false) {
      // File belongs to a deselected extension: remove it entirely.
      try {
        fs$1.unlinkSync(filePath);
        if (verbose) console.log(`Deleted file ${filePath}`);
      } catch (e) {
        const error$1 = e;
        console.error(`Error deleting file ${filePath}: ${error$1.message}`);
        throw e;
      }
      return;
    }
  }
  const extKeys = Object.keys(extensions);
  // Quick pre-filter: only rescan files that mention an extension key at all.
  // NOTE(review): keys are joined into a RegExp unescaped — presumably keys
  // never contain regex metacharacters; confirm against config conventions.
  if (new RegExp(extKeys.join("|"), "g").test(source)) {
    const lines = source.split("\n");
    const newLines = [];
    // Stack of open block markers, used to validate pairing/nesting.
    const blockMarkers = [];
    // Single boolean (not a counter): the most recent start marker decides
    // whether lines are currently being dropped.
    let skippingBlock = false;
    let i = 0;
    while (i < lines.length) {
      const line = lines[i];
      if (line.includes(SINGLE_LINE_MARKER)) {
        const matchingExtension = Object.keys(extensions).find((extension) => line.includes(extension));
        if (matchingExtension && extensions[matchingExtension] === false) {
          // Drop the marker line and the single line that follows it.
          i += 2;
          continue;
        }
      } else if (line.includes(BLOCK_MARKER_START)) {
        const matchingExtension = Object.keys(extensions).find((extension) => line.includes(extension));
        if (matchingExtension) {
          blockMarkers.push({
            extension: matchingExtension,
            line: i
          });
          skippingBlock = extensions[matchingExtension] === false;
        } else if (verbose) console.warn(`Warning: Unknown marker found in ${filePath} at line ${i}: \n${line}`);
      } else if (line.includes(BLOCK_MARKER_END)) {
        if (Object.keys(extensions).find((extension) => line.includes(extension))) {
          const extension = Object.keys(extensions).find((p) => line.includes(p));
          if (blockMarkers.length === 0) throw new Error(`Block marker mismatch in ${filePath}, encountered end marker ${extension} without a matching start marker at line ${i}:\n${lines[i]}`);
          const startMarker = blockMarkers.pop();
          // End markers must close the most recently opened start marker.
          if (!extension || startMarker.extension !== extension) throw new Error(`Block marker mismatch in ${filePath}, expected end marker for ${startMarker.extension} but got ${extension} at line ${i}:\n${lines[i]}`);
          if (extensions[extension] === false) {
            // Deselected block ends: stop skipping and drop the end-marker line.
            skippingBlock = false;
            i++;
            continue;
          }
        }
      }
      if (!skippingBlock) newLines.push(line);
      i++;
    }
    if (blockMarkers.length > 0) throw new Error(`Unclosed end marker found in ${filePath}: ${blockMarkers[blockMarkers.length - 1].extension}`);
    const newSource = newLines.join("\n");
    // Rewrite only on change to avoid touching file mtimes needlessly.
    if (newSource !== source) try {
      fs$1.writeFileSync(filePath, newSource);
      if (verbose) console.log(`Updated file ${filePath}`);
    } catch (e) {
      const error$1 = e;
      console.error(`Error updating file ${filePath}: ${error$1.message}`);
      throw e;
    }
  }
}
|
|
2345
|
+
/**
|
|
2346
|
+
* Delete extension folders for disabled extensions.
|
|
2347
|
+
* @param projectRoot - The project root directory
|
|
2348
|
+
* @param extensions - The extension selections
|
|
2349
|
+
* @param extensionConfig - The extension configuration
|
|
2350
|
+
*/
|
|
2351
|
+
/**
 * Delete the src/extensions/<folder> directory of every disabled extension.
 * Errors during deletion are logged (EPERM gets a dedicated hint) but never
 * thrown, so one failed folder does not abort the rest.
 *
 * @param projectRoot - The project root directory
 * @param extensions - Map of extension key -> selected flag
 * @param extensionConfig - The extension configuration (provides folder names)
 */
function deleteExtensionFolders(projectRoot, extensions, extensionConfig) {
  const extensionsDir = path.join(projectRoot, "src", "extensions");
  if (!fs$1.existsSync(extensionsDir)) return;
  const configuredExtensions = extensionConfig.extensions;
  const disabledKeys = Object.keys(extensions).filter((key) => extensions[key] === false);
  for (const extKey of disabledKeys) {
    const folder = configuredExtensions[extKey]?.folder;
    if (!folder) continue;
    const extensionFolderPath = path.join(extensionsDir, folder);
    if (!fs$1.existsSync(extensionFolderPath)) continue;
    try {
      fs$1.rmSync(extensionFolderPath, {
        recursive: true,
        force: true
      });
      if (verbose) console.log(`Deleted extension folder: ${extensionFolderPath}`);
    } catch (err) {
      const error$1 = err;
      if (error$1.code === "EPERM") console.error(`Permission denied - cannot delete ${extensionFolderPath}. You may need to run with sudo or check permissions.`);
      else console.error(`Error deleting ${extensionFolderPath}: ${error$1.message}`);
    }
  }
}
|
|
2373
|
+
|
|
2374
|
+
//#endregion
|
|
2375
|
+
//#region src/extensibility/dependency-utils.ts
|
|
2376
|
+
/**
|
|
2377
|
+
* Resolve full transitive dependency chain in topological order (dependencies first).
|
|
2378
|
+
* Example: resolveDependencies('BOPIS', config) → ['Store Locator', 'BOPIS']
|
|
2379
|
+
*
|
|
2380
|
+
* @param extensionKey - The extension key to resolve dependencies for
|
|
2381
|
+
* @param config - The extension configuration
|
|
2382
|
+
* @returns Array of extension keys in topological order (dependencies first, then the extension itself)
|
|
2383
|
+
*/
|
|
2384
|
+
/**
 * Resolve the full transitive dependency chain in topological order
 * (dependencies first). Keys not present in the config are visited but
 * never included in the result.
 * Example: resolveDependencies('BOPIS', config) → ['Store Locator', 'BOPIS']
 *
 * @param extensionKey - The extension key to resolve dependencies for
 * @param config - The extension configuration
 * @returns Extension keys in topological order (dependencies first, then the key itself)
 */
function resolveDependencies(extensionKey, config) {
  const seen = new Set();
  const ordered = [];
  const walk = (key) => {
    if (seen.has(key)) return;
    seen.add(key);
    const entry = config.extensions[key];
    if (!entry) return;
    for (const dependency of entry.dependencies || []) {
      walk(dependency);
    }
    ordered.push(key);
  };
  walk(extensionKey);
  return ordered;
}
|
|
2399
|
+
/**
|
|
2400
|
+
* Reverse lookup: find immediate extensions that depend on this one.
|
|
2401
|
+
* Example: getDependents('Store Locator', config) → ['BOPIS']
|
|
2402
|
+
*
|
|
2403
|
+
* @param extensionKey - The extension key to find dependents for
|
|
2404
|
+
* @param config - The extension configuration
|
|
2405
|
+
* @returns Array of extension keys that directly depend on this extension
|
|
2406
|
+
*/
|
|
2407
|
+
/**
 * Reverse lookup: find the immediate extensions that depend on this one.
 * Example: getDependents('Store Locator', config) → ['BOPIS']
 *
 * @param extensionKey - The extension key to find dependents for
 * @param config - The extension configuration
 * @returns Extension keys that directly list extensionKey as a dependency
 */
function getDependents(extensionKey, config) {
  return Object.entries(config.extensions)
    .filter(([, extension]) => (extension.dependencies || []).includes(extensionKey))
    .map(([key]) => key);
}
|
|
2412
|
+
/**
|
|
2413
|
+
* Resolve full transitive dependent chain in reverse topological order (dependents first).
|
|
2414
|
+
* Example: resolveDependents('Store Locator', config) → ['BOPIS', 'Store Locator']
|
|
2415
|
+
*
|
|
2416
|
+
* @param extensionKey - The extension key to resolve dependents for
|
|
2417
|
+
* @param config - The extension configuration
|
|
2418
|
+
* @returns Array of extension keys in reverse topological order (dependents first, then the extension itself)
|
|
2419
|
+
*/
|
|
2420
|
+
/**
 * Resolve the full transitive dependent chain in reverse topological order
 * (dependents first, the extension itself last).
 * Example: resolveDependents('Store Locator', config) → ['BOPIS', 'Store Locator']
 *
 * @param extensionKey - The extension key to resolve dependents for
 * @param config - The extension configuration
 * @returns Extension keys in reverse topological order
 */
function resolveDependents(extensionKey, config) {
  const seen = new Set();
  const ordered = [];
  const walk = (key) => {
    if (seen.has(key)) return;
    seen.add(key);
    for (const dependent of getDependents(key, config)) {
      walk(dependent);
    }
    ordered.push(key);
  };
  walk(extensionKey);
  return ordered;
}
|
|
2433
|
+
/**
|
|
2434
|
+
* Validate that no circular dependencies exist in the configuration.
|
|
2435
|
+
* Throws a descriptive error if a cycle is found.
|
|
2436
|
+
*
|
|
2437
|
+
* @param config - The extension configuration to validate
|
|
2438
|
+
* @throws Error if a circular dependency is detected
|
|
2439
|
+
*/
|
|
2440
|
+
/**
 * Validate that no circular dependencies exist in the configuration using a
 * depth-first search with a "currently visiting" set.
 *
 * @param config - The extension configuration to validate
 * @throws Error with the full cycle path if a circular dependency is detected
 */
function validateNoCycles(config) {
  const inProgress = new Set();
  const completed = new Set();
  const walk = (key, trail) => {
    if (completed.has(key)) return;
    if (inProgress.has(key)) {
      // Revisiting a node on the current DFS trail means we found a cycle;
      // report the cycle segment of the trail, closed back onto `key`.
      const start = trail.indexOf(key);
      const cyclePath = [...trail.slice(start), key];
      throw new Error(`Circular dependency detected: ${cyclePath.join(" -> ")}`);
    }
    inProgress.add(key);
    trail.push(key);
    const entry = config.extensions[key];
    if (entry) {
      for (const dependency of entry.dependencies || []) {
        walk(dependency, trail);
      }
    }
    trail.pop();
    inProgress.delete(key);
    completed.add(key);
  };
  for (const key of Object.keys(config.extensions)) {
    walk(key, []);
  }
}
|
|
2463
|
+
/**
|
|
2464
|
+
* Filter resolved dependencies to only those not yet installed.
|
|
2465
|
+
* Returns dependencies in topological order (install order).
|
|
2466
|
+
*
|
|
2467
|
+
* @param extensionKey - The extension key to check dependencies for
|
|
2468
|
+
* @param installedExtensions - Array of already installed extension keys
|
|
2469
|
+
* @param config - The extension configuration
|
|
2470
|
+
* @returns Array of missing extension keys in topological order (install order)
|
|
2471
|
+
*/
|
|
2472
|
+
/**
 * Filter an extension's resolved dependency chain down to the entries that
 * are not yet installed, preserving topological (install) order.
 *
 * @param extensionKey - The extension key to check dependencies for
 * @param installedExtensions - Array of already installed extension keys
 * @param config - The extension configuration
 * @returns Missing extension keys in install order
 */
function getMissingDependencies(extensionKey, installedExtensions, config) {
  const installed = new Set(installedExtensions);
  return resolveDependencies(extensionKey, config).filter((key) => !installed.has(key));
}
|
|
2477
|
+
/**
|
|
2478
|
+
* Resolve dependencies for multiple extensions, merging and deduplicating the results.
|
|
2479
|
+
* Returns all dependencies in topological order.
|
|
2480
|
+
*
|
|
2481
|
+
* @param extensionKeys - Array of extension keys to resolve dependencies for
|
|
2482
|
+
* @param config - The extension configuration
|
|
2483
|
+
* @returns Array of all extension keys in topological order (dependencies first)
|
|
2484
|
+
*/
|
|
2485
|
+
/**
 * Resolve dependencies for multiple extensions, merging and deduplicating
 * while preserving overall topological order (dependencies first).
 *
 * @param extensionKeys - Array of extension keys to resolve dependencies for
 * @param config - The extension configuration
 * @returns All extension keys in topological order, without duplicates
 */
function resolveDependenciesForMultiple(extensionKeys, config) {
  const seen = new Set();
  const merged = [];
  for (const key of extensionKeys) {
    for (const dependency of resolveDependencies(key, config)) {
      if (seen.has(dependency)) continue;
      seen.add(dependency);
      merged.push(dependency);
    }
  }
  return merged;
}
|
|
2497
|
+
/**
|
|
2498
|
+
* Resolve dependents for multiple extensions, merging and deduplicating the results.
|
|
2499
|
+
* Returns all dependents in reverse topological order (uninstall order).
|
|
2500
|
+
*
|
|
2501
|
+
* @param extensionKeys - Array of extension keys to resolve dependents for
|
|
2502
|
+
* @param config - The extension configuration
|
|
2503
|
+
* @returns Array of all extension keys in reverse topological order (dependents first)
|
|
2504
|
+
*/
|
|
2505
|
+
/**
 * Resolve dependents for multiple extensions, merging and deduplicating
 * while preserving reverse topological (uninstall) order — dependents first.
 *
 * @param extensionKeys - Array of extension keys to resolve dependents for
 * @param config - The extension configuration
 * @returns All extension keys in reverse topological order, without duplicates
 */
function resolveDependentsForMultiple(extensionKeys, config) {
  const seen = new Set();
  const merged = [];
  for (const key of extensionKeys) {
    for (const dependent of resolveDependents(key, config)) {
      if (seen.has(dependent)) continue;
      seen.add(dependent);
      merged.push(dependent);
    }
  }
  return merged;
}
|
|
2517
|
+
|
|
2518
|
+
//#endregion
|
|
2519
|
+
//#region src/utils/local-dev-setup.ts
|
|
2520
|
+
/**
 * Prepares a cloned template for standalone use outside the monorepo.
 * Prompts user for local package paths and replaces workspace:* dependencies with file: references.
 *
 * @param options.projectDirectory - Root of the freshly cloned project (must contain package.json)
 * @param options.sourcePackagesDir - Optional directory used to pre-fill default local package paths
 * @throws Error if package.json is missing from the project directory
 */
async function prepareForLocalDev(options) {
	const { projectDirectory, sourcePackagesDir } = options;
	const packageJsonPath = path.join(projectDirectory, "package.json");
	if (!fs.existsSync(packageJsonPath)) throw new Error(`package.json not found in ${projectDirectory}`);
	const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
	// Pass 1: collect every dependency pinned with a "workspace:" protocol across
	// all three dependency sections.
	const workspaceDeps = [];
	for (const depType of [
		"dependencies",
		"devDependencies",
		"peerDependencies"
	]) {
		const deps = packageJson[depType];
		if (!deps) continue;
		for (const [pkg, version$1] of Object.entries(deps)) if (typeof version$1 === "string" && version$1.startsWith("workspace:")) workspaceDeps.push({
			pkg,
			depType
		});
	}
	if (workspaceDeps.length === 0) {
		info("No workspace:* dependencies found. Project is ready for standalone use.");
		return;
	}
	console.log("\n🔗 Found workspace dependencies that need to be linked to local packages:\n");
	for (const { pkg } of workspaceDeps) console.log(`  • ${pkg}`);
	console.log("");
	// Pre-fill prompt defaults for the two known first-party packages when a
	// source packages directory was provided.
	const defaultPaths = {};
	if (sourcePackagesDir) {
		defaultPaths["@salesforce/storefront-next-dev"] = path.join(sourcePackagesDir, "storefront-next-dev");
		defaultPaths["@salesforce/storefront-next-runtime"] = path.join(sourcePackagesDir, "storefront-next-runtime");
	}
	// Pass 2: ask the user for a local path for each workspace dependency
	// (deduplicated — a package may appear in more than one dependency section).
	const resolvedPaths = {};
	for (const { pkg } of workspaceDeps) {
		if (resolvedPaths[pkg]) continue;
		const defaultPath = defaultPaths[pkg] || "";
		const defaultExists = defaultPath && fs.existsSync(defaultPath);
		const { localPath } = await prompts({
			type: "text",
			name: "localPath",
			message: `📦 Path to ${pkg}:`,
			initial: defaultExists ? defaultPath : "",
			// Validate that the entered directory exists and looks like a package.
			validate: (value) => {
				if (!value) return "Path is required";
				if (!fs.existsSync(value)) return `Directory not found: ${value}`;
				if (!fs.existsSync(path.join(value, "package.json"))) return `No package.json found in: ${value}`;
				return true;
			}
		});
		if (!localPath) {
			warn(`Skipping ${pkg} - no path provided`);
			continue;
		}
		resolvedPaths[pkg] = localPath;
	}
	// Pass 3: rewrite workspace:* specifiers as file: references; drop any that
	// the user declined to resolve (they would otherwise break installs).
	let modified = false;
	for (const depType of [
		"dependencies",
		"devDependencies",
		"peerDependencies"
	]) {
		const deps = packageJson[depType];
		if (!deps) continue;
		for (const [pkg, version$1] of Object.entries(deps)) if (typeof version$1 === "string" && version$1.startsWith("workspace:")) {
			const localPath = resolvedPaths[pkg];
			if (localPath) {
				const fileRef = `file:${localPath}`;
				info(`Linked ${pkg} → ${fileRef}`);
				deps[pkg] = fileRef;
				modified = true;
			} else {
				warn(`Removing unresolved workspace dependency: ${pkg}`);
				delete deps[pkg];
				modified = true;
			}
		}
	}
	// The monorepo's volta "extends" points at a file that won't exist outside
	// the workspace; strip it (and the whole volta key if now empty).
	if (packageJson.volta?.extends) {
		delete packageJson.volta.extends;
		if (Object.keys(packageJson.volta).length === 0) delete packageJson.volta;
		modified = true;
	}
	if (modified) {
		fs.writeFileSync(packageJsonPath, `${JSON.stringify(packageJson, null, 4)}\n`);
		success("package.json updated with local package links");
		// file:-linked packages need vite adjustments to avoid duplicate React/router instances.
		patchViteConfigForLinkedPackages(projectDirectory, Object.keys(resolvedPaths));
	}
}
|
|
2610
|
+
/**
 * Patches vite.config.ts to fix "You must render this element inside a <HydratedRouter>" errors
 * that occur when using file: linked packages.
 *
 * The fix adds:
 * 1. resolve.dedupe for react, react-dom, react-router (helps with non-linked duplicates)
 * 2. ssr.noExternal for file-linked packages (key fix - bundles them so they use host's dependencies)
 *
 * When packages are in ssr.noExternal, Vite bundles them during SSR instead of externalizing.
 * During bundling, their imports resolve through the host project's node_modules,
 * ensuring all code uses the same react-router instance with the same context.
 *
 * @param projectDirectory - Project root expected to contain vite.config.ts
 * @param linkedPackages - Package names that were rewritten to file: references
 */
function patchViteConfigForLinkedPackages(projectDirectory, linkedPackages) {
	const viteConfigPath = path.join(projectDirectory, "vite.config.ts");
	if (!fs.existsSync(viteConfigPath)) {
		warn("vite.config.ts not found, skipping patch for file-linked packages");
		return;
	}
	if (linkedPackages.length === 0) return;
	// NOTE: the patching below is purely textual (regex + slice splicing), so it
	// assumes a conventionally formatted vite.config.ts; unusual formatting may
	// cause a section to be skipped silently.
	let viteConfig = fs.readFileSync(viteConfigPath, "utf8");
	let modified = false;
	// Step 1: inject resolve.dedupe right after the opening of the `resolve: {`
	// block, unless a dedupe entry already exists anywhere in the file.
	if (!viteConfig.includes("dedupe:")) {
		const resolveMatch = viteConfig.match(/resolve:\s*\{/);
		if (resolveMatch && resolveMatch.index !== void 0) {
			const insertPos = resolveMatch.index + resolveMatch[0].length;
			viteConfig = viteConfig.slice(0, insertPos) + `
        // Deduplicates packages to prevent context issues with file-linked packages
        dedupe: ['react', 'react-dom', 'react-router'],` + viteConfig.slice(insertPos);
			modified = true;
		}
	}
	const packageList = linkedPackages.map((p) => `'${p}'`).join(", ");
	// Step 2: ensure every linked package is listed in ssr.noExternal.
	if (/ssr:\s*\{[^}]*noExternal:/.test(viteConfig)) {
		// An ssr.noExternal array already exists: append only the packages that
		// are not mentioned yet.
		const noExternalArrayRegex = /noExternal:\s*\[([^\]]*)\]/;
		const noExternalMatch = viteConfig.match(noExternalArrayRegex);
		if (noExternalMatch) {
			const existingPackages = noExternalMatch[1];
			const packagesToAdd = linkedPackages.filter((p) => !existingPackages.includes(p));
			if (packagesToAdd.length > 0) {
				const newPackageList = packagesToAdd.map((p) => `'${p}'`).join(", ");
				const newArray = existingPackages.trim() ? `[${existingPackages.trim()}, ${newPackageList}]` : `[${newPackageList}]`;
				viteConfig = viteConfig.replace(noExternalArrayRegex, `noExternal: ${newArray}`);
				modified = true;
			}
		}
	} else {
		// No noExternal yet: insert one inside an existing `ssr: {` block…
		const ssrMatch = viteConfig.match(/ssr:\s*\{/);
		if (ssrMatch && ssrMatch.index !== void 0) {
			const insertPos = ssrMatch.index + ssrMatch[0].length;
			const noExternalBlock = `
        // Bundle file-linked packages so they use host project's dependencies
        // This prevents "You must render this element inside a <HydratedRouter>" errors
        noExternal: [${packageList}],`;
			viteConfig = viteConfig.slice(0, insertPos) + noExternalBlock + viteConfig.slice(insertPos);
			modified = true;
		} else {
			// …or create a whole `ssr` block inside the config's `return {`.
			const returnMatch = viteConfig.match(/return\s*\{/);
			if (returnMatch && returnMatch.index !== void 0) {
				const insertPos = returnMatch.index + returnMatch[0].length;
				const ssrBlock = `
        // SSR config for file-linked packages
        ssr: {
            // Bundle file-linked packages so they use host project's dependencies
            // This prevents "You must render this element inside a <HydratedRouter>" errors
            noExternal: [${packageList}],
            target: 'node',
        },`;
				viteConfig = viteConfig.slice(0, insertPos) + ssrBlock + viteConfig.slice(insertPos);
				modified = true;
			}
		}
	}
	if (modified) {
		fs.writeFileSync(viteConfigPath, viteConfig);
		success("vite.config.ts patched for file-linked packages (ssr.noExternal + resolve.dedupe)");
	} else info("vite.config.ts already configured for file-linked packages");
}
|
|
2687
|
+
|
|
2688
|
+
//#endregion
|
|
2689
|
+
//#region src/create-storefront.ts
|
|
2690
|
+
// Default directory name offered when the user does not supply one.
const DEFAULT_STOREFRONT = "sfcc-storefront";
// Built-in first-party template repository.
const STOREFRONT_NEXT_GITHUB_URL = "https://github.com/SalesforceCommerceCloud/storefront-next-template";
/**
 * Interactive "create storefront" command: clones a template repository,
 * optionally wires it up for local package development, lets the user pick
 * which extensions to keep, prompts for configuration values, and writes the
 * resulting .env file.
 *
 * @param options.name - Storefront directory name (prompted for when omitted)
 * @param options.template - Template git URL (prompted for when omitted)
 * @param options.localPackagesDir - Directory of local monorepo packages for file: linking
 * @param options.verbose - Verbose output for the extension trimming step
 */
const createStorefront = async (options = {}) => {
	// git is required for cloning; bail out early with a clear message if absent.
	try {
		execSync("git --version", { stdio: "ignore" });
	} catch (e) {
		error(`❌ git isn't installed or found in your PATH. Install git before running this command: ${String(e)}`);
		process.exit(1);
	}
	let storefront = options.name;
	if (!storefront) storefront = (await prompts({
		type: "text",
		name: "storefront",
		message: "🏪 What would you like to name your storefront?\n",
		initial: DEFAULT_STOREFRONT
	})).storefront;
	if (!storefront) {
		error("Storefront name is required.");
		process.exit(1);
	}
	console.log("\n");
	let template = options.template;
	if (!template) {
		template = (await prompts({
			type: "select",
			name: "template",
			message: "📄 Which template would you like to use for your storefront?\n",
			choices: [{
				title: "Salesforce B2C Commerce Retail Storefront",
				value: STOREFRONT_NEXT_GITHUB_URL
			}, {
				title: "A different template (I will provide the Github URL)",
				value: "custom"
			}]
		})).template;
		console.log("\n");
		// "custom" is a sentinel: ask for the actual URL in a follow-up prompt.
		if (template === "custom") {
			const { githubUrl } = await prompts({
				type: "text",
				name: "githubUrl",
				message: "🌐 What is the Github URL for your template?\n"
			});
			if (!githubUrl) {
				error("Github URL is required.");
				process.exit(1);
			}
			template = githubUrl;
		}
	}
	if (!template) {
		error("Template is required.");
		process.exit(1);
	}
	// Shallow clone, then detach from the template's history.
	// NOTE(review): template/storefront are interpolated into a shell command
	// unquoted — values with spaces or shell metacharacters would break or be
	// unsafe; verify upstream validation.
	execSync(`git clone --depth 1 ${template} ${storefront}`);
	const gitDir = path.join(storefront, ".git");
	if (fs.existsSync(gitDir)) fs.rmSync(gitDir, {
		recursive: true,
		force: true
	});
	// A file:// template (or explicit local packages dir) means local monorepo
	// development: rewrite workspace deps to file: links.
	if (template.startsWith("file://") || options.localPackagesDir) {
		const templatePath = template.replace("file://", "");
		const sourcePackagesDir = options.localPackagesDir || path.dirname(templatePath);
		await prepareForLocalDev({
			projectDirectory: storefront,
			sourcePackagesDir
		});
	}
	console.log("\n");
	// Optional extension selection step, only when the template ships an
	// extensions config.
	if (fs.existsSync(path.join(storefront, "src", "extensions", "config.json"))) {
		const extensionConfigText = fs.readFileSync(path.join(storefront, "src", "extensions", "config.json"), "utf8");
		const extensionConfig = JSON.parse(extensionConfigText);
		if (extensionConfig.extensions) {
			// Reject configs with circular extension dependencies up front.
			try {
				validateNoCycles(extensionConfig);
			} catch (e) {
				error(`Extension configuration error: ${e.message}`);
				process.exit(1);
			}
			const { selectedExtensions } = await prompts({
				type: "multiselect",
				name: "selectedExtensions",
				message: "🔌 Which extension would you like to enable? (Use arrow keys to select, space to toggle, and enter to confirm.)\n",
				choices: Object.keys(extensionConfig.extensions).map((extension) => ({
					title: `${extensionConfig.extensions[extension].name} - ${extensionConfig.extensions[extension].description}`,
					value: extension,
					selected: extensionConfig.extensions[extension].defaultOn ?? true
				})),
				instructions: false
			});
			// Pull in transitive dependencies of the selection, then tell the
			// user which extensions were auto-added and why.
			const resolvedExtensions = resolveDependenciesForMultiple(selectedExtensions, extensionConfig);
			const selectedSet = new Set(selectedExtensions);
			const autoAdded = resolvedExtensions.filter((ext) => !selectedSet.has(ext));
			if (autoAdded.length > 0) for (const addedExt of autoAdded) {
				const dependentExts = selectedExtensions.filter((selected) => {
					return (extensionConfig.extensions[selected]?.dependencies || []).includes(addedExt) || resolvedExtensions.indexOf(addedExt) < resolvedExtensions.indexOf(selected);
				});
				if (dependentExts.length > 0) {
					const addedName = extensionConfig.extensions[addedExt]?.name || addedExt;
					warn(`${dependentExts.map((ext) => extensionConfig.extensions[ext]?.name || ext).join(", ")} requires ${addedName}. ${addedName} has been automatically added.`);
				}
			}
			// Remove all files/markers belonging to extensions that were NOT enabled.
			const enabledExtensions = Object.fromEntries(resolvedExtensions.map((ext) => [ext, true]));
			trimExtensions(storefront, enabledExtensions, { extensions: extensionConfig.extensions }, options?.verbose || false);
		}
	}
	// Configuration step: prompt for each value declared in config-meta.json,
	// defaulting to whatever .env.default provides.
	const configMeta = JSON.parse(fs.readFileSync(path.join(storefront, "src", "config", "config-meta.json"), "utf8"));
	const envDefaultPath = path.join(storefront, ".env.default");
	let envDefaultValues = {};
	if (fs.existsSync(envDefaultPath)) envDefaultValues = dotenv.parse(fs.readFileSync(envDefaultPath, "utf8"));
	console.log("\n⚙️ We will now configure your storefront before it will be ready to run.\n");
	const configOverrides = {};
	for (const config of configMeta.configs) {
		const answer = await prompts({
			type: "text",
			name: config.key,
			message: `What is the value for ${config.name}? (default: ${envDefaultValues[config.key]})\n`,
			initial: envDefaultValues[config.key] ?? ""
		});
		configOverrides[config.key] = answer[config.key];
	}
	generateEnvFile(storefront, configOverrides);
	const BANNER = `
╔══════════════════════════════════════════════════════════════════╗
║                         CONGRATULATIONS                          ║
╚══════════════════════════════════════════════════════════════════╝

🎉 Congratulations! Your storefront is ready to use! 🎉
What's next:
- Navigate to the storefront directory: cd ${storefront}
- Install dependencies: pnpm install
- Build the storefront: pnpm run build
- Run the development server: pnpm run dev
`;
	console.log(BANNER);
};
|
|
2825
|
+
|
|
2826
|
+
//#endregion
|
|
2827
|
+
//#region src/extensibility/manage-extensions.ts
|
|
2828
|
+
// Path segments (relative to a project root) of the extensions directory.
const EXTENSIONS_DIR = ["src", "extensions"];
// Path segments of the extensions config file inside a project.
const CONFIG_PATH = [...EXTENSIONS_DIR, "config.json"];
// Sub-folders scaffolded for every newly created extension.
const EXTENSION_FOLDERS = [
	"components",
	"locales",
	"hooks",
	"routes"
];
|
|
2836
|
+
/**
 * Console log a message with a specific type.
 * "error" goes to stderr with a ❌ prefix, "success" to stdout with a ✅
 * prefix, and anything else is logged verbatim to stdout.
 *
 * @param message string
 * @param type "error" | "success" | anything else
 */
const consoleLog = (message, type) => {
	if (type === "error") {
		console.error(`❌ ${message}`);
	} else if (type === "success") {
		console.log(`✅ ${message}`);
	} else {
		console.log(message);
	}
};
|
|
2854
|
+
/**
 * Get the path to the extension config file (src/extensions/config.json)
 * inside the given project directory.
 *
 * @param projectDirectory - Project root
 * @returns Absolute/relative path to the config file
 */
const getExtensionConfigPath = (projectDirectory) => path.join(projectDirectory, ...CONFIG_PATH);
|
|
2860
|
+
/**
 * Check if the project directory contains the extensions directory and
 * config.json file, then load it. Exits the process with an error message
 * when the config file is missing.
 *
 * @param projectDirectory - Project root to look in
 * @returns The `extensions` map from config.json (may be undefined if the
 *          file lacks an `extensions` key)
 */
const getExtensionConfig = (projectDirectory) => {
	const configFilePath = getExtensionConfigPath(projectDirectory);
	if (!fs.existsSync(configFilePath)) {
		consoleLog(`Extension config file not found: ${configFilePath}. Are you running this command in the correct project directory?`, "error");
		process.exit(1);
	}
	const parsed = JSON.parse(fs.readFileSync(configFilePath, "utf8"));
	return parsed.extensions;
};
|
|
2871
|
+
/**
 * Common function to get the extension selection from the user.
 *
 * @param type 'multiselect' | 'select'
 * @param extensionConfig Record<string, ExtensionMeta>
 * @param message string
 * @param installedExtensions string[]
 * @param excludeExtensions string[] extensions to exclude from the list, so we can filter out extensions that are already installed
 * @returns string[] — always an array, even for single-select prompts
 */
const getExtensionSelection = async (type, extensionConfig, message, installedExtensions, excludeExtensions = []) => {
	consoleLog("\n", "info");
	// Build the choice list, skipping anything the caller asked to exclude.
	const excluded = new Set(excludeExtensions);
	const choices = [];
	for (const extensionKey of installedExtensions) {
		if (excluded.has(extensionKey)) continue;
		choices.push({
			title: `${extensionConfig[extensionKey].name} - ${extensionConfig[extensionKey].description}`,
			value: extensionKey
		});
	}
	const { selectedExtensions } = await prompts({
		type,
		name: "selectedExtensions",
		message,
		choices,
		instructions: false
	});
	// Normalize: multiselect already yields an array; wrap single selections.
	if (type === "multiselect") return selectedExtensions;
	return [selectedExtensions];
};
|
|
2894
|
+
/**
 * Handle the uninstallation of extensions
 * @param extensionConfig Record<string, ExtensionMeta>
 * @param options {
	projectDirectory: string;
	extensions?: string[];
	verbose?: boolean;
 }
 * @returns void
 */
const handleUninstall = async (extensionConfig, options) => {
	let installedExtensions = Object.keys(extensionConfig);
	if (installedExtensions.length === 0) {
		consoleLog("\n You have not installed any extensions yet.", "error");
		return;
	}
	// Use the explicit --extensions list when given; otherwise prompt.
	const selectedExtensions = options.extensions ? options.extensions : await getExtensionSelection("multiselect", extensionConfig, "🔌 Which extensions would you like to uninstall?", installedExtensions);
	if (selectedExtensions == null || selectedExtensions.length === 0) {
		consoleLog("\n Please select at least one extension to uninstall.", "error");
		return;
	}
	// Expand the selection with everything that depends on it (reverse
	// topological order), restricted to extensions actually installed.
	const allToUninstall = resolveDependentsForMultiple(selectedExtensions, { extensions: extensionConfig });
	const installedSet = new Set(installedExtensions);
	const extensionsToUninstall = allToUninstall.filter((key) => installedSet.has(key));
	const selectedSet = new Set(selectedExtensions);
	const additionalDependents = extensionsToUninstall.filter((key) => !selectedSet.has(key));
	// If the expansion pulled in extra extensions, explain why and confirm.
	if (additionalDependents.length > 0) {
		consoleLog("\n", "info");
		consoleLog(`Uninstalling the selected extension(s) will also uninstall the following dependent extensions:`, "info");
		additionalDependents.forEach((depKey) => {
			const depExtension = extensionConfig[depKey];
			// Find a selected extension this dependent directly depends on, for
			// a human-readable explanation (falls back to a generic phrase).
			const dependsOn = selectedExtensions.find((selKey) => {
				return extensionConfig[selKey] && extensionConfig[depKey]?.dependencies?.includes(selKey);
			});
			const dependsOnName = dependsOn ? extensionConfig[dependsOn]?.name : "selected extension";
			consoleLog(`  • ${depExtension?.name || depKey} (depends on ${dependsOnName})`, "info");
		});
		consoleLog("\n", "info");
		const { confirmUninstall } = await prompts({
			type: "confirm",
			name: "confirmUninstall",
			message: `Uninstall all ${extensionsToUninstall.length} extensions?`,
			initial: true
		});
		if (!confirmUninstall) {
			consoleLog("Uninstallation aborted.", "info");
			return;
		}
	}
	// Delete each extension's folder under src/extensions (if it has one).
	extensionsToUninstall.forEach((ext) => {
		if (extensionConfig[ext]?.folder) fs.rmSync(path.join(options.projectDirectory, ...EXTENSIONS_DIR, extensionConfig[ext].folder), {
			recursive: true,
			force: true
		});
	});
	// Re-run the trimmer with only the remaining extensions enabled so marker
	// blocks for removed extensions are stripped from shared files.
	const extensionsToUninstallSet = new Set(extensionsToUninstall);
	installedExtensions = installedExtensions.filter((ext) => !extensionsToUninstallSet.has(ext));
	trimExtensions(options.projectDirectory, Object.fromEntries(installedExtensions.map((ext) => [ext, true])), { extensions: extensionConfig }, options.verbose ?? false);
	consoleLog(" Extensions uninstalled.", "success");
};
|
|
2954
|
+
/**
 * Install a single extension (internal helper).
 * Copies the extension folder from the cloned source repo, optionally runs
 * its LLM-driven installation instructions via cursor-agent, and records it
 * in the project's extension config file.
 *
 * @param extensionKey - Key of the extension in the source config
 * @param srcExtensionConfig - Extension map from the source (template) repo
 * @param extensionConfig - The project's extension map; mutated on success
 * @param tmpDir - Directory the source repo was cloned into
 * @param projectDirectory - Target project root
 * @returns true if installation succeeded, false otherwise
 */
const installSingleExtension = (extensionKey, srcExtensionConfig, extensionConfig, tmpDir, projectDirectory) => {
	const extension = srcExtensionConfig[extensionKey];
	const startTime = Date.now();
	// Copy the extension's file tree into the project, if it ships one.
	if (extension.folder) fs.copySync(path.join(tmpDir, ...EXTENSIONS_DIR, extension.folder), path.join(projectDirectory, ...EXTENSIONS_DIR, extension.folder));
	if (extension.installationInstructions) {
		console.log(`\n⏳ Installing ${extension.name}, this will take a few minutes...`);
		try {
			// SECURITY NOTE(review): installationInstructions comes from the
			// (user-supplied) source repo and is interpolated into a shell
			// command inside single quotes — a value containing a quote could
			// break out of the quoting. Consider execFileSync with an args array.
			execSync(`cursor-agent -p --force 'Execute the steps specified in the installation instructions file: ${extension.installationInstructions}' --output-format text`, {
				cwd: projectDirectory,
				stdio: "inherit"
			});
		} catch (e) {
			consoleLog(`Error installing ${extension.name}. ${e.message}`, "error");
			return false;
		}
	}
	// Persist the newly installed extension into the project's config.json.
	extensionConfig[extensionKey] = extension;
	fs.writeFileSync(getExtensionConfigPath(projectDirectory), JSON.stringify({ extensions: extensionConfig }, null, 4));
	consoleLog(`${extension.name} was installed successfully. (${Date.now() - startTime}ms)`, "success");
	return true;
};
|
|
2979
|
+
/**
 * Handle the installation of extensions
 * @param extensionConfig
 * @param options {
	sourceGithubUrl?: string;
	projectDirectory: string;
	extensions?: string[];
	verbose?: boolean;
 }
 * @returns
 */
const handleInstall = async (extensionConfig, options) => {
	const { sourceGitUrl } = await prompts({
		type: "text",
		name: "sourceGitUrl",
		message: "🌐 What is the Git URL for the extensions project?",
		initial: options.sourceGitUrl
	});
	// Clone the extensions repo into a throwaway temp dir (removed in finally).
	const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), `sfnext-extensions-${Date.now()}`));
	execSync(`git clone ${sourceGitUrl} ${tmpDir}`);
	const srcExtensionConfig = getExtensionConfig(tmpDir);
	if (srcExtensionConfig == null || Object.keys(srcExtensionConfig).length === 0) {
		consoleLog(`No extensions found in the source project, please check ${path.join(...CONFIG_PATH)} exists in ${sourceGitUrl} and contains at least one extension.`, "error");
		return;
	}
	// Offer only extensions that are not already installed in the project.
	const selectedExtensions = options.extensions ? options.extensions : await getExtensionSelection("select", srcExtensionConfig, "🔌 Which extension would you like to install?", Object.keys(srcExtensionConfig), Object.keys(extensionConfig));
	if (selectedExtensions == null || selectedExtensions.length !== 1 || selectedExtensions[0] == null) {
		consoleLog("Please select exactly one extension to install.", "error");
		return;
	}
	const extensionKey = selectedExtensions[0];
	const extension = srcExtensionConfig[extensionKey];
	// LLM-driven install steps require the cursor CLI; verify it is available.
	if (Object.values(srcExtensionConfig).some((ext) => ext.installationInstructions)) try {
		execSync("cursor-agent -v", { stdio: "ignore" });
	} catch (e) {
		consoleLog("This extension contains LLM instructions, please install cursor cli and try again. (https://cursor.com/docs/cli/overview)", "error");
		return;
	}
	const srcConfig = { extensions: srcExtensionConfig };
	// missingDeps is the full install order; its LAST element is the selected
	// extension itself, so the "dependencies" list excludes it.
	const missingDeps = getMissingDependencies(extensionKey, Object.keys(extensionConfig), srcConfig);
	const dependenciesToInstall = missingDeps.slice(0, -1);
	let hasError = false;
	try {
		if (dependenciesToInstall.length > 0) {
			consoleLog("\n", "info");
			consoleLog(`Installing ${extension.name} requires the following dependencies:`, "info");
			dependenciesToInstall.forEach((depKey) => {
				const depExtension = srcExtensionConfig[depKey];
				consoleLog(`  • ${depExtension?.name || depKey} (not installed)`, "info");
			});
			consoleLog("\n", "info");
			// Rough estimate of ~5 minutes per extension, for the confirmation prompt.
			const estimatedMinutes = missingDeps.length * 5;
			const { confirmInstall } = await prompts({
				type: "confirm",
				name: "confirmInstall",
				message: `Install all ${missingDeps.length} extensions? (~${estimatedMinutes} minutes total)`,
				initial: true
			});
			if (!confirmInstall) {
				consoleLog("Installation aborted.", "info");
				return;
			}
		}
		// Install dependencies first, then the selected extension (dependency order).
		for (const depKey of missingDeps) if (!installSingleExtension(depKey, srcExtensionConfig, extensionConfig, tmpDir, options.projectDirectory)) hasError = true;
	} finally {
		// Always clean up the cloned temp repo, even on abort/error.
		fs.rmSync(tmpDir, {
			recursive: true,
			force: true
		});
	}
	// Surface any files the installer backed up as "*.original" before modifying.
	const originalFiles = fs.readdirSync(path.join(options.projectDirectory, "src"), { recursive: true }).filter((file) => file.toString().endsWith(".original"));
	if (originalFiles.length > 0) {
		consoleLog("\n📄 The following files were modified. The original files are still available in the same location with the \".original\" extension.:", "info");
		originalFiles.forEach((file) => {
			consoleLog(`- ${file.toString().replace(".original", "")}`, "info");
		});
	}
	if (!hasError) consoleLog("\n🚀 Installation completed successfully.", "info");
};
|
|
3058
|
+
/**
 * Entry point for managing extensions: dispatches to the install or uninstall
 * flow, prompting for the operation when neither flag was supplied.
 *
 * Fix: previously, cancelling the operation prompt (e.g. Ctrl+C) left
 * `operation` undefined, which fell through the final `else` into the
 * install flow. Now an unselected operation aborts cleanly.
 *
 * @param options.install - Run the install flow without prompting
 * @param options.uninstall - Run the uninstall flow without prompting
 * @param options.projectDirectory - Project root containing src/extensions/config.json
 */
const manageExtensions = async (options) => {
	if (options.install && options.uninstall) {
		consoleLog("Please select either install or uninstall, not both.", "error");
		return;
	}
	let operation = options.install ? "install" : options.uninstall ? "uninstall" : void 0;
	const extensionConfig = getExtensionConfig(options.projectDirectory);
	if (operation == null) operation = (await prompts({
		type: "select",
		name: "operation",
		message: "🤔 What would you like to do?",
		choices: [{
			title: "Install extensions",
			value: "install"
		}, {
			title: "Uninstall extensions",
			value: "uninstall"
		}]
	})).operation;
	// Guard: a cancelled prompt yields no operation — abort instead of
	// defaulting into the install flow.
	if (operation == null) {
		consoleLog("No operation selected. Aborting.", "error");
		return;
	}
	if (operation === "uninstall") await handleUninstall(extensionConfig, options);
	else await handleInstall(extensionConfig, options);
};
|
|
3080
|
+
/**
 * Derive the uppercase config marker key for an extension name:
 * spaces and dashes become underscores, prefixed with "SFDC_EXT_".
 *
 * @param val - Human-readable extension name
 * @returns Marker string, e.g. "SFDC_EXT_MY_EXTENSION"
 */
const getExtensionMarker = (val) => {
	const normalized = val.toUpperCase().replaceAll(" ", "_").replaceAll("-", "_");
	return `SFDC_EXT_${normalized}`;
};
|
|
3083
|
+
/**
 * Derive the on-disk folder name for an extension: lowercase with spaces
 * replaced by dashes.
 *
 * Fix: the original applied `.trim()` AFTER `replaceAll(" ", "-")`, so
 * leading/trailing whitespace had already been turned into stray dashes and
 * the trim was a no-op (" My Ext " → "-my-ext-"). Trimming first removes the
 * surrounding whitespace as intended; inputs without edge whitespace are
 * unaffected.
 *
 * @param val - Human-readable extension name
 * @returns Folder name, e.g. "my-extension"
 */
const getExtensionFolderName = (val) => {
	return val.trim().toLowerCase().replaceAll(" ", "-");
};
|
|
3086
|
+
/**
 * Build a zod schema that validates a proposed extension name:
 * - only alphanumerics, spaces, dashes, underscores
 * - its derived marker must not already exist in the config
 * - its derived folder must not already exist on disk
 *
 * @param projectDirectory - Project root (used for the folder-exists check)
 * @param extensionConfig - Existing extension map (used for the marker check)
 * @returns zod schema for `{ name }`
 */
const getExtensionNameSchema = (projectDirectory, extensionConfig) => {
	const nameField = z.string().regex(/^[a-zA-Z0-9 _-]+$/, { message: "Extension name can only contain alphanumeric characters, spaces, dashes, or underscores" });
	const schema = z.object({ name: nameField });
	return schema.superRefine((data, ctx) => {
		const marker = getExtensionMarker(data.name);
		if (extensionConfig[marker]) {
			ctx.addIssue({
				code: z.ZodIssueCode.custom,
				message: `Extension "${data.name}" already exists`
			});
		}
		const folderPath = path.join(projectDirectory, ...EXTENSIONS_DIR, getExtensionFolderName(data.name));
		if (fs.existsSync(folderPath)) {
			ctx.addIssue({
				code: z.ZodIssueCode.custom,
				message: `Extension directory ${getExtensionFolderName(data.name)} already exists`
			});
		}
	});
};
|
|
3098
|
+
/**
 * Print the name and description of every installed extension.
 *
 * @param options.projectDirectory - Project root containing the extension config
 */
const listExtensions = (options) => {
	const extensionConfig = getExtensionConfig(options.projectDirectory);
	consoleLog("The following extensions are installed:", "info");
	for (const key of Object.keys(extensionConfig)) {
		const { name, description } = extensionConfig[key];
		consoleLog(`- ${name}: ${description}`, "info");
	}
};
|
|
3105
|
+
/**
 * Scaffold a brand-new extension: prompts for name/description when not
 * supplied, validates the name, creates the extension folder structure with a
 * README, and registers the extension in config.json.
 *
 * @param options.projectDirectory - Project root
 * @param options.name - Extension name (prompted for when blank)
 * @param options.description - Extension description (prompted for when blank)
 */
const createExtension = async (options) => {
	const { projectDirectory, name, description } = options;
	const extensionConfig = getExtensionConfig(projectDirectory);
	let extensionName = name;
	let extensionDescription = description;
	if (extensionName == null || extensionName.trim() === "") extensionName = (await prompts({
		type: "text",
		name: "extensionName",
		message: "What would you like to name the extension? (e.g., \"My Extension\")"
	})).extensionName;
	// Validate character set, marker uniqueness, and folder availability.
	// NOTE(review): a cancelled prompt leaves extensionName undefined here;
	// presumably the schema rejects it, but confirm zod's behavior for undefined.
	const result = getExtensionNameSchema(projectDirectory, extensionConfig).safeParse({ name: extensionName });
	if (!result.success) {
		const firstIssueMessage = result.error.issues?.[0]?.message;
		consoleLog(firstIssueMessage, "error");
		return;
	}
	if (extensionDescription == null || extensionDescription.trim() === "") extensionDescription = (await prompts({
		type: "text",
		name: "extensionDescription",
		message: "How would you describe the extension?"
	})).extensionDescription;
	// Create src/extensions/<folder> plus the standard sub-folders.
	const folderName = getExtensionFolderName(extensionName);
	const extensionFolderPath = path.join(projectDirectory, ...EXTENSIONS_DIR, folderName);
	fs.mkdirSync(extensionFolderPath, { recursive: true });
	EXTENSION_FOLDERS.forEach((folder) => {
		fs.mkdirSync(path.join(extensionFolderPath, folder), { recursive: true });
	});
	fs.writeFileSync(path.join(extensionFolderPath, "README.md"), `# ${extensionName}\n\n${extensionDescription}`);
	// Register the extension in config.json under its derived marker key.
	const marker = getExtensionMarker(extensionName);
	extensionConfig[marker] = {
		name: extensionName,
		description: extensionDescription,
		installationInstructions: "",
		uninstallationInstructions: "",
		folder: folderName,
		dependencies: []
	};
	fs.writeFileSync(path.join(projectDirectory, ...CONFIG_PATH), JSON.stringify({ extensions: extensionConfig }, null, 4));
	consoleLog(`Extension "${extensionName}" scaffolding was created successfully.`, "success");
};
|
|
3145
|
+
|
|
3146
|
+
//#endregion
|
|
3147
|
+
//#region src/cli.ts
|
|
3148
|
+
// ES modules have no __dirname global; reconstruct it from this module's URL.
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
3149
|
+
/**
 * Validate the --project-directory option and derive the cartridge paths
 * used by the generate/deploy cartridge flows.
 *
 * Exits the process with code 1 when the option is missing or the directory
 * does not exist on disk.
 *
 * @param {{ projectDirectory?: string }} options - Parsed CLI options.
 * @returns {{ projectDirectory: string, cartridgeBaseDir: string, metadataDir: string }}
 */
function validateAndBuildPaths(options) {
	const { projectDirectory } = options;
	if (!projectDirectory) {
		error("--project-directory is required.");
		process.exit(1);
	}
	if (!fs.existsSync(projectDirectory)) {
		error(`Project directory doesn't exist: ${projectDirectory}`);
		process.exit(1);
	}
	// The metadata output folder is nested inside the cartridges base directory.
	const cartridgeBaseDir = path.join(projectDirectory, CARTRIDGES_BASE_DIR);
	const metadataDir = path.join(projectDirectory, CARTRIDGES_BASE_DIR, SFNEXT_BASE_CARTRIDGE_OUTPUT_DIR);
	return {
		projectDirectory,
		cartridgeBaseDir,
		metadataDir
	};
}
|
|
3166
|
+
/**
 * Generate cartridge metadata for a project.
 * Shared by the `generate-cartridge` command and the `push` command
 * (when cartridge generation on push is enabled).
 *
 * @param {string} projectDirectory - Path to the storefront project.
 */
async function runGenerateCartridge(projectDirectory) {
	const paths = validateAndBuildPaths({ projectDirectory });
	// Make sure the output folder exists before the generator writes into it.
	if (!fs.existsSync(paths.metadataDir)) {
		info(`Creating metadata directory: ${paths.metadataDir}`);
		fs.mkdirSync(paths.metadataDir, { recursive: true });
	}
	await generateMetadata(paths.projectDirectory, paths.metadataDir);
}
|
|
3178
|
+
/**
 * Zip and upload the generated cartridge to a Commerce Cloud instance.
 * Shared by the `deploy-cartridge` command and the `push` command
 * (when cartridge deployment on push is enabled).
 *
 * Connection settings are read from the dw.json file one level above this
 * script's directory.
 *
 * @param {string} projectDirectory - Path to the storefront project.
 * @throws {Error} When dw.json is missing, the metadata directory has not
 *   been generated, or a required dw.json field (username, password,
 *   hostname, code-version) is absent.
 */
async function runDeployCartridge(projectDirectory) {
	const dwJsonPath = path.join(__dirname, "..", "dw.json");
	if (!fs.existsSync(dwJsonPath)) {
		throw new Error(`The dw.json file not found in storefront-next-dev directory. Make sure dw.json exists at ${dwJsonPath}`);
	}
	const dwConfig = JSON.parse(fs.readFileSync(dwJsonPath, "utf8"));
	const { cartridgeBaseDir, metadataDir } = validateAndBuildPaths({ projectDirectory });
	if (!fs.existsSync(metadataDir)) {
		throw new Error(`Metadata directory doesn't exist: ${metadataDir}. Run 'generate-cartridge' first.`);
	}
	if (!dwConfig.username || !dwConfig.password) {
		throw new Error("Username and password are required in the dw.json file.");
	}
	const instance = dwConfig.hostname;
	if (!instance) {
		throw new Error("Instance is required. Add \"hostname\" to the dw.json file.");
	}
	const codeVersion = dwConfig["code-version"];
	if (!codeVersion) {
		throw new Error("Code version is required. Add \"code-version\" to the dw.json file.");
	}
	// The upload authenticates with HTTP basic auth: base64("user:pass").
	const encodedCredentials = Buffer.from(`${dwConfig.username}:${dwConfig.password}`).toString("base64");
	const deployment = await deployCode(instance, codeVersion, cartridgeBaseDir, encodedCredentials);
	success(`Code deployed to version "${deployment.version}" successfully!`);
}
|
|
3196
|
+
// Root commander instance for the `sfnext` CLI.
const program = new Command();
// Source template repo used by `extensions install` when --source-git-url is
// not supplied; overridable via the DEFAULT_TEMPLATE_GIT_URL env variable.
const DEFAULT_TEMPLATE_GIT_URL = process.env.DEFAULT_TEMPLATE_GIT_URL || "https://github.com/SalesforceCommerceCloud/storefront-next-template.git";
|
|
3198
|
+
/**
 * Uniform CLI failure handler: log the error (stack when available), then a
 * labelled failure line, and terminate with exit code 1.
 *
 * @param {string} label - Command name shown in the failure message.
 * @param {unknown} err - Whatever was thrown.
 */
const handleCommandError = (label, err) => {
	const isRealError = err instanceof Error;
	if (isRealError) {
		error(err.stack || err.message);
		error(`${label} failed: ${err.message}`);
	} else {
		error(String(err));
		error(`${label} failed`);
	}
	process.exit(1);
};
|
|
3208
|
+
// Top-level program metadata.
program.name("sfnext").description("Dev and build tools for Storefront Next.").version(version);
// `create-storefront`: scaffold a new storefront project.
program
	.command("create-storefront")
	.description("Create a storefront project.")
	.option("-v --verbose", "Verbose mode")
	.option("-n, --name <name>", "Name for the storefront (skips interactive prompt)")
	.option("-t, --template <template>", "Template URL or path (e.g., file:///path/to/template or GitHub URL)")
	.option("-l, --local-packages-dir <dir>", "Local monorepo packages directory for file:// templates (pre-fills dependency paths)")
	.action(async (options) => {
		try {
			const { verbose, name, template, localPackagesDir } = options;
			await createStorefront({
				verbose,
				name,
				template,
				localPackagesDir
			});
		} catch (err) {
			handleCommandError("create-storefront", err);
		}
	});
|
|
3221
|
+
// `prepare-local`: convert workspace:* deps to file: links for local development.
program
	.command("prepare-local")
	.description("Prepare a storefront project for local development with file-linked packages. Converts workspace:* dependencies to file: references and patches vite.config.ts.")
	.option("-d, --project-directory <dir>", "Project directory to prepare", process.cwd())
	.option("-s, --source-packages-dir <dir>", "Source monorepo packages directory (for default path suggestions)")
	.action(async (options) => {
		try {
			const { projectDirectory, sourcePackagesDir } = options;
			await prepareForLocalDev({ projectDirectory, sourcePackagesDir });
			process.exit(0);
		} catch (err) {
			handleCommandError("prepare-local", err);
		}
	});
|
|
3232
|
+
// `push`: optionally generate + deploy the cartridge, then create and upload
// the bundle to Managed Runtime.
program
	.command("push")
	.description("Create and push bundle to Managed Runtime.")
	.requiredOption("-d, --project-directory <dir>", "Project directory")
	.option("-b, --build-directory <dir>", "Build directory to push (default: auto-detected)")
	.option("-m, --message <message>", "Bundle message (default: git branch:commit)")
	.option("-s, --project-slug <slug>", "Project slug - the unique identifier for your project on Managed Runtime (default: from .env MRT_PROJECT or package.json name.)")
	.option("-t, --target <target>", "Deploy target environment (default: from .env MRT_TARGET).")
	.option("-w, --wait", "Wait for deployment to complete.", false)
	.option("--cloud-origin <origin>", "API origin", DEFAULT_CLOUD_ORIGIN)
	.option("-c, --credentials-file <file>", "Credentials file location.")
	.option("-u, --user <email>", "User email for Managed Runtime.")
	.option("-k, --key <api-key>", "API key for Managed Runtime.")
	.action(async (options) => {
		try {
			if (GENERATE_AND_DEPLOY_CARTRIDGE_ON_MRT_PUSH) {
				// Cartridge work is best-effort: a failure is logged as a
				// warning and must not block the MRT push itself.
				try {
					info("Generating cartridge metadata before MRT push...");
					await runGenerateCartridge(options.projectDirectory);
					success("Cartridge metadata generated successfully!");
					info("Deploying cartridge to Commerce Cloud...");
					await runDeployCartridge(options.projectDirectory);
					success("Cartridge deployed successfully!");
				} catch (cartridgeError) {
					// Guard against non-Error throws, which have no .message
					// (the original would print "undefined" here).
					const reason = cartridgeError instanceof Error ? cartridgeError.message : String(cartridgeError);
					error(`Warning: Failed to generate or deploy cartridge: ${reason}`);
				}
			}
			await push({
				projectDirectory: options.projectDirectory,
				buildDirectory: options.buildDirectory,
				message: options.message,
				projectSlug: options.projectSlug,
				target: options.target,
				wait: options.wait,
				cloudOrigin: options.cloudOrigin,
				credentialsFile: options.credentialsFile,
				user: options.user,
				key: options.key
			});
			process.exit(0);
		} catch (err) {
			handleCommandError("Push", err);
		}
	});
|
|
3261
|
+
// `dev`: launch the Vite development server with SSR.
program
	.command("dev")
	.description("Start Vite development server with SSR.")
	.option("-d, --project-directory <dir>", "Project directory (default: current directory).")
	.option("-p, --port <port>", "Port number (default: 5173)", (val) => parseInt(val, 10))
	.action(async (options) => {
		try {
			const { projectDirectory, port } = options;
			await dev({ projectDirectory, port });
		} catch (err) {
			handleCommandError("Dev", err);
		}
	});
|
|
3271
|
+
// `preview`: serve the production build, building first if necessary.
program
	.command("preview")
	.description("Start preview server with production build (auto-builds if needed).")
	.option("-d, --project-directory <dir>", "Project directory (default: current directory).")
	.option("-p, --port <port>", "Port number (default: 3000)", (val) => parseInt(val, 10))
	.action(async (options) => {
		try {
			await preview({
				projectDirectory: options.projectDirectory,
				port: options.port
			});
		} catch (err) {
			// Label corrected from "Serve" to "Preview" so failures are
			// attributed to the command that actually ran (matches the
			// "Dev"/"Push" labels used by sibling commands).
			handleCommandError("Preview", err);
		}
	});
|
|
3281
|
+
// `create-instructions`: render install/uninstall LLM instruction files for
// a feature extension from the Handlebars templates shipped with this package.
program
	.command("create-instructions")
	.description("Generate LLM instructions using prompt templating for installing and uninstalling Storefront Next feature extensions.")
	.requiredOption("-d, --project-directory <dir>", "Project directory.")
	.requiredOption("-c, --extension-config <config>", "Extension config JSON file location.")
	.requiredOption("-e, --extension <extension>", "Extension marker value (e.g. SFDC_EXT_featureA).")
	.option("-p, --template-repo <repo>", "Storefront template repo URL (default: https://github.com/SalesforceCommerceCloud/storefront-next-template.git)")
	.option("-b, --branch <branch>", "Storefront template repo branch (default: main).")
	.option("-f, --files <files...>", "Specific files to include (relative to project directory).")
	.option("-o, --output-dir <dir>", "Output directory (default: ./instructions).")
	.action((options) => {
		try {
			// Resolve user-supplied paths against the invocation directory.
			const cwd = process.cwd();
			const resolvedProjectDir = path.resolve(cwd, options.projectDirectory);
			const resolvedExtensionConfig = path.resolve(cwd, options.extensionConfig);
			generateInstructions(
				resolvedProjectDir,
				options.extension,
				options.outputDir,
				options.templateRepo,
				options.branch,
				options.files ?? void 0,
				resolvedExtensionConfig,
				`${__dirname}/extensibility/templates`
			);
			process.exit(0);
		} catch (err) {
			handleCommandError("create-instructions", err);
		}
	});
|
|
3293
|
+
// Parent command grouping all extension-management subcommands.
const extensionsCommand = program.command("extensions").description("Manage features extensions for a storefront project.");
// `extensions list`: print the extensions installed in a project.
extensionsCommand
	.command("list")
	.description("List all installed extensions.")
	.option("-d, --project-directory <dir>", "Target project directory", process.cwd())
	.action((options) => {
		try {
			listExtensions(options);
		} catch (err) {
			handleCommandError("extensions list", err);
		}
	});
|
|
3301
|
+
// `extensions install`: copy an extension from the source template into the project.
extensionsCommand
	.command("install")
	.description("Install an extension.")
	.option("-d, --project-directory <dir>", "Target project directory.", process.cwd())
	.option("-e, --extension <extension>", "Extension marker value (e.g. SFDC_EXT_STORE_LOCATOR).")
	.option("-s, --source-git-url <url>", "Git URL of the source template project", DEFAULT_TEMPLATE_GIT_URL)
	.option("-v, --verbose", "Verbose mode.")
	.action(async (options) => {
		try {
			// A single marker may be given; undefined defers selection to manageExtensions.
			const extensions = options.extension ? [options.extension] : void 0;
			await manageExtensions({
				projectDirectory: options.projectDirectory,
				install: true,
				extensions,
				sourceGitUrl: options.sourceGitUrl,
				verbose: options.verbose
			});
		} catch (err) {
			handleCommandError("extensions install", err);
		}
	});
|
|
3314
|
+
// `extensions remove`: uninstall one or more extensions from the project.
extensionsCommand
	.command("remove")
	.description("Remove one or more installed extensions.")
	.option("-d, --project-directory <dir>", "Target project directory", process.cwd())
	.option("-e, --extensions <extensions>", "Comma-separated list of extension marker values (e.g. SFDC_EXT_STORE_LOCATOR,SFDC_EXT_INTERNAL_THEME_SWITCHER).")
	.option("-v, --verbose", "Verbose mode.")
	.action(async (options) => {
		try {
			// The CLI accepts a comma-separated list; undefined defers selection
			// to manageExtensions.
			const extensions = options.extensions?.split(",");
			await manageExtensions({
				projectDirectory: options.projectDirectory,
				uninstall: true,
				extensions,
				verbose: options.verbose
			});
		} catch (err) {
			handleCommandError("extensions remove", err);
		}
	});
|
|
3326
|
+
// `extensions create`: scaffold a new extension folder and config entry.
extensionsCommand
	.command("create")
	.description("Create an extension.")
	.option("-p, --project-directory <projectDirectory>", "Target project directory", process.cwd())
	.option("-n, --name <name>", "Name of the extension to create, e.g., \"My Extension\".")
	.option("-d, --description <description>", "Description of the extension.")
	.action(async (options) => {
		try {
			await createExtension(options);
		} catch (err) {
			handleCommandError("extensions create", err);
		}
	});
|
|
3333
|
+
// `create-bundle`: build the MRT bundle locally without uploading it.
program
	.command("create-bundle")
	.description("Create a bundle from the build directory without pushing to Managed Runtime.")
	.requiredOption("-d, --project-directory <dir>", "Project directory")
	.option("-b, --build-directory <dir>", "Build directory to bundle (default: auto-detected)")
	.option("-o, --output-directory <dir>", "Output directory for bundle files (default: .bundle)")
	.option("-m, --message <message>", "Bundle message (default: git branch:commit)")
	.option("-s, --project-slug <slug>", "Project slug - the unique identifier for your project on Managed Runtime (default: from .env MRT_PROJECT or package.json name.)")
	.action(async (options) => {
		try {
			const { projectDirectory, buildDirectory, outputDirectory, message, projectSlug } = options;
			await createBundleCommand({
				projectDirectory,
				buildDirectory,
				outputDirectory,
				message,
				projectSlug
			});
			process.exit(0);
		} catch (err) {
			handleCommandError("create-bundle", err);
		}
	});
|
|
3347
|
+
// `generate-cartridge`: scan decorated components and emit cartridge metadata.
program
	.command("generate-cartridge")
	.description("Generate component cartridge metadata from decorated components.")
	.requiredOption("-d, --project-directory <dir>", "Project directory containing the source code.")
	.action(async (options) => {
		try {
			await runGenerateCartridge(options.projectDirectory);
			process.exit(0);
		} catch (err) {
			// Route through the shared handler: the previous ad-hoc path read
			// err.message on an unknown throw (printing "undefined" for
			// non-Error values) and skipped the stack trace every other
			// command logs.
			handleCommandError("generate-cartridge", err);
		}
	});
|
|
3356
|
+
// `deploy-cartridge`: zip the metadata directory and upload it to Commerce Cloud.
program
	.command("deploy-cartridge")
	.description("Deploy a cartridge to Commerce Cloud (zips and uploads the metadata directory).")
	.requiredOption("-d, --project-directory <dir>", "Project directory containing the source code.")
	.action(async (options) => {
		try {
			await runDeployCartridge(options.projectDirectory);
			process.exit(0);
		} catch (err) {
			// Route through the shared handler: the previous ad-hoc path read
			// err.message on an unknown throw (printing "undefined" for
			// non-Error values) and skipped the stack trace every other
			// command logs.
			handleCommandError("deploy-cartridge", err);
		}
	});
|
|
3365
|
+
// Fail fast on any promise rejection that escapes the command handlers.
process.on("unhandledRejection", (reason, promise) => {
	const details = `Unhandled Rejection at: ${String(promise)}, reason: ${String(reason)}`;
	error(details);
	process.exit(1);
});
|
|
3369
|
+
// Dispatch the CLI; when invoked with no arguments, show the usage screen.
program.parse();
const userArgs = process.argv.slice(2);
if (userArgs.length === 0) program.outputHelp();

//#endregion
export { };
|