@graphcommerce/next-config 9.0.0-canary.106 → 9.0.0-canary.107
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +2 -0
- package/__tests__/commands/copyFiles.ts +512 -0
- package/__tests__/config/utils/__snapshots__/mergeEnvIntoConfig.ts.snap +3 -0
- package/__tests__/config/utils/mergeEnvIntoConfig.ts +4 -17
- package/__tests__/config/utils/rewriteLegancyEnv.ts +30 -35
- package/__tests__/interceptors/findPlugins.ts +38 -53
- package/__tests__/interceptors/generateInterceptors.ts +23 -74
- package/__tests__/utils/resolveDependenciesSync.ts +9 -9
- package/dist/commands/copyFiles.js +132 -40
- package/dist/config/utils/mergeEnvIntoConfig.js +5 -5
- package/dist/generated/config.js +8 -0
- package/dist/interceptors/generateInterceptor.js +3 -5
- package/dist/withGraphCommerce.js +1 -1
- package/package.json +2 -1
- package/src/commands/copyFiles.ts +147 -45
- package/src/config/utils/mergeEnvIntoConfig.ts +6 -7
- package/src/generated/config.ts +18 -0
- package/src/interceptors/generateInterceptor.ts +3 -5
- package/src/withGraphCommerce.ts +1 -1

package/dist/commands/copyFiles.js
CHANGED

@@ -4,18 +4,19 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.copyFiles = copyFiles;
+ /* eslint-disable no-await-in-loop */
  const promises_1 = __importDefault(require("fs/promises"));
  const path_1 = __importDefault(require("path"));
- const …
+ const fast_glob_1 = __importDefault(require("fast-glob"));
  const resolveDependenciesSync_1 = require("../utils/resolveDependenciesSync");
  // Add debug logging helper
  const debug = (...args) => {
  if (process.env.DEBUG)
- console.log('[…
+ console.log('[copy-files]', ...args);
  };
-
- const MANAGED_BY_GC = …
- const MANAGED_LOCALLY = …
+ // Add constants for the magic comments
+ const MANAGED_BY_GC = '// managed by: graphcommerce';
+ const MANAGED_LOCALLY = '// managed by: local';
  const GITIGNORE_SECTION_START = '# managed by: graphcommerce';
  const GITIGNORE_SECTION_END = '# end managed by: graphcommerce';
  /**
@@ -29,10 +30,9 @@ const GITIGNORE_SECTION_END = '# end managed by: graphcommerce';
  async function updateGitignore(managedFiles) {
  const gitignorePath = path_1.default.join(process.cwd(), '.gitignore');
  let content;
- debug('Updating .gitignore with managed files:', managedFiles);
  try {
  content = await promises_1.default.readFile(gitignorePath, 'utf-8');
- debug('…
+ debug('Reading existing .gitignore');
  }
  catch (err) {
  debug('.gitignore not found, creating new file');
@@ -41,7 +41,6 @@ async function updateGitignore(managedFiles) {
  // Remove existing GraphCommerce section if it exists
  const sectionRegex = new RegExp(`${GITIGNORE_SECTION_START}[\\s\\S]*?${GITIGNORE_SECTION_END}\\n?`, 'g');
  content = content.replace(sectionRegex, '');
- debug('Content after removing existing section:', content);
  // Only add new section if there are files to manage
  if (managedFiles.length > 0) {
  const newSection = [
@@ -50,22 +49,15 @@ async function updateGitignore(managedFiles) {
  GITIGNORE_SECTION_END,
  '', // Empty line at the end
  ].join('\n');
- debug('New section to add:', newSection);
  // Append the new section
  content = `${content.trim()}\n\n${newSection}`;
+ debug(`Updated .gitignore with ${managedFiles.length} managed files`);
  }
  else {
- // Just trim the content when no files to manage
  content = `${content.trim()}\n`;
+ debug('Cleaned up .gitignore managed section');
  }
-
- try {
- await promises_1.default.writeFile(gitignorePath, content);
- debug('Successfully wrote .gitignore file');
- }
- catch (err) {
- console.error('Error writing .gitignore:', err);
- }
+ await promises_1.default.writeFile(gitignorePath, content);
  }
  /** Determines how a file should be managed based on its content */
  function getFileManagement(content) {
@@ -91,44 +83,84 @@ function getFileManagement(content) {
  * 4. If the file is managed by graphcommerce: Update if content differs
  */
  async function copyFiles() {
+ const startTime = performance.now();
  debug('Starting copyFiles');
  const cwd = process.cwd();
  const deps = (0, resolveDependenciesSync_1.resolveDependenciesSync)();
  const packages = [...deps.values()].filter((p) => p !== '.');
- debug('Found packages:', packages);
  // Track files and their source packages to detect conflicts
  const fileMap = new Map();
- // Track which files are managed by GraphCommerce
  const managedFiles = new Set();
+ const existingManagedFiles = new Set();
+ // First scan existing files to find GraphCommerce managed ones
+ const scanStart = performance.now();
+ try {
+ // Use only default patterns for testing
+ const gitignorePatterns = [
+ '**/dist/**',
+ '**/build/**',
+ '**/.next/**',
+ '**/.git/**',
+ '**/node_modules/**',
+ ];
+ const allFiles = await (0, fast_glob_1.default)('**/*', {
+ cwd,
+ dot: true,
+ ignore: gitignorePatterns,
+ onlyFiles: true,
+ });
+ debug(`Found ${allFiles.length} project files in ${(performance.now() - scanStart).toFixed(0)}ms`);
+ const readStart = performance.now();
+ await Promise.all(allFiles.map(async (file) => {
+ const filePath = path_1.default.join(cwd, file);
+ try {
+ const content = await promises_1.default.readFile(filePath);
+ if (getFileManagement(content) === 'graphcommerce') {
+ existingManagedFiles.add(file);
+ debug(`Found existing managed file: ${file}`);
+ }
+ }
+ catch (err) {
+ debug(`Error reading file ${file}:`, err);
+ }
+ }));
+ debug(`Read ${existingManagedFiles.size} managed files in ${(performance.now() - readStart).toFixed(0)}ms`);
+ }
+ catch (err) {
+ debug('Error scanning project files:', err);
+ }
  // First pass: collect all files and check for conflicts
+ const collectStart = performance.now();
  await Promise.all(packages.map(async (pkg) => {
  const copyDir = path_1.default.join(pkg, 'copy');
  try {
- const files = await (0, …
-
-
- const …
-
-
-
+ const files = await (0, fast_glob_1.default)('**/*', { cwd: copyDir, dot: true, suppressErrors: true });
+ if (files.length > 0) {
+ debug(`Found files in ${pkg}:`, files);
+ for (const file of files) {
+ const sourcePath = path_1.default.join(copyDir, file);
+ const existing = fileMap.get(file);
+ if (existing) {
+ console.error(`Error: File conflict detected for '${file}'
  Found in packages:
  - ${existing.packagePath} -> ${existing.sourcePath}
  - ${pkg} -> ${sourcePath}`);
-
+ process.exit(1);
+ }
+ fileMap.set(file, { sourcePath, packagePath: pkg });
  }
- fileMap.set(file, { sourcePath, packagePath: pkg });
  }
  }
  catch (err) {
-
-
-
-
- process.exit(1);
- }
+ if (err.code === 'ENOENT')
+ return;
+ console.error(`Error scanning directory ${copyDir}: ${err.message}\nPath: ${copyDir}`);
+ process.exit(1);
  }
  }));
-
+ debug(`Collected ${fileMap.size} files in ${(performance.now() - collectStart).toFixed(0)}ms`);
+ // Second pass: copy files and handle removals
+ const copyStart = performance.now();
  await Promise.all(Array.from(fileMap.entries()).map(async ([file, { sourcePath }]) => {
  const targetPath = path_1.default.join(cwd, file);
  debug(`Processing file: ${file}`);
@@ -136,8 +168,7 @@ Path: ${copyDir}`);
  await promises_1.default.mkdir(path_1.default.dirname(targetPath), { recursive: true });
  const sourceContent = await promises_1.default.readFile(sourcePath);
  const contentWithComment = Buffer.concat([
- Buffer.from(`${MANAGED_BY_GC}\n`),
- Buffer.from('// to modify this file, change it to managed by: local\n\n'),
+ Buffer.from(`${MANAGED_BY_GC}\n// to modify this file, change it to managed by: local\n\n`),
  sourceContent,
  ]);
  let targetContent;
@@ -189,13 +220,74 @@ Source: ${sourcePath}`);
  process.exit(1);
  }
  }));
-
+ debug(`Copied ${managedFiles.size} files in ${(performance.now() - copyStart).toFixed(0)}ms`);
+ // Remove files that are no longer provided
+ const removeStart = performance.now();
+ const filesToRemove = Array.from(existingManagedFiles).filter((file) => !managedFiles.has(file));
+ debug(`Files to remove: ${filesToRemove.length}`);
+ // Helper function to recursively clean up empty directories
+ async function cleanupEmptyDirs(startPath) {
+ let currentDir = startPath;
+ while (currentDir !== cwd) {
+ try {
+ const dirContents = await promises_1.default.readdir(currentDir);
+ if (dirContents.length === 0) {
+ await promises_1.default.rmdir(currentDir);
+ debug(`Removed empty directory: ${currentDir}`);
+ currentDir = path_1.default.dirname(currentDir);
+ }
+ else {
+ break; // Stop if directory is not empty
+ }
+ }
+ catch (err) {
+ if (err.code === 'EACCES') {
+ console.error(`Error cleaning up directory ${currentDir}: ${err.message}`);
+ process.exit(1);
+ }
+ break; // Stop on other errors (like ENOENT)
+ }
+ }
+ }
+ // Process file removals in parallel
+ await Promise.all(filesToRemove.map(async (file) => {
+ const filePath = path_1.default.join(cwd, file);
+ const dirPath = path_1.default.dirname(filePath);
+ try {
+ // First check if the directory exists and is accessible
+ await promises_1.default.readdir(dirPath);
+ // Then try to remove the file
+ try {
+ await promises_1.default.unlink(filePath);
+ console.log(`Removed managed file: ${file}`);
+ debug(`Removed file: ${file}`);
+ }
+ catch (err) {
+ if (err.code !== 'ENOENT') {
+ console.error(`Error removing file ${file}: ${err.message}`);
+ process.exit(1);
+ }
+ }
+ // Finally, try to clean up empty directories
+ await cleanupEmptyDirs(dirPath);
+ }
+ catch (err) {
+ if (err.code === 'EACCES') {
+ console.error(`Error accessing directory ${dirPath}: ${err.message}`);
+ process.exit(1);
+ }
+ // Ignore ENOENT errors for directories that don't exist
+ }
+ }));
+ debug(`Removed files in ${(performance.now() - removeStart).toFixed(0)}ms`);
+ // Update .gitignore with current list of managed files
  if (managedFiles.size > 0) {
  debug('Found managed files:', Array.from(managedFiles));
  await updateGitignore(Array.from(managedFiles));
  }
  else {
  debug('No managed files found, cleaning up .gitignore section');
- await updateGitignore([]);
+ await updateGitignore([]);
  }
+ debug(`Total execution time: ${(performance.now() - startTime).toFixed(0)}ms`);
  }
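
The managed-file mechanism above is easier to read outside the diff. The sketch below is illustrative only (withManagementHeader is a hypothetical helper name, not part of the package); it shows the header that copyFiles() now prepends to every copied file and that getFileManagement() later inspects to decide whether a file is managed by GraphCommerce or locally.

    // Sketch of the header handling shown in the diff above; `withManagementHeader`
    // is a hypothetical name used only for this illustration.
    const MANAGED_BY_GC = '// managed by: graphcommerce'
    const MANAGED_LOCALLY = '// managed by: local'

    function withManagementHeader(source: Buffer): Buffer {
      return Buffer.concat([
        Buffer.from(`${MANAGED_BY_GC}\n// to modify this file, change it to managed by: local\n\n`),
        source,
      ])
    }

    // A copied file therefore starts with:
    //   // managed by: graphcommerce
    //   // to modify this file, change it to managed by: local
    // Per the doc comments above, replacing that first line with MANAGED_LOCALLY
    // ('// managed by: local') keeps copyFiles() from overwriting the file.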

package/dist/config/utils/mergeEnvIntoConfig.js
CHANGED

@@ -155,7 +155,7 @@ function formatAppliedEnv(applyResult) {
  const lines = applyResult.map(({ from, to, envValue, envVar, dotVar, error, warning }) => {
  const fromFmt = chalk_1.default.red(JSON.stringify(from));
  const toFmt = chalk_1.default.green(JSON.stringify(to));
- const envVariableFmt = `${envVar}…
+ const envVariableFmt = `${envVar}`;
  const dotVariableFmt = chalk_1.default.bold.underline(`${dotVar}`);
  const baseLog = `${envVariableFmt} => ${dotVariableFmt}`;
  if (error) {
@@ -169,12 +169,12 @@ function formatAppliedEnv(applyResult) {
  if (!dotVar)
  return chalk_1.default.red(`${envVariableFmt} => ignored (no matching config)`);
  if (from === undefined && to === undefined)
- return ` = ${baseLog}: (ignored…
+ return ` = ${baseLog}: (ignored)`;
  if (from === undefined && to !== undefined)
- return ` ${chalk_1.default.green('+')} ${baseLog}…
+ return ` ${chalk_1.default.green('+')} ${baseLog}`;
  if (from !== undefined && to === undefined)
- return ` ${chalk_1.default.red('-')} ${baseLog}…
- return ` ${chalk_1.default.yellowBright('~')} ${baseLog}…
+ return ` ${chalk_1.default.red('-')} ${baseLog}`;
+ return ` ${chalk_1.default.yellowBright('~')} ${baseLog}`;
  });
  let header = chalk_1.default.blueBright('info');
  if (hasWarning)
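
The four return branches above differ only in their prefix. For reference, a runnable sketch of the line layout they produce (chalk colouring omitted; GC_DEMO_MODE and demoMode are placeholder names used only for illustration):

    // Sketch of the per-variable lines logged by formatAppliedEnv, per the branches above.
    const envVar = 'GC_DEMO_MODE' // placeholder env variable
    const dotVar = 'demoMode' // placeholder config path
    const baseLog = `${envVar} => ${dotVar}`

    console.log(` = ${baseLog}: (ignored)`) // from and to both undefined
    console.log(` + ${baseLog}`) // value added
    console.log(` - ${baseLog}`) // value removed
    console.log(` ~ ${baseLog}`) // value changed
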
package/dist/generated/config.js
CHANGED
@@ -4,6 +4,7 @@ exports.WebsitePermissionsSchema = exports.SidebarGalleryPaginationVariantSchema
  exports.DatalayerConfigSchema = DatalayerConfigSchema;
  exports.GraphCommerceConfigSchema = GraphCommerceConfigSchema;
  exports.GraphCommerceDebugConfigSchema = GraphCommerceDebugConfigSchema;
+ exports.GraphCommerceGooglePlaystoreConfigSchema = GraphCommerceGooglePlaystoreConfigSchema;
  exports.GraphCommercePermissionsSchema = GraphCommercePermissionsSchema;
  exports.GraphCommerceStorefrontConfigSchema = GraphCommerceStorefrontConfigSchema;
  exports.MagentoConfigurableVariantValuesSchema = MagentoConfigurableVariantValuesSchema;
@@ -46,6 +47,7 @@ function GraphCommerceConfigSchema() {
  demoMode: zod_1.z.boolean().default(true).nullish(),
  enableGuestCheckoutLogin: zod_1.z.boolean().nullish(),
  googleAnalyticsId: zod_1.z.string().nullish(),
+ googlePlaystore: GraphCommerceGooglePlaystoreConfigSchema().nullish(),
  googleRecaptchaKey: zod_1.z.string().nullish(),
  googleTagmanagerId: zod_1.z.string().nullish(),
  hygraphEndpoint: zod_1.z.string().min(1),
@@ -77,6 +79,12 @@ function GraphCommerceDebugConfigSchema() {
  webpackDuplicatesPlugin: zod_1.z.boolean().nullish()
  });
  }
+ function GraphCommerceGooglePlaystoreConfigSchema() {
+ return zod_1.z.object({
+ packageName: zod_1.z.string().min(1),
+ sha256CertificateFingerprint: zod_1.z.string().min(1)
+ });
+ }
  function GraphCommercePermissionsSchema() {
  return zod_1.z.object({
  cart: exports.CartPermissionsSchema.nullish(),
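
The new schema only requires two non-empty strings. Below is a minimal sketch of a value it accepts, mirroring the generated zod schema above (values are placeholders; how the value is wired into a project's GraphCommerce config is not shown in this diff):

    import { z } from 'zod'

    // Mirrors GraphCommerceGooglePlaystoreConfigSchema from the generated config above.
    const GraphCommerceGooglePlaystoreConfigSchema = () =>
      z.object({
        packageName: z.string().min(1),
        sha256CertificateFingerprint: z.string().min(1),
      })

    const googlePlaystore = GraphCommerceGooglePlaystoreConfigSchema().parse({
      packageName: 'com.example.storefront', // placeholder
      sha256CertificateFingerprint: 'AA:BB:CC:…', // placeholder
    })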

package/dist/interceptors/generateInterceptor.js
CHANGED

@@ -41,8 +41,8 @@ function isReplacePluginConfig(plugin) {
  function isPluginConfig(plugin) {
  return isPluginBaseConfig(plugin);
  }
- exports.SOURCE_START = '/**…
- exports.SOURCE_END = '/**…
+ exports.SOURCE_START = '/** SOURCE_START */';
+ exports.SOURCE_END = '/** SOURCE_END */';
  const originalSuffix = 'Original';
  const interceptorSuffix = 'Interceptor';
  const disabledSuffix = 'Disabled';
@@ -68,9 +68,7 @@ const generateIdentifyer = (s) => Math.abs(s.split('').reduce((a, b) => {
  // eslint-disable-next-line no-bitwise
  return a & a;
  }, 0)).toString();
- /**
- * The is on the first line, with the format: \/* hash:${identifer} *\/
- */
+ /** The is on the first line, with the format: /* hash:${identifer} */
  function extractIdentifier(source) {
  if (!source)
  return null;

package/dist/withGraphCommerce.js
CHANGED

@@ -41,10 +41,10 @@ function withGraphCommerce(nextConfig, cwd) {
  ];
  return {
  ...nextConfig,
+ bundlePagesRouterDependencies: true,
  experimental: {
  ...nextConfig.experimental,
  scrollRestoration: true,
- bundlePagesExternals: true,
  swcPlugins: [...(nextConfig.experimental?.swcPlugins ?? []), ['@lingui/swc-plugin', {}]],
  },
  i18n: {
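
This swaps the removed experimental.bundlePagesExternals flag for the top-level bundlePagesRouterDependencies option, its stable successor in newer Next.js releases. A sketch of how a project would pick the change up, assuming the usual next.config.js wiring (only the withGraphCommerce(nextConfig, cwd) signature is taken from the diff above; the rest is assumed):

    // next.config.js sketch; wiring assumed, not part of this diff.
    const { withGraphCommerce } = require('@graphcommerce/next-config')

    /** @type {import('next').NextConfig} */
    const nextConfig = { reactStrictMode: true }

    // The returned config now contains bundlePagesRouterDependencies: true instead of
    // experimental.bundlePagesExternals.
    module.exports = withGraphCommerce(nextConfig, __dirname)
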
package/package.json
CHANGED
@@ -2,7 +2,7 @@
  "name": "@graphcommerce/next-config",
  "homepage": "https://www.graphcommerce.org/",
  "repository": "github:graphcommerce-org/graphcommerce",
- "version": "9.0.0-canary.106",
+ "version": "9.0.0-canary.107",
  "type": "commonjs",
  "main": "dist/index.js",
  "types": "src/index.ts",
@@ -23,6 +23,7 @@
  "@types/lodash": "^4.17.13",
  "babel-plugin-macros": "^3.1.0",
  "circular-dependency-plugin": "^5.2.2",
+ "fast-glob": "^3.3.2",
  "glob": "^10.4.5",
  "graphql": "^16",
  "inspectpack": "^4.7.1",

package/src/commands/copyFiles.ts
CHANGED

@@ -1,19 +1,17 @@
+ /* eslint-disable no-await-in-loop */
  import fs from 'fs/promises'
  import path from 'path'
- import …
+ import fg from 'fast-glob'
  import { resolveDependenciesSync } from '../utils/resolveDependenciesSync'

  // Add debug logging helper
  const debug = (...args: unknown[]) => {
- if (process.env.DEBUG) console.log('[…
+ if (process.env.DEBUG) console.log('[copy-files]', ...args)
  }

  // Add constants for the magic comments
-
- const …
-
- const MANAGED_BY_GC = createManagementComment('graphcommerce')
- const MANAGED_LOCALLY = createManagementComment('local')
+ const MANAGED_BY_GC = '// managed by: graphcommerce'
+ const MANAGED_LOCALLY = '// managed by: local'

  const GITIGNORE_SECTION_START = '# managed by: graphcommerce'
  const GITIGNORE_SECTION_END = '# end managed by: graphcommerce'
@@ -30,11 +28,9 @@ async function updateGitignore(managedFiles: string[]) {
  const gitignorePath = path.join(process.cwd(), '.gitignore')
  let content: string

- debug('Updating .gitignore with managed files:', managedFiles)
-
  try {
  content = await fs.readFile(gitignorePath, 'utf-8')
- debug('…
+ debug('Reading existing .gitignore')
  } catch (err) {
  debug('.gitignore not found, creating new file')
  content = ''
@@ -46,7 +42,6 @@ async function updateGitignore(managedFiles: string[]) {
  'g',
  )
  content = content.replace(sectionRegex, '')
- debug('Content after removing existing section:', content)

  // Only add new section if there are files to manage
  if (managedFiles.length > 0) {
@@ -56,23 +51,16 @@ async function updateGitignore(managedFiles: string[]) {
  GITIGNORE_SECTION_END,
  '', // Empty line at the end
  ].join('\n')
- debug('New section to add:', newSection)

  // Append the new section
  content = `${content.trim()}\n\n${newSection}`
+ debug(`Updated .gitignore with ${managedFiles.length} managed files`)
  } else {
- // Just trim the content when no files to manage
  content = `${content.trim()}\n`
+ debug('Cleaned up .gitignore managed section')
  }

-
-
- try {
- await fs.writeFile(gitignorePath, content)
- debug('Successfully wrote .gitignore file')
- } catch (err) {
- console.error('Error writing .gitignore:', err)
- }
+ await fs.writeFile(gitignorePath, content)
  }

  /** Determines how a file should be managed based on its content */
@@ -97,53 +85,100 @@ function getFileManagement(content: Buffer | undefined): 'local' | 'graphcommerc
  * 4. If the file is managed by graphcommerce: Update if content differs
  */
  export async function copyFiles() {
+ const startTime = performance.now()
  debug('Starting copyFiles')

  const cwd = process.cwd()
  const deps = resolveDependenciesSync()
  const packages = [...deps.values()].filter((p) => p !== '.')
- debug('Found packages:', packages)

  // Track files and their source packages to detect conflicts
  const fileMap = new Map<string, { sourcePath: string; packagePath: string }>()
- // Track which files are managed by GraphCommerce
  const managedFiles = new Set<string>()
+ const existingManagedFiles = new Set<string>()
+
+ // First scan existing files to find GraphCommerce managed ones
+ const scanStart = performance.now()
+ try {
+ // Use only default patterns for testing
+ const gitignorePatterns = [
+ '**/dist/**',
+ '**/build/**',
+ '**/.next/**',
+ '**/.git/**',
+ '**/node_modules/**',
+ ]
+
+ const allFiles = await fg('**/*', {
+ cwd,
+ dot: true,
+ ignore: gitignorePatterns,
+ onlyFiles: true,
+ })
+ debug(
+ `Found ${allFiles.length} project files in ${(performance.now() - scanStart).toFixed(0)}ms`,
+ )
+
+ const readStart = performance.now()
+ await Promise.all(
+ allFiles.map(async (file) => {
+ const filePath = path.join(cwd, file)
+ try {
+ const content = await fs.readFile(filePath)
+ if (getFileManagement(content) === 'graphcommerce') {
+ existingManagedFiles.add(file)
+ debug(`Found existing managed file: ${file}`)
+ }
+ } catch (err) {
+ debug(`Error reading file ${file}:`, err)
+ }
+ }),
+ )
+ debug(
+ `Read ${existingManagedFiles.size} managed files in ${(performance.now() - readStart).toFixed(0)}ms`,
+ )
+ } catch (err) {
+ debug('Error scanning project files:', err)
+ }

  // First pass: collect all files and check for conflicts
+ const collectStart = performance.now()
  await Promise.all(
  packages.map(async (pkg) => {
  const copyDir = path.join(pkg, 'copy')
-
  try {
- const files = await …
-
+ const files = await fg('**/*', { cwd: copyDir, dot: true, suppressErrors: true })
+ if (files.length > 0) {
+ debug(`Found files in ${pkg}:`, files)

-
-
-
+ for (const file of files) {
+ const sourcePath = path.join(copyDir, file)
+ const existing = fileMap.get(file)

-
-
+ if (existing) {
+ console.error(`Error: File conflict detected for '${file}'
  Found in packages:
  - ${existing.packagePath} -> ${existing.sourcePath}
  - ${pkg} -> ${sourcePath}`)
-
-
+ process.exit(1)
+ }

-
+ fileMap.set(file, { sourcePath, packagePath: pkg })
+ }
  }
  } catch (err) {
-
-
-
-
-
- }
+ if ((err as { code?: string }).code === 'ENOENT') return
+ console.error(
+ `Error scanning directory ${copyDir}: ${(err as Error).message}\nPath: ${copyDir}`,
+ )
+ process.exit(1)
  }
  }),
  )
+ debug(`Collected ${fileMap.size} files in ${(performance.now() - collectStart).toFixed(0)}ms`)

- // Second pass: copy files
+ // Second pass: copy files and handle removals
+ const copyStart = performance.now()
  await Promise.all(
  Array.from(fileMap.entries()).map(async ([file, { sourcePath }]) => {
  const targetPath = path.join(cwd, file)
@@ -154,8 +189,9 @@ Path: ${copyDir}`)

  const sourceContent = await fs.readFile(sourcePath)
  const contentWithComment = Buffer.concat([
- Buffer.from(…
-
+ Buffer.from(
+ `${MANAGED_BY_GC}\n// to modify this file, change it to managed by: local\n\n`,
+ ),
  sourceContent,
  ])

@@ -215,13 +251,79 @@ Source: ${sourcePath}`)
  }
  }),
  )
+ debug(`Copied ${managedFiles.size} files in ${(performance.now() - copyStart).toFixed(0)}ms`)
+
+ // Remove files that are no longer provided
+ const removeStart = performance.now()
+ const filesToRemove = Array.from(existingManagedFiles).filter((file) => !managedFiles.has(file))
+ debug(`Files to remove: ${filesToRemove.length}`)

- //…
+ // Helper function to recursively clean up empty directories
+ async function cleanupEmptyDirs(startPath: string) {
+ let currentDir = startPath
+ while (currentDir !== cwd) {
+ try {
+ const dirContents = await fs.readdir(currentDir)
+ if (dirContents.length === 0) {
+ await fs.rmdir(currentDir)
+ debug(`Removed empty directory: ${currentDir}`)
+ currentDir = path.dirname(currentDir)
+ } else {
+ break // Stop if directory is not empty
+ }
+ } catch (err) {
+ if ((err as { code?: string }).code === 'EACCES') {
+ console.error(`Error cleaning up directory ${currentDir}: ${(err as Error).message}`)
+ process.exit(1)
+ }
+ break // Stop on other errors (like ENOENT)
+ }
+ }
+ }
+
+ // Process file removals in parallel
+ await Promise.all(
+ filesToRemove.map(async (file) => {
+ const filePath = path.join(cwd, file)
+ const dirPath = path.dirname(filePath)
+
+ try {
+ // First check if the directory exists and is accessible
+ await fs.readdir(dirPath)
+
+ // Then try to remove the file
+ try {
+ await fs.unlink(filePath)
+ console.log(`Removed managed file: ${file}`)
+ debug(`Removed file: ${file}`)
+ } catch (err) {
+ if ((err as { code?: string }).code !== 'ENOENT') {
+ console.error(`Error removing file ${file}: ${(err as Error).message}`)
+ process.exit(1)
+ }
+ }
+
+ // Finally, try to clean up empty directories
+ await cleanupEmptyDirs(dirPath)
+ } catch (err) {
+ if ((err as { code?: string }).code === 'EACCES') {
+ console.error(`Error accessing directory ${dirPath}: ${(err as Error).message}`)
+ process.exit(1)
+ }
+ // Ignore ENOENT errors for directories that don't exist
+ }
+ }),
+ )
+ debug(`Removed files in ${(performance.now() - removeStart).toFixed(0)}ms`)
+
+ // Update .gitignore with current list of managed files
  if (managedFiles.size > 0) {
  debug('Found managed files:', Array.from(managedFiles))
  await updateGitignore(Array.from(managedFiles))
  } else {
  debug('No managed files found, cleaning up .gitignore section')
- await updateGitignore([])
+ await updateGitignore([])
  }
+
+ debug(`Total execution time: ${(performance.now() - startTime).toFixed(0)}ms`)
  }
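
Taken together, updateGitignore() maintains a single marker-delimited block in the project's .gitignore. The hunks above only show the tail of the newSection array, so the sketch below assumes the managed file paths are listed between the two markers (file names are placeholders):

    const GITIGNORE_SECTION_START = '# managed by: graphcommerce'
    const GITIGNORE_SECTION_END = '# end managed by: graphcommerce'

    const managedFiles = ['some/copied/file.ts'] // placeholder entries
    const newSection = [
      GITIGNORE_SECTION_START,
      ...managedFiles, // assumed: the entries preceding GITIGNORE_SECTION_END in the array above
      GITIGNORE_SECTION_END,
      '', // Empty line at the end
    ].join('\n')

    // Resulting block appended to .gitignore:
    // # managed by: graphcommerce
    // some/copied/file.ts
    // # end managed by: graphcommerce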