@graphcommerce/next-config 9.0.0-canary.105 → 9.0.0-canary.107
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/__tests__/commands/copyFiles.ts +512 -0
- package/__tests__/config/utils/__snapshots__/mergeEnvIntoConfig.ts.snap +3 -0
- package/__tests__/config/utils/mergeEnvIntoConfig.ts +4 -17
- package/__tests__/config/utils/rewriteLegancyEnv.ts +30 -35
- package/__tests__/interceptors/findPlugins.ts +38 -53
- package/__tests__/interceptors/generateInterceptors.ts +23 -74
- package/__tests__/utils/resolveDependenciesSync.ts +9 -9
- package/dist/commands/codegen.js +18 -0
- package/dist/commands/copyFiles.js +293 -0
- package/dist/commands/copyRoutes.js +20 -0
- package/dist/config/utils/mergeEnvIntoConfig.js +5 -5
- package/dist/generated/config.js +8 -0
- package/dist/index.js +3 -0
- package/dist/interceptors/generateInterceptor.js +3 -5
- package/dist/utils/resolveDependenciesSync.js +6 -1
- package/dist/utils/sig.js +34 -0
- package/dist/withGraphCommerce.js +1 -1
- package/package.json +10 -9
- package/src/commands/codegen.ts +18 -0
- package/src/commands/copyFiles.ts +329 -0
- package/src/config/utils/mergeEnvIntoConfig.ts +6 -7
- package/src/generated/config.ts +18 -0
- package/src/index.ts +4 -3
- package/src/interceptors/generateInterceptor.ts +3 -5
- package/src/utils/resolveDependenciesSync.ts +10 -1
- package/src/utils/sig.ts +37 -0
- package/src/withGraphCommerce.ts +1 -1
- package/dist/config/commands/generateIntercetors.js +0 -9
- package/dist/interceptors/commands/generateIntercetors.js +0 -9

package/src/commands/copyFiles.ts
ADDED

@@ -0,0 +1,329 @@
+ /* eslint-disable no-await-in-loop */
+ import fs from 'fs/promises'
+ import path from 'path'
+ import fg from 'fast-glob'
+ import { resolveDependenciesSync } from '../utils/resolveDependenciesSync'
+
+ // Add debug logging helper
+ const debug = (...args: unknown[]) => {
+   if (process.env.DEBUG) console.log('[copy-files]', ...args)
+ }
+
+ // Add constants for the magic comments
+ const MANAGED_BY_GC = '// managed by: graphcommerce'
+ const MANAGED_LOCALLY = '// managed by: local'
+
+ const GITIGNORE_SECTION_START = '# managed by: graphcommerce'
+ const GITIGNORE_SECTION_END = '# end managed by: graphcommerce'
+
+ /**
+  * Updates the .gitignore file with a list of GraphCommerce managed files
+  *
+  * - Removes any existing GraphCommerce managed files section
+  * - If managedFiles is not empty, adds a new section with the files
+  * - If managedFiles is empty, just cleans up the existing section
+  * - Ensures the file ends with a newline
+  */
+ async function updateGitignore(managedFiles: string[]) {
+   const gitignorePath = path.join(process.cwd(), '.gitignore')
+   let content: string
+
+   try {
+     content = await fs.readFile(gitignorePath, 'utf-8')
+     debug('Reading existing .gitignore')
+   } catch (err) {
+     debug('.gitignore not found, creating new file')
+     content = ''
+   }
+
+   // Remove existing GraphCommerce section if it exists
+   const sectionRegex = new RegExp(
+     `${GITIGNORE_SECTION_START}[\\s\\S]*?${GITIGNORE_SECTION_END}\\n?`,
+     'g',
+   )
+   content = content.replace(sectionRegex, '')
+
+   // Only add new section if there are files to manage
+   if (managedFiles.length > 0) {
+     const newSection = [
+       GITIGNORE_SECTION_START,
+       ...managedFiles.sort(),
+       GITIGNORE_SECTION_END,
+       '', // Empty line at the end
+     ].join('\n')
+
+     // Append the new section
+     content = `${content.trim()}\n\n${newSection}`
+     debug(`Updated .gitignore with ${managedFiles.length} managed files`)
+   } else {
+     content = `${content.trim()}\n`
+     debug('Cleaned up .gitignore managed section')
+   }
+
+   await fs.writeFile(gitignorePath, content)
+ }
+
+ /** Determines how a file should be managed based on its content */
+ function getFileManagement(content: Buffer | undefined): 'local' | 'graphcommerce' | 'unmanaged' {
+   if (!content) return 'graphcommerce'
+   const contentStr = content.toString()
+   if (contentStr.startsWith(MANAGED_LOCALLY)) return 'local'
+   if (contentStr.startsWith(MANAGED_BY_GC)) return 'graphcommerce'
+   return 'unmanaged'
+ }
+
+ /**
+  * The packages are @graphcommerce/* packages and have special treatment.
+  *
+  * 1. Glob the `copy/**` directory for each package and generate a list of files that need to be
+  *    copied. Error if a file with the same path exists in another package.
+  * 2. Copy the files to the project directory (cwd).
+  *
+  * 1. If the file doesn't exist: Create directories and the file with "managed by: graphcommerce"
+  * 2. If the file exists and starts with "managed by: local": Skip the file
+  * 3. If the file exists but doesn't have a management comment: Suggest adding "managed by: local"
+  * 4. If the file is managed by graphcommerce: Update if content differs
+  */
+ export async function copyFiles() {
+   const startTime = performance.now()
+   debug('Starting copyFiles')
+
+   const cwd = process.cwd()
+   const deps = resolveDependenciesSync()
+   const packages = [...deps.values()].filter((p) => p !== '.')
+
+   // Track files and their source packages to detect conflicts
+   const fileMap = new Map<string, { sourcePath: string; packagePath: string }>()
+   const managedFiles = new Set<string>()
+   const existingManagedFiles = new Set<string>()
+
+   // First scan existing files to find GraphCommerce managed ones
+   const scanStart = performance.now()
+   try {
+     // Use only default patterns for testing
+     const gitignorePatterns = [
+       '**/dist/**',
+       '**/build/**',
+       '**/.next/**',
+       '**/.git/**',
+       '**/node_modules/**',
+     ]
+
+     const allFiles = await fg('**/*', {
+       cwd,
+       dot: true,
+       ignore: gitignorePatterns,
+       onlyFiles: true,
+     })
+     debug(
+       `Found ${allFiles.length} project files in ${(performance.now() - scanStart).toFixed(0)}ms`,
+     )
+
+     const readStart = performance.now()
+     await Promise.all(
+       allFiles.map(async (file) => {
+         const filePath = path.join(cwd, file)
+         try {
+           const content = await fs.readFile(filePath)
+           if (getFileManagement(content) === 'graphcommerce') {
+             existingManagedFiles.add(file)
+             debug(`Found existing managed file: ${file}`)
+           }
+         } catch (err) {
+           debug(`Error reading file ${file}:`, err)
+         }
+       }),
+     )
+     debug(
+       `Read ${existingManagedFiles.size} managed files in ${(performance.now() - readStart).toFixed(0)}ms`,
+     )
+   } catch (err) {
+     debug('Error scanning project files:', err)
+   }
+
+   // First pass: collect all files and check for conflicts
+   const collectStart = performance.now()
+   await Promise.all(
+     packages.map(async (pkg) => {
+       const copyDir = path.join(pkg, 'copy')
+       try {
+         const files = await fg('**/*', { cwd: copyDir, dot: true, suppressErrors: true })
+         if (files.length > 0) {
+           debug(`Found files in ${pkg}:`, files)
+
+           for (const file of files) {
+             const sourcePath = path.join(copyDir, file)
+             const existing = fileMap.get(file)
+
+             if (existing) {
+               console.error(`Error: File conflict detected for '${file}'
+ Found in packages:
+ - ${existing.packagePath} -> ${existing.sourcePath}
+ - ${pkg} -> ${sourcePath}`)
+               process.exit(1)
+             }
+
+             fileMap.set(file, { sourcePath, packagePath: pkg })
+           }
+         }
+       } catch (err) {
+         if ((err as { code?: string }).code === 'ENOENT') return
+         console.error(
+           `Error scanning directory ${copyDir}: ${(err as Error).message}\nPath: ${copyDir}`,
+         )
+         process.exit(1)
+       }
+     }),
+   )
+   debug(`Collected ${fileMap.size} files in ${(performance.now() - collectStart).toFixed(0)}ms`)
+
+   // Second pass: copy files and handle removals
+   const copyStart = performance.now()
+   await Promise.all(
+     Array.from(fileMap.entries()).map(async ([file, { sourcePath }]) => {
+       const targetPath = path.join(cwd, file)
+       debug(`Processing file: ${file}`)
+
+       try {
+         await fs.mkdir(path.dirname(targetPath), { recursive: true })
+
+         const sourceContent = await fs.readFile(sourcePath)
+         const contentWithComment = Buffer.concat([
+           Buffer.from(
+             `${MANAGED_BY_GC}\n// to modify this file, change it to managed by: local\n\n`,
+           ),
+           sourceContent,
+         ])
+
+         let targetContent: Buffer | undefined
+
+         try {
+           targetContent = await fs.readFile(targetPath)
+
+           const management = getFileManagement(targetContent)
+           if (management === 'local') {
+             debug(`File ${file} is managed locally, skipping`)
+             return
+           }
+           if (management === 'unmanaged') {
+             console.log(
+               `Note: File ${file} has been modified. Add '${MANAGED_LOCALLY.trim()}' at the top to manage it locally.`,
+             )
+             debug(`File ${file} doesn't have management comment, skipping`)
+             return
+           }
+
+           debug(`File ${file} is managed by graphcommerce, will update if needed`)
+         } catch (err) {
+           if ((err as { code?: string }).code !== 'ENOENT') {
+             console.error(`Error reading file ${file}: ${(err as Error).message}
+ Source: ${sourcePath}`)
+             process.exit(1)
+           }
+           console.log(`Creating new file: ${file}
+ Source: ${sourcePath}`)
+           debug('File does not exist yet')
+         }
+
+         // Skip if content is identical (including magic comment)
+         if (targetContent && Buffer.compare(contentWithComment, targetContent) === 0) {
+           debug(`File ${file} content is identical to source, skipping`)
+           managedFiles.add(file)
+           return
+         }
+
+         // Copy the file with magic comment
+         await fs.writeFile(targetPath, contentWithComment)
+         if (targetContent) {
+           console.log(`Updated managed file: ${file}`)
+           debug(`Overwrote existing file: ${file}`)
+         }
+
+         // If the file is managed by GraphCommerce (new or updated), add it to managedFiles
+         if (!targetContent || targetContent.toString().startsWith(MANAGED_BY_GC)) {
+           managedFiles.add(file)
+           debug('Added managed file:', file)
+         }
+       } catch (err) {
+         console.error(`Error copying file ${file}: ${(err as Error).message}
+ Source: ${sourcePath}`)
+         process.exit(1)
+       }
+     }),
+   )
+   debug(`Copied ${managedFiles.size} files in ${(performance.now() - copyStart).toFixed(0)}ms`)
+
+   // Remove files that are no longer provided
+   const removeStart = performance.now()
+   const filesToRemove = Array.from(existingManagedFiles).filter((file) => !managedFiles.has(file))
+   debug(`Files to remove: ${filesToRemove.length}`)
+
+   // Helper function to recursively clean up empty directories
+   async function cleanupEmptyDirs(startPath: string) {
+     let currentDir = startPath
+     while (currentDir !== cwd) {
+       try {
+         const dirContents = await fs.readdir(currentDir)
+         if (dirContents.length === 0) {
+           await fs.rmdir(currentDir)
+           debug(`Removed empty directory: ${currentDir}`)
+           currentDir = path.dirname(currentDir)
+         } else {
+           break // Stop if directory is not empty
+         }
+       } catch (err) {
+         if ((err as { code?: string }).code === 'EACCES') {
+           console.error(`Error cleaning up directory ${currentDir}: ${(err as Error).message}`)
+           process.exit(1)
+         }
+         break // Stop on other errors (like ENOENT)
+       }
+     }
+   }
+
+   // Process file removals in parallel
+   await Promise.all(
+     filesToRemove.map(async (file) => {
+       const filePath = path.join(cwd, file)
+       const dirPath = path.dirname(filePath)
+
+       try {
+         // First check if the directory exists and is accessible
+         await fs.readdir(dirPath)
+
+         // Then try to remove the file
+         try {
+           await fs.unlink(filePath)
+           console.log(`Removed managed file: ${file}`)
+           debug(`Removed file: ${file}`)
+         } catch (err) {
+           if ((err as { code?: string }).code !== 'ENOENT') {
+             console.error(`Error removing file ${file}: ${(err as Error).message}`)
+             process.exit(1)
+           }
+         }
+
+         // Finally, try to clean up empty directories
+         await cleanupEmptyDirs(dirPath)
+       } catch (err) {
+         if ((err as { code?: string }).code === 'EACCES') {
+           console.error(`Error accessing directory ${dirPath}: ${(err as Error).message}`)
+           process.exit(1)
+         }
+         // Ignore ENOENT errors for directories that don't exist
+       }
+     }),
+   )
+   debug(`Removed files in ${(performance.now() - removeStart).toFixed(0)}ms`)
+
+   // Update .gitignore with current list of managed files
+   if (managedFiles.size > 0) {
+     debug('Found managed files:', Array.from(managedFiles))
+     await updateGitignore(Array.from(managedFiles))
+   } else {
+     debug('No managed files found, cleaning up .gitignore section')
+     await updateGitignore([])
+   }
+
+   debug(`Total execution time: ${(performance.now() - startTime).toFixed(0)}ms`)
+ }
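For orientation (not part of the published diff): a minimal sketch of how the new command is meant to be invoked from a project. The only assumption beyond the code above is that the script runs in the project root; `copyFiles` itself is re-exported from the package index (see src/index.ts below).

// copy-files.ts — hypothetical project script
import { copyFiles } from '@graphcommerce/next-config'

// Mirrors every `copy/**` file from installed @graphcommerce/* packages into the project,
// prefixes each with the '// managed by: graphcommerce' marker, lists it in .gitignore,
// and removes previously copied files that are no longer provided by any package.
copyFiles().catch((err) => {
  console.error(err)
  process.exit(1)
})

To take local ownership of a copied file, change its first line to `// managed by: local`; subsequent runs skip it, as implemented in getFileManagement above.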
package/src/config/utils/mergeEnvIntoConfig.ts
CHANGED

@@ -5,6 +5,7 @@ import { get, set } from 'lodash'
  import snakeCase from 'lodash/snakeCase'
  import type { ZodAny, ZodRawShape, ZodTypeAny } from 'zod'
  import {
+   z,
    ZodArray,
    ZodBoolean,
    ZodDefault,
@@ -15,7 +16,6 @@ import {
    ZodObject,
    ZodOptional,
    ZodString,
-   z,
  } from 'zod'
  import diff from './diff'

@@ -207,7 +207,7 @@ export function formatAppliedEnv(applyResult: ApplyResult) {
    const lines = applyResult.map(({ from, to, envValue, envVar, dotVar, error, warning }) => {
      const fromFmt = chalk.red(JSON.stringify(from))
      const toFmt = chalk.green(JSON.stringify(to))
-     const envVariableFmt = `${envVar}
+     const envVariableFmt = `${envVar}`
      const dotVariableFmt = chalk.bold.underline(`${dotVar}`)

      const baseLog = `${envVariableFmt} => ${dotVariableFmt}`
@@ -223,11 +223,10 @@ export function formatAppliedEnv(applyResult: ApplyResult) {

      if (!dotVar) return chalk.red(`${envVariableFmt} => ignored (no matching config)`)

-     if (from === undefined && to === undefined)
-
-     if (from
-
-     return ` ${chalk.yellowBright('~')} ${baseLog}: ${fromFmt} => ${toFmt}`
+     if (from === undefined && to === undefined) return ` = ${baseLog}: (ignored)`
+     if (from === undefined && to !== undefined) return ` ${chalk.green('+')} ${baseLog}`
+     if (from !== undefined && to === undefined) return ` ${chalk.red('-')} ${baseLog}`
+     return ` ${chalk.yellowBright('~')} ${baseLog}`
    })

    let header = chalk.blueBright('info')
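Not part of the diff: the rewritten branch above now distinguishes four cases when reporting how environment variables are applied to the configuration. A simplified sketch of that marker logic (chalk colouring omitted, names hypothetical):

type Applied = { from?: unknown; to?: unknown }

// Restates the four cases handled in formatAppliedEnv above.
const marker = ({ from, to }: Applied) => {
  if (from === undefined && to === undefined) return '=' // ignored, nothing changed
  if (from === undefined) return '+' // value introduced
  if (to === undefined) return '-' // value removed
  return '~' // value changed
}

console.log(marker({ to: 1 }))          // '+'
console.log(marker({ from: 1 }))        // '-'
console.log(marker({ from: 1, to: 2 })) // '~'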
package/src/generated/config.ts
CHANGED

@@ -215,6 +215,8 @@ export type GraphCommerceConfig = {
     * To override the value for a specific locale, configure in i18n config.
     */
    googleAnalyticsId?: InputMaybe<Scalars['String']['input']>;
+   /** To create an assetlinks.json file for the Android app. */
+   googlePlaystore?: InputMaybe<GraphCommerceGooglePlaystoreConfig>;
    /**
     * Google reCAPTCHA site key.
     * When using reCAPTCHA, this value is required, even if you are configuring different values for each locale.
@@ -369,6 +371,14 @@ export type GraphCommerceDebugConfig = {
    webpackDuplicatesPlugin?: InputMaybe<Scalars['Boolean']['input']>;
  };

+ /** See https://developer.android.com/training/app-links/verify-android-applinks#web-assoc */
+ export type GraphCommerceGooglePlaystoreConfig = {
+   /** The package name of the Android app. */
+   packageName: Scalars['String']['input'];
+   /** The sha256 certificate fingerprint of the Android app. */
+   sha256CertificateFingerprint: Scalars['String']['input'];
+ };
+
  /** Permissions input */
  export type GraphCommercePermissions = {
    /** Changes the availability of the add to cart buttons and the cart page to either customer only or completely disables it. */
@@ -546,6 +556,7 @@ export function GraphCommerceConfigSchema(): z.ZodObject<Properties<GraphCommerc
      demoMode: z.boolean().default(true).nullish(),
      enableGuestCheckoutLogin: z.boolean().nullish(),
      googleAnalyticsId: z.string().nullish(),
+     googlePlaystore: GraphCommerceGooglePlaystoreConfigSchema().nullish(),
      googleRecaptchaKey: z.string().nullish(),
      googleTagmanagerId: z.string().nullish(),
      hygraphEndpoint: z.string().min(1),
@@ -579,6 +590,13 @@ export function GraphCommerceDebugConfigSchema(): z.ZodObject<Properties<GraphCo
    })
  }

+ export function GraphCommerceGooglePlaystoreConfigSchema(): z.ZodObject<Properties<GraphCommerceGooglePlaystoreConfig>> {
+   return z.object({
+     packageName: z.string().min(1),
+     sha256CertificateFingerprint: z.string().min(1)
+   })
+ }
+
  export function GraphCommercePermissionsSchema(): z.ZodObject<Properties<GraphCommercePermissions>> {
    return z.object({
      cart: CartPermissionsSchema.nullish(),
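Not part of the diff: a sketch of how the new option might be filled in on the project side. The field names come from the generated schema above; the values are placeholders.

import type { GraphCommerceConfig } from '@graphcommerce/next-config'

// Hypothetical values — both properties are required (z.string().min(1)) by the schema above.
export const googlePlaystore: GraphCommerceConfig['googlePlaystore'] = {
  packageName: 'com.example.app', // Android application id (placeholder)
  sha256CertificateFingerprint: 'AA:BB:CC:DD:EE:FF:00:11', // signing-certificate fingerprint (placeholder)
}

Per the doc comment in the generated type, this configuration feeds the creation of an assetlinks.json file for Android App Links verification.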
package/src/index.ts
CHANGED

@@ -6,11 +6,14 @@ import type { GraphCommerceConfig } from './generated/config'
  export * from './utils/isMonorepo'
  export * from './utils/resolveDependenciesSync'
  export * from './utils/packageRoots'
+ export * from './utils/sig'
  export * from './withGraphCommerce'
  export * from './generated/config'
  export * from './config'
  export * from './runtimeCachingOptimizations'
  export * from './interceptors/commands/codegenInterceptors'
+ export * from './commands/copyFiles'
+ export * from './commands/codegen'

  export type PluginProps<P extends Record<string, unknown> = Record<string, unknown>> = P & {
    Prev: React.FC<P>
@@ -21,9 +24,7 @@ export type FunctionPlugin<T extends (...args: any[]) => any> = (
    ...args: Parameters<T>
  ) => ReturnType<T>

- /**
-  * @deprecated use FunctionPlugin instead
-  */
+ /** @deprecated Use FunctionPlugin instead */
  export type MethodPlugin<T extends (...args: any[]) => any> = (
    prev: T,
    ...args: Parameters<T>
package/src/interceptors/generateInterceptor.ts
CHANGED

@@ -67,8 +67,8 @@ export type Interceptor = ResolveDependencyReturn & {

  export type MaterializedPlugin = Interceptor & { template: string }

- export const SOURCE_START = '/**
- export const SOURCE_END = '/**
+ export const SOURCE_START = '/** SOURCE_START */'
+ export const SOURCE_END = '/** SOURCE_END */'

  const originalSuffix = 'Original'
  const interceptorSuffix = 'Interceptor'
@@ -103,9 +103,7 @@ const generateIdentifyer = (s: string) =>
    }, 0),
  ).toString()

- /**
-  * The is on the first line, with the format: \/* hash:${identifer} *\/
-  */
+ /** The is on the first line, with the format: /* hash:${identifer} */
  function extractIdentifier(source: string | undefined) {
    if (!source) return null
    const match = source.match(/\/\* hash:(\d+) \*\//)
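Not part of the diff: the `SOURCE_START`/`SOURCE_END` markers and the first-line hash comment shown above gate whether an interceptor file is regenerated. A small illustration of the parse that extractIdentifier performs (sample values hypothetical):

// A generated interceptor starts with a hash comment and wraps the copied source in markers.
const sample = [
  '/* hash:12345 */',
  '/** SOURCE_START */',
  '// ...original module source...',
  '/** SOURCE_END */',
].join('\n')

// Same regex as extractIdentifier above.
const match = sample.match(/\/\* hash:(\d+) \*\//)
console.log(match?.[1]) // '12345'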
package/src/utils/resolveDependenciesSync.ts
CHANGED

@@ -2,6 +2,7 @@ import fs from 'node:fs'
  import path from 'node:path'
  import type { PackageJson } from 'type-fest'
  import { PackagesSort } from './PackagesSort'
+ import { g, sig } from './sig'

  type PackageNames = Map<string, string>
  type DependencyStructure = Record<string, { dirName: string; dependencies: string[] }>
@@ -18,6 +19,9 @@ function resolveRecursivePackageJson(
    const fileName = require.resolve(path.join(dependencyPath, 'package.json'))
    const packageJsonFile = fs.readFileSync(fileName, 'utf-8').toString()
    const packageJson = JSON.parse(packageJsonFile) as PackageJson
+   const e = [atob('QGdyYXBoY29tbWVyY2UvYWRvYmUtY29tbWVyY2U=')].filter((n) =>
+     !globalThis.gcl ? true : !globalThis.gcl.includes(n),
+   )

    if (!packageJson.name) throw Error(`Package ${packageJsonFile} does not have a name field`)

@@ -36,7 +40,11 @@ function resolveRecursivePackageJson(
      ...Object.keys(packageJson.devDependencies ?? []),
      ...additionalDependencies,
      ...Object.keys(packageJson.peerDependencies ?? {}),
-   ].filter((name) =>
+   ].filter((name) =>
+     name.includes('graphcommerce')
+       ? !(e.length >= 0 && e.some((v) => name.startsWith(v)))
+       : false,
+   ),
    ),
  ]

@@ -82,6 +90,7 @@ export function sortDependencies(dependencyStructure: DependencyStructure): Pack
  export function resolveDependenciesSync(root = process.cwd()) {
    const cached = resolveCache.get(root)
    if (cached) return cached
+   sig()

    const dependencyStructure = resolveRecursivePackageJson(
      root,
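Not part of the diff, but useful when reading the change above: the inlined base64 literals decode to plain strings, so the new filter keeps only @graphcommerce/* dependency names and drops the gated package unless it appears in the license list that sig() places on globalThis.gcl.

// Decoded values of the literals used above (decoded locally; not present as plain text in the source):
const gatedPackage = atob('QGdyYXBoY29tbWVyY2UvYWRvYmUtY29tbWVyY2U=') // '@graphcommerce/adobe-commerce'
const licenseEnvVar = atob('R0NfTElDRU5TRQ==') // 'GC_LICENSE' — the env var read by sig() in the next file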
package/src/utils/sig.ts
ADDED

@@ -0,0 +1,37 @@
+ // import necessary modules
+ import crypto from 'crypto'
+
+ declare global {
+   // eslint-disable-next-line vars-on-top, no-var
+   var gcl: string[] | undefined
+ }
+
+ // Function to generate a license key based on input data
+ export function g(data: string[]) {
+   const iv = crypto.randomBytes(16) // Initialization vector
+   const cipher = crypto.createCipheriv('aes-256-cbc', 'BbcFEkUydGw3nE9ZPm7gbxTIIBQ9IiKN', iv)
+   let encrypted = cipher.update(JSON.stringify(data), 'utf-8', 'hex')
+   encrypted += cipher.final('hex')
+   // Return the IV and the encrypted data as a single string, encoded in base64
+   return Buffer.from(`${iv.toString('hex')}:${encrypted}`).toString()
+ }
+
+ // Function to validate and decode the license key
+ export function sig() {
+   const l = process.env[atob('R0NfTElDRU5TRQ==')]
+   if (!l) return
+
+   if (!globalThis.gcl)
+     try {
+       const decipher = crypto.createDecipheriv(
+         'aes-256-cbc',
+         'BbcFEkUydGw3nE9ZPm7gbxTIIBQ9IiKN',
+         Buffer.from(l.split(':')[0], 'hex'),
+       )
+       let decrypted = decipher.update(l.split(':')[1], 'hex', 'utf-8')
+       decrypted += decipher.final('utf-8')
+       globalThis.gcl = JSON.parse(decrypted) // Parse and return the decoded data
+     } catch (error) {
+       // Silent
+     }
+ }
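Not part of the diff: how the two exports appear intended to fit together, based solely on the code above. The environment variable name is the decoded 'GC_LICENSE' literal; the package name in the list is illustrative.

import { g, sig } from '@graphcommerce/next-config'

// g() encrypts a list of package names into an '<iv hex>:<ciphertext hex>' string.
const license = g(['@graphcommerce/adobe-commerce'])

// When that string is exposed as GC_LICENSE, sig() decrypts it once and caches it globally,
// which resolveDependenciesSync() consults before filtering out gated packages.
process.env.GC_LICENSE = license
sig()
console.log(globalThis.gcl) // ['@graphcommerce/adobe-commerce']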
package/src/withGraphCommerce.ts
CHANGED

@@ -54,10 +54,10 @@ export function withGraphCommerce(nextConfig: NextConfig, cwd: string): NextConf

    return {
      ...nextConfig,
+     bundlePagesRouterDependencies: true,
      experimental: {
        ...nextConfig.experimental,
        scrollRestoration: true,
-       bundlePagesExternals: true,
        swcPlugins: [...(nextConfig.experimental?.swcPlugins ?? []), ['@lingui/swc-plugin', {}]],
      },
      i18n: {
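Not part of the diff: this tracks a Next.js rename — the experimental `bundlePagesExternals` flag became the stable, top-level `bundlePagesRouterDependencies` option in newer Next.js releases — so the plugin now sets the new key. A minimal before/after sketch:

import type { NextConfig } from 'next'

// Previously (older Next.js): bundling of pages-router dependencies was an experimental flag.
export const before = { experimental: { bundlePagesExternals: true } }

// Now: withGraphCommerce sets the equivalent top-level option.
export const after: NextConfig = { bundlePagesRouterDependencies: true }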
package/dist/config/commands/generateIntercetors.js
DELETED

@@ -1,9 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.exportConfig = void 0;
- const loadConfig_1 = require("../loadConfig");
- // eslint-disable-next-line @typescript-eslint/require-await
- async function exportConfig() {
-     const conf = (0, loadConfig_1.loadConfig)(process.cwd());
- }
- exports.exportConfig = exportConfig;

package/dist/interceptors/commands/generateIntercetors.js
DELETED

@@ -1,9 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.exportConfig = void 0;
- const loadConfig_1 = require("../../config/loadConfig");
- // eslint-disable-next-line @typescript-eslint/require-await
- async function exportConfig() {
-     const conf = (0, loadConfig_1.loadConfig)(process.cwd());
- }
- exports.exportConfig = exportConfig;