@graphcommerce/next-config 9.0.0-canary.106 → 9.0.0-canary.108

This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
@@ -1,19 +1,17 @@
+ /* eslint-disable no-await-in-loop */
  import fs from 'fs/promises'
  import path from 'path'
- import { glob } from 'glob'
+ import fg from 'fast-glob'
  import { resolveDependenciesSync } from '../utils/resolveDependenciesSync'

  // Add debug logging helper
  const debug = (...args: unknown[]) => {
- if (process.env.DEBUG) console.log('[copyFiles]', ...args)
+ if (process.env.DEBUG) console.log('[copy-files]', ...args)
  }

  // Add constants for the magic comments
- type FileManagement = 'graphcommerce' | 'local'
- const createManagementComment = (type: FileManagement) => `// managed by: ${type}`
-
- const MANAGED_BY_GC = createManagementComment('graphcommerce')
- const MANAGED_LOCALLY = createManagementComment('local')
+ const MANAGED_BY_GC = '// managed by: graphcommerce'
+ const MANAGED_LOCALLY = '// managed by: local'

  const GITIGNORE_SECTION_START = '# managed by: graphcommerce'
  const GITIGNORE_SECTION_END = '# end managed by: graphcommerce'
@@ -30,11 +28,9 @@ async function updateGitignore(managedFiles: string[]) {
  const gitignorePath = path.join(process.cwd(), '.gitignore')
  let content: string

- debug('Updating .gitignore with managed files:', managedFiles)
-
  try {
  content = await fs.readFile(gitignorePath, 'utf-8')
- debug('Existing .gitignore content:', content)
+ debug('Reading existing .gitignore')
  } catch (err) {
  debug('.gitignore not found, creating new file')
  content = ''
@@ -46,7 +42,6 @@ async function updateGitignore(managedFiles: string[]) {
  'g',
  )
  content = content.replace(sectionRegex, '')
- debug('Content after removing existing section:', content)

  // Only add new section if there are files to manage
  if (managedFiles.length > 0) {
@@ -56,23 +51,16 @@ async function updateGitignore(managedFiles: string[]) {
  GITIGNORE_SECTION_END,
  '', // Empty line at the end
  ].join('\n')
- debug('New section to add:', newSection)

  // Append the new section
  content = `${content.trim()}\n\n${newSection}`
+ debug(`Updated .gitignore with ${managedFiles.length} managed files`)
  } else {
- // Just trim the content when no files to manage
  content = `${content.trim()}\n`
+ debug('Cleaned up .gitignore managed section')
  }

- debug('Final content:', content)
-
- try {
- await fs.writeFile(gitignorePath, content)
- debug('Successfully wrote .gitignore file')
- } catch (err) {
- console.error('Error writing .gitignore:', err)
- }
+ await fs.writeFile(gitignorePath, content)
  }

  /** Determines how a file should be managed based on its content */
@@ -97,53 +85,100 @@ function getFileManagement(content: Buffer | undefined): 'local' | 'graphcommerc
  * 4. If the file is managed by graphcommerce: Update if content differs
  */
  export async function copyFiles() {
+ const startTime = performance.now()
  debug('Starting copyFiles')

  const cwd = process.cwd()
  const deps = resolveDependenciesSync()
  const packages = [...deps.values()].filter((p) => p !== '.')
- debug('Found packages:', packages)

  // Track files and their source packages to detect conflicts
  const fileMap = new Map<string, { sourcePath: string; packagePath: string }>()
- // Track which files are managed by GraphCommerce
  const managedFiles = new Set<string>()
+ const existingManagedFiles = new Set<string>()
+
+ // First scan existing files to find GraphCommerce managed ones
+ const scanStart = performance.now()
+ try {
+ // Use only default patterns for testing
+ const gitignorePatterns = [
+ '**/dist/**',
+ '**/build/**',
+ '**/.next/**',
+ '**/.git/**',
+ '**/node_modules/**',
+ ]
+
+ const allFiles = await fg('**/*', {
+ cwd,
+ dot: true,
+ ignore: gitignorePatterns,
+ onlyFiles: true,
+ })
+ debug(
+ `Found ${allFiles.length} project files in ${(performance.now() - scanStart).toFixed(0)}ms`,
+ )
+
+ const readStart = performance.now()
+ await Promise.all(
+ allFiles.map(async (file) => {
+ const filePath = path.join(cwd, file)
+ try {
+ const content = await fs.readFile(filePath)
+ if (getFileManagement(content) === 'graphcommerce') {
+ existingManagedFiles.add(file)
+ debug(`Found existing managed file: ${file}`)
+ }
+ } catch (err) {
+ debug(`Error reading file ${file}:`, err)
+ }
+ }),
+ )
+ debug(
+ `Read ${existingManagedFiles.size} managed files in ${(performance.now() - readStart).toFixed(0)}ms`,
+ )
+ } catch (err) {
+ debug('Error scanning project files:', err)
+ }

  // First pass: collect all files and check for conflicts
+ const collectStart = performance.now()
  await Promise.all(
  packages.map(async (pkg) => {
  const copyDir = path.join(pkg, 'copy')
-
  try {
- const files = await glob('**/*', { cwd: copyDir, nodir: true, dot: true })
- debug(`Found files in ${pkg}:`, files)
+ const files = await fg('**/*', { cwd: copyDir, dot: true, suppressErrors: true })
+ if (files.length > 0) {
+ debug(`Found files in ${pkg}:`, files)

- for (const file of files) {
- const sourcePath = path.join(copyDir, file)
- const existing = fileMap.get(file)
+ for (const file of files) {
+ const sourcePath = path.join(copyDir, file)
+ const existing = fileMap.get(file)

- if (existing) {
- console.error(`Error: File conflict detected for '${file}'
+ if (existing) {
+ console.error(`Error: File conflict detected for '${file}'
  Found in packages:
  - ${existing.packagePath} -> ${existing.sourcePath}
  - ${pkg} -> ${sourcePath}`)
- process.exit(1)
- }
+ process.exit(1)
+ }

- fileMap.set(file, { sourcePath, packagePath: pkg })
+ fileMap.set(file, { sourcePath, packagePath: pkg })
+ }
  }
  } catch (err) {
- // Skip if copy directory doesn't exist
- if ((err as { code?: string }).code !== 'ENOENT') {
- console.error(`Error scanning directory ${copyDir}: ${(err as Error).message}
- Path: ${copyDir}`)
- process.exit(1)
- }
+ if ((err as { code?: string }).code === 'ENOENT') return
+ console.error(
+ `Error scanning directory ${copyDir}: ${(err as Error).message}\nPath: ${copyDir}`,
+ )
+ process.exit(1)
  }
  }),
  )
+ debug(`Collected ${fileMap.size} files in ${(performance.now() - collectStart).toFixed(0)}ms`)

- // Second pass: copy files
+ // Second pass: copy files and handle removals
+ const copyStart = performance.now()
  await Promise.all(
  Array.from(fileMap.entries()).map(async ([file, { sourcePath }]) => {
  const targetPath = path.join(cwd, file)
@@ -154,8 +189,9 @@ Path: ${copyDir}`)

  const sourceContent = await fs.readFile(sourcePath)
  const contentWithComment = Buffer.concat([
- Buffer.from(`${MANAGED_BY_GC}\n`),
- Buffer.from('// to modify this file, change it to managed by: local\n\n'),
+ Buffer.from(
+ `${MANAGED_BY_GC}\n// to modify this file, change it to managed by: local\n\n`,
+ ),
  sourceContent,
  ])

@@ -184,8 +220,7 @@ Path: ${copyDir}`)
  Source: ${sourcePath}`)
  process.exit(1)
  }
- console.log(`Creating new file: ${file}
- Source: ${sourcePath}`)
+ console.log(`Creating new file: ${file}\nSource: ${sourcePath}`)
  debug('File does not exist yet')
  }

@@ -215,13 +250,79 @@ Source: ${sourcePath}`)
  }
  }),
  )
+ debug(`Copied ${managedFiles.size} files in ${(performance.now() - copyStart).toFixed(0)}ms`)
+
+ // Remove files that are no longer provided
+ const removeStart = performance.now()
+ const filesToRemove = Array.from(existingManagedFiles).filter((file) => !managedFiles.has(file))
+ debug(`Files to remove: ${filesToRemove.length}`)
+
+ // Helper function to recursively clean up empty directories
+ async function cleanupEmptyDirs(startPath: string) {
+ let currentDir = startPath
+ while (currentDir !== cwd) {
+ try {
+ const dirContents = await fs.readdir(currentDir)
+ if (dirContents.length === 0) {
+ await fs.rmdir(currentDir)
+ debug(`Removed empty directory: ${currentDir}`)
+ currentDir = path.dirname(currentDir)
+ } else {
+ break // Stop if directory is not empty
+ }
+ } catch (err) {
+ if ((err as { code?: string }).code === 'EACCES') {
+ console.error(`Error cleaning up directory ${currentDir}: ${(err as Error).message}`)
+ process.exit(1)
+ }
+ break // Stop on other errors (like ENOENT)
+ }
+ }
+ }
+
+ // Process file removals in parallel
+ await Promise.all(
+ filesToRemove.map(async (file) => {
+ const filePath = path.join(cwd, file)
+ const dirPath = path.dirname(filePath)
+
+ try {
+ // First check if the directory exists and is accessible
+ await fs.readdir(dirPath)
+
+ // Then try to remove the file
+ try {
+ await fs.unlink(filePath)
+ console.log(`Removed managed file: ${file}`)
+ debug(`Removed file: ${file}`)
+ } catch (err) {
+ if ((err as { code?: string }).code !== 'ENOENT') {
+ console.error(`Error removing file ${file}: ${(err as Error).message}`)
+ process.exit(1)
+ }
+ }
+
+ // Finally, try to clean up empty directories
+ await cleanupEmptyDirs(dirPath)
+ } catch (err) {
+ if ((err as { code?: string }).code === 'EACCES') {
+ console.error(`Error accessing directory ${dirPath}: ${(err as Error).message}`)
+ process.exit(1)
+ }
+ // Ignore ENOENT errors for directories that don't exist
+ }
+ }),
+ )
+ debug(`Removed files in ${(performance.now() - removeStart).toFixed(0)}ms`)

- // Update .gitignore with the list of managed files
+ // Update .gitignore with current list of managed files
  if (managedFiles.size > 0) {
  debug('Found managed files:', Array.from(managedFiles))
  await updateGitignore(Array.from(managedFiles))
  } else {
  debug('No managed files found, cleaning up .gitignore section')
- await updateGitignore([]) // Pass empty array to clean up the section
+ await updateGitignore([])
  }
+
+ debug(`Total execution time: ${(performance.now() - startTime).toFixed(0)}ms`)
  }
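
Note on the copy-files changes above: every file taken from a package's copy/ directory is written with a management marker on its first line, listed in a managed .gitignore section, and removed again once no package provides it anymore (with empty directories cleaned up). A minimal sketch of what this presumably looks like in a project; the file name and contents are illustrative, only the marker lines come from the constants above:

  // managed by: graphcommerce
  // to modify this file, change it to managed by: local

  // ...original contents copied from <package>/copy/...

  # .gitignore (managed section)
  # managed by: graphcommerce
  some/copied/file.ts
  # end managed by: graphcommerce

Changing the first line to '// managed by: local' marks the file as locally managed, which — per the MANAGED_LOCALLY marker and getFileManagement above — presumably excludes it from future overwrites and from the removal pass.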
@@ -5,6 +5,7 @@ import { get, set } from 'lodash'
  import snakeCase from 'lodash/snakeCase'
  import type { ZodAny, ZodRawShape, ZodTypeAny } from 'zod'
  import {
+ z,
  ZodArray,
  ZodBoolean,
  ZodDefault,
@@ -15,7 +16,6 @@ import {
  ZodObject,
  ZodOptional,
  ZodString,
- z,
  } from 'zod'
  import diff from './diff'

@@ -207,7 +207,7 @@ export function formatAppliedEnv(applyResult: ApplyResult) {
  const lines = applyResult.map(({ from, to, envValue, envVar, dotVar, error, warning }) => {
  const fromFmt = chalk.red(JSON.stringify(from))
  const toFmt = chalk.green(JSON.stringify(to))
- const envVariableFmt = `${envVar}='${envValue}'`
+ const envVariableFmt = `${envVar}`
  const dotVariableFmt = chalk.bold.underline(`${dotVar}`)

  const baseLog = `${envVariableFmt} => ${dotVariableFmt}`
@@ -223,11 +223,10 @@ export function formatAppliedEnv(applyResult: ApplyResult) {

  if (!dotVar) return chalk.red(`${envVariableFmt} => ignored (no matching config)`)

- if (from === undefined && to === undefined)
- return ` = ${baseLog}: (ignored, no change/wrong format)`
- if (from === undefined && to !== undefined) return ` ${chalk.green('+')} ${baseLog}: ${toFmt}`
- if (from !== undefined && to === undefined) return ` ${chalk.red('-')} ${baseLog}: ${fromFmt}`
- return ` ${chalk.yellowBright('~')} ${baseLog}: ${fromFmt} => ${toFmt}`
+ if (from === undefined && to === undefined) return ` = ${baseLog}: (ignored)`
+ if (from === undefined && to !== undefined) return ` ${chalk.green('+')} ${baseLog}`
+ if (from !== undefined && to === undefined) return ` ${chalk.red('-')} ${baseLog}`
+ return ` ${chalk.yellowBright('~')} ${baseLog}`
  })

  let header = chalk.blueBright('info')
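
The formatAppliedEnv hunks above stop printing values: envVariableFmt now contains only the variable name, and the +/-/~ lines no longer append the old/new config values. Roughly, where the variable name and value are hypothetical examples:

  Before:  + GC_DEMO_MODE='1' => demoMode: "1" => true
  After:   + GC_DEMO_MODE => demoMode

so environment values (including potential secrets) no longer appear in the applied-env log output.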
@@ -215,6 +215,8 @@ export type GraphCommerceConfig = {
  * To override the value for a specific locale, configure in i18n config.
  */
  googleAnalyticsId?: InputMaybe<Scalars['String']['input']>;
+ /** To create an assetlinks.json file for the Android app. */
+ googlePlaystore?: InputMaybe<GraphCommerceGooglePlaystoreConfig>;
  /**
  * Google reCAPTCHA site key.
  * When using reCAPTCHA, this value is required, even if you are configuring different values for each locale.
@@ -369,6 +371,14 @@ export type GraphCommerceDebugConfig = {
  webpackDuplicatesPlugin?: InputMaybe<Scalars['Boolean']['input']>;
  };

+ /** See https://developer.android.com/training/app-links/verify-android-applinks#web-assoc */
+ export type GraphCommerceGooglePlaystoreConfig = {
+ /** The package name of the Android app. */
+ packageName: Scalars['String']['input'];
+ /** The sha256 certificate fingerprint of the Android app. */
+ sha256CertificateFingerprint: Scalars['String']['input'];
+ };
+
  /** Permissions input */
  export type GraphCommercePermissions = {
  /** Changes the availability of the add to cart buttons and the cart page to either customer only or completely disables it. */
@@ -546,6 +556,7 @@ export function GraphCommerceConfigSchema(): z.ZodObject<Properties<GraphCommerc
  demoMode: z.boolean().default(true).nullish(),
  enableGuestCheckoutLogin: z.boolean().nullish(),
  googleAnalyticsId: z.string().nullish(),
+ googlePlaystore: GraphCommerceGooglePlaystoreConfigSchema().nullish(),
  googleRecaptchaKey: z.string().nullish(),
  googleTagmanagerId: z.string().nullish(),
  hygraphEndpoint: z.string().min(1),
@@ -579,6 +590,13 @@ export function GraphCommerceDebugConfigSchema(): z.ZodObject<Properties<GraphCo
  })
  }

+ export function GraphCommerceGooglePlaystoreConfigSchema(): z.ZodObject<Properties<GraphCommerceGooglePlaystoreConfig>> {
+ return z.object({
+ packageName: z.string().min(1),
+ sha256CertificateFingerprint: z.string().min(1)
+ })
+ }
+
  export function GraphCommercePermissionsSchema(): z.ZodObject<Properties<GraphCommercePermissions>> {
  return z.object({
  cart: CartPermissionsSchema.nullish(),
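
The new googlePlaystore option takes the two required, non-empty string fields validated by GraphCommerceGooglePlaystoreConfigSchema above and is used to generate an assetlinks.json for the Android app. A minimal sketch of the addition to a project's existing graphcommerce.config.js; all values are placeholders and the surrounding file keeps its current shape:

  const config = {
    // ...the project's existing configuration...
    googlePlaystore: {
      packageName: 'com.example.app',
      sha256CertificateFingerprint: 'AA:BB:CC:...',
    },
  }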
package/src/index.ts CHANGED
@@ -10,7 +10,6 @@ export * from './utils/sig'
  export * from './withGraphCommerce'
  export * from './generated/config'
  export * from './config'
- export * from './runtimeCachingOptimizations'
  export * from './interceptors/commands/codegenInterceptors'
  export * from './commands/copyFiles'
  export * from './commands/codegen'
@@ -67,8 +67,8 @@ export type Interceptor = ResolveDependencyReturn & {

  export type MaterializedPlugin = Interceptor & { template: string }

- export const SOURCE_START = '/** Original source starts here (do not modify!): **/'
- export const SOURCE_END = '/** Original source ends here (do not modify!) **/'
+ export const SOURCE_START = '/** SOURCE_START */'
+ export const SOURCE_END = '/** SOURCE_END */'

  const originalSuffix = 'Original'
  const interceptorSuffix = 'Interceptor'
@@ -103,9 +103,7 @@ const generateIdentifyer = (s: string) =>
  }, 0),
  ).toString()

- /**
- * The is on the first line, with the format: \/* hash:${identifer} *\/
- */
+ /** The is on the first line, with the format: /* hash:${identifer} */
  function extractIdentifier(source: string | undefined) {
  if (!source) return null
  const match = source.match(/\/\* hash:(\d+) \*\//)
@@ -1,8 +1,83 @@
+ import fs from 'node:fs'
  import path from 'node:path'

+ const debug = process.env.DEBUG === '1'
+ // eslint-disable-next-line no-console
+ const log = (message: string) => debug && console.log(`isMonorepo: ${message}`)
+
+ function findPackageJson(directory: string): { name: string } | null {
+ try {
+ const packageJsonPath = path.join(directory, 'package.json')
+ const content = fs.readFileSync(packageJsonPath, 'utf8')
+ return JSON.parse(content)
+ } catch {
+ return null
+ }
+ }
+
+ /**
+ * Determines if we're running in a monorepo context and how to handle postinstall scripts.
+ *
+ * If there is a parent `@graphcommerce/*` package, we're in a monorepo.
+ */
  export function isMonorepo() {
- const root = process.cwd()
- const meshDir = path.dirname(require.resolve('@graphcommerce/graphql-mesh'))
- const relativePath = path.join(path.relative(meshDir, root), '/')
- return relativePath.startsWith(`..${path.sep}..${path.sep}examples`)
+ let currentDir = process.cwd()
+ log(`Starting directory: ${currentDir}`)
+
+ // Start from the parent directory to find a parent @graphcommerce package
+ currentDir = path.dirname(currentDir)
+ log(`Looking for parent packages starting from: ${currentDir}`)
+
+ // Keep going up until we find a root package or hit the filesystem root
+ while (currentDir !== path.parse(currentDir).root) {
+ const packageJson = findPackageJson(currentDir)
+
+ if (packageJson) {
+ log(`Found package.json in: ${currentDir}`)
+ log(`Package name: ${packageJson.name}`)
+
+ if (packageJson.name.startsWith('@graphcommerce/')) {
+ log('isMonorepo result: true (found parent @graphcommerce package)')
+ return true
+ }
+ }
+
+ currentDir = path.dirname(currentDir)
+ }
+
+ log('isMonorepo result: false (no parent @graphcommerce package found)')
+ return false
+ }
+
+ /**
+ * Finds the path of the parent @graphcommerce package if it exists Returns null if no parent
+ * package is found
+ */
+ export function findParentPath(directory: string): string | null {
+ let currentDir = directory
+ log(`Starting directory: ${currentDir}`)
+
+ // Start from the parent directory
+ currentDir = path.dirname(currentDir)
+ log(`Looking for parent packages starting from: ${currentDir}`)
+
+ // Keep going up until we find a root package or hit the filesystem root
+ while (currentDir !== path.parse(currentDir).root) {
+ const packageJson = findPackageJson(currentDir)
+
+ if (packageJson) {
+ log(`Found package.json in: ${currentDir}`)
+ log(`Package name: ${packageJson.name}`)
+
+ if (packageJson.name.startsWith('@graphcommerce/')) {
+ log(`Found parent @graphcommerce package at: ${currentDir}`)
+ return currentDir
+ }
+ }
+
+ currentDir = path.dirname(currentDir)
+ }
+
+ log('No parent @graphcommerce package found')
+ return null
  }
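
With the rewrite above, isMonorepo no longer infers its answer from the location of @graphcommerce/graphql-mesh relative to an examples/ directory; it walks up from the current working directory until it finds a package.json whose name starts with '@graphcommerce/', and findParentPath exposes that parent package's directory. A small usage sketch — the import specifier is an assumption, since the diff does not show where these functions are re-exported:

  import { findParentPath, isMonorepo } from '@graphcommerce/next-config' // import path is an assumption

  if (isMonorepo()) {
    // a parent @graphcommerce/* package exists, so we are inside the monorepo
  }
  const parentDir = findParentPath(process.cwd()) // directory of the parent package, or null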
@@ -41,7 +41,7 @@ function domains(config: GraphCommerceConfig): DomainLocale[] {
  * module.exports = withGraphCommerce(nextConfig)
  * ```
  */
- export function withGraphCommerce(nextConfig: NextConfig, cwd: string): NextConfig {
+ export function withGraphCommerce(nextConfig: NextConfig, cwd: string = process.cwd()): NextConfig {
  graphcommerceConfig ??= loadConfig(cwd)
  const importMetaPaths = configToImportMeta(graphcommerceConfig)

@@ -54,10 +54,10 @@ export function withGraphCommerce(nextConfig: NextConfig, cwd: string): NextConf

  return {
  ...nextConfig,
+ bundlePagesRouterDependencies: true,
  experimental: {
  ...nextConfig.experimental,
  scrollRestoration: true,
- bundlePagesExternals: true,
  swcPlugins: [...(nextConfig.experimental?.swcPlugins ?? []), ['@lingui/swc-plugin', {}]],
  },
  i18n: {
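
Two notes on the hunks above: cwd now defaults to process.cwd(), so the second argument can be omitted, and the experimental bundlePagesExternals flag is replaced by the top-level bundlePagesRouterDependencies option (the renamed Next.js setting). A minimal next.config.js sketch matching the JSDoc example earlier in this file; the nextConfig contents are illustrative:

  const { withGraphCommerce } = require('@graphcommerce/next-config')

  /** @type {import('next').NextConfig} */
  const nextConfig = { reactStrictMode: true }

  module.exports = withGraphCommerce(nextConfig) // cwd defaults to process.cwd()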
@@ -1,28 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.runtimeCachingOptimizations = void 0;
- exports.runtimeCachingOptimizations = [
- {
- urlPattern: /\/_next\/image\?url=.+$/i,
- handler: 'StaleWhileRevalidate',
- options: {
- cacheName: 'next-image',
- expiration: {
- maxEntries: 1000, // 1000 images
- maxAgeSeconds: 168 * 60 * 60, // 1 week
- matchOptions: { ignoreVary: true },
- },
- },
- },
- {
- urlPattern: /\/_next\/data\/.+\/.+\.json$/i,
- handler: 'NetworkFirst',
- options: {
- cacheName: 'next-data',
- expiration: {
- maxEntries: 32,
- maxAgeSeconds: 24 * 60 * 60, // 24 hours
- },
- },
- },
- ];
@@ -1,27 +0,0 @@
- import type { RuntimeCaching } from 'workbox-build'
-
- export const runtimeCachingOptimizations: RuntimeCaching[] = [
- {
- urlPattern: /\/_next\/image\?url=.+$/i,
- handler: 'StaleWhileRevalidate',
- options: {
- cacheName: 'next-image',
- expiration: {
- maxEntries: 1000, // 1000 images
- maxAgeSeconds: 168 * 60 * 60, // 1 week
- matchOptions: { ignoreVary: true },
- },
- },
- },
- {
- urlPattern: /\/_next\/data\/.+\/.+\.json$/i,
- handler: 'NetworkFirst',
- options: {
- cacheName: 'next-data',
- expiration: {
- maxEntries: 32,
- maxAgeSeconds: 24 * 60 * 60, // 24 hours
- },
- },
- },
- ]