vite 2.9.0-beta.6 → 2.9.0-beta.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of vite might be problematic; see the registry's advisory page for more details.

@@ -18,9 +18,9 @@ var zlib$1 = require('zlib');
18
18
  var require$$1$1 = require('crypto');
19
19
  var require$$4 = require('tls');
20
20
  var require$$5 = require('assert');
21
+ var esbuild = require('esbuild');
21
22
  var require$$0$8 = require('buffer');
22
23
  var qs = require('querystring');
23
- var esbuild = require('esbuild');
24
24
  var require$$1$5 = require('child_process');
25
25
  var require$$1$2 = require('worker_threads');
26
26
  var readline = require('readline');
@@ -2514,11 +2514,14 @@ function combineSourcemaps(filename, sourcemapList) {
2514
2514
  }
2515
2515
  // hack for parse broken with normalized absolute paths on windows (C:/path/to/something).
2516
2516
  // escape them to linux like paths
2517
- sourcemapList.forEach((sourcemap) => {
2518
- sourcemap.sources = sourcemap.sources.map((source) => source ? escapeToLinuxLikePath(source) : null);
2517
+ // also avoid mutation here to prevent breaking plugin's using cache to generate sourcemaps like vue (see #7442)
2518
+ sourcemapList = sourcemapList.map((sourcemap) => {
2519
+ const newSourcemaps = { ...sourcemap };
2520
+ newSourcemaps.sources = sourcemap.sources.map((source) => source ? escapeToLinuxLikePath(source) : null);
2519
2521
  if (sourcemap.sourceRoot) {
2520
- sourcemap.sourceRoot = escapeToLinuxLikePath(sourcemap.sourceRoot);
2522
+ newSourcemaps.sourceRoot = escapeToLinuxLikePath(sourcemap.sourceRoot);
2521
2523
  }
2524
+ return newSourcemaps;
2522
2525
  });
2523
2526
  const escapedFilename = escapeToLinuxLikePath(filename);
2524
2527
  // We don't declare type here so we can convert/fake/map as RawSourceMap
@@ -18601,6 +18604,8 @@ async function injectSourcesContent(map, file, logger) {
18601
18604
  sourcePath = path__default.resolve(sourceRoot, sourcePath);
18602
18605
  }
18603
18606
  return fs$n.promises.readFile(sourcePath, 'utf-8').catch(() => {
18607
+ if (maybeVirtualHtmlSet.has(normalizePath$4(sourcePath)))
18608
+ return null;
18604
18609
  missingSources.push(sourcePath);
18605
18610
  return null;
18606
18611
  });
@@ -18674,7 +18679,9 @@ function cssPlugin(config) {
18674
18679
  },
18675
18680
  async transform(raw, id, options) {
18676
18681
  var _a, _b;
18677
- if (!isCSSRequest(id) || commonjsProxyRE.test(id)) {
18682
+ if (!isCSSRequest(id) ||
18683
+ commonjsProxyRE.test(id) ||
18684
+ SPECIAL_QUERY_RE.test(id)) {
18678
18685
  return;
18679
18686
  }
18680
18687
  const ssr = (options === null || options === void 0 ? void 0 : options.ssr) === true;
@@ -18755,7 +18762,10 @@ function cssPostPlugin(config) {
18755
18762
  hasEmitted = false;
18756
18763
  },
18757
18764
  async transform(css, id, options) {
18758
- if (!isCSSRequest(id) || commonjsProxyRE.test(id)) {
18765
+ var _a;
18766
+ if (!isCSSRequest(id) ||
18767
+ commonjsProxyRE.test(id) ||
18768
+ SPECIAL_QUERY_RE.test(id)) {
18759
18769
  return;
18760
18770
  }
18761
18771
  const inlined = inlineRE.test(id);
@@ -18773,9 +18783,12 @@ function cssPostPlugin(config) {
18773
18783
  if (inlined) {
18774
18784
  return `export default ${JSON.stringify(css)}`;
18775
18785
  }
18776
- const sourcemap = this.getCombinedSourcemap();
18777
- await injectSourcesContent(sourcemap, cleanUrl(id), config.logger);
18778
- const cssContent = getCodeWithSourcemap('css', css, sourcemap);
18786
+ let cssContent = css;
18787
+ if ((_a = config.css) === null || _a === void 0 ? void 0 : _a.devSourcemap) {
18788
+ const sourcemap = this.getCombinedSourcemap();
18789
+ await injectSourcesContent(sourcemap, cleanUrl(id), config.logger);
18790
+ cssContent = getCodeWithSourcemap('css', css, sourcemap);
18791
+ }
18779
18792
  return [
18780
18793
  `import { updateStyle as __vite__updateStyle, removeStyle as __vite__removeStyle } from ${JSON.stringify(path__default.posix.join(config.base, CLIENT_PUBLIC_PATH))}`,
18781
18794
  `const __vite__id = ${JSON.stringify(id)}`,
@@ -19004,7 +19017,7 @@ function getCssResolversKeys(resolvers) {
19004
19017
  }
19005
19018
  async function compileCSS(id, code, config, urlReplacer, atImportResolvers, server) {
19006
19019
  var _a;
19007
- const { modules: modulesOptions, preprocessorOptions } = config.css || {};
19020
+ const { modules: modulesOptions, preprocessorOptions, devSourcemap } = config.css || {};
19008
19021
  const isModule = modulesOptions !== false && cssModuleRE.test(id);
19009
19022
  // although at serve time it can work without processing, we do need to
19010
19023
  // crawl them in order to register watch dependencies.
@@ -19048,6 +19061,7 @@ async function compileCSS(id, code, config, urlReplacer, atImportResolvers, serv
19048
19061
  }
19049
19062
  // important: set this for relative import resolving
19050
19063
  opts.filename = cleanUrl(id);
19064
+ opts.enableSourcemap = devSourcemap !== null && devSourcemap !== void 0 ? devSourcemap : false;
19051
19065
  const preprocessResult = await preProcessor(code, config.root, opts, atImportResolvers);
19052
19066
  if (preprocessResult.errors.length) {
19053
19067
  throw preprocessResult.errors[0];
@@ -19086,7 +19100,7 @@ async function compileCSS(id, code, config, urlReplacer, atImportResolvers, serv
19086
19100
  replacer: urlReplacer
19087
19101
  }));
19088
19102
  if (isModule) {
19089
- postcssPlugins.unshift((await Promise.resolve().then(function () { return require('./dep-b1024ee7.js'); }).then(function (n) { return n.index; })).default({
19103
+ postcssPlugins.unshift((await Promise.resolve().then(function () { return require('./dep-051d3e69.js'); }).then(function (n) { return n.index; })).default({
19090
19104
  ...modulesOptions,
19091
19105
  getJSON(cssFileName, _modules, outputFileName) {
19092
19106
  modules = _modules;
@@ -19166,6 +19180,15 @@ async function compileCSS(id, code, config, urlReplacer, atImportResolvers, serv
19166
19180
  config.logger.warn(colors$1.yellow(msg));
19167
19181
  }
19168
19182
  }
19183
+ if (!devSourcemap) {
19184
+ return {
19185
+ ast: postcssResult,
19186
+ code: postcssResult.css,
19187
+ map: { mappings: '' },
19188
+ modules,
19189
+ deps
19190
+ };
19191
+ }
19169
19192
  const rawPostcssMap = postcssResult.map.toJSON();
19170
19193
  const postcssMap = formatPostcssSourceMap(
19171
19194
  // version property of rawPostcssMap is declared as string
@@ -19395,16 +19418,20 @@ const scss = async (source, root, options, resolvers) => {
19395
19418
  ? importer.push(...options.importer)
19396
19419
  : importer.push(options.importer);
19397
19420
  }
19398
- const { content: data, map: additionalMap } = await getSource(source, options.filename, options.additionalData);
19421
+ const { content: data, map: additionalMap } = await getSource(source, options.filename, options.additionalData, options.enableSourcemap);
19399
19422
  const finalOptions = {
19400
19423
  ...options,
19401
19424
  data,
19402
19425
  file: options.filename,
19403
19426
  outFile: options.filename,
19404
19427
  importer,
19405
- sourceMap: true,
19406
- omitSourceMapUrl: true,
19407
- sourceMapRoot: path__default.dirname(options.filename)
19428
+ ...(options.enableSourcemap
19429
+ ? {
19430
+ sourceMap: true,
19431
+ omitSourceMapUrl: true,
19432
+ sourceMapRoot: path__default.dirname(options.filename)
19433
+ }
19434
+ : {})
19408
19435
  };
19409
19436
  try {
19410
19437
  const result = await new Promise((resolve, reject) => {
@@ -19491,16 +19518,20 @@ async function rebaseUrls(file, rootFile, alias) {
19491
19518
  const less = async (source, root, options, resolvers) => {
19492
19519
  const nodeLess = loadPreprocessor("less" /* less */, root);
19493
19520
  const viteResolverPlugin = createViteLessPlugin(nodeLess, options.filename, options.alias, resolvers);
19494
- const { content, map: additionalMap } = await getSource(source, options.filename, options.additionalData);
19521
+ const { content, map: additionalMap } = await getSource(source, options.filename, options.additionalData, options.enableSourcemap);
19495
19522
  let result;
19496
19523
  try {
19497
19524
  result = await nodeLess.render(content, {
19498
19525
  ...options,
19499
19526
  plugins: [viteResolverPlugin, ...(options.plugins || [])],
19500
- sourceMap: {
19501
- outputSourceFiles: true,
19502
- sourceMapFileInline: false
19503
- }
19527
+ ...(options.enableSourcemap
19528
+ ? {
19529
+ sourceMap: {
19530
+ outputSourceFiles: true,
19531
+ sourceMapFileInline: false
19532
+ }
19533
+ }
19534
+ : {})
19504
19535
  });
19505
19536
  }
19506
19537
  catch (e) {
@@ -19514,8 +19545,10 @@ const less = async (source, root, options, resolvers) => {
19514
19545
  };
19515
19546
  return { code: '', errors: [normalizedError], deps: [] };
19516
19547
  }
19517
- const map = JSON.parse(result.map);
19518
- delete map.sourcesContent;
19548
+ const map = result.map && JSON.parse(result.map);
19549
+ if (map) {
19550
+ delete map.sourcesContent;
19551
+ }
19519
19552
  return {
19520
19553
  code: result.css.toString(),
19521
19554
  map,
@@ -19578,17 +19611,19 @@ const styl = async (source, root, options) => {
19578
19611
  const nodeStylus = loadPreprocessor("stylus" /* stylus */, root);
19579
19612
  // Get source with preprocessor options.additionalData. Make sure a new line separator
19580
19613
  // is added to avoid any render error, as added stylus content may not have semi-colon separators
19581
- const { content, map: additionalMap } = await getSource(source, options.filename, options.additionalData, '\n');
19614
+ const { content, map: additionalMap } = await getSource(source, options.filename, options.additionalData, options.enableSourcemap, '\n');
19582
19615
  // Get preprocessor options.imports dependencies as stylus
19583
19616
  // does not return them with its builtin `.deps()` method
19584
19617
  const importsDeps = ((_a = options.imports) !== null && _a !== void 0 ? _a : []).map((dep) => path__default.resolve(dep));
19585
19618
  try {
19586
19619
  const ref = nodeStylus(content, options);
19587
- ref.set('sourcemap', {
19588
- comment: false,
19589
- inline: false,
19590
- basePath: root
19591
- });
19620
+ if (options.enableSourcemap) {
19621
+ ref.set('sourcemap', {
19622
+ comment: false,
19623
+ inline: false,
19624
+ basePath: root
19625
+ });
19626
+ }
19592
19627
  const result = ref.render();
19593
19628
  // Concat imports deps with computed deps
19594
19629
  const deps = [...ref.deps(), ...importsDeps];
@@ -19607,6 +19642,8 @@ const styl = async (source, root, options) => {
19607
19642
  }
19608
19643
  };
19609
19644
  function formatStylusSourceMap(mapBefore, root) {
19645
+ if (!mapBefore)
19646
+ return undefined;
19610
19647
  const map = { ...mapBefore };
19611
19648
  const resolveFromRoot = (p) => normalizePath$4(path__default.resolve(root, p));
19612
19649
  if (map.file) {
@@ -19615,7 +19652,7 @@ function formatStylusSourceMap(mapBefore, root) {
19615
19652
  map.sources = map.sources.map(resolveFromRoot);
19616
19653
  return map;
19617
19654
  }
19618
- async function getSource(source, filename, additionalData, sep = '') {
19655
+ async function getSource(source, filename, additionalData, enableSourcemap, sep = '') {
19619
19656
  if (!additionalData)
19620
19657
  return { content: source };
19621
19658
  if (typeof additionalData === 'function') {
@@ -19625,6 +19662,9 @@ async function getSource(source, filename, additionalData, sep = '') {
19625
19662
  }
19626
19663
  return newContent;
19627
19664
  }
19665
+ if (!enableSourcemap) {
19666
+ return { content: additionalData + sep + source };
19667
+ }
19628
19668
  const ms = new MagicString$1(source);
19629
19669
  ms.appendLeft(0, sep);
19630
19670
  ms.appendLeft(0, additionalData);
@@ -21294,6 +21334,7 @@ function preload(baseModule, deps) {
21294
21334
  function buildImportAnalysisPlugin(config) {
21295
21335
  const ssr = !!config.build.ssr;
21296
21336
  const insertPreload = !(ssr || !!config.build.lib);
21337
+ const isWorker = config.isWorker;
21297
21338
  const scriptRel = config.build.polyfillModulePreload
21298
21339
  ? `'modulepreload'`
21299
21340
  : `(${detectScriptRel.toString()})()`;
@@ -21320,6 +21361,10 @@ function buildImportAnalysisPlugin(config) {
21320
21361
  !source.includes('import.meta.glob')) {
21321
21362
  return;
21322
21363
  }
21364
+ if (isWorker) {
21365
+ // preload method use `document` and can't run in the worker
21366
+ return;
21367
+ }
21323
21368
  await init;
21324
21369
  let imports = [];
21325
21370
  try {
@@ -21403,7 +21448,7 @@ function buildImportAnalysisPlugin(config) {
21403
21448
  return null;
21404
21449
  },
21405
21450
  generateBundle({ format }, bundle) {
21406
- if (format !== 'es' || ssr) {
21451
+ if (format !== 'es' || ssr || isWorker) {
21407
21452
  return;
21408
21453
  }
21409
21454
  for (const file in bundle) {
@@ -21598,7 +21643,7 @@ function htmlInlineProxyPlugin(config) {
21598
21643
  const file = cleanUrl(id);
21599
21644
  const url = file.replace(normalizePath$4(config.root), '');
21600
21645
  const result = htmlProxyMap.get(config).get(url)[index];
21601
- if (typeof result === 'string') {
21646
+ if (result) {
21602
21647
  return result;
21603
21648
  }
21604
21649
  else {
@@ -21608,14 +21653,14 @@ function htmlInlineProxyPlugin(config) {
21608
21653
  }
21609
21654
  };
21610
21655
  }
21611
- function addToHTMLProxyCache(config, filePath, index, code) {
21656
+ function addToHTMLProxyCache(config, filePath, index, result) {
21612
21657
  if (!htmlProxyMap.get(config)) {
21613
21658
  htmlProxyMap.set(config, new Map());
21614
21659
  }
21615
21660
  if (!htmlProxyMap.get(config).get(filePath)) {
21616
21661
  htmlProxyMap.get(config).set(filePath, []);
21617
21662
  }
21618
- htmlProxyMap.get(config).get(filePath)[index] = code;
21663
+ htmlProxyMap.get(config).get(filePath)[index] = result;
21619
21664
  }
21620
21665
  function addToHTMLProxyTransformResult(hash, code) {
21621
21666
  htmlProxyResult.set(hash, code);
@@ -21632,7 +21677,7 @@ const assetAttrsConfig = {
21632
21677
  const isAsyncScriptMap = new WeakMap();
21633
21678
  async function traverseHtml(html, filePath, visitor) {
21634
21679
  // lazy load compiler
21635
- const { parse, transform } = await Promise.resolve().then(function () { return require('./dep-5ae06c1f.js'); }).then(function (n) { return n.compilerDom_cjs; });
21680
+ const { parse, transform } = await Promise.resolve().then(function () { return require('./dep-f61a3114.js'); }).then(function (n) { return n.compilerDom_cjs; });
21636
21681
  // @vue/compiler-core doesn't like lowercase doctypes
21637
21682
  html = html.replace(/<!doctype\s/i, '<!DOCTYPE ');
21638
21683
  try {
@@ -21747,7 +21792,9 @@ function buildHtmlPlugin(config) {
21747
21792
  .join('');
21748
21793
  // <script type="module">...</script>
21749
21794
  const filePath = id.replace(normalizePath$4(config.root), '');
21750
- addToHTMLProxyCache(config, filePath, inlineModuleIndex, contents);
21795
+ addToHTMLProxyCache(config, filePath, inlineModuleIndex, {
21796
+ code: contents
21797
+ });
21751
21798
  js += `\nimport "${id}?html-proxy&index=${inlineModuleIndex}.js"`;
21752
21799
  shouldRemove = true;
21753
21800
  }
@@ -21813,7 +21860,7 @@ function buildHtmlPlugin(config) {
21813
21860
  const styleNode = inlineStyle.value;
21814
21861
  const code = styleNode.content;
21815
21862
  const filePath = id.replace(normalizePath$4(config.root), '');
21816
- addToHTMLProxyCache(config, filePath, inlineModuleIndex, code);
21863
+ addToHTMLProxyCache(config, filePath, inlineModuleIndex, { code });
21817
21864
  // will transform with css plugin and cache result with css-post plugin
21818
21865
  js += `\nimport "${id}?html-proxy&inline-css&index=${inlineModuleIndex}.css"`;
21819
21866
  // will transfrom in `applyHtmlTransforms`
@@ -21824,7 +21871,9 @@ function buildHtmlPlugin(config) {
21824
21871
  const styleNode = node.children.pop();
21825
21872
  const filePath = id.replace(normalizePath$4(config.root), '');
21826
21873
  inlineModuleIndex++;
21827
- addToHTMLProxyCache(config, filePath, inlineModuleIndex, styleNode.content);
21874
+ addToHTMLProxyCache(config, filePath, inlineModuleIndex, {
21875
+ code: styleNode.content
21876
+ });
21828
21877
  js += `\nimport "${id}?html-proxy&index=${inlineModuleIndex}.css"`;
21829
21878
  shouldRemove = true;
21830
21879
  }
@@ -22034,11 +22083,13 @@ function resolveHtmlTransforms(plugins) {
22034
22083
  }
22035
22084
  return [preHooks, postHooks];
22036
22085
  }
22086
+ const maybeVirtualHtmlSet = new Set();
22037
22087
  async function applyHtmlTransforms(html, hooks, ctx) {
22038
22088
  const headTags = [];
22039
22089
  const headPrependTags = [];
22040
22090
  const bodyTags = [];
22041
22091
  const bodyPrependTags = [];
22092
+ maybeVirtualHtmlSet.add(ctx.filename);
22042
22093
  for (const hook of hooks) {
22043
22094
  const res = await hook(html, ctx);
22044
22095
  if (!res) {
@@ -22604,11 +22655,21 @@ async function parseExtends(result, cache) {
22604
22655
  }
22605
22656
  }
22606
22657
  function resolveExtends(extended, from) {
22658
+ let error;
22607
22659
  try {
22608
22660
  return require$$0$5.createRequire(from).resolve(extended);
22609
22661
  } catch (e) {
22610
- throw new TSConfckParseError(`failed to resolve "extends":"${extended}" in ${from}`, "EXTENDS_RESOLVE", from, e);
22662
+ error = e;
22663
+ }
22664
+ if (!path__default.isAbsolute(extended) && !extended.startsWith("./") && !extended.startsWith("../")) {
22665
+ try {
22666
+ const fallbackExtended = path__default.join(extended, "tsconfig.json");
22667
+ return require$$0$5.createRequire(from).resolve(fallbackExtended);
22668
+ } catch (e) {
22669
+ error = e;
22670
+ }
22611
22671
  }
22672
+ throw new TSConfckParseError(`failed to resolve "extends":"${extended}" in ${from}`, "EXTENDS_RESOLVE", from, error);
22612
22673
  }
22613
22674
  var EXTENDABLE_KEYS = [
22614
22675
  "compilerOptions",
@@ -30217,17 +30278,18 @@ const externalTypes = [
30217
30278
  'tsx',
30218
30279
  ...KNOWN_ASSET_TYPES
30219
30280
  ];
30220
- function esbuildDepPlugin(qualified, exportsData, config, ssr) {
30281
+ function esbuildDepPlugin(qualified, exportsData, config) {
30221
30282
  // remove optimizable extensions from `externalTypes` list
30222
30283
  const allExternalTypes = config.optimizeDeps.extensions
30223
30284
  ? externalTypes.filter((type) => { var _a; return !((_a = config.optimizeDeps.extensions) === null || _a === void 0 ? void 0 : _a.includes('.' + type)); })
30224
30285
  : externalTypes;
30225
30286
  // default resolver which prefers ESM
30226
- const _resolve = config.createResolver({ asSrc: false });
30287
+ const _resolve = config.createResolver({ asSrc: false, scan: true });
30227
30288
  // cjs resolver that prefers Node
30228
30289
  const _resolveRequire = config.createResolver({
30229
30290
  asSrc: false,
30230
- isRequire: true
30291
+ isRequire: true,
30292
+ scan: true
30231
30293
  });
30232
30294
  const resolve = (id, importer, kind, resolveDir) => {
30233
30295
  let _importer;
@@ -30241,7 +30303,7 @@ function esbuildDepPlugin(qualified, exportsData, config, ssr) {
30241
30303
  _importer = importer in qualified ? qualified[importer] : importer;
30242
30304
  }
30243
30305
  const resolver = kind.startsWith('require') ? _resolveRequire : _resolve;
30244
- return resolver(id, _importer, undefined, ssr);
30306
+ return resolver(id, _importer, undefined);
30245
30307
  };
30246
30308
  return {
30247
30309
  name: 'vite:dep-pre-bundle',
@@ -36138,6 +36200,7 @@ async function createPluginContainer({ plugins, logger, root, build: { rollupOpt
36138
36200
  constructor(initialPlugin) {
36139
36201
  this.meta = minimalContext.meta;
36140
36202
  this.ssr = false;
36203
+ this._scan = false;
36141
36204
  this._activeId = null;
36142
36205
  this._activeCode = null;
36143
36206
  this._addedImports = null;
@@ -36157,7 +36220,11 @@ async function createPluginContainer({ plugins, logger, root, build: { rollupOpt
36157
36220
  skip = new Set(this._resolveSkips);
36158
36221
  skip.add(this._activePlugin);
36159
36222
  }
36160
- let out = await container.resolveId(id, importer, { skip, ssr: this.ssr });
36223
+ let out = await container.resolveId(id, importer, {
36224
+ skip,
36225
+ ssr: this.ssr,
36226
+ scan: this._scan
36227
+ });
36161
36228
  if (typeof out === 'string')
36162
36229
  out = { id: out };
36163
36230
  return out;
@@ -36320,7 +36387,7 @@ async function createPluginContainer({ plugins, logger, root, build: { rollupOpt
36320
36387
  ? new MagicString$1(this.originalCode).generateMap({
36321
36388
  includeContent: true,
36322
36389
  hires: true,
36323
- source: this.filename
36390
+ source: cleanUrl(this.filename)
36324
36391
  })
36325
36392
  : null;
36326
36393
  }
@@ -36364,8 +36431,10 @@ async function createPluginContainer({ plugins, logger, root, build: { rollupOpt
36364
36431
  async resolveId(rawId, importer = path$p.join(root, 'index.html'), options) {
36365
36432
  const skip = options === null || options === void 0 ? void 0 : options.skip;
36366
36433
  const ssr = options === null || options === void 0 ? void 0 : options.ssr;
36434
+ const scan = !!(options === null || options === void 0 ? void 0 : options.scan);
36367
36435
  const ctx = new Context();
36368
36436
  ctx.ssr = !!ssr;
36437
+ ctx._scan = scan;
36369
36438
  ctx._resolveSkips = skip;
36370
36439
  const resolveStart = isDebug ? perf_hooks.performance.now() : 0;
36371
36440
  let id = null;
@@ -36377,7 +36446,7 @@ async function createPluginContainer({ plugins, logger, root, build: { rollupOpt
36377
36446
  continue;
36378
36447
  ctx._activePlugin = plugin;
36379
36448
  const pluginResolveStart = isDebug ? perf_hooks.performance.now() : 0;
36380
- const result = await plugin.resolveId.call(ctx, rawId, importer, { ssr });
36449
+ const result = await plugin.resolveId.call(ctx, rawId, importer, { ssr, scan });
36381
36450
  if (!result)
36382
36451
  continue;
36383
36452
  if (typeof result === 'string') {
@@ -36549,10 +36618,17 @@ async function scanImports(config) {
36549
36618
  })));
36550
36619
  debug$d(`Scan completed in ${(perf_hooks.performance.now() - start).toFixed(2)}ms:`, deps);
36551
36620
  return {
36552
- deps,
36621
+ // Ensure a fixed order so hashes are stable and improve logs
36622
+ deps: orderedDependencies(deps),
36553
36623
  missing
36554
36624
  };
36555
36625
  }
36626
+ function orderedDependencies(deps) {
36627
+ const depsList = Object.entries(deps);
36628
+ // Ensure the same browserHash for the same set of dependencies
36629
+ depsList.sort((a, b) => a[0].localeCompare(b[0]));
36630
+ return Object.fromEntries(depsList);
36631
+ }
36556
36632
  function globEntries(pattern, config) {
36557
36633
  return out(pattern, {
36558
36634
  cwd: config.root,
@@ -36579,7 +36655,9 @@ function esbuildScanPlugin(config, container, depImports, missing, entries) {
36579
36655
  if (seen.has(key)) {
36580
36656
  return seen.get(key);
36581
36657
  }
36582
- const resolved = await container.resolveId(id, importer && normalizePath$4(importer));
36658
+ const resolved = await container.resolveId(id, importer && normalizePath$4(importer), {
36659
+ scan: true
36660
+ });
36583
36661
  const res = resolved === null || resolved === void 0 ? void 0 : resolved.id;
36584
36662
  seen.set(key, res);
36585
36663
  return res;
@@ -36885,75 +36963,127 @@ function isScannable(id) {
36885
36963
  return JS_TYPES_RE.test(id) || htmlTypesRE.test(id);
36886
36964
  }
36887
36965
 
36888
- const debug$c = createDebugger('vite:deps');
36889
- const isDebugEnabled = _debug('vite:deps').enabled;
36966
+ const debuggerViteDeps = createDebugger('vite:deps');
36967
+ const debug$c = debuggerViteDeps;
36968
+ const isDebugEnabled$1 = _debug('vite:deps').enabled;
36890
36969
  const jsExtensionRE = /\.js$/i;
36891
36970
  const jsMapExtensionRE = /\.js\.map$/i;
36892
36971
  /**
36893
36972
  * Used by Vite CLI when running `vite optimize`
36894
36973
  */
36895
- async function optimizeDeps(config, force = config.server.force, asCommand = false, newDeps, // missing imports encountered after server has started
36896
- ssr) {
36897
- const { metadata, run } = await createOptimizeDepsRun(config, force, asCommand, null, newDeps, ssr);
36898
- const result = await run();
36974
+ async function optimizeDeps(config, force = config.server.force, asCommand = false) {
36975
+ const log = asCommand ? config.logger.info : debug$c;
36976
+ const cachedMetadata = loadCachedDepOptimizationMetadata(config, force, asCommand);
36977
+ if (cachedMetadata) {
36978
+ return cachedMetadata;
36979
+ }
36980
+ const depsInfo = await discoverProjectDependencies(config);
36981
+ const depsString = depsLogString(Object.keys(depsInfo));
36982
+ log(colors$1.green(`Optimizing dependencies:\n ${depsString}`));
36983
+ const result = await runOptimizeDeps(config, depsInfo);
36899
36984
  result.commit();
36900
- return metadata;
36985
+ return result.metadata;
36986
+ }
36987
+ function createOptimizedDepsMetadata(config, timestamp) {
36988
+ const hash = getDepHash(config);
36989
+ return {
36990
+ hash,
36991
+ browserHash: getOptimizedBrowserHash(hash, {}, timestamp),
36992
+ optimized: {},
36993
+ chunks: {},
36994
+ discovered: {},
36995
+ depInfoList: []
36996
+ };
36997
+ }
36998
+ function addOptimizedDepInfo(metadata, type, depInfo) {
36999
+ metadata[type][depInfo.id] = depInfo;
37000
+ metadata.depInfoList.push(depInfo);
37001
+ return depInfo;
36901
37002
  }
36902
37003
  /**
36903
- * Internally, Vite uses this function to prepare a optimizeDeps run. When Vite starts, we can get
36904
- * the metadata and start the server without waiting for the optimizeDeps processing to be completed
37004
+ * Creates the initial dep optimization metadata, loading it from the deps cache
37005
+ * if it exists and pre-bundling isn't forced
36905
37006
  */
36906
- async function createOptimizeDepsRun(config, force = config.server.force, asCommand = false, currentData = null, newDeps, // missing imports encountered after server has started
36907
- ssr) {
36908
- config = {
36909
- ...config,
36910
- command: 'build'
36911
- };
36912
- const { root, logger } = config;
36913
- const log = asCommand ? logger.info : debug$c;
37007
+ function loadCachedDepOptimizationMetadata(config, force = config.server.force, asCommand = false) {
37008
+ const log = asCommand ? config.logger.info : debug$c;
36914
37009
  // Before Vite 2.9, dependencies were cached in the root of the cacheDir
36915
37010
  // For compat, we remove the cache if we find the old structure
36916
37011
  if (fs__default.existsSync(path__default.join(config.cacheDir, '_metadata.json'))) {
36917
37012
  emptyDir(config.cacheDir);
36918
37013
  }
36919
37014
  const depsCacheDir = getDepsCacheDir(config);
36920
- const processingCacheDir = getProcessingDepsCacheDir(config);
36921
- const mainHash = getDepHash(root, config);
36922
- const processing = newDepOptimizationProcessing();
36923
- const metadata = {
36924
- hash: mainHash,
36925
- browserHash: mainHash,
36926
- optimized: {},
36927
- chunks: {},
36928
- discovered: {}
36929
- };
36930
37015
  if (!force) {
36931
- let prevData;
37016
+ let cachedMetadata;
36932
37017
  try {
36933
- const prevDataPath = path__default.join(depsCacheDir, '_metadata.json');
36934
- prevData = parseOptimizedDepsMetadata(fs__default.readFileSync(prevDataPath, 'utf-8'), depsCacheDir);
37018
+ const cachedMetadataPath = path__default.join(depsCacheDir, '_metadata.json');
37019
+ cachedMetadata = parseOptimizedDepsMetadata(fs__default.readFileSync(cachedMetadataPath, 'utf-8'), depsCacheDir);
36935
37020
  }
36936
37021
  catch (e) { }
36937
37022
  // hash is consistent, no need to re-bundle
36938
- if (prevData && prevData.hash === metadata.hash) {
37023
+ if (cachedMetadata && cachedMetadata.hash === getDepHash(config)) {
36939
37024
  log('Hash is consistent. Skipping. Use --force to override.');
36940
37025
  // Nothing to commit or cancel as we are using the cache, we only
36941
37026
  // need to resolve the processing promise so requests can move on
36942
- const resolve = () => {
36943
- processing.resolve();
36944
- };
36945
- return {
36946
- metadata: prevData,
36947
- run: async () => {
36948
- return {
36949
- alteredFiles: false,
36950
- commit: resolve,
36951
- cancel: resolve
36952
- };
36953
- }
36954
- };
37027
+ return cachedMetadata;
36955
37028
  }
36956
37029
  }
37030
+ else {
37031
+ config.logger.info('Forced re-optimization of dependencies');
37032
+ }
37033
+ // Start with a fresh cache
37034
+ removeDirSync(depsCacheDir);
37035
+ }
37036
+ /**
37037
+ * Initial optimizeDeps at server start. Perform a fast scan using esbuild to
37038
+ * find deps to pre-bundle and include user hard-coded dependencies
37039
+ */
37040
+ async function discoverProjectDependencies(config, timestamp) {
37041
+ const { deps, missing } = await scanImports(config);
37042
+ const missingIds = Object.keys(missing);
37043
+ if (missingIds.length) {
37044
+ throw new Error(`The following dependencies are imported but could not be resolved:\n\n ${missingIds
37045
+ .map((id) => `${colors$1.cyan(id)} ${colors$1.white(colors$1.dim(`(imported by ${missing[id]})`))}`)
37046
+ .join(`\n `)}\n\nAre they installed?`);
37047
+ }
37048
+ await addManuallyIncludedOptimizeDeps(deps, config);
37049
+ const browserHash = getOptimizedBrowserHash(getDepHash(config), deps, timestamp);
37050
+ const discovered = {};
37051
+ for (const id in deps) {
37052
+ const entry = deps[id];
37053
+ discovered[id] = {
37054
+ id,
37055
+ file: getOptimizedDepPath(id, config),
37056
+ src: entry,
37057
+ browserHash: browserHash
37058
+ };
37059
+ }
37060
+ return discovered;
37061
+ }
37062
+ function depsLogString(qualifiedIds) {
37063
+ if (isDebugEnabled$1) {
37064
+ return colors$1.yellow(qualifiedIds.join(`\n `));
37065
+ }
37066
+ else {
37067
+ const total = qualifiedIds.length;
37068
+ const maxListed = 5;
37069
+ const listed = Math.min(total, maxListed);
37070
+ const extra = Math.max(0, total - maxListed);
37071
+ return colors$1.yellow(qualifiedIds.slice(0, listed).join(`, `) +
37072
+ (extra > 0 ? `, ...and ${extra} more` : ``));
37073
+ }
37074
+ }
37075
+ /**
37076
+ * Internally, Vite uses this function to prepare a optimizeDeps run. When Vite starts, we can get
37077
+ * the metadata and start the server without waiting for the optimizeDeps processing to be completed
37078
+ */
37079
+ async function runOptimizeDeps(config, depsInfo) {
37080
+ var _a, _b, _c, _d;
37081
+ config = {
37082
+ ...config,
37083
+ command: 'build'
37084
+ };
37085
+ const depsCacheDir = getDepsCacheDir(config);
37086
+ const processingCacheDir = getProcessingDepsCacheDir(config);
36957
37087
  // Create a temporal directory so we don't need to delete optimized deps
36958
37088
  // until they have been processed. This also avoids leaving the deps cache
36959
37089
  // directory in a corrupted state if there is an error
@@ -36966,256 +37096,156 @@ ssr) {
36966
37096
  // a hint for Node.js
36967
37097
  // all files in the cache directory should be recognized as ES modules
36968
37098
  writeFile(path__default.resolve(processingCacheDir, 'package.json'), JSON.stringify({ type: 'module' }));
36969
- let newBrowserHash;
36970
- let deps;
36971
- if (!newDeps) {
36972
- // Initial optimizeDeps at server start. Perform a fast scan using esbuild to
36973
- // find deps to pre-bundle and include user hard-coded dependencies
36974
- let missing;
36975
- ({ deps, missing } = await scanImports(config));
36976
- const missingIds = Object.keys(missing);
36977
- if (missingIds.length) {
36978
- processing.resolve();
36979
- throw new Error(`The following dependencies are imported but could not be resolved:\n\n ${missingIds
36980
- .map((id) => `${colors$1.cyan(id)} ${colors$1.white(colors$1.dim(`(imported by ${missing[id]})`))}`)
36981
- .join(`\n `)}\n\nAre they installed?`);
36982
- }
36983
- try {
36984
- await addManuallyIncludedOptimizeDeps(deps, config);
36985
- }
36986
- catch (e) {
36987
- processing.resolve();
36988
- throw e;
36989
- }
36990
- // update browser hash
36991
- newBrowserHash = metadata.browserHash = getOptimizedBrowserHash(metadata.hash, deps);
36992
- // We generate the mapping of dependency ids to their cache file location
36993
- // before processing the dependencies with esbuild. This allow us to continue
36994
- // processing files in the importAnalysis and resolve plugins
36995
- for (const id in deps) {
36996
- const entry = deps[id];
36997
- metadata.optimized[id] = {
36998
- file: getOptimizedDepPath(id, config),
36999
- src: entry,
37000
- browserHash: newBrowserHash,
37001
- processing: processing.promise
37002
- };
37003
- }
37099
+ const metadata = createOptimizedDepsMetadata(config);
37100
+ metadata.browserHash = getOptimizedBrowserHash(metadata.hash, depsFromOptimizedDepInfo(depsInfo));
37101
+ // We prebundle dependencies with esbuild and cache them, but there is no need
37102
+ // to wait here. Code that needs to access the cached deps needs to await
37103
+ // the optimizedDepInfo.processing promise for each dep
37104
+ const qualifiedIds = Object.keys(depsInfo);
37105
+ if (!qualifiedIds.length) {
37106
+ return {
37107
+ metadata,
37108
+ commit() {
37109
+ // Write metadata file, delete `deps` folder and rename the `processing` folder to `deps`
37110
+ commitProcessingDepsCacheSync();
37111
+ config.logger.info(`No dependencies to bundle. Skipping.\n\n\n`);
37112
+ },
37113
+ cancel
37114
+ };
37004
37115
  }
37005
- else {
37006
- // Missing dependencies were found at run-time, optimizeDeps called while the
37007
- // server is running
37008
- deps = depsFromOptimizedDepInfo(newDeps);
37009
- metadata.optimized = newDeps;
37010
- // For reruns keep current global browser hash and newDeps individual hashes until we know
37011
- // if files are stable so we can avoid a full page reload
37012
- metadata.browserHash = currentData.browserHash;
37013
- newBrowserHash = getOptimizedBrowserHash(metadata.hash, deps);
37014
- }
37015
- return { metadata, run: prebundleDeps };
37016
- async function prebundleDeps() {
37017
- // We prebundle dependencies with esbuild and cache them, but there is no need
37018
- // to wait here. Code that needs to access the cached deps needs to await
37019
- // the optimizeDepInfo.processing promise for each dep
37020
- var _a, _b, _c, _d, _e;
37021
- const qualifiedIds = Object.keys(deps);
37022
- if (!qualifiedIds.length) {
37023
- return {
37024
- alteredFiles: false,
37025
- commit() {
37026
- // Write metadata file, delete `deps` folder and rename the `processing` folder to `deps`
37027
- commitProcessingDepsCacheSync();
37028
- log(`No dependencies to bundle. Skipping.\n\n\n`);
37029
- processing.resolve();
37030
- },
37031
- cancel
37032
- };
37033
- }
37034
- let depsString;
37035
- if (isDebugEnabled) {
37036
- depsString = colors$1.yellow(qualifiedIds.join(`\n `));
37116
+ // esbuild generates nested directory output with lowest common ancestor base
37117
+ // this is unpredictable and makes it difficult to analyze entry / output
37118
+ // mapping. So what we do here is:
37119
+ // 1. flatten all ids to eliminate slash
37120
+ // 2. in the plugin, read the entry ourselves as virtual files to retain the
37121
+ // path.
37122
+ const flatIdDeps = {};
37123
+ const idToExports = {};
37124
+ const flatIdToExports = {};
37125
+ const { plugins = [], ...esbuildOptions } = (_b = (_a = config.optimizeDeps) === null || _a === void 0 ? void 0 : _a.esbuildOptions) !== null && _b !== void 0 ? _b : {};
37126
+ await init;
37127
+ for (const id in depsInfo) {
37128
+ const flatId = flattenId(id);
37129
+ const filePath = (flatIdDeps[flatId] = depsInfo[id].src);
37130
+ let exportsData;
37131
+ if ((_c = config.optimizeDeps.extensions) === null || _c === void 0 ? void 0 : _c.some((ext) => filePath.endsWith(ext))) {
37132
+ // For custom supported extensions, build the entry file to transform it into JS,
37133
+ // and then parse with es-module-lexer. Note that the `bundle` option is not `true`,
37134
+ // so only the entry file is being transformed.
37135
+ const result = await esbuild.build({
37136
+ ...esbuildOptions,
37137
+ plugins,
37138
+ entryPoints: [filePath],
37139
+ write: false,
37140
+ format: 'esm'
37141
+ });
37142
+ exportsData = parse$f(result.outputFiles[0].text);
37037
37143
  }
37038
37144
  else {
37039
- const total = qualifiedIds.length;
37040
- const maxListed = 5;
37041
- const listed = Math.min(total, maxListed);
37042
- const extra = Math.max(0, total - maxListed);
37043
- depsString = colors$1.yellow(qualifiedIds.slice(0, listed).join(`\n `) +
37044
- (extra > 0 ? `\n (...and ${extra} more)` : ``));
37045
- }
37046
- if (!asCommand) {
37047
- if (!newDeps) {
37048
- // This is auto run on server start - let the user know that we are
37049
- // pre-optimizing deps
37050
- logger.info(colors$1.green(`Pre-bundling dependencies:\n ${depsString}`));
37051
- logger.info(`(this will be run only when your dependencies or config have changed)`);
37145
+ const entryContent = fs__default.readFileSync(filePath, 'utf-8');
37146
+ try {
37147
+ exportsData = parse$f(entryContent);
37052
37148
  }
37053
- }
37054
- else {
37055
- logger.info(colors$1.green(`Optimizing dependencies:\n ${depsString}`));
37056
- }
37057
- // esbuild generates nested directory output with lowest common ancestor base
37058
- // this is unpredictable and makes it difficult to analyze entry / output
37059
- // mapping. So what we do here is:
37060
- // 1. flatten all ids to eliminate slash
37061
- // 2. in the plugin, read the entry ourselves as virtual files to retain the
37062
- // path.
37063
- const flatIdDeps = {};
37064
- const idToExports = {};
37065
- const flatIdToExports = {};
37066
- const { plugins = [], ...esbuildOptions } = (_b = (_a = config.optimizeDeps) === null || _a === void 0 ? void 0 : _a.esbuildOptions) !== null && _b !== void 0 ? _b : {};
37067
- await init;
37068
- for (const id in deps) {
37069
- const flatId = flattenId(id);
37070
- const filePath = (flatIdDeps[flatId] = deps[id]);
37071
- let exportsData;
37072
- if ((_c = config.optimizeDeps.extensions) === null || _c === void 0 ? void 0 : _c.some((ext) => filePath.endsWith(ext))) {
37073
- // For custom supported extensions, build the entry file to transform it into JS,
37074
- // and then parse with es-module-lexer. Note that the `bundle` option is not `true`,
37075
- // so only the entry file is being transformed.
37076
- const result = await esbuild.build({
37077
- ...esbuildOptions,
37078
- plugins,
37079
- entryPoints: [filePath],
37080
- write: false,
37081
- format: 'esm'
37149
+ catch {
37150
+ debug$c(`Unable to parse dependency: ${id}. Trying again with a JSX transform.`);
37151
+ const transformed = await transformWithEsbuild(entryContent, filePath, {
37152
+ loader: 'jsx'
37082
37153
  });
37083
- exportsData = parse$f(result.outputFiles[0].text);
37154
+ // Ensure that optimization won't fail by defaulting '.js' to the JSX parser.
37155
+ // This is useful for packages such as Gatsby.
37156
+ esbuildOptions.loader = {
37157
+ '.js': 'jsx',
37158
+ ...esbuildOptions.loader
37159
+ };
37160
+ exportsData = parse$f(transformed.code);
37084
37161
  }
37085
- else {
37086
- const entryContent = fs__default.readFileSync(filePath, 'utf-8');
37087
- try {
37088
- exportsData = parse$f(entryContent);
37089
- }
37090
- catch {
37091
- debug$c(`Unable to parse dependency: ${id}. Trying again with a JSX transform.`);
37092
- const transformed = await transformWithEsbuild(entryContent, filePath, {
37093
- loader: 'jsx'
37094
- });
37095
- // Ensure that optimization won't fail by defaulting '.js' to the JSX parser.
37096
- // This is useful for packages such as Gatsby.
37097
- esbuildOptions.loader = {
37098
- '.js': 'jsx',
37099
- ...esbuildOptions.loader
37100
- };
37101
- exportsData = parse$f(transformed.code);
37102
- }
37103
- for (const { ss, se } of exportsData[0]) {
37104
- const exp = entryContent.slice(ss, se);
37105
- if (/export\s+\*\s+from/.test(exp)) {
37106
- exportsData.hasReExports = true;
37107
- }
37162
+ for (const { ss, se } of exportsData[0]) {
37163
+ const exp = entryContent.slice(ss, se);
37164
+ if (/export\s+\*\s+from/.test(exp)) {
37165
+ exportsData.hasReExports = true;
37108
37166
  }
37109
37167
  }
37110
- idToExports[id] = exportsData;
37111
- flatIdToExports[flatId] = exportsData;
37112
- }
37113
- const define = {
37114
- 'process.env.NODE_ENV': JSON.stringify(config.mode)
37115
- };
37116
- for (const key in config.define) {
37117
- const value = config.define[key];
37118
- define[key] = typeof value === 'string' ? value : JSON.stringify(value);
37119
37168
  }
37120
- const start = perf_hooks.performance.now();
37121
- const result = await esbuild.build({
37122
- absWorkingDir: process.cwd(),
37123
- entryPoints: Object.keys(flatIdDeps),
37124
- bundle: true,
37125
- format: 'esm',
37126
- target: config.build.target || undefined,
37127
- external: (_d = config.optimizeDeps) === null || _d === void 0 ? void 0 : _d.exclude,
37128
- logLevel: 'error',
37129
- splitting: true,
37130
- sourcemap: true,
37131
- outdir: processingCacheDir,
37132
- ignoreAnnotations: true,
37133
- metafile: true,
37134
- define,
37135
- plugins: [
37136
- ...plugins,
37137
- esbuildDepPlugin(flatIdDeps, flatIdToExports, config, ssr)
37138
- ],
37139
- ...esbuildOptions
37169
+ idToExports[id] = exportsData;
37170
+ flatIdToExports[flatId] = exportsData;
37171
+ }
37172
+ const define = {
37173
+ 'process.env.NODE_ENV': JSON.stringify(config.mode)
37174
+ };
37175
+ for (const key in config.define) {
37176
+ const value = config.define[key];
37177
+ define[key] = typeof value === 'string' ? value : JSON.stringify(value);
37178
+ }
37179
+ const start = perf_hooks.performance.now();
37180
+ const result = await esbuild.build({
37181
+ absWorkingDir: process.cwd(),
37182
+ entryPoints: Object.keys(flatIdDeps),
37183
+ bundle: true,
37184
+ format: 'esm',
37185
+ target: config.build.target || undefined,
37186
+ external: (_d = config.optimizeDeps) === null || _d === void 0 ? void 0 : _d.exclude,
37187
+ logLevel: 'error',
37188
+ splitting: true,
37189
+ sourcemap: true,
37190
+ outdir: processingCacheDir,
37191
+ ignoreAnnotations: true,
37192
+ metafile: true,
37193
+ define,
37194
+ plugins: [
37195
+ ...plugins,
37196
+ esbuildDepPlugin(flatIdDeps, flatIdToExports, config)
37197
+ ],
37198
+ ...esbuildOptions
37199
+ });
37200
+ const meta = result.metafile;
37201
+ // the paths in `meta.outputs` are relative to `process.cwd()`
37202
+ const processingCacheDirOutputPath = path__default.relative(process.cwd(), processingCacheDir);
37203
+ for (const id in depsInfo) {
37204
+ const output = esbuildOutputFromId(meta.outputs, id, processingCacheDir);
37205
+ addOptimizedDepInfo(metadata, 'optimized', {
37206
+ ...depsInfo[id],
37207
+ needsInterop: needsInterop(id, idToExports[id], output),
37208
+ // We only need to hash the output.imports in to check for stability, but adding the hash
37209
+ // and file path gives us a unique hash that may be useful for other things in the future
37210
+ fileHash: getHash(metadata.hash + depsInfo[id].file + JSON.stringify(output.imports)),
37211
+ browserHash: metadata.browserHash
37140
37212
  });
37141
- const meta = result.metafile;
37142
- // the paths in `meta.outputs` are relative to `process.cwd()`
37143
- const processingCacheDirOutputPath = path__default.relative(process.cwd(), processingCacheDir);
37144
- for (const id in deps) {
37145
- const optimizedInfo = metadata.optimized[id];
37146
- optimizedInfo.needsInterop = needsInterop(id, idToExports[id], meta.outputs, processingCacheDirOutputPath);
37147
- const output = meta.outputs[path__default.relative(process.cwd(), getProcessingDepPath(id, config))];
37148
- if (output) {
37149
- // We only need to hash the output.imports in to check for stability, but adding the hash
37150
- // and file path gives us a unique hash that may be useful for other things in the future
37151
- optimizedInfo.fileHash = getHash(metadata.hash + optimizedInfo.file + JSON.stringify(output.imports));
37152
- }
37153
- }
37154
- // This only runs when missing deps are processed. Previous optimized deps are stable if
37155
- // the newly discovered deps don't have common chunks with them. Comparing their fileHash we
37156
- // can find out if it is safe to keep the current browser state. If one of the file hashes
37157
- // changed, a full page reload is needed
37158
- let alteredFiles = false;
37159
- if (currentData) {
37160
- alteredFiles = Object.keys(currentData.optimized).some((dep) => {
37161
- const currentInfo = currentData.optimized[dep];
37162
- const info = metadata.optimized[dep];
37163
- return (!(info === null || info === void 0 ? void 0 : info.fileHash) ||
37164
- !(currentInfo === null || currentInfo === void 0 ? void 0 : currentInfo.fileHash) ||
37165
- (info === null || info === void 0 ? void 0 : info.fileHash) !== (currentInfo === null || currentInfo === void 0 ? void 0 : currentInfo.fileHash));
37166
- });
37167
- debug$c(`optimized deps have altered files: ${alteredFiles}`);
37168
- }
37169
- for (const o of Object.keys(meta.outputs)) {
37170
- if (!o.match(jsMapExtensionRE)) {
37171
- const id = path__default
37172
- .relative(processingCacheDirOutputPath, o)
37173
- .replace(jsExtensionRE, '');
37174
- const file = getOptimizedDepPath(id, config);
37175
- if (!findFileInfo(metadata.optimized, file)) {
37176
- metadata.chunks[id] = {
37177
- file,
37178
- src: '',
37179
- needsInterop: false,
37180
- browserHash: (!alteredFiles && ((_e = currentData === null || currentData === void 0 ? void 0 : currentData.chunks[id]) === null || _e === void 0 ? void 0 : _e.browserHash)) ||
37181
- newBrowserHash
37182
- };
37183
- }
37213
+ }
37214
+ for (const o of Object.keys(meta.outputs)) {
37215
+ if (!o.match(jsMapExtensionRE)) {
37216
+ const id = path__default
37217
+ .relative(processingCacheDirOutputPath, o)
37218
+ .replace(jsExtensionRE, '');
37219
+ const file = getOptimizedDepPath(id, config);
37220
+ if (!findOptimizedDepInfoInRecord(metadata.optimized, (depInfo) => depInfo.file === file)) {
37221
+ addOptimizedDepInfo(metadata, 'chunks', {
37222
+ id,
37223
+ file,
37224
+ needsInterop: false,
37225
+ browserHash: metadata.browserHash
37226
+ });
37184
37227
  }
37185
37228
  }
37186
- if (alteredFiles) {
37187
- metadata.browserHash = newBrowserHash;
37188
- }
37189
- debug$c(`deps bundled in ${(perf_hooks.performance.now() - start).toFixed(2)}ms`);
37190
- return {
37191
- alteredFiles,
37192
- commit() {
37193
- if (alteredFiles) {
37194
- // Overwrite individual hashes with the new global browserHash, a full page reload is required
37195
- // New deps that ended up with a different hash replaced while doing analysis import are going to
37196
- // return a not found so the browser doesn't cache them. And will properly get loaded after the reload
37197
- for (const id in deps) {
37198
- metadata.optimized[id].browserHash = newBrowserHash;
37199
- }
37200
- }
37201
- // Write metadata file, delete `deps` folder and rename the new `processing` folder to `deps` in sync
37202
- commitProcessingDepsCacheSync();
37203
- processing.resolve();
37204
- },
37205
- cancel
37206
- };
37207
37229
  }
37230
+ const dataPath = path__default.join(processingCacheDir, '_metadata.json');
37231
+ writeFile(dataPath, stringifyOptimizedDepsMetadata(metadata, depsCacheDir));
37232
+ debug$c(`deps bundled in ${(perf_hooks.performance.now() - start).toFixed(2)}ms`);
37233
+ return {
37234
+ metadata,
37235
+ commit() {
37236
+ // Write metadata file, delete `deps` folder and rename the new `processing` folder to `deps` in sync
37237
+ commitProcessingDepsCacheSync();
37238
+ },
37239
+ cancel
37240
+ };
37208
37241
  function commitProcessingDepsCacheSync() {
37209
- // Rewire the file paths from the temporal processing dir to the final deps cache dir
37210
- const dataPath = path__default.join(processingCacheDir, '_metadata.json');
37211
- writeFile(dataPath, stringifyOptimizedDepsMetadata(metadata, depsCacheDir));
37212
37242
  // Processing is done, we can now replace the depsCacheDir with processingCacheDir
37243
+ // Rewire the file paths from the temporal processing dir to the final deps cache dir
37213
37244
  removeDirSync(depsCacheDir);
37214
37245
  fs__default.renameSync(processingCacheDir, depsCacheDir);
37215
37246
  }
37216
37247
  function cancel() {
37217
37248
  removeDirSync(processingCacheDir);
37218
- processing.resolve();
37219
37249
  }
37220
37250
  }
37221
37251
  function removeDirSync(dir) {
@@ -37262,27 +37292,12 @@ function newDepOptimizationProcessing() {
37262
37292
  function depsFromOptimizedDepInfo(depsInfo) {
37263
37293
  return Object.fromEntries(Object.entries(depsInfo).map((d) => [d[0], d[1].src]));
37264
37294
  }
37265
- function getHash(text) {
37266
- return require$$1$1.createHash('sha256').update(text).digest('hex').substring(0, 8);
37267
- }
37268
- function getOptimizedBrowserHash(hash, deps) {
37269
- return getHash(hash + JSON.stringify(deps));
37270
- }
37271
- function getCachedDepFilePath(id, depsCacheDir) {
37272
- return normalizePath$4(path__default.resolve(depsCacheDir, flattenId(id) + '.js'));
37273
- }
37274
37295
  function getOptimizedDepPath(id, config) {
37275
- return getCachedDepFilePath(id, getDepsCacheDir(config));
37296
+ return normalizePath$4(path__default.resolve(getDepsCacheDir(config), flattenId(id) + '.js'));
37276
37297
  }
37277
37298
  function getDepsCacheDir(config) {
37278
37299
  return normalizePath$4(path__default.resolve(config.cacheDir, 'deps'));
37279
37300
  }
37280
- function getProcessingDepFilePath(id, processingCacheDir) {
37281
- return normalizePath$4(path__default.resolve(processingCacheDir, flattenId(id) + '.js'));
37282
- }
37283
- function getProcessingDepPath(id, config) {
37284
- return getProcessingDepFilePath(id, getProcessingDepsCacheDir(config));
37285
- }
37286
37301
  function getProcessingDepsCacheDir(config) {
37287
37302
  return normalizePath$4(path__default.resolve(config.cacheDir, 'processing'));
37288
37303
  }
@@ -37306,7 +37321,7 @@ function createIsOptimizedDepUrl(config) {
37306
37321
  };
37307
37322
  }
37308
37323
  function parseOptimizedDepsMetadata(jsonMetadata, depsCacheDir) {
37309
- const metadata = JSON.parse(jsonMetadata, (key, value) => {
37324
+ const { hash, browserHash, optimized, chunks } = JSON.parse(jsonMetadata, (key, value) => {
37310
37325
  // Paths can be absolute or relative to the deps cache dir where
37311
37326
  // the _metadata.json is located
37312
37327
  if (key === 'file' || key === 'src') {
@@ -37314,18 +37329,34 @@ function parseOptimizedDepsMetadata(jsonMetadata, depsCacheDir) {
37314
37329
  }
37315
37330
  return value;
37316
37331
  });
37317
- const { browserHash } = metadata;
37318
- for (const o of Object.keys(metadata.optimized)) {
37319
- const depInfo = metadata.optimized[o];
37320
- depInfo.browserHash = browserHash;
37321
- }
37322
- metadata.chunks || (metadata.chunks = {}); // Support missing chunks for back compat
37323
- for (const o of Object.keys(metadata.chunks)) {
37324
- const depInfo = metadata.chunks[o];
37325
- depInfo.src = '';
37326
- depInfo.browserHash = browserHash;
37327
- }
37328
- metadata.discovered = {};
37332
+ if (!chunks ||
37333
+ Object.values(optimized).some((depInfo) => !depInfo.fileHash)) {
37334
+ // outdated _metadata.json version, ignore
37335
+ return;
37336
+ }
37337
+ const metadata = {
37338
+ hash,
37339
+ browserHash,
37340
+ optimized: {},
37341
+ discovered: {},
37342
+ chunks: {},
37343
+ depInfoList: []
37344
+ };
37345
+ for (const id of Object.keys(optimized)) {
37346
+ addOptimizedDepInfo(metadata, 'optimized', {
37347
+ ...optimized[id],
37348
+ id,
37349
+ browserHash
37350
+ });
37351
+ }
37352
+ for (const id of Object.keys(chunks)) {
37353
+ addOptimizedDepInfo(metadata, 'chunks', {
37354
+ ...chunks[id],
37355
+ id,
37356
+ browserHash,
37357
+ needsInterop: false
37358
+ });
37359
+ }
37329
37360
  return metadata;
37330
37361
  }
37331
37362
  /**
@@ -37335,44 +37366,38 @@ function parseOptimizedDepsMetadata(jsonMetadata, depsCacheDir) {
37335
37366
  * browserHash to allow long term caching
37336
37367
  */
37337
37368
  function stringifyOptimizedDepsMetadata(metadata, depsCacheDir) {
37338
- return JSON.stringify(metadata, (key, value) => {
37339
- if (key === 'discovered' || key === 'processing') {
37340
- return;
37341
- }
37369
+ const { hash, browserHash, optimized, chunks } = metadata;
37370
+ return JSON.stringify({
37371
+ hash,
37372
+ browserHash,
37373
+ optimized: Object.fromEntries(Object.values(optimized).map(({ id, src, file, fileHash, needsInterop }) => [
37374
+ id,
37375
+ {
37376
+ src,
37377
+ file,
37378
+ fileHash,
37379
+ needsInterop
37380
+ }
37381
+ ])),
37382
+ chunks: Object.fromEntries(Object.values(chunks).map(({ id, file }) => [id, { file }]))
37383
+ }, (key, value) => {
37384
+ // Paths can be absolute or relative to the deps cache dir where
37385
+ // the _metadata.json is located
37342
37386
  if (key === 'file' || key === 'src') {
37343
37387
  return normalizePath$4(path__default.relative(depsCacheDir, value));
37344
37388
  }
37345
- if (key === 'optimized') {
37346
- // Only remove browserHash for individual dep info
37347
- const cleaned = {};
37348
- for (const dep of Object.keys(value)) {
37349
- const { browserHash, ...c } = value[dep];
37350
- cleaned[dep] = c;
37351
- }
37352
- return cleaned;
37353
- }
37354
- if (key === 'optimized') {
37355
- return Object.keys(value).reduce((cleaned, dep) => {
37356
- const { browserHash, ...c } = value[dep];
37357
- cleaned[dep] = c;
37358
- return cleaned;
37359
- }, {});
37360
- }
37361
- if (key === 'chunks') {
37362
- return Object.keys(value).reduce((cleaned, dep) => {
37363
- const { browserHash, needsInterop, src, ...c } = value[dep];
37364
- cleaned[dep] = c;
37365
- return cleaned;
37366
- }, {});
37367
- }
37368
37389
  return value;
37369
37390
  }, 2);
37370
37391
  }
37392
+ function esbuildOutputFromId(outputs, id, cacheDirOutputPath) {
37393
+ const flatId = flattenId(id) + '.js';
37394
+ return outputs[normalizePath$4(path__default.relative(process.cwd(), path__default.join(cacheDirOutputPath, flatId)))];
37395
+ }
37371
37396
  // https://github.com/vitejs/vite/issues/1724#issuecomment-767619642
37372
37397
  // a list of modules that pretends to be ESM but still uses `require`.
37373
37398
  // this causes esbuild to wrap them as CJS even when its entry appears to be ESM.
37374
37399
  const KNOWN_INTEROP_IDS = new Set(['moment']);
37375
- function needsInterop(id, exportsData, outputs, cacheDirOutputPath) {
37400
+ function needsInterop(id, exportsData, output) {
37376
37401
  if (KNOWN_INTEROP_IDS.has(id)) {
37377
37402
  return true;
37378
37403
  }
@@ -37384,15 +37409,7 @@ function needsInterop(id, exportsData, outputs, cacheDirOutputPath) {
37384
37409
  // if a peer dependency used require() on a ESM dependency, esbuild turns the
37385
37410
  // ESM dependency's entry chunk into a single default export... detect
37386
37411
  // such cases by checking exports mismatch, and force interop.
37387
- const flatId = flattenId(id) + '.js';
37388
- let generatedExports;
37389
- for (const output in outputs) {
37390
- if (normalizePath$4(output) ===
37391
- normalizePath$4(path__default.join(cacheDirOutputPath, flatId))) {
37392
- generatedExports = outputs[output].exports;
37393
- break;
37394
- }
37395
- }
37412
+ const generatedExports = output.exports;
37396
37413
  if (!generatedExports ||
37397
37414
  (isSingleDefaultExport(generatedExports) && !isSingleDefaultExport(exports))) {
37398
37415
  return true;
@@ -37403,9 +37420,9 @@ function isSingleDefaultExport(exports) {
37403
37420
  return exports.length === 1 && exports[0] === 'default';
37404
37421
  }
37405
37422
  const lockfileFormats = ['package-lock.json', 'yarn.lock', 'pnpm-lock.yaml'];
37406
- function getDepHash(root, config) {
37423
+ function getDepHash(config) {
37407
37424
  var _a, _b, _c, _d, _e, _f;
37408
- let content = lookupFile(root, lockfileFormats) || '';
37425
+ let content = lookupFile(config.root, lockfileFormats) || '';
37409
37426
  // also take config into account
37410
37427
  // only a subset of config options that can affect dep optimization
37411
37428
  content += JSON.stringify({
@@ -37430,23 +37447,30 @@ function getDepHash(root, config) {
37430
37447
  }
37431
37448
  return value;
37432
37449
  });
37433
- return require$$1$1.createHash('sha256').update(content).digest('hex').substring(0, 8);
37450
+ return getHash(content);
37451
+ }
37452
+ function getOptimizedBrowserHash(hash, deps, timestamp = '') {
37453
+ return getHash(hash + JSON.stringify(deps) + timestamp);
37454
+ }
37455
+ function getHash(text) {
37456
+ return require$$1$1.createHash('sha256').update(text).digest('hex').substring(0, 8);
37434
37457
  }
37435
- function optimizeDepInfoFromFile(metadata, file) {
37436
- return (findFileInfo(metadata.optimized, file) ||
37437
- findFileInfo(metadata.discovered, file) ||
37438
- findFileInfo(metadata.chunks, file));
37458
+ function optimizedDepInfoFromId(metadata, id) {
37459
+ return (metadata.optimized[id] || metadata.discovered[id] || metadata.chunks[id]);
37439
37460
  }
37440
- function findFileInfo(dependenciesInfo, file) {
37461
+ function optimizedDepInfoFromFile(metadata, file) {
37462
+ return metadata.depInfoList.find((depInfo) => depInfo.file === file);
37463
+ }
37464
+ function findOptimizedDepInfoInRecord(dependenciesInfo, callbackFn) {
37441
37465
  for (const o of Object.keys(dependenciesInfo)) {
37442
37466
  const info = dependenciesInfo[o];
37443
- if (info.file === file) {
37467
+ if (callbackFn(info, o)) {
37444
37468
  return info;
37445
37469
  }
37446
37470
  }
37447
37471
  }
37448
37472
  async function optimizedDepNeedsInterop(metadata, file) {
37449
- const depInfo = optimizeDepInfoFromFile(metadata, file);
37473
+ const depInfo = optimizedDepInfoFromFile(metadata, file);
37450
37474
  if (!depInfo)
37451
37475
  return undefined;
37452
37476
  // Wait until the dependency has been pre-bundled
@@ -37456,17 +37480,25 @@ async function optimizedDepNeedsInterop(metadata, file) {
37456
37480
 
37457
37481
  var index$1 = {
37458
37482
  __proto__: null,
37483
+ debuggerViteDeps: debuggerViteDeps,
37459
37484
  optimizeDeps: optimizeDeps,
37460
- createOptimizeDepsRun: createOptimizeDepsRun,
37485
+ createOptimizedDepsMetadata: createOptimizedDepsMetadata,
37486
+ addOptimizedDepInfo: addOptimizedDepInfo,
37487
+ loadCachedDepOptimizationMetadata: loadCachedDepOptimizationMetadata,
37488
+ discoverProjectDependencies: discoverProjectDependencies,
37489
+ depsLogString: depsLogString,
37490
+ runOptimizeDeps: runOptimizeDeps,
37461
37491
  findKnownImports: findKnownImports,
37462
37492
  newDepOptimizationProcessing: newDepOptimizationProcessing,
37463
37493
  depsFromOptimizedDepInfo: depsFromOptimizedDepInfo,
37464
- getHash: getHash,
37465
37494
  getOptimizedDepPath: getOptimizedDepPath,
37466
37495
  getDepsCacheDir: getDepsCacheDir,
37467
37496
  isOptimizedDepFile: isOptimizedDepFile,
37468
37497
  createIsOptimizedDepUrl: createIsOptimizedDepUrl,
37469
- optimizeDepInfoFromFile: optimizeDepInfoFromFile,
37498
+ getDepHash: getDepHash,
37499
+ getHash: getHash,
37500
+ optimizedDepInfoFromId: optimizedDepInfoFromId,
37501
+ optimizedDepInfoFromFile: optimizedDepInfoFromFile,
37470
37502
  optimizedDepNeedsInterop: optimizedDepNeedsInterop
37471
37503
  };
37472
37504
 
@@ -37720,8 +37752,8 @@ function resolvePlugin(baseOptions) {
37720
37752
  server = _server;
37721
37753
  isOptimizedDepUrl = createIsOptimizedDepUrl(server.config);
37722
37754
  },
37723
- resolveId(id, importer, resolveOpts) {
37724
- var _a, _b, _c, _d;
37755
+ async resolveId(id, importer, resolveOpts) {
37756
+ var _a, _b, _c, _d, _e;
37725
37757
  const ssr = (resolveOpts === null || resolveOpts === void 0 ? void 0 : resolveOpts.ssr) === true;
37726
37758
  if (id.startsWith(browserExternalId)) {
37727
37759
  return id;
@@ -37736,16 +37768,18 @@ function resolvePlugin(baseOptions) {
37736
37768
  const options = {
37737
37769
  isRequire,
37738
37770
  ...baseOptions,
37739
- isFromTsImporter: isTsRequest(importer !== null && importer !== void 0 ? importer : '')
37771
+ isFromTsImporter: isTsRequest(importer !== null && importer !== void 0 ? importer : ''),
37772
+ scan: (_d = resolveOpts === null || resolveOpts === void 0 ? void 0 : resolveOpts.scan) !== null && _d !== void 0 ? _d : baseOptions.scan
37740
37773
  };
37741
37774
  let res;
37742
37775
  // resolve pre-bundled deps requests, these could be resolved by
37743
37776
  // tryFileResolve or /fs/ resolution but these files may not yet
37744
37777
  // exists if we are in the middle of a deps re-processing
37745
37778
  if (asSrc && (isOptimizedDepUrl === null || isOptimizedDepUrl === void 0 ? void 0 : isOptimizedDepUrl(id))) {
37746
- return id.startsWith(FS_PREFIX)
37779
+ const optimizedPath = id.startsWith(FS_PREFIX)
37747
37780
  ? fsPathFromId(id)
37748
37781
  : normalizePath$4(ensureVolumeInPath(path__default.resolve(root, id.slice(1))));
37782
+ return optimizedPath;
37749
37783
  }
37750
37784
  // explicit fs paths that starts with /@fs/*
37751
37785
  if (asSrc && id.startsWith(FS_PREFIX)) {
@@ -37771,11 +37805,12 @@ function resolvePlugin(baseOptions) {
37771
37805
  const fsPath = path__default.resolve(basedir, id);
37772
37806
  // handle browser field mapping for relative imports
37773
37807
  const normalizedFsPath = normalizePath$4(fsPath);
37774
- if (server && isOptimizedDepFile(normalizedFsPath, server.config)) {
37808
+ if ((server === null || server === void 0 ? void 0 : server._optimizedDeps) &&
37809
+ isOptimizedDepFile(normalizedFsPath, server.config)) {
37775
37810
  // Optimized files could not yet exist in disk, resolve to the full path
37776
37811
  // Inject the current browserHash version if the path doesn't have one
37777
37812
  if (!normalizedFsPath.match(DEP_VERSION_RE)) {
37778
- const browserHash = (_d = optimizeDepInfoFromFile(server._optimizeDepsMetadata, normalizedFsPath)) === null || _d === void 0 ? void 0 : _d.browserHash;
37813
+ const browserHash = (_e = optimizedDepInfoFromFile(server._optimizedDeps.metadata, normalizedFsPath)) === null || _e === void 0 ? void 0 : _e.browserHash;
37779
37814
  if (browserHash) {
37780
37815
  return injectQuery(normalizedFsPath, `v=${browserHash}`);
37781
37816
  }
@@ -37832,7 +37867,8 @@ function resolvePlugin(baseOptions) {
37832
37867
  if (asSrc &&
37833
37868
  server &&
37834
37869
  !ssr &&
37835
- (res = tryOptimizedResolve(id, server, importer))) {
37870
+ !options.scan &&
37871
+ (res = await tryOptimizedResolve(id, server, importer))) {
37836
37872
  return res;
37837
37873
  }
37838
37874
  if (targetWeb &&
@@ -37967,7 +38003,7 @@ function tryResolveFile(file, postfix, options, tryIndex, targetWeb, tryPrefix,
37967
38003
  }
37968
38004
  const idToPkgMap = new Map();
37969
38005
  function tryNodeResolve(id, importer, options, targetWeb, server, ssr) {
37970
- var _a, _b;
38006
+ var _a;
37971
38007
  const { root, dedupe, isBuild, preserveSymlinks, packageCache } = options;
37972
38008
  // split id by last '>' for nested selected packages, for example:
37973
38009
  // 'foo > bar > baz' => 'foo > bar' & 'baz'
@@ -38058,7 +38094,8 @@ function tryNodeResolve(id, importer, options, targetWeb, server, ssr) {
38058
38094
  else {
38059
38095
  if (!resolved.includes('node_modules') || // linked
38060
38096
  !server || // build
38061
- !server._registerMissingImport // initial esbuild scan phase
38097
+ !server._optimizedDeps || // resolving before listening to the server
38098
+ options.scan // initial esbuild scan phase
38062
38099
  ) {
38063
38100
  return { id: resolved };
38064
38101
  }
@@ -38076,7 +38113,7 @@ function tryNodeResolve(id, importer, options, targetWeb, server, ssr) {
38076
38113
  // can cache it without re-validation, but only do so for known js types.
38077
38114
  // otherwise we may introduce duplicated modules for externalized files
38078
38115
  // from pre-bundled deps.
38079
- const versionHash = (_b = server._optimizeDepsMetadata) === null || _b === void 0 ? void 0 : _b.browserHash;
38116
+ const versionHash = server._optimizedDeps.metadata.browserHash;
38080
38117
  if (versionHash && isJsType) {
38081
38118
  resolved = injectQuery(resolved, `v=${versionHash}`);
38082
38119
  }
@@ -38084,31 +38121,30 @@ function tryNodeResolve(id, importer, options, targetWeb, server, ssr) {
38084
38121
  else {
38085
38122
  // this is a missing import, queue optimize-deps re-run and
38086
38123
  // get a resolved its optimized info
38087
- const optimizedInfo = server._registerMissingImport(id, resolved, ssr);
38124
+ const optimizedInfo = server._optimizedDeps.registerMissingImport(id, resolved);
38088
38125
  resolved = getOptimizedUrl(optimizedInfo);
38089
38126
  }
38090
38127
  return { id: resolved };
38091
38128
  }
38092
38129
  }
38093
38130
  const getOptimizedUrl = (optimizedData) => `${optimizedData.file}?v=${optimizedData.browserHash}`;
38094
- function tryOptimizedResolve(id, server, importer) {
38095
- const depData = server._optimizeDepsMetadata;
38096
- if (!depData)
38131
+ async function tryOptimizedResolve(id, server, importer) {
38132
+ const optimizedDeps = server._optimizedDeps;
38133
+ if (!optimizedDeps)
38097
38134
  return;
38098
- // check if id has been optimized
38099
- const isOptimized = depData.optimized[id];
38100
- if (isOptimized) {
38101
- return getOptimizedUrl(isOptimized);
38102
- }
38103
- const isChunk = depData.chunks[id];
38104
- if (isChunk) {
38105
- return getOptimizedUrl(isChunk);
38135
+ await optimizedDeps.scanProcessing;
38136
+ const depInfo = optimizedDepInfoFromId(optimizedDeps.metadata, id);
38137
+ if (depInfo) {
38138
+ return getOptimizedUrl(depInfo);
38106
38139
  }
38107
38140
  if (!importer)
38108
38141
  return;
38109
38142
  // further check if id is imported by nested dependency
38110
38143
  let resolvedSrc;
38111
- for (const [pkgPath, optimizedData] of Object.entries(depData.optimized)) {
38144
+ for (const optimizedData of optimizedDeps.metadata.depInfoList) {
38145
+ if (!optimizedData.src)
38146
+ continue; // Ignore chunks
38147
+ const pkgPath = optimizedData.id;
38112
38148
  // check for scenarios, e.g.
38113
38149
  // pkgPath => "my-lib > foo"
38114
38150
  // id => "foo"
@@ -38157,7 +38193,9 @@ function resolvePackageEntry(id, { dir, data, setResolvedCache, getResolvedCache
38157
38193
  : isObject$4(data.browser) && data.browser['.'];
38158
38194
  if (browserEntry) {
38159
38195
  // check if the package also has a "module" field.
38160
- if (typeof data.module === 'string' && data.module !== browserEntry) {
38196
+ if (!options.isRequire &&
38197
+ typeof data.module === 'string' &&
38198
+ data.module !== browserEntry) {
38161
38199
  // if both are present, we may have a problem: some package points both
38162
38200
  // to ESM, with "module" targeting Node.js, while some packages points
38163
38201
  // "module" to browser ESM and "browser" to UMD.
@@ -44841,7 +44879,7 @@ async function getCertificate(cacheDir) {
44841
44879
  return content;
44842
44880
  }
44843
44881
  catch {
44844
- const content = (await Promise.resolve().then(function () { return require('./dep-00d6c1ad.js'); })).createCertificate();
44882
+ const content = (await Promise.resolve().then(function () { return require('./dep-368120f8.js'); })).createCertificate();
44845
44883
  fs$n.promises
44846
44884
  .mkdir(cacheDir, { recursive: true })
44847
44885
  .then(() => fs$n.promises.writeFile(cachePath, content))
@@ -49069,12 +49107,24 @@ function abortHandshake(socket, code, message, headers) {
49069
49107
  }
49070
49108
 
49071
49109
  const HMR_HEADER = 'vite-hmr';
49110
+ const wsServerEvents = [
49111
+ 'connection',
49112
+ 'error',
49113
+ 'headers',
49114
+ 'listening',
49115
+ 'message'
49116
+ ];
49072
49117
  function createWebSocketServer(server, config, httpsOptions) {
49073
49118
  let wss;
49074
49119
  let httpsServer = undefined;
49075
49120
  const hmr = isObject$4(config.server.hmr) && config.server.hmr;
49076
- const wsServer = (hmr && hmr.server) ||
49077
- ((!(hmr && hmr.port) || hmr.port !== config.server.port) && server);
49121
+ const hmrServer = hmr && hmr.server;
49122
+ const hmrPort = hmr && hmr.port;
49123
+ // TODO: the main server port may not have been chosen yet as it may use the next available
49124
+ const portsAreCompatible = !hmrPort || hmrPort === config.server.port;
49125
+ const wsServer = hmrServer || (portsAreCompatible && server);
49126
+ const customListeners = new Map();
49127
+ const clientsMap = new WeakMap();
49078
49128
  if (wsServer) {
49079
49129
  wss = new websocketServer({ noServer: true });
49080
49130
  wsServer.on('upgrade', (req, socket, head) => {
@@ -49087,7 +49137,7 @@ function createWebSocketServer(server, config, httpsOptions) {
49087
49137
  }
49088
49138
  else {
49089
49139
  const websocketServerOptions = {};
49090
- const port = (hmr && hmr.port) || 24678;
49140
+ const port = hmrPort || 24678;
49091
49141
  const host = (hmr && hmr.host) || undefined;
49092
49142
  if (httpsOptions) {
49093
49143
  // if we're serving the middlewares over https, the ws library doesn't support automatically creating an https server, so we need to do it ourselves
@@ -49117,6 +49167,22 @@ function createWebSocketServer(server, config, httpsOptions) {
49117
49167
  wss = new websocketServer(websocketServerOptions);
49118
49168
  }
49119
49169
  wss.on('connection', (socket) => {
49170
+ socket.on('message', (raw) => {
49171
+ if (!customListeners.size)
49172
+ return;
49173
+ let parsed;
49174
+ try {
49175
+ parsed = JSON.parse(String(raw));
49176
+ }
49177
+ catch { }
49178
+ if (!parsed || parsed.type !== 'custom' || !parsed.event)
49179
+ return;
49180
+ const listeners = customListeners.get(parsed.event);
49181
+ if (!(listeners === null || listeners === void 0 ? void 0 : listeners.size))
49182
+ return;
49183
+ const client = getSocketClent(socket);
49184
+ listeners.forEach((listener) => listener(parsed.data, client));
49185
+ });
49120
49186
  socket.send(JSON.stringify({ type: 'connected' }));
49121
49187
  if (bufferedError) {
49122
49188
  socket.send(JSON.stringify(bufferedError));
@@ -49128,15 +49194,70 @@ function createWebSocketServer(server, config, httpsOptions) {
49128
49194
  config.logger.error(colors$1.red(`WebSocket server error:\n${e.stack || e.message}`), { error: e });
49129
49195
  }
49130
49196
  });
49197
+ // Provide a wrapper to the ws client so we can send messages in JSON format
49198
+ // To be consistent with server.ws.send
49199
+ function getSocketClent(socket) {
49200
+ if (!clientsMap.has(socket)) {
49201
+ clientsMap.set(socket, {
49202
+ send: (...args) => {
49203
+ let payload;
49204
+ if (typeof args[0] === 'string') {
49205
+ payload = {
49206
+ type: 'custom',
49207
+ event: args[0],
49208
+ data: args[1]
49209
+ };
49210
+ }
49211
+ else {
49212
+ payload = args[0];
49213
+ }
49214
+ socket.send(JSON.stringify(payload));
49215
+ },
49216
+ socket
49217
+ });
49218
+ }
49219
+ return clientsMap.get(socket);
49220
+ }
49131
49221
  // On page reloads, if a file fails to compile and returns 500, the server
49132
49222
  // sends the error payload before the client connection is established.
49133
49223
  // If we have no open clients, buffer the error and send it to the next
49134
49224
  // connected client.
49135
49225
  let bufferedError = null;
49136
49226
  return {
49137
- on: wss.on.bind(wss),
49138
- off: wss.off.bind(wss),
49139
- send(payload) {
49227
+ on: ((event, fn) => {
49228
+ if (wsServerEvents.includes(event))
49229
+ wss.on(event, fn);
49230
+ else {
49231
+ if (!customListeners.has(event)) {
49232
+ customListeners.set(event, new Set());
49233
+ }
49234
+ customListeners.get(event).add(fn);
49235
+ }
49236
+ }),
49237
+ off: ((event, fn) => {
49238
+ var _a;
49239
+ if (wsServerEvents.includes(event)) {
49240
+ wss.off(event, fn);
49241
+ }
49242
+ else {
49243
+ (_a = customListeners.get(event)) === null || _a === void 0 ? void 0 : _a.delete(fn);
49244
+ }
49245
+ }),
49246
+ get clients() {
49247
+ return new Set(Array.from(wss.clients).map(getSocketClent));
49248
+ },
49249
+ send(...args) {
49250
+ let payload;
49251
+ if (typeof args[0] === 'string') {
49252
+ payload = {
49253
+ type: 'custom',
49254
+ event: args[0],
49255
+ data: args[1]
49256
+ };
49257
+ }
49258
+ else {
49259
+ payload = args[0];
49260
+ }
49140
49261
  if (payload.type === 'error' && !wss.clients.size) {
49141
49262
  bufferedError = payload;
49142
49263
  return;
@@ -52866,8 +52987,9 @@ function optimizedDepsPlugin() {
52866
52987
  server = _server;
52867
52988
  },
52868
52989
  async load(id) {
52990
+ var _a, _b;
52869
52991
  if (server && isOptimizedDepFile(id, server.config)) {
52870
- const metadata = server === null || server === void 0 ? void 0 : server._optimizeDepsMetadata;
52992
+ const metadata = (_a = server === null || server === void 0 ? void 0 : server._optimizedDeps) === null || _a === void 0 ? void 0 : _a.metadata;
52871
52993
  if (metadata) {
52872
52994
  const file = cleanUrl(id);
52873
52995
  const versionMatch = id.match(DEP_VERSION_RE);
@@ -52875,7 +52997,7 @@ function optimizedDepsPlugin() {
52875
52997
  ? versionMatch[1].split('=')[1]
52876
52998
  : undefined;
52877
52999
  // Search in both the currently optimized and newly discovered deps
52878
- const info = optimizeDepInfoFromFile(metadata, file);
53000
+ const info = optimizedDepInfoFromFile(metadata, file);
52879
53001
  if (info) {
52880
53002
  if (browserHash && info.browserHash !== browserHash) {
52881
53003
  throwOutdatedRequest(id);
@@ -52891,9 +53013,9 @@ function optimizedDepsPlugin() {
52891
53013
  throwProcessingError(id);
52892
53014
  return;
52893
53015
  }
52894
- const newMetadata = server._optimizeDepsMetadata;
53016
+ const newMetadata = (_b = server._optimizedDeps) === null || _b === void 0 ? void 0 : _b.metadata;
52895
53017
  if (metadata !== newMetadata) {
52896
- const currentInfo = optimizeDepInfoFromFile(newMetadata, file);
53018
+ const currentInfo = optimizedDepInfoFromFile(newMetadata, file);
52897
53019
  if (info.browserHash !== (currentInfo === null || currentInfo === void 0 ? void 0 : currentInfo.browserHash)) {
52898
53020
  throwOutdatedRequest(id);
52899
53021
  }
@@ -53098,7 +53220,7 @@ function getHtmlFilename(url, server) {
53098
53220
  return decodeURIComponent(fsPathFromId(url));
53099
53221
  }
53100
53222
  else {
53101
- return decodeURIComponent(path__default.join(server.config.root, url.slice(1)));
53223
+ return decodeURIComponent(normalizePath$4(path__default.join(server.config.root, url.slice(1))));
53102
53224
  }
53103
53225
  }
53104
53226
  const startsWithSingleSlashRE = /^\/(?!\/)/;
@@ -53126,7 +53248,7 @@ const processNodeUrl = (node, s, config, htmlPath, originalUrl, moduleGraph) =>
53126
53248
  s.overwrite(node.value.loc.start.offset, node.value.loc.end.offset, `"${path__default.posix.join(path__default.posix.relative(originalUrl, '/'), url.slice(1))}"`, { contentOnly: true });
53127
53249
  }
53128
53250
  };
53129
- const devHtmlHook = async (html, { path: htmlPath, server, originalUrl }) => {
53251
+ const devHtmlHook = async (html, { path: htmlPath, filename, server, originalUrl }) => {
53130
53252
  const { config, moduleGraph } = server;
53131
53253
  const base = config.base || '/';
53132
53254
  const s = new MagicString$1(html);
@@ -53135,11 +53257,15 @@ const devHtmlHook = async (html, { path: htmlPath, server, originalUrl }) => {
53135
53257
  const addInlineModule = (node, ext) => {
53136
53258
  inlineModuleIndex++;
53137
53259
  const url = filePath.replace(normalizePath$4(config.root), '');
53138
- const contents = node.children
53139
- .map((child) => child.content || '')
53140
- .join('');
53260
+ const contentNode = node.children[0];
53261
+ const code = contentNode.content;
53262
+ const map = new MagicString$1(html)
53263
+ .snip(contentNode.loc.start.offset, contentNode.loc.end.offset)
53264
+ .generateMap({ hires: true });
53265
+ map.sources = [filename];
53266
+ map.file = filename;
53141
53267
  // add HTML Proxy to Map
53142
- addToHTMLProxyCache(config, url, inlineModuleIndex, contents);
53268
+ addToHTMLProxyCache(config, url, inlineModuleIndex, { code, map });
53143
53269
  // inline js module. convert to src="proxy"
53144
53270
  const modulePath = `${config.base + htmlPath.slice(1)}?html-proxy&index=${inlineModuleIndex}.${ext}`;
53145
53271
  // invalidate the module so the newly cached contents will be served
@@ -53159,7 +53285,7 @@ const devHtmlHook = async (html, { path: htmlPath, server, originalUrl }) => {
53159
53285
  if (src) {
53160
53286
  processNodeUrl(src, s, config, htmlPath, originalUrl, moduleGraph);
53161
53287
  }
53162
- else if (isModule) {
53288
+ else if (isModule && node.children.length) {
53163
53289
  addInlineModule(node, 'js');
53164
53290
  }
53165
53291
  }
@@ -55768,16 +55894,33 @@ function isPrimitive(value) {
55768
55894
  return !value || (typeof value !== 'object' && typeof value !== 'function');
55769
55895
  }
55770
55896
 
55897
+ const isDebugEnabled = _debug('vite:deps').enabled;
55771
55898
  /**
55772
55899
  * The amount to wait for requests to register newly found dependencies before triggering
55773
55900
  * a re-bundle + page reload
55774
55901
  */
55775
55902
  const debounceMs = 100;
55776
- function createMissingImporterRegisterFn(server, initialProcessingPromise) {
55777
- const { logger } = server.config;
55778
- let metadata = server._optimizeDepsMetadata;
55903
+ function createOptimizedDeps(server) {
55904
+ const { config } = server;
55905
+ const { logger } = config;
55906
+ const sessionTimestamp = Date.now().toString();
55907
+ const cachedMetadata = loadCachedDepOptimizationMetadata(config);
55908
+ const optimizedDeps = {
55909
+ metadata: cachedMetadata || createOptimizedDepsMetadata(config, sessionTimestamp),
55910
+ registerMissingImport
55911
+ };
55779
55912
  let handle;
55780
55913
  let newDepsDiscovered = false;
55914
+ let newDepsToLog = [];
55915
+ let newDepsToLogHandle;
55916
+ const logNewlyDiscoveredDeps = () => {
55917
+ if (newDepsToLog.length) {
55918
+ config.logger.info(colors$1.green(`✨ new dependencies optimized: ${depsLogString(newDepsToLog)}`), {
55919
+ timestamp: true
55920
+ });
55921
+ newDepsToLog = [];
55922
+ }
55923
+ };
55781
55924
  let depOptimizationProcessing = newDepOptimizationProcessing();
55782
55925
  let depOptimizationProcessingQueue = [];
55783
55926
  const resolveEnqueuedProcessingPromises = () => {
@@ -55788,28 +55931,59 @@ function createMissingImporterRegisterFn(server, initialProcessingPromise) {
55788
55931
  depOptimizationProcessingQueue = [];
55789
55932
  };
55790
55933
  let enqueuedRerun;
55791
- let currentlyProcessing = true;
55792
- initialProcessingPromise.then(() => {
55793
- currentlyProcessing = false;
55794
- enqueuedRerun === null || enqueuedRerun === void 0 ? void 0 : enqueuedRerun();
55795
- });
55796
- async function rerun(ssr) {
55797
- // debounce time to wait for new missing deps finished, issue a new
55798
- // optimization of deps (both old and newly found) once the previous
55799
- // optimizeDeps processing is finished
55934
+ let currentlyProcessing = false;
55935
+ // If there wasn't a cache or it is outdated, perform a fast scan with esbuild
55936
+ // to quickly find project dependencies and do a first optimize run
55937
+ if (!cachedMetadata) {
55938
+ currentlyProcessing = true;
55939
+ const scanPhaseProcessing = newDepOptimizationProcessing();
55940
+ optimizedDeps.scanProcessing = scanPhaseProcessing.promise;
55941
+ const warmUp = async () => {
55942
+ try {
55943
+ debuggerViteDeps(colors$1.green(`scanning for dependencies...`), {
55944
+ timestamp: true
55945
+ });
55946
+ const { metadata } = optimizedDeps;
55947
+ const discovered = await discoverProjectDependencies(config, sessionTimestamp);
55948
+ // Respect the scan phase discover order to improve reproducibility
55949
+ for (const depInfo of Object.values(discovered)) {
55950
+ addOptimizedDepInfo(metadata, 'discovered', {
55951
+ ...depInfo,
55952
+ processing: depOptimizationProcessing.promise
55953
+ });
55954
+ }
55955
+ debuggerViteDeps(colors$1.green(`dependencies found: ${depsLogString(Object.keys(discovered))}`), {
55956
+ timestamp: true
55957
+ });
55958
+ scanPhaseProcessing.resolve();
55959
+ optimizedDeps.scanProcessing = undefined;
55960
+ runOptimizer();
55961
+ }
55962
+ catch (e) {
55963
+ logger.error(e.message);
55964
+ if (optimizedDeps.scanProcessing) {
55965
+ scanPhaseProcessing.resolve();
55966
+ optimizedDeps.scanProcessing = undefined;
55967
+ }
55968
+ }
55969
+ };
55970
+ setTimeout(warmUp, 0);
55971
+ }
55972
+ async function runOptimizer(isRerun = false) {
55973
+ // Ensure that rerun is called sequentially
55974
+ enqueuedRerun = undefined;
55975
+ currentlyProcessing = true;
55976
+ // Ensure that a rerun will not be issued for current discovered deps
55977
+ if (handle)
55978
+ clearTimeout(handle);
55800
55979
  // a succesful completion of the optimizeDeps rerun will end up
55801
55980
  // creating new bundled version of all current and discovered deps
55802
55981
  // in the cache dir and a new metadata info object assigned
55803
- // to server._optimizeDepsMetadata. A fullReload is only issued if
55982
+ // to optimizeDeps.metadata. A fullReload is only issued if
55804
55983
  // the previous bundled dependencies have changed.
55805
- // if the rerun fails, server._optimizeDepsMetadata remains untouched,
55984
+ // if the rerun fails, optimizeDeps.metadata remains untouched,
55806
55985
  // current discovered deps are cleaned, and a fullReload is issued
55807
- // Ensure that rerun is called sequentially
55808
- enqueuedRerun = undefined;
55809
- currentlyProcessing = true;
55810
- logger.info(colors$1.yellow(`new dependencies found: ${Object.keys(metadata.discovered).join(', ')}, updating...`), {
55811
- timestamp: true
55812
- });
55986
+ let { metadata } = optimizedDeps;
55813
55987
  // All deps, previous known and newly discovered are rebundled,
55814
55988
  // respect insertion order to keep the metadata file stable
55815
55989
  const newDeps = {};
@@ -55817,9 +55991,10 @@ function createMissingImporterRegisterFn(server, initialProcessingPromise) {
55817
55991
  for (const dep of Object.keys(metadata.optimized)) {
55818
55992
  newDeps[dep] = { ...metadata.optimized[dep] };
55819
55993
  }
55820
- // Don't clone discovered info objects, they are read after awaited
55821
55994
  for (const dep of Object.keys(metadata.discovered)) {
55822
- newDeps[dep] = metadata.discovered[dep];
55995
+ // Clone the discovered info discarding its processing promise
55996
+ const { processing, ...info } = metadata.discovered[dep];
55997
+ newDeps[dep] = info;
55823
55998
  }
55824
55999
  newDepsDiscovered = false;
55825
56000
  // Add the current depOptimizationProcessing to the queue, these
@@ -55828,32 +56003,70 @@ function createMissingImporterRegisterFn(server, initialProcessingPromise) {
55828
56003
  // Create a new promise for the next rerun, discovered missing
55829
56004
  // dependencies will be asigned this promise from this point
55830
56005
  depOptimizationProcessing = newDepOptimizationProcessing();
55831
- let newData = null;
55832
56006
  try {
55833
- const optimizeDeps = await createOptimizeDepsRun(server.config, true, false, metadata, newDeps, ssr);
55834
- const processingResult = await optimizeDeps.run();
56007
+ const processingResult = await runOptimizeDeps(config, newDeps);
56008
+ const newData = processingResult.metadata;
56009
+ // After a re-optimization, if the internal bundled chunks change a full page reload
56010
+ // is required. If the files are stable, we can avoid the reload that is expensive
56011
+ // for large applications. Comparing their fileHash we can find out if it is safe to
56012
+ // keep the current browser state.
56013
+ const needsReload = metadata.hash !== newData.hash ||
56014
+ Object.keys(metadata.optimized).some((dep) => {
56015
+ return (metadata.optimized[dep].fileHash !== newData.optimized[dep].fileHash);
56016
+ });
55835
56017
  const commitProcessing = () => {
55836
56018
  processingResult.commit();
55837
- newData = optimizeDeps.metadata;
55838
- // update ssr externals
55839
- if (ssr) {
55840
- server._ssrExternals = resolveSSRExternal(server.config, Object.keys(newData.optimized));
55841
- }
55842
56019
  // While optimizeDeps is running, new missing deps may be discovered,
55843
56020
  // in which case they will keep being added to metadata.discovered
55844
- for (const o of Object.keys(metadata.discovered)) {
55845
- if (!newData.optimized[o]) {
55846
- newData.discovered[o] = metadata.discovered[o];
56021
+ for (const id in metadata.discovered) {
56022
+ if (!newData.optimized[id]) {
56023
+ addOptimizedDepInfo(newData, 'discovered', metadata.discovered[id]);
55847
56024
  }
55848
56025
  }
55849
- metadata = server._optimizeDepsMetadata = newData;
56026
+ // If we don't reload the page, we need to keep browserHash stable
56027
+ if (!needsReload) {
56028
+ newData.browserHash = metadata.browserHash;
56029
+ for (const dep in newData.chunks) {
56030
+ newData.chunks[dep].browserHash = metadata.browserHash;
56031
+ }
56032
+ for (const dep in newData.optimized) {
56033
+ newData.optimized[dep].browserHash = (metadata.optimized[dep] || metadata.discovered[dep]).browserHash;
56034
+ }
56035
+ }
56036
+ // Commit hash and needsInterop changes to the discovered deps info
56037
+ // object. Allow for code to await for the discovered processing promise
56038
+ // and use the information in the same object
56039
+ for (const o in newData.optimized) {
56040
+ const discovered = metadata.discovered[o];
56041
+ if (discovered) {
56042
+ const optimized = newData.optimized[o];
56043
+ discovered.browserHash = optimized.browserHash;
56044
+ discovered.fileHash = optimized.fileHash;
56045
+ discovered.needsInterop = optimized.needsInterop;
56046
+ discovered.processing = undefined;
56047
+ }
56048
+ }
56049
+ if (isRerun) {
56050
+ newDepsToLog.push(...Object.keys(newData.optimized).filter((dep) => !metadata.optimized[dep]));
56051
+ }
56052
+ metadata = optimizedDeps.metadata = newData;
55850
56053
  resolveEnqueuedProcessingPromises();
55851
56054
  };
55852
- if (!processingResult.alteredFiles) {
56055
+ if (!needsReload) {
55853
56056
  commitProcessing();
55854
- logger.info(colors$1.green(`✨ new dependencies pre-bundled...`), {
55855
- timestamp: true
55856
- });
56057
+ if (!isDebugEnabled) {
56058
+ if (newDepsToLogHandle)
56059
+ clearTimeout(newDepsToLogHandle);
56060
+ newDepsToLogHandle = setTimeout(() => {
56061
+ newDepsToLogHandle = undefined;
56062
+ logNewlyDiscoveredDeps();
56063
+ }, 2 * debounceMs);
56064
+ }
56065
+ else {
56066
+ debuggerViteDeps(colors$1.green(`✨ optimized dependencies unchanged`), {
56067
+ timestamp: true
56068
+ });
56069
+ }
55857
56070
  }
55858
56071
  else {
55859
56072
  if (newDepsDiscovered) {
@@ -55862,13 +56075,19 @@ function createMissingImporterRegisterFn(server, initialProcessingPromise) {
55862
56075
  // We don't resolve the processing promise, as they will be resolved
55863
56076
  // once a rerun is committed
55864
56077
  processingResult.cancel();
55865
- logger.info(colors$1.green(`✨ delaying reload as new dependencies have been found...`), {
56078
+ debuggerViteDeps(colors$1.green(`✨ delaying reload as new dependencies have been found...`), {
55866
56079
  timestamp: true
55867
56080
  });
55868
56081
  }
55869
56082
  else {
55870
56083
  commitProcessing();
55871
- logger.info(colors$1.green(`✨ dependencies updated, reloading page...`), {
56084
+ if (!isDebugEnabled) {
56085
+ if (newDepsToLogHandle)
56086
+ clearTimeout(newDepsToLogHandle);
56087
+ newDepsToLogHandle = undefined;
56088
+ logNewlyDiscoveredDeps();
56089
+ }
56090
+ logger.info(colors$1.green(`✨ optimized dependencies changed. reloading`), {
55872
56091
  timestamp: true
55873
56092
  });
55874
56093
  fullReload();
@@ -55896,14 +56115,25 @@ function createMissingImporterRegisterFn(server, initialProcessingPromise) {
55896
56115
  path: '*'
55897
56116
  });
55898
56117
  }
55899
- const discoveredTimestamp = Date.now();
56118
+ async function rerun() {
56119
+ // debounce time to wait for new missing deps finished, issue a new
56120
+ // optimization of deps (both old and newly found) once the previous
56121
+ // optimizeDeps processing is finished
56122
+ const deps = Object.keys(optimizedDeps.metadata.discovered);
56123
+ const depsString = depsLogString(deps);
56124
+ debuggerViteDeps(colors$1.green(`new dependencies found: ${depsString}`), {
56125
+ timestamp: true
56126
+ });
56127
+ runOptimizer(true);
56128
+ }
55900
56129
  function getDiscoveredBrowserHash(hash, deps, missing) {
55901
- return getHash(hash +
55902
- JSON.stringify(deps) +
55903
- JSON.stringify(missing) +
55904
- discoveredTimestamp);
56130
+ return getHash(hash + JSON.stringify(deps) + JSON.stringify(missing) + sessionTimestamp);
55905
56131
  }
55906
- return function registerMissingImport(id, resolved, ssr) {
56132
+ function registerMissingImport(id, resolved, ssr) {
56133
+ if (optimizedDeps.scanProcessing) {
56134
+ config.logger.error('Vite internal error: registering missing import before initial scanning is over');
56135
+ }
56136
+ const { metadata } = optimizedDeps;
55907
56137
  const optimized = metadata.optimized[id];
55908
56138
  if (optimized) {
55909
56139
  return optimized;
@@ -55919,7 +56149,8 @@ function createMissingImporterRegisterFn(server, initialProcessingPromise) {
55919
56149
  return missing;
55920
56150
  }
55921
56151
  newDepsDiscovered = true;
55922
- missing = metadata.discovered[id] = {
56152
+ missing = addOptimizedDepInfo(metadata, 'discovered', {
56153
+ id,
55923
56154
  file: getOptimizedDepPath(id, server.config),
55924
56155
  src: resolved,
55925
56156
  // Assing a browserHash to this missing dependency that is unique to
@@ -55930,15 +56161,18 @@ function createMissingImporterRegisterFn(server, initialProcessingPromise) {
55930
56161
  // loading of this pre-bundled dep needs to await for its processing
55931
56162
  // promise to be resolved
55932
56163
  processing: depOptimizationProcessing.promise
55933
- };
56164
+ });
55934
56165
  // Debounced rerun, let other missing dependencies be discovered before
55935
56166
  // the running next optimizeDeps
55936
56167
  enqueuedRerun = undefined;
55937
56168
  if (handle)
55938
56169
  clearTimeout(handle);
56170
+ if (newDepsToLogHandle)
56171
+ clearTimeout(newDepsToLogHandle);
56172
+ newDepsToLogHandle = undefined;
55939
56173
  handle = setTimeout(() => {
55940
56174
  handle = undefined;
55941
- enqueuedRerun = () => rerun(ssr);
56175
+ enqueuedRerun = rerun;
55942
56176
  if (!currentlyProcessing) {
55943
56177
  enqueuedRerun();
55944
56178
  }
@@ -55946,7 +56180,8 @@ function createMissingImporterRegisterFn(server, initialProcessingPromise) {
55946
56180
  // Return the path for the optimized bundle, this path is known before
55947
56181
  // esbuild is run to generate the pre-bundle
55948
56182
  return missing;
55949
- };
56183
+ }
56184
+ return optimizedDeps;
55950
56185
  }
55951
56186
 
55952
56187
  // https://github.com/vitejs/vite/issues/2820#issuecomment-812495079
@@ -56056,12 +56291,18 @@ async function createServer(inlineConfig = {}) {
56056
56291
  },
56057
56292
  transformIndexHtml: null,
56058
56293
  async ssrLoadModule(url, opts) {
56059
- let configFileDependencies = [];
56060
- const metadata = server._optimizeDepsMetadata;
56061
- if (metadata) {
56062
- configFileDependencies = Object.keys(metadata.optimized);
56294
+ if (!server._ssrExternals) {
56295
+ let knownImports = [];
56296
+ const optimizedDeps = server._optimizedDeps;
56297
+ if (optimizedDeps) {
56298
+ await optimizedDeps.scanProcessing;
56299
+ knownImports = [
56300
+ ...Object.keys(optimizedDeps.metadata.optimized),
56301
+ ...Object.keys(optimizedDeps.metadata.discovered)
56302
+ ];
56303
+ }
56304
+ server._ssrExternals = resolveSSRExternal(config, knownImports);
56063
56305
  }
56064
- server._ssrExternals || (server._ssrExternals = resolveSSRExternal(config, configFileDependencies));
56065
56306
  return ssrLoadModule(url, server, undefined, undefined, opts === null || opts === void 0 ? void 0 : opts.fixStacktrace);
56066
56307
  },
56067
56308
  ssrFixStacktrace(e) {
@@ -56106,12 +56347,11 @@ async function createServer(inlineConfig = {}) {
56106
56347
  }
56107
56348
  return server._restartPromise;
56108
56349
  },
56109
- _optimizeDepsMetadata: null,
56350
+ _optimizedDeps: null,
56110
56351
  _ssrExternals: null,
56111
56352
  _globImporters: Object.create(null),
56112
56353
  _restartPromise: null,
56113
56354
  _forceOptimizeOnRestart: false,
56114
- _registerMissingImport: null,
56115
56355
  _pendingRequests: new Map()
56116
56356
  };
56117
56357
  server.transformIndexHtml = createDevHtmlTransformFn(server);
@@ -56230,29 +56470,15 @@ async function createServer(inlineConfig = {}) {
56230
56470
  }
56231
56471
  // error handler
56232
56472
  middlewares.use(errorMiddleware(server, !!middlewareMode));
56233
- const runOptimize = async () => {
56234
- const optimizeDeps = await createOptimizeDepsRun(config, config.server.force);
56235
- // Don't await for the optimization to finish, we can start the
56236
- // server right away here
56237
- server._optimizeDepsMetadata = optimizeDeps.metadata;
56238
- // Run deps optimization in parallel
56239
- const initialProcessingPromise = optimizeDeps
56240
- .run()
56241
- .then((result) => result.commit());
56242
- // While running the first optimizeDeps, _registerMissingImport is null
56243
- // so the resolve plugin resolves straight to node_modules during the
56244
- // deps discovery scan phase
56245
- server._registerMissingImport = createMissingImporterRegisterFn(server, initialProcessingPromise);
56246
- };
56247
56473
  if (!middlewareMode && httpServer) {
56248
56474
  let isOptimized = false;
56249
- // overwrite listen to run optimizer before server start
56475
+ // overwrite listen to init optimizer before server start
56250
56476
  const listen = httpServer.listen.bind(httpServer);
56251
56477
  httpServer.listen = (async (port, ...args) => {
56252
56478
  if (!isOptimized) {
56253
56479
  try {
56254
56480
  await container.buildStart({});
56255
- await runOptimize();
56481
+ server._optimizedDeps = createOptimizedDeps(server);
56256
56482
  isOptimized = true;
56257
56483
  }
56258
56484
  catch (e) {
@@ -56265,7 +56491,7 @@ async function createServer(inlineConfig = {}) {
56265
56491
  }
56266
56492
  else {
56267
56493
  await container.buildStart({});
56268
- await runOptimize();
56494
+ server._optimizedDeps = createOptimizedDeps(server);
56269
56495
  }
56270
56496
  return server;
56271
56497
  }
@@ -56847,15 +57073,20 @@ function importAnalysisPlugin(config) {
56847
57073
  url = url.replace(base, '/');
56848
57074
  }
56849
57075
  let importerFile = importer;
56850
- if (moduleListContains((_a = config.optimizeDeps) === null || _a === void 0 ? void 0 : _a.exclude, url) &&
56851
- server._optimizeDepsMetadata) {
56852
- // if the dependency encountered in the optimized file was excluded from the optimization
56853
- // the dependency needs to be resolved starting from the original source location of the optimized file
56854
- // because starting from node_modules/.vite will not find the dependency if it was not hoisted
56855
- // (that is, if it is under node_modules directory in the package source of the optimized file)
56856
- for (const optimizedModule of Object.values(server._optimizeDepsMetadata.optimized)) {
56857
- if (optimizedModule.file === importerModule.file) {
56858
- importerFile = optimizedModule.src;
57076
+ if (moduleListContains((_a = config.optimizeDeps) === null || _a === void 0 ? void 0 : _a.exclude, url)) {
57077
+ const optimizedDeps = server._optimizedDeps;
57078
+ if (optimizedDeps) {
57079
+ await optimizedDeps.scanProcessing;
57080
+ // if the dependency encountered in the optimized file was excluded from the optimization
57081
+ // the dependency needs to be resolved starting from the original source location of the optimized file
57082
+ // because starting from node_modules/.vite will not find the dependency if it was not hoisted
57083
+ // (that is, if it is under node_modules directory in the package source of the optimized file)
57084
+ for (const optimizedModule of optimizedDeps.metadata.depInfoList) {
57085
+ if (!optimizedModule.src)
57086
+ continue; // Ignore chunks
57087
+ if (optimizedModule.file === importerModule.file) {
57088
+ importerFile = optimizedModule.src;
57089
+ }
56859
57090
  }
56860
57091
  }
56861
57092
  }
@@ -57021,14 +57252,15 @@ function importAnalysisPlugin(config) {
57021
57252
  if (url !== specifier) {
57022
57253
  importRewrites.push(async () => {
57023
57254
  let rewriteDone = false;
57024
- if (isOptimizedDepFile(resolvedId, config) &&
57255
+ if ((server === null || server === void 0 ? void 0 : server._optimizedDeps) &&
57256
+ isOptimizedDepFile(resolvedId, config) &&
57025
57257
  !resolvedId.match(optimizedDepChunkRE)) {
57026
57258
  // for optimized cjs deps, support named imports by rewriting named imports to const assignments.
57027
57259
  // internal optimized chunks don't need es interop and are excluded
57028
57260
  // The browserHash in resolvedId could be stale in which case there will be a full
57029
57261
  // page reload. We could return a 404 in that case but it is safe to return the request
57030
57262
  const file = cleanUrl(resolvedId); // Remove ?v={hash}
57031
- const needsInterop = await optimizedDepNeedsInterop(server._optimizeDepsMetadata, file);
57263
+ const needsInterop = await optimizedDepNeedsInterop(server._optimizedDeps.metadata, file);
57032
57264
  if (needsInterop === undefined) {
57033
57265
  // Non-entry dynamic imports from dependencies will reach here as there isn't
57034
57266
  // optimize info for them, but they don't need es interop. If the request isn't
@@ -57531,9 +57763,9 @@ function preAliasPlugin() {
57531
57763
  configureServer(_server) {
57532
57764
  server = _server;
57533
57765
  },
57534
- resolveId(id, importer, options) {
57535
- if (!(options === null || options === void 0 ? void 0 : options.ssr) && bareImportRE.test(id)) {
57536
- return tryOptimizedResolve(id, server, importer);
57766
+ async resolveId(id, importer, options) {
57767
+ if (!(options === null || options === void 0 ? void 0 : options.ssr) && bareImportRE.test(id) && !(options === null || options === void 0 ? void 0 : options.scan)) {
57768
+ return await tryOptimizedResolve(id, server, importer);
57537
57769
  }
57538
57770
  }
57539
57771
  };
@@ -57654,11 +57886,20 @@ function definePlugin(config) {
57654
57886
 
57655
57887
  const WORKER_FILE_ID = 'worker_url_file';
57656
57888
  function getWorkerType(code, noCommentsCode, i) {
57889
+ function err(e, pos) {
57890
+ const error = new Error(e);
57891
+ error.pos = pos;
57892
+ throw error;
57893
+ }
57657
57894
  const commaIndex = noCommentsCode.indexOf(',', i);
57658
57895
  if (commaIndex === -1) {
57659
57896
  return 'classic';
57660
57897
  }
57661
57898
  const endIndex = noCommentsCode.indexOf(')', i);
57899
+ // case: ') ... ,' mean no worker options params
57900
+ if (commaIndex > endIndex) {
57901
+ return 'classic';
57902
+ }
57662
57903
  // need to find in comment code
57663
57904
  let workerOptsString = code.substring(commaIndex + 1, endIndex);
57664
57905
  const hasViteIgnore = /\/\*\s*@vite-ignore\s*\*\//.test(workerOptsString);
@@ -57676,8 +57917,8 @@ function getWorkerType(code, noCommentsCode, i) {
57676
57917
  }
57677
57918
  catch (e) {
57678
57919
  // can't parse by JSON5, so the worker options had unexpect char.
57679
- throw new Error('Vite is unable to parse the worker options as the value is not static.' +
57680
- 'To ignore this error, please use /* @vite-ignore */ in the worker options.');
57920
+ err('Vite is unable to parse the worker options as the value is not static.' +
57921
+ 'To ignore this error, please use /* @vite-ignore */ in the worker options.', commaIndex + 1);
57681
57922
  }
57682
57923
  if (['classic', 'module'].includes(workerOpts.type)) {
57683
57924
  return workerOpts.type;
@@ -58205,6 +58446,7 @@ async function resolveConfig(inlineConfig, command, defaultMode = 'development')
58205
58446
  cacheDir,
58206
58447
  command,
58207
58448
  mode,
58449
+ isWorker: false,
58208
58450
  isProduction,
58209
58451
  plugins: userPlugins,
58210
58452
  server,
@@ -58235,7 +58477,7 @@ async function resolveConfig(inlineConfig, command, defaultMode = 'development')
58235
58477
  };
58236
58478
  // flat config.worker.plugin
58237
58479
  const [workerPrePlugins, workerNormalPlugins, workerPostPlugins] = sortUserPlugins((_g = config.worker) === null || _g === void 0 ? void 0 : _g.plugins);
58238
- const workerResolved = { ...resolved };
58480
+ const workerResolved = { ...resolved, isWorker: true };
58239
58481
  resolved.worker.plugins = await resolvePlugins(workerResolved, workerPrePlugins, workerNormalPlugins, workerPostPlugins);
58240
58482
  // call configResolved worker plugins hooks
58241
58483
  await Promise.all(resolved.worker.plugins.map((p) => { var _a; return (_a = p.configResolved) === null || _a === void 0 ? void 0 : _a.call(p, workerResolved); }));