webpack 4.39.1 → 4.40.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -1
- package/lib/BannerPlugin.js +7 -10
- package/lib/CachePlugin.js +12 -14
- package/lib/ChunkGroup.js +12 -12
- package/lib/Compilation.js +152 -9
- package/lib/Compiler.js +15 -5
- package/lib/Entrypoint.js +2 -2
- package/lib/HotModuleReplacementPlugin.js +18 -4
- package/lib/MainTemplate.js +8 -1
- package/lib/NormalModule.js +6 -2
- package/lib/Parser.js +1 -1
- package/lib/SourceMapDevToolPlugin.js +19 -7
- package/lib/Stats.js +18 -10
- package/lib/TemplatedPathPlugin.js +11 -6
- package/lib/buildChunkGraph.js +49 -14
- package/lib/debug/ProfilingPlugin.js +1 -1
- package/lib/logging/Logger.js +2 -2
- package/lib/logging/createConsoleLogger.js +2 -1
- package/lib/optimize/ConcatenatedModule.js +8 -0
- package/lib/performance/SizeLimitsPlugin.js +35 -15
- package/lib/wasm/WebAssemblyGenerator.js +13 -7
- package/package.json +5 -5
package/README.md
CHANGED
@@ -14,6 +14,7 @@
 [![builds2][builds2]][builds2-url]
 [![coverage][cover]][cover-url]
 [![licenses][licenses]][licenses-url]
+[![PR's welcome][prs]][prs-url]

 <br>
 <a href="https://dependabot.com/compatibility-score.html?dependency-name=webpack&package-manager=npm_and_yarn&new-version=latest">
@@ -346,7 +347,7 @@ If you create a loader or plugin, we would <3 for you to open source it, and put

 <h2 align="center">Support</h2>

-We consider webpack to be a low-level tool used not only individually but also layered beneath other awesome tools. Because of its flexibility, webpack isn't always the _easiest_ entry-level solution, however we do believe it is the most powerful. That said, we're always looking for ways improve and simplify the tool without compromising functionality. If you have any ideas on ways to accomplish this, we're all ears!
+We consider webpack to be a low-level tool used not only individually but also layered beneath other awesome tools. Because of its flexibility, webpack isn't always the _easiest_ entry-level solution, however we do believe it is the most powerful. That said, we're always looking for ways to improve and simplify the tool without compromising functionality. If you have any ideas on ways to accomplish this, we're all ears!

 If you're just getting started, take a look at [our new docs and concepts page](https://webpack.js.org/concepts/). This has a high level overview that is great for beginners!!

@@ -767,6 +768,9 @@ src="https://static.monei.net/monei-logo.svg" height="30" alt="MONEI"></a>
 [tests]: https://img.shields.io/travis/webpack/webpack/master.svg
 [tests-url]: https://travis-ci.org/webpack/webpack

+[prs]: https://img.shields.io/badge/PRs-welcome-brightgreen.svg
+[prs-url]: https://webpack.js.org/contribute/
+
 [builds-url]: https://ci.appveyor.com/project/sokra/webpack/branch/master
 [builds]: https://ci.appveyor.com/api/projects/status/github/webpack/webpack?svg=true
package/lib/BannerPlugin.js
CHANGED
@@ -81,7 +81,6 @@ class BannerPlugin {
 continue;
 }

-let basename;
 let query = "";
 let filename = file;
 const hash = compilation.hash;
@@ -94,11 +93,10 @@ class BannerPlugin {

 const lastSlashIndex = filename.lastIndexOf("/");

-
-
-
-
-}
+const basename =
+lastSlashIndex === -1
+? filename
+: filename.substr(lastSlashIndex + 1);

 const data = {
 hash,
@@ -110,10 +108,9 @@ class BannerPlugin {

 const comment = compilation.getPath(banner(data), data);

-compilation.
-
-"\n",
-compilation.assets[file]
+compilation.updateAsset(
+file,
+old => new ConcatSource(comment, "\n", old)
 );
 }
 }
package/lib/CachePlugin.js
CHANGED
@@ -24,21 +24,19 @@ class CachePlugin {
 compilation.hooks.childCompiler.tap(
 "CachePlugin",
 (childCompiler, compilerName, compilerIndex) => {
-
-
-
-cache.children = {};
-}
-if (!cache.children[compilerName]) {
-cache.children[compilerName] = [];
-}
-if (cache.children[compilerName][compilerIndex]) {
-childCache = cache.children[compilerName][compilerIndex];
-} else {
-cache.children[compilerName].push((childCache = {}));
-}
-registerCacheToCompiler(childCompiler, childCache);
+let childCache;
+if (!cache.children) {
+cache.children = {};
 }
+if (!cache.children[compilerName]) {
+cache.children[compilerName] = [];
+}
+if (cache.children[compilerName][compilerIndex]) {
+childCache = cache.children[compilerName][compilerIndex];
+} else {
+cache.children[compilerName].push((childCache = {}));
+}
+registerCacheToCompiler(childCompiler, childCache);
 }
 );
 });
package/lib/ChunkGroup.js
CHANGED
@@ -70,12 +70,12 @@ class ChunkGroup {
 this.chunks = [];
 /** @type {OriginRecord[]} */
 this.origins = [];
-/**
+/** Indices in top-down order */
 /** @private @type {Map<Module, number>} */
-this.
-/**
+this._moduleIndices = new Map();
+/** Indices in bottom-up order */
 /** @private @type {Map<Module, number>} */
-this.
+this._moduleIndices2 = new Map();
 }

 /**
@@ -174,7 +174,7 @@ class ChunkGroup {
 /**
 * add a chunk into ChunkGroup. Is pushed on or prepended
 * @param {Chunk} chunk chunk being pushed into ChunkGroupS
-* @returns {boolean} returns true if chunk addition was
+* @returns {boolean} returns true if chunk addition was successful.
 */
 pushChunk(chunk) {
 const oldIdx = this.chunks.indexOf(chunk);
@@ -187,8 +187,8 @@ class ChunkGroup {

 /**
 * @param {Chunk} oldChunk chunk to be replaced
-* @param {Chunk} newChunk New
-* @returns {boolean}
+* @param {Chunk} newChunk New chunk that will be replaced with
+* @returns {boolean} returns true if the replacement was successful
 */
 replaceChunk(oldChunk, newChunk) {
 const oldIdx = this.chunks.indexOf(oldChunk);
@@ -369,7 +369,7 @@ class ChunkGroup {

 /**
 * we need to iterate again over the children
-* to remove this from the
+* to remove this from the child's parents.
 * This can not be done in the above loop
 * as it is not guaranteed that `this._parents` contains anything.
 */
@@ -460,7 +460,7 @@ class ChunkGroup {
 * @returns {void}
 */
 setModuleIndex(module, index) {
-this.
+this._moduleIndices.set(module, index);
 }

 /**
@@ -469,7 +469,7 @@ class ChunkGroup {
 * @returns {number} index
 */
 getModuleIndex(module) {
-return this.
+return this._moduleIndices.get(module);
 }

 /**
@@ -479,7 +479,7 @@ class ChunkGroup {
 * @returns {void}
 */
 setModuleIndex2(module, index) {
-this.
+this._moduleIndices2.set(module, index);
 }

 /**
@@ -488,7 +488,7 @@ class ChunkGroup {
 * @returns {number} index
 */
 getModuleIndex2(module) {
-return this.
+return this._moduleIndices2.get(module);
 }

 checkConstraints() {
package/lib/Compilation.js
CHANGED
@@ -37,11 +37,11 @@ const compareLocations = require("./compareLocations");
 const { Logger, LogType } = require("./logging/Logger");
 const ErrorHelpers = require("./ErrorHelpers");
 const buildChunkGraph = require("./buildChunkGraph");
+const WebpackError = require("./WebpackError");

 /** @typedef {import("./Module")} Module */
 /** @typedef {import("./Compiler")} Compiler */
 /** @typedef {import("webpack-sources").Source} Source */
-/** @typedef {import("./WebpackError")} WebpackError */
 /** @typedef {import("./DependenciesBlockVariable")} DependenciesBlockVariable */
 /** @typedef {import("./dependencies/SingleEntryDependency")} SingleEntryDependency */
 /** @typedef {import("./dependencies/MultiEntryDependency")} MultiEntryDependency */
@@ -104,6 +104,21 @@ const buildChunkGraph = require("./buildChunkGraph");
 * @property {string[]=} trace
 */

+/**
+* @typedef {Object} AssetInfo
+* @property {boolean=} immutable true, if the asset can be long term cached forever (contains a hash)
+* @property {number=} size size in bytes, only set after asset has been emitted
+* @property {boolean=} development true, when asset is only used for development and doesn't count towards user-facing assets
+* @property {boolean=} hotModuleReplacement true, when asset ships data for updating an existing application (HMR)
+*/
+
+/**
+* @typedef {Object} Asset
+* @property {string} name the filename of the asset
+* @property {Source} source source of the asset
+* @property {AssetInfo} info info about the asset
+*/
+
 /**
 * @param {Chunk} a first chunk to sort by id
 * @param {Chunk} b second chunk to sort by id
@@ -206,6 +221,25 @@ const addAllToSet = (set, otherSet) => {
 }
 };

+/**
+* @param {Source} a a source
+* @param {Source} b another source
+* @returns {boolean} true, when both sources are equal
+*/
+const isSourceEqual = (a, b) => {
+if (a === b) return true;
+// TODO webpack 5: check .buffer() instead, it's called anyway during emit
+/** @type {Buffer|string} */
+let aSource = a.source();
+/** @type {Buffer|string} */
+let bSource = b.source();
+if (aSource === bSource) return true;
+if (typeof aSource === "string" && typeof bSource === "string") return false;
+if (!Buffer.isBuffer(aSource)) aSource = Buffer.from(aSource, "utf-8");
+if (!Buffer.isBuffer(bSource)) bSource = Buffer.from(bSource, "utf-8");
+return aSource.equals(bSource);
+};
+
 class Compilation extends Tapable {
 /**
 * Creates an instance of Compilation.
@@ -446,6 +480,7 @@ class Compilation extends Tapable {
 this.entries = [];
 /** @private @type {{name: string, request: string, module: Module}[]} */
 this._preparedEntrypoints = [];
+/** @type {Map<string, Entrypoint>} */
 this.entrypoints = new Map();
 /** @type {Chunk[]} */
 this.chunks = [];
@@ -465,6 +500,8 @@ class Compilation extends Tapable {
 this.additionalChunkAssets = [];
 /** @type {CompilationAssets} */
 this.assets = {};
+/** @type {Map<string, AssetInfo>} */
+this.assetsInfo = new Map();
 /** @type {WebpackError[]} */
 this.errors = [];
 /** @type {WebpackError[]} */
@@ -1233,6 +1270,7 @@ class Compilation extends Tapable {
 this.namedChunkGroups.clear();
 this.additionalChunkAssets.length = 0;
 this.assets = {};
+this.assetsInfo.clear();
 for (const module of this.modules) {
 module.unseal();
 }
@@ -1963,13 +2001,107 @@ class Compilation extends Tapable {
 this.hash = this.fullHash.substr(0, hashDigestLength);
 }

+/**
+* @param {string} file file name
+* @param {Source} source asset source
+* @param {AssetInfo} assetInfo extra asset information
+* @returns {void}
+*/
+emitAsset(file, source, assetInfo = {}) {
+if (this.assets[file]) {
+if (!isSourceEqual(this.assets[file], source)) {
+// TODO webpack 5: make this an error instead
+this.warnings.push(
+new WebpackError(
+`Conflict: Multiple assets emit different content to the same filename ${file}`
+)
+);
+this.assets[file] = source;
+this.assetsInfo.set(file, assetInfo);
+return;
+}
+const oldInfo = this.assetsInfo.get(file);
+this.assetsInfo.set(file, Object.assign({}, oldInfo, assetInfo));
+return;
+}
+this.assets[file] = source;
+this.assetsInfo.set(file, assetInfo);
+}
+
+/**
+* @param {string} file file name
+* @param {Source | function(Source): Source} newSourceOrFunction new asset source or function converting old to new
+* @param {AssetInfo | function(AssetInfo | undefined): AssetInfo} assetInfoUpdateOrFunction new asset info or function converting old to new
+*/
+updateAsset(
+file,
+newSourceOrFunction,
+assetInfoUpdateOrFunction = undefined
+) {
+if (!this.assets[file]) {
+throw new Error(
+`Called Compilation.updateAsset for not existing filename ${file}`
+);
+}
+if (typeof newSourceOrFunction === "function") {
+this.assets[file] = newSourceOrFunction(this.assets[file]);
+} else {
+this.assets[file] = newSourceOrFunction;
+}
+if (assetInfoUpdateOrFunction !== undefined) {
+const oldInfo = this.assetsInfo.get(file);
+if (typeof assetInfoUpdateOrFunction === "function") {
+this.assetsInfo.set(file, assetInfoUpdateOrFunction(oldInfo || {}));
+} else {
+this.assetsInfo.set(
+file,
+Object.assign({}, oldInfo, assetInfoUpdateOrFunction)
+);
+}
+}
+}
+
+getAssets() {
+/** @type {Asset[]} */
+const array = [];
+for (const assetName of Object.keys(this.assets)) {
+if (Object.prototype.hasOwnProperty.call(this.assets, assetName)) {
+array.push({
+name: assetName,
+source: this.assets[assetName],
+info: this.assetsInfo.get(assetName) || {}
+});
+}
+}
+return array;
+}
+
+/**
+* @param {string} name the name of the asset
+* @returns {Asset | undefined} the asset or undefined when not found
+*/
+getAsset(name) {
+if (!Object.prototype.hasOwnProperty.call(this.assets, name))
+return undefined;
+return {
+name,
+source: this.assets[name],
+info: this.assetsInfo.get(name) || {}
+};
+}
+
 createModuleAssets() {
 for (let i = 0; i < this.modules.length; i++) {
 const module = this.modules[i];
 if (module.buildInfo.assets) {
+const assetsInfo = module.buildInfo.assetsInfo;
 for (const assetName of Object.keys(module.buildInfo.assets)) {
 const fileName = this.getPath(assetName);
-this.
+this.emitAsset(
+fileName,
+module.buildInfo.assets[assetName],
+assetsInfo ? assetsInfo.get(assetName) : undefined
+);
 this.hooks.moduleAsset.call(module, fileName);
 }
 }
@@ -2003,7 +2135,12 @@ class Compilation extends Tapable {
 const cacheName = fileManifest.identifier;
 const usedHash = fileManifest.hash;
 filenameTemplate = fileManifest.filenameTemplate;
-
+const pathAndInfo = this.getPathWithInfo(
+filenameTemplate,
+fileManifest.pathOptions
+);
+file = pathAndInfo.path;
+const assetInfo = pathAndInfo.info;

 // check if the same filename was already written by another chunk
 const alreadyWritten = alreadyWrittenFiles.get(file);
@@ -2051,12 +2188,7 @@ class Compilation extends Tapable {
 };
 }
 }
-
-throw new Error(
-`Conflict: Multiple assets emit to the same filename ${file}`
-);
-}
-this.assets[file] = source;
+this.emitAsset(file, source, assetInfo);
 chunk.files.push(file);
 this.hooks.chunkAsset.call(chunk, file);
 alreadyWrittenFiles.set(file, {
@@ -2084,6 +2216,17 @@ class Compilation extends Tapable {
 return this.mainTemplate.getAssetPath(filename, data);
 }

+/**
+* @param {string} filename used to get asset path with hash
+* @param {TODO=} data // TODO: figure out this param type
+* @returns {{ path: string, info: AssetInfo }} interpolated path and asset info
+*/
+getPathWithInfo(filename, data) {
+data = data || {};
+data.hash = data.hash || this.hash;
+return this.mainTemplate.getAssetPathWithInfo(filename, data);
+}
+
 /**
 * This function allows you to run another instance of webpack inside of webpack however as
 * a child with different settings and configurations (if desired) applied. It copies all hooks, plugins
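Note: the Compilation.js changes above add a small asset API (emitAsset, updateAsset, getAssets, getAsset) plus a per-asset AssetInfo record, as an alternative to writing to compilation.assets directly. Below is a minimal, hedged sketch of a plugin written against that API — the plugin name and the emitted file are illustrative only, not part of webpack:

```js
// Sketch only: assumes webpack 4.40.x with webpack-sources available.
const { RawSource } = require("webpack-sources");

class BuildInfoPlugin {
	apply(compiler) {
		compiler.hooks.emit.tap("BuildInfoPlugin", compilation => {
			// emitAsset registers the source and its AssetInfo in one call.
			compilation.emitAsset(
				"build-info.json",
				new RawSource(JSON.stringify({ hash: compilation.hash })),
				{ development: true }
			);

			// getAssets() exposes name, source and info without touching compilation.assets.
			const hmrAssets = compilation
				.getAssets()
				.filter(({ info }) => info.hotModuleReplacement)
				.map(({ name }) => name);
			if (hmrAssets.length > 0) {
				console.log("HMR update assets:", hmrAssets.join(", "));
			}
		});
	}
}

module.exports = BuildInfoPlugin;
```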
package/lib/Compiler.js
CHANGED
@@ -329,8 +329,8 @@ class Compiler extends Tapable {
 if (err) return callback(err);

 this.parentCompilation.children.push(compilation);
-for (const name of
-this.parentCompilation.
+for (const { name, source, info } of compilation.getAssets()) {
+this.parentCompilation.emitAsset(name, source, info);
 }

 const entries = Array.from(
@@ -356,9 +356,9 @@ class Compiler extends Tapable {
 if (err) return callback(err);

 asyncLib.forEachLimit(
-compilation.
+compilation.getAssets(),
 15,
-(
+({ name: file, source }, callback) => {
 let targetFile = file;
 const queryStringIdx = targetFile.indexOf("?");
 if (queryStringIdx >= 0) {
@@ -396,10 +396,18 @@ class Compiler extends Tapable {
 // if yes, we skip writing the file
 // as it's already there
 // (we assume one doesn't remove files while the Compiler is running)
+
+compilation.updateAsset(file, cacheEntry.sizeOnlySource, {
+size: cacheEntry.sizeOnlySource.size()
+});
+
 return callback();
 }
 }

+// TODO webpack 5: if info.immutable check if file already exists in output
+// skip emitting if it's already there
+
 // get the binary (Buffer) content from the Source
 /** @type {Buffer} */
 let content;
@@ -418,7 +426,9 @@ class Compiler extends Tapable {
 // This allows to GC all memory allocated by the Source
 // (expect when the Source is stored in any other cache)
 cacheEntry.sizeOnlySource = new SizeOnlySource(content.length);
-compilation.
+compilation.updateAsset(file, cacheEntry.sizeOnlySource, {
+size: content.length
+});

 // Write the file to output file system
 this.outputFileSystem.writeFile(targetPath, content, err => {
package/lib/Entrypoint.js
CHANGED
@@ -52,8 +52,8 @@ class Entrypoint extends ChunkGroup {

 /**
 * @param {Chunk} oldChunk chunk to be replaced
-* @param {Chunk} newChunk New
-* @returns {boolean}
+* @param {Chunk} newChunk New chunk that will be replaced with
+* @returns {boolean} returns true if the replacement was successful
 */
 replaceChunk(oldChunk, newChunk) {
 if (this.runtimeChunk === oldChunk) this.runtimeChunk = newChunk;
package/lib/HotModuleReplacementPlugin.js
CHANGED
@@ -277,12 +277,19 @@ module.exports = class HotModuleReplacementPlugin {
 compilation.moduleTemplates.javascript,
 compilation.dependencyTemplates
 );
-const
+const {
+path: filename,
+info: assetInfo
+} = compilation.getPathWithInfo(hotUpdateChunkFilename, {
 hash: records.hash,
 chunk: currentChunk
 });
 compilation.additionalChunkAssets.push(filename);
-compilation.
+compilation.emitAsset(
+filename,
+source,
+Object.assign({ hotModuleReplacement: true }, assetInfo)
+);
 hotUpdateMainContent.c[chunkId] = true;
 currentChunk.files.push(filename);
 compilation.hooks.chunkAsset.call(currentChunk, filename);
@@ -292,10 +299,17 @@ module.exports = class HotModuleReplacementPlugin {
 }
 }
 const source = new RawSource(JSON.stringify(hotUpdateMainContent));
-const
+const {
+path: filename,
+info: assetInfo
+} = compilation.getPathWithInfo(hotUpdateMainFilename, {
 hash: records.hash
 });
-compilation.
+compilation.emitAsset(
+filename,
+source,
+Object.assign({ hotModuleReplacement: true }, assetInfo)
+);
 }
 );

package/lib/MainTemplate.js
CHANGED
@@ -122,7 +122,7 @@ module.exports = class MainTemplate extends Tapable {
 "moduleExpression"
 ]),
 currentHash: new SyncWaterfallHook(["source", "requestedLength"]),
-assetPath: new SyncWaterfallHook(["path", "options"]),
+assetPath: new SyncWaterfallHook(["path", "options", "assetInfo"]),
 hash: new SyncHook(["hash"]),
 hashForChunk: new SyncHook(["hash", "chunk"]),
 globalHashPaths: new SyncWaterfallHook(["paths"]),
@@ -521,6 +521,13 @@ module.exports = class MainTemplate extends Tapable {
 return this.hooks.assetPath.call(path, options);
 }

+getAssetPathWithInfo(path, options) {
+const assetInfo = {};
+// TODO webpack 5: refactor assetPath hook to receive { path, info } object
+const newPath = this.hooks.assetPath.call(path, options, assetInfo);
+return { path: newPath, info: assetInfo };
+}
+
 /**
 * Updates hash with information from this template
 * @param {Hash} hash the hash to update
package/lib/NormalModule.js
CHANGED
@@ -210,15 +210,17 @@ class NormalModule extends Module {
 }
 };
 },
-emitFile: (name, content, sourceMap) => {
+emitFile: (name, content, sourceMap, assetInfo) => {
 if (!this.buildInfo.assets) {
 this.buildInfo.assets = Object.create(null);
+this.buildInfo.assetsInfo = new Map();
 }
 this.buildInfo.assets[name] = this.createSourceForAsset(
 name,
 content,
 sourceMap
 );
+this.buildInfo.assetsInfo.set(name, assetInfo);
 },
 rootContext: options.context,
 webpack: true,
@@ -432,7 +434,9 @@ class NormalModule extends Module {
 this.buildInfo = {
 cacheable: false,
 fileDependencies: new Set(),
-contextDependencies: new Set()
+contextDependencies: new Set(),
+assets: undefined,
+assetsInfo: undefined
 };

 return this.doBuild(options, compilation, resolver, fs, err => {
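Note: with the loader-context change above, this.emitFile accepts a fourth assetInfo argument, which lands in module.buildInfo.assetsInfo and is forwarded by Compilation.createModuleAssets. A hedged sketch of a loader using it — the loader and the emitted filename are hypothetical:

```js
// Hypothetical loader for webpack 4.40+: the fourth emitFile argument is the new AssetInfo.
module.exports = function exampleLoader(content) {
	// Emit a sibling file and flag it development-only so size limits and
	// user-facing asset lists can ignore it.
	this.emitFile("example.debug.txt", content.toString(), null, {
		development: true
	});
	return content;
};
```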
package/lib/Parser.js
CHANGED
@@ -29,7 +29,7 @@ const defaultParserOptions = {
 onComment: null
 };

-// regexp to match at
+// regexp to match at least one "magic comment"
 const webpackCommentRegExp = new RegExp(/(^|\W)webpack[A-Z]{1,}[A-Za-z]{1,}:/);

 const EMPTY_COMMENT_OPTIONS = {
package/lib/SourceMapDevToolPlugin.js
CHANGED
@@ -175,14 +175,20 @@ class SourceMapDevToolPlugin {
 reportProgress(0.0);
 const tasks = [];
 files.forEach(({ file, chunk }, idx) => {
-const asset = compilation.
+const asset = compilation.getAsset(file).source;
 const cache = assetsCache.get(asset);
 /**
 * If presented in cache, reassigns assets. Cache assets already have source maps.
 */
 if (cache && cache.file === file) {
 for (const cachedFile in cache.assets) {
-
+if (cachedFile === file) {
+compilation.updateAsset(cachedFile, cache.assets[cachedFile]);
+} else {
+compilation.emitAsset(cachedFile, cache.assets[cachedFile], {
+development: true
+});
+}
 /**
 * Add file to chunk, if not presented there
 */
@@ -353,20 +359,24 @@ class SourceMapDevToolPlugin {
 * Add source map url to compilation asset, if {@link currentSourceMappingURLComment} presented
 */
 if (currentSourceMappingURLComment !== false) {
-
+const asset = new ConcatSource(
 new RawSource(source),
 compilation.getPath(
 currentSourceMappingURLComment,
 Object.assign({ url: sourceMapUrl }, pathParams)
 )
 );
+assets[file] = asset;
+compilation.updateAsset(file, asset);
 }
 /**
 * Add source map file to compilation assets and chunk files
 */
-
-
-
+const asset = new RawSource(sourceMapString);
+assets[sourceMapFile] = asset;
+compilation.emitAsset(sourceMapFile, asset, {
+development: true
+});
 chunk.files.push(sourceMapFile);
 } else {
 if (currentSourceMappingURLComment === false) {
@@ -377,7 +387,7 @@ class SourceMapDevToolPlugin {
 /**
 * Add source map as data url to asset
 */
-
+const asset = new ConcatSource(
 new RawSource(source),
 currentSourceMappingURLComment
 .replace(/\[map\]/g, () => sourceMapString)
@@ -390,6 +400,8 @@ class SourceMapDevToolPlugin {
 ).toString("base64")}`
 )
 );
+assets[file] = asset;
+compilation.updateAsset(file, asset);
 }
 });
 reportProgress(1.0);
package/lib/Stats.js
CHANGED
@@ -404,26 +404,27 @@ class Stats {
 }
 if (showAssets) {
 const assetsByFile = {};
-const compilationAssets =
+const compilationAssets = compilation
+.getAssets()
+.sort((a, b) => (a.name < b.name ? -1 : 1));
 obj.assetsByChunkName = {};
 obj.assets = compilationAssets
-.map(
+.map(({ name, source, info }) => {
 const obj = {
-name
-size:
+name,
+size: source.size(),
 chunks: [],
 chunkNames: [],
+info,
 // TODO webpack 5: remove .emitted
-emitted:
-compilation.assets[asset].emitted ||
-compilation.emittedAssets.has(asset)
+emitted: source.emitted || compilation.emittedAssets.has(name)
 };

 if (showPerformance) {
-obj.isOverSizeLimit =
+obj.isOverSizeLimit = source.isOverSizeLimit;
 }

-assetsByFile[
+assetsByFile[name] = obj;
 return obj;
 })
 .filter(createAssetFilter());
@@ -1040,7 +1041,14 @@ class Stats {
 color: colors.bold
 },
 {
-value:
+value: [
+asset.emitted && "[emitted]",
+asset.info.immutable && "[immutable]",
+asset.info.development && "[dev]",
+asset.info.hotModuleReplacement && "[hmr]"
+]
+.filter(Boolean)
+.join(" "),
 color: colors.green
 },
 {
package/lib/TemplatedPathPlugin.js
CHANGED
@@ -23,8 +23,9 @@ const REGEXP_HASH_FOR_TEST = new RegExp(REGEXP_HASH.source, "i"),
 REGEXP_CONTENTHASH_FOR_TEST = new RegExp(REGEXP_CONTENTHASH.source, "i"),
 REGEXP_NAME_FOR_TEST = new RegExp(REGEXP_NAME.source, "i");

-const withHashLength = (replacer, handlerFn) => {
+const withHashLength = (replacer, handlerFn, assetInfo) => {
 const fn = (match, hashLength, ...args) => {
+if (assetInfo) assetInfo.immutable = true;
 const length = hashLength && parseInt(hashLength, 10);
 if (length && handlerFn) {
 return handlerFn(length);
@@ -59,7 +60,7 @@ const escapePathVariables = value => {
 : value;
 };

-const replacePathVariables = (path, data) => {
+const replacePathVariables = (path, data, assetInfo) => {
 const chunk = data.chunk;
 const chunkId = chunk && chunk.id;
 const chunkName = chunk && (chunk.name || chunk.id);
@@ -97,19 +98,23 @@ const replacePathVariables = (path, data) => {
 path
 .replace(
 REGEXP_HASH,
-withHashLength(getReplacer(data.hash), data.hashWithLength)
+withHashLength(getReplacer(data.hash), data.hashWithLength, assetInfo)
 )
 .replace(
 REGEXP_CHUNKHASH,
-withHashLength(getReplacer(chunkHash), chunkHashWithLength)
+withHashLength(getReplacer(chunkHash), chunkHashWithLength, assetInfo)
 )
 .replace(
 REGEXP_CONTENTHASH,
-withHashLength(
+withHashLength(
+getReplacer(contentHash),
+contentHashWithLength,
+assetInfo
+)
 )
 .replace(
 REGEXP_MODULEHASH,
-withHashLength(getReplacer(moduleHash), moduleHashWithLength)
+withHashLength(getReplacer(moduleHash), moduleHashWithLength, assetInfo)
 )
 .replace(REGEXP_ID, getReplacer(chunkId))
 .replace(REGEXP_MODULEID, getReplacer(moduleId))
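Note: TemplatedPathPlugin now flags a path as immutable whenever a [hash]/[chunkhash]/[contenthash]/[modulehash] placeholder is actually replaced, and that flag is surfaced through MainTemplate.getAssetPathWithInfo and Compilation.getPathWithInfo (see the MainTemplate.js and Compilation.js sections above). A hedged sketch of consuming it from a plugin — the plugin name and filename template are illustrative:

```js
class CachePolicyPlugin {
	apply(compiler) {
		compiler.hooks.emit.tap("CachePolicyPlugin", compilation => {
			for (const chunk of compilation.chunks) {
				const { path, info } = compilation.getPathWithInfo("[name].[hash:8].js", {
					chunk
				});
				// info.immutable is true here because the [hash] placeholder was replaced.
				console.log(path, info.immutable === true);
			}
		});
	}
}

module.exports = CachePolicyPlugin;
```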
package/lib/buildChunkGraph.js
CHANGED
@@ -28,11 +28,13 @@ const GraphHelpers = require("./GraphHelpers");

 /**
 * @typedef {Object} ChunkGroupInfo
+* @property {ChunkGroup} chunkGroup the chunk group
 * @property {Set<Module>} minAvailableModules current minimal set of modules available at this point
 * @property {boolean} minAvailableModulesOwned true, if minAvailableModules is owned and can be modified
 * @property {Set<Module>[]} availableModulesToBeMerged enqueued updates to the minimal set of available modules
 * @property {QueueItem[]} skippedItems queue items that were skipped because module is already available in parent chunks (need to reconsider when minAvailableModules is shrinking)
 * @property {Set<Module>} resultingAvailableModules set of modules available including modules from this chunk group
+* @property {Set<ChunkGroup>} children set of children chunk groups, that will be revisited when availableModules shrink
 */

 /**
@@ -196,11 +198,13 @@ const visitModules = (
 });
 }
 chunkGroupInfoMap.set(chunkGroup, {
+chunkGroup,
 minAvailableModules: new Set(),
 minAvailableModulesOwned: true,
 availableModulesToBeMerged: [],
 skippedItems: [],
-resultingAvailableModules: undefined
+resultingAvailableModules: undefined,
+children: undefined
 });
 return queue;
 };
@@ -418,7 +422,7 @@ const visitModules = (
 }
 logger.timeEnd("visiting");

-
+while (queueConnect.size > 0) {
 logger.time("calculating available modules");

 // Figure out new parents for chunk groups
@@ -435,17 +439,26 @@ const visitModules = (
 }
 }
 info.resultingAvailableModules = resultingAvailableModules;
+if (info.children === undefined) {
+info.children = targets;
+} else {
+for (const target of targets) {
+info.children.add(target);
+}
+}

 // 2. Update chunk group info
 for (const target of targets) {
 let chunkGroupInfo = chunkGroupInfoMap.get(target);
 if (chunkGroupInfo === undefined) {
 chunkGroupInfo = {
+chunkGroup: target,
 minAvailableModules: undefined,
 minAvailableModulesOwned: undefined,
 availableModulesToBeMerged: [],
 skippedItems: [],
-resultingAvailableModules: undefined
+resultingAvailableModules: undefined,
+children: undefined
 };
 chunkGroupInfoMap.set(target, chunkGroupInfo);
 }
@@ -463,7 +476,7 @@ const visitModules = (
 // Execute the merge
 for (const info of outdatedChunkGroupInfo) {
 const availableModulesToBeMerged = info.availableModulesToBeMerged;
-let
+let cachedMinAvailableModules = info.minAvailableModules;

 // 1. Get minimal available modules
 // It doesn't make sense to traverse a chunk again with more available modules.
@@ -474,29 +487,31 @@ const visitModules = (
 }
 let changed = false;
 for (const availableModules of availableModulesToBeMerged) {
-if (
-
-info.minAvailableModules =
+if (cachedMinAvailableModules === undefined) {
+cachedMinAvailableModules = availableModules;
+info.minAvailableModules = cachedMinAvailableModules;
 info.minAvailableModulesOwned = false;
 changed = true;
 } else {
 if (info.minAvailableModulesOwned) {
 // We own it and can modify it
-for (const m of
+for (const m of cachedMinAvailableModules) {
 if (!availableModules.has(m)) {
-
+cachedMinAvailableModules.delete(m);
 changed = true;
 }
 }
 } else {
-for (const m of
+for (const m of cachedMinAvailableModules) {
 if (!availableModules.has(m)) {
-//
+// cachedMinAvailableModules need to be modified
 // but we don't own it
-// construct a new Set as intersection of
+// construct a new Set as intersection of cachedMinAvailableModules and availableModules
 /** @type {Set<Module>} */
 const newSet = new Set();
-const iterator =
+const iterator = cachedMinAvailableModules[
+Symbol.iterator
+]();
 /** @type {IteratorResult<Module>} */
 let it;
 while (!(it = iterator.next()).done) {
@@ -510,9 +525,16 @@ const visitModules = (
 newSet.add(module);
 }
 }
-
+cachedMinAvailableModules = newSet;
 info.minAvailableModulesOwned = true;
 info.minAvailableModules = newSet;
+
+// Update the cache from the first queue
+// if the chunkGroup is currently cached
+if (chunkGroup === info.chunkGroup) {
+minAvailableModules = cachedMinAvailableModules;
+}
+
 changed = true;
 break;
 }
@@ -528,6 +550,19 @@ const visitModules = (
 queue.push(queueItem);
 }
 info.skippedItems.length = 0;
+
+// 3. Reconsider children chunk groups
+if (info.children !== undefined) {
+const chunkGroup = info.chunkGroup;
+for (const c of info.children) {
+let connectList = queueConnect.get(chunkGroup);
+if (connectList === undefined) {
+connectList = new Set();
+queueConnect.set(chunkGroup, connectList);
+}
+connectList.add(c);
+}
+}
 }
 outdatedChunkGroupInfo.clear();
 logger.timeEnd("merging available modules");
package/lib/debug/ProfilingPlugin.js
CHANGED
@@ -391,8 +391,8 @@ const makeNewProfiledTapFn = (hookName, tracer, { name, type, fn }) => {
 id,
 cat: defaultCategory
 });
+const callback = args.pop();
 fn(...args, (...r) => {
-const callback = args.pop();
 tracer.trace.end({
 name,
 id,
package/lib/logging/Logger.js
CHANGED
@@ -32,14 +32,14 @@ const LogType = Object.freeze({

 exports.LogType = LogType;

-/** @typedef {LogType} LogTypeEnum */
+/** @typedef {keyof LogType} LogTypeEnum */

 const LOG_SYMBOL = Symbol("webpack logger raw log method");
 const TIMERS_SYMBOL = Symbol("webpack logger times");

 class WebpackLogger {
 /**
-* @param {function(
+* @param {function(LogTypeEnum, any[]=): void} log log function
 */
 constructor(log) {
 this[LOG_SYMBOL] = log;
package/lib/optimize/ConcatenatedModule.js
CHANGED
@@ -375,6 +375,14 @@ class ConcatenatedModule extends Module {
 }
 Object.assign(this.buildInfo.assets, m.buildInfo.assets);
 }
+if (m.buildInfo.assetsInfo) {
+if (this.buildInfo.assetsInfo === undefined) {
+this.buildInfo.assetsInfo = new Map();
+}
+for (const [key, value] of m.buildInfo.assetsInfo) {
+this.buildInfo.assetsInfo.set(key, value);
+}
+}
 }
 }
 this._identifier = this._createIdentifier();
package/lib/performance/SizeLimitsPlugin.js
CHANGED
@@ -7,6 +7,9 @@ const EntrypointsOverSizeLimitWarning = require("./EntrypointsOverSizeLimitWarni
 const AssetsOverSizeLimitWarning = require("./AssetsOverSizeLimitWarning");
 const NoAsyncChunksWarning = require("./NoAsyncChunksWarning");

+/** @typedef {import("../Compiler")} Compiler */
+/** @typedef {import("../Entrypoint")} Entrypoint */
+
 module.exports = class SizeLimitsPlugin {
 constructor(options) {
 this.hints = options.hints;
@@ -14,54 +17,71 @@ module.exports = class SizeLimitsPlugin {
 this.maxEntrypointSize = options.maxEntrypointSize;
 this.assetFilter = options.assetFilter;
 }
+
+/**
+* @param {Compiler} compiler webpack compiler
+* @returns {void}
+*/
 apply(compiler) {
 const entrypointSizeLimit = this.maxEntrypointSize;
 const assetSizeLimit = this.maxAssetSize;
 const hints = this.hints;
-const assetFilter =
+const assetFilter =
+this.assetFilter || ((name, source, info) => !info.development);

 compiler.hooks.afterEmit.tap("SizeLimitsPlugin", compilation => {
 const warnings = [];

+/**
+* @param {Entrypoint} entrypoint an entrypoint
+* @returns {number} the size of the entrypoint
+*/
 const getEntrypointSize = entrypoint =>
 entrypoint.getFiles().reduce((currentSize, file) => {
-
-
+const asset = compilation.getAsset(file);
+if (
+asset &&
+assetFilter(asset.name, asset.source, asset.info) &&
+asset.source
+) {
+return currentSize + (asset.info.size || asset.source.size());
 }

 return currentSize;
 }, 0);

 const assetsOverSizeLimit = [];
-for (const
-if (!assetFilter(
+for (const { name, source, info } of compilation.getAssets()) {
+if (!assetFilter(name, source, info) || !source) {
 continue;
 }

-const
-const size = asset.size();
+const size = info.size || source.size();
 if (size > assetSizeLimit) {
 assetsOverSizeLimit.push({
-name
-size
+name,
+size
 });
-
+/** @type {any} */ (source).isOverSizeLimit = true;
 }
 }

+const fileFilter = name => {
+const asset = compilation.getAsset(name);
+return assetFilter(asset.name, asset.source, asset.info);
+};
+
 const entrypointsOverLimit = [];
-for (const
-const name = pair[0];
-const entry = pair[1];
+for (const [name, entry] of compilation.entrypoints) {
 const size = getEntrypointSize(entry);

 if (size > entrypointSizeLimit) {
 entrypointsOverLimit.push({
 name: name,
 size: size,
-files: entry.getFiles().filter(
+files: entry.getFiles().filter(fileFilter)
 });
-entry.isOverSizeLimit = true;
+/** @type {any} */ (entry).isOverSizeLimit = true;
 }
 }

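Note: with the SizeLimitsPlugin change, the default performance asset filter now receives (name, source, info), skips assets flagged development: true (such as the source map assets emitted above) and prefers info.size when it is set. A hedged webpack.config.js sketch of a custom filter using the same signature:

```js
// Illustrative excerpt only; the rest of the configuration is omitted.
module.exports = {
	performance: {
		hints: "warning",
		assetFilter: (name, source, info) =>
			!info.development && !name.endsWith(".map")
	}
};
```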
package/lib/wasm/WebAssemblyGenerator.js
CHANGED
@@ -70,7 +70,7 @@ const getImportedGlobals = ast => {

 t.traverse(ast, {
 ModuleImport({ node }) {
-if (t.isGlobalType(node.descr)
+if (t.isGlobalType(node.descr)) {
 importedGlobals.push(node);
 }
 }
@@ -79,12 +79,18 @@ const getImportedGlobals = ast => {
 return importedGlobals;
 };

+/**
+* Get the count for imported func
+*
+* @param {Object} ast Module's AST
+* @returns {Number} - count
+*/
 const getCountImportedFunc = ast => {
 let count = 0;

 t.traverse(ast, {
 ModuleImport({ node }) {
-if (t.isFuncImportDescr(node.descr)
+if (t.isFuncImportDescr(node.descr)) {
 count++;
 }
 }
@@ -133,7 +139,7 @@ const getNextFuncIndex = (ast, countImportedFunc) => {
 };

 /**
-*
+* Creates an init instruction for a global type
 * @param {t.GlobalType} globalType the global type
 * @returns {t.Instruction} init expression
 */
@@ -156,7 +162,7 @@ const createDefaultInitForGlobal = globalType => {
 /**
 * Rewrite the import globals:
 * - removes the ModuleImport instruction
-* - injects at the same offset a mutable global of the same
+* - injects at the same offset a mutable global of the same type
 *
 * Since the imported globals are before the other global declarations, our
 * indices will be preserved.
@@ -172,7 +178,7 @@ const rewriteImportedGlobals = state => bin => {

 bin = editWithAST(state.ast, bin, {
 ModuleImport(path) {
-if (t.isGlobalType(path.node.descr)
+if (t.isGlobalType(path.node.descr)) {
 const globalType = path.node.descr;

 globalType.mutability = "var";
@@ -206,8 +212,8 @@ const rewriteImportedGlobals = state => bin => {

 additionalInitCode.push(
 /**
-* get_global in global
-* They have the same indices
+* get_global in global initializer only works for imported globals.
+* They have the same indices as the init params, so use the
 * same index.
 */
 t.instruction("get_local", [initialGlobalidx]),
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "webpack",
-"version": "4.39.1",
+"version": "4.40.1",
 "author": "Tobias Koppers @sokra",
 "description": "Packs CommonJs/AMD modules for the browser. Allows to split your codebase into multiple bundles, which can be loaded on demand. Support loaders to preprocess files, i.e. json, jsx, es7, css, less, ... and your custom stuff.",
 "license": "MIT",
@@ -53,8 +53,8 @@
 "husky": "^1.1.3",
 "i18n-webpack-plugin": "^1.0.0",
 "istanbul": "^0.4.5",
-"jest": "24.
-"jest-junit": "^
+"jest": "^24.9.0",
+"jest-junit": "^8.0.0",
 "json-loader": "^0.5.7",
 "json-schema-to-typescript": "^6.0.1",
 "less": "^3.9.0",
@@ -120,12 +120,12 @@
 "pretest": "yarn lint",
 "prelint": "yarn setup",
 "lint": "yarn code-lint && yarn jest-lint && yarn type-lint && yarn special-lint",
-"code-lint": "eslint --
+"code-lint": "eslint . --ext '.js' --cache",
 "type-lint": "tsc --pretty",
 "special-lint": "node tooling/inherit-types && node tooling/format-schemas && node tooling/compile-to-definitions",
 "special-lint-fix": "node tooling/inherit-types --write --override && node tooling/format-schemas --write && node tooling/compile-to-definitions --write",
 "fix": "yarn code-lint --fix && yarn special-lint-fix",
-"pretty": "prettier --loglevel warn --write \"*.{ts,js,json,yml,yaml}\" \"{setup,lib,bin,hot,buildin,benchmark,tooling,schemas}/**/*.{js,json}\" \"test/*.js\" \"test/{configCases,watchCases,statsCases,hotCases}/**/webpack.config.js\" \"examples/**/webpack.config.js\"",
+"pretty": "prettier --loglevel warn --write \"*.{ts,js,json,yml,yaml}\" \"{setup,lib,bin,hot,buildin,benchmark,tooling,schemas}/**/*.{js,json}\" \"test/*.js\" \"test/helpers/*.js\" \"test/{configCases,watchCases,statsCases,hotCases}/**/webpack.config.js\" \"examples/**/webpack.config.js\"",
 "jest-lint": "node --max-old-space-size=4096 node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.lint.js\" --no-verbose",
 "benchmark": "node --max-old-space-size=4096 --trace-deprecation node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.benchmark.js\" --runInBand",
 "cover": "yarn cover:all && yarn cover:report",
|