webpack 5.68.0 → 5.69.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webpack might be problematic.
- package/lib/ChunkGraph.js +1 -2
- package/lib/Compilation.js +2 -0
- package/lib/ExportsInfo.js +4 -4
- package/lib/NormalModuleFactory.js +25 -27
- package/lib/ProgressPlugin.js +1 -1
- package/lib/TemplatedPathPlugin.js +48 -23
- package/lib/asset/AssetGenerator.js +3 -2
- package/lib/buildChunkGraph.js +1 -1
- package/lib/config/defaults.js +7 -2
- package/lib/css/CssLoadingRuntimeModule.js +63 -70
- package/lib/css/CssModulesPlugin.js +2 -1
- package/lib/debug/ProfilingPlugin.js +3 -4
- package/lib/dependencies/ContextElementDependency.js +8 -2
- package/lib/dependencies/ExportsInfoDependency.js +6 -0
- package/lib/index.js +5 -0
- package/lib/javascript/JavascriptModulesPlugin.js +27 -2
- package/lib/javascript/StartupHelpers.js +3 -2
- package/lib/library/AssignLibraryPlugin.js +8 -2
- package/lib/node/NodeTargetPlugin.js +1 -0
- package/lib/optimize/ConcatenatedModule.js +10 -4
- package/lib/schemes/HttpUriPlugin.js +24 -3
- package/lib/serialization/FileMiddleware.js +44 -9
- package/lib/util/compileBooleanMatcher.js +1 -1
- package/lib/util/deterministicGrouping.js +1 -1
- package/lib/util/identifier.js +65 -44
- package/lib/util/nonNumericOnlyHash.js +22 -0
- package/lib/util/semver.js +17 -10
- package/package.json +12 -12
- package/types.d.ts +39 -18
package/lib/javascript/StartupHelpers.js CHANGED
@@ -14,6 +14,7 @@ const { getAllChunks } = require("./ChunkHelpers");
 /** @typedef {import("../Chunk")} Chunk */
 /** @typedef {import("../Compilation")} Compilation */
 /** @typedef {import("../ChunkGraph")} ChunkGraph */
+/** @typedef {import("../ChunkGraph").EntryModuleWithChunkGroup} EntryModuleWithChunkGroup */
 /** @typedef {import("../ChunkGroup")} ChunkGroup */
 /** @typedef {import("../RuntimeTemplate")} RuntimeTemplate */
 /** @typedef {(string|number)[]} EntryItem */
@@ -23,7 +24,7 @@ const EXPORT_PREFIX = "var __webpack_exports__ = ";
 /**
 * @param {ChunkGraph} chunkGraph chunkGraph
 * @param {RuntimeTemplate} runtimeTemplate runtimeTemplate
-* @param {
+* @param {EntryModuleWithChunkGroup[]} entries entries
 * @param {Chunk} chunk chunk
 * @param {boolean} passive true: passive startup with on chunks loaded
 * @returns {string} runtime code
@@ -101,7 +102,7 @@ exports.generateEntryStartup = (
 /**
 * @param {Hash} hash the hash to update
 * @param {ChunkGraph} chunkGraph chunkGraph
-* @param {
+* @param {EntryModuleWithChunkGroup[]} entries entries
 * @param {Chunk} chunk chunk
 * @returns {void}
 */
package/lib/library/AssignLibraryPlugin.js CHANGED
@@ -222,9 +222,15 @@ class AssignLibraryPlugin extends AbstractLibraryPlugin {
 * @param {LibraryContext<T>} libraryContext context
 * @returns {string | undefined} bailout reason
 */
-embedInRuntimeBailout(
+embedInRuntimeBailout(
+module,
+{ chunk, codeGenerationResults },
+{ options, compilation }
+) {
+const { data } = codeGenerationResults.get(module, chunk.runtime);
 const topLevelDeclarations =
-
+(data && data.get("topLevelDeclarations")) ||
+(module.buildInfo && module.buildInfo.topLevelDeclarations);
 if (!topLevelDeclarations)
 return "it doesn't tell about top level declarations.";
 const fullNameResolved = this._getResolvedFullName(
package/lib/optimize/ConcatenatedModule.js CHANGED
@@ -822,10 +822,6 @@ class ConcatenatedModule extends Module {
 const topLevelDeclarations = this.buildInfo.topLevelDeclarations;
 if (topLevelDeclarations !== undefined) {
 for (const decl of m.buildInfo.topLevelDeclarations) {
-// reserved names will always be renamed
-if (RESERVED_NAMES.has(decl)) continue;
-// TODO actually this is incorrect since with renaming there could be more
-// We should do the renaming during build
 topLevelDeclarations.add(decl);
 }
 }
@@ -1113,6 +1109,8 @@ class ConcatenatedModule extends Module {

 // List of all used names to avoid conflicts
 const allUsedNames = new Set(RESERVED_NAMES);
+// Updated Top level declarations are created by renaming
+const topLevelDeclarations = new Set();

 // List of additional names in scope for module references
 /** @type {Map<string, { usedNames: Set<string>, alreadyCheckedScopes: Set<TODO> }>} */
@@ -1257,6 +1255,7 @@ class ConcatenatedModule extends Module {
 );
 allUsedNames.add(newName);
 info.internalNames.set(name, newName);
+topLevelDeclarations.add(newName);
 const source = info.source;
 const allIdentifiers = new Set(
 references.map(r => r.identifier).concat(variable.identifiers)
@@ -1283,6 +1282,7 @@ class ConcatenatedModule extends Module {
 } else {
 allUsedNames.add(name);
 info.internalNames.set(name, name);
+topLevelDeclarations.add(name);
 }
 }
 let namespaceObjectName;
@@ -1300,6 +1300,7 @@ class ConcatenatedModule extends Module {
 allUsedNames.add(namespaceObjectName);
 }
 info.namespaceObjectName = namespaceObjectName;
+topLevelDeclarations.add(namespaceObjectName);
 break;
 }
 case "external": {
@@ -1311,6 +1312,7 @@ class ConcatenatedModule extends Module {
 );
 allUsedNames.add(externalName);
 info.name = externalName;
+topLevelDeclarations.add(externalName);
 break;
 }
 }
@@ -1323,6 +1325,7 @@ class ConcatenatedModule extends Module {
 );
 allUsedNames.add(externalNameInterop);
 info.interopNamespaceObjectName = externalNameInterop;
+topLevelDeclarations.add(externalNameInterop);
 }
 if (
 info.module.buildMeta.exportsType === "default" &&
@@ -1336,6 +1339,7 @@ class ConcatenatedModule extends Module {
 );
 allUsedNames.add(externalNameInterop);
 info.interopNamespaceObject2Name = externalNameInterop;
+topLevelDeclarations.add(externalNameInterop);
 }
 if (
 info.module.buildMeta.exportsType === "dynamic" ||
@@ -1349,6 +1353,7 @@ class ConcatenatedModule extends Module {
 );
 allUsedNames.add(externalNameInterop);
 info.interopDefaultAccessName = externalNameInterop;
+topLevelDeclarations.add(externalNameInterop);
 }
 }

@@ -1618,6 +1623,7 @@ ${defineGetters}`
 const data = new Map();
 if (chunkInitFragments.length > 0)
 data.set("chunkInitFragments", chunkInitFragments);
+data.set("topLevelDeclarations", topLevelDeclarations);

 /** @type {CodeGenerationResult} */
 const resultEntry = {
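The ConcatenatedModule hunks above collect the post-renaming top level declarations into a new set and publish it as "topLevelDeclarations" in the code generation data; that is what the AssignLibraryPlugin change earlier in this diff reads. A minimal sketch of the consumer side, assuming `module`, `chunk`, and `codeGenerationResults` come from a running compilation (the helper name is illustrative, not part of the diff):

// Sketch: prefer the renamed declarations from code generation data and fall
// back to buildInfo.topLevelDeclarations for modules that don't provide them.
const getTopLevelDeclarations = (module, chunk, codeGenerationResults) => {
	const { data } = codeGenerationResults.get(module, chunk.runtime);
	return (
		(data && data.get("topLevelDeclarations")) ||
		(module.buildInfo && module.buildInfo.topLevelDeclarations)
	);
};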
package/lib/schemes/HttpUriPlugin.js CHANGED
@@ -509,7 +509,7 @@ class HttpUriPlugin {

 /**
 * @param {string} url URL
-* @param {FetchResult} cachedResult result from cache
+* @param {FetchResult | RedirectFetchResult} cachedResult result from cache
 * @param {function((Error | null)=, FetchResult=): void} callback callback
 * @returns {void}
 */
@@ -603,9 +603,30 @@ class HttpUriPlugin {
 res.statusCode >= 301 &&
 res.statusCode <= 308
 ) {
-
+const result = {
 location: new URL(location, url).href
-}
+};
+if (
+!cachedResult ||
+!("location" in cachedResult) ||
+cachedResult.location !== result.location ||
+cachedResult.validUntil < validUntil ||
+cachedResult.storeLock !== storeLock ||
+cachedResult.storeCache !== storeCache ||
+cachedResult.etag !== etag
+) {
+return finishWith(result);
+} else {
+logger.debug(`GET ${url} [${res.statusCode}] (unchanged)`);
+return callback(null, {
+...result,
+fresh: true,
+storeLock,
+storeCache,
+validUntil,
+etag
+});
+}
 }
 const contentType = res.headers["content-type"] || "";
 const bufferArr = [];
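The second HttpUriPlugin hunk revalidates cached redirects: a 301–308 response only overwrites the cache entry when the redirect target or the cache metadata changed; otherwise the cached entry is reused and reported as fresh. A condensed sketch of that check, using the same field names as the hunk (the helper itself is not part of the diff):

// Sketch: true when a cached redirect entry can be reused as-is.
const isRedirectUnchanged = (cachedResult, result, { validUntil, storeLock, storeCache, etag }) =>
	Boolean(cachedResult) &&
	"location" in cachedResult &&
	cachedResult.location === result.location &&
	cachedResult.validUntil >= validUntil &&
	cachedResult.storeLock === storeLock &&
	cachedResult.storeCache === storeCache &&
	cachedResult.etag === etag;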
package/lib/serialization/FileMiddleware.js CHANGED
@@ -40,6 +40,8 @@ Section -> Buffer

 // "wpc" + 1 in little-endian
 const VERSION = 0x01637077;
+const WRITE_LIMIT_TOTAL = 0x7fff0000;
+const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024;

 /**
 * @param {Buffer[]} buffers buffers
@@ -87,7 +89,7 @@ const readUInt64LE = Buffer.prototype.readBigUInt64LE
 * @param {FileMiddleware} middleware this
 * @param {BufferSerializableType[] | Promise<BufferSerializableType[]>} data data to be serialized
 * @param {string | boolean} name file base name
-* @param {function(string | false, Buffer[]): Promise<void>} writeFile writes a file
+* @param {function(string | false, Buffer[], number): Promise<void>} writeFile writes a file
 * @param {string | Hash} hashFunction hash function to use
 * @returns {Promise<SerializeResult>} resulting file pointer and promise
 */
@@ -212,9 +214,9 @@ const serialize = async (
 if (name === true) {
 name = hashForName(buf, hashFunction);
 }
-backgroundJobs.push(writeFile(name, buf));
 let size = 0;
 for (const b of buf) size += b.length;
+backgroundJobs.push(writeFile(name, buf, size));
 return {
 size,
 name,
@@ -422,7 +424,7 @@ class FileMiddleware extends SerializerMiddleware {
 // It's important that we don't touch existing files during serialization
 // because serialize may read existing files (when deserializing)
 const allWrittenFiles = new Set();
-const writeFile = async (name, content) => {
+const writeFile = async (name, content, size) => {
 const file = name
 ? join(this.fs, filename, `../${name}${extension}`)
 : filename;
@@ -441,10 +443,7 @@ class FileMiddleware extends SerializerMiddleware {
 [zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
 [zConstants.BROTLI_PARAM_QUALITY]: 2,
 [zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
-[zConstants.BROTLI_PARAM_SIZE_HINT]:
-(size, b) => size + b.length,
-0
-)
+[zConstants.BROTLI_PARAM_SIZE_HINT]: size
 }
 });
 }
@@ -456,8 +455,44 @@ class FileMiddleware extends SerializerMiddleware {
 stream.on("error", err => reject(err));
 stream.on("finish", () => resolve());
 }
-
-
+// split into chunks for WRITE_LIMIT_CHUNK size
+const chunks = [];
+for (const b of content) {
+if (b.length < WRITE_LIMIT_CHUNK) {
+chunks.push(b);
+} else {
+for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) {
+chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK));
+}
+}
+}
+
+const len = chunks.length;
+let i = 0;
+const batchWrite = err => {
+// will be handled in "on" error handler
+if (err) return;
+
+if (i === len) {
+stream.end();
+return;
+}
+
+// queue up a batch of chunks up to the write limit
+// end is exclusive
+let end = i;
+let sum = chunks[end++].length;
+while (end < len) {
+sum += chunks[end].length;
+if (sum > WRITE_LIMIT_TOTAL) break;
+end++;
+}
+while (i < end - 1) {
+stream.write(chunks[i++]);
+}
+stream.write(chunks[i++], batchWrite);
+};
+batchWrite();
 });
 if (name) allWrittenFiles.add(file);
 };
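The FileMiddleware hunks pass the precomputed payload size through to the writer (now used directly as the Brotli size hint) and split the payload before streaming it, so no single write exceeds WRITE_LIMIT_CHUNK (511 MiB) and each queued batch stays under WRITE_LIMIT_TOTAL (roughly 2 GiB), presumably to keep very large persistent-cache writes below per-write size limits. A standalone sketch of just the splitting step, with the constant copied from the hunk (the function name is illustrative):

const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024;

// Sketch: slice an array of Buffers so no single piece exceeds the chunk limit.
const splitForWrite = content => {
	const chunks = [];
	for (const b of content) {
		if (b.length < WRITE_LIMIT_CHUNK) {
			chunks.push(b);
		} else {
			for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) {
				chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK));
			}
		}
	}
	return chunks;
};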
package/lib/util/compileBooleanMatcher.js CHANGED
@@ -18,7 +18,7 @@ const toSimpleString = str => {

 /**
 * @param {Record<string|number, boolean>} map value map
-* @returns {
+* @returns {boolean|(function(string): string)} true/false, when unconditionally true/false, or a template function to determine the value at runtime
 */
 const compileBooleanMatcher = map => {
 const positiveItems = Object.keys(map).filter(i => map[i]);
package/lib/util/deterministicGrouping.js CHANGED
@@ -498,7 +498,7 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {

 // return the results
 return result.map(group => {
-/** @type {GroupedItems} */
+/** @type {GroupedItems<T>} */
 return {
 key: group.key,
 items: group.nodes.map(node => node.item),
package/lib/util/identifier.js CHANGED
@@ -81,7 +81,49 @@ const requestToAbsolute = (context, relativePath) => {
 return relativePath;
 };

-const makeCacheable =
+const makeCacheable = realFn => {
+/** @type {WeakMap<object, Map<string, ParsedResource>>} */
+const cache = new WeakMap();
+
+const getCache = associatedObjectForCache => {
+const entry = cache.get(associatedObjectForCache);
+if (entry !== undefined) return entry;
+/** @type {Map<string, ParsedResource>} */
+const map = new Map();
+cache.set(associatedObjectForCache, map);
+return map;
+};
+
+/**
+* @param {string} str the path with query and fragment
+* @param {Object=} associatedObjectForCache an object to which the cache will be attached
+* @returns {ParsedResource} parsed parts
+*/
+const fn = (str, associatedObjectForCache) => {
+if (!associatedObjectForCache) return realFn(str);
+const cache = getCache(associatedObjectForCache);
+const entry = cache.get(str);
+if (entry !== undefined) return entry;
+const result = realFn(str);
+cache.set(str, result);
+return result;
+};
+
+fn.bindCache = associatedObjectForCache => {
+const cache = getCache(associatedObjectForCache);
+return str => {
+const entry = cache.get(str);
+if (entry !== undefined) return entry;
+const result = realFn(str);
+cache.set(str, result);
+return result;
+};
+};
+
+return fn;
+};
+
+const makeCacheableWithContext = fn => {
 /** @type {WeakMap<object, Map<string, Map<string, string>>>} */
 const cache = new WeakMap();

@@ -215,7 +257,7 @@ const _makePathsRelative = (context, identifier) => {
 .join("");
 };

-exports.makePathsRelative =
+exports.makePathsRelative = makeCacheableWithContext(_makePathsRelative);

 /**
 *
@@ -230,7 +272,7 @@ const _makePathsAbsolute = (context, identifier) => {
 .join("");
 };

-exports.makePathsAbsolute =
+exports.makePathsAbsolute = makeCacheableWithContext(_makePathsAbsolute);

 /**
 * @param {string} context absolute context path
@@ -244,7 +286,7 @@ const _contextify = (context, request) => {
 .join("!");
 };

-const contextify =
+const contextify = makeCacheableWithContext(_contextify);
 exports.contextify = contextify;

 /**
@@ -259,13 +301,15 @@ const _absolutify = (context, request) => {
 .join("!");
 };

-const absolutify =
+const absolutify = makeCacheableWithContext(_absolutify);
 exports.absolutify = absolutify;

 const PATH_QUERY_FRAGMENT_REGEXP =
 /^((?:\0.|[^?#\0])*)(\?(?:\0.|[^#\0])*)?(#.*)?$/;
+const PATH_QUERY_REGEXP = /^((?:\0.|[^?\0])*)(\?.*)?$/;

 /** @typedef {{ resource: string, path: string, query: string, fragment: string }} ParsedResource */
+/** @typedef {{ resource: string, path: string, query: string }} ParsedResourceWithoutFragment */

 /**
 * @param {string} str the path with query and fragment
@@ -280,47 +324,24 @@ const _parseResource = str => {
 fragment: match[3] || ""
 };
 };
-exports.parseResource = (
-/** @type {WeakMap<object, Map<string, ParsedResource>>} */
-const cache = new WeakMap();
+exports.parseResource = makeCacheable(_parseResource);

-
-
-
-
-
-
-
-
-
-
-
-* @param {Object=} associatedObjectForCache an object to which the cache will be attached
-* @returns {ParsedResource} parsed parts
-*/
-const fn = (str, associatedObjectForCache) => {
-if (!associatedObjectForCache) return realFn(str);
-const cache = getCache(associatedObjectForCache);
-const entry = cache.get(str);
-if (entry !== undefined) return entry;
-const result = realFn(str);
-cache.set(str, result);
-return result;
-};
-
-fn.bindCache = associatedObjectForCache => {
-const cache = getCache(associatedObjectForCache);
-return str => {
-const entry = cache.get(str);
-if (entry !== undefined) return entry;
-const result = realFn(str);
-cache.set(str, result);
-return result;
-};
+/**
+* Parse resource, skips fragment part
+* @param {string} str the path with query and fragment
+* @returns {ParsedResourceWithoutFragment} parsed parts
+*/
+const _parseResourceWithoutFragment = str => {
+const match = PATH_QUERY_REGEXP.exec(str);
+return {
+resource: str,
+path: match[1].replace(/\0(.)/g, "$1"),
+query: match[2] ? match[2].replace(/\0(.)/g, "$1") : ""
 };
-
-
-
+};
+exports.parseResourceWithoutFragment = makeCacheable(
+_parseResourceWithoutFragment
+);

 /**
 * @param {string} filename the filename which should be undone
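makeCacheable factors the previous inline caching of parseResource into a reusable wrapper keyed by a WeakMap of associated objects, and the new parseResourceWithoutFragment variant treats "#" as an ordinary path character instead of a fragment delimiter. A small usage sketch; the sample paths and cacheKeyObject are illustrative, and webpack itself typically passes something like compiler.root as the associated object:

const {
	parseResource,
	parseResourceWithoutFragment
} = require("webpack/lib/util/identifier");

const cacheKeyObject = {};

// Fragment is split off:
parseResource("./style.css?modules#header", cacheKeyObject);
// → { resource: "./style.css?modules#header", path: "./style.css", query: "?modules", fragment: "#header" }

// Fragment parsing is skipped, so "#" stays inside the query:
parseResourceWithoutFragment("./style.css?modules#header", cacheKeyObject);
// → { resource: "./style.css?modules#header", path: "./style.css", query: "?modules#header" }

// bindCache returns a one-argument parser bound to a single cache:
const parse = parseResource.bindCache(cacheKeyObject);
parse("./style.css?modules#header");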
package/lib/util/nonNumericOnlyHash.js ADDED
@@ -0,0 +1,22 @@
+/*
+MIT License http://www.opensource.org/licenses/mit-license.php
+Author Ivan Kopeykin @vankop
+*/
+
+"use strict";
+
+const A_CODE = "a".charCodeAt(0);
+
+/**
+* @param {string} hash hash
+* @param {number} hashLength hash length
+* @returns {string} returns hash that has at least one non numeric char
+*/
+module.exports = (hash, hashLength) => {
+if (hashLength < 1) return "";
+const slice = hash.slice(0, hashLength);
+if (slice.match(/[^\d]/)) return slice;
+return `${String.fromCharCode(
+A_CODE + (parseInt(hash[0], 10) % 6)
+)}${slice.slice(1)}`;
+};
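The new helper guarantees that a truncated hash contains at least one non-numeric character by remapping a leading digit into the a–f range (first digit modulo 6), so a hash-derived name can never consist of digits only. Usage sketch with illustrative inputs; results are worked out from the code above:

const nonNumericOnlyHash = require("webpack/lib/util/nonNumericOnlyHash");

nonNumericOnlyHash("123456abc", 4); // "b234" – "1234" is all digits, 1 % 6 = 1 maps the first char to "b"
nonNumericOnlyHash("a23456abc", 4); // "a234" – slice already contains a non-digit, returned unchanged
nonNumericOnlyHash("123456abc", 0); // ""     – non-positive lengths yield an empty string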
package/lib/util/semver.js CHANGED
@@ -89,7 +89,9 @@ exports.versionLt = versionLt;
 */
 exports.parseRange = str => {
 const splitAndConvert = str => {
-return str
+return str
+.split(".")
+.map(item => (item !== "NaN" && `${+item}` === item ? +item : item));
 };
 // see https://docs.npmjs.com/misc/semver#range-grammar for grammar
 const parsePartial = str => {
@@ -131,13 +133,15 @@ exports.parseRange = str => {
 return [-range[0] - 1, ...range.slice(1)];
 };
 const parseSimple = str => {
-// simple
-// primitive
-// tilde
-// caret
+// simple ::= primitive | partial | tilde | caret
+// primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | '!' ) ( ' ' ) * partial
+// tilde ::= '~' ( ' ' ) * partial
+// caret ::= '^' ( ' ' ) * partial
 const match = /^(\^|~|<=|<|>=|>|=|v|!)/.exec(str);
 const start = match ? match[0] : "";
-const remainder = parsePartial(
+const remainder = parsePartial(
+start.length ? str.slice(start.length).trim() : str.trim()
+);
 switch (start) {
 case "^":
 if (remainder.length > 1 && remainder[1] === 0) {
@@ -191,11 +195,14 @@ exports.parseRange = str => {
 return [, ...arr, ...items.slice(1).map(() => fn)];
 };
 const parseRange = str => {
-// range ::= hyphen | simple ( ' ' simple ) * | ''
-// hyphen ::= partial ' - ' partial
-const items = str.split(
+// range ::= hyphen | simple ( ' ' ( ' ' ) * simple ) * | ''
+// hyphen ::= partial ( ' ' ) * ' - ' ( ' ' ) * partial
+const items = str.split(/\s+-\s+/);
 if (items.length === 1) {
-const items = str
+const items = str
+.trim()
+.split(/(?<=[-0-9A-Za-z])\s+/g)
+.map(parseSimple);
 return combine(items, 2);
 }
 const a = parsePartial(items[0]);
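Alongside the expanded range-grammar comments, parseRange now splits hyphen ranges on a whitespace-surrounded "-" and splits lists of simple terms on whitespace that follows an alphanumeric character or "-", so extra spaces no longer produce empty terms. A small illustration of the two split expressions (the sample range strings are assumptions, not taken from webpack's tests):

// Hyphen ranges tolerate extra spaces around the dash:
"1.2.3  -  2.3.4".split(/\s+-\s+/); // ["1.2.3", "2.3.4"]

// Simple terms split only after an alphanumeric or "-", collapsing repeated whitespace:
">=1.0.0   <2.0.0".trim().split(/(?<=[-0-9A-Za-z])\s+/g); // [">=1.0.0", "<2.0.0"]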
package/package.json CHANGED
@@ -1,12 +1,12 @@
 {
 "name": "webpack",
-"version": "5.
+"version": "5.69.1",
 "author": "Tobias Koppers @sokra",
 "description": "Packs CommonJs/AMD modules for the browser. Allows to split your codebase into multiple bundles, which can be loaded on demand. Support loaders to preprocess files, i.e. json, jsx, es7, css, less, ... and your custom stuff.",
 "license": "MIT",
 "dependencies": {
-"@types/eslint-scope": "^3.7.
-"@types/estree": "^0.0.
+"@types/eslint-scope": "^3.7.3",
+"@types/estree": "^0.0.51",
 "@webassemblyjs/ast": "1.11.1",
 "@webassemblyjs/wasm-edit": "1.11.1",
 "@webassemblyjs/wasm-parser": "1.11.1",
@@ -39,8 +39,8 @@
 "@babel/core": "^7.11.1",
 "@babel/preset-react": "^7.10.4",
 "@types/es-module-lexer": "^0.4.1",
-"@types/jest": "^27.0
-"@types/node": "^
+"@types/jest": "^27.4.0",
+"@types/node": "^17.0.16",
 "assemblyscript": "^0.19.16",
 "babel-loader": "^8.1.0",
 "benchmark": "^2.1.4",
@@ -56,7 +56,7 @@
 "es6-promise-polyfill": "^1.2.0",
 "eslint": "^7.14.0",
 "eslint-config-prettier": "^8.1.0",
-"eslint-plugin-jest": "^24.
+"eslint-plugin-jest": "^24.7.0",
 "eslint-plugin-jsdoc": "^33.0.0",
 "eslint-plugin-node": "^11.0.0",
 "eslint-plugin-prettier": "^4.0.0",
@@ -66,10 +66,10 @@
 "husky": "^6.0.0",
 "is-ci": "^3.0.0",
 "istanbul": "^0.4.5",
-"jest": "^27.
-"jest-circus": "^27.
-"jest-cli": "^27.
-"jest-diff": "^27.
+"jest": "^27.5.0",
+"jest-circus": "^27.5.0",
+"jest-cli": "^27.5.0",
+"jest-diff": "^27.5.0",
 "jest-junit": "^13.0.0",
 "json-loader": "^0.5.7",
 "json5": "^2.1.3",
@@ -98,9 +98,9 @@
 "style-loader": "^2.0.0",
 "terser": "^5.7.0",
 "toml": "^3.0.0",
-"tooling": "webpack/tooling#v1.
+"tooling": "webpack/tooling#v1.21.0",
 "ts-loader": "^8.0.2",
-"typescript": "^4.
+"typescript": "^4.5.5",
 "url-loader": "^4.1.0",
 "wast-loader": "^1.11.0",
 "webassembly-feature": "1.3.0",