webpack 5.68.0 → 5.70.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of webpack might be problematic.
- package/lib/BannerPlugin.js +10 -4
- package/lib/ChunkGraph.js +1 -2
- package/lib/CleanPlugin.js +64 -18
- package/lib/Compilation.js +43 -17
- package/lib/ContextModule.js +90 -26
- package/lib/ContextModuleFactory.js +65 -21
- package/lib/EntryOptionPlugin.js +1 -0
- package/lib/ExportsInfo.js +4 -4
- package/lib/Generator.js +1 -0
- package/lib/ModuleHashingError.js +29 -0
- package/lib/NodeStuffPlugin.js +10 -0
- package/lib/NormalModule.js +21 -16
- package/lib/NormalModuleFactory.js +40 -35
- package/lib/ProgressPlugin.js +4 -5
- package/lib/RuntimeTemplate.js +1 -0
- package/lib/TemplatedPathPlugin.js +48 -23
- package/lib/WebpackOptionsApply.js +2 -0
- package/lib/asset/AssetGenerator.js +122 -33
- package/lib/buildChunkGraph.js +1 -1
- package/lib/cache/ResolverCachePlugin.js +89 -28
- package/lib/config/browserslistTargetHandler.js +3 -5
- package/lib/config/defaults.js +7 -2
- package/lib/config/normalization.js +1 -0
- package/lib/css/CssLoadingRuntimeModule.js +63 -70
- package/lib/css/CssModulesPlugin.js +2 -1
- package/lib/debug/ProfilingPlugin.js +3 -4
- package/lib/dependencies/ContextDependencyHelpers.js +1 -1
- package/lib/dependencies/ContextElementDependency.js +8 -2
- package/lib/dependencies/ExportsInfoDependency.js +6 -0
- package/lib/dependencies/HarmonyAcceptImportDependency.js +5 -3
- package/lib/dependencies/HarmonyExportInitFragment.js +4 -1
- package/lib/dependencies/ImportContextDependency.js +0 -2
- package/lib/dependencies/ImportMetaContextDependency.js +35 -0
- package/lib/dependencies/ImportMetaContextDependencyParserPlugin.js +252 -0
- package/lib/dependencies/ImportMetaContextPlugin.js +59 -0
- package/lib/dependencies/LoaderPlugin.js +2 -0
- package/lib/dependencies/RequireContextDependency.js +0 -16
- package/lib/esm/ModuleChunkLoadingRuntimeModule.js +24 -8
- package/lib/index.js +5 -0
- package/lib/javascript/JavascriptModulesPlugin.js +27 -2
- package/lib/javascript/StartupHelpers.js +3 -2
- package/lib/library/AssignLibraryPlugin.js +8 -2
- package/lib/node/NodeTargetPlugin.js +1 -0
- package/lib/node/ReadFileChunkLoadingRuntimeModule.js +22 -7
- package/lib/node/RequireChunkLoadingRuntimeModule.js +22 -7
- package/lib/optimize/ConcatenatedModule.js +10 -4
- package/lib/schemes/HttpUriPlugin.js +68 -6
- package/lib/serialization/FileMiddleware.js +44 -9
- package/lib/util/compileBooleanMatcher.js +1 -1
- package/lib/util/deterministicGrouping.js +1 -1
- package/lib/util/identifier.js +65 -44
- package/lib/util/internalSerializables.js +2 -0
- package/lib/util/nonNumericOnlyHash.js +22 -0
- package/lib/util/semver.js +17 -10
- package/lib/web/JsonpChunkLoadingRuntimeModule.js +15 -5
- package/lib/webworker/ImportScriptsChunkLoadingRuntimeModule.js +30 -20
- package/module.d.ts +15 -0
- package/package.json +13 -13
- package/schemas/WebpackOptions.check.js +1 -1
- package/schemas/WebpackOptions.json +17 -1
- package/schemas/plugins/schemes/HttpUriPlugin.check.js +1 -1
- package/schemas/plugins/schemes/HttpUriPlugin.json +4 -0
- package/types.d.ts +203 -91
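
Note: the three new ImportMetaContext* files implement import.meta.webpackContext, the headline feature of 5.70.0 — a require.context equivalent usable from ESM code. A minimal usage sketch (option names per the webpack 5.70 release notes; treat the exact option set as illustrative):

const templates = import.meta.webpackContext("./templates", {
	recursive: false,
	regExp: /\.ejs$/
});
for (const key of templates.keys()) {
	console.log(key, templates(key));
}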
package/lib/node/ReadFileChunkLoadingRuntimeModule.js
CHANGED
@@ -15,12 +15,33 @@ const { getInitialChunkIds } = require("../javascript/StartupHelpers");
 const compileBooleanMatcher = require("../util/compileBooleanMatcher");
 const { getUndoPath } = require("../util/identifier");
 
+/** @typedef {import("../Chunk")} Chunk */
+
 class ReadFileChunkLoadingRuntimeModule extends RuntimeModule {
 	constructor(runtimeRequirements) {
 		super("readFile chunk loading", RuntimeModule.STAGE_ATTACH);
 		this.runtimeRequirements = runtimeRequirements;
 	}
 
+	/**
+	 * @private
+	 * @param {Chunk} chunk chunk
+	 * @param {string} rootOutputDir root output directory
+	 * @returns {string} generated code
+	 */
+	_generateBaseUri(chunk, rootOutputDir) {
+		const options = chunk.getEntryOptions();
+		if (options && options.baseUri) {
+			return `${RuntimeGlobals.baseURI} = ${JSON.stringify(options.baseUri)};`;
+		}
+
+		return `${RuntimeGlobals.baseURI} = require("url").pathToFileURL(${
+			rootOutputDir
+				? `__dirname + ${JSON.stringify("/" + rootOutputDir)}`
+				: "__filename"
+		});`;
+	}
+
 	/**
 	 * @returns {string} runtime code
 	 */
@@ -67,13 +88,7 @@ class ReadFileChunkLoadingRuntimeModule extends RuntimeModule {
 
 		return Template.asString([
 			withBaseURI
-				? Template.asString([
-						`${RuntimeGlobals.baseURI} = require("url").pathToFileURL(${
-							rootOutputDir
-								? `__dirname + ${JSON.stringify("/" + rootOutputDir)}`
-								: "__filename"
-						});`
-				  ])
+				? this._generateBaseUri(chunk, rootOutputDir)
 				: "// no baseURI",
 			"",
 			"// object to store loaded chunks",
package/lib/node/RequireChunkLoadingRuntimeModule.js
CHANGED
@@ -15,12 +15,33 @@ const { getInitialChunkIds } = require("../javascript/StartupHelpers");
 const compileBooleanMatcher = require("../util/compileBooleanMatcher");
 const { getUndoPath } = require("../util/identifier");
 
+/** @typedef {import("../Chunk")} Chunk */
+
 class RequireChunkLoadingRuntimeModule extends RuntimeModule {
 	constructor(runtimeRequirements) {
 		super("require chunk loading", RuntimeModule.STAGE_ATTACH);
 		this.runtimeRequirements = runtimeRequirements;
 	}
 
+	/**
+	 * @private
+	 * @param {Chunk} chunk chunk
+	 * @param {string} rootOutputDir root output directory
+	 * @returns {string} generated code
+	 */
+	_generateBaseUri(chunk, rootOutputDir) {
+		const options = chunk.getEntryOptions();
+		if (options && options.baseUri) {
+			return `${RuntimeGlobals.baseURI} = ${JSON.stringify(options.baseUri)};`;
+		}
+
+		return `${RuntimeGlobals.baseURI} = require("url").pathToFileURL(${
+			rootOutputDir !== "./"
+				? `__dirname + ${JSON.stringify("/" + rootOutputDir)}`
+				: "__filename"
+		});`;
+	}
+
 	/**
	 * @returns {string} runtime code
	 */
@@ -67,13 +88,7 @@ class RequireChunkLoadingRuntimeModule extends RuntimeModule {
 
 		return Template.asString([
 			withBaseURI
-				? Template.asString([
-						`${RuntimeGlobals.baseURI} = require("url").pathToFileURL(${
-							rootOutputDir !== "./"
-								? `__dirname + ${JSON.stringify("/" + rootOutputDir)}`
-								: "__filename"
-						});`
-				  ])
+				? this._generateBaseUri(chunk, rootOutputDir)
 				: "// no baseURI",
 			"",
 			"// object to store loaded chunks",
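
Note: both Node runtime modules now prefer the baseUri entry option over deriving a file:// URL from __dirname; the extracted _generateBaseUri only falls back to pathToFileURL when no baseUri is set. A hedged config sketch (entry description syntax as documented for webpack 5):

module.exports = {
	target: "node",
	entry: {
		main: {
			import: "./src/index.js",
			// when set, emitted into the runtime as-is instead of pathToFileURL(__dirname)
			baseUri: "file:///opt/app/"
		}
	}
};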
package/lib/optimize/ConcatenatedModule.js
CHANGED
@@ -822,10 +822,6 @@ class ConcatenatedModule extends Module {
 		const topLevelDeclarations = this.buildInfo.topLevelDeclarations;
 		if (topLevelDeclarations !== undefined) {
 			for (const decl of m.buildInfo.topLevelDeclarations) {
-				// reserved names will always be renamed
-				if (RESERVED_NAMES.has(decl)) continue;
-				// TODO actually this is incorrect since with renaming there could be more
-				// We should do the renaming during build
 				topLevelDeclarations.add(decl);
 			}
 		}
@@ -1113,6 +1109,8 @@ class ConcatenatedModule extends Module {
 
 		// List of all used names to avoid conflicts
 		const allUsedNames = new Set(RESERVED_NAMES);
+		// Updated Top level declarations are created by renaming
+		const topLevelDeclarations = new Set();
 
 		// List of additional names in scope for module references
 		/** @type {Map<string, { usedNames: Set<string>, alreadyCheckedScopes: Set<TODO> }>} */
@@ -1257,6 +1255,7 @@ class ConcatenatedModule extends Module {
 					);
 					allUsedNames.add(newName);
 					info.internalNames.set(name, newName);
+					topLevelDeclarations.add(newName);
 					const source = info.source;
 					const allIdentifiers = new Set(
 						references.map(r => r.identifier).concat(variable.identifiers)
@@ -1283,6 +1282,7 @@ class ConcatenatedModule extends Module {
 				} else {
 					allUsedNames.add(name);
 					info.internalNames.set(name, name);
+					topLevelDeclarations.add(name);
 				}
 			}
 			let namespaceObjectName;
@@ -1300,6 +1300,7 @@ class ConcatenatedModule extends Module {
 				allUsedNames.add(namespaceObjectName);
 			}
 			info.namespaceObjectName = namespaceObjectName;
+			topLevelDeclarations.add(namespaceObjectName);
 			break;
 		}
 		case "external": {
@@ -1311,6 +1312,7 @@ class ConcatenatedModule extends Module {
 				);
 				allUsedNames.add(externalName);
 				info.name = externalName;
+				topLevelDeclarations.add(externalName);
 				break;
 			}
 		}
@@ -1323,6 +1325,7 @@ class ConcatenatedModule extends Module {
 				);
 				allUsedNames.add(externalNameInterop);
 				info.interopNamespaceObjectName = externalNameInterop;
+				topLevelDeclarations.add(externalNameInterop);
 			}
 			if (
 				info.module.buildMeta.exportsType === "default" &&
@@ -1336,6 +1339,7 @@ class ConcatenatedModule extends Module {
 				);
 				allUsedNames.add(externalNameInterop);
 				info.interopNamespaceObject2Name = externalNameInterop;
+				topLevelDeclarations.add(externalNameInterop);
 			}
 			if (
 				info.module.buildMeta.exportsType === "dynamic" ||
@@ -1349,6 +1353,7 @@ class ConcatenatedModule extends Module {
 				);
 				allUsedNames.add(externalNameInterop);
 				info.interopDefaultAccessName = externalNameInterop;
+				topLevelDeclarations.add(externalNameInterop);
 			}
 		}
 
@@ -1618,6 +1623,7 @@ ${defineGetters}`
 		const data = new Map();
 		if (chunkInitFragments.length > 0)
 			data.set("chunkInitFragments", chunkInitFragments);
+		data.set("topLevelDeclarations", topLevelDeclarations);
 
 		/** @type {CodeGenerationResult} */
 		const resultEntry = {
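
Note: previously topLevelDeclarations was copied from each module's buildInfo before scope hoisting renamed conflicting identifiers — the deleted TODO admits the list could be wrong. The set is now rebuilt from the post-rename names and attached to the code generation data, so consumers (e.g. library plugins checking for name clashes) see the identifiers that are actually emitted. A hypothetical before/after (names are illustrative, not webpack's exact renaming scheme):

// two concatenated modules both declare `helper`; hoisting renames one
// module-a.js: const helper = 1;   → emitted as: const helper = 1;
// module-b.js: const helper = 2;   → emitted as: const helper_b = 2;
// before: topLevelDeclarations reported "helper" twice ("helper_b" was missing)
// after:  the set matches the emitted code: ["helper", "helper_b"]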
package/lib/schemes/HttpUriPlugin.js
CHANGED
@@ -5,6 +5,7 @@
 
 "use strict";
 
+const EventEmitter = require("events");
 const { extname, basename } = require("path");
 const { URL } = require("url");
 const { createGunzip, createBrotliDecompress, createInflate } = require("zlib");
@@ -19,6 +20,44 @@ const memoize = require("../util/memoize");
 
 const getHttp = memoize(() => require("http"));
 const getHttps = memoize(() => require("https"));
+const proxyFetch = (request, proxy) => (url, options, callback) => {
+	const eventEmitter = new EventEmitter();
+	const doRequest = socket =>
+		request
+			.get(url, { ...options, ...(socket && { socket }) }, callback)
+			.on("error", eventEmitter.emit.bind(eventEmitter, "error"));
+
+	if (proxy) {
+		const { hostname: host, port } = new URL(proxy);
+
+		getHttp()
+			.request({
+				host, // IP address of proxy server
+				port, // port of proxy server
+				method: "CONNECT",
+				path: url.host
+			})
+			.on("connect", (res, socket) => {
+				if (res.statusCode === 200) {
+					// connected to proxy server
+					doRequest(socket);
+				}
+			})
+			.on("error", err => {
+				eventEmitter.emit(
+					"error",
+					new Error(
+						`Failed to connect to proxy server "${proxy}": ${err.message}`
+					)
+				);
+			})
+			.end();
+	} else {
+		doRequest();
+	}
+
+	return eventEmitter;
+};
 
 /** @type {(() => void)[] | undefined} */
 let inProgressWrite = undefined;
@@ -274,6 +313,7 @@ class HttpUriPlugin {
 		this._upgrade = options.upgrade;
 		this._frozen = options.frozen;
 		this._allowedUris = options.allowedUris;
+		this._proxy = options.proxy;
 	}
 
 	/**
@@ -282,15 +322,16 @@ class HttpUriPlugin {
 	 * @returns {void}
 	 */
 	apply(compiler) {
+		const proxy =
+			this._proxy || process.env["http_proxy"] || process.env["HTTP_PROXY"];
 		const schemes = [
 			{
 				scheme: "http",
-				fetch: (url, options, callback) => getHttp().get(url, options, callback)
+				fetch: proxyFetch(getHttp(), proxy)
 			},
 			{
 				scheme: "https",
-				fetch: (url, options, callback) =>
-					getHttps().get(url, options, callback)
+				fetch: proxyFetch(getHttps(), proxy)
 			}
 		];
 		let lockfileCache;
@@ -509,7 +550,7 @@ class HttpUriPlugin {
 
 		/**
		 * @param {string} url URL
-		 * @param {FetchResult} cachedResult result from cache
+		 * @param {FetchResult | RedirectFetchResult} cachedResult result from cache
		 * @param {function((Error | null)=, FetchResult=): void} callback callback
		 * @returns {void}
		 */
@@ -603,9 +644,30 @@ class HttpUriPlugin {
 					res.statusCode >= 301 &&
 					res.statusCode <= 308
 				) {
-					return finishWith({
+					const result = {
 						location: new URL(location, url).href
-					});
+					};
+					if (
+						!cachedResult ||
+						!("location" in cachedResult) ||
+						cachedResult.location !== result.location ||
+						cachedResult.validUntil < validUntil ||
+						cachedResult.storeLock !== storeLock ||
+						cachedResult.storeCache !== storeCache ||
+						cachedResult.etag !== etag
+					) {
+						return finishWith(result);
+					} else {
+						logger.debug(`GET ${url} [${res.statusCode}] (unchanged)`);
+						return callback(null, {
+							...result,
+							fresh: true,
+							storeLock,
+							storeCache,
+							validUntil,
+							etag
+						});
+					}
 				}
 				const contentType = res.headers["content-type"] || "";
 				const bufferArr = [];
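
Note: together these hunks add HTTP proxy support to experiments.buildHttp — requests tunnel through a CONNECT proxy taken from the new proxy option or the http_proxy/HTTP_PROXY environment variables (the HttpUriPlugin.json schema change above adds the option). The redirect hunk additionally treats an unchanged redirect as fresh instead of re-finishing it, so cached lockfile entries are not rewritten. A hedged config sketch:

module.exports = {
	experiments: {
		buildHttp: {
			allowedUris: ["https://cdn.example.com/"],
			// new in 5.70.0; falls back to http_proxy / HTTP_PROXY when omitted
			proxy: "http://127.0.0.1:8080"
		}
	}
};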
package/lib/serialization/FileMiddleware.js
CHANGED
@@ -40,6 +40,8 @@ Section -> Buffer
 
 // "wpc" + 1 in little-endian
 const VERSION = 0x01637077;
+const WRITE_LIMIT_TOTAL = 0x7fff0000;
+const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024;
 
 /**
  * @param {Buffer[]} buffers buffers
@@ -87,7 +89,7 @@ const readUInt64LE = Buffer.prototype.readBigUInt64LE
  * @param {FileMiddleware} middleware this
  * @param {BufferSerializableType[] | Promise<BufferSerializableType[]>} data data to be serialized
  * @param {string | boolean} name file base name
- * @param {function(string | false, Buffer[]): Promise<void>} writeFile writes a file
+ * @param {function(string | false, Buffer[], number): Promise<void>} writeFile writes a file
  * @param {string | Hash} hashFunction hash function to use
  * @returns {Promise<SerializeResult>} resulting file pointer and promise
  */
@@ -212,9 +214,9 @@ const serialize = async (
 			if (name === true) {
 				name = hashForName(buf, hashFunction);
 			}
-			backgroundJobs.push(writeFile(name, buf));
 			let size = 0;
 			for (const b of buf) size += b.length;
+			backgroundJobs.push(writeFile(name, buf, size));
 			return {
 				size,
 				name,
@@ -422,7 +424,7 @@ class FileMiddleware extends SerializerMiddleware {
 		// It's important that we don't touch existing files during serialization
 		// because serialize may read existing files (when deserializing)
 		const allWrittenFiles = new Set();
-		const writeFile = async (name, content) => {
+		const writeFile = async (name, content, size) => {
 			const file = name
 				? join(this.fs, filename, `../${name}${extension}`)
 				: filename;
@@ -441,10 +443,7 @@ class FileMiddleware extends SerializerMiddleware {
 						[zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
 						[zConstants.BROTLI_PARAM_QUALITY]: 2,
 						[zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
-						[zConstants.BROTLI_PARAM_SIZE_HINT]: content.reduce(
-							(size, b) => size + b.length,
-							0
-						)
+						[zConstants.BROTLI_PARAM_SIZE_HINT]: size
 					}
 				});
 			}
@@ -456,8 +455,44 @@ class FileMiddleware extends SerializerMiddleware {
 					stream.on("error", err => reject(err));
 					stream.on("finish", () => resolve());
 				}
-				for (const b of content) stream.write(b);
-				stream.end();
+				// split into chunks for WRITE_LIMIT_CHUNK size
+				const chunks = [];
+				for (const b of content) {
+					if (b.length < WRITE_LIMIT_CHUNK) {
+						chunks.push(b);
+					} else {
+						for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) {
+							chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK));
+						}
+					}
+				}
+
+				const len = chunks.length;
+				let i = 0;
+				const batchWrite = err => {
+					// will be handled in "on" error handler
+					if (err) return;
+
+					if (i === len) {
+						stream.end();
+						return;
+					}
+
+					// queue up a batch of chunks up to the write limit
+					// end is exclusive
+					let end = i;
+					let sum = chunks[end++].length;
+					while (end < len) {
+						sum += chunks[end].length;
+						if (sum > WRITE_LIMIT_TOTAL) break;
+						end++;
+					}
+					while (i < end - 1) {
+						stream.write(chunks[i++]);
+					}
+					stream.write(chunks[i++], batchWrite);
+				};
+				batchWrite();
 			});
 			if (name) allWrittenFiles.add(file);
 		};
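
Note: the pack-file writer now splits its output so that no single stream.write exceeds WRITE_LIMIT_CHUNK (511 MiB) and at most WRITE_LIMIT_TOTAL (just under 2 GiB) is queued before waiting on a write callback, which keeps very large persistent-cache packs from hitting platform write-size limits. A standalone sketch of the splitting rule with a tiny stand-in limit (hypothetical helper, not webpack API):

const LIMIT_CHUNK = 4; // stand-in for WRITE_LIMIT_CHUNK

const split = buffers => {
	const chunks = [];
	for (const b of buffers) {
		for (let i = 0; i < b.length; i += LIMIT_CHUNK) {
			chunks.push(b.subarray(i, i + LIMIT_CHUNK));
		}
	}
	return chunks;
};

console.log(split([Buffer.alloc(10), Buffer.alloc(3)]).map(c => c.length));
// → [ 4, 4, 2, 3 ] — every piece is at most LIMIT_CHUNK long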
package/lib/util/compileBooleanMatcher.js
CHANGED
@@ -18,7 +18,7 @@ const toSimpleString = str => {
 
 /**
  * @param {Record<string|number, boolean>} map value map
- * @returns {function(string): string} template function to determine the value at runtime
+ * @returns {boolean|(function(string): string)} true/false, when unconditionally true/false, or a template function to determine the value at runtime
  */
 const compileBooleanMatcher = map => {
 	const positiveItems = Object.keys(map).filter(i => map[i]);
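
Note: the corrected JSDoc reflects that compileBooleanMatcher short-circuits to a plain boolean when every value agrees; only mixed maps yield a template function. A hedged sketch of how callers use it (deep import of an internal util; the generated source is illustrative):

const compileBooleanMatcher = require("webpack/lib/util/compileBooleanMatcher");

compileBooleanMatcher({ a: true, b: true }); // true — unconditionally matched
const matcher = compileBooleanMatcher({ a: true, b: true, c: false });
// matcher("chunkId") returns JS source along the lines of /^(a|b)$/.test(chunkId)
console.log(matcher("chunkId"));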
package/lib/util/deterministicGrouping.js
CHANGED
@@ -498,7 +498,7 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
 
 	// return the results
 	return result.map(group => {
-		/** @type {GroupedItems} */
+		/** @type {GroupedItems<T>} */
 		return {
 			key: group.key,
 			items: group.nodes.map(node => node.item),
package/lib/util/identifier.js
CHANGED
@@ -81,7 +81,49 @@ const requestToAbsolute = (context, relativePath) => {
 	return relativePath;
 };
 
-const makeCacheable = fn => {
+const makeCacheable = realFn => {
+	/** @type {WeakMap<object, Map<string, ParsedResource>>} */
+	const cache = new WeakMap();
+
+	const getCache = associatedObjectForCache => {
+		const entry = cache.get(associatedObjectForCache);
+		if (entry !== undefined) return entry;
+		/** @type {Map<string, ParsedResource>} */
+		const map = new Map();
+		cache.set(associatedObjectForCache, map);
+		return map;
+	};
+
+	/**
+	 * @param {string} str the path with query and fragment
+	 * @param {Object=} associatedObjectForCache an object to which the cache will be attached
+	 * @returns {ParsedResource} parsed parts
+	 */
+	const fn = (str, associatedObjectForCache) => {
+		if (!associatedObjectForCache) return realFn(str);
+		const cache = getCache(associatedObjectForCache);
+		const entry = cache.get(str);
+		if (entry !== undefined) return entry;
+		const result = realFn(str);
+		cache.set(str, result);
+		return result;
+	};
+
+	fn.bindCache = associatedObjectForCache => {
+		const cache = getCache(associatedObjectForCache);
+		return str => {
+			const entry = cache.get(str);
+			if (entry !== undefined) return entry;
+			const result = realFn(str);
+			cache.set(str, result);
+			return result;
+		};
+	};
+
+	return fn;
+};
+
+const makeCacheableWithContext = fn => {
 	/** @type {WeakMap<object, Map<string, Map<string, string>>>} */
 	const cache = new WeakMap();
 
@@ -215,7 +257,7 @@ const _makePathsRelative = (context, identifier) => {
 		.join("");
 };
 
-exports.makePathsRelative = makeCacheable(_makePathsRelative);
+exports.makePathsRelative = makeCacheableWithContext(_makePathsRelative);
 
 /**
  *
@@ -230,7 +272,7 @@ const _makePathsAbsolute = (context, identifier) => {
 		.join("");
 };
 
-exports.makePathsAbsolute = makeCacheable(_makePathsAbsolute);
+exports.makePathsAbsolute = makeCacheableWithContext(_makePathsAbsolute);
 
 /**
  * @param {string} context absolute context path
@@ -244,7 +286,7 @@ const _contextify = (context, request) => {
 		.join("!");
 };
 
-const contextify = makeCacheable(_contextify);
+const contextify = makeCacheableWithContext(_contextify);
 exports.contextify = contextify;
 
 /**
@@ -259,13 +301,15 @@ const _absolutify = (context, request) => {
 		.join("!");
 };
 
-const absolutify = makeCacheable(_absolutify);
+const absolutify = makeCacheableWithContext(_absolutify);
 exports.absolutify = absolutify;
 
 const PATH_QUERY_FRAGMENT_REGEXP =
 	/^((?:\0.|[^?#\0])*)(\?(?:\0.|[^#\0])*)?(#.*)?$/;
+const PATH_QUERY_REGEXP = /^((?:\0.|[^?\0])*)(\?.*)?$/;
 
 /** @typedef {{ resource: string, path: string, query: string, fragment: string }} ParsedResource */
+/** @typedef {{ resource: string, path: string, query: string }} ParsedResourceWithoutFragment */
 
 /**
  * @param {string} str the path with query and fragment
@@ -280,47 +324,24 @@ const _parseResource = str => {
 		fragment: match[3] || ""
 	};
 };
-exports.parseResource = (realFn => {
-	/** @type {WeakMap<object, Map<string, ParsedResource>>} */
-	const cache = new WeakMap();
-
-	const getCache = associatedObjectForCache => {
-		const entry = cache.get(associatedObjectForCache);
-		if (entry !== undefined) return entry;
-		/** @type {Map<string, ParsedResource>} */
-		const map = new Map();
-		cache.set(associatedObjectForCache, map);
-		return map;
-	};
-
-	/**
-	 * @param {string} str the path with query and fragment
-	 * @param {Object=} associatedObjectForCache an object to which the cache will be attached
-	 * @returns {ParsedResource} parsed parts
-	 */
-	const fn = (str, associatedObjectForCache) => {
-		if (!associatedObjectForCache) return realFn(str);
-		const cache = getCache(associatedObjectForCache);
-		const entry = cache.get(str);
-		if (entry !== undefined) return entry;
-		const result = realFn(str);
-		cache.set(str, result);
-		return result;
-	};
-
-	fn.bindCache = associatedObjectForCache => {
-		const cache = getCache(associatedObjectForCache);
-		return str => {
-			const entry = cache.get(str);
-			if (entry !== undefined) return entry;
-			const result = realFn(str);
-			cache.set(str, result);
-			return result;
-		};
-	};
-
-	return fn;
-})(_parseResource);
+exports.parseResource = makeCacheable(_parseResource);
+
+/**
+ * Parse resource, skips fragment part
+ * @param {string} str the path with query and fragment
+ * @returns {ParsedResourceWithoutFragment} parsed parts
+ */
+const _parseResourceWithoutFragment = str => {
+	const match = PATH_QUERY_REGEXP.exec(str);
+	return {
+		resource: str,
+		path: match[1].replace(/\0(.)/g, "$1"),
+		query: match[2] ? match[2].replace(/\0(.)/g, "$1") : ""
+	};
+};
+exports.parseResourceWithoutFragment = makeCacheable(
+	_parseResourceWithoutFragment
+);
 
 /**
  * @param {string} filename the filename which should be undone
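
Note: the caching wrapper that parseResource previously carried inline is now the reusable makeCacheable, so the new fragment-agnostic parseResourceWithoutFragment gets the same WeakMap-backed memoization for free. A usage sketch of the shared shape (internal API, subject to change):

const { parseResource } = require("webpack/lib/util/identifier");

const associatedObject = {}; // cache entries live as long as this object
const parse = parseResource.bindCache(associatedObject);
console.log(parse("./style.css?inline#main"));
// → { resource: "./style.css?inline#main", path: "./style.css", query: "?inline", fragment: "#main" }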
package/lib/util/internalSerializables.js
CHANGED
@@ -126,6 +126,8 @@ module.exports = {
 		require("../dependencies/ImportMetaHotAcceptDependency"),
 	"dependencies/ImportMetaHotDeclineDependency": () =>
 		require("../dependencies/ImportMetaHotDeclineDependency"),
+	"dependencies/ImportMetaContextDependency": () =>
+		require("../dependencies/ImportMetaContextDependency"),
 	"dependencies/ProvidedDependency": () =>
 		require("../dependencies/ProvidedDependency"),
 	"dependencies/PureExpressionDependency": () =>
package/lib/util/nonNumericOnlyHash.js
ADDED
@@ -0,0 +1,22 @@
+/*
+	MIT License http://www.opensource.org/licenses/mit-license.php
+	Author Ivan Kopeykin @vankop
+*/
+
+"use strict";
+
+const A_CODE = "a".charCodeAt(0);
+
+/**
+ * @param {string} hash hash
+ * @param {number} hashLength hash length
+ * @returns {string} returns hash that has at least one non numeric char
+ */
+module.exports = (hash, hashLength) => {
+	if (hashLength < 1) return "";
+	const slice = hash.slice(0, hashLength);
+	if (slice.match(/[^\d]/)) return slice;
+	return `${String.fromCharCode(
+		A_CODE + (parseInt(hash[0], 10) % 6)
+	)}${slice.slice(1)}`;
+};
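
Note: the new helper rewrites a digits-only hash slice so it can start an identifier — it landed alongside the TemplatedPathPlugin changes above, where a [hash]/[contenthash] substitution with a leading digit would be invalid in contexts like CSS identifiers. Its behavior, traced by hand from the code above:

const nonNumericOnlyHash = require("webpack/lib/util/nonNumericOnlyHash");

nonNumericOnlyHash("123abc", 4); // "123a" — already contains a non-digit, kept as-is
nonNumericOnlyHash("1234567", 4); // "b234" — first char remapped into a–f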
package/lib/util/semver.js
CHANGED
@@ -89,7 +89,9 @@ exports.versionLt = versionLt;
  */
 exports.parseRange = str => {
 	const splitAndConvert = str => {
-		return str.split(".").map(item => (`${+item}` === item ? +item : item));
+		return str
+			.split(".")
+			.map(item => (item !== "NaN" && `${+item}` === item ? +item : item));
 	};
 	// see https://docs.npmjs.com/misc/semver#range-grammar for grammar
 	const parsePartial = str => {
@@ -131,13 +133,15 @@ exports.parseRange = str => {
 		return [-range[0] - 1, ...range.slice(1)];
 	};
 	const parseSimple = str => {
-		// simple
-		// primitive
-		// tilde
-		// caret
+		// simple ::= primitive | partial | tilde | caret
+		// primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | '!' ) ( ' ' ) * partial
+		// tilde ::= '~' ( ' ' ) * partial
+		// caret ::= '^' ( ' ' ) * partial
 		const match = /^(\^|~|<=|<|>=|>|=|v|!)/.exec(str);
 		const start = match ? match[0] : "";
-		const remainder = parsePartial(str.slice(start.length));
+		const remainder = parsePartial(
+			start.length ? str.slice(start.length).trim() : str.trim()
+		);
 		switch (start) {
 			case "^":
 				if (remainder.length > 1 && remainder[1] === 0) {
@@ -191,11 +195,14 @@ exports.parseRange = str => {
 		return [, ...arr, ...items.slice(1).map(() => fn)];
 	};
 	const parseRange = str => {
-		// range ::= hyphen | simple ( ' ' simple ) * | ''
-		// hyphen ::= partial ' - ' partial
-		const items = str.split(" - ");
+		// range ::= hyphen | simple ( ' ' ( ' ' ) * simple ) * | ''
+		// hyphen ::= partial ( ' ' ) * ' - ' ( ' ' ) * partial
+		const items = str.split(/\s+-\s+/);
 		if (items.length === 1) {
-			const items = str.split(" ").map(parseSimple);
+			const items = str
+				.trim()
+				.split(/(?<=[-0-9A-Za-z])\s+/g)
+				.map(parseSimple);
 			return combine(items, 2);
 		}
 		const a = parsePartial(items[0]);