webpack 5.88.0 → 5.88.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.

@@ -16,6 +16,7 @@ const { getEntryRuntime, mergeRuntime } = require("./util/runtime");
  /** @typedef {import("./Compilation")} Compilation */
  /** @typedef {import("./DependenciesBlock")} DependenciesBlock */
  /** @typedef {import("./Dependency")} Dependency */
+ /** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */
  /** @typedef {import("./Entrypoint")} Entrypoint */
  /** @typedef {import("./Module")} Module */
  /** @typedef {import("./ModuleGraph")} ModuleGraph */
@@ -39,15 +40,15 @@ const { getEntryRuntime, mergeRuntime } = require("./util/runtime");
  * @typedef {Object} ChunkGroupInfo
  * @property {ChunkGroup} chunkGroup the chunk group
  * @property {RuntimeSpec} runtime the runtimes
- * @property {ModuleSetPlus} minAvailableModules current minimal set of modules available at this point
- * @property {boolean} minAvailableModulesOwned true, if minAvailableModules is owned and can be modified
+ * @property {ModuleSetPlus | undefined} minAvailableModules current minimal set of modules available at this point
+ * @property {boolean | undefined} minAvailableModulesOwned true, if minAvailableModules is owned and can be modified
  * @property {ModuleSetPlus[]} availableModulesToBeMerged enqueued updates to the minimal set of available modules
  * @property {Set<Module>=} skippedItems modules that were skipped because module is already available in parent chunks (need to reconsider when minAvailableModules is shrinking)
  * @property {Set<[Module, ConnectionState]>=} skippedModuleConnections referenced modules that where skipped because they were not active in this runtime
- * @property {ModuleSetPlus} resultingAvailableModules set of modules available including modules from this chunk group
- * @property {Set<ChunkGroupInfo>} children set of children chunk groups, that will be revisited when availableModules shrink
- * @property {Set<ChunkGroupInfo>} availableSources set of chunk groups that are the source for minAvailableModules
- * @property {Set<ChunkGroupInfo>} availableChildren set of chunk groups which depend on the this chunk group as availableSource
+ * @property {ModuleSetPlus | undefined} resultingAvailableModules set of modules available including modules from this chunk group
+ * @property {Set<ChunkGroupInfo> | undefined} children set of children chunk groups, that will be revisited when availableModules shrink
+ * @property {Set<ChunkGroupInfo> | undefined} availableSources set of chunk groups that are the source for minAvailableModules
+ * @property {Set<ChunkGroupInfo> | undefined} availableChildren set of chunk groups which depend on the this chunk group as availableSource
  * @property {number} preOrderIndex next pre order index
  * @property {number} postOrderIndex next post order index
  * @property {boolean} chunkLoading has a chunk loading mechanism
@@ -199,6 +200,7 @@ const visitModules = (

  /** @type {RuntimeSpec | false} */
  let blockModulesMapRuntime = false;
+ /** @type {Map<DependenciesBlock, (Module | ConnectionState)[]>} */
  let blockModulesMap;

  /**
@@ -239,7 +241,7 @@ const visitModules = (
  extractBlockModules(module, moduleGraph, runtime, blockModulesMap);
  blockModules = blockModulesMap.get(block);
  logger.timeAggregate("visitModules: prepare");
- return blockModules;
+ return /** @type {(Module | ConnectionState)[]} */ (blockModules);
  }
  };

@@ -290,7 +292,7 @@ const visitModules = (
  for (const [chunkGroup, modules] of inputEntrypointsAndModules) {
  const runtime = getEntryRuntime(
  compilation,
- chunkGroup.name,
+ /** @type {string} */ (chunkGroup.name),
  chunkGroup.options
  );
  /** @type {ChunkGroupInfo} */
@@ -352,7 +354,9 @@ const visitModules = (
  const { chunkGroup } = chunkGroupInfo;
  chunkGroupInfo.availableSources = new Set();
  for (const parent of chunkGroup.parentsIterable) {
- const parentChunkGroupInfo = chunkGroupInfoMap.get(parent);
+ const parentChunkGroupInfo =
+ /** @type {ChunkGroupInfo} */
+ (chunkGroupInfoMap.get(parent));
  chunkGroupInfo.availableSources.add(parentChunkGroupInfo);
  if (parentChunkGroupInfo.availableChildren === undefined) {
  parentChunkGroupInfo.availableChildren = new Set();
@@ -399,15 +403,15 @@ const visitModules = (
  // 1. We create a chunk group with single chunk in it for this Block
  // but only once (blockChunkGroups map)
  let cgi = blockChunkGroups.get(b);
- /** @type {ChunkGroup} */
+ /** @type {ChunkGroup | undefined} */
  let c;
- /** @type {Entrypoint} */
+ /** @type {Entrypoint | undefined} */
  let entrypoint;
  const entryOptions = b.groupOptions && b.groupOptions.entryOptions;
  if (cgi === undefined) {
  const chunkName = (b.groupOptions && b.groupOptions.name) || b.chunkName;
  if (entryOptions) {
- cgi = namedAsyncEntrypoints.get(chunkName);
+ cgi = namedAsyncEntrypoints.get(/** @type {string} */ (chunkName));
  if (!cgi) {
  entrypoint = compilation.addAsyncEntrypoint(
  entryOptions,
@@ -505,7 +509,11 @@ const visitModules = (
  c = cgi.chunkGroup;
  if (c.isInitial()) {
  compilation.errors.push(
- new AsyncDependencyToInitialChunkError(chunkName, module, b.loc)
+ new AsyncDependencyToInitialChunkError(
+ /** @type {string} */ (chunkName),
+ module,
+ b.loc
+ )
  );
  c = chunkGroup;
  } else {
@@ -515,7 +523,7 @@ const visitModules = (
  }
  blockConnections.set(b, []);
  }
- blockChunkGroups.set(b, cgi);
+ blockChunkGroups.set(b, /** @type {ChunkGroupInfo} */ (cgi));
  } else if (entryOptions) {
  entrypoint = /** @type {Entrypoint} */ (cgi.chunkGroup);
  } else {
@@ -536,7 +544,7 @@ const visitModules = (
  connectList = new Set();
  queueConnect.set(chunkGroupInfo, connectList);
  }
- connectList.add(cgi);
+ connectList.add(/** @type {ChunkGroupInfo} */ (cgi));

  // TODO check if this really need to be done for each traversal
  // or if it is enough when it's queued when created
@@ -547,7 +555,7 @@ const visitModules = (
  module: module,
  chunk: c.chunks[0],
  chunkGroup: c,
- chunkGroupInfo: cgi
+ chunkGroupInfo: /** @type {ChunkGroupInfo} */ (cgi)
  });
  } else if (entrypoint !== undefined) {
  chunkGroupInfo.chunkGroup.addAsyncEntrypoint(entrypoint);
@@ -690,7 +698,7 @@ const visitModules = (
  const processQueue = () => {
  while (queue.length) {
  statProcessedQueueItems++;
- const queueItem = queue.pop();
+ const queueItem = /** @type {QueueItem} */ (queue.pop());
  module = queueItem.module;
  block = queueItem.block;
  chunk = queueItem.chunk;
@@ -1087,7 +1095,9 @@ const visitModules = (

  const processChunkGroupsForCombining = () => {
  for (const info of chunkGroupsForCombining) {
- for (const source of info.availableSources) {
+ for (const source of /** @type {Set<ChunkGroupInfo>} */ (
+ info.availableSources
+ )) {
  if (!source.minAvailableModules) {
  chunkGroupsForCombining.delete(info);
  break;
@@ -1106,7 +1116,9 @@ const visitModules = (
  }
  };
  // combine minAvailableModules from all resultingAvailableModules
- for (const source of info.availableSources) {
+ for (const source of /** @type {Set<ChunkGroupInfo>} */ (
+ info.availableSources
+ )) {
  const resultingAvailableModules =
  calculateResultingAvailableModules(source);
  mergeSet(resultingAvailableModules);
@@ -1126,7 +1138,9 @@ const visitModules = (
  for (const info of outdatedChunkGroupInfo) {
  // 1. Reconsider skipped items
  if (info.skippedItems !== undefined) {
- const { minAvailableModules } = info;
+ const minAvailableModules =
+ /** @type {ModuleSetPlus} */
+ (info.minAvailableModules);
  for (const module of info.skippedItems) {
  if (
  !minAvailableModules.has(module) &&
@@ -1147,7 +1161,9 @@ const visitModules = (

  // 2. Reconsider skipped connections
  if (info.skippedModuleConnections !== undefined) {
- const { minAvailableModules } = info;
+ const minAvailableModules =
+ /** @type {ModuleSetPlus} */
+ (info.minAvailableModules);
  for (const entry of info.skippedModuleConnections) {
  const [module, activeState] = entry;
  if (activeState === false) continue;
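
Most of the hunks above apply the same inline JSDoc cast, /** @type {T} */ (expr), which tells the checker to treat a value it sees as possibly undefined as T once the surrounding logic guarantees it exists. A minimal sketch of that pattern, not taken from webpack (the counts map and getCount helper are hypothetical):

/** @type {Map<string, number>} */
const counts = new Map([["a", 1]]);

/**
 * @param {string} key a key known to be present in `counts`
 * @returns {number} the stored count
 */
const getCount = key => {
	// Map#get is typed as `number | undefined`; the cast narrows it to `number`
	// because the caller guarantees the key exists.
	return /** @type {number} */ (counts.get(key));
};
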
@@ -20,19 +20,22 @@ const {
  /** @typedef {import("../../declarations/WebpackOptions").SnapshotOptions} SnapshotOptions */
  /** @typedef {import("../Cache").Etag} Etag */
  /** @typedef {import("../Compiler")} Compiler */
+ /** @typedef {import("../FileSystemInfo").ResolveBuildDependenciesResult} ResolveBuildDependenciesResult */
  /** @typedef {import("../FileSystemInfo").Snapshot} Snapshot */
  /** @typedef {import("../logging/Logger").Logger} Logger */
  /** @typedef {import("../serialization/ObjectMiddleware").ObjectDeserializerContext} ObjectDeserializerContext */
  /** @typedef {import("../serialization/ObjectMiddleware").ObjectSerializerContext} ObjectSerializerContext */
  /** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */

+ /** @typedef {Map<string, string | false>} ResolveResults */
+
  class PackContainer {
  /**
  * @param {Object} data stored data
  * @param {string} version version identifier
  * @param {Snapshot} buildSnapshot snapshot of all build dependencies
  * @param {Set<string>} buildDependencies list of all unresolved build dependencies captured
- * @param {Map<string, string | false>} resolveResults result of the resolved build dependencies
+ * @param {ResolveResults} resolveResults result of the resolved build dependencies
  * @param {Snapshot} resolveBuildDependenciesSnapshot snapshot of the dependencies of the build dependencies resolving
  */
  constructor(
@@ -977,6 +980,10 @@ class PackContent {
  }
  }

+ /**
+ * @param {Buffer} buf buffer
+ * @returns {Buffer} buffer that can be collected
+ */
  const allowCollectingMemory = buf => {
  const wasted = buf.buffer.byteLength - buf.byteLength;
  if (wasted > 8192 && (wasted > 1048576 || wasted > buf.byteLength)) {
@@ -996,10 +1003,10 @@ class PackFileCacheStrategy {
  * @param {Logger} options.logger a logger
  * @param {SnapshotOptions} options.snapshot options regarding snapshotting
  * @param {number} options.maxAge max age of cache items
- * @param {boolean} options.profile track and log detailed timing information for individual cache items
- * @param {boolean} options.allowCollectingMemory allow to collect unused memory created during deserialization
- * @param {false | "gzip" | "brotli"} options.compression compression used
- * @param {boolean} options.readonly disable storing cache into filesystem
+ * @param {boolean | undefined} options.profile track and log detailed timing information for individual cache items
+ * @param {boolean | undefined} options.allowCollectingMemory allow to collect unused memory created during deserialization
+ * @param {false | "gzip" | "brotli" | undefined} options.compression compression used
+ * @param {boolean | undefined} options.readonly disable storing cache into filesystem
  */
  constructor({
  compiler,
@@ -1048,7 +1055,7 @@ class PackFileCacheStrategy {
  this.newBuildDependencies = new LazySet();
  /** @type {Snapshot | undefined} */
  this.resolveBuildDependenciesSnapshot = undefined;
- /** @type {Map<string, string | false> | undefined} */
+ /** @type {ResolveResults | undefined} */
  this.resolveResults = undefined;
  /** @type {Snapshot | undefined} */
  this.buildSnapshot = undefined;
@@ -1080,7 +1087,7 @@ class PackFileCacheStrategy {
  let newBuildDependencies;
  /** @type {Snapshot} */
  let resolveBuildDependenciesSnapshot;
- /** @type {Map<string, string | false>} */
+ /** @type {ResolveResults | undefined} */
  let resolveResults;
  logger.time("restore cache container");
  return this.fileSerializer
@@ -1264,6 +1271,9 @@ class PackFileCacheStrategy {
  });
  }

+ /**
+ * @param {LazySet<string>} dependencies dependencies to store
+ */
  storeBuildDependencies(dependencies) {
  if (this.readonly) return;
  this.newBuildDependencies.addAll(dependencies);
@@ -1309,7 +1319,7 @@ class PackFileCacheStrategy {
  missing,
  resolveResults,
  resolveDependencies
- } = result;
+ } = /** @type {ResolveBuildDependenciesResult} */ (result);
  if (this.resolveResults) {
  for (const [key, value] of resolveResults) {
  this.resolveResults.set(key, value);
@@ -1404,7 +1414,7 @@ class PackFileCacheStrategy {
  const content = new PackContainer(
  pack,
  this.version,
- this.buildSnapshot,
+ /** @type {Snapshot} */ (this.buildSnapshot),
  updatedBuildDependencies,
  this.resolveResults,
  this.resolveBuildDependenciesSnapshot
@@ -109,6 +109,9 @@ CssLocalIdentifierDependency.Template = class CssLocalIdentifierDependencyTempla
  const used = moduleGraph
  .getExportInfo(module, dep.name)
  .getUsedName(dep.name, runtime);
+
+ if (!used) return;
+
  const moduleId = chunkGraph.getModuleId(module);
  const identifier =
  dep.prefix +
@@ -60,14 +60,27 @@ const chunkHasJs = (chunk, chunkGraph) => {
  : false;
  };

+ /**
+ * @param {Module} module a module
+ * @param {string} code the code
+ * @returns {string} generated code for the stack
+ */
  const printGeneratedCodeForStack = (module, code) => {
  const lines = code.split("\n");
  const n = `${lines.length}`.length;
  return `\n\nGenerated code for ${module.identifier()}\n${lines
- .map((line, i, lines) => {
- const iStr = `${i + 1}`;
- return `${" ".repeat(n - iStr.length)}${iStr} | ${line}`;
- })
+ .map(
+ /**
+ * @param {string} line the line
+ * @param {number} i the index
+ * @param {string[]} lines the lines
+ * @returns {string} the line with line number
+ */
+ (line, i, lines) => {
+ const iStr = `${i + 1}`;
+ return `${" ".repeat(n - iStr.length)}${iStr} | ${line}`;
+ }
+ )
  .join("\n")}`;
  };

@@ -443,7 +456,10 @@ class JavascriptModulesPlugin {
  context.__webpack_require__
  );
  } catch (e) {
- e.stack += printGeneratedCodeForStack(options.module, code);
+ e.stack += printGeneratedCodeForStack(
+ options.module,
+ /** @type {string} */ (code)
+ );
  throw e;
  }
  });
@@ -1007,6 +1023,9 @@ class JavascriptModulesPlugin {
  const useRequire =
  requireFunction || interceptModuleExecution || moduleUsed;

+ /**
+ * @type {{startup: string[], beforeStartup: string[], header: string[], afterStartup: string[], allowInlineStartup: boolean}}
+ */
  const result = {
  header: [],
  beforeStartup: [],
@@ -68,8 +68,8 @@ class AmdLibraryPlugin extends AbstractLibraryPlugin {
  }
  }
  return {
- name: /** @type {string=} */ (name),
- amdContainer: /** @type {string=} */ (amdContainer)
+ name: /** @type {string} */ (name),
+ amdContainer: /** @type {string} */ (amdContainer)
  };
  }

@@ -17,6 +17,7 @@ const AbstractLibraryPlugin = require("./AbstractLibraryPlugin");
  /** @typedef {import("../../declarations/WebpackOptions").LibraryOptions} LibraryOptions */
  /** @typedef {import("../../declarations/WebpackOptions").LibraryType} LibraryType */
  /** @typedef {import("../Chunk")} Chunk */
+ /** @typedef {import("../Compilation")} Compilation */
  /** @typedef {import("../Compilation").ChunkHashContext} ChunkHashContext */
  /** @typedef {import("../Compiler")} Compiler */
  /** @typedef {import("../Module")} Module */
@@ -59,6 +60,7 @@ const accessWithInit = (accessor, existingLength, initLast = false) => {
  let i = 1;

  // all properties printed so far (excluding base)
+ /** @type {string[] | undefined} */
  let propsSoFar;

  // if there is existingLength, print all properties until this position as property access
@@ -142,7 +144,7 @@ class AssignLibraryPlugin extends AbstractLibraryPlugin {
  }
  }
  return {
- name: /** @type {string|string[]=} */ (name),
+ name: /** @type {string | string[]} */ (name),
  export: library.export
  };
  }
@@ -173,12 +175,22 @@ class AssignLibraryPlugin extends AbstractLibraryPlugin {
  moduleGraph.addExtraReason(module, "used as library export");
  }

+ /**
+ * @param {Compilation} compilation the compilation
+ * @returns {string[]} the prefix
+ */
  _getPrefix(compilation) {
  return this.prefix === "global"
  ? [compilation.runtimeTemplate.globalObject]
  : this.prefix;
  }

+ /**
+ * @param {AssignLibraryPluginParsed} options the library options
+ * @param {Chunk} chunk the chunk
+ * @param {Compilation} compilation the compilation
+ * @returns {Array<string>} the resolved full name
+ */
  _getResolvedFullName(options, chunk, compilation) {
  const prefix = this._getPrefix(compilation);
  const fullName = options.name ? prefix.concat(options.name) : prefix;
@@ -12,6 +12,10 @@
  /** @type {WeakMap<Compiler, Set<LibraryType>>} */
  const enabledTypes = new WeakMap();

+ /**
+ * @param {Compiler} compiler the compiler instance
+ * @returns {Set<LibraryType>} enabled types
+ */
  const getEnabledTypes = compiler => {
  let set = enabledTypes.get(compiler);
  if (set === undefined) {
@@ -59,7 +59,7 @@ class SystemLibraryPlugin extends AbstractLibraryPlugin {
  );
  }
  return {
- name: /** @type {string=} */ (name)
+ name: /** @type {string} */ (name)
  };
  }

@@ -148,6 +148,10 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
  requiredExternals = externals;
  }

+ /**
+ * @param {string} str the string to replace
+ * @returns {string} the replaced keys
+ */
  const replaceKeys = str => {
  return compilation.getPath(str, {
  chunk
@@ -178,6 +182,10 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
  );
  };

+ /**
+ * @param {string} type the type
+ * @returns {string} external require array
+ */
  const externalsRequireArray = type => {
  return replaceKeys(
  externals
@@ -185,7 +193,9 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
  let expr;
  let request = m.request;
  if (typeof request === "object") {
- request = request[type];
+ request =
+ /** @type {Record<string, string | string[]>} */
+ (request)[type];
  }
  if (request === undefined) {
  throw new Error(
@@ -246,6 +256,10 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {

  const { auxiliaryComment, namedDefine, names } = options;

+ /**
+ * @param {keyof LibraryCustomUmdCommentObject} type type
+ * @returns {string} comment
+ */
  const getAuxiliaryComment = type => {
  if (auxiliaryComment) {
  if (typeof auxiliaryComment === "string")
@@ -299,7 +313,11 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
  " else\n" +
  " " +
  replaceKeys(
- accessorAccess("root", names.root || names.commonjs)
+ accessorAccess(
+ "root",
+ /** @type {string | string[]} */ (names.root) ||
+ /** @type {string} */ (names.commonjs)
+ )
  ) +
  " = factory(" +
  externalsRootArray(externals) +
@@ -19,7 +19,7 @@ class StackedCacheMap {

  /**
  * @param {ReadonlyMap<K, V>} map map to add
- * @param {boolean} immutable if 'map' is immutable and StackedCacheMap can keep referencing it
+ * @param {boolean=} immutable if 'map' is immutable and StackedCacheMap can keep referencing it
  */
  addAll(map, immutable) {
  if (immutable) {
@@ -76,7 +76,7 @@ function getScheme(specifier) {

  /**
  * @param {string} specifier specifier
- * @returns {string|null} protocol if absolute URL specifier provided
+ * @returns {string | null | undefined} protocol if absolute URL specifier provided
  */
  function getProtocol(specifier) {
  const scheme = getScheme(specifier);
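
Two annotation forms recur in the typedef changes above: {T | undefined} marks a property that always exists on the object but may not hold a value yet, while {T=} marks an optional parameter. A minimal sketch of the distinction, not taken from webpack (ExampleInfo and getChildren are hypothetical):

/**
 * @typedef {Object} ExampleInfo
 * @property {Set<string> | undefined} children filled in lazily on first use
 */

/**
 * @param {ExampleInfo} info the info record
 * @param {boolean=} create create the set when it is missing
 * @returns {Set<string> | undefined} the children set, if any
 */
const getChildren = (info, create) => {
	if (info.children === undefined && create) {
		// the property exists from the start, but its value is created on demand
		info.children = new Set();
	}
	return info.children;
};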