@datadog/webpack-plugin 2.1.0 → 2.2.0-dev-1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,7 +5,7 @@ import require$$0$1 from 'os';
  import { spawn } from 'child_process';
  import { EventEmitter as EventEmitter$1 } from 'node:events';
  import { URL as URL$1, fileURLToPath } from 'url';
- import require$$1$1, { resolve as resolve$1, extname, isAbsolute, normalize } from 'path';
+ import path$h, { resolve as resolve$1, extname, isAbsolute, normalize } from 'path';
  import { File, Buffer as Buffer$1 } from 'buffer';
  import require$$0$3, { Readable } from 'stream';
  import { createGzip } from 'zlib';
@@ -6765,6 +6765,12 @@ class TrackedFilesMatcher {
  }
  }
  }
+ displaySource(src) {
+ if (src.length <= 40) {
+ return src;
+ }
+ return `[...]${src.slice(-35)}`;
+ }
  // Looks up the sources declared in the sourcemap and return a list of related tracked files.
  matchSourcemap(srcmapPath, onSourcesNotFound) {
  const buff = fs$j.readFileSync(srcmapPath, "utf8");
@@ -6780,7 +6786,9 @@ class TrackedFilesMatcher {
  }
  const filtered = this.matchSources(sources);
  if (filtered.length === 0) {
- onSourcesNotFound(`Sources not in the tracked files.`);
+ onSourcesNotFound(
+ `${sources.map(this.displaySource).join(", ")} not in the tracked files.`
+ );
  return void 0;
  }
  return filtered;
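
Note on the change above: the new displaySource(src) method keeps the onSourcesNotFound message readable by truncating long source paths to their last 35 characters. A minimal standalone sketch of the same truncation rule (illustrative only, not part of the plugin's public API):

    const displaySource = (src) =>
      src.length <= 40 ? src : `[...]${src.slice(-35)}`;

    console.log(displaySource('webpack://my-app/src/index.js'));
    // Short paths pass through unchanged.
    console.log(displaySource('webpack://my-app/node_modules/some-dependency/dist/esm/index.js'));
    // Long paths are replaced by '[...]' plus their last 35 characters.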
@@ -8829,11 +8837,12 @@ const getLogger = (level = "warn", name) => (text, type = "debug") => log(text,
  const PLUGIN_NAME$2 = "global-context-plugin";
  const getGlobalContextPlugin = (opts, meta) => {
  const log = getLogger(opts.logLevel, "internal-global-context");
+ const cwd = process.cwd();
  const globalContext = {
  auth: opts.auth,
- cwd: process.cwd(),
+ cwd,
  version: meta.version,
- outputDir: process.cwd(),
+ outputDir: cwd,
  bundler: {
  name: meta.framework
  }
@@ -8847,6 +8856,9 @@ const getGlobalContextPlugin = (opts, meta) => {
  if (build.initialOptions.outdir) {
  globalContext.outputDir = build.initialOptions.outdir;
  }
+ if (build.initialOptions.outfile) {
+ globalContext.outputDir = path$h.dirname(build.initialOptions.outfile);
+ }
  build.initialOptions.metafile = true;
  build.onEnd((result) => {
  if (!result.metafile) {
@@ -8855,7 +8867,7 @@ const getGlobalContextPlugin = (opts, meta) => {
  }
  const files = [];
  for (const [output] of Object.entries(result.metafile.outputs)) {
- files.push({ filepath: require$$1$1.join(globalContext.outputDir, output) });
+ files.push({ filepath: path$h.join(cwd, output) });
  }
  globalContext.outputFiles = files;
  });
@@ -8869,7 +8881,7 @@ const getGlobalContextPlugin = (opts, meta) => {
  compiler.hooks.emit.tap(PLUGIN_NAME$2, (compilation) => {
  const files = [];
  for (const filename of Object.keys(compilation.assets)) {
- files.push({ filepath: require$$1$1.join(globalContext.outputDir, filename) });
+ files.push({ filepath: path$h.join(globalContext.outputDir, filename) });
  }
  globalContext.outputFiles = files;
  });
@@ -8877,11 +8889,35 @@ const getGlobalContextPlugin = (opts, meta) => {
  vite: {
  options(options) {
  globalContext.bundler.config = options;
+ },
+ outputOptions(options) {
+ if (options.dir) {
+ globalContext.outputDir = options.dir;
+ }
+ },
+ writeBundle(options, bundle) {
+ const files = [];
+ for (const filename of Object.keys(bundle)) {
+ files.push({ filepath: path$h.join(globalContext.outputDir, filename) });
+ }
+ globalContext.outputFiles = files;
  }
  },
  rollup: {
  options(options) {
  globalContext.bundler.config = options;
+ },
+ outputOptions(options) {
+ if (options.dir) {
+ globalContext.outputDir = options.dir;
+ }
+ },
+ writeBundle(options, bundle) {
+ const files = [];
+ for (const filename of Object.keys(bundle)) {
+ files.push({ filepath: path$h.join(globalContext.outputDir, filename) });
+ }
+ globalContext.outputFiles = files;
  }
  },
  rspack(compiler) {
@@ -9069,12 +9105,12 @@ if (typeof module !== "undefined") {
  }
 
  const decomposePath = (options, context, sourcemapFilePath) => {
- if (require$$1$1.extname(sourcemapFilePath) !== ".map") {
+ if (path$h.extname(sourcemapFilePath) !== ".map") {
  throw new Error(`The file ${chalk.green.bold(sourcemapFilePath)} is not a sourcemap.`);
  }
  const minifiedFilePath = sourcemapFilePath.replace(/\.map$/, "");
  const relativePath = minifiedFilePath.replace(context.outputDir, "");
- const minifiedUrl = options.minifiedPathPrefix ? require$$1$1.join(options.minifiedPathPrefix, relativePath) : relativePath;
+ const minifiedUrl = options.minifiedPathPrefix ? path$h.join(options.minifiedPathPrefix, relativePath) : relativePath;
  return {
  minifiedFilePath,
  minifiedUrl,
@@ -9096,6 +9132,17 @@ const getSourcemapsFiles = (options, context) => {
  return sourcemapFiles;
  };
 
+ const formatDuration = (duration) => {
+ const days = Math.floor(duration / 1e3 / 60 / 60 / 24);
+ const usedDuration = duration - days * 24 * 60 * 60 * 1e3;
+ const d = new Date(usedDuration);
+ const hours = d.getUTCHours();
+ const minutes = d.getUTCMinutes();
+ const seconds = d.getUTCSeconds();
+ const milliseconds = d.getUTCMilliseconds();
+ return `${days ? `${days}d ` : ""}${hours ? `${hours}h ` : ""}${minutes ? `${minutes}m ` : ""}${seconds ? `${seconds}s ` : ""}${milliseconds}ms`.trim();
+ };
+
  var retry$4 = {};
 
  function RetryOperation(timeouts, options) {
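
Note on the change above: formatDuration is moved earlier in the bundle (it is removed from its previous location further down in this diff) so the sourcemaps code can report elapsed upload time. Its behaviour, restated as a runnable sketch with sample outputs (same logic as the added lines above):

    const formatDuration = (duration) => {
      const days = Math.floor(duration / 1e3 / 60 / 60 / 24);
      const d = new Date(duration - days * 24 * 60 * 60 * 1e3);
      const hours = d.getUTCHours();
      const minutes = d.getUTCMinutes();
      const seconds = d.getUTCSeconds();
      const milliseconds = d.getUTCMilliseconds();
      return `${days ? `${days}d ` : ''}${hours ? `${hours}h ` : ''}${minutes ? `${minutes}m ` : ''}${seconds ? `${seconds}s ` : ''}${milliseconds}ms`.trim();
    };

    console.log(formatDuration(1234));     // '1s 234ms'
    console.log(formatDuration(63042));    // '1m 3s 42ms'
    console.log(formatDuration(93600000)); // '1d 2h 0ms'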
@@ -10187,8 +10234,8 @@ var _default = dist.default = PQueue;
 
  const SLASH_RX = /[/]+|[\\]+/g;
  const SLASH_TRIM_RX = /^[/]+|^[\\]+|[/]+$|[\\]+$/g;
- const prefixRepeat = (path, prefix) => {
- const pathParts = path.replace(SLASH_TRIM_RX, "").split(SLASH_RX);
+ const prefixRepeat = (filePath, prefix) => {
+ const pathParts = filePath.replace(SLASH_TRIM_RX, "").split(SLASH_RX);
  const prefixParts = prefix.replace(SLASH_TRIM_RX, "").split(SLASH_RX);
  const normalizedPath = pathParts.join("/");
  let result = "";
@@ -10200,13 +10247,13 @@ const prefixRepeat = (path, prefix) => {
  }
  return result;
  };
- const checkFile = async (path) => {
+ const checkFile = async (filePath) => {
  const validity = {
  empty: false,
  exists: true
  };
  try {
- const stats = await promises.stat(path);
+ const stats = await promises.stat(filePath);
  if (stats.size === 0) {
  validity.empty = true;
  }
@@ -10281,7 +10328,7 @@ const getPayload = async (sourcemap, metadata, prefix, git) => {
  sourcemap.sourcemapFilePath,
  (reason) => {
  warnings.push(
- `No tracked files found for sources contained in ${sourcemap.sourcemapFilePath}: "${reason}"`
+ `${path$h.basename(sourcemap.sourcemapFilePath)}: "${reason}"`
  );
  }
  ),
@@ -10322,6 +10369,8 @@ const getPayload = async (sourcemap, metadata, prefix, git) => {
  const errorCodesNoRetry = [400, 403, 413];
  const nbRetries = 5;
  const green = chalk.green.bold;
+ const yellow = chalk.yellow.bold;
+ const red = chalk.red.bold;
  const doRequest = async (url, getData2, onRetry) => {
  return retry$2(
  async (bail, attempt) => {
@@ -10337,23 +10386,23 @@ const doRequest = async (url, getData2, onRetry) => {
  duplex: "half"
  });
  } catch (error) {
- bail(new Error(error));
+ bail(error);
  return;
  }
  if (!response.ok) {
- const error = `HTTP ${response.status} ${response.statusText}`;
+ const errorMessage = `HTTP ${response.status} ${response.statusText}`;
  if (errorCodesNoRetry.includes(response.status)) {
- bail(new Error(error));
+ bail(new Error(errorMessage));
  return;
  } else {
- throw new Error(error);
+ throw new Error(errorMessage);
  }
  }
  try {
  const result = await response.json();
  return result;
  } catch (error) {
- bail(new Error(error));
+ bail(error);
  }
  },
  {
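
Note on the change above: doRequest now passes the original error object to bail(error) instead of wrapping it with bail(new Error(error)), which would have coerced the error to a string message and discarded its type. A small illustration of the difference (names are illustrative):

    class HttpError extends Error {}
    const original = new HttpError('HTTP 403 Forbidden');

    // Wrapping stringifies the original error and loses its subclass:
    const wrapped = new Error(original);
    console.log(wrapped.message);              // 'Error: HTTP 403 Forbidden'
    console.log(wrapped instanceof HttpError); // false

    // Forwarding the error as-is keeps its type, message and stack intact:
    console.log(original instanceof HttpError); // true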
@@ -10394,12 +10443,15 @@ const getData$1 = (payload, defaultHeaders = {}) => async () => {
  return { data, headers };
  };
  const upload = async (payloads, options, context, log) => {
+ const errors = [];
+ const warnings = [];
  if (!context.auth?.apiKey) {
- throw new Error("No authentication token provided");
+ errors.push({ error: new Error("No authentication token provided") });
+ return { errors, warnings };
  }
  if (payloads.length === 0) {
- log("No sourcemaps to upload", "warn");
- return;
+ warnings.push("No sourcemaps to upload");
+ return { errors, warnings };
  }
  const queue = new _default({ concurrency: options.maxConcurrency });
  const defaultHeaders = {
@@ -10407,6 +10459,7 @@ const upload = async (payloads, options, context, log) => {
  "DD-EVP-ORIGIN": `${context.bundler.name}-build-plugin_sourcemaps`,
  "DD-EVP-ORIGIN-VERSION": context.version
  };
+ const addPromises = [];
  for (const payload of payloads) {
  const metadata = {
  sourcemap: payload.content.get("source_map")?.path.replace(
@@ -10419,24 +10472,37 @@ const upload = async (payloads, options, context, log) => {
  )
  };
  log(`Queuing ${green(metadata.sourcemap)} | ${green(metadata.file)}`);
- queue.add(async () => {
- await doRequest(
- options.intakeUrl,
- getData$1(payload, defaultHeaders),
- (error, attempt) => {
- log(
- `Failed to upload sourcemaps: ${error.message}
- Retrying ${attempt}/${nbRetries}`,
- "warn"
+ addPromises.push(
+ queue.add(async () => {
+ try {
+ await doRequest(
+ options.intakeUrl,
+ getData$1(payload, defaultHeaders),
+ // On retry we store the error as a warning.
+ (error, attempt) => {
+ const warningMessage = `Failed to upload ${yellow(metadata.sourcemap)} | ${yellow(metadata.file)}:
+ ${error.message}
+ Retrying ${attempt}/${nbRetries}`;
+ warnings.push(warningMessage);
+ log(warningMessage, "warn");
+ }
  );
+ log(`Sent ${green(metadata.sourcemap)} | ${green(metadata.file)}`);
+ } catch (e) {
+ errors.push({ metadata, error: e });
+ if (options.bailOnError === true) {
+ throw e;
+ }
  }
- );
- log(`Sent ${green(metadata.sourcemap)} | ${green(metadata.file)}`);
- });
+ })
+ );
  }
- return queue.onIdle();
+ await Promise.all(addPromises);
+ await queue.onIdle();
+ return { warnings, errors };
  };
  const sendSourcemaps = async (sourcemaps, options, context, log) => {
+ const start = Date.now();
  const prefix = options.minifiedPathPrefix;
  const metadata = {
  git_repository_url: context.git?.remote,
@@ -10452,22 +10518,47 @@ const sendSourcemaps = async (sourcemaps, options, context, log) => {
  );
  const errors = payloads.map((payload) => payload.errors).flat();
  const warnings = payloads.map((payload) => payload.warnings).flat();
+ if (warnings.length > 0) {
+ log(`Warnings while preparing payloads:
+ - ${warnings.join("\n - ")}`, "warn");
+ }
  if (errors.length > 0) {
- const errorMsg = `Failed to upload sourcemaps:
+ const errorMsg = `Failed to prepare payloads, aborting upload :
  - ${errors.join("\n - ")}`;
  log(errorMsg, "error");
- throw new Error(errorMsg);
+ if (options.bailOnError === true) {
+ throw new Error(errorMsg);
+ }
+ return;
+ }
+ const { errors: uploadErrors, warnings: uploadWarnings } = await upload(
+ payloads,
+ options,
+ context,
+ log
+ );
+ log(
+ `Done uploading ${green(sourcemaps.length.toString())} sourcemaps in ${green(formatDuration(Date.now() - start))}.`,
+ "info"
+ );
+ if (uploadErrors.length > 0) {
+ const listOfErrors = ` - ${uploadErrors.map(({ metadata: fileMetadata, error }) => {
+ if (fileMetadata) {
+ return `${red(fileMetadata.file)} | ${red(fileMetadata.sourcemap)} : ${error.message}`;
+ }
+ return error.message;
+ }).join("\n - ")}`;
+ const errorMsg = `Failed to upload some sourcemaps:
+ ${listOfErrors}`;
+ log(errorMsg, "error");
+ if (options.bailOnError === true) {
+ throw new Error(errorMsg);
+ }
  }
- if (warnings.length > 0) {
+ if (uploadWarnings.length > 0) {
  log(`Warnings while uploading sourcemaps:
  - ${warnings.join("\n - ")}`, "warn");
  }
- try {
- await upload(payloads, options, context, log);
- } catch (error) {
- log(`Failed to upload sourcemaps: ${error.message}`, "error");
- throw error;
- }
  };
 
  const uploadSourcemaps = async (options, context, log) => {
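
Note on the changes above: upload() no longer throws on the first failure; it collects errors and warnings and returns them, and sendSourcemaps() logs a summary with the elapsed time and only throws when the new bailOnError option (default false, see the defaults added below) is set. A self-contained sketch of that collect-then-optionally-bail pattern (sendPayload and the payload shape are hypothetical stand-ins, not the plugin's API):

    // Hypothetical request helper standing in for doRequest(); fails for payloads marked `fail`.
    const sendPayload = async (payload) => {
      if (payload.fail) throw new Error(`HTTP 500 for ${payload.file}`);
    };

    const uploadAll = async (payloads, { bailOnError = false } = {}) => {
      const errors = [];
      await Promise.all(
        payloads.map(async (payload) => {
          try {
            await sendPayload(payload);
          } catch (error) {
            // Collect the failure instead of aborting the whole batch...
            errors.push({ file: payload.file, error });
            // ...unless the caller explicitly asked to bail on the first error.
            if (bailOnError) throw error;
          }
        })
      );
      return { errors };
    };

    // With bailOnError left at its default, every payload is attempted and the
    // single failure is reported at the end instead of aborting the upload.
    uploadAll([{ file: 'app.js.map' }, { file: 'vendor.js.map', fail: true }])
      .then(({ errors }) => console.log(`${errors.length} upload(s) failed`)); // 1 upload(s) failed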
@@ -10483,13 +10574,14 @@ const uploadSourcemaps = async (options, context, log) => {
  };
 
  const defaultIntakeUrl = `https://sourcemap-intake.${process.env.DATADOG_SITE || "datadoghq.com"}/api/v2/srcmap`;
- const validateOptions$1 = (config) => {
+ const validateOptions$1 = (config, log) => {
  const errors = [];
  const sourcemapsResults = validateSourcemapsOptions(config);
  errors.push(...sourcemapsResults.errors);
  if (errors.length) {
- throw new Error(`Invalid configuration for ${PLUGIN_NAME$1}:
- - ${errors.join("\n - ")}`);
+ log(`
+ - ${errors.join("\n - ")}`, "error");
+ throw new Error(`Invalid configuration for ${PLUGIN_NAME$1}.`);
  }
  const toReturn = {
  ...config[CONFIG_KEY$1],
@@ -10536,6 +10628,7 @@ const validateSourcemapsOptions = (config) => {
  }
  }
  const sourcemapsWithDefaults = {
+ bailOnError: false,
  dryRun: false,
  maxConcurrency: 20,
  intakeUrl: process.env.DATADOG_SOURCEMAP_INTAKE_URL || validatedOptions.sourcemaps.intakeUrl || defaultIntakeUrl,
@@ -10546,11 +10639,9 @@ const validateSourcemapsOptions = (config) => {
  return toReturn;
  };
 
- const helpers$2 = {
- // Add the helpers you'd like to expose here.
- };
  const getPlugins$2 = (opts, context) => {
- const rumOptions = validateOptions$1(opts);
+ const log = getLogger(opts.logLevel, PLUGIN_NAME$1);
+ const rumOptions = validateOptions$1(opts, log);
  return [
  {
  name: PLUGIN_NAME$1,
@@ -10558,7 +10649,6 @@ const getPlugins$2 = (opts, context) => {
  if (rumOptions.disabled) {
  return;
  }
- const log = getLogger(opts.logLevel, PLUGIN_NAME$1);
  if (rumOptions.sourcemaps) {
  await uploadSourcemaps(rumOptions, context, log);
  }
@@ -11695,7 +11785,7 @@ function retry () {
  };
  } (fs$i));
 
- const path$g = require$$1$1;
+ const path$g = path$h;
 
  // get drive on windows
  function getRootPath (p) {
@@ -11720,7 +11810,7 @@ var win32 = {
  };
 
  const fs$g = gracefulFs;
- const path$f = require$$1$1;
+ const path$f = path$h;
  const invalidWin32Path$1 = win32.invalidWin32Path;
 
  const o777$1 = parseInt('0777', 8);
@@ -11782,7 +11872,7 @@ function mkdirs$2 (p, opts, callback, made) {
  var mkdirs_1$1 = mkdirs$2;
 
  const fs$f = gracefulFs;
- const path$e = require$$1$1;
+ const path$e = path$h;
  const invalidWin32Path = win32.invalidWin32Path;
 
  const o777 = parseInt('0777', 8);
@@ -11850,7 +11940,7 @@ var mkdirs_1 = {
 
  const fs$e = gracefulFs;
  const os = require$$0$1;
- const path$d = require$$1$1;
+ const path$d = path$h;
 
  // HFS, ext{2,3}, FAT do not, Node.js v0.10 does not
  function hasMillisResSync () {
@@ -11939,7 +12029,7 @@ var buffer$1 = function (size) {
  };
 
  const fs$d = gracefulFs;
- const path$c = require$$1$1;
+ const path$c = path$h;
  const mkdirpSync$1 = mkdirs_1.mkdirsSync;
  const utimesSync = utimes$1.utimesMillisSync;
 
@@ -12147,7 +12237,7 @@ var pathExists_1 = {
  };
 
  const fs$b = gracefulFs;
- const path$b = require$$1$1;
+ const path$b = path$h;
  const mkdirp$1 = mkdirs_1.mkdirs;
  const pathExists$7 = pathExists_1.pathExists;
  const utimes = utimes$1.utimesMillis;
@@ -12397,7 +12487,7 @@ var copy$1 = {
  };
 
  const fs$a = gracefulFs;
- const path$a = require$$1$1;
+ const path$a = path$h;
  const assert = require$$5;
 
  const isWindows = (process.platform === 'win32');
@@ -12719,7 +12809,7 @@ var remove$3 = {
 
  const u$7 = universalify.fromCallback;
  const fs$9 = fs$j;
- const path$9 = require$$1$1;
+ const path$9 = path$h;
  const mkdir$5 = mkdirs_1;
  const remove$2 = remove$3;
 
@@ -12765,7 +12855,7 @@ var empty$2 = {
  };
 
  const u$6 = universalify.fromCallback;
- const path$8 = require$$1$1;
+ const path$8 = path$h;
  const fs$8 = gracefulFs;
  const mkdir$4 = mkdirs_1;
  const pathExists$6 = pathExists_1.pathExists;
@@ -12813,7 +12903,7 @@ var file$1 = {
  };
 
  const u$5 = universalify.fromCallback;
- const path$7 = require$$1$1;
+ const path$7 = path$h;
  const fs$7 = gracefulFs;
  const mkdir$3 = mkdirs_1;
  const pathExists$5 = pathExists_1.pathExists;
@@ -12872,7 +12962,7 @@ var link$1 = {
  createLinkSync
  };
 
- const path$6 = require$$1$1;
+ const path$6 = path$h;
  const fs$6 = gracefulFs;
  const pathExists$4 = pathExists_1.pathExists;
 
@@ -13001,7 +13091,7 @@ var symlinkType_1 = {
  };
 
  const u$4 = universalify.fromCallback;
- const path$5 = require$$1$1;
+ const path$5 = path$h;
  const fs$4 = gracefulFs;
  const _mkdirs = mkdirs_1;
  const mkdirs = _mkdirs.mkdirs;
@@ -13230,7 +13320,7 @@ var jsonfile = {
  writeJsonSync: jsonFile$3.writeFileSync
  };
 
- const path$4 = require$$1$1;
+ const path$4 = path$h;
  const mkdir$2 = mkdirs_1;
  const pathExists$2 = pathExists_1.pathExists;
  const jsonFile$2 = jsonfile;
@@ -13257,7 +13347,7 @@ function outputJson (file, data, options, callback) {
  }
  var outputJson_1 = outputJson;
  const fs$3 = gracefulFs;
- const path$3 = require$$1$1;
+ const path$3 = path$h;
  const mkdir$1 = mkdirs_1;
  const jsonFile$1 = jsonfile;
 
@@ -13289,7 +13379,7 @@ jsonFile.readJSONSync = jsonFile.readJsonSync;
  var json = jsonFile;
 
  const fs$2 = gracefulFs;
- const path$2 = require$$1$1;
+ const path$2 = path$h;
  const copySync = copySync$1.copySync;
  const removeSync = remove$3.removeSync;
  const mkdirpSync = mkdirs_1.mkdirsSync;
@@ -13406,7 +13496,7 @@ var moveSync_1 = {
 
  const u$1 = universalify.fromCallback;
  const fs$1 = gracefulFs;
- const path$1 = require$$1$1;
+ const path$1 = path$h;
  const copy = copy$1.copy;
  const remove$1 = remove$3.remove;
  const mkdirp = mkdirs_1.mkdirp;
@@ -13487,7 +13577,7 @@ var move_1 = {
 
  const u = universalify.fromCallback;
  const fs = gracefulFs;
- const path = require$$1$1;
+ const path = path$h;
  const mkdir = mkdirs_1;
  const pathExists = pathExists_1.pathExists;
 
@@ -13636,17 +13726,6 @@ const getValueContext = (args) => {
  }));
  };
 
- const formatDuration = (duration) => {
- const days = Math.floor(duration / 1e3 / 60 / 60 / 24);
- const usedDuration = duration - days * 24 * 60 * 60 * 1e3;
- const d = new Date(usedDuration);
- const hours = d.getUTCHours();
- const minutes = d.getUTCMinutes();
- const seconds = d.getUTCSeconds();
- const milliseconds = d.getUTCMilliseconds();
- return `${days ? `${days}d ` : ""}${hours ? `${hours}h ` : ""}${minutes ? `${minutes}m ` : ""}${seconds ? `${seconds}s ` : ""}${milliseconds}ms`.trim();
- };
-
  const outputFiles = async (context, outputOptions, log, cwd) => {
  const { report, metrics, bundler } = context;
  if (typeof outputOptions !== "string" && typeof outputOptions !== "object") {
@@ -13670,7 +13749,7 @@ const outputFiles = async (context, outputOptions, log, cwd) => {
  } else {
  destination = outputOptions;
  }
- const outputPath = require$$1$1.resolve(cwd, destination);
+ const outputPath = path$h.resolve(cwd, destination);
  try {
  const errors = {};
  const filesToWrite = {};
@@ -13700,7 +13779,7 @@ const outputFiles = async (context, outputOptions, log, cwd) => {
  const proms = Object.keys(filesToWrite).map((file) => {
  const start = Date.now();
  log(`Start writing ${file}.json.`);
- return writeFile(require$$1$1.join(outputPath, `${file}.json`), filesToWrite[file].content).then(() => {
+ return writeFile(path$h.join(outputPath, `${file}.json`), filesToWrite[file].content).then(() => {
  log(`Wrote ${file}.json in ${formatDuration(Date.now() - start)}`);
  }).catch((e) => {
  log(
@@ -13883,7 +13962,7 @@ const getInputsDependencies = (list, moduleName, deps = /* @__PURE__ */ new Set(
  };
  const getModulePath$1 = (fullPath, cwd) => {
  const filePath = fullPath.replace("pnp:", "").replace(cwd, "");
- return getDisplayName(require$$1$1.resolve(cwd, filePath), cwd);
+ return getDisplayName(path$h.resolve(cwd, filePath), cwd);
  };
  const getIndexed$1 = (stats, cwd) => {
  const inputsDependencies = {};
@@ -24686,7 +24765,7 @@ virtualStats.VirtualStats = VirtualStats;
  var __importDefault = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
- const path_1 = __importDefault(require$$1$1);
+ const path_1 = __importDefault(path$h);
  const virtual_stats_1 = virtualStats;
  let inode = 45000000;
  const ALL = 'all';
@@ -24970,7 +25049,7 @@ var VirtualModulesPlugin$1 = /*@__PURE__*/getDefaultExportFromCjs(lib);
 
  // node_modules/.pnpm/tsup@8.0.2_typescript@5.4.5/node_modules/tsup/assets/esm_shims.js
  var getFilename = () => fileURLToPath(import.meta.url);
- var getDirname = () => require$$1$1.dirname(getFilename());
+ var getDirname = () => path$h.dirname(getFilename());
  var __dirname = /* @__PURE__ */ getDirname();
 
  // node_modules/.pnpm/@jridgewell+sourcemap-codec@1.4.15/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
@@ -25771,7 +25850,7 @@ var ExtToLoader = {
  ".txt": "text"
  };
  function guessLoader(code, id) {
- return ExtToLoader[require$$1$1.extname(id).toLowerCase()] || "js";
+ return ExtToLoader[path$h.extname(id).toLowerCase()] || "js";
  }
  function unwrapLoader(loader, code, id) {
  if (typeof loader === "function")
@@ -25845,8 +25924,8 @@ function createBuildContext(initialOptions) {
  emitFile(emittedFile) {
  const outFileName = emittedFile.fileName || emittedFile.name;
  if (initialOptions.outdir && emittedFile.source && outFileName) {
- const outPath = require$$1$1.resolve(initialOptions.outdir, outFileName);
- const outDir = require$$1$1.dirname(outPath);
+ const outPath = path$h.resolve(initialOptions.outdir, outFileName);
+ const outDir = path$h.dirname(outPath);
  if (!fs$j.existsSync(outDir))
  fs$j.mkdirSync(outDir, { recursive: true });
  fs$j.writeFileSync(outPath, emittedFile.source);
@@ -26047,7 +26126,7 @@ function buildSetup(meta) {
  onLoad({ filter: onLoadFilter }, async (args) => {
  const id = args.path + args.suffix;
  const { errors, warnings, mixedContext } = createPluginContext(context);
- const resolveDir = require$$1$1.dirname(args.path);
+ const resolveDir = path$h.dirname(args.path);
  let code, map;
  if (plugin.load && (!plugin.loadInclude || plugin.loadInclude(id))) {
  const result = await plugin.load.call(mixedContext, id);
@@ -26078,7 +26157,7 @@ function buildSetup(meta) {
  if (plugin.transformInclude && !plugin.transformInclude(id))
  return;
  const { mixedContext, errors, warnings } = createPluginContext(context);
- const resolveDir = require$$1$1.dirname(args.path);
+ const resolveDir = path$h.dirname(args.path);
  let code = await args.getContents();
  let map;
  const result = await plugin.transform.call(mixedContext, code, id);
@@ -26170,7 +26249,7 @@ var ExtToLoader2 = {
  ".node": "napi"
  };
  function guessIdLoader(id) {
- return ExtToLoader2[require$$1$1.extname(id).toLowerCase()] || "js";
+ return ExtToLoader2[path$h.extname(id).toLowerCase()] || "js";
  }
  function transformQuery(context) {
  const queryParamsObject = {};
@@ -26260,7 +26339,7 @@ function toFarmPlugin(plugin, options) {
  farmPlugin.resolve = {
  filters: { sources: [".*", ...filters], importers: [".*"] },
  async executor(params, context) {
- const resolvedIdPath = require$$1$1.resolve(
+ const resolvedIdPath = path$h.resolve(
  process.cwd(),
  params.importer ?? ""
  );
@@ -26829,7 +26908,6 @@ function createUnplugin(factory) {
  const helpers = {
  // Each product should have a unique entry.
  // #helpers-injection-marker
- [CONFIG_KEY$1]: helpers$2,
  [CONFIG_KEY]: helpers$1
  // #helpers-injection-marker
  };
@@ -26837,6 +26915,9 @@ const buildPluginFactory = ({
  version
  }) => {
  return createUnplugin((options, unpluginMetaContext) => {
+ if ("esbuildHostName" in unpluginMetaContext) {
+ unpluginMetaContext.esbuildHostName = "datadog-plugins";
+ }
  const { globalContext, internalPlugins } = getInternalPlugins(options, {
  version,
  ...unpluginMetaContext
@@ -26854,7 +26935,7 @@
 
  var name = "@datadog/webpack-plugin";
  var packageManager = "yarn@4.0.2";
- var version = "2.1.0";
+ var version = "2.2.0-dev-1";
  var license = "MIT";
  var author = "Datadog";
  var description = "Datadog Webpack Plugin";
@@ -26919,6 +27000,7 @@ var devDependencies = {
  var peerDependencies = {
  webpack: ">= 4.x < 6.x"
  };
+ var stableVersion = "2.1.0";
  var pkg = {
  name: name,
  packageManager: packageManager,
@@ -26936,7 +27018,8 @@ var pkg = {
  files: files,
  scripts: scripts,
  devDependencies: devDependencies,
- peerDependencies: peerDependencies
+ peerDependencies: peerDependencies,
+ stableVersion: stableVersion
  };
 
  const datadogWebpackPlugin = buildPluginFactory({