@module-federation/treeshake-server 0.0.1 → 2.0.1

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,15 +1,18 @@
1
1
  import { Hono } from "hono";
2
2
  import { cors } from "hono/cors";
3
3
  import pino from "pino";
4
+ import node_os from "node:os";
4
5
  import { zValidator } from "@hono/zod-validator";
5
6
  import { nanoid } from "nanoid";
7
+ import p_limit from "p-limit";
6
8
  import { z } from "zod";
7
9
  import { createHash } from "node:crypto";
8
- import node_fs from "node:fs";
9
- import node_os from "node:os";
10
+ import promises from "node:fs/promises";
10
11
  import node_path from "node:path";
11
12
  import { spawn } from "node:child_process";
12
13
  import json_stable_stringify from "json-stable-stringify";
14
+ import node_fs from "node:fs";
15
+ import { timeout } from "hono/timeout";
13
16
  import { serve } from "@hono/node-server";
14
17
  function createDiMiddleware(deps) {
15
18
  return async (c, next)=>{
@@ -75,14 +78,15 @@ function normalizeConfig(config) {
75
78
  return normalizedConfig;
76
79
  }
77
80
  function extractBuildConfig(config, type) {
78
- const { shared, plugins, target, libraryType, usedExports } = config;
81
+ const { shared, plugins, target, libraryType, usedExports, hostName } = config;
79
82
  return {
80
83
  shared,
81
84
  plugins,
82
85
  target,
83
86
  libraryType,
84
87
  usedExports,
85
- type
88
+ type,
89
+ hostName
86
90
  };
87
91
  }
88
92
  const UploadOptionsSchema = z.object({
@@ -211,11 +215,11 @@ const startPeriodicPrune = (intervalMs = DEFAULT_INTERVAL)=>{
211
215
  maybePrune();
212
216
  }, intervalMs);
213
217
  };
214
- const createUniqueTempDirByKey = (key)=>{
218
+ const createUniqueTempDirByKey = async (key)=>{
215
219
  const base = node_path.join(node_os.tmpdir(), `re-shake-share-${key}`);
216
220
  let candidate = base;
217
221
  for(;;)try {
218
- node_fs.mkdirSync(candidate, {
222
+ await promises.mkdir(candidate, {
219
223
  recursive: false
220
224
  });
221
225
  return candidate;
@@ -224,15 +228,22 @@ const createUniqueTempDirByKey = (key)=>{
224
228
  candidate = `${base}-${rand}`;
225
229
  }
226
230
  };
227
- const prepareProject = (config, excludeShared)=>{
231
+ const prepareProject = async (config, excludeShared)=>{
228
232
  const key = createHash('sha256').update(JSON.stringify(config)).digest('hex');
229
- const dir = createUniqueTempDirByKey(key);
230
- const templateDir = node_path.join(__dirname, '..', 'template', 're-shake-share');
231
- node_fs.cpSync(templateDir, dir, {
233
+ const dir = await createUniqueTempDirByKey(key);
234
+ const templateDir = node_path.join(__dirname, '.', 'template', 're-shake-share');
235
+ await promises.cp(templateDir, dir, {
232
236
  recursive: true
233
237
  });
234
238
  const pkgPath = node_path.join(dir, 'package.json');
235
- const pkg = JSON.parse(node_fs.readFileSync(pkgPath, 'utf-8'));
239
+ const indexPath = node_path.join(dir, 'index.ts');
240
+ const rspackConfigPath = node_path.join(dir, 'rspack.config.ts');
241
+ const [pkgContent, indexContent, rspackConfigContent] = await Promise.all([
242
+ promises.readFile(pkgPath, 'utf-8'),
243
+ promises.readFile(indexPath, 'utf-8'),
244
+ promises.readFile(rspackConfigPath, 'utf-8')
245
+ ]);
246
+ const pkg = JSON.parse(pkgContent);
236
247
  const deps = {
237
248
  ...pkg.dependencies || {}
238
249
  };
@@ -273,28 +284,29 @@ const prepareProject = (config, excludeShared)=>{
273
284
  });
274
285
  pluginOptionStr += '\n]';
275
286
  pkg.dependencies = deps;
276
- node_fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2));
287
+ const newPkgContent = JSON.stringify(pkg, null, 2);
277
288
  const sharedImportPlaceholder = "${SHARED_IMPORT}";
289
+ const newIndexContent = indexContent.replace(sharedImportPlaceholder, sharedImport);
278
290
  const pluginsPlaceholder = "${ PLUGINS }";
279
291
  const mfConfigPlaceholder = "${ MF_CONFIG }";
280
- const indexPath = node_path.join(dir, 'index.ts');
281
- const indexContent = node_fs.readFileSync(indexPath, 'utf-8');
282
- node_fs.writeFileSync(indexPath, indexContent.replace(sharedImportPlaceholder, sharedImport));
283
- const rspackConfigPath = node_path.join(dir, 'rspack.config.ts');
284
- let cfg = node_fs.readFileSync(rspackConfigPath, 'utf-8');
292
+ let cfg = rspackConfigContent;
285
293
  cfg += pluginImportStr;
286
294
  cfg = cfg.replace(pluginsPlaceholder, pluginOptionStr);
287
295
  cfg = cfg.replace(mfConfigPlaceholder, JSON.stringify(mfConfig, null, 2));
288
- node_fs.writeFileSync(rspackConfigPath, cfg);
296
+ await Promise.all([
297
+ promises.writeFile(pkgPath, newPkgContent),
298
+ promises.writeFile(indexPath, newIndexContent),
299
+ promises.writeFile(rspackConfigPath, cfg)
300
+ ]);
289
301
  return dir;
290
302
  };
291
303
  const installDependencies = async (cwd)=>{
292
304
  markInstallStart();
293
305
  try {
294
- await runCommand('pnpm i', {
306
+ await runCommand("pnpm i --ignore-scripts --prefer-offline --reporter=silent ", {
295
307
  cwd,
296
308
  env: {
297
- npm_config_registry: 'https://registry.npmjs.org/'
309
+ npm_config_registry: process.env.MF_NPM_REGISTRY || 'https://registry.npmjs.org/'
298
310
  }
299
311
  });
300
312
  } finally{
@@ -309,12 +321,13 @@ const buildProject = async (cwd, type)=>{
309
321
  cwd
310
322
  })));
311
323
  };
312
- const retrieveSharedFilepaths = (projectDir, type)=>{
324
+ const retrieveSharedFilepaths = async (projectDir, type)=>{
313
325
  const sharedFilepaths = [];
314
- const collectSharedFilepaths = (t)=>{
326
+ const collectSharedFilepaths = async (t)=>{
315
327
  const dir = 'full' === t ? 'full-shared' : 'dist';
316
328
  const distDir = node_path.join(projectDir, dir);
317
- const stats = JSON.parse(node_fs.readFileSync(node_path.join(distDir, STATS_NAME), 'utf-8'));
329
+ const statsContent = await promises.readFile(node_path.join(distDir, STATS_NAME), 'utf-8');
330
+ const stats = JSON.parse(statsContent);
318
331
  stats.shared.forEach((s)=>{
319
332
  const { name, version, fallback, fallbackName } = s;
320
333
  if (fallback && fallbackName) {
@@ -332,22 +345,31 @@ const retrieveSharedFilepaths = (projectDir, type)=>{
332
345
  }
333
346
  });
334
347
  };
335
- collectSharedFilepaths(type);
348
+ await collectSharedFilepaths(type);
336
349
  return sharedFilepaths;
337
350
  };
338
351
  const runBuild = async (normalizedConfig, excludeShared, type)=>{
339
- const tmpDir = prepareProject(normalizedConfig, excludeShared);
352
+ const tStart = Date.now();
353
+ const tmpDir = await prepareProject(normalizedConfig, excludeShared);
354
+ const tPrepare = Date.now();
355
+ logger_logger.info(`prepareProject took ${tPrepare - tStart}ms`);
340
356
  await installDependencies(tmpDir);
357
+ const tInstall = Date.now();
358
+ logger_logger.info(`installDependencies took ${tInstall - tPrepare}ms`);
341
359
  await buildProject(tmpDir, type);
342
- const sharedFilePaths = retrieveSharedFilepaths(tmpDir, type);
360
+ const tBuild = Date.now();
361
+ logger_logger.info(`buildProject took ${tBuild - tInstall}ms`);
362
+ const sharedFilePaths = await retrieveSharedFilepaths(tmpDir, type);
363
+ const tRetrieve = Date.now();
364
+ logger_logger.info(`retrieveSharedFilepaths took ${tRetrieve - tBuild}ms`);
343
365
  return {
344
366
  sharedFilePaths,
345
367
  dir: tmpDir
346
368
  };
347
369
  };
348
- function cleanUp(tmpDir) {
370
+ async function cleanUp(tmpDir) {
349
371
  if (!tmpDir) return;
350
- node_fs.rmSync(tmpDir, {
372
+ await promises.rm(tmpDir, {
351
373
  recursive: true,
352
374
  force: true
353
375
  });
@@ -359,7 +381,7 @@ const encodeName = function(name, prefix = '', withExt = false) {
359
381
  function retrieveGlobalName(mfName, sharedName, version) {
360
382
  return encodeName(`${mfName}_${sharedName}_${version}`);
361
383
  }
362
- const SERVER_VERSION = 'v0-011501';
384
+ const SERVER_VERSION = 'v0-0205';
363
385
  const UPLOADED_DIR = '_shared-tree-shaking';
364
386
  function createCacheHash(config, type) {
365
387
  const relevant = extractBuildConfig({
@@ -447,7 +469,7 @@ async function uploadToCacheStore(sharedFilePaths, normalizedConfig, store) {
447
469
  const jsonFilePath = filepath.replace(/\.js$/, '.json');
448
470
  const jsonFile = JSON.stringify(res);
449
471
  const jsonCdnUrl = cdnPath.replace(/\.js$/, '.json');
450
- node_fs.writeFileSync(jsonFilePath, jsonFile);
472
+ await promises.writeFile(jsonFilePath, jsonFile);
451
473
  await store.uploadFile(jsonFilePath, jsonCdnUrl);
452
474
  } catch (error) {
453
475
  logger_logger.error(`Failed to upload ${name}@${version} json file: ${error}`);
@@ -465,25 +487,26 @@ const downloadToFile = async (url, destPath)=>{
465
487
  const res = await fetch(url);
466
488
  if (!res.ok) throw new Error(`Download failed: ${res.status} ${res.statusText} - ${url}`);
467
489
  const buf = Buffer.from(await res.arrayBuffer());
468
- await node_fs.promises.mkdir(node_path.dirname(destPath), {
490
+ await promises.mkdir(node_path.dirname(destPath), {
469
491
  recursive: true
470
492
  });
471
- await node_fs.promises.writeFile(destPath, buf);
493
+ await promises.writeFile(destPath, buf);
472
494
  return destPath;
473
495
  };
474
- async function uploadProject(uploadResults, sharedFilePaths, normalizedConfig, publisher) {
475
- const tmpDir = createUniqueTempDirByKey(`upload-project${Date.now().toString()}`);
496
+ async function uploadProject(uploadResults, sharedFilePaths, normalizedConfig, publisher, store) {
497
+ const tmpDir = await createUniqueTempDirByKey(`upload-project${Date.now().toString()}`);
476
498
  const uploaded = [];
477
499
  try {
478
500
  for (const item of uploadResults)try {
479
- const config = normalizedConfig[normalizedKey(item.name, item.version)];
501
+ const sharedKey = normalizedKey(item.name, item.version);
502
+ const config = normalizedConfig[sharedKey];
480
503
  if (!config) {
481
504
  logger_logger.error(`No config found for ${item.name}`);
482
505
  continue;
483
506
  }
484
507
  const { uploadOptions } = config;
485
508
  if (!uploadOptions) throw new Error(`No uploadOptions found for ${item.name}`);
486
- const filename = node_path.basename(new URL(item.cdnUrl).pathname);
509
+ const filename = node_path.basename(new URL(item.cdnUrl, 'http://dummy.com').pathname);
487
510
  const localPath = node_path.join(tmpDir, filename);
488
511
  const hash = createCacheHash({
489
512
  ...config,
@@ -545,16 +568,20 @@ async function uploadProject(uploadResults, sharedFilePaths, normalizedConfig, p
545
568
  version: s.version,
546
569
  globalName: s.globalName,
547
570
  cdnUrl,
548
- type: s.type
571
+ type: s.type,
572
+ modules: s.modules,
573
+ canTreeShaking: s.canTreeShaking
549
574
  });
550
575
  } catch (error) {
551
576
  logger_logger.error(`Failed to upload ${s.name}@${s.version}: ${error}`);
552
577
  }
553
578
  return uploaded;
554
579
  } finally{
555
- node_fs.rmSync(tmpDir, {
580
+ promises.rm(tmpDir, {
556
581
  recursive: true,
557
582
  force: true
583
+ }).catch((err)=>{
584
+ logger_logger.error(`Failed to cleanup dir ${tmpDir}: ${err}`);
558
585
  });
559
586
  }
560
587
  }
@@ -563,14 +590,14 @@ async function upload(sharedFilePaths, uploadResults, normalizedConfig, uploadOp
563
590
  if (!uploadOptions) {
564
591
  const hydrated = await Promise.all(uploadResults.map(async (item)=>{
565
592
  if ('full' !== item.type) return item;
566
- const tmpDir = createUniqueTempDirByKey(`download-full-json${Date.now().toString()}`);
593
+ const tmpDir = await createUniqueTempDirByKey(`download-full-json${Date.now().toString()}`);
567
594
  const jsonPath = node_path.join(tmpDir, `${item.name}-${item.version}.json`);
568
595
  try {
569
596
  const tJson0 = Date.now();
570
597
  await downloadToFile(item.cdnUrl.replace('.js', '.json'), jsonPath);
571
598
  const tJson = Date.now() - tJson0;
572
599
  logger_logger.info(`Downloaded ${item.name}@${item.version} json in ${tJson}ms`);
573
- const jsonContent = JSON.parse(node_fs.readFileSync(jsonPath, 'utf8'));
600
+ const jsonContent = JSON.parse(await promises.readFile(jsonPath, 'utf8'));
574
601
  return {
575
602
  ...item,
576
603
  canTreeShaking: jsonContent.canTreeShaking,
@@ -583,7 +610,7 @@ async function upload(sharedFilePaths, uploadResults, normalizedConfig, uploadOp
583
610
  canTreeShaking: item.canTreeShaking ?? true
584
611
  };
585
612
  } finally{
586
- node_fs.rmSync(tmpDir, {
613
+ await promises.rm(tmpDir, {
587
614
  recursive: true,
588
615
  force: true
589
616
  });
@@ -595,9 +622,10 @@ async function upload(sharedFilePaths, uploadResults, normalizedConfig, uploadOp
595
622
  ];
596
623
  }
597
624
  if (!publisher) throw new Error('uploadOptions provided but no projectPublisher configured (configure the selected adapter to enable it or omit uploadOptions)');
598
- const projectUploadResults = await uploadProject(uploadResults, sharedFilePaths, normalizedConfig, publisher);
625
+ const projectUploadResults = await uploadProject(uploadResults, sharedFilePaths, normalizedConfig, publisher, store);
599
626
  return projectUploadResults;
600
627
  }
628
+ const buildLimit = p_limit(Math.max(1, node_os.cpus().length));
601
629
  const buildRoute = new Hono();
602
630
  buildRoute.post('/', zValidator('json', ConfigSchema), async (c)=>{
603
631
  const logger = c.get('logger');
@@ -610,16 +638,27 @@ buildRoute.post('/', zValidator('json', ConfigSchema), async (c)=>{
610
638
  const store = c.get('objectStore');
611
639
  const publisher = c.get('projectPublisher');
612
640
  try {
641
+ const t0 = Date.now();
613
642
  const { cacheItems, excludeShared, restConfig } = await retrieveCacheItems(normalizedConfig, 're-shake', store);
643
+ const tRetrieveCache = Date.now();
644
+ logger.info(`retrieveCacheItems took ${tRetrieveCache - t0}ms`);
614
645
  let sharedFilePaths = [];
615
646
  let dir;
616
647
  if (Object.keys(restConfig).length > 0) {
617
- const buildResult = await runBuild(normalizedConfig, excludeShared, 're-shake');
648
+ const buildResult = await buildLimit(()=>runBuild(normalizedConfig, excludeShared, 're-shake'));
618
649
  sharedFilePaths = buildResult.sharedFilePaths;
619
650
  dir = buildResult.dir;
620
651
  }
652
+ const tBuild = Date.now();
653
+ logger.info(`runBuild took ${tBuild - tRetrieveCache}ms`);
621
654
  const uploadResults = await upload(sharedFilePaths, cacheItems, normalizedConfig, body.uploadOptions, store, publisher);
622
- cleanUp(dir);
655
+ const tUpload = Date.now();
656
+ logger.info(`upload took ${tUpload - tBuild}ms`);
657
+ cleanUp(dir).catch((err)=>{
658
+ logger.error(`Failed to cleanup dir ${dir}: ${err}`);
659
+ });
660
+ const tCleanUp = Date.now();
661
+ logger.info(`cleanUp scheduled (non-blocking) took ${tCleanUp - tUpload}ms`);
623
662
  return c.json({
624
663
  jobId,
625
664
  status: 'success',
@@ -657,16 +696,27 @@ async function handleCheckTreeshake(c, body) {
657
696
  const store = c.get('objectStore');
658
697
  const publisher = c.get('projectPublisher');
659
698
  try {
699
+ const t0 = Date.now();
660
700
  const { cacheItems, excludeShared, restConfig } = await retrieveCacheItems(normalizedConfig, 'full', store);
701
+ const tRetrieveCache = Date.now();
702
+ logger.info(`retrieveCacheItems took ${tRetrieveCache - t0}ms`);
661
703
  let sharedFilePaths = [];
662
704
  let dir;
663
705
  if (Object.keys(restConfig).length > 0) {
664
- const buildResult = await runBuild(normalizedConfig, excludeShared, 'full');
706
+ const buildResult = await buildLimit(()=>runBuild(normalizedConfig, excludeShared, 'full'));
665
707
  sharedFilePaths = buildResult.sharedFilePaths;
666
708
  dir = buildResult.dir;
667
709
  }
710
+ const tBuild = Date.now();
711
+ logger.info(`runBuild took ${tBuild - tRetrieveCache}ms`);
668
712
  const uploadResults = await upload(sharedFilePaths, cacheItems, normalizedConfig, body.uploadOptions, store, publisher);
669
- cleanUp(dir);
713
+ const tUpload = Date.now();
714
+ logger.info(`upload took ${tUpload - tBuild}ms`);
715
+ cleanUp(dir).catch((err)=>{
716
+ logger.error(`Failed to cleanup dir ${dir}: ${err}`);
717
+ });
718
+ const tCleanUp = Date.now();
719
+ logger.info(`cleanUp scheduled (non-blocking) took ${tCleanUp - tUpload}ms`);
670
720
  return c.json({
671
721
  jobId,
672
722
  status: 'success',
@@ -767,6 +817,7 @@ function createApp(deps, opts) {
767
817
  }));
768
818
  app.use('*', loggerMiddleware);
769
819
  app.use('*', createDiMiddleware(deps));
820
+ app.use('*', timeout(60000));
770
821
  if (null == opts ? void 0 : null == (_opts_appExtensions = opts.appExtensions) ? void 0 : _opts_appExtensions.length) for (const extend of opts.appExtensions)extend(app);
771
822
  app.get('/tree-shaking-shared/healthz', (c)=>c.json({
772
823
  status: 'ok',
@@ -786,7 +837,8 @@ function createServer(opts) {
786
837
  return serve({
787
838
  fetch: opts.app.fetch,
788
839
  port,
789
- hostname
840
+ hostname,
841
+ overrideGlobalObjects: false
790
842
  });
791
843
  }
792
844
  async function createAdapterDeps(params) {
@@ -832,7 +884,8 @@ class LocalObjectStore {
832
884
  localObjectStore_define_property(this, "rootDir", void 0);
833
885
  localObjectStore_define_property(this, "publicBaseUrl", void 0);
834
886
  this.rootDir = (null == opts ? void 0 : opts.rootDir) ?? node_path.join(process.cwd(), 'log', 'static');
835
- const base = (null == opts ? void 0 : opts.publicBaseUrl) ?? '/';
887
+ const port = process.env.PORT || 3000;
888
+ const base = (null == opts ? void 0 : opts.publicBaseUrl) === '/' ? `http://localhost:${port}/` : (null == opts ? void 0 : opts.publicBaseUrl) ?? '/';
836
889
  this.publicBaseUrl = base.endsWith('/') ? base : `${base}/`;
837
890
  }
838
891
  }