@pubm/core 0.4.4 → 0.4.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/assets/index.d.ts +1 -1
  2. package/dist/assets/index.d.ts.map +1 -1
  3. package/dist/assets/pipeline.d.ts +4 -2
  4. package/dist/assets/pipeline.d.ts.map +1 -1
  5. package/dist/assets/types.d.ts +7 -7
  6. package/dist/assets/types.d.ts.map +1 -1
  7. package/dist/config/defaults.d.ts.map +1 -1
  8. package/dist/config/loader.d.ts +1 -1
  9. package/dist/config/loader.d.ts.map +1 -1
  10. package/dist/config/types.d.ts +1 -0
  11. package/dist/config/types.d.ts.map +1 -1
  12. package/dist/context.d.ts +1 -0
  13. package/dist/context.d.ts.map +1 -1
  14. package/dist/ecosystem/ecosystem.d.ts +14 -1
  15. package/dist/ecosystem/ecosystem.d.ts.map +1 -1
  16. package/dist/ecosystem/infer.d.ts.map +1 -1
  17. package/dist/ecosystem/js.d.ts +9 -0
  18. package/dist/ecosystem/js.d.ts.map +1 -1
  19. package/dist/ecosystem/rust.d.ts +1 -1
  20. package/dist/ecosystem/rust.d.ts.map +1 -1
  21. package/dist/index.cjs +347 -174
  22. package/dist/index.js +350 -177
  23. package/dist/manifest/write-versions.d.ts +1 -1
  24. package/dist/manifest/write-versions.d.ts.map +1 -1
  25. package/dist/monorepo/discover.d.ts.map +1 -1
  26. package/dist/monorepo/resolve-workspace.d.ts.map +1 -1
  27. package/dist/plugin/runner.d.ts +1 -1
  28. package/dist/plugin/runner.d.ts.map +1 -1
  29. package/dist/plugin/types.d.ts +6 -6
  30. package/dist/plugin/types.d.ts.map +1 -1
  31. package/dist/registry/custom-registry.d.ts.map +1 -1
  32. package/dist/tasks/dry-run-publish.d.ts.map +1 -1
  33. package/dist/tasks/runner.d.ts.map +1 -1
  34. package/dist/utils/package-manager.d.ts +3 -2
  35. package/dist/utils/package-manager.d.ts.map +1 -1
  36. package/package.json +1 -1
package/dist/index.cjs CHANGED
@@ -20263,6 +20263,10 @@ __export(exports_src, {
20263
20263
  module.exports = __toCommonJS(exports_src);
20264
20264
 
20265
20265
  // src/plugin/runner.ts
20266
+ function isDefined(value) {
20267
+ return value !== undefined;
20268
+ }
20269
+
20266
20270
  class PluginRunner {
20267
20271
  plugins;
20268
20272
  constructor(plugins) {
@@ -20300,7 +20304,7 @@ class PluginRunner {
20300
20304
  }
20301
20305
  collectAssetHooks() {
20302
20306
  const collected = {};
20303
- const resolveChain = this.plugins.map((p) => p.hooks?.resolveAssets).filter(Boolean);
20307
+ const resolveChain = this.plugins.map((p) => p.hooks?.resolveAssets).filter(isDefined);
20304
20308
  if (resolveChain.length > 0) {
20305
20309
  collected.resolveAssets = async (assets, ctx) => {
20306
20310
  let result = assets;
@@ -20310,7 +20314,7 @@ class PluginRunner {
20310
20314
  return result;
20311
20315
  };
20312
20316
  }
20313
- const transformChain = this.plugins.map((p) => p.hooks?.transformAsset).filter(Boolean);
20317
+ const transformChain = this.plugins.map((p) => p.hooks?.transformAsset).filter(isDefined);
20314
20318
  if (transformChain.length > 0) {
20315
20319
  collected.transformAsset = async (asset, ctx) => {
20316
20320
  let items = [asset];
@@ -20325,17 +20329,18 @@ class PluginRunner {
20325
20329
  return items.length === 1 ? items[0] : items;
20326
20330
  };
20327
20331
  }
20328
- const compressChain = this.plugins.map((p) => p.hooks?.compressAsset).filter(Boolean);
20332
+ const compressChain = this.plugins.map((p) => p.hooks?.compressAsset).filter(isDefined);
20329
20333
  if (compressChain.length > 0) {
20330
20334
  collected.compressAsset = async (asset, ctx) => {
20331
- let result = asset;
20332
- for (const hook of compressChain) {
20335
+ const [firstHook, ...restHooks] = compressChain;
20336
+ let result = await firstHook(asset, ctx);
20337
+ for (const hook of restHooks) {
20333
20338
  result = await hook(result, ctx);
20334
20339
  }
20335
20340
  return result;
20336
20341
  };
20337
20342
  }
20338
- const nameChain = this.plugins.map((p) => p.hooks?.nameAsset).filter(Boolean);
20343
+ const nameChain = this.plugins.map((p) => p.hooks?.nameAsset).filter(isDefined);
20339
20344
  if (nameChain.length > 0) {
20340
20345
  collected.nameAsset = (asset, ctx) => {
20341
20346
  let result = "";
@@ -20345,7 +20350,7 @@ class PluginRunner {
20345
20350
  return result;
20346
20351
  };
20347
20352
  }
20348
- const checksumChain = this.plugins.map((p) => p.hooks?.generateChecksums).filter(Boolean);
20353
+ const checksumChain = this.plugins.map((p) => p.hooks?.generateChecksums).filter(isDefined);
20349
20354
  if (checksumChain.length > 0) {
20350
20355
  collected.generateChecksums = async (assets, ctx) => {
20351
20356
  let result = assets;
@@ -20355,7 +20360,7 @@ class PluginRunner {
20355
20360
  return result;
20356
20361
  };
20357
20362
  }
20358
- const uploadHooks = this.plugins.map((p) => p.hooks?.uploadAssets).filter(Boolean);
20363
+ const uploadHooks = this.plugins.map((p) => p.hooks?.uploadAssets).filter(isDefined);
20359
20364
  if (uploadHooks.length > 0) {
20360
20365
  collected.uploadAssets = async (assets, ctx) => {
20361
20366
  const allResults = [];
@@ -20371,7 +20376,7 @@ class PluginRunner {
20371
20376
  }
20372
20377
 
20373
20378
  // src/tasks/runner.ts
20374
- var import_node_fs11 = require("node:fs");
20379
+ var import_node_fs13 = require("node:fs");
20375
20380
  var import_node_os4 = require("node:os");
20376
20381
  var import_node_path20 = __toESM(require("node:path"));
20377
20382
  var import_node_process15 = __toESM(require("node:process"));
@@ -22967,6 +22972,7 @@ function createKeyResolver(packages) {
22967
22972
  }
22968
22973
 
22969
22974
  // src/ecosystem/js.ts
22975
+ var import_node_fs7 = require("node:fs");
22970
22976
  var import_promises4 = require("node:fs/promises");
22971
22977
  var import_node_path11 = __toESM(require("node:path"));
22972
22978
 
@@ -23071,7 +23077,7 @@ function isValidPackageName(packageName) {
23071
23077
  // package.json
23072
23078
  var package_default = {
23073
23079
  name: "@pubm/core",
23074
- version: "0.4.4",
23080
+ version: "0.4.6",
23075
23081
  type: "module",
23076
23082
  description: "Core SDK for pubm - publish manager for multiple registries",
23077
23083
  types: "./dist/index.d.ts",
@@ -23145,7 +23151,7 @@ var coreEngines = package_default.engines ?? {};
23145
23151
  function resolveDefine(injected, fallback) {
23146
23152
  return typeof injected === "string" ? injected : fallback;
23147
23153
  }
23148
- var PUBM_VERSION = resolveDefine("0.4.4", package_default.version);
23154
+ var PUBM_VERSION = resolveDefine("0.4.6", package_default.version);
23149
23155
  var PUBM_ENGINES = {
23150
23156
  node: resolveDefine(">=24", coreEngines.node ?? ">=18"),
23151
23157
  git: resolveDefine(">=2.11.0", coreEngines.git ?? ">=2.11.0"),
@@ -24098,6 +24104,9 @@ async function npmPackageRegistry(packagePath) {
24098
24104
  return new NpmPackageRegistry(manifest.name, packagePath);
24099
24105
  }
24100
24106
 
24107
+ // src/ecosystem/js.ts
24108
+ init_exec();
24109
+
24101
24110
  // src/utils/package.ts
24102
24111
  var import_promises3 = require("node:fs/promises");
24103
24112
  var import_node_path10 = __toESM(require("node:path"));
@@ -24121,16 +24130,28 @@ async function findOutFile(file, { cwd = import_node_process10.default.cwd() } =
24121
24130
  }
24122
24131
 
24123
24132
  // src/utils/package-manager.ts
24124
- var lockFile = {
24133
+ var lockFiles = {
24125
24134
  bun: ["bun.lock", "bun.lockb"],
24126
24135
  npm: ["package-lock.json", "npm-shrinkwrap.json"],
24127
24136
  pnpm: ["pnpm-lock.yaml"],
24128
24137
  yarn: ["yarn.lock"]
24129
24138
  };
24139
+ function getInstallCommand(pm, isYarnBerry) {
24140
+ switch (pm) {
24141
+ case "bun":
24142
+ return ["bun", "install", "--lockfile-only"];
24143
+ case "npm":
24144
+ return ["npm", "install", "--package-lock-only"];
24145
+ case "pnpm":
24146
+ return ["pnpm", "install", "--lockfile-only"];
24147
+ case "yarn":
24148
+ return isYarnBerry ? ["yarn", "install", "--mode", "update-lockfile"] : ["yarn", "install"];
24149
+ }
24150
+ }
24130
24151
  async function getPackageManager() {
24131
- for (const [packageManager, lockFiles] of Object.entries(lockFile)) {
24132
- for (const lockFile2 of lockFiles) {
24133
- if (await findOutFile(lockFile2))
24152
+ for (const [packageManager, files] of Object.entries(lockFiles)) {
24153
+ for (const file of files) {
24154
+ if (await findOutFile(file))
24134
24155
  return packageManager;
24135
24156
  }
24136
24157
  }
@@ -24139,6 +24160,8 @@ async function getPackageManager() {
24139
24160
  }
24140
24161
 
24141
24162
  // src/ecosystem/ecosystem.ts
24163
+ var import_node_fs6 = require("node:fs");
24164
+
24142
24165
  class Ecosystem {
24143
24166
  packagePath;
24144
24167
  constructor(packagePath) {
@@ -24179,9 +24202,17 @@ class Ecosystem {
24179
24202
  async updateSiblingDependencyVersions(_siblingVersions) {
24180
24203
  return false;
24181
24204
  }
24182
- async syncLockfile() {
24205
+ async syncLockfile(_mode = "optional") {
24183
24206
  return;
24184
24207
  }
24208
+ async resolvePublishDependencies(_workspaceVersions) {
24209
+ return new Map;
24210
+ }
24211
+ restorePublishDependencies(backups) {
24212
+ for (const [filePath, content] of backups) {
24213
+ import_node_fs6.writeFileSync(filePath, content, "utf-8");
24214
+ }
24215
+ }
24185
24216
  }
24186
24217
 
24187
24218
  // src/ecosystem/descriptor.ts
@@ -24252,6 +24283,97 @@ class JsEcosystem extends Ecosystem {
24252
24283
  supportedRegistries() {
24253
24284
  return ["npm", "jsr"];
24254
24285
  }
24286
+ async syncLockfile(mode = "optional") {
24287
+ if (mode === "skip")
24288
+ return;
24289
+ const found = await this.findLockfile();
24290
+ if (!found)
24291
+ return;
24292
+ const { lockfilePath, packageManager } = found;
24293
+ const lockfileDir = import_node_path11.default.dirname(lockfilePath);
24294
+ try {
24295
+ let isYarnBerry;
24296
+ if (packageManager === "yarn") {
24297
+ const yarnrcPath = import_node_path11.default.join(lockfileDir, ".yarnrc.yml");
24298
+ try {
24299
+ isYarnBerry = (await import_promises4.stat(yarnrcPath)).isFile();
24300
+ } catch {
24301
+ isYarnBerry = false;
24302
+ }
24303
+ }
24304
+ const [cmd, ...args] = getInstallCommand(packageManager, isYarnBerry);
24305
+ await exec3(cmd, args, { nodeOptions: { cwd: lockfileDir } });
24306
+ return lockfilePath;
24307
+ } catch (error2) {
24308
+ if (mode === "required")
24309
+ throw error2;
24310
+ console.warn(`Warning: Failed to sync lockfile at ${lockfilePath}: ${error2 instanceof Error ? error2.message : error2}`);
24311
+ return;
24312
+ }
24313
+ }
24314
+ async findLockfile() {
24315
+ let dir = this.packagePath;
24316
+ const { root } = import_node_path11.default.parse(dir);
24317
+ while (dir !== root) {
24318
+ for (const [pm, files] of Object.entries(lockFiles)) {
24319
+ for (const file of files) {
24320
+ const candidate = import_node_path11.default.join(dir, file);
24321
+ try {
24322
+ if ((await import_promises4.stat(candidate)).isFile()) {
24323
+ return {
24324
+ lockfilePath: candidate,
24325
+ packageManager: pm
24326
+ };
24327
+ }
24328
+ } catch {}
24329
+ }
24330
+ }
24331
+ dir = import_node_path11.default.dirname(dir);
24332
+ }
24333
+ return;
24334
+ }
24335
+ async resolvePublishDependencies(workspaceVersions) {
24336
+ const backups = new Map;
24337
+ const manifestPath = import_node_path11.default.join(this.packagePath, "package.json");
24338
+ if (!import_node_fs7.existsSync(manifestPath))
24339
+ return backups;
24340
+ const original = import_node_fs7.readFileSync(manifestPath, "utf-8");
24341
+ const pkg = JSON.parse(original);
24342
+ let modified = false;
24343
+ const WORKSPACE_PREFIX = "workspace:";
24344
+ const DEPENDENCY_FIELDS = [
24345
+ "dependencies",
24346
+ "devDependencies",
24347
+ "optionalDependencies",
24348
+ "peerDependencies"
24349
+ ];
24350
+ for (const field of DEPENDENCY_FIELDS) {
24351
+ const deps = pkg[field];
24352
+ if (!deps)
24353
+ continue;
24354
+ for (const [depName, spec] of Object.entries(deps)) {
24355
+ if (!spec.startsWith(WORKSPACE_PREFIX))
24356
+ continue;
24357
+ const range = spec.slice(WORKSPACE_PREFIX.length);
24358
+ if (range === "*" || range === "^" || range === "~") {
24359
+ const version = workspaceVersions.get(depName);
24360
+ if (!version) {
24361
+ throw new Error(`Cannot resolve "${spec}" for dependency "${depName}": package not found in workspace`);
24362
+ }
24363
+ deps[depName] = range === "*" ? version : range === "^" ? `^${version}` : `~${version}`;
24364
+ } else {
24365
+ deps[depName] = range;
24366
+ }
24367
+ modified = true;
24368
+ }
24369
+ }
24370
+ if (modified) {
24371
+ backups.set(manifestPath, original);
24372
+ import_node_fs7.writeFileSync(manifestPath, `${JSON.stringify(pkg, null, 2)}
24373
+ `, "utf-8");
24374
+ }
24375
+ return backups;
24376
+ }
24255
24377
  async createDescriptor() {
24256
24378
  const npmReader = NpmPackageRegistry.reader;
24257
24379
  const jsrReader = JsrPackageRegistry.reader;
@@ -25587,15 +25709,24 @@ class RustEcosystem extends Ecosystem {
25587
25709
  }
25588
25710
  return modified;
25589
25711
  }
25590
- async syncLockfile() {
25712
+ async syncLockfile(mode = "optional") {
25713
+ if (mode === "skip")
25714
+ return;
25591
25715
  const lockfilePath = await this.findLockfile();
25592
25716
  if (!lockfilePath)
25593
25717
  return;
25594
- const name = await this.packageName();
25595
- await exec3("cargo", ["update", "--package", name], {
25596
- nodeOptions: { cwd: import_node_path13.default.dirname(lockfilePath) }
25597
- });
25598
- return lockfilePath;
25718
+ try {
25719
+ const name = await this.packageName();
25720
+ await exec3("cargo", ["update", "--package", name], {
25721
+ nodeOptions: { cwd: import_node_path13.default.dirname(lockfilePath) }
25722
+ });
25723
+ return lockfilePath;
25724
+ } catch (error2) {
25725
+ if (mode === "required")
25726
+ throw error2;
25727
+ console.warn(`Warning: Failed to sync lockfile at ${lockfilePath}: ${error2 instanceof Error ? error2.message : error2}`);
25728
+ return;
25729
+ }
25599
25730
  }
25600
25731
  async findLockfile() {
25601
25732
  let dir = this.packagePath;
@@ -25671,7 +25802,7 @@ init_error();
25671
25802
  init_git();
25672
25803
 
25673
25804
  // src/manifest/write-versions.ts
25674
- async function writeVersionsForEcosystem(ecosystems, versions) {
25805
+ async function writeVersionsForEcosystem(ecosystems, versions, lockfileSync) {
25675
25806
  const modifiedFiles = [];
25676
25807
  for (const { eco, pkg } of ecosystems) {
25677
25808
  const version = versions.get(pkg.path);
@@ -25692,20 +25823,23 @@ async function writeVersionsForEcosystem(ecosystems, versions) {
25692
25823
  }
25693
25824
  await Promise.all(ecosystems.map(({ eco }) => eco.updateSiblingDependencyVersions(nameKeyedVersions)));
25694
25825
  }
25826
+ const syncedLockfiles = new Set;
25695
25827
  for (const { eco } of ecosystems) {
25696
- const lockfilePath = await eco.syncLockfile();
25697
- if (lockfilePath)
25828
+ const lockfilePath = await eco.syncLockfile(lockfileSync);
25829
+ if (lockfilePath && !syncedLockfiles.has(lockfilePath)) {
25830
+ syncedLockfiles.add(lockfilePath);
25698
25831
  modifiedFiles.push(lockfilePath);
25832
+ }
25699
25833
  }
25700
25834
  return modifiedFiles;
25701
25835
  }
25702
25836
 
25703
25837
  // src/monorepo/resolve-workspace.ts
25704
- var import_node_fs8 = require("node:fs");
25838
+ var import_node_fs10 = require("node:fs");
25705
25839
  var import_node_path17 = require("node:path");
25706
25840
 
25707
25841
  // src/monorepo/discover.ts
25708
- var import_node_fs7 = require("node:fs");
25842
+ var import_node_fs9 = require("node:fs");
25709
25843
  var import_node_path16 = __toESM(require("node:path"));
25710
25844
  var import_micromatch2 = __toESM(require_micromatch(), 1);
25711
25845
 
@@ -25782,6 +25916,9 @@ function normalizeRegistryUrl(url) {
25782
25916
  init_exec();
25783
25917
  class CustomPackageRegistry extends NpmPackageRegistry {
25784
25918
  async npm(args, cwd) {
25919
+ if (!this.registry) {
25920
+ throw new Error("Custom registry URL is required for npm operations.");
25921
+ }
25785
25922
  const { stdout } = await exec3("npm", args.concat("--registry", this.registry), {
25786
25923
  throwOnError: true,
25787
25924
  nodeOptions: cwd ? { cwd } : undefined
@@ -25886,7 +26023,7 @@ registryCatalog.register({
25886
26023
  return token.trim().length >= 32;
25887
26024
  },
25888
26025
  resolveDisplayName: async (ctx) => {
25889
- return ctx.packages?.filter((pkg) => pkg.registries?.includes("crates")).map((pkg) => pkg.path) ?? ["crate"];
26026
+ return ctx.packages?.filter((pkg) => pkg.registries?.includes("crates")).map((pkg) => pkg.name) ?? ["crate"];
25890
26027
  },
25891
26028
  concurrentPublish: false,
25892
26029
  orderPackages: (paths) => sortCratesByDependencyOrder(paths),
@@ -25938,6 +26075,12 @@ async function readJsonSafe(path8) {
25938
26075
  return null;
25939
26076
  }
25940
26077
  }
26078
+ function asRecord(value) {
26079
+ return value !== null && typeof value === "object" ? value : null;
26080
+ }
26081
+ function readString(value) {
26082
+ return typeof value === "string" ? value : undefined;
26083
+ }
25941
26084
  async function readFileSafe(path8) {
25942
26085
  try {
25943
26086
  return await import_promises6.readFile(path8, "utf-8");
@@ -25987,8 +26130,8 @@ async function inferJsRegistries(packagePath, rootPath) {
25987
26130
  return [];
25988
26131
  }
25989
26132
  const packageJson = await readJsonSafe(import_node_path14.join(packagePath, "package.json"));
25990
- const packageName = packageJson?.name;
25991
- const publishConfigRegistry = packageJson?.publishConfig?.registry;
26133
+ const packageName = readString(packageJson?.name);
26134
+ const publishConfigRegistry = readString(asRecord(packageJson?.publishConfig)?.registry);
25992
26135
  let npmRegistryUrl = null;
25993
26136
  if (publishConfigRegistry) {
25994
26137
  npmRegistryUrl = publishConfigRegistry;
@@ -26029,7 +26172,7 @@ async function inferRegistries(packagePath, ecosystemKey, rootPath) {
26029
26172
  }
26030
26173
 
26031
26174
  // src/monorepo/workspace.ts
26032
- var import_node_fs6 = require("node:fs");
26175
+ var import_node_fs8 = require("node:fs");
26033
26176
  var import_node_path15 = require("node:path");
26034
26177
 
26035
26178
  // ../../node_modules/.bun/jsonc-parser@3.3.1/node_modules/jsonc-parser/lib/esm/impl/scanner.js
@@ -26842,15 +26985,15 @@ function detectWorkspace(cwd) {
26842
26985
  const root = cwd ?? process.cwd();
26843
26986
  const workspaces = [];
26844
26987
  const pnpmWorkspacePath = import_node_path15.join(root, "pnpm-workspace.yaml");
26845
- if (import_node_fs6.existsSync(pnpmWorkspacePath)) {
26846
- const content = import_node_fs6.readFileSync(pnpmWorkspacePath, "utf-8");
26988
+ if (import_node_fs8.existsSync(pnpmWorkspacePath)) {
26989
+ const content = import_node_fs8.readFileSync(pnpmWorkspacePath, "utf-8");
26847
26990
  const parsed = import_yaml2.parse(content);
26848
26991
  const packages = parsed?.packages ?? [];
26849
26992
  workspaces.push({ type: "pnpm", patterns: packages });
26850
26993
  }
26851
26994
  const cargoTomlPath = import_node_path15.join(root, "Cargo.toml");
26852
- if (import_node_fs6.existsSync(cargoTomlPath)) {
26853
- const content = import_node_fs6.readFileSync(cargoTomlPath, "utf-8");
26995
+ if (import_node_fs8.existsSync(cargoTomlPath)) {
26996
+ const content = import_node_fs8.readFileSync(cargoTomlPath, "utf-8");
26854
26997
  try {
26855
26998
  const parsed = parse(content);
26856
26999
  const workspace = parsed.workspace;
@@ -26865,8 +27008,8 @@ function detectWorkspace(cwd) {
26865
27008
  }
26866
27009
  for (const denoFile of ["deno.json", "deno.jsonc"]) {
26867
27010
  const denoPath = import_node_path15.join(root, denoFile);
26868
- if (import_node_fs6.existsSync(denoPath)) {
26869
- const content = import_node_fs6.readFileSync(denoPath, "utf-8");
27011
+ if (import_node_fs8.existsSync(denoPath)) {
27012
+ const content = import_node_fs8.readFileSync(denoPath, "utf-8");
26870
27013
  try {
26871
27014
  const parsed = denoFile.endsWith(".jsonc") ? parse3(content) : JSON.parse(content);
26872
27015
  if (Array.isArray(parsed?.workspace)) {
@@ -26879,12 +27022,12 @@ function detectWorkspace(cwd) {
26879
27022
  }
26880
27023
  if (!workspaces.some((w2) => w2.type === "pnpm")) {
26881
27024
  const packageJsonPath = import_node_path15.join(root, "package.json");
26882
- if (import_node_fs6.existsSync(packageJsonPath)) {
26883
- const content = import_node_fs6.readFileSync(packageJsonPath, "utf-8");
27025
+ if (import_node_fs8.existsSync(packageJsonPath)) {
27026
+ const content = import_node_fs8.readFileSync(packageJsonPath, "utf-8");
26884
27027
  const pkg = JSON.parse(content);
26885
27028
  if (pkg.workspaces) {
26886
27029
  const bunfigPath = import_node_path15.join(root, "bunfig.toml");
26887
- const isBun2 = import_node_fs6.existsSync(bunfigPath);
27030
+ const isBun2 = import_node_fs8.existsSync(bunfigPath);
26888
27031
  if (Array.isArray(pkg.workspaces)) {
26889
27032
  workspaces.push({
26890
27033
  type: isBun2 ? "bun" : "npm",
@@ -26916,16 +27059,32 @@ function matchesIgnore(pkgPath, ignorePatterns) {
26916
27059
  return regex3.test(normalized);
26917
27060
  });
26918
27061
  }
26919
- function resolvePatterns(cwd, patterns) {
26920
- const entries = import_node_fs7.readdirSync(cwd, { recursive: true, encoding: "utf-8" });
26921
- const dirs = entries.filter((entry) => {
26922
- const fullPath = import_node_path16.default.join(cwd, entry);
27062
+ function readdirRecursiveNoSymlinks(dir, root) {
27063
+ const results = [];
27064
+ let entries;
27065
+ try {
27066
+ entries = import_node_fs9.readdirSync(dir, { encoding: "utf-8" });
27067
+ } catch {
27068
+ return results;
27069
+ }
27070
+ for (const entry of entries) {
27071
+ if (entry === "node_modules" || entry === ".git")
27072
+ continue;
27073
+ const fullPath = import_node_path16.default.join(dir, entry);
26923
27074
  try {
26924
- return import_node_fs7.statSync(fullPath).isDirectory();
26925
- } catch {
26926
- return false;
26927
- }
26928
- });
27075
+ const stat6 = import_node_fs9.lstatSync(fullPath);
27076
+ if (stat6.isSymbolicLink())
27077
+ continue;
27078
+ if (stat6.isDirectory()) {
27079
+ results.push(import_node_path16.default.relative(root, fullPath));
27080
+ results.push(...readdirRecursiveNoSymlinks(fullPath, root));
27081
+ }
27082
+ } catch {}
27083
+ }
27084
+ return results;
27085
+ }
27086
+ function resolvePatterns(cwd, patterns) {
27087
+ const dirs = readdirRecursiveNoSymlinks(cwd, cwd);
26929
27088
  const normalizedDirs = dirs.map((d3) => d3.replace(/\\/g, "/"));
26930
27089
  const matched = import_micromatch2.default(normalizedDirs, patterns);
26931
27090
  return matched.map((d3) => import_node_path16.default.resolve(cwd, d3));
@@ -27022,31 +27181,6 @@ async function discoverPackages(options) {
27022
27181
  }
27023
27182
 
27024
27183
  // src/monorepo/resolve-workspace.ts
27025
- var WORKSPACE_PREFIX = "workspace:";
27026
- var DEPENDENCY_FIELDS = [
27027
- "dependencies",
27028
- "devDependencies",
27029
- "optionalDependencies",
27030
- "peerDependencies"
27031
- ];
27032
- function resolveWorkspaceProtocol(spec, version) {
27033
- if (!spec.startsWith(WORKSPACE_PREFIX))
27034
- return spec;
27035
- const range = spec.slice(WORKSPACE_PREFIX.length);
27036
- switch (range) {
27037
- case "*":
27038
- return version;
27039
- case "^":
27040
- return `^${version}`;
27041
- case "~":
27042
- return `~${version}`;
27043
- default:
27044
- return range;
27045
- }
27046
- }
27047
- function isDynamicWorkspaceSpec(range) {
27048
- return range === "*" || range === "^" || range === "~";
27049
- }
27050
27184
  function collectWorkspaceVersions(cwd) {
27051
27185
  const versions = new Map;
27052
27186
  const workspaces = detectWorkspace(cwd);
@@ -27058,10 +27192,10 @@ function collectWorkspaceVersions(cwd) {
27058
27192
  const dirs = resolvePatterns(cwd, workspace.patterns);
27059
27193
  for (const dir of dirs) {
27060
27194
  const pkgJsonPath = import_node_path17.join(dir, "package.json");
27061
- if (!import_node_fs8.existsSync(pkgJsonPath))
27195
+ if (!import_node_fs10.existsSync(pkgJsonPath))
27062
27196
  continue;
27063
27197
  try {
27064
- const content = import_node_fs8.readFileSync(pkgJsonPath, "utf-8");
27198
+ const content = import_node_fs10.readFileSync(pkgJsonPath, "utf-8");
27065
27199
  const pkg = JSON.parse(content);
27066
27200
  if (typeof pkg.name === "string" && pkg.name && typeof pkg.version === "string" && pkg.version) {
27067
27201
  versions.set(pkg.name, pkg.version);
@@ -27071,44 +27205,9 @@ function collectWorkspaceVersions(cwd) {
27071
27205
  }
27072
27206
  return versions;
27073
27207
  }
27074
- function resolveWorkspaceProtocolsInManifests(packagePaths, workspaceVersions) {
27075
- const backups = new Map;
27076
- for (const pkgPath of packagePaths) {
27077
- const manifestPath = import_node_path17.join(pkgPath, "package.json");
27078
- const original = import_node_fs8.readFileSync(manifestPath, "utf-8");
27079
- const pkg = JSON.parse(original);
27080
- let modified = false;
27081
- for (const field of DEPENDENCY_FIELDS) {
27082
- const deps = pkg[field];
27083
- if (!deps)
27084
- continue;
27085
- for (const [depName, spec] of Object.entries(deps)) {
27086
- if (!spec.startsWith(WORKSPACE_PREFIX))
27087
- continue;
27088
- const range = spec.slice(WORKSPACE_PREFIX.length);
27089
- if (isDynamicWorkspaceSpec(range)) {
27090
- const version = workspaceVersions.get(depName);
27091
- if (!version) {
27092
- throw new Error(`Cannot resolve "${spec}" for dependency "${depName}": package not found in workspace`);
27093
- }
27094
- deps[depName] = resolveWorkspaceProtocol(spec, version);
27095
- } else {
27096
- deps[depName] = range;
27097
- }
27098
- modified = true;
27099
- }
27100
- }
27101
- if (modified) {
27102
- backups.set(manifestPath, original);
27103
- import_node_fs8.writeFileSync(manifestPath, `${JSON.stringify(pkg, null, 2)}
27104
- `, "utf-8");
27105
- }
27106
- }
27107
- return backups;
27108
- }
27109
27208
  function restoreManifests(backups) {
27110
27209
  for (const [filePath, content] of backups) {
27111
- import_node_fs8.writeFileSync(filePath, content, "utf-8");
27210
+ import_node_fs10.writeFileSync(filePath, content, "utf-8");
27112
27211
  }
27113
27212
  }
27114
27213
 
@@ -27412,7 +27511,8 @@ function createContext(config, options, cwd) {
27412
27511
  tag: options.tag ?? "latest",
27413
27512
  promptEnabled: false,
27414
27513
  cleanWorkingTree: false,
27415
- pluginRunner: new PluginRunner([])
27514
+ pluginRunner: new PluginRunner([]),
27515
+ tokenRetryPromises: {}
27416
27516
  };
27417
27517
  const ctx = Object.defineProperties(Object.create(null), {
27418
27518
  config: {
@@ -27499,9 +27599,10 @@ async function withTokenRetry(registryKey, ctx, task, action) {
27499
27599
  if (!descriptor)
27500
27600
  throw error2;
27501
27601
  const config = descriptor.tokenConfig;
27502
- const retryKey = `_tokenRetry_${registryKey}`;
27503
- if (!ctx.runtime[retryKey]) {
27504
- ctx.runtime[retryKey] = (async () => {
27602
+ const retryPromises = ctx.runtime.tokenRetryPromises ?? {};
27603
+ ctx.runtime.tokenRetryPromises = retryPromises;
27604
+ if (!retryPromises[registryKey]) {
27605
+ retryPromises[registryKey] = (async () => {
27505
27606
  task.output = `Auth failed. Re-enter ${config.promptLabel}`;
27506
27607
  const newToken = await task.prompt(ListrEnquirerPromptAdapter).run({
27507
27608
  type: "password",
@@ -27512,7 +27613,7 @@ async function withTokenRetry(registryKey, ctx, task, action) {
27512
27613
  return newToken;
27513
27614
  })();
27514
27615
  }
27515
- await ctx.runtime[retryKey];
27616
+ await retryPromises[registryKey];
27516
27617
  await action();
27517
27618
  }
27518
27619
  }
@@ -27569,7 +27670,11 @@ async function findUnpublishedSiblingDeps(packagePath, siblingPaths) {
27569
27670
  }));
27570
27671
  const siblingDeps = deps.filter((d3) => siblingNameToPath.has(d3));
27571
27672
  const results = await Promise.all(siblingDeps.map(async (name) => {
27572
- const registry = await cratesPackageRegistry(siblingNameToPath.get(name));
27673
+ const siblingPath = siblingNameToPath.get(name);
27674
+ if (!siblingPath) {
27675
+ throw new Error(`Missing sibling crate path for dependency: ${name}`);
27676
+ }
27677
+ const registry = await cratesPackageRegistry(siblingPath);
27573
27678
  const published = await registry.isPublished();
27574
27679
  return published ? null : name;
27575
27680
  }));
@@ -27619,7 +27724,7 @@ function createCratesDryRunPublishTask(packagePath, siblingPaths) {
27619
27724
  // src/tasks/github-release.ts
27620
27725
  init_error();
27621
27726
  init_git();
27622
- var import_node_fs9 = require("node:fs");
27727
+ var import_node_fs11 = require("node:fs");
27623
27728
  var import_semver2 = __toESM(require_semver2(), 1);
27624
27729
  var { prerelease } = import_semver2.default;
27625
27730
 
@@ -27689,7 +27794,7 @@ async function createGitHubRelease(_ctx, options) {
27689
27794
  const uploadUrl = release.upload_url.replace(/\{[^}]*\}/, "");
27690
27795
  const releaseAssets = [];
27691
27796
  for (const asset of options.assets) {
27692
- const archiveContent = import_node_fs9.readFileSync(asset.filePath);
27797
+ const archiveContent = import_node_fs11.readFileSync(asset.filePath);
27693
27798
  const uploadResponse = await fetch(`${uploadUrl}?name=${encodeURIComponent(asset.name)}`, {
27694
27799
  method: "POST",
27695
27800
  headers: {
@@ -27938,7 +28043,7 @@ init_error();
27938
28043
  init_exec();
27939
28044
 
27940
28045
  // src/utils/gh-secrets-sync-state.ts
27941
- var import_node_fs10 = require("node:fs");
28046
+ var import_node_fs12 = require("node:fs");
27942
28047
  var import_node_path18 = __toESM(require("node:path"));
27943
28048
  var SYNC_HASH_FILENAME = "gh-secrets-sync-hash";
27944
28049
  function syncHashFilePath() {
@@ -27946,14 +28051,14 @@ function syncHashFilePath() {
27946
28051
  }
27947
28052
  function readGhSecretsSyncHash() {
27948
28053
  try {
27949
- const value = import_node_fs10.readFileSync(syncHashFilePath(), "utf8").trim();
28054
+ const value = import_node_fs12.readFileSync(syncHashFilePath(), "utf8").trim();
27950
28055
  return value || null;
27951
28056
  } catch {
27952
28057
  return null;
27953
28058
  }
27954
28059
  }
27955
28060
  function writeGhSecretsSyncHash(hash) {
27956
- import_node_fs10.writeFileSync(syncHashFilePath(), `${hash}
28061
+ import_node_fs12.writeFileSync(syncHashFilePath(), `${hash}
27957
28062
  `, "utf8");
27958
28063
  }
27959
28064
 
@@ -28321,7 +28426,7 @@ async function prepareReleaseAssets(ctx, packageName, version, packagePath) {
28321
28426
  const normalizedGroups = normalizeConfig(assetConfig, ctx.config.compress);
28322
28427
  const relevantGroup = normalizedGroups.find((g) => !g.packagePath || g.packagePath === packagePath) ?? { files: [] };
28323
28428
  const tempDir = import_node_path20.join(import_node_os4.tmpdir(), `pubm-assets-${Date.now()}`);
28324
- import_node_fs11.mkdirSync(tempDir, { recursive: true });
28429
+ import_node_fs13.mkdirSync(tempDir, { recursive: true });
28325
28430
  ctx.runtime.tempDir = tempDir;
28326
28431
  const resolvedAssets = resolveAssets(relevantGroup, ctx.config.compress, ctx.cwd);
28327
28432
  const preparedAssets = await runAssetPipeline(resolvedAssets, assetHooks, {
@@ -28341,16 +28446,30 @@ function isReleaseExcluded(config, pkgPath) {
28341
28446
  function getPackageName(ctx, packagePath) {
28342
28447
  return ctx.config.packages.find((p2) => p2.path === packagePath)?.name ?? packagePath;
28343
28448
  }
28449
+ function requirePackageEcosystem(pkg) {
28450
+ if (!pkg.ecosystem) {
28451
+ throw new Error(`Package ${pkg.path} is missing an ecosystem.`);
28452
+ }
28453
+ return pkg.ecosystem;
28454
+ }
28455
+ function requireVersionPlan(ctx) {
28456
+ const { versionPlan } = ctx.runtime;
28457
+ if (!versionPlan) {
28458
+ throw new Error("Version plan is required before running release tasks.");
28459
+ }
28460
+ return versionPlan;
28461
+ }
28344
28462
  async function writeVersions(ctx, versions) {
28345
28463
  const ecosystems = ctx.config.packages.map((pkg) => {
28346
28464
  const absPath = import_node_path20.default.resolve(ctx.cwd ?? import_node_process15.default.cwd(), pkg.path);
28347
- const descriptor = ecosystemCatalog.get(pkg.ecosystem);
28465
+ const ecosystem = requirePackageEcosystem(pkg);
28466
+ const descriptor = ecosystemCatalog.get(ecosystem);
28348
28467
  if (!descriptor)
28349
- throw new Error(`Unknown ecosystem: ${pkg.ecosystem}`);
28468
+ throw new Error(`Unknown ecosystem: ${ecosystem}`);
28350
28469
  const eco = new descriptor.ecosystemClass(absPath);
28351
28470
  return { eco, pkg };
28352
28471
  });
28353
- const lockfileChanges = await writeVersionsForEcosystem(ecosystems, versions);
28472
+ const lockfileChanges = await writeVersionsForEcosystem(ecosystems, versions, ctx.config.lockfileSync);
28354
28473
  const manifestFiles = ecosystems.flatMap(({ eco }) => eco.manifestFiles().map((f2) => import_node_path20.default.resolve(eco.packagePath, f2)));
28355
28474
  return [...manifestFiles, ...lockfileChanges];
28356
28475
  }
@@ -28365,17 +28484,38 @@ function createPublishTaskForPath(registryKey, packagePath) {
28365
28484
  return { title: `Publish to ${registryKey}`, task: async () => {} };
28366
28485
  return factory(packagePath);
28367
28486
  }
28368
- function resolveWorkspaceProtocols(ctx) {
28487
+ async function resolveWorkspaceProtocols(ctx) {
28369
28488
  if (!ctx.cwd)
28370
28489
  return;
28371
28490
  const workspaceVersions = collectWorkspaceVersions(ctx.cwd);
28372
28491
  if (workspaceVersions.size === 0)
28373
28492
  return;
28374
- const packagePaths = ctx.config.packages.map((pkg) => import_node_path20.default.resolve(ctx.cwd, pkg.path));
28375
- const backups = resolveWorkspaceProtocolsInManifests(packagePaths, workspaceVersions);
28376
- if (backups.size > 0) {
28377
- ctx.runtime.workspaceBackups = backups;
28378
- addRollback(async () => restoreManifests(backups), ctx);
28493
+ const allBackups = new Map;
28494
+ for (const pkg of ctx.config.packages) {
28495
+ const absPath = import_node_path20.default.resolve(ctx.cwd, pkg.path);
28496
+ const ecosystem = requirePackageEcosystem(pkg);
28497
+ const descriptor = ecosystemCatalog.get(ecosystem);
28498
+ if (!descriptor)
28499
+ continue;
28500
+ const eco = new descriptor.ecosystemClass(absPath);
28501
+ const backups = await eco.resolvePublishDependencies(workspaceVersions);
28502
+ for (const [k3, v2] of backups) {
28503
+ allBackups.set(k3, v2);
28504
+ }
28505
+ }
28506
+ if (allBackups.size > 0) {
28507
+ ctx.runtime.workspaceBackups = allBackups;
28508
+ addRollback(async () => {
28509
+ for (const pkg of ctx.config.packages) {
28510
+ const absPath = import_node_path20.default.resolve(ctx.cwd, pkg.path);
28511
+ const ecosystem = requirePackageEcosystem(pkg);
28512
+ const descriptor = ecosystemCatalog.get(ecosystem);
28513
+ if (!descriptor)
28514
+ continue;
28515
+ const eco = new descriptor.ecosystemClass(absPath);
28516
+ eco.restorePublishDependencies(allBackups);
28517
+ }
28518
+ }, ctx);
28379
28519
  }
28380
28520
  }
28381
28521
  async function applyVersionsForDryRun(ctx) {
@@ -28495,8 +28635,8 @@ ${[...plan.packages].map(([pkgPath, ver]) => ` ${getPackageName(ctx, pkgPath)}:
28495
28635
  }
28496
28636
  return "";
28497
28637
  }
28498
- function shouldRenderLiveCommandOutput(ctx) {
28499
- return ctx.options.mode !== "ci" && !m && Boolean(import_node_process15.default.stdout.isTTY);
28638
+ function shouldRenderLiveCommandOutput(_ctx) {
28639
+ return !m && Boolean(import_node_process15.default.stdout.isTTY);
28500
28640
  }
28501
28641
  function normalizeLiveCommandOutputLine(line) {
28502
28642
  const normalized = import_node_util3.stripVTControlCharacters(line).trimEnd();
@@ -28577,7 +28717,7 @@ async function run(ctx) {
28577
28717
  await requiredConditionsCheckTask({
28578
28718
  skip: ctx.options.skipConditionsCheck
28579
28719
  }).run(ctx);
28580
- const pipelineListrOptions2 = mode === "ci" || m ? createCiListrOptions() : undefined;
28720
+ const pipelineListrOptions2 = m ? createCiListrOptions() : undefined;
28581
28721
  await createListr([
28582
28722
  {
28583
28723
  skip: ctx.options.skipTests,
@@ -28659,7 +28799,7 @@ async function run(ctx) {
28659
28799
  skip: () => dryRun,
28660
28800
  task: async (ctx2, task) => {
28661
28801
  const git = new Git;
28662
- const snapshotPlan = ctx2.runtime.versionPlan;
28802
+ const snapshotPlan = requireVersionPlan(ctx2);
28663
28803
  const tagName = `v${snapshotPlan.mode !== "independent" ? snapshotPlan.version : ""}`;
28664
28804
  task.output = `Creating tag ${tagName}...`;
28665
28805
  const headCommit = await git.latestCommit();
@@ -28726,7 +28866,7 @@ async function run(ctx) {
28726
28866
  skip: ctx.options.skipConditionsCheck
28727
28867
  }).run(ctx);
28728
28868
  }
28729
- const pipelineListrOptions = mode === "ci" || m ? createCiListrOptions() : undefined;
28869
+ const pipelineListrOptions = m ? createCiListrOptions() : undefined;
28730
28870
  await createListr([
28731
28871
  {
28732
28872
  skip: !hasPrepare || ctx.options.skipTests,
@@ -28792,7 +28932,7 @@ async function run(ctx) {
28792
28932
  const git = new Git;
28793
28933
  let tagCreated = false;
28794
28934
  let commited = false;
28795
- const plan = ctx2.runtime.versionPlan;
28935
+ const plan = requireVersionPlan(ctx2);
28796
28936
  task.output = formatVersionPlan(ctx2);
28797
28937
  addRollback(async () => {
28798
28938
  if (tagCreated) {
@@ -29016,7 +29156,7 @@ ${[...plan.packages].map(([pkgPath, ver]) => `- ${getPackageName(ctx2, pkgPath)}
29016
29156
  task: async (ctx2, parentTask) => {
29017
29157
  parentTask.output = "Running plugin beforePublish hooks...";
29018
29158
  await ctx2.runtime.pluginRunner.runHook("beforePublish", ctx2);
29019
- resolveWorkspaceProtocols(ctx2);
29159
+ await resolveWorkspaceProtocols(ctx2);
29020
29160
  const publishTasks = await collectPublishTasks(ctx2);
29021
29161
  parentTask.title = `Publishing (${countPublishTargets(ctx2)} targets)`;
29022
29162
  parentTask.output = formatRegistryGroupSummary("Concurrent publish tasks", ctx2, true);
@@ -29029,7 +29169,11 @@ ${[...plan.packages].map(([pkgPath, ver]) => `- ${getPackageName(ctx2, pkgPath)}
29029
29169
  skip: (ctx2) => !hasPublish || !!ctx2.options.skipPublish || dryRun || !ctx2.runtime.workspaceBackups?.size,
29030
29170
  title: "Restoring workspace protocols",
29031
29171
  task: (ctx2) => {
29032
- restoreManifests(ctx2.runtime.workspaceBackups);
29172
+ const backups = ctx2.runtime.workspaceBackups;
29173
+ if (!backups) {
29174
+ throw new Error("Workspace backups are required for restore.");
29175
+ }
29176
+ restoreManifests(backups);
29033
29177
  ctx2.runtime.workspaceBackups = undefined;
29034
29178
  }
29035
29179
  },
@@ -29046,7 +29190,7 @@ ${[...plan.packages].map(([pkgPath, ver]) => `- ${getPackageName(ctx2, pkgPath)}
29046
29190
  skip: !dryRun && !(mode === "ci" && hasPrepare),
29047
29191
  title: "Validating publish (dry-run)",
29048
29192
  task: async (ctx2, parentTask) => {
29049
- resolveWorkspaceProtocols(ctx2);
29193
+ await resolveWorkspaceProtocols(ctx2);
29050
29194
  await applyVersionsForDryRun(ctx2);
29051
29195
  const dryRunTasks = await collectDryRunPublishTasks(ctx2);
29052
29196
  parentTask.title = `Validating publish (${countRegistryTargets(collectEcosystemRegistryGroups(ctx2.config))} targets)`;
@@ -29060,7 +29204,11 @@ ${[...plan.packages].map(([pkgPath, ver]) => `- ${getPackageName(ctx2, pkgPath)}
29060
29204
  skip: (ctx2) => !dryRun && !(mode === "ci" && hasPrepare) || !ctx2.runtime.workspaceBackups?.size,
29061
29205
  title: "Restoring workspace protocols",
29062
29206
  task: (ctx2) => {
29063
- restoreManifests(ctx2.runtime.workspaceBackups);
29207
+ const backups = ctx2.runtime.workspaceBackups;
29208
+ if (!backups) {
29209
+ throw new Error("Workspace backups are required for restore.");
29210
+ }
29211
+ restoreManifests(backups);
29064
29212
  ctx2.runtime.workspaceBackups = undefined;
29065
29213
  }
29066
29214
  },
@@ -29068,7 +29216,11 @@ ${[...plan.packages].map(([pkgPath, ver]) => `- ${getPackageName(ctx2, pkgPath)}
29068
29216
  skip: (ctx2) => !dryRun || !ctx2.runtime.dryRunVersionBackup?.size,
29069
29217
  title: "Restoring original versions (dry-run)",
29070
29218
  task: async (ctx2) => {
29071
- await writeVersions(ctx2, ctx2.runtime.dryRunVersionBackup);
29219
+ const backupVersions = ctx2.runtime.dryRunVersionBackup;
29220
+ if (!backupVersions) {
29221
+ throw new Error("Dry-run version backup is required for restore.");
29222
+ }
29223
+ await writeVersions(ctx2, backupVersions);
29072
29224
  ctx2.runtime.dryRunVersionBackup = undefined;
29073
29225
  }
29074
29226
  },
@@ -29095,7 +29247,7 @@ ${[...plan.packages].map(([pkgPath, ver]) => `- ${getPackageName(ctx2, pkgPath)}
29095
29247
  skip: (ctx2) => !hasPublish || !!ctx2.options.skipReleaseDraft || dryRun,
29096
29248
  title: "Creating GitHub Release",
29097
29249
  task: async (ctx2, task) => {
29098
- const plan = ctx2.runtime.versionPlan;
29250
+ const plan = requireVersionPlan(ctx2);
29099
29251
  const tokenResult = resolveGitHubToken();
29100
29252
  let hasToken = !!tokenResult;
29101
29253
  if (tokenResult) {
@@ -29141,8 +29293,8 @@ ${[...plan.packages].map(([pkgPath, ver]) => `- ${getPackageName(ctx2, pkgPath)}
29141
29293
  const pkgConfig = ctx2.config.packages.find((p2) => p2.path === pkgPath);
29142
29294
  if (pkgConfig) {
29143
29295
  const changelogPath = import_node_path20.join(import_node_process15.default.cwd(), pkgConfig.path, "CHANGELOG.md");
29144
- if (import_node_fs11.existsSync(changelogPath)) {
29145
- const section = parseChangelogSection(import_node_fs11.readFileSync(changelogPath, "utf-8"), pkgVersion);
29296
+ if (import_node_fs13.existsSync(changelogPath)) {
29297
+ const section = parseChangelogSection(import_node_fs13.readFileSync(changelogPath, "utf-8"), pkgVersion);
29146
29298
  if (section)
29147
29299
  changelogBody = section;
29148
29300
  }
@@ -29173,7 +29325,7 @@ ${[...plan.packages].map(([pkgPath, ver]) => `- ${getPackageName(ctx2, pkgPath)}
29173
29325
  task.output = `Release already exists for ${tag}, skipped.`;
29174
29326
  }
29175
29327
  if (tempDir)
29176
- import_node_fs11.rmSync(tempDir, { recursive: true, force: true });
29328
+ import_node_fs13.rmSync(tempDir, { recursive: true, force: true });
29177
29329
  }
29178
29330
  } else {
29179
29331
  const version = plan.version;
@@ -29188,8 +29340,8 @@ ${[...plan.packages].map(([pkgPath, ver]) => `- ${getPackageName(ctx2, pkgPath)}
29188
29340
  if (!pkgConfig)
29189
29341
  continue;
29190
29342
  const changelogPath = import_node_path20.join(import_node_process15.default.cwd(), pkgConfig.path, "CHANGELOG.md");
29191
- if (import_node_fs11.existsSync(changelogPath)) {
29192
- const section = parseChangelogSection(import_node_fs11.readFileSync(changelogPath, "utf-8"), pkgVersion);
29343
+ if (import_node_fs13.existsSync(changelogPath)) {
29344
+ const section = parseChangelogSection(import_node_fs13.readFileSync(changelogPath, "utf-8"), pkgVersion);
29193
29345
  if (section) {
29194
29346
  sections.push(`## ${pkgName} v${pkgVersion}
29195
29347
 
@@ -29206,8 +29358,8 @@ ${section}`);
29206
29358
  }
29207
29359
  } else {
29208
29360
  const changelogPath = import_node_path20.join(import_node_process15.default.cwd(), "CHANGELOG.md");
29209
- if (import_node_fs11.existsSync(changelogPath)) {
29210
- const section = parseChangelogSection(import_node_fs11.readFileSync(changelogPath, "utf-8"), version);
29361
+ if (import_node_fs13.existsSync(changelogPath)) {
29362
+ const section = parseChangelogSection(import_node_fs13.readFileSync(changelogPath, "utf-8"), version);
29211
29363
  if (section)
29212
29364
  changelogBody = section;
29213
29365
  }
@@ -29240,7 +29392,7 @@ ${section}`);
29240
29392
  task.output = `Release already exists for ${tag}, skipped.`;
29241
29393
  }
29242
29394
  if (tempDir)
29243
- import_node_fs11.rmSync(tempDir, { recursive: true, force: true });
29395
+ import_node_fs13.rmSync(tempDir, { recursive: true, force: true });
29244
29396
  }
29245
29397
  } else {
29246
29398
  const git = new Git;
@@ -29347,13 +29499,13 @@ function maxBump(a3, b2) {
29347
29499
  return BUMP_ORDER[a3] >= BUMP_ORDER[b2] ? a3 : b2;
29348
29500
  }
29349
29501
  // src/changeset/migrate.ts
29350
- var import_node_fs12 = require("node:fs");
29502
+ var import_node_fs14 = require("node:fs");
29351
29503
  var import_node_path21 = __toESM(require("node:path"));
29352
29504
  var import_node_process16 = __toESM(require("node:process"));
29353
29505
  var SKIPPED_FILES = new Set(["config.json", "README.md"]);
29354
29506
  function migrateFromChangesets(cwd = import_node_process16.default.cwd()) {
29355
29507
  const changesetDir = import_node_path21.default.join(cwd, ".changeset");
29356
- if (!import_node_fs12.existsSync(changesetDir)) {
29508
+ if (!import_node_fs14.existsSync(changesetDir)) {
29357
29509
  return {
29358
29510
  success: false,
29359
29511
  error: ".changeset/ directory not found",
@@ -29362,8 +29514,8 @@ function migrateFromChangesets(cwd = import_node_process16.default.cwd()) {
29362
29514
  };
29363
29515
  }
29364
29516
  const pubmDir = import_node_path21.default.join(cwd, ".pubm", "changesets");
29365
- import_node_fs12.mkdirSync(pubmDir, { recursive: true });
29366
- const files = import_node_fs12.readdirSync(changesetDir);
29517
+ import_node_fs14.mkdirSync(pubmDir, { recursive: true });
29518
+ const files = import_node_fs14.readdirSync(changesetDir);
29367
29519
  const migratedFiles = [];
29368
29520
  let configMigrated = false;
29369
29521
  for (const file of files) {
@@ -29375,14 +29527,14 @@ function migrateFromChangesets(cwd = import_node_process16.default.cwd()) {
29375
29527
  continue;
29376
29528
  }
29377
29529
  if (file === "pre.json") {
29378
- import_node_fs12.copyFileSync(import_node_path21.default.join(changesetDir, file), import_node_path21.default.resolve(cwd, ".pubm", "pre.json"));
29530
+ import_node_fs14.copyFileSync(import_node_path21.default.join(changesetDir, file), import_node_path21.default.resolve(cwd, ".pubm", "pre.json"));
29379
29531
  migratedFiles.push(file);
29380
29532
  continue;
29381
29533
  }
29382
29534
  if (file.endsWith(".md")) {
29383
29535
  const src = import_node_path21.default.join(changesetDir, file);
29384
29536
  const dest = import_node_path21.default.join(pubmDir, file);
29385
- import_node_fs12.copyFileSync(src, dest);
29537
+ import_node_fs14.copyFileSync(src, dest);
29386
29538
  migratedFiles.push(file);
29387
29539
  }
29388
29540
  }
@@ -29454,7 +29606,7 @@ function calculateVersionBumps(currentVersions, cwd = import_node_process18.defa
29454
29606
  return result;
29455
29607
  }
29456
29608
  // src/changeset/writer.ts
29457
- var import_node_fs13 = require("node:fs");
29609
+ var import_node_fs15 = require("node:fs");
29458
29610
  var import_node_path22 = __toESM(require("node:path"));
29459
29611
  var import_node_process19 = __toESM(require("node:process"));
29460
29612
  var import_yaml3 = __toESM(require_dist(), 1);
@@ -29553,12 +29705,12 @@ ${summary}
29553
29705
  }
29554
29706
  function writeChangeset(releases, summary, cwd = import_node_process19.default.cwd()) {
29555
29707
  const changesetsDir = import_node_path22.default.join(cwd, ".pubm", "changesets");
29556
- import_node_fs13.mkdirSync(changesetsDir, { recursive: true });
29708
+ import_node_fs15.mkdirSync(changesetsDir, { recursive: true });
29557
29709
  const id = generateChangesetId();
29558
29710
  const fileName = `${id}.md`;
29559
29711
  const filePath = import_node_path22.default.join(changesetsDir, fileName);
29560
29712
  const content = generateChangesetContent(releases, summary);
29561
- import_node_fs13.writeFileSync(filePath, content, "utf-8");
29713
+ import_node_fs15.writeFileSync(filePath, content, "utf-8");
29562
29714
  return filePath;
29563
29715
  }
29564
29716
  // src/config/defaults.ts
@@ -29584,7 +29736,8 @@ var defaultConfig = {
29584
29736
  saveToken: true,
29585
29737
  releaseDraft: true,
29586
29738
  releaseNotes: true,
29587
- rollbackStrategy: "individual"
29739
+ rollbackStrategy: "individual",
29740
+ lockfileSync: "optional"
29588
29741
  };
29589
29742
  async function resolveConfig(config, cwd) {
29590
29743
  const resolvedCwd = cwd ?? process.cwd();
@@ -30313,17 +30466,32 @@ function formatStageError(stage, error2) {
30313
30466
  const message = error2 instanceof Error ? error2.stack ?? error2.message : String(error2);
30314
30467
  return `[${stage}] ${message}`;
30315
30468
  }
30316
- async function loadConfig(cwd = process.cwd()) {
30317
- const configPath = await findConfigFile(cwd);
30318
- if (!configPath)
30469
+ async function loadConfig(cwd = process.cwd(), configPath) {
30470
+ let resolvedConfigPath;
30471
+ if (configPath) {
30472
+ resolvedConfigPath = import_node_path23.default.resolve(cwd, configPath);
30473
+ try {
30474
+ if (!(await import_promises8.stat(resolvedConfigPath)).isFile()) {
30475
+ throw new Error(`Config path is not a file: ${resolvedConfigPath}`);
30476
+ }
30477
+ } catch (e3) {
30478
+ if (e3.code === "ENOENT") {
30479
+ throw new Error(`Config file not found: ${resolvedConfigPath}`);
30480
+ }
30481
+ throw e3;
30482
+ }
30483
+ } else {
30484
+ resolvedConfigPath = await findConfigFile(cwd);
30485
+ }
30486
+ if (!resolvedConfigPath)
30319
30487
  return null;
30320
30488
  const errors = [];
30321
30489
  try {
30322
- return await importConfigModule(configPath);
30490
+ return await importConfigModule(resolvedConfigPath);
30323
30491
  } catch (error2) {
30324
30492
  errors.push(formatStageError("native import", error2));
30325
30493
  }
30326
- const output = await buildConfig(configPath);
30494
+ const output = await buildConfig(resolvedConfigPath);
30327
30495
  if (!output.success) {
30328
30496
  errors.push(formatStageError("bundled build", output.logs.map((log) => log.message).join(`
30329
30497
  `)));
@@ -30342,11 +30510,11 @@ ${errors.join(`
30342
30510
  }
30343
30511
  const bundledSource = await entrypoint.text();
30344
30512
  try {
30345
- return await importBundledConfig(bundledSource, configPath, output.optionalDependencies);
30513
+ return await importBundledConfig(bundledSource, resolvedConfigPath, output.optionalDependencies);
30346
30514
  } catch (error2) {
30347
30515
  errors.push(formatStageError("bundled import", error2));
30348
30516
  }
30349
- const vmOutput = await buildConfigWithFormat(configPath, "cjs");
30517
+ const vmOutput = await buildConfigWithFormat(resolvedConfigPath, "cjs");
30350
30518
  if (!vmOutput.success) {
30351
30519
  errors.push(formatStageError("bundled vm build", vmOutput.logs.map((log) => log.message).join(`
30352
30520
  `)));
@@ -30364,7 +30532,7 @@ ${errors.join(`
30364
30532
  `)}`);
30365
30533
  }
30366
30534
  try {
30367
- return await executeBundledConfigInVm(await vmEntrypoint.text(), configPath);
30535
+ return await executeBundledConfigInVm(await vmEntrypoint.text(), resolvedConfigPath);
30368
30536
  } catch (error2) {
30369
30537
  errors.push(formatStageError("bundled vm", error2));
30370
30538
  throw new Error(`Failed to load config:
@@ -30784,6 +30952,9 @@ async function handleMultiPackage(ctx, task, packageInfos) {
30784
30952
  if (result === "accepted")
30785
30953
  return;
30786
30954
  if (result === "add_packages") {
30955
+ if (!bumps) {
30956
+ throw new Error("Changeset bumps are required for add_packages.");
30957
+ }
30787
30958
  await handleAddPackages(ctx, task, sortedPackageInfos, currentVersions, graph, bumps);
30788
30959
  return;
30789
30960
  }
@@ -30807,6 +30978,7 @@ async function handleRemainingPackages(ctx, task, remainingPackages, packageInfo
30807
30978
  const versions = new Map([...bumps].map(([p2, b2]) => [p2, b2.newVersion]));
30808
30979
  const publishPaths = new Set(bumps.keys());
30809
30980
  const reverseDeps = buildReverseDeps(graph);
30981
+ let lastBumpType;
30810
30982
  for (const pkg of remainingPackages) {
30811
30983
  const currentVersion = currentVersions.get(pkg.path) ?? pkg.version;
30812
30984
  const deps = graph.get(pkg.path) ?? [];
@@ -30819,8 +30991,9 @@ async function handleRemainingPackages(ctx, task, remainingPackages, packageInfo
30819
30991
  if (pkgNotes.length > 0) {
30820
30992
  task.output = renderPackageVersionSummary(remainingPackages, currentVersions, versions, { activePackage: pkg.path, notes: new Map([[pkg.path, pkgNotes]]) });
30821
30993
  }
30822
- const result = await promptVersion(task, currentVersion, pkg.name);
30994
+ const result = await promptVersion(task, currentVersion, pkg.name, undefined, lastBumpType);
30823
30995
  versions.set(pkg.path, result.version);
30996
+ lastBumpType = result.bumpType;
30824
30997
  if (result.version !== currentVersion) {
30825
30998
  bumpedPackages.add(pkg.path);
30826
30999
  publishPaths.add(pkg.path);
@@ -33110,11 +33283,11 @@ function isBun2() {
33110
33283
  init_ui();
33111
33284
 
33112
33285
  // src/validate/entry-points.ts
33113
- var import_node_fs14 = require("node:fs");
33286
+ var import_node_fs16 = require("node:fs");
33114
33287
  var import_node_path24 = __toESM(require("node:path"));
33115
33288
  var SIMPLE_FIELDS = ["main", "module", "types", "typings"];
33116
33289
  function checkPath(filePath, cwd) {
33117
- return import_node_fs14.existsSync(import_node_path24.default.resolve(cwd, filePath));
33290
+ return import_node_fs16.existsSync(import_node_path24.default.resolve(cwd, filePath));
33118
33291
  }
33119
33292
  function validateExports(exports2, cwd, prefix = "exports") {
33120
33293
  const errors = [];