@eldrforge/kodrdriv 0.0.9 → 0.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/README.md +65 -707
  2. package/dist/arguments.js +79 -55
  3. package/dist/arguments.js.map +1 -1
  4. package/dist/commands/commit.js +28 -16
  5. package/dist/commands/commit.js.map +1 -1
  6. package/dist/commands/link.js +18 -18
  7. package/dist/commands/link.js.map +1 -1
  8. package/dist/commands/publish.js +170 -93
  9. package/dist/commands/publish.js.map +1 -1
  10. package/dist/commands/release.js +8 -0
  11. package/dist/commands/release.js.map +1 -1
  12. package/dist/commands/unlink.js +20 -20
  13. package/dist/commands/unlink.js.map +1 -1
  14. package/dist/constants.js +4 -2
  15. package/dist/constants.js.map +1 -1
  16. package/dist/logging.js +35 -23
  17. package/dist/logging.js.map +1 -1
  18. package/dist/main.js +49 -3
  19. package/dist/main.js.map +1 -1
  20. package/dist/util/child.js +13 -1
  21. package/dist/util/child.js.map +1 -1
  22. package/docs/index.html +17 -0
  23. package/docs/package.json +36 -0
  24. package/docs/public/README.md +132 -0
  25. package/docs/public/advanced-usage.md +188 -0
  26. package/docs/public/code-icon.svg +4 -0
  27. package/docs/public/commands.md +116 -0
  28. package/docs/public/configuration.md +274 -0
  29. package/docs/public/examples.md +352 -0
  30. package/docs/public/kodrdriv-logo.svg +62 -0
  31. package/docs/src/App.css +387 -0
  32. package/docs/src/App.tsx +60 -0
  33. package/docs/src/components/DocumentPage.tsx +56 -0
  34. package/docs/src/components/ErrorMessage.tsx +15 -0
  35. package/docs/src/components/LoadingSpinner.tsx +14 -0
  36. package/docs/src/components/MarkdownRenderer.tsx +56 -0
  37. package/docs/src/components/Navigation.css +73 -0
  38. package/docs/src/components/Navigation.tsx +36 -0
  39. package/docs/src/index.css +61 -0
  40. package/docs/src/main.tsx +10 -0
  41. package/docs/src/test/setup.ts +1 -0
  42. package/docs/src/vite-env.d.ts +10 -0
  43. package/docs/tsconfig.node.json +13 -0
  44. package/docs/vite.config.ts +15 -0
  45. package/docs/vitest.config.ts +15 -0
  46. package/eslint.config.mjs +1 -0
  47. package/package.json +10 -5
  48. package/vitest.config.ts +3 -3
@@ -8,9 +8,9 @@ const scanDirectoryForPackages = async (rootDir, storage)=>{
  const logger = getLogger();
  const packageMap = new Map(); // packageName -> relativePath
  const absoluteRootDir = path.resolve(process.cwd(), rootDir);
- logger.debug(`Scanning directory for packages: ${absoluteRootDir}`);
+ logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);
  if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {
- logger.debug(`Root directory does not exist or is not a directory: ${absoluteRootDir}`);
+ logger.verbose(`Root directory does not exist or is not a directory: ${absoluteRootDir}`);
  return packageMap;
  }
  try {
@@ -43,11 +43,11 @@ const scanDirectoryForPackages = async (rootDir, storage)=>{
  const findPackagesByScope = async (dependencies, scopeRoots, storage)=>{
  const logger = getLogger();
  const workspacePackages = new Map();
- logger.debug(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);
+ logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);
  // First, scan all scope roots to build a comprehensive map of available packages
  const allPackages = new Map(); // packageName -> relativePath
  for (const [scope, rootDir] of Object.entries(scopeRoots)){
- logger.debug(`Scanning scope ${scope} at root directory: ${rootDir}`);
+ logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);
  const scopePackages = await scanDirectoryForPackages(rootDir, storage);
  // Add packages from this scope to the overall map
  for (const [packageName, packagePath] of scopePackages){
@@ -63,7 +63,7 @@ const findPackagesByScope = async (dependencies, scopeRoots, storage)=>{
  if (allPackages.has(depName)) {
  const packagePath = allPackages.get(depName);
  workspacePackages.set(depName, packagePath);
- logger.info(`Found sibling package: ${depName} at ${packagePath}`);
+ logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);
  }
  }
  return workspacePackages;
@@ -94,7 +94,7 @@ const execute = async (runConfig)=>{
  const storage = create({
  log: logger.info
  });
- logger.info('Starting pnpm workspace link management using overrides...');
+ logger.verbose('Starting pnpm workspace link management using overrides...');
  // Read current package.json
  const packageJsonPath = path.join(process.cwd(), 'package.json');
  if (!await storage.exists(packageJsonPath)) {
@@ -107,33 +107,33 @@ const execute = async (runConfig)=>{
  } catch (error) {
  throw new Error(`Failed to parse package.json: ${error}`);
  }
- logger.info(`Processing package: ${packageJson.name || 'unnamed'}`);
+ logger.verbose(`Processing package: ${packageJson.name || 'unnamed'}`);
  // Get configuration
  const scopeRoots = ((_runConfig_link = runConfig.link) === null || _runConfig_link === void 0 ? void 0 : _runConfig_link.scopeRoots) || {};
  const workspaceFileName = ((_runConfig_link1 = runConfig.link) === null || _runConfig_link1 === void 0 ? void 0 : _runConfig_link1.workspaceFile) || 'pnpm-workspace.yaml';
  const isDryRun = runConfig.dryRun || ((_runConfig_link2 = runConfig.link) === null || _runConfig_link2 === void 0 ? void 0 : _runConfig_link2.dryRun) || false;
- logger.debug('Extracted scopeRoots:', JSON.stringify(scopeRoots));
+ logger.silly('Extracted scopeRoots:', JSON.stringify(scopeRoots));
  logger.debug('Extracted workspaceFileName:', workspaceFileName);
  logger.debug('Extracted isDryRun:', isDryRun);
  if (Object.keys(scopeRoots).length === 0) {
- logger.info('No scope roots configured. Skipping link management.');
+ logger.verbose('No scope roots configured. Skipping link management.');
  return 'No scope roots configured. Skipping link management.';
  }
- logger.info(`Configured scope roots: ${JSON.stringify(scopeRoots)}`);
+ logger.silly(`Configured scope roots: ${JSON.stringify(scopeRoots)}`);
  // Collect all dependencies
  const allDependencies = {
  ...packageJson.dependencies,
  ...packageJson.devDependencies,
  ...packageJson.peerDependencies
  };
- logger.info(`Found ${Object.keys(allDependencies).length} total dependencies`);
+ logger.verbose(`Found ${Object.keys(allDependencies).length} total dependencies`);
  // Find matching sibling packages
  const packagesToLink = await findPackagesByScope(allDependencies, scopeRoots, storage);
  if (packagesToLink.size === 0) {
- logger.info('No matching sibling packages found for linking.');
+ logger.verbose('No matching sibling packages found for linking.');
  return 'No matching sibling packages found for linking.';
  }
- logger.info(`Found ${packagesToLink.size} packages to link: ${[
+ logger.verbose(`Found ${packagesToLink.size} packages to link: ${[
  ...packagesToLink.keys()
  ].join(', ')}`);
  // Read existing workspace configuration
@@ -158,18 +158,18 @@ const execute = async (runConfig)=>{
  };
  // Write the updated workspace file
  if (isDryRun) {
- logger.info('DRY RUN: Would write the following workspace configuration:');
- logger.info(yaml.dump(updatedConfig, {
+ logger.verbose('DRY RUN: Would write the following workspace configuration:');
+ logger.silly(yaml.dump(updatedConfig, {
  indent: 2
  }));
  } else {
  await writeWorkspaceFile(workspaceFilePath, updatedConfig, storage);
- logger.info(`Updated ${workspaceFileName} with ${packagesToLink.size} linked packages in overrides.`);
+ logger.verbose(`Updated ${workspaceFileName} with ${packagesToLink.size} linked packages in overrides.`);
  // Rebuild pnpm lock file and node_modules
- logger.info('Running pnpm install to apply links...');
+ logger.verbose('Running pnpm install to apply links...');
  try {
  await run('pnpm install');
- logger.info('Successfully applied links.');
+ logger.verbose('Successfully applied links.');
  } catch (error) {
  logger.warn(`Failed to run pnpm install: ${error}. You may need to run 'pnpm install' manually.`);
  }
@@ -1 +1 @@
- {"version":3,"file":"link.js","sources":["../../src/commands/link.ts"],"sourcesContent":["import path from 'path';\nimport yaml from 'js-yaml';\nimport { getLogger } from '../logging';\nimport { Config } from '../types';\nimport { create as createStorage } from '../util/storage';\nimport { run } from '../util/child';\n\ninterface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\ninterface PnpmWorkspaceFile {\n packages?: string[];\n overrides?: Record<string, string>;\n}\n\nconst scanDirectoryForPackages = async (rootDir: string, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const packageMap = new Map<string, string>(); // packageName -> relativePath\n\n const absoluteRootDir = path.resolve(process.cwd(), rootDir);\n logger.debug(`Scanning directory for packages: ${absoluteRootDir}`);\n\n if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {\n logger.debug(`Root directory does not exist or is not a directory: ${absoluteRootDir}`);\n return packageMap;\n }\n\n try {\n // Get all subdirectories in the root directory\n const items = await storage.listFiles(absoluteRootDir);\n\n for (const item of items) {\n const itemPath = path.join(absoluteRootDir, item);\n\n if (await storage.isDirectory(itemPath)) {\n const packageJsonPath = path.join(itemPath, 'package.json');\n\n if (await storage.exists(packageJsonPath)) {\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const packageJson = JSON.parse(packageJsonContent) as PackageJson;\n\n if (packageJson.name) {\n const relativePath = path.relative(process.cwd(), itemPath);\n packageMap.set(packageJson.name, relativePath);\n logger.debug(`Found package: ${packageJson.name} at ${relativePath}`);\n }\n } catch (error) {\n logger.debug(`Failed to parse package.json at ${packageJsonPath}: ${error}`);\n }\n }\n }\n }\n } catch (error) {\n logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);\n }\n\n return packageMap;\n};\n\nconst findPackagesByScope = async (dependencies: Record<string, string>, scopeRoots: Record<string, string>, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const workspacePackages = new Map<string, string>();\n\n logger.debug(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // First, scan all scope roots to build a comprehensive map of available packages\n const allPackages = new Map<string, string>(); // packageName -> relativePath\n\n for (const [scope, rootDir] of Object.entries(scopeRoots)) {\n logger.debug(`Scanning scope ${scope} at root directory: ${rootDir}`);\n const scopePackages = await scanDirectoryForPackages(rootDir, storage);\n\n // Add packages from this scope to the overall map\n for (const [packageName, packagePath] of scopePackages) {\n if (packageName.startsWith(scope)) {\n allPackages.set(packageName, packagePath);\n logger.debug(`Registered package: ${packageName} -> ${packagePath}`);\n }\n }\n }\n\n // Now check each dependency against our discovered packages\n for (const [depName, depVersion] of Object.entries(dependencies)) {\n logger.debug(`Processing dependency: ${depName}@${depVersion}`);\n\n if (allPackages.has(depName)) {\n const packagePath = allPackages.get(depName)!;\n workspacePackages.set(depName, packagePath);\n logger.info(`Found sibling package: ${depName} at ${packagePath}`);\n }\n }\n\n 
return workspacePackages;\n};\n\nconst readCurrentWorkspaceFile = async (workspaceFilePath: string, storage: any): Promise<PnpmWorkspaceFile> => {\n if (await storage.exists(workspaceFilePath)) {\n try {\n const content = await storage.readFile(workspaceFilePath, 'utf-8');\n return (yaml.load(content) as PnpmWorkspaceFile) || {};\n } catch (error) {\n throw new Error(`Failed to parse existing workspace file: ${error}`);\n }\n }\n return {};\n};\n\nconst writeWorkspaceFile = async (workspaceFilePath: string, config: PnpmWorkspaceFile, storage: any): Promise<void> => {\n const yamlContent = yaml.dump(config, {\n indent: 2,\n lineWidth: -1,\n noRefs: true,\n sortKeys: false\n });\n await storage.writeFile(workspaceFilePath, yamlContent, 'utf-8');\n};\n\nexport const execute = async (runConfig: Config): Promise<string> => {\n const logger = getLogger();\n const storage = createStorage({ log: logger.info });\n\n logger.info('Starting pnpm workspace link management using overrides...');\n\n // Read current package.json\n const packageJsonPath = path.join(process.cwd(), 'package.json');\n if (!await storage.exists(packageJsonPath)) {\n throw new Error('package.json not found in current directory.');\n }\n\n let packageJson: PackageJson;\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n packageJson = JSON.parse(packageJsonContent);\n } catch (error) {\n throw new Error(`Failed to parse package.json: ${error}`);\n }\n\n logger.info(`Processing package: ${packageJson.name || 'unnamed'}`);\n\n // Get configuration\n const scopeRoots = runConfig.link?.scopeRoots || {};\n const workspaceFileName = runConfig.link?.workspaceFile || 'pnpm-workspace.yaml';\n const isDryRun = runConfig.dryRun || runConfig.link?.dryRun || false;\n\n logger.debug('Extracted scopeRoots:', JSON.stringify(scopeRoots));\n logger.debug('Extracted workspaceFileName:', workspaceFileName);\n logger.debug('Extracted isDryRun:', isDryRun);\n\n if (Object.keys(scopeRoots).length === 0) {\n logger.info('No scope roots configured. Skipping link management.');\n return 'No scope roots configured. 
Skipping link management.';\n }\n\n logger.info(`Configured scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // Collect all dependencies\n const allDependencies = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n ...packageJson.peerDependencies\n };\n\n logger.info(`Found ${Object.keys(allDependencies).length} total dependencies`);\n\n // Find matching sibling packages\n const packagesToLink = await findPackagesByScope(allDependencies, scopeRoots, storage);\n\n if (packagesToLink.size === 0) {\n logger.info('No matching sibling packages found for linking.');\n return 'No matching sibling packages found for linking.';\n }\n\n logger.info(`Found ${packagesToLink.size} packages to link: ${[...packagesToLink.keys()].join(', ')}`);\n\n // Read existing workspace configuration\n const workspaceFilePath = path.join(process.cwd(), workspaceFileName);\n const workspaceConfig = await readCurrentWorkspaceFile(workspaceFilePath, storage);\n\n // Create overrides\n const newOverrides: Record<string, string> = {};\n for (const [packageName, packagePath] of packagesToLink.entries()) {\n newOverrides[packageName] = `link:${packagePath}`;\n }\n\n const updatedOverrides = { ...(workspaceConfig.overrides || {}), ...newOverrides };\n\n const sortedOverrides = Object.keys(updatedOverrides)\n .sort()\n .reduce((obj, key) => {\n obj[key] = updatedOverrides[key];\n return obj;\n }, {} as Record<string, string>);\n\n const updatedConfig: PnpmWorkspaceFile = {\n ...workspaceConfig,\n overrides: sortedOverrides\n };\n\n\n // Write the updated workspace file\n if (isDryRun) {\n logger.info('DRY RUN: Would write the following workspace configuration:');\n logger.info(yaml.dump(updatedConfig, { indent: 2 }));\n } else {\n await writeWorkspaceFile(workspaceFilePath, updatedConfig, storage);\n logger.info(`Updated ${workspaceFileName} with ${packagesToLink.size} linked packages in overrides.`);\n\n // Rebuild pnpm lock file and node_modules\n logger.info('Running pnpm install to apply links...');\n try {\n await run('pnpm install');\n logger.info('Successfully applied links.');\n } catch (error) {\n logger.warn(`Failed to run pnpm install: ${error}. 
You may need to run 'pnpm install' manually.`);\n }\n }\n\n const summary = `Successfully linked ${packagesToLink.size} sibling packages:\\n${[...packagesToLink.entries()].map(([name, path]) => ` - ${name}: link:${path}`).join('\\n')}`;\n\n return summary;\n};"],"names":["scanDirectoryForPackages","rootDir","storage","logger","getLogger","packageMap","Map","absoluteRootDir","path","resolve","process","cwd","debug","exists","isDirectory","items","listFiles","item","itemPath","join","packageJsonPath","packageJsonContent","readFile","packageJson","JSON","parse","name","relativePath","relative","set","error","warn","findPackagesByScope","dependencies","scopeRoots","workspacePackages","stringify","allPackages","scope","Object","entries","scopePackages","packageName","packagePath","startsWith","depName","depVersion","has","get","info","readCurrentWorkspaceFile","workspaceFilePath","content","yaml","load","Error","writeWorkspaceFile","config","yamlContent","dump","indent","lineWidth","noRefs","sortKeys","writeFile","execute","runConfig","createStorage","log","link","workspaceFileName","workspaceFile","isDryRun","dryRun","keys","length","allDependencies","devDependencies","peerDependencies","packagesToLink","size","workspaceConfig","newOverrides","updatedOverrides","overrides","sortedOverrides","sort","reduce","obj","key","updatedConfig","run","summary","map"],"mappings":";;;;;;AAmBA,MAAMA,wBAAAA,GAA2B,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACrD,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,UAAAA,GAAa,IAAIC,GAAAA,EAAAA,CAAAA;AAEvB,IAAA,MAAMC,kBAAkBC,IAAAA,CAAKC,OAAO,CAACC,OAAAA,CAAQC,GAAG,EAAA,EAAIV,OAAAA,CAAAA;AACpDE,IAAAA,MAAAA,CAAOS,KAAK,CAAC,CAAC,iCAAiC,EAAEL,eAAAA,CAAAA,CAAiB,CAAA;IAElE,IAAI,CAAC,MAAML,OAAAA,CAAQW,MAAM,CAACN,eAAAA,CAAAA,IAAoB,CAAC,MAAML,OAAAA,CAAQY,WAAW,CAACP,eAAAA,CAAAA,EAAkB;AACvFJ,QAAAA,MAAAA,CAAOS,KAAK,CAAC,CAAC,qDAAqD,EAAEL,eAAAA,CAAAA,CAAiB,CAAA;QACtF,OAAOF,UAAAA;AACX;IAEA,IAAI;;AAEA,QAAA,MAAMU,KAAAA,GAAQ,MAAMb,OAAAA,CAAQc,SAAS,CAACT,eAAAA,CAAAA;QAEtC,KAAK,MAAMU,QAAQF,KAAAA,CAAO;AACtB,YAAA,MAAMG,QAAAA,GAAWV,IAAAA,CAAKW,IAAI,CAACZ,eAAAA,EAAiBU,IAAAA,CAAAA;AAE5C,YAAA,IAAI,MAAMf,OAAAA,CAAQY,WAAW,CAACI,QAAAA,CAAAA,EAAW;AACrC,gBAAA,MAAME,eAAAA,GAAkBZ,IAAAA,CAAKW,IAAI,CAACD,QAAAA,EAAU,cAAA,CAAA;AAE5C,gBAAA,IAAI,MAAMhB,OAAAA,CAAQW,MAAM,CAACO,eAAAA,CAAAA,EAAkB;oBACvC,IAAI;AACA,wBAAA,MAAMC,kBAAAA,GAAqB,MAAMnB,OAAAA,CAAQoB,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;wBACnE,MAAMG,WAAAA,GAAcC,IAAAA,CAAKC,KAAK,CAACJ,kBAAAA,CAAAA;wBAE/B,IAAIE,WAAAA,CAAYG,IAAI,EAAE;AAClB,4BAAA,MAAMC,eAAenB,IAAAA,CAAKoB,QAAQ,CAAClB,OAAAA,CAAQC,GAAG,EAAA,EAAIO,QAAAA,CAAAA;AAClDb,4BAAAA,UAAAA,CAAWwB,GAAG,CAACN,WAAAA,CAAYG,IAAI,EAAEC,YAAAA,CAAAA;4BACjCxB,MAAAA,CAAOS,KAAK,CAAC,CAAC,eAAe,EAAEW,YAAYG,IAAI,CAAC,IAAI,EAAEC,YAAAA,CAAAA,CAAc,CAAA;AACxE;AACJ,qBAAA,CAAE,OAAOG,KAAAA,EAAO;wBACZ3B,MAAAA,CAAOS,KAAK,CAAC,CAAC,gCAAgC,EAAEQ,eAAAA,CAAgB,EAAE,EAAEU,KAAAA,CAAAA,CAAO,CAAA;AAC/E;AACJ;AACJ;AACJ;AACJ,KAAA,CAAE,OAAOA,KAAAA,EAAO;QACZ3B,MAAAA,CAAO4B,IAAI,CAAC,CAAC,yBAAyB,EAAExB,eAAAA,CAAgB,EAAE,EAAEuB,KAAAA,CAAAA,CAAO,CAAA;AACvE;IAEA,OAAOzB,UAAAA;AACX,CAAA;AAEA,MAAM2B,mBAAAA,GAAsB,OAAOC,YAAAA,EAAsCC,UAAAA,EAAoChC,OAAAA,GAAAA;AACzG,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAM+B,oBAAoB,IAAI7B,GAAAA,EAAAA;IAE9BH,MAAAA,CAAOS,KAAK,CAAC,CAAC,2CAA2C,EAAEY,IAAAA,CAAKY,SAAS,CAACF,UAAAA,CAAAA,CAAAA,CAAa,CAAA;;IAGvF,MAAMG,WAAAA,GAAc,IAAI/B,GAAAA,EAAAA,CAAAA;IAExB,KAAK,MAAM,CAACgC,KAAAA,EAAOrC,OAAAA,CAAQ,IAAIsC,MAAAA,CAAOC,OAAO,CAACN,UAAAA,CAAAA,CAAa;QACvD/B,MAAAA,CAAOS,KAAK,CAAC,CAAC,eAAe,EAAE0B,KAAAA,CAAM,oBAAoB,EAAErC,OAAAA,CAAAA,CAAS,C
AAA;QACpE,MAAMwC,aAAAA,GAAgB,MAAMzC,wBAAAA,CAAyBC,OAAAA,EAASC,OAAAA,CAAAA;;AAG9D,QAAA,KAAK,MAAM,CAACwC,WAAAA,EAAaC,WAAAA,CAAY,IAAIF,aAAAA,CAAe;YACpD,IAAIC,WAAAA,CAAYE,UAAU,CAACN,KAAAA,CAAAA,EAAQ;gBAC/BD,WAAAA,CAAYR,GAAG,CAACa,WAAAA,EAAaC,WAAAA,CAAAA;gBAC7BxC,MAAAA,CAAOS,KAAK,CAAC,CAAC,oBAAoB,EAAE8B,WAAAA,CAAY,IAAI,EAAEC,WAAAA,CAAAA,CAAa,CAAA;AACvE;AACJ;AACJ;;IAGA,KAAK,MAAM,CAACE,OAAAA,EAASC,UAAAA,CAAW,IAAIP,MAAAA,CAAOC,OAAO,CAACP,YAAAA,CAAAA,CAAe;QAC9D9B,MAAAA,CAAOS,KAAK,CAAC,CAAC,uBAAuB,EAAEiC,OAAAA,CAAQ,CAAC,EAAEC,UAAAA,CAAAA,CAAY,CAAA;QAE9D,IAAIT,WAAAA,CAAYU,GAAG,CAACF,OAAAA,CAAAA,EAAU;YAC1B,MAAMF,WAAAA,GAAcN,WAAAA,CAAYW,GAAG,CAACH,OAAAA,CAAAA;YACpCV,iBAAAA,CAAkBN,GAAG,CAACgB,OAAAA,EAASF,WAAAA,CAAAA;YAC/BxC,MAAAA,CAAO8C,IAAI,CAAC,CAAC,uBAAuB,EAAEJ,OAAAA,CAAQ,IAAI,EAAEF,WAAAA,CAAAA,CAAa,CAAA;AACrE;AACJ;IAEA,OAAOR,iBAAAA;AACX,CAAA;AAEA,MAAMe,wBAAAA,GAA2B,OAAOC,iBAAAA,EAA2BjD,OAAAA,GAAAA;AAC/D,IAAA,IAAI,MAAMA,OAAAA,CAAQW,MAAM,CAACsC,iBAAAA,CAAAA,EAAoB;QACzC,IAAI;AACA,YAAA,MAAMC,OAAAA,GAAU,MAAMlD,OAAAA,CAAQoB,QAAQ,CAAC6B,iBAAAA,EAAmB,OAAA,CAAA;AAC1D,YAAA,OAAO,IAACE,CAAKC,IAAI,CAACF,YAAkC,EAAC;AACzD,SAAA,CAAE,OAAOtB,KAAAA,EAAO;AACZ,YAAA,MAAM,IAAIyB,KAAAA,CAAM,CAAC,yCAAyC,EAAEzB,KAAAA,CAAAA,CAAO,CAAA;AACvE;AACJ;AACA,IAAA,OAAO,EAAC;AACZ,CAAA;AAEA,MAAM0B,kBAAAA,GAAqB,OAAOL,iBAAAA,EAA2BM,MAAAA,EAA2BvD,OAAAA,GAAAA;AACpF,IAAA,MAAMwD,WAAAA,GAAcL,IAAAA,CAAKM,IAAI,CAACF,MAAAA,EAAQ;QAClCG,MAAAA,EAAQ,CAAA;AACRC,QAAAA,SAAAA,EAAW,EAAC;QACZC,MAAAA,EAAQ,IAAA;QACRC,QAAAA,EAAU;AACd,KAAA,CAAA;AACA,IAAA,MAAM7D,OAAAA,CAAQ8D,SAAS,CAACb,iBAAAA,EAAmBO,WAAAA,EAAa,OAAA,CAAA;AAC5D,CAAA;AAEO,MAAMO,UAAU,OAAOC,SAAAA,GAAAA;AAuBPA,IAAAA,IAAAA,eAAAA,EACOA,gBAAAA,EACWA,gBAAAA;AAxBrC,IAAA,MAAM/D,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMF,UAAUiE,MAAAA,CAAc;AAAEC,QAAAA,GAAAA,EAAKjE,OAAO8C;AAAK,KAAA,CAAA;AAEjD9C,IAAAA,MAAAA,CAAO8C,IAAI,CAAC,4DAAA,CAAA;;AAGZ,IAAA,MAAM7B,kBAAkBZ,IAAAA,CAAKW,IAAI,CAACT,OAAAA,CAAQC,GAAG,EAAA,EAAI,cAAA,CAAA;AACjD,IAAA,IAAI,CAAC,MAAMT,OAAAA,CAAQW,MAAM,CAACO,eAAAA,CAAAA,EAAkB;AACxC,QAAA,MAAM,IAAImC,KAAAA,CAAM,8CAAA,CAAA;AACpB;IAEA,IAAIhC,WAAAA;IACJ,IAAI;AACA,QAAA,MAAMF,kBAAAA,GAAqB,MAAMnB,OAAAA,CAAQoB,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;QACnEG,WAAAA,GAAcC,IAAAA,CAAKC,KAAK,CAACJ,kBAAAA,CAAAA;AAC7B,KAAA,CAAE,OAAOS,KAAAA,EAAO;AACZ,QAAA,MAAM,IAAIyB,KAAAA,CAAM,CAAC,8BAA8B,EAAEzB,KAAAA,CAAAA,CAAO,CAAA;AAC5D;IAEA3B,MAAAA,CAAO8C,IAAI,CAAC,CAAC,oBAAoB,EAAE1B,WAAAA,CAAYG,IAAI,IAAI,SAAA,CAAA,CAAW,CAAA;;IAGlE,MAAMQ,UAAAA,GAAagC,CAAAA,CAAAA,eAAAA,GAAAA,SAAAA,CAAUG,IAAI,cAAdH,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAAA,CAAgBhC,UAAU,KAAI,EAAC;IAClD,MAAMoC,iBAAAA,GAAoBJ,EAAAA,gBAAAA,GAAAA,SAAAA,CAAUG,IAAI,MAAA,IAAA,IAAdH,gBAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAAA,CAAgBK,aAAa,KAAI,qBAAA;IAC3D,MAAMC,QAAAA,GAAWN,SAAAA,CAAUO,MAAM,KAAA,CAAIP,gBAAAA,GAAAA,SAAAA,CAAUG,IAAI,MAAA,IAAA,IAAdH,gBAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAAA,CAAgBO,MAAM,CAAA,IAAI,KAAA;AAE/DtE,IAAAA,MAAAA,CAAOS,KAAK,CAAC,uBAAA,EAAyBY,IAAAA,CAAKY,SAAS,CAACF,UAAAA,CAAAA,CAAAA;IACrD/B,MAAAA,CAAOS,KAAK,CAAC,8BAAA,EAAgC0D,iBAAAA,CAAAA;IAC7CnE,MAAAA,CAAOS,KAAK,CAAC,qBAAA,EAAuB4D,QAAAA,CAAAA;AAEpC,IAAA,IAAIjC,OAAOmC,IAAI,CAACxC,UAAAA,CAAAA,CAAYyC,MAAM,KAAK,CAAA,EAAG;AACtCxE,QAAAA,MAAAA,CAAO8C,IAAI,CAAC,sDAAA,CAAA;QACZ,OAAO,sDAAA;AACX;IAEA9C,MAAAA,CAAO8C,IAAI,CAAC,CAAC,wBAAwB,EAAEzB,IAAAA,CAAKY,SAAS,CAACF,UAAAA,CAAAA,CAAAA,CAAa,CAAA;;AAGnE,IAAA,MAAM0C,eAAAA,GAAkB;AACpB,QAAA,GAAGrD,YAAYU,YAAY;AAC3B,QAAA,GAAGV,YAAYsD,eAAe;AAC9B,QAAA,GAAGtD,YAAYuD;AACnB,KAAA;AAEA3E,IAAAA,MAAAA,CAAO8C,IAAI,CAAC,CAAC,MAAM,EAAEV,MAAAA,CAAOmC,IAAI,CAACE,eAAAA,CAAAA,CAAiBD,MAAM,CAAC,mBAAmB,C
AAC,CAAA;;AAG7E,IAAA,MAAMI,cAAAA,GAAiB,MAAM/C,mBAAAA,CAAoB4C,eAAAA,EAAiB1C,UAAAA,EAAYhC,OAAAA,CAAAA;IAE9E,IAAI6E,cAAAA,CAAeC,IAAI,KAAK,CAAA,EAAG;AAC3B7E,QAAAA,MAAAA,CAAO8C,IAAI,CAAC,iDAAA,CAAA;QACZ,OAAO,iDAAA;AACX;IAEA9C,MAAAA,CAAO8C,IAAI,CAAC,CAAC,MAAM,EAAE8B,cAAAA,CAAeC,IAAI,CAAC,mBAAmB,EAAE;AAAID,QAAAA,GAAAA,cAAAA,CAAeL,IAAI;KAAG,CAACvD,IAAI,CAAC,IAAA,CAAA,CAAA,CAAO,CAAA;;AAGrG,IAAA,MAAMgC,oBAAoB3C,IAAAA,CAAKW,IAAI,CAACT,OAAAA,CAAQC,GAAG,EAAA,EAAI2D,iBAAAA,CAAAA;IACnD,MAAMW,eAAAA,GAAkB,MAAM/B,wBAAAA,CAAyBC,iBAAAA,EAAmBjD,OAAAA,CAAAA;;AAG1E,IAAA,MAAMgF,eAAuC,EAAC;AAC9C,IAAA,KAAK,MAAM,CAACxC,WAAAA,EAAaC,YAAY,IAAIoC,cAAAA,CAAevC,OAAO,EAAA,CAAI;AAC/D0C,QAAAA,YAAY,CAACxC,WAAAA,CAAY,GAAG,CAAC,KAAK,EAAEC,WAAAA,CAAAA,CAAa;AACrD;AAEA,IAAA,MAAMwC,gBAAAA,GAAmB;AAAE,QAAA,GAAIF,eAAAA,CAAgBG,SAAS,IAAI,EAAE;AAAG,QAAA,GAAGF;AAAa,KAAA;IAEjF,MAAMG,eAAAA,GAAkB9C,MAAAA,CAAOmC,IAAI,CAACS,gBAAAA,CAAAA,CAC/BG,IAAI,EAAA,CACJC,MAAM,CAAC,CAACC,GAAAA,EAAKC,GAAAA,GAAAA;AACVD,QAAAA,GAAG,CAACC,GAAAA,CAAI,GAAGN,gBAAgB,CAACM,GAAAA,CAAI;QAChC,OAAOD,GAAAA;AACX,KAAA,EAAG,EAAC,CAAA;AAER,IAAA,MAAME,aAAAA,GAAmC;AACrC,QAAA,GAAGT,eAAe;QAClBG,SAAAA,EAAWC;AACf,KAAA;;AAIA,IAAA,IAAIb,QAAAA,EAAU;AACVrE,QAAAA,MAAAA,CAAO8C,IAAI,CAAC,6DAAA,CAAA;AACZ9C,QAAAA,MAAAA,CAAO8C,IAAI,CAACI,IAAAA,CAAKM,IAAI,CAAC+B,aAAAA,EAAe;YAAE9B,MAAAA,EAAQ;AAAE,SAAA,CAAA,CAAA;KACrD,MAAO;QACH,MAAMJ,kBAAAA,CAAmBL,mBAAmBuC,aAAAA,EAAexF,OAAAA,CAAAA;AAC3DC,QAAAA,MAAAA,CAAO8C,IAAI,CAAC,CAAC,QAAQ,EAAEqB,iBAAAA,CAAkB,MAAM,EAAES,cAAAA,CAAeC,IAAI,CAAC,8BAA8B,CAAC,CAAA;;AAGpG7E,QAAAA,MAAAA,CAAO8C,IAAI,CAAC,wCAAA,CAAA;QACZ,IAAI;AACA,YAAA,MAAM0C,GAAAA,CAAI,cAAA,CAAA;AACVxF,YAAAA,MAAAA,CAAO8C,IAAI,CAAC,6BAAA,CAAA;AAChB,SAAA,CAAE,OAAOnB,KAAAA,EAAO;AACZ3B,YAAAA,MAAAA,CAAO4B,IAAI,CAAC,CAAC,4BAA4B,EAAED,KAAAA,CAAM,8CAA8C,CAAC,CAAA;AACpG;AACJ;IAEA,MAAM8D,OAAAA,GAAU,CAAC,oBAAoB,EAAEb,eAAeC,IAAI,CAAC,oBAAoB,EAAE;AAAID,QAAAA,GAAAA,cAAAA,CAAevC,OAAO;AAAG,KAAA,CAACqD,GAAG,CAAC,CAAC,CAACnE,IAAAA,EAAMlB,KAAK,GAAK,CAAC,IAAI,EAAEkB,KAAK,OAAO,EAAElB,MAAM,CAAA,CAAEW,IAAI,CAAC,IAAA,CAAA,CAAA,CAAO;IAE9K,OAAOyE,OAAAA;AACX;;;;"}
+ {"version":3,"file":"link.js","sources":["../../src/commands/link.ts"],"sourcesContent":["import path from 'path';\nimport yaml from 'js-yaml';\nimport { getLogger } from '../logging';\nimport { Config } from '../types';\nimport { create as createStorage } from '../util/storage';\nimport { run } from '../util/child';\n\ninterface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\ninterface PnpmWorkspaceFile {\n packages?: string[];\n overrides?: Record<string, string>;\n}\n\nconst scanDirectoryForPackages = async (rootDir: string, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const packageMap = new Map<string, string>(); // packageName -> relativePath\n\n const absoluteRootDir = path.resolve(process.cwd(), rootDir);\n logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);\n\n if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {\n logger.verbose(`Root directory does not exist or is not a directory: ${absoluteRootDir}`);\n return packageMap;\n }\n\n try {\n // Get all subdirectories in the root directory\n const items = await storage.listFiles(absoluteRootDir);\n\n for (const item of items) {\n const itemPath = path.join(absoluteRootDir, item);\n\n if (await storage.isDirectory(itemPath)) {\n const packageJsonPath = path.join(itemPath, 'package.json');\n\n if (await storage.exists(packageJsonPath)) {\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const packageJson = JSON.parse(packageJsonContent) as PackageJson;\n\n if (packageJson.name) {\n const relativePath = path.relative(process.cwd(), itemPath);\n packageMap.set(packageJson.name, relativePath);\n logger.debug(`Found package: ${packageJson.name} at ${relativePath}`);\n }\n } catch (error) {\n logger.debug(`Failed to parse package.json at ${packageJsonPath}: ${error}`);\n }\n }\n }\n }\n } catch (error) {\n logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);\n }\n\n return packageMap;\n};\n\nconst findPackagesByScope = async (dependencies: Record<string, string>, scopeRoots: Record<string, string>, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const workspacePackages = new Map<string, string>();\n\n logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // First, scan all scope roots to build a comprehensive map of available packages\n const allPackages = new Map<string, string>(); // packageName -> relativePath\n\n for (const [scope, rootDir] of Object.entries(scopeRoots)) {\n logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);\n const scopePackages = await scanDirectoryForPackages(rootDir, storage);\n\n // Add packages from this scope to the overall map\n for (const [packageName, packagePath] of scopePackages) {\n if (packageName.startsWith(scope)) {\n allPackages.set(packageName, packagePath);\n logger.debug(`Registered package: ${packageName} -> ${packagePath}`);\n }\n }\n }\n\n // Now check each dependency against our discovered packages\n for (const [depName, depVersion] of Object.entries(dependencies)) {\n logger.debug(`Processing dependency: ${depName}@${depVersion}`);\n\n if (allPackages.has(depName)) {\n const packagePath = allPackages.get(depName)!;\n workspacePackages.set(depName, packagePath);\n logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);\n 
}\n }\n\n return workspacePackages;\n};\n\nconst readCurrentWorkspaceFile = async (workspaceFilePath: string, storage: any): Promise<PnpmWorkspaceFile> => {\n if (await storage.exists(workspaceFilePath)) {\n try {\n const content = await storage.readFile(workspaceFilePath, 'utf-8');\n return (yaml.load(content) as PnpmWorkspaceFile) || {};\n } catch (error) {\n throw new Error(`Failed to parse existing workspace file: ${error}`);\n }\n }\n return {};\n};\n\nconst writeWorkspaceFile = async (workspaceFilePath: string, config: PnpmWorkspaceFile, storage: any): Promise<void> => {\n const yamlContent = yaml.dump(config, {\n indent: 2,\n lineWidth: -1,\n noRefs: true,\n sortKeys: false\n });\n await storage.writeFile(workspaceFilePath, yamlContent, 'utf-8');\n};\n\nexport const execute = async (runConfig: Config): Promise<string> => {\n const logger = getLogger();\n const storage = createStorage({ log: logger.info });\n\n logger.verbose('Starting pnpm workspace link management using overrides...');\n\n // Read current package.json\n const packageJsonPath = path.join(process.cwd(), 'package.json');\n if (!await storage.exists(packageJsonPath)) {\n throw new Error('package.json not found in current directory.');\n }\n\n let packageJson: PackageJson;\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n packageJson = JSON.parse(packageJsonContent);\n } catch (error) {\n throw new Error(`Failed to parse package.json: ${error}`);\n }\n\n logger.verbose(`Processing package: ${packageJson.name || 'unnamed'}`);\n\n // Get configuration\n const scopeRoots = runConfig.link?.scopeRoots || {};\n const workspaceFileName = runConfig.link?.workspaceFile || 'pnpm-workspace.yaml';\n const isDryRun = runConfig.dryRun || runConfig.link?.dryRun || false;\n\n logger.silly('Extracted scopeRoots:', JSON.stringify(scopeRoots));\n logger.debug('Extracted workspaceFileName:', workspaceFileName);\n logger.debug('Extracted isDryRun:', isDryRun);\n\n if (Object.keys(scopeRoots).length === 0) {\n logger.verbose('No scope roots configured. Skipping link management.');\n return 'No scope roots configured. 
Skipping link management.';\n }\n\n logger.silly(`Configured scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // Collect all dependencies\n const allDependencies = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n ...packageJson.peerDependencies\n };\n\n logger.verbose(`Found ${Object.keys(allDependencies).length} total dependencies`);\n\n // Find matching sibling packages\n const packagesToLink = await findPackagesByScope(allDependencies, scopeRoots, storage);\n\n if (packagesToLink.size === 0) {\n logger.verbose('No matching sibling packages found for linking.');\n return 'No matching sibling packages found for linking.';\n }\n\n logger.verbose(`Found ${packagesToLink.size} packages to link: ${[...packagesToLink.keys()].join(', ')}`);\n\n // Read existing workspace configuration\n const workspaceFilePath = path.join(process.cwd(), workspaceFileName);\n const workspaceConfig = await readCurrentWorkspaceFile(workspaceFilePath, storage);\n\n // Create overrides\n const newOverrides: Record<string, string> = {};\n for (const [packageName, packagePath] of packagesToLink.entries()) {\n newOverrides[packageName] = `link:${packagePath}`;\n }\n\n const updatedOverrides = { ...(workspaceConfig.overrides || {}), ...newOverrides };\n\n const sortedOverrides = Object.keys(updatedOverrides)\n .sort()\n .reduce((obj, key) => {\n obj[key] = updatedOverrides[key];\n return obj;\n }, {} as Record<string, string>);\n\n const updatedConfig: PnpmWorkspaceFile = {\n ...workspaceConfig,\n overrides: sortedOverrides\n };\n\n\n // Write the updated workspace file\n if (isDryRun) {\n logger.verbose('DRY RUN: Would write the following workspace configuration:');\n logger.silly(yaml.dump(updatedConfig, { indent: 2 }));\n } else {\n await writeWorkspaceFile(workspaceFilePath, updatedConfig, storage);\n logger.verbose(`Updated ${workspaceFileName} with ${packagesToLink.size} linked packages in overrides.`);\n\n // Rebuild pnpm lock file and node_modules\n logger.verbose('Running pnpm install to apply links...');\n try {\n await run('pnpm install');\n logger.verbose('Successfully applied links.');\n } catch (error) {\n logger.warn(`Failed to run pnpm install: ${error}. 
You may need to run 'pnpm install' manually.`);\n }\n }\n\n const summary = `Successfully linked ${packagesToLink.size} sibling packages:\\n${[...packagesToLink.entries()].map(([name, path]) => ` - ${name}: link:${path}`).join('\\n')}`;\n\n return summary;\n};"],"names":["scanDirectoryForPackages","rootDir","storage","logger","getLogger","packageMap","Map","absoluteRootDir","path","resolve","process","cwd","verbose","exists","isDirectory","items","listFiles","item","itemPath","join","packageJsonPath","packageJsonContent","readFile","packageJson","JSON","parse","name","relativePath","relative","set","debug","error","warn","findPackagesByScope","dependencies","scopeRoots","workspacePackages","silly","stringify","allPackages","scope","Object","entries","scopePackages","packageName","packagePath","startsWith","depName","depVersion","has","get","readCurrentWorkspaceFile","workspaceFilePath","content","yaml","load","Error","writeWorkspaceFile","config","yamlContent","dump","indent","lineWidth","noRefs","sortKeys","writeFile","execute","runConfig","createStorage","log","info","link","workspaceFileName","workspaceFile","isDryRun","dryRun","keys","length","allDependencies","devDependencies","peerDependencies","packagesToLink","size","workspaceConfig","newOverrides","updatedOverrides","overrides","sortedOverrides","sort","reduce","obj","key","updatedConfig","run","summary","map"],"mappings":";;;;;;AAmBA,MAAMA,wBAAAA,GAA2B,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACrD,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,UAAAA,GAAa,IAAIC,GAAAA,EAAAA,CAAAA;AAEvB,IAAA,MAAMC,kBAAkBC,IAAAA,CAAKC,OAAO,CAACC,OAAAA,CAAQC,GAAG,EAAA,EAAIV,OAAAA,CAAAA;AACpDE,IAAAA,MAAAA,CAAOS,OAAO,CAAC,CAAC,iCAAiC,EAAEL,eAAAA,CAAAA,CAAiB,CAAA;IAEpE,IAAI,CAAC,MAAML,OAAAA,CAAQW,MAAM,CAACN,eAAAA,CAAAA,IAAoB,CAAC,MAAML,OAAAA,CAAQY,WAAW,CAACP,eAAAA,CAAAA,EAAkB;AACvFJ,QAAAA,MAAAA,CAAOS,OAAO,CAAC,CAAC,qDAAqD,EAAEL,eAAAA,CAAAA,CAAiB,CAAA;QACxF,OAAOF,UAAAA;AACX;IAEA,IAAI;;AAEA,QAAA,MAAMU,KAAAA,GAAQ,MAAMb,OAAAA,CAAQc,SAAS,CAACT,eAAAA,CAAAA;QAEtC,KAAK,MAAMU,QAAQF,KAAAA,CAAO;AACtB,YAAA,MAAMG,QAAAA,GAAWV,IAAAA,CAAKW,IAAI,CAACZ,eAAAA,EAAiBU,IAAAA,CAAAA;AAE5C,YAAA,IAAI,MAAMf,OAAAA,CAAQY,WAAW,CAACI,QAAAA,CAAAA,EAAW;AACrC,gBAAA,MAAME,eAAAA,GAAkBZ,IAAAA,CAAKW,IAAI,CAACD,QAAAA,EAAU,cAAA,CAAA;AAE5C,gBAAA,IAAI,MAAMhB,OAAAA,CAAQW,MAAM,CAACO,eAAAA,CAAAA,EAAkB;oBACvC,IAAI;AACA,wBAAA,MAAMC,kBAAAA,GAAqB,MAAMnB,OAAAA,CAAQoB,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;wBACnE,MAAMG,WAAAA,GAAcC,IAAAA,CAAKC,KAAK,CAACJ,kBAAAA,CAAAA;wBAE/B,IAAIE,WAAAA,CAAYG,IAAI,EAAE;AAClB,4BAAA,MAAMC,eAAenB,IAAAA,CAAKoB,QAAQ,CAAClB,OAAAA,CAAQC,GAAG,EAAA,EAAIO,QAAAA,CAAAA;AAClDb,4BAAAA,UAAAA,CAAWwB,GAAG,CAACN,WAAAA,CAAYG,IAAI,EAAEC,YAAAA,CAAAA;4BACjCxB,MAAAA,CAAO2B,KAAK,CAAC,CAAC,eAAe,EAAEP,YAAYG,IAAI,CAAC,IAAI,EAAEC,YAAAA,CAAAA,CAAc,CAAA;AACxE;AACJ,qBAAA,CAAE,OAAOI,KAAAA,EAAO;wBACZ5B,MAAAA,CAAO2B,KAAK,CAAC,CAAC,gCAAgC,EAAEV,eAAAA,CAAgB,EAAE,EAAEW,KAAAA,CAAAA,CAAO,CAAA;AAC/E;AACJ;AACJ;AACJ;AACJ,KAAA,CAAE,OAAOA,KAAAA,EAAO;QACZ5B,MAAAA,CAAO6B,IAAI,CAAC,CAAC,yBAAyB,EAAEzB,eAAAA,CAAgB,EAAE,EAAEwB,KAAAA,CAAAA,CAAO,CAAA;AACvE;IAEA,OAAO1B,UAAAA;AACX,CAAA;AAEA,MAAM4B,mBAAAA,GAAsB,OAAOC,YAAAA,EAAsCC,UAAAA,EAAoCjC,OAAAA,GAAAA;AACzG,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMgC,oBAAoB,IAAI9B,GAAAA,EAAAA;IAE9BH,MAAAA,CAAOkC,KAAK,CAAC,CAAC,2CAA2C,EAAEb,IAAAA,CAAKc,SAAS,CAACH,UAAAA,CAAAA,CAAAA,CAAa,CAAA;;IAGvF,MAAMI,WAAAA,GAAc,IAAIjC,GAAAA,EAAAA,CAAAA;IAExB,KAAK,MAAM,CAACkC,KAAAA,EAAOvC,OAAAA,CAAQ,IAAIwC,MAAAA,CAAOC,OAAO,CAACP,UAAAA,CAAAA,CAAa;QACvDhC,MAAAA,CAAOS,OAAO,CAAC,CAAC,eAAe,EAAE4B,KAAAA,CAAM,oBAAoB,EAAE
vC,OAAAA,CAAAA,CAAS,CAAA;QACtE,MAAM0C,aAAAA,GAAgB,MAAM3C,wBAAAA,CAAyBC,OAAAA,EAASC,OAAAA,CAAAA;;AAG9D,QAAA,KAAK,MAAM,CAAC0C,WAAAA,EAAaC,WAAAA,CAAY,IAAIF,aAAAA,CAAe;YACpD,IAAIC,WAAAA,CAAYE,UAAU,CAACN,KAAAA,CAAAA,EAAQ;gBAC/BD,WAAAA,CAAYV,GAAG,CAACe,WAAAA,EAAaC,WAAAA,CAAAA;gBAC7B1C,MAAAA,CAAO2B,KAAK,CAAC,CAAC,oBAAoB,EAAEc,WAAAA,CAAY,IAAI,EAAEC,WAAAA,CAAAA,CAAa,CAAA;AACvE;AACJ;AACJ;;IAGA,KAAK,MAAM,CAACE,OAAAA,EAASC,UAAAA,CAAW,IAAIP,MAAAA,CAAOC,OAAO,CAACR,YAAAA,CAAAA,CAAe;QAC9D/B,MAAAA,CAAO2B,KAAK,CAAC,CAAC,uBAAuB,EAAEiB,OAAAA,CAAQ,CAAC,EAAEC,UAAAA,CAAAA,CAAY,CAAA;QAE9D,IAAIT,WAAAA,CAAYU,GAAG,CAACF,OAAAA,CAAAA,EAAU;YAC1B,MAAMF,WAAAA,GAAcN,WAAAA,CAAYW,GAAG,CAACH,OAAAA,CAAAA;YACpCX,iBAAAA,CAAkBP,GAAG,CAACkB,OAAAA,EAASF,WAAAA,CAAAA;YAC/B1C,MAAAA,CAAOS,OAAO,CAAC,CAAC,uBAAuB,EAAEmC,OAAAA,CAAQ,IAAI,EAAEF,WAAAA,CAAAA,CAAa,CAAA;AACxE;AACJ;IAEA,OAAOT,iBAAAA;AACX,CAAA;AAEA,MAAMe,wBAAAA,GAA2B,OAAOC,iBAAAA,EAA2BlD,OAAAA,GAAAA;AAC/D,IAAA,IAAI,MAAMA,OAAAA,CAAQW,MAAM,CAACuC,iBAAAA,CAAAA,EAAoB;QACzC,IAAI;AACA,YAAA,MAAMC,OAAAA,GAAU,MAAMnD,OAAAA,CAAQoB,QAAQ,CAAC8B,iBAAAA,EAAmB,OAAA,CAAA;AAC1D,YAAA,OAAO,IAACE,CAAKC,IAAI,CAACF,YAAkC,EAAC;AACzD,SAAA,CAAE,OAAOtB,KAAAA,EAAO;AACZ,YAAA,MAAM,IAAIyB,KAAAA,CAAM,CAAC,yCAAyC,EAAEzB,KAAAA,CAAAA,CAAO,CAAA;AACvE;AACJ;AACA,IAAA,OAAO,EAAC;AACZ,CAAA;AAEA,MAAM0B,kBAAAA,GAAqB,OAAOL,iBAAAA,EAA2BM,MAAAA,EAA2BxD,OAAAA,GAAAA;AACpF,IAAA,MAAMyD,WAAAA,GAAcL,IAAAA,CAAKM,IAAI,CAACF,MAAAA,EAAQ;QAClCG,MAAAA,EAAQ,CAAA;AACRC,QAAAA,SAAAA,EAAW,EAAC;QACZC,MAAAA,EAAQ,IAAA;QACRC,QAAAA,EAAU;AACd,KAAA,CAAA;AACA,IAAA,MAAM9D,OAAAA,CAAQ+D,SAAS,CAACb,iBAAAA,EAAmBO,WAAAA,EAAa,OAAA,CAAA;AAC5D,CAAA;AAEO,MAAMO,UAAU,OAAOC,SAAAA,GAAAA;AAuBPA,IAAAA,IAAAA,eAAAA,EACOA,gBAAAA,EACWA,gBAAAA;AAxBrC,IAAA,MAAMhE,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMF,UAAUkE,MAAAA,CAAc;AAAEC,QAAAA,GAAAA,EAAKlE,OAAOmE;AAAK,KAAA,CAAA;AAEjDnE,IAAAA,MAAAA,CAAOS,OAAO,CAAC,4DAAA,CAAA;;AAGf,IAAA,MAAMQ,kBAAkBZ,IAAAA,CAAKW,IAAI,CAACT,OAAAA,CAAQC,GAAG,EAAA,EAAI,cAAA,CAAA;AACjD,IAAA,IAAI,CAAC,MAAMT,OAAAA,CAAQW,MAAM,CAACO,eAAAA,CAAAA,EAAkB;AACxC,QAAA,MAAM,IAAIoC,KAAAA,CAAM,8CAAA,CAAA;AACpB;IAEA,IAAIjC,WAAAA;IACJ,IAAI;AACA,QAAA,MAAMF,kBAAAA,GAAqB,MAAMnB,OAAAA,CAAQoB,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;QACnEG,WAAAA,GAAcC,IAAAA,CAAKC,KAAK,CAACJ,kBAAAA,CAAAA;AAC7B,KAAA,CAAE,OAAOU,KAAAA,EAAO;AACZ,QAAA,MAAM,IAAIyB,KAAAA,CAAM,CAAC,8BAA8B,EAAEzB,KAAAA,CAAAA,CAAO,CAAA;AAC5D;IAEA5B,MAAAA,CAAOS,OAAO,CAAC,CAAC,oBAAoB,EAAEW,WAAAA,CAAYG,IAAI,IAAI,SAAA,CAAA,CAAW,CAAA;;IAGrE,MAAMS,UAAAA,GAAagC,CAAAA,CAAAA,eAAAA,GAAAA,SAAAA,CAAUI,IAAI,cAAdJ,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAAA,CAAgBhC,UAAU,KAAI,EAAC;IAClD,MAAMqC,iBAAAA,GAAoBL,EAAAA,gBAAAA,GAAAA,SAAAA,CAAUI,IAAI,MAAA,IAAA,IAAdJ,gBAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAAA,CAAgBM,aAAa,KAAI,qBAAA;IAC3D,MAAMC,QAAAA,GAAWP,SAAAA,CAAUQ,MAAM,KAAA,CAAIR,gBAAAA,GAAAA,SAAAA,CAAUI,IAAI,MAAA,IAAA,IAAdJ,gBAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAAA,CAAgBQ,MAAM,CAAA,IAAI,KAAA;AAE/DxE,IAAAA,MAAAA,CAAOkC,KAAK,CAAC,uBAAA,EAAyBb,IAAAA,CAAKc,SAAS,CAACH,UAAAA,CAAAA,CAAAA;IACrDhC,MAAAA,CAAO2B,KAAK,CAAC,8BAAA,EAAgC0C,iBAAAA,CAAAA;IAC7CrE,MAAAA,CAAO2B,KAAK,CAAC,qBAAA,EAAuB4C,QAAAA,CAAAA;AAEpC,IAAA,IAAIjC,OAAOmC,IAAI,CAACzC,UAAAA,CAAAA,CAAY0C,MAAM,KAAK,CAAA,EAAG;AACtC1E,QAAAA,MAAAA,CAAOS,OAAO,CAAC,sDAAA,CAAA;QACf,OAAO,sDAAA;AACX;IAEAT,MAAAA,CAAOkC,KAAK,CAAC,CAAC,wBAAwB,EAAEb,IAAAA,CAAKc,SAAS,CAACH,UAAAA,CAAAA,CAAAA,CAAa,CAAA;;AAGpE,IAAA,MAAM2C,eAAAA,GAAkB;AACpB,QAAA,GAAGvD,YAAYW,YAAY;AAC3B,QAAA,GAAGX,YAAYwD,eAAe;AAC9B,QAAA,GAAGxD,YAAYyD;AACnB,KAAA;AAEA7E,IAAAA,MAAAA,CAAOS,OAAO,CAAC,CAAC,MAAM,EAAE6B,MAAAA,CAAOmC,IAAI,CAACE,eAAAA,CAAAA,CAAiBD,
MAAM,CAAC,mBAAmB,CAAC,CAAA;;AAGhF,IAAA,MAAMI,cAAAA,GAAiB,MAAMhD,mBAAAA,CAAoB6C,eAAAA,EAAiB3C,UAAAA,EAAYjC,OAAAA,CAAAA;IAE9E,IAAI+E,cAAAA,CAAeC,IAAI,KAAK,CAAA,EAAG;AAC3B/E,QAAAA,MAAAA,CAAOS,OAAO,CAAC,iDAAA,CAAA;QACf,OAAO,iDAAA;AACX;IAEAT,MAAAA,CAAOS,OAAO,CAAC,CAAC,MAAM,EAAEqE,cAAAA,CAAeC,IAAI,CAAC,mBAAmB,EAAE;AAAID,QAAAA,GAAAA,cAAAA,CAAeL,IAAI;KAAG,CAACzD,IAAI,CAAC,IAAA,CAAA,CAAA,CAAO,CAAA;;AAGxG,IAAA,MAAMiC,oBAAoB5C,IAAAA,CAAKW,IAAI,CAACT,OAAAA,CAAQC,GAAG,EAAA,EAAI6D,iBAAAA,CAAAA;IACnD,MAAMW,eAAAA,GAAkB,MAAMhC,wBAAAA,CAAyBC,iBAAAA,EAAmBlD,OAAAA,CAAAA;;AAG1E,IAAA,MAAMkF,eAAuC,EAAC;AAC9C,IAAA,KAAK,MAAM,CAACxC,WAAAA,EAAaC,YAAY,IAAIoC,cAAAA,CAAevC,OAAO,EAAA,CAAI;AAC/D0C,QAAAA,YAAY,CAACxC,WAAAA,CAAY,GAAG,CAAC,KAAK,EAAEC,WAAAA,CAAAA,CAAa;AACrD;AAEA,IAAA,MAAMwC,gBAAAA,GAAmB;AAAE,QAAA,GAAIF,eAAAA,CAAgBG,SAAS,IAAI,EAAE;AAAG,QAAA,GAAGF;AAAa,KAAA;IAEjF,MAAMG,eAAAA,GAAkB9C,MAAAA,CAAOmC,IAAI,CAACS,gBAAAA,CAAAA,CAC/BG,IAAI,EAAA,CACJC,MAAM,CAAC,CAACC,GAAAA,EAAKC,GAAAA,GAAAA;AACVD,QAAAA,GAAG,CAACC,GAAAA,CAAI,GAAGN,gBAAgB,CAACM,GAAAA,CAAI;QAChC,OAAOD,GAAAA;AACX,KAAA,EAAG,EAAC,CAAA;AAER,IAAA,MAAME,aAAAA,GAAmC;AACrC,QAAA,GAAGT,eAAe;QAClBG,SAAAA,EAAWC;AACf,KAAA;;AAIA,IAAA,IAAIb,QAAAA,EAAU;AACVvE,QAAAA,MAAAA,CAAOS,OAAO,CAAC,6DAAA,CAAA;AACfT,QAAAA,MAAAA,CAAOkC,KAAK,CAACiB,IAAAA,CAAKM,IAAI,CAACgC,aAAAA,EAAe;YAAE/B,MAAAA,EAAQ;AAAE,SAAA,CAAA,CAAA;KACtD,MAAO;QACH,MAAMJ,kBAAAA,CAAmBL,mBAAmBwC,aAAAA,EAAe1F,OAAAA,CAAAA;AAC3DC,QAAAA,MAAAA,CAAOS,OAAO,CAAC,CAAC,QAAQ,EAAE4D,iBAAAA,CAAkB,MAAM,EAAES,cAAAA,CAAeC,IAAI,CAAC,8BAA8B,CAAC,CAAA;;AAGvG/E,QAAAA,MAAAA,CAAOS,OAAO,CAAC,wCAAA,CAAA;QACf,IAAI;AACA,YAAA,MAAMiF,GAAAA,CAAI,cAAA,CAAA;AACV1F,YAAAA,MAAAA,CAAOS,OAAO,CAAC,6BAAA,CAAA;AACnB,SAAA,CAAE,OAAOmB,KAAAA,EAAO;AACZ5B,YAAAA,MAAAA,CAAO6B,IAAI,CAAC,CAAC,4BAA4B,EAAED,KAAAA,CAAM,8CAA8C,CAAC,CAAA;AACpG;AACJ;IAEA,MAAM+D,OAAAA,GAAU,CAAC,oBAAoB,EAAEb,eAAeC,IAAI,CAAC,oBAAoB,EAAE;AAAID,QAAAA,GAAAA,cAAAA,CAAevC,OAAO;AAAG,KAAA,CAACqD,GAAG,CAAC,CAAC,CAACrE,IAAAA,EAAMlB,KAAK,GAAK,CAAC,IAAI,EAAEkB,KAAK,OAAO,EAAElB,MAAM,CAAA,CAAEW,IAAI,CAAC,IAAA,CAAA,CAAA,CAAO;IAE9K,OAAO2E,OAAAA;AACX;;;;"}
@@ -5,7 +5,7 @@ import { execute as execute$3 } from './release.js';
  import { execute as execute$4 } from './link.js';
  import { execute as execute$1 } from './unlink.js';
  import { getLogger } from '../logging.js';
- import { run } from '../util/child.js';
+ import { runWithDryRunSupport, run } from '../util/child.js';
  import { getCurrentBranchName, findOpenPullRequestByHeadRef, createPullRequest, waitForPullRequestChecks, mergePullRequest, createRelease } from '../util/github.js';
  import { create } from '../util/storage.js';
  import { incrementPatchVersion } from '../util/general.js';
@@ -34,7 +34,7 @@ const scanNpmrcForEnvVars = async (storage)=>{
  }
  return envVars;
  };
- const validateEnvironmentVariables = (requiredEnvVars)=>{
+ const validateEnvironmentVariables = (requiredEnvVars, isDryRun)=>{
  const logger = getLogger();
  const missingEnvVars = [];
  for (const envVar of requiredEnvVars){
@@ -43,62 +43,97 @@ const validateEnvironmentVariables = (requiredEnvVars)=>{
  }
  }
  if (missingEnvVars.length > 0) {
- logger.error(`Missing required environment variables: ${missingEnvVars.join(', ')}`);
- throw new Error(`Missing required environment variables: ${missingEnvVars.join(', ')}. Please set these environment variables before running publish.`);
+ if (isDryRun) {
+ logger.warn(`DRY RUN: Missing required environment variables: ${missingEnvVars.join(', ')}`);
+ } else {
+ logger.error(`Missing required environment variables: ${missingEnvVars.join(', ')}`);
+ throw new Error(`Missing required environment variables: ${missingEnvVars.join(', ')}. Please set these environment variables before running publish.`);
+ }
  }
  };
  const runPrechecks = async (runConfig)=>{
- var _packageJson_scripts, _runConfig_publish;
+ var _runConfig_publish;
  const logger = getLogger();
  const storage = create({
  log: logger.info
  });
- logger.info('Running prechecks...');
+ const isDryRun = runConfig.dryRun || false;
+ logger.info(isDryRun ? 'DRY RUN: Running prechecks...' : 'Running prechecks...');
  // Check if we're in a git repository
  try {
- await run('git rev-parse --git-dir');
+ if (isDryRun) {
+ logger.info('DRY RUN: Would check git repository with: git rev-parse --git-dir');
+ } else {
+ await run('git rev-parse --git-dir');
+ }
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  } catch (error) {
- throw new Error('Not in a git repository. Please run this command from within a git repository.');
+ if (!isDryRun) {
+ throw new Error('Not in a git repository. Please run this command from within a git repository.');
+ }
  }
  // Check for uncommitted changes
- logger.info('Checking for uncommitted changes...');
+ logger.info(isDryRun ? 'DRY RUN: Would check for uncommitted changes...' : 'Checking for uncommitted changes...');
  try {
- const { stdout } = await run('git status --porcelain');
- if (stdout.trim()) {
- throw new Error('Working directory has uncommitted changes. Please commit or stash your changes before running publish.');
+ if (isDryRun) {
+ logger.info('DRY RUN: Would check git status with: git status --porcelain');
+ } else {
+ const { stdout } = await run('git status --porcelain');
+ if (stdout.trim()) {
+ throw new Error('Working directory has uncommitted changes. Please commit or stash your changes before running publish.');
+ }
  }
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  } catch (error) {
- throw new Error('Failed to check git status. Please ensure you are in a valid git repository.');
+ if (!isDryRun) {
+ throw new Error('Failed to check git status. Please ensure you are in a valid git repository.');
+ }
  }
  // Check if we're on a release branch
- logger.info('Checking current branch...');
- const currentBranch = await getCurrentBranchName();
- if (!currentBranch.startsWith('release/')) {
- throw new Error(`Current branch '${currentBranch}' is not a release branch. Please switch to a release branch (e.g., release/1.0.0) before running publish.`);
+ logger.info(isDryRun ? 'DRY RUN: Would check current branch...' : 'Checking current branch...');
+ if (isDryRun) {
+ logger.info('DRY RUN: Would verify current branch is a release branch (starts with "release/")');
+ } else {
+ const currentBranch = await getCurrentBranchName();
+ if (!currentBranch.startsWith('release/')) {
+ throw new Error(`Current branch '${currentBranch}' is not a release branch. Please switch to a release branch (e.g., release/1.0.0) before running publish.`);
+ }
  }
  // Check if prepublishOnly script exists in package.json
- logger.info('Checking for prepublishOnly script...');
+ logger.info(isDryRun ? 'DRY RUN: Would check for prepublishOnly script...' : 'Checking for prepublishOnly script...');
  const packageJsonPath = path.join(process.cwd(), 'package.json');
  if (!await storage.exists(packageJsonPath)) {
- throw new Error('package.json not found in current directory.');
- }
- let packageJson;
- try {
- const packageJsonContents = await storage.readFile(packageJsonPath, 'utf-8');
- packageJson = JSON.parse(packageJsonContents);
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- } catch (error) {
- throw new Error('Failed to parse package.json. Please ensure it contains valid JSON.');
- }
- if (!((_packageJson_scripts = packageJson.scripts) === null || _packageJson_scripts === void 0 ? void 0 : _packageJson_scripts.prepublishOnly)) {
- throw new Error('prepublishOnly script is required in package.json but was not found. Please add a prepublishOnly script that runs your pre-flight checks (e.g., clean, lint, build, test).');
+ if (!isDryRun) {
+ throw new Error('package.json not found in current directory.');
+ } else {
+ logger.warn('DRY RUN: package.json not found in current directory.');
+ }
+ } else {
+ var _packageJson_scripts;
+ let packageJson;
+ try {
+ const packageJsonContents = await storage.readFile(packageJsonPath, 'utf-8');
+ packageJson = JSON.parse(packageJsonContents);
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ } catch (error) {
+ if (!isDryRun) {
+ throw new Error('Failed to parse package.json. Please ensure it contains valid JSON.');
+ } else {
+ logger.warn('DRY RUN: Failed to parse package.json. Please ensure it contains valid JSON.');
+ }
+ }
+ if (packageJson && !((_packageJson_scripts = packageJson.scripts) === null || _packageJson_scripts === void 0 ? void 0 : _packageJson_scripts.prepublishOnly)) {
+ if (!isDryRun) {
+ throw new Error('prepublishOnly script is required in package.json but was not found. Please add a prepublishOnly script that runs your pre-flight checks (e.g., clean, lint, build, test).');
+ } else {
+ logger.warn('DRY RUN: prepublishOnly script is required in package.json but was not found.');
+ }
+ }
  }
  // Check required environment variables
- logger.info('Checking required environment variables...');
+ logger.verbose(isDryRun ? 'DRY RUN: Would check required environment variables...' : 'Checking required environment variables...');
  const coreRequiredEnvVars = ((_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.requiredEnvVars) || [];
- const npmrcEnvVars = await scanNpmrcForEnvVars(storage);
+ const npmrcEnvVars = isDryRun ? [] : await scanNpmrcForEnvVars(storage); // Skip .npmrc scan in dry run
  const allRequiredEnvVars = [
  ...new Set([
  ...coreRequiredEnvVars,
@@ -106,108 +141,150 @@ const runPrechecks = async (runConfig)=>{
  ])
  ];
  if (allRequiredEnvVars.length > 0) {
- logger.info(`Required environment variables: ${allRequiredEnvVars.join(', ')}`);
- validateEnvironmentVariables(allRequiredEnvVars);
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Required environment variables: ${allRequiredEnvVars.join(', ')}`);
+ validateEnvironmentVariables(allRequiredEnvVars, isDryRun);
  } else {
- logger.info('No required environment variables specified.');
+ logger.verbose(isDryRun ? 'DRY RUN: No required environment variables specified.' : 'No required environment variables specified.');
  }
- logger.info('All prechecks passed.');
+ logger.info(isDryRun ? 'DRY RUN: All prechecks would pass.' : 'All prechecks passed.');
  };
  const execute = async (runConfig)=>{
  const logger = getLogger();
  const storage = create({
  log: logger.info
  });
+ const isDryRun = runConfig.dryRun || false;
  // Run prechecks before starting any work
  await runPrechecks(runConfig);
- logger.info('Starting release process...');
+ logger.info(isDryRun ? 'DRY RUN: Would start release process...' : 'Starting release process...');
  try {
  var _runConfig_publish, _runConfig_publish1;
  // Unlink all workspace packages before starting (if enabled)
  const shouldUnlink = ((_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.unlinkWorkspacePackages) !== false; // default to true
  if (shouldUnlink) {
- logger.info('Unlinking workspace packages...');
+ logger.verbose(isDryRun ? 'DRY RUN: Would unlink workspace packages...' : 'Unlinking workspace packages...');
  await execute$1(runConfig);
  } else {
- logger.info('Skipping unlink workspace packages (disabled in config).');
+ logger.verbose(isDryRun ? 'DRY RUN: Would skip unlink workspace packages (disabled in config).' : 'Skipping unlink workspace packages (disabled in config).');
+ }
+ let pr = null;
+ if (isDryRun) {
+ logger.info('DRY RUN: Would check for existing pull request');
+ logger.info('DRY RUN: Assuming no existing PR found for demo purposes');
+ } else {
+ const branchName = await getCurrentBranchName();
+ pr = await findOpenPullRequestByHeadRef(branchName);
  }
- const branchName = await getCurrentBranchName();
- let pr = await findOpenPullRequestByHeadRef(branchName);
  if (pr) {
- logger.info(`Found existing pull request for branch ${branchName}: ${pr.html_url}`);
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Found existing pull request for branch: ${pr.html_url}`);
  } else {
  var _runConfig_publish2;
- logger.info('No open pull request found, starting new release publishing process...');
+ logger.info(isDryRun ? 'DRY RUN: No open pull request found, would start new release publishing process...' : 'No open pull request found, starting new release publishing process...');
  // 1. Prepare for release
- logger.info('Preparing for release: switching from workspace to remote dependencies.');
- logger.info('Updating dependencies to latest versions from registry');
+ logger.verbose(isDryRun ? 'DRY RUN: Would prepare for release: switching from workspace to remote dependencies.' : 'Preparing for release: switching from workspace to remote dependencies.');
+ logger.verbose(isDryRun ? 'DRY RUN: Would update dependencies to latest versions from registry' : 'Updating dependencies to latest versions from registry');
  const updatePatterns = (_runConfig_publish2 = runConfig.publish) === null || _runConfig_publish2 === void 0 ? void 0 : _runConfig_publish2.dependencyUpdatePatterns;
  if (updatePatterns && updatePatterns.length > 0) {
- logger.info(`Updating dependencies matching patterns: ${updatePatterns.join(', ')}`);
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Updating dependencies matching patterns: ${updatePatterns.join(', ')}`);
  const patternsArg = updatePatterns.join(' ');
- await run(`pnpm update --latest ${patternsArg}`);
+ await runWithDryRunSupport(`pnpm update --latest ${patternsArg}`, isDryRun);
  } else {
- logger.info('No dependency update patterns specified, updating all dependencies');
- await run('pnpm update --latest');
+ logger.verbose(isDryRun ? 'DRY RUN: No dependency update patterns specified, would update all dependencies' : 'No dependency update patterns specified, updating all dependencies');
+ await runWithDryRunSupport('pnpm update --latest', isDryRun);
  }
- logger.info('Staging changes for release commit');
- await run('git add package.json pnpm-lock.yaml');
- logger.info('Running prepublishOnly script...');
- await run('pnpm run prepublishOnly');
- logger.info('Checking for staged changes...');
- if (await hasStagedChanges()) {
- logger.info('Staged changes found, creating commit...');
+ logger.verbose(isDryRun ? 'DRY RUN: Would stage changes for release commit' : 'Staging changes for release commit');
+ await runWithDryRunSupport('git add package.json pnpm-lock.yaml', isDryRun);
+ logger.info(isDryRun ? 'DRY RUN: Would run prepublishOnly script...' : 'Running prepublishOnly script...');
+ await runWithDryRunSupport('pnpm run prepublishOnly', isDryRun);
+ logger.verbose(isDryRun ? 'DRY RUN: Would check for staged changes...' : 'Checking for staged changes...');
+ if (isDryRun) {
+ logger.verbose('DRY RUN: Assuming staged changes exist for demo purposes');
+ logger.verbose('DRY RUN: Would create commit...');
  await execute$2(runConfig);
  } else {
- logger.info('No changes to commit, skipping commit.');
+ if (await hasStagedChanges()) {
+ logger.verbose('Staged changes found, creating commit...');
+ await execute$2(runConfig);
+ } else {
+ logger.verbose('No changes to commit, skipping commit.');
+ }
  }
- logger.info('Bumping version...');
- await run('pnpm version patch');
- logger.info('Generating release notes...');
+ logger.info(isDryRun ? 'DRY RUN: Would bump version...' : 'Bumping version...');
+ await runWithDryRunSupport('pnpm version patch', isDryRun);
+ logger.info(isDryRun ? 'DRY RUN: Would generate release notes...' : 'Generating release notes...');
  const releaseSummary = await execute$3(runConfig);
- await storage.writeFile('RELEASE_NOTES.md', releaseSummary.body, 'utf-8');
- await storage.writeFile('RELEASE_TITLE.md', releaseSummary.title, 'utf-8');
- logger.info('Release notes and title generated and saved to RELEASE_NOTES.md and RELEASE_TITLE.md.');
- logger.info('Pushing to origin...');
- await run('git push --follow-tags');
- logger.info('Creating pull request...');
- const { stdout: commitTitle } = await run('git log -1 --pretty=%B');
- pr = await createPullRequest(commitTitle, 'Automated release PR.', branchName);
- if (!pr) {
- throw new Error('Failed to create pull request.');
+ if (isDryRun) {
+ logger.info('DRY RUN: Would write release notes to RELEASE_NOTES.md and RELEASE_TITLE.md');
+ } else {
+ await storage.writeFile('RELEASE_NOTES.md', releaseSummary.body, 'utf-8');
+ await storage.writeFile('RELEASE_TITLE.md', releaseSummary.title, 'utf-8');
+ logger.info('Release notes and title generated and saved to RELEASE_NOTES.md and RELEASE_TITLE.md.');
+ }
+ logger.info(isDryRun ? 'DRY RUN: Would push to origin...' : 'Pushing to origin...');
+ await runWithDryRunSupport('git push --follow-tags', isDryRun);
+ logger.info(isDryRun ? 'DRY RUN: Would create pull request...' : 'Creating pull request...');
+ if (isDryRun) {
+ logger.info('DRY RUN: Would get commit title and create PR with GitHub API');
+ pr = {
+ number: 123,
+ html_url: 'https://github.com/mock/repo/pull/123',
+ labels: []
+ };
+ } else {
+ const { stdout: commitTitle } = await run('git log -1 --pretty=%B');
+ pr = await createPullRequest(commitTitle, 'Automated release PR.', await getCurrentBranchName());
+ if (!pr) {
+ throw new Error('Failed to create pull request.');
+ }
+ logger.info(`Pull request created: ${pr.html_url}`);
  }
- logger.info(`Pull request created: ${pr.html_url}`);
  }
- logger.info(`Waiting for PR #${pr.number} checks to complete...`);
- await waitForPullRequestChecks(pr.number);
+ logger.info(`${isDryRun ? 'DRY RUN: Would wait for' : 'Waiting for'} PR #${pr.number} checks to complete...`);
+ if (!isDryRun) {
+ await waitForPullRequestChecks(pr.number);
+ }
  const mergeMethod = ((_runConfig_publish1 = runConfig.publish) === null || _runConfig_publish1 === void 0 ? void 0 : _runConfig_publish1.mergeMethod) || 'squash';
- await mergePullRequest(pr.number, mergeMethod);
- logger.info('Checking out main branch...');
- await run('git checkout main');
- await run('git pull origin main');
- logger.info('Creating GitHub release...');
- const packageJsonContents = await storage.readFile('package.json', 'utf-8');
- const { version } = JSON.parse(packageJsonContents);
- const tagName = `v${version}`;
- const releaseNotesContent = await storage.readFile('RELEASE_NOTES.md', 'utf-8');
- const releaseTitle = await storage.readFile('RELEASE_TITLE.md', 'utf-8');
- await createRelease(tagName, releaseTitle, releaseNotesContent);
- logger.info('Creating new release branch...');
- const nextVersion = incrementPatchVersion(version);
- const newBranchName = `release/${nextVersion}`;
- await run(`git checkout -b ${newBranchName}`);
- await run(`git push -u origin ${newBranchName}`);
- logger.info(`Branch ${newBranchName} created and pushed to origin.`);
- logger.info('Preparation complete.');
+ if (isDryRun) {
+ logger.info(`DRY RUN: Would merge PR #${pr.number} using ${mergeMethod} method`);
+ } else {
+ await mergePullRequest(pr.number, mergeMethod);
+ }
+ logger.info(isDryRun ? 'DRY RUN: Would checkout main branch...' : 'Checking out main branch...');
+ await runWithDryRunSupport('git checkout main', isDryRun);
+ await runWithDryRunSupport('git pull origin main', isDryRun);
+ logger.info(isDryRun ? 'DRY RUN: Would create GitHub release...' : 'Creating GitHub release...');
+ if (isDryRun) {
+ logger.info('DRY RUN: Would read package.json version and create GitHub release');
+ } else {
+ const packageJsonContents = await storage.readFile('package.json', 'utf-8');
+ const { version } = JSON.parse(packageJsonContents);
+ const tagName = `v${version}`;
+ const releaseNotesContent = await storage.readFile('RELEASE_NOTES.md', 'utf-8');
+ const releaseTitle = await storage.readFile('RELEASE_TITLE.md', 'utf-8');
+ await createRelease(tagName, releaseTitle, releaseNotesContent);
+ }
+ logger.info(isDryRun ? 'DRY RUN: Would create new release branch...' : 'Creating new release branch...');
+ if (isDryRun) {
+ logger.info('DRY RUN: Would create next release branch (e.g., release/1.0.1) and push to origin');
+ } else {
+ const packageJsonContents = await storage.readFile('package.json', 'utf-8');
+ const { version } = JSON.parse(packageJsonContents);
+ const nextVersion = incrementPatchVersion(version);
+ const newBranchName = `release/${nextVersion}`;
+ await run(`git checkout -b ${newBranchName}`);
+ await run(`git push -u origin ${newBranchName}`);
+ logger.info(`Branch ${newBranchName} created and pushed to origin.`);
+ }
+ logger.info(isDryRun ? 'DRY RUN: Preparation would be complete.' : 'Preparation complete.');
  } finally{
  var _runConfig_publish3;
  // Restore linked packages (if enabled)
  const shouldLink = ((_runConfig_publish3 = runConfig.publish) === null || _runConfig_publish3 === void 0 ? void 0 : _runConfig_publish3.linkWorkspacePackages) !== false; // default to true
  if (shouldLink) {
- logger.info('Restoring linked packages...');
+ logger.verbose(isDryRun ? 'DRY RUN: Would restore linked packages...' : 'Restoring linked packages...');
  await execute$4(runConfig);
  } else {
- logger.info('Skipping restore linked packages (disabled in config).');
+ logger.verbose(isDryRun ? 'DRY RUN: Would skip restore linked packages (disabled in config).' : 'Skipping restore linked packages (disabled in config).');
  }
  }
  };