inup 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,66 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.VersionUtils = void 0;
7
+ const chalk_1 = __importDefault(require("chalk"));
8
class VersionUtils {
    /**
     * Re-apply the range prefix (e.g. "^", "~", ">=") from the original
     * specifier onto a new version string.
     * @param {string} originalSpecifier - e.g. "^1.2.3"
     * @param {string} targetVersion - e.g. "2.0.0"
     * @returns {string} targetVersion carrying the original prefix, e.g. "^2.0.0"
     */
    static applyVersionPrefix(originalSpecifier, targetVersion) {
        // Everything before the first digit is treated as the range prefix.
        const prefixMatch = originalSpecifier.match(/^([^\d]+)/);
        const prefix = prefixMatch ? prefixMatch[1] : '';
        return prefix + targetVersion;
    }
    /**
     * Visual (on-screen) length of a string, ignoring ANSI color codes.
     * @param {string} str
     * @returns {number} number of visible characters
     */
    static getVisualLength(str) {
        // Strip SGR escape sequences (ESC[...m) before measuring.
        return str.replace(/\u001b\[[0-9;]*m/g, '').length;
    }
    /**
     * Format `target` so that the first segment that differs from `current`
     * (and everything after it) is colored with `colorFn`, while unchanged
     * leading segments stay plain white.
     * @param {string} current - currently installed version, e.g. "1.2.3"
     * @param {string} target - candidate version, e.g. "1.4.0"
     * @param {(s: string) => string} colorFn - chalk-style color function
     * @returns {string} colored version string
     */
    static formatVersionDiff(current, target, colorFn) {
        if (current === target) {
            return chalk_1.default.white(target);
        }
        // Parse versions into numeric parts (missing/invalid segments -> 0).
        const currentParts = current.split('.').map((part) => parseInt(part, 10) || 0);
        const targetParts = target.split('.').map((part) => parseInt(part, 10) || 0);
        // Find the first differing version segment (major, minor, or patch).
        let firstDiffSegment = -1;
        const maxLength = Math.max(currentParts.length, targetParts.length);
        for (let i = 0; i < maxLength; i++) {
            if ((currentParts[i] || 0) !== (targetParts[i] || 0)) {
                firstDiffSegment = i;
                break;
            }
        }
        if (firstDiffSegment === -1) {
            // Numerically identical (shouldn't happen due to the guard above).
            return chalk_1.default.white(target);
        }
        // BUGFIX: build the output from the segments `target` actually has,
        // instead of maxLength — previously a shorter target (e.g. "2.0")
        // was padded with phantom ".0" segments. If the difference lies
        // beyond target's last segment, color from the last segment on.
        const outLength = targetParts.length;
        const effectiveDiff = Math.min(firstDiffSegment, outLength - 1);
        const result = [];
        for (let i = 0; i < outLength; i++) {
            const partStr = (targetParts[i] || 0).toString();
            if (i < effectiveDiff) {
                // Unchanged leading segment - keep plain.
                result.push(partStr);
            }
            else {
                // Changed segment or later - apply color.
                result.push(colorFn(partStr));
            }
            // Dot separator between parts, colored like the following part.
            if (i < outLength - 1) {
                const nextPartColor = i + 1 < effectiveDiff ? chalk_1.default.white : colorFn;
                result.push(nextPartColor('.'));
            }
        }
        return result.join('');
    }
}
65
+ exports.VersionUtils = VersionUtils;
66
+ //# sourceMappingURL=utils.js.map
@@ -0,0 +1,38 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.executeCommand = executeCommand;
4
+ exports.executeCommandAsync = executeCommandAsync;
5
+ const child_process_1 = require("child_process");
6
+ const util_1 = require("util");
7
+ const execAsync = (0, util_1.promisify)(child_process_1.exec);
8
/**
 * Execute a command synchronously.
 * @param {string} command - shell command to run
 * @param {string} [cwd] - working directory for the command
 * @returns {string} the command's stdout, decoded as UTF-8
 * @throws {Error} when the command fails; the original error (exit code,
 *         stderr, stack) is preserved as `cause`
 */
function executeCommand(command, cwd) {
    try {
        return (0, child_process_1.execSync)(command, {
            encoding: 'utf-8',
            stdio: 'pipe', // capture output instead of inheriting the TTY
            cwd: cwd,
        });
    }
    catch (error) {
        // Keep the original error chained so callers can inspect exit
        // code / stderr instead of only a flattened message.
        throw new Error(`Command failed: ${command}\n${error}`, { cause: error });
    }
}
23
/**
 * Execute a command asynchronously.
 * @param {string} command - shell command to run
 * @returns {Promise<string>} the command's stdout, decoded as UTF-8
 * @throws {Error} when the command fails or produces only stderr; the
 *         original error is preserved as `cause`
 */
async function executeCommandAsync(command) {
    try {
        const run = (0, util_1.promisify)(child_process_1.exec);
        const { stdout, stderr } = await run(command, { encoding: 'utf-8' });
        // stderr-only output counts as failure; stderr alongside stdout is
        // tolerated (many tools emit warnings there).
        if (stderr && !stdout) {
            throw new Error(stderr);
        }
        return stdout;
    }
    catch (error) {
        // Keep the original error chained for callers that need details.
        throw new Error(`Command failed: ${command}\n${error}`, { cause: error });
    }
}
38
+ //# sourceMappingURL=exec.js.map
@@ -0,0 +1,225 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.findPackageJson = findPackageJson;
4
+ exports.findWorkspaceRoot = findWorkspaceRoot;
5
+ exports.readPackageJson = readPackageJson;
6
+ exports.readPackageJsonAsync = readPackageJsonAsync;
7
+ exports.collectAllDependencies = collectAllDependencies;
8
+ exports.collectAllDependenciesAsync = collectAllDependenciesAsync;
9
+ exports.findAllPackageJsonFiles = findAllPackageJsonFiles;
10
+ const fs_1 = require("fs");
11
+ const fs_2 = require("fs");
12
+ const path_1 = require("path");
13
+ const package_manager_detector_1 = require("../services/package-manager-detector");
14
/**
 * Locate a package.json directly inside the given directory.
 * @param {string} [cwd] - directory to look in (defaults to process.cwd())
 * @returns {string|null} path to the package.json, or null when absent
 */
function findPackageJson(cwd = process.cwd()) {
    const candidate = (0, path_1.join)(cwd, 'package.json');
    if ((0, fs_1.existsSync)(candidate)) {
        return candidate;
    }
    return null;
}
21
/**
 * Find the workspace root by detecting the package manager and checking
 * for workspace configuration.
 * @param {string} [cwd] - current working directory (defaults to process.cwd())
 * @param {string} [packageManager] - package manager to use; auto-detected when omitted
 * @returns workspace root as reported by PackageManagerDetector
 */
function findWorkspaceRoot(cwd = process.cwd(), packageManager) {
    // Auto-detect the manager when the caller did not supply one.
    const manager = packageManager ||
        package_manager_detector_1.PackageManagerDetector.detect(cwd).name;
    return package_manager_detector_1.PackageManagerDetector.findWorkspaceRoot(cwd, manager);
}
34
/**
 * Read and parse a package.json file.
 * @param {string} path - path to the package.json file
 * @returns {object} parsed JSON contents
 * @throws {Error} when the file cannot be read or is not valid JSON;
 *         the original error is preserved as `cause`
 */
function readPackageJson(path) {
    try {
        const content = (0, fs_1.readFileSync)(path, 'utf-8');
        return JSON.parse(content);
    }
    catch (error) {
        // Chain the original fs/parse error instead of discarding it.
        throw new Error(`Failed to read package.json: ${error}`, { cause: error });
    }
}
46
/**
 * Read and parse a package.json file asynchronously.
 * @param {string} path - path to the package.json file
 * @returns {Promise<object>} parsed JSON contents
 * @throws {Error} when the file cannot be read or is not valid JSON;
 *         the original error is preserved as `cause`
 */
async function readPackageJsonAsync(path) {
    try {
        const content = await fs_2.promises.readFile(path, 'utf-8');
        return JSON.parse(content);
    }
    catch (error) {
        // Chain the original fs/parse error instead of discarding it.
        throw new Error(`Failed to read package.json: ${error}`, { cause: error });
    }
}
58
/**
 * Collects all dependencies from multiple package.json files.
 * Always includes regular dependencies and devDependencies; optional and
 * peer dependencies are included only when the matching flag is set.
 * Malformed or unreadable package.json files are skipped silently.
 * @param {string[]} packageJsonFiles - paths of package.json files to scan
 * @param {{includePeerDeps?: boolean, includeOptionalDeps?: boolean}} [options]
 * @returns {Array<{name: string, version: string, type: string, packageJsonPath: string}>}
 */
function collectAllDependencies(packageJsonFiles, options = {}) {
    const { includePeerDeps = false, includeOptionalDeps = false } = options;
    // Section order matches the async variant: deps, devDeps, optional, peer.
    const sections = ['dependencies', 'devDependencies'];
    if (includeOptionalDeps) {
        sections.push('optionalDependencies');
    }
    if (includePeerDeps) {
        sections.push('peerDependencies');
    }
    const collected = [];
    for (const packageJsonPath of packageJsonFiles) {
        let manifest;
        try {
            manifest = readPackageJson(packageJsonPath);
        }
        catch (error) {
            // Skip malformed package.json files (deliberate best-effort).
            continue;
        }
        for (const type of sections) {
            const section = manifest[type];
            if (!section || typeof section !== 'object') {
                continue;
            }
            for (const [name, version] of Object.entries(section)) {
                collected.push({ name, version, type, packageJsonPath });
            }
        }
    }
    return collected;
}
96
/**
 * Collects all dependencies from multiple package.json files asynchronously.
 * All manifests are read in parallel for better performance; malformed or
 * unreadable files are skipped silently.
 * Always includes regular dependencies and devDependencies; optional and
 * peer dependencies are included only when the matching flag is set.
 * @param {string[]} packageJsonFiles - paths of package.json files to scan
 * @param {{includePeerDeps?: boolean, includeOptionalDeps?: boolean}} [options]
 * @returns {Promise<Array<{name: string, version: string, type: string, packageJsonPath: string}>>}
 */
async function collectAllDependenciesAsync(packageJsonFiles, options = {}) {
    const { includePeerDeps = false, includeOptionalDeps = false } = options;
    // Read every manifest concurrently; a failed read yields null.
    const reads = await Promise.all(packageJsonFiles.map(async (packageJsonPath) => {
        try {
            return { packageJson: await readPackageJsonAsync(packageJsonPath), packageJsonPath };
        }
        catch (error) {
            // Skip malformed package.json files (deliberate best-effort).
            return null;
        }
    }));
    // Section order matches the sync variant: deps, devDeps, optional, peer.
    const sections = ['dependencies', 'devDependencies'];
    if (includeOptionalDeps) {
        sections.push('optionalDependencies');
    }
    if (includePeerDeps) {
        sections.push('peerDependencies');
    }
    const collected = [];
    for (const entry of reads) {
        if (!entry) {
            continue;
        }
        const { packageJson, packageJsonPath } = entry;
        for (const type of sections) {
            const section = packageJson[type];
            if (!section || typeof section !== 'object') {
                continue;
            }
            for (const [name, version] of Object.entries(section)) {
                collected.push({ name, version, type, packageJsonPath });
            }
        }
    }
    return collected;
}
145
/**
 * Recursively find all package.json files under a root directory.
 * Skips common build/dependency directories (node_modules, dist, ...),
 * hidden directories, caller-excluded paths, and symlink cycles.
 * @param {string} [rootDir] - directory to start from (defaults to process.cwd())
 * @param {string[]} [excludePatterns] - case-insensitive regex patterns
 *        tested against paths relative to rootDir; matches are skipped
 * @param {number} [maxDepth] - maximum recursion depth (default 10)
 * @param {(dir: string, found: number) => void} [onProgress] - optional
 *        progress callback (relative dir, files found so far)
 * @returns {string[]} paths of all package.json files found
 */
function findAllPackageJsonFiles(rootDir = process.cwd(), excludePatterns = [], maxDepth = 10, onProgress) {
    const packageJsonFiles = [];
    const visitedPaths = new Set();
    let directoriesScanned = 0;
    // Compile regex patterns for exclude filtering once up front.
    const excludeRegexes = excludePatterns.map((pattern) => new RegExp(pattern, 'i'));
    // Common build/dependency directories to skip. PERF: hoisted out of the
    // scan loop (previously this array was re-created for every directory
    // entry) and stored in a Set for O(1) membership checks.
    const skipDirs = new Set([
        'node_modules', '.git', 'dist', 'build', '.next', 'coverage',
        '.cache', 'out', '.output', '.nuxt', '.vercel', '.netlify',
        'lib', 'es', 'esm', 'cjs',
    ]);
    function shouldExcludePath(relativePath) {
        return excludeRegexes.some((regex) => regex.test(relativePath));
    }
    function traverseDirectory(dir, depth = 0) {
        // Prevent runaway recursion with a hard depth limit.
        if (depth > maxDepth) {
            return;
        }
        try {
            // Prevent symlink cycles by tracking visited real paths.
            const realPath = (0, fs_1.realpathSync)(dir);
            if (visitedPaths.has(realPath)) {
                return;
            }
            visitedPaths.add(realPath);
            directoriesScanned++;
            // Report progress on the first directory and every 10th after that.
            if (onProgress && (directoriesScanned % 10 === 0 || directoriesScanned === 1)) {
                const relativePath = (0, path_1.relative)(rootDir, dir) || '.';
                onProgress(relativePath, packageJsonFiles.length);
            }
            const files = (0, fs_1.readdirSync)(dir);
            for (const file of files) {
                const fullPath = (0, path_1.join)(dir, file);
                const relativePath = (0, path_1.relative)(rootDir, fullPath);
                // Skip anything matching a caller-supplied exclude pattern.
                if (shouldExcludePath(relativePath)) {
                    continue;
                }
                let stat;
                try {
                    stat = (0, fs_1.statSync)(fullPath);
                }
                catch {
                    // Skip entries we can't stat (broken symlinks, permissions).
                    continue;
                }
                if (stat.isDirectory() && !file.startsWith('.') && !skipDirs.has(file)) {
                    traverseDirectory(fullPath, depth + 1);
                }
                else if (file === 'package.json' && stat.isFile()) {
                    packageJsonFiles.push(fullPath);
                }
            }
        }
        catch (error) {
            // Skip directories that can't be read (permission issues, etc.).
        }
    }
    traverseDirectory(rootDir);
    return packageJsonFiles;
}
225
+ //# sourceMappingURL=filesystem.js.map
@@ -0,0 +1,28 @@
1
+ "use strict";
2
+ /**
3
+ * Shared utilities
4
+ */
5
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
6
+ if (k2 === undefined) k2 = k;
7
+ var desc = Object.getOwnPropertyDescriptor(m, k);
8
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
9
+ desc = { enumerable: true, get: function() { return m[k]; } };
10
+ }
11
+ Object.defineProperty(o, k2, desc);
12
+ }) : (function(o, m, k, k2) {
13
+ if (k2 === undefined) k2 = k;
14
+ o[k2] = m[k];
15
+ }));
16
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
17
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
18
+ };
19
+ Object.defineProperty(exports, "__esModule", { value: true });
20
+ exports.collectAllDependenciesAsync = exports.readPackageJsonAsync = void 0;
21
+ __exportStar(require("./filesystem"), exports);
22
+ __exportStar(require("./exec"), exports);
23
+ __exportStar(require("./version"), exports);
24
+ // Re-export async functions for convenience
25
+ var filesystem_1 = require("./filesystem");
26
+ Object.defineProperty(exports, "readPackageJsonAsync", { enumerable: true, get: function () { return filesystem_1.readPackageJsonAsync; } });
27
+ Object.defineProperty(exports, "collectAllDependenciesAsync", { enumerable: true, get: function () { return filesystem_1.collectAllDependenciesAsync; } });
28
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1,130 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.isVersionOutdated = isVersionOutdated;
37
+ exports.getOptimizedRangeVersion = getOptimizedRangeVersion;
38
+ exports.findClosestMinorVersion = findClosestMinorVersion;
39
+ const semver = __importStar(require("semver"));
40
/**
 * Checks if a version is outdated compared to the latest version.
 * Range prefixes (^, ~, >=, etc.) are handled by coercing both inputs to
 * plain semver versions before comparing.
 * @param {string} current - installed version or range specifier
 * @param {string} latest - latest published version
 * @returns {boolean} true when latest is strictly greater; false when
 *          comparison is impossible
 */
function isVersionOutdated(current, latest) {
    try {
        // Coercion strips prefixes ("^1.2.3" -> "1.2.3"); fall back to the
        // raw string when the input cannot be coerced.
        const normalize = (value) => semver.coerce(value)?.version || value;
        return semver.gt(normalize(latest), normalize(current));
    }
    catch {
        // Unparseable input is treated as up to date.
        return false;
    }
}
55
/**
 * Get the optimized range version for a package: the highest published
 * version that still satisfies the current range, falling back to the
 * latest version when no published version satisfies it (or on any
 * semver error).
 * @param {string} packageName - package name (unused; kept for the call signature)
 * @param {string} currentRange - range currently declared in package.json
 * @param {string[]} allVersions - all published versions
 * @param {string} latestVersion - latest published version (fallback)
 * @returns {string} the chosen version string
 */
function getOptimizedRangeVersion(packageName, currentRange, allVersions, latestVersion) {
    try {
        // A version with an unparseable format simply doesn't match.
        const satisfiesRange = (version) => {
            try {
                return semver.satisfies(version, currentRange);
            }
            catch {
                return false;
            }
        };
        const matching = allVersions.filter(satisfiesRange);
        if (matching.length === 0) {
            return latestVersion;
        }
        // rcompare sorts descending, so index 0 is the highest match.
        return matching.sort(semver.rcompare)[0];
    }
    catch {
        return latestVersion;
    }
}
79
/**
 * Find the closest minor version (same major, higher minor) that satisfies
 * the current range. Falls back to patch updates if no minor updates are
 * available.
 *
 * NOTE(review): despite the name, the scan below keeps the HIGHEST newer
 * minor within the same major line, not the nearest one; and among
 * versions sharing that minor it keeps the first one encountered in
 * `allVersions` (not necessarily the highest patch) — confirm this
 * selection is intended.
 *
 * @param installedVersion - installed version string; also used as a
 *        range in the fallback scan below
 * @param allVersions - all published versions to consider
 * @returns the selected version string, or null when nothing qualifies
 */
function findClosestMinorVersion(installedVersion, allVersions) {
    try {
        // Normalize the installed specifier (strips ^/~ etc.); bail out
        // when it cannot be coerced into a valid semver version.
        const coercedInstalled = semver.coerce(installedVersion);
        if (!coercedInstalled) {
            return null;
        }
        const installedMajor = semver.major(coercedInstalled);
        const installedMinor = semver.minor(coercedInstalled);
        let bestMinorVersion = null;
        let bestMinorValue = -1;
        // Single pass to find best minor version in same major
        for (const version of allVersions) {
            try {
                const major = semver.major(version);
                const minor = semver.minor(version);
                // Strict `minor > bestMinorValue` means equal-minor versions
                // after the first one seen are ignored.
                if (major === installedMajor && minor > installedMinor && minor > bestMinorValue) {
                    bestMinorValue = minor;
                    bestMinorVersion = version;
                }
            }
            catch {
                // Skip invalid versions
            }
        }
        if (bestMinorVersion) {
            return bestMinorVersion;
        }
        // Fallback: find highest patch that satisfies current range
        // (installedVersion is passed to semver.satisfies as the range).
        let bestVersion = null;
        for (const version of allVersions) {
            try {
                if (semver.satisfies(version, installedVersion) && semver.gt(version, coercedInstalled)) {
                    if (!bestVersion || semver.gt(version, bestVersion)) {
                        bestVersion = version;
                    }
                }
            }
            catch {
                // Skip invalid versions
            }
        }
        return bestVersion;
    }
    catch {
        // Any unexpected semver failure is treated as "no update found".
        return null;
    }
}
130
+ //# sourceMappingURL=version.js.map
package/package.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "name": "inup",
3
+ "version": "1.4.0",
4
+ "description": "Interactive CLI tool for upgrading dependencies with ease. Auto-detects and works with npm, yarn, pnpm, and bun. Inspired by yarn upgrade-interactive. Supports monorepos, workspaces, and batch upgrades.",
5
+ "main": "dist/index.js",
6
+ "bin": {
7
+ "inup": "./dist/cli.js"
8
+ },
9
+ "scripts": {
10
+ "build": "rm -rf dist && tsc",
11
+ "dev": "tsc --watch",
12
+ "start": "node dist/cli.js",
13
+ "prepare": "pnpm build",
14
+ "link": "pnpm build && pnpm link --global",
15
+ "version:patch": "pnpm version patch && git push && git push --tags",
16
+ "version:minor": "pnpm version minor && git push && git push --tags",
17
+ "version:major": "pnpm version major && git push && git push --tags",
18
+ "format": "prettier --write src/**/*.ts",
19
+ "format:check": "prettier --check src/**/*.ts",
20
+ "demo:record": "bash docs/demo/record-demo.sh",
21
+ "demo:setup": "cd docs/demo-project && pnpm install"
22
+ },
23
+ "author": "Donfear",
24
+ "keywords": [
25
+ "npm",
26
+ "yarn",
27
+ "pnpm",
28
+ "bun",
29
+ "upgrade",
30
+ "interactive",
31
+ "cli",
32
+ "package-manager",
33
+ "dependency-management",
34
+ "yarn-upgrade-interactive",
35
+ "monorepo",
36
+ "workspace",
37
+ "batch-upgrade"
38
+ ],
39
+ "license": "MIT",
40
+ "homepage": "https://github.com/donfear/inup#readme",
41
+ "repository": {
42
+ "type": "git",
43
+ "url": "git+https://github.com/donfear/inup.git"
44
+ },
45
+ "bugs": {
46
+ "url": "https://github.com/donfear/inup/issues"
47
+ },
48
+ "files": [
49
+ "dist/**/*.js",
50
+ "README.md"
51
+ ],
52
+ "devDependencies": {
53
+ "@types/inquirer": "^9.0.9",
54
+ "@types/keypress": "^2.0.30",
55
+ "@types/node": "^24.10.1",
56
+ "@types/semver": "^7.7.1",
57
+ "prettier": "^3.8.0",
58
+ "typescript": "^5.9.3"
59
+ },
60
+ "dependencies": {
61
+ "chalk": "^5.6.2",
62
+ "commander": "^14.0.2",
63
+ "inquirer": "^13.2.1",
64
+ "keypress": "^0.2.1",
65
+ "nanospinner": "^1.2.2",
66
+ "semver": "^7.7.3",
67
+ "undici": "^7.19.1"
68
+ },
69
+ "engines": {
70
+ "node": ">=20.0.0"
71
+ },
72
+ "packageManager": "pnpm@10.28.1+sha512.7d7dbbca9e99447b7c3bf7a73286afaaf6be99251eb9498baefa7d406892f67b879adb3a1d7e687fc4ccc1a388c7175fbaae567a26ab44d1067b54fcb0d6a316"
73
+ }