inup 1.4.3 → 1.4.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -21
- package/dist/cli.js +18 -2
- package/dist/config/index.js +1 -0
- package/dist/config/project-config.js +87 -0
- package/dist/core/package-detector.js +18 -16
- package/dist/services/changelog-fetcher.js +13 -16
- package/dist/services/index.js +1 -0
- package/dist/services/jsdelivr-registry.js +108 -32
- package/dist/services/npm-registry.js +20 -5
- package/dist/services/persistent-cache.js +242 -0
- package/package.json +1 -1
package/README.md
CHANGED
@@ -1,14 +1,14 @@
- # inup
+ # 🚀 inup
 
  [](https://www.npmjs.com/package/inup)
  [](https://www.npmjs.com/package/inup)
  [](https://www.npmjs.com/package/inup)
 
-
+ Upgrade your dependencies interactively. Works with npm, yarn, pnpm, and bun.
 
  
 
- ##
+ ## 🚀 Usage
 
  ```bash
  npx inup
@@ -20,24 +20,17 @@ Or install globally:
  npm install -g inup
  ```
 
- ## Usage
-
- ```bash
- npx inup
- ```
-
  That's it. The tool scans your project, finds outdated packages, and lets you pick what to upgrade.
 
- ##
+ ## 💡 Why inup?
 
- -
- -
- -
- -
- -
- - Package info modal (press `i`)
+ - **Inclusive by Default**: We load Dev, Peer, and Optional dependencies automatically. No more restarting the tool because you forgot a `--peer` flag.
+ - **Live Toggles**: Toggle dependency types (`d`, `p`, `o`) on the fly without exiting.
+ - **Zero Config**: Auto-detects your package manager.
+ - **Monorepo Ready**: Seamlessly handles workspaces.
+ - **Modern UX**: Search with `/`, view package details with `i`, and swap themes with `t`.
 
- ## Keyboard Shortcuts
+ ## ⌨️ Keyboard Shortcuts
 
  - `↑/↓` - Navigate packages
  - `←/→` - Select version (current, patch, minor, major)
@@ -50,18 +43,22 @@ That's it. The tool scans your project, finds outdated packages, and lets you pi
  - `i` - View package info
  - `Enter` - Confirm and upgrade
 
- ## Options
+ ## ⚙️ Options
 
  ```bash
  inup [options]
 
  -d, --dir <path>          Run in specific directory
  -e, --exclude <patterns>  Skip directories (comma-separated regex)
- -p, --peer                Include peer dependencies
- -o, --optional            Include optional dependencies
  --package-manager <name>  Force package manager (npm, yarn, pnpm, bun)
  ```
 
- ##
+ ## 🔒 Privacy
+
+ We don't track anything. Ever.
+
+ The only network requests made are to the npm registry and jsDelivr CDN to fetch package version data. That's it.
+
+ ## 📄 License
 
  MIT
package/dist/cli.js
CHANGED
@@ -10,6 +10,7 @@ const fs_1 = require("fs");
  const path_1 = require("path");
  const index_1 = require("./index");
  const services_1 = require("./services");
+ const config_1 = require("./config");
  const packageJson = JSON.parse((0, fs_1.readFileSync)((0, path_1.join)(__dirname, '../package.json'), 'utf-8'));
  const program = new commander_1.Command();
  program
@@ -18,17 +19,31 @@ program
      .version(packageJson.version)
      .option('-d, --dir <directory>', 'specify directory to run in', process.cwd())
      .option('-e, --exclude <patterns>', 'exclude paths matching regex patterns (comma-separated)', '')
+     .option('-i, --ignore <packages>', 'ignore packages (comma-separated, supports glob patterns like @babel/*)')
      .option('--package-manager <name>', 'manually specify package manager (npm, yarn, pnpm, bun)')
      .action(async (options) => {
      console.log(chalk_1.default.bold.blue(`🚀 `) + chalk_1.default.bold.red(`i`) + chalk_1.default.bold.yellow(`n`) + chalk_1.default.bold.blue(`u`) + chalk_1.default.bold.magenta(`p`) + `\n`);
      // Check for updates in the background (non-blocking)
      const updateCheckPromise = (0, services_1.checkForUpdateAsync)('inup', packageJson.version);
-     const
+     const cwd = (0, path_1.resolve)(options.dir);
+     // Load project config from .inuprc
+     const projectConfig = (0, config_1.loadProjectConfig)(cwd);
+     // Merge CLI exclude patterns with config
+     const cliExcludePatterns = options.exclude
          ? options.exclude
              .split(',')
              .map((p) => p.trim())
              .filter(Boolean)
          : [];
+     const excludePatterns = [...cliExcludePatterns, ...(projectConfig.exclude || [])];
+     // Merge CLI ignore patterns with config (CLI takes precedence / adds to config)
+     const cliIgnorePatterns = options.ignore
+         ? options.ignore
+             .split(',')
+             .map((p) => p.trim())
+             .filter(Boolean)
+         : [];
+     const ignorePackages = [...new Set([...cliIgnorePatterns, ...(projectConfig.ignore || [])])];
      // Validate package manager if provided
      let packageManager;
      if (options.packageManager) {
@@ -41,8 +56,9 @@ program
          packageManager = options.packageManager;
      }
      const upgrader = new index_1.UpgradeRunner({
-         cwd
+         cwd,
          excludePatterns,
+         ignorePackages,
          packageManager,
      });
      await upgrader.run();
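For reference, the new ignore handling above merges `--ignore` values from the CLI with the project config and de-duplicates them. A small sketch with hypothetical inputs (not taken from the package):

```js
// Hypothetical inputs mirroring the merge logic in dist/cli.js above:
//   CLI:     inup --ignore "react,@babel/*"
//   .inuprc: { "ignore": ["@babel/*", "typescript"] }
const options = { ignore: 'react,@babel/*' };
const projectConfig = { ignore: ['@babel/*', 'typescript'] };

const cliIgnorePatterns = options.ignore
    ? options.ignore.split(',').map((p) => p.trim()).filter(Boolean)
    : [];
// Set() removes the duplicate '@babel/*' contributed by both sources.
const ignorePackages = [...new Set([...cliIgnorePatterns, ...(projectConfig.ignore || [])])];
console.log(ignorePackages); // [ 'react', '@babel/*', 'typescript' ]
```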
package/dist/config/index.js
CHANGED
@@ -15,4 +15,5 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  __exportStar(require("./constants"), exports);
+ __exportStar(require("./project-config"), exports);
  //# sourceMappingURL=index.js.map
package/dist/config/project-config.js
ADDED
@@ -0,0 +1,87 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.loadProjectConfig = loadProjectConfig;
+ exports.isPackageIgnored = isPackageIgnored;
+ const fs_1 = require("fs");
+ const path_1 = require("path");
+ const CONFIG_FILES = ['.inuprc', '.inuprc.json', 'inup.config.json'];
+ /**
+  * Load project configuration from .inuprc, .inuprc.json, or inup.config.json
+  * Searches in the specified directory and parent directories up to root
+  */
+ function loadProjectConfig(cwd) {
+     let currentDir = cwd;
+     while (currentDir !== '/') {
+         for (const configFile of CONFIG_FILES) {
+             const configPath = (0, path_1.join)(currentDir, configFile);
+             if ((0, fs_1.existsSync)(configPath)) {
+                 try {
+                     const content = (0, fs_1.readFileSync)(configPath, 'utf-8');
+                     const config = JSON.parse(content);
+                     return normalizeConfig(config);
+                 }
+                 catch (error) {
+                     // Invalid JSON or read error - continue searching
+                     console.warn(`Warning: Failed to parse ${configPath}: ${error}`);
+                 }
+             }
+         }
+         // Move to parent directory
+         const parentDir = (0, path_1.join)(currentDir, '..');
+         if (parentDir === currentDir)
+             break;
+         currentDir = parentDir;
+     }
+     return {};
+ }
+ /**
+  * Normalize and validate the config
+  */
+ function normalizeConfig(config) {
+     const normalized = {};
+     if (config.ignore) {
+         if (Array.isArray(config.ignore)) {
+             normalized.ignore = config.ignore.filter((item) => typeof item === 'string');
+         }
+     }
+     if (config.exclude) {
+         if (Array.isArray(config.exclude)) {
+             normalized.exclude = config.exclude.filter((item) => typeof item === 'string');
+         }
+     }
+     return normalized;
+ }
+ /**
+  * Check if a package name matches any of the ignore patterns
+  * Supports exact matches and glob patterns (* and ?)
+  */
+ function isPackageIgnored(packageName, ignorePatterns) {
+     for (const pattern of ignorePatterns) {
+         if (matchesPattern(packageName, pattern)) {
+             return true;
+         }
+     }
+     return false;
+ }
+ /**
+  * Match a package name against a pattern
+  * Supports:
+  * - Exact match: "lodash"
+  * - Wildcard: "*" matches any sequence of characters
+  * - Single char wildcard: "?" matches single character
+  * - Scoped packages: "@babel/*" matches all @babel packages
+  */
+ function matchesPattern(name, pattern) {
+     // Exact match
+     if (pattern === name) {
+         return true;
+     }
+     // Convert glob pattern to regex
+     const regexPattern = pattern
+         .replace(/[.+^${}()|[\]\\]/g, '\\$&') // Escape special regex chars except * and ?
+         .replace(/\*/g, '.*') // * matches any sequence
+         .replace(/\?/g, '.'); // ? matches single char
+     const regex = new RegExp(`^${regexPattern}$`);
+     return regex.test(name);
+ }
+ //# sourceMappingURL=project-config.js.map
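To make the matching rules concrete, here is a self-contained sketch mirroring `matchesPattern` above, together with a hypothetical `.inuprc` (the file names and patterns are illustrative only):

```js
// Hypothetical .inuprc (JSON) picked up by loadProjectConfig():
//   { "ignore": ["@types/*", "eslint"], "exclude": ["packages/legacy-.*"] }
// The glob matching below mirrors matchesPattern() from the file above.
function matchesPattern(name, pattern) {
    if (pattern === name) {
        return true; // exact match
    }
    const regexPattern = pattern
        .replace(/[.+^${}()|[\]\\]/g, '\\$&') // escape regex chars except * and ?
        .replace(/\*/g, '.*')                 // * matches any sequence
        .replace(/\?/g, '.');                 // ? matches a single character
    return new RegExp(`^${regexPattern}$`).test(name);
}

console.log(matchesPattern('@types/node', '@types/*'));      // true
console.log(matchesPattern('eslint', 'eslint'));              // true
console.log(matchesPattern('eslint-plugin-react', 'eslint')); // false (no wildcard, no exact match)
```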
package/dist/core/package-detector.js
CHANGED
@@ -44,6 +44,7 @@ class PackageDetector {
          this.packageJson = null;
          this.cwd = options?.cwd || process.cwd();
          this.excludePatterns = options?.excludePatterns || [];
+         this.ignorePackages = options?.ignorePackages || [];
          this.packageJsonPath = (0, utils_1.findPackageJson)(this.cwd);
          if (this.packageJsonPath) {
              this.packageJson = (0, utils_1.readPackageJson)(this.packageJsonPath);
@@ -67,15 +68,24 @@ class PackageDetector {
              includePeerDeps: true,
              includeOptionalDeps: true,
          });
-         // Step 3: Get unique package names while filtering out workspace references
+         // Step 3: Get unique package names while filtering out workspace references and ignored packages
          this.showProgress('🔍 Identifying unique packages...');
          const uniquePackageNames = new Set();
          const allDeps = [];
+         let ignoredCount = 0;
          for (const dep of allDepsRaw) {
-             if (
-
-             uniquePackageNames.add(dep.name);
+             if (this.isWorkspaceReference(dep.version)) {
+                 continue;
              }
+             if (this.ignorePackages.length > 0 && (0, config_1.isPackageIgnored)(dep.name, this.ignorePackages)) {
+                 ignoredCount++;
+                 continue;
+             }
+             allDeps.push(dep);
+             uniquePackageNames.add(dep.name);
+         }
+         if (ignoredCount > 0) {
+             this.showProgress(`🔍 Skipped ${ignoredCount} ignored package(s)`);
          }
          const packageNames = Array.from(uniquePackageNames);
          // Step 4: Fetch all package data in one call per package
@@ -88,19 +98,11 @@ class PackageDetector {
              }
          }
          const allPackageData = config_1.DEFAULT_REGISTRY === 'jsdelivr'
-             ? await (0, services_1.getAllPackageDataFromJsdelivr)(packageNames, currentVersions, (
-
-                 const truncatedPackage = currentPackage.length > 40
-                     ? currentPackage.substring(0, 37) + '...'
-                     : currentPackage;
-                 this.showProgress(`🌐 Fetching ${percentage}% (${truncatedPackage})`);
+             ? await (0, services_1.getAllPackageDataFromJsdelivr)(packageNames, currentVersions, (_currentPackage, completed, total) => {
+                 this.showProgress(`🌐 Checking versions... (${completed}/${total} packages)`);
              })
-             : await (0, services_1.getAllPackageData)(packageNames, (
-
-                 const truncatedPackage = currentPackage.length > 40
-                     ? currentPackage.substring(0, 37) + '...'
-                     : currentPackage;
-                 this.showProgress(`🌐 Fetching ${percentage}% (${truncatedPackage})`);
+             : await (0, services_1.getAllPackageData)(packageNames, (_currentPackage, completed, total) => {
+                 this.showProgress(`🌐 Checking versions... (${completed}/${total} packages)`);
          });
          try {
              for (const dep of allDeps) {
package/dist/services/changelog-fetcher.js
CHANGED
@@ -1,6 +1,7 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.changelogFetcher = exports.ChangelogFetcher = void 0;
+ const constants_1 = require("../config/constants");
  /**
   * Fetches package metadata from npm registry
   * Includes description, repository info, and basic metadata
@@ -69,12 +70,13 @@ class ChangelogFetcher {
          }
      }
      /**
-      * Fetch data from
-      * Returns the package data from
+      * Fetch data from jsdelivr CDN
+      * Returns the package data by fetching package.json directly from jsdelivr
       */
      async fetchFromRegistry(packageName) {
          try {
-
+             // Fetch package.json directly from jsdelivr CDN (resolves to latest automatically)
+             const response = await fetch(`${constants_1.JSDELIVR_CDN_URL}/${encodeURIComponent(packageName)}@latest/package.json`, {
                  method: 'GET',
                  headers: {
                      accept: 'application/json',
@@ -83,20 +85,15 @@ class ChangelogFetcher {
              if (!response.ok) {
                  return null;
              }
-             const
-             // Get the latest version data
-             const distTags = data['dist-tags'];
-             const latestVersion = distTags?.latest;
-             const versions = data.versions;
-             const latestPackageData = latestVersion ? versions?.[latestVersion] : undefined;
+             const pkgData = (await response.json());
              return {
-                 description:
-                 homepage:
-                 repository:
-                 bugs:
-                 keywords: (
-                 author:
-                 license:
+                 description: pkgData.description,
+                 homepage: pkgData.homepage,
+                 repository: pkgData.repository,
+                 bugs: pkgData.bugs,
+                 keywords: (pkgData.keywords || []),
+                 author: pkgData.author,
+                 license: pkgData.license,
              };
          }
          catch {
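A standalone sketch of the new metadata fetch above, for orientation: the value of `JSDELIVR_CDN_URL` is an assumption here (the real constant lives in `config/constants`, which is not shown in this diff), and the package name is illustrative.

```js
// Minimal sketch of the jsdelivr package.json fetch used by fetchFromRegistry() above.
const JSDELIVR_CDN_URL = 'https://cdn.jsdelivr.net/npm'; // assumed value of the constant

async function fetchPackageMeta(packageName) {
    const response = await fetch(`${JSDELIVR_CDN_URL}/${encodeURIComponent(packageName)}@latest/package.json`, {
        method: 'GET',
        headers: { accept: 'application/json' },
    });
    if (!response.ok) {
        return null;
    }
    const pkgData = await response.json();
    // The fetcher returns description, homepage, repository, bugs, keywords, author, license;
    // only two fields are shown here for brevity.
    return { description: pkgData.description, license: pkgData.license };
}

fetchPackageMeta('chalk').then((meta) => console.log(meta));
```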
package/dist/services/index.js
CHANGED
@@ -21,4 +21,5 @@ __exportStar(require("./npm-registry"), exports);
  __exportStar(require("./jsdelivr-registry"), exports);
  __exportStar(require("./changelog-fetcher"), exports);
  __exportStar(require("./version-checker"), exports);
+ __exportStar(require("./persistent-cache"), exports);
  //# sourceMappingURL=index.js.map
package/dist/services/jsdelivr-registry.js
CHANGED
@@ -40,6 +40,7 @@ const undici_1 = require("undici");
  const semver = __importStar(require("semver"));
  const config_1 = require("../config");
  const npm_registry_1 = require("./npm-registry");
+ const persistent_cache_1 = require("./persistent-cache");
  // Create a persistent connection pool for jsDelivr CDN with optimal settings
  // This enables connection reuse and HTTP/1.1 keep-alive for blazing fast requests
  const jsdelivrPool = new undici_1.Pool('https://cdn.jsdelivr.net', {
@@ -49,6 +50,9 @@ const jsdelivrPool = new undici_1.Pool('https://cdn.jsdelivr.net', {
      keepAliveMaxTimeout: config_1.REQUEST_TIMEOUT, // Maximum keep-alive timeout
      connectTimeout: config_1.REQUEST_TIMEOUT, // 60 seconds connect timeout
  });
+ // Batch configuration for progressive loading
+ const BATCH_SIZE = 5;
+ const BATCH_TIMEOUT_MS = 500;
  const packageCache = new Map();
  /**
   * Fetches package.json from jsdelivr CDN for a specific version tag using undici pool.
@@ -86,32 +90,77 @@ async function fetchPackageJsonFromJsdelivr(packageName, versionTag) {
  /**
   * Fetches package version data from jsdelivr CDN for multiple packages.
   * Uses undici connection pool for blazing fast performance with connection reuse.
-  * Falls back to npm registry
+  * Falls back to npm registry immediately when jsdelivr fails (interleaved, not sequential).
+  * Supports batched callbacks for progressive UI updates.
   * @param packageNames - Array of package names to fetch
   * @param currentVersions - Optional map of package names to their current versions
   * @param onProgress - Optional progress callback
+  * @param onBatchReady - Optional callback for batch updates (fires every BATCH_SIZE packages or BATCH_TIMEOUT_MS)
   * @returns Map of package names to their version data
   */
- async function getAllPackageDataFromJsdelivr(packageNames, currentVersions, onProgress) {
+ async function getAllPackageDataFromJsdelivr(packageNames, currentVersions, onProgress, onBatchReady) {
      const packageData = new Map();
      if (packageNames.length === 0) {
          return packageData;
      }
      const total = packageNames.length;
      let completedCount = 0;
-     //
-
-
-
+     // Batch buffer for progressive updates
+     let batchBuffer = [];
+     let batchTimer = null;
+     // Helper to flush the current batch
+     const flushBatch = () => {
+         if (batchBuffer.length > 0 && onBatchReady) {
+             onBatchReady([...batchBuffer]);
+             batchBuffer = [];
+         }
+         if (batchTimer) {
+             clearTimeout(batchTimer);
+             batchTimer = null;
+         }
+     };
+     // Helper to add package to batch and flush if needed
+     const addToBatch = (packageName, data) => {
+         if (onBatchReady) {
+             batchBuffer.push({ name: packageName, data });
+             // Flush if batch is full
+             if (batchBuffer.length >= BATCH_SIZE) {
+                 flushBatch();
+             }
+             else if (!batchTimer) {
+                 // Set timer to flush batch after timeout
+                 batchTimer = setTimeout(flushBatch, BATCH_TIMEOUT_MS);
+             }
+         }
+     };
+     // Process individual package fetch with immediate npm fallback on failure
+     const fetchPackageWithFallback = async (packageName) => {
          const currentVersion = currentVersions?.get(packageName);
-         // Try to get from cache first
-         const
-         if (
-             packageData.set(packageName,
+         // Try to get from in-memory cache first (fastest)
+         const memoryCached = packageCache.get(packageName);
+         if (memoryCached && Date.now() - memoryCached.timestamp < config_1.CACHE_TTL) {
+             packageData.set(packageName, memoryCached.data);
+             completedCount++;
+             if (onProgress) {
+                 onProgress(packageName, completedCount, total);
+             }
+             addToBatch(packageName, memoryCached.data);
+             return;
+         }
+         // Try persistent disk cache (fast, survives restarts)
+         const diskCached = persistent_cache_1.persistentCache.get(packageName);
+         if (diskCached) {
+             // Also populate in-memory cache for subsequent accesses
+             packageCache.set(packageName, {
+                 data: diskCached,
+                 timestamp: Date.now(),
+             });
+             packageData.set(packageName, diskCached);
              completedCount++;
              if (onProgress) {
                  onProgress(packageName, completedCount, total);
              }
+             addToBatch(packageName, diskCached);
              return;
          }
          try {
@@ -131,8 +180,24 @@ async function getAllPackageDataFromJsdelivr(packageNames, currentVersions, onPr
              const latestResult = results[0];
              const majorResult = results[1];
              if (!latestResult) {
-                 // Package not on jsDelivr,
-
+                 // Package not on jsDelivr, immediately try npm fallback
+                 const npmData = await (0, npm_registry_1.getAllPackageData)([packageName]);
+                 const result = npmData.get(packageName);
+                 if (result) {
+                     packageData.set(packageName, result);
+                     // Cache in memory
+                     packageCache.set(packageName, {
+                         data: result,
+                         timestamp: Date.now(),
+                     });
+                     // Cache to disk for persistence
+                     persistent_cache_1.persistentCache.set(packageName, result);
+                     addToBatch(packageName, result);
+                 }
+                 completedCount++;
+                 if (onProgress) {
+                     onProgress(packageName, completedCount, total);
+                 }
                  return;
              }
              const latestVersion = latestResult.version;
@@ -145,41 +210,52 @@ async function getAllPackageDataFromJsdelivr(packageNames, currentVersions, onPr
                  latestVersion,
                  allVersions: allVersions.sort(semver.rcompare),
              };
-             // Cache the result
+             // Cache the result in memory
              packageCache.set(packageName, {
                  data: result,
                  timestamp: Date.now(),
              });
+             // Cache to disk for persistence
+             persistent_cache_1.persistentCache.set(packageName, result);
              packageData.set(packageName, result);
              completedCount++;
              if (onProgress) {
                  onProgress(packageName, completedCount, total);
              }
+             addToBatch(packageName, result);
          }
          catch (error) {
-             // On error,
-
-
-
-
-
-
-
-
+             // On error, immediately try npm fallback
+             try {
+                 const npmData = await (0, npm_registry_1.getAllPackageData)([packageName]);
+                 const result = npmData.get(packageName);
+                 if (result) {
+                     packageData.set(packageName, result);
+                     // Cache in memory
+                     packageCache.set(packageName, {
+                         data: result,
+                         timestamp: Date.now(),
+                     });
+                     // Cache to disk for persistence
+                     persistent_cache_1.persistentCache.set(packageName, result);
+                     addToBatch(packageName, result);
+                 }
+             }
+             catch (npmError) {
+                 // If both fail, just continue
+             }
              completedCount++;
              if (onProgress) {
-                 onProgress(
+                 onProgress(packageName, completedCount, total);
              }
-         });
-         // Merge npm data into results and cache it
-         for (const [packageName, data] of npmData.entries()) {
-             packageData.set(packageName, data);
-             packageCache.set(packageName, {
-                 data,
-                 timestamp: Date.now(),
-             });
          }
-     }
+     };
+     // Fire all requests simultaneously - they handle fallback internally and immediately
+     await Promise.all(packageNames.map(fetchPackageWithFallback));
+     // Flush any remaining batch items
+     flushBatch();
+     // Flush persistent cache to disk
+     persistent_cache_1.persistentCache.flush();
      // Clear the progress line and show completion time if no custom progress handler
      if (!onProgress) {
          process.stdout.write('\r' + ' '.repeat(80) + '\r');
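A hypothetical caller illustrating the new `onBatchReady` hook added above; the require path and package names are assumptions for the sketch, not documented entry points of the package.

```js
// Sketch: consume batched results as they arrive instead of waiting for all packages.
const { getAllPackageDataFromJsdelivr } = require('inup/dist/services'); // assumed import path

async function main() {
    const results = await getAllPackageDataFromJsdelivr(
        ['react', 'lodash', 'chalk', 'undici', 'semver', 'commander'], // illustrative names
        undefined, // optional Map of current versions
        (_pkg, completed, total) => console.log(`progress: ${completed}/${total}`),
        (batch) => {
            // Fires every BATCH_SIZE (5) packages or after BATCH_TIMEOUT_MS (500 ms)
            for (const { name, data } of batch) {
                console.log(`${name} -> latest ${data.latestVersion}`);
            }
        });
    console.log(`${results.size} packages resolved`);
}

main();
```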
package/dist/services/npm-registry.js
CHANGED
@@ -37,16 +37,27 @@ exports.getAllPackageData = getAllPackageData;
  exports.clearPackageCache = clearPackageCache;
  const semver = __importStar(require("semver"));
  const config_1 = require("../config");
+ const persistent_cache_1 = require("./persistent-cache");
  const packageCache = new Map();
  /**
   * Fetches package data from npm registry with caching using native fetch.
   * Includes timeout support for slow connections.
   */
  async function fetchPackageFromRegistry(packageName) {
-     // Check cache first
-     const
-     if (
-         return
+     // Check in-memory cache first (fastest)
+     const memoryCached = packageCache.get(packageName);
+     if (memoryCached && Date.now() - memoryCached.timestamp < config_1.CACHE_TTL) {
+         return memoryCached.data;
+     }
+     // Check persistent disk cache (fast, survives restarts)
+     const diskCached = persistent_cache_1.persistentCache.get(packageName);
+     if (diskCached) {
+         // Also populate in-memory cache for subsequent accesses
+         packageCache.set(packageName, {
+             data: diskCached,
+             timestamp: Date.now(),
+         });
+         return diskCached;
      }
      try {
          const url = `${config_1.NPM_REGISTRY_URL}/${encodeURIComponent(packageName)}`;
@@ -78,11 +89,13 @@ async function fetchPackageFromRegistry(packageName) {
              latestVersion,
              allVersions,
          };
-         // Cache the result
+         // Cache the result in memory
          packageCache.set(packageName, {
              data: result,
              timestamp: Date.now(),
          });
+         // Cache to disk for persistence
+         persistent_cache_1.persistentCache.set(packageName, result);
          return result;
      }
      finally {
@@ -118,6 +131,8 @@ async function getAllPackageData(packageNames, onProgress) {
      });
      // Wait for all requests to complete
      await Promise.all(allPromises);
+     // Flush persistent cache to disk
+     persistent_cache_1.persistentCache.flush();
      // Clear the progress line and show completion time if no custom progress handler
      if (!onProgress) {
          process.stdout.write('\r' + ' '.repeat(80) + '\r');
package/dist/services/persistent-cache.js
ADDED
@@ -0,0 +1,242 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.persistentCache = void 0;
+ const fs_1 = require("fs");
+ const path_1 = require("path");
+ const env_paths_1 = __importDefault(require("env-paths"));
+ // Cache TTL: 24 hours for disk cache (much longer than in-memory 5 minutes)
+ const DISK_CACHE_TTL = 24 * 60 * 60 * 1000;
+ // Maximum cache size (number of packages)
+ const MAX_CACHE_ENTRIES = 5000;
+ // Cache file format version (increment when structure changes)
+ const CACHE_VERSION = 1;
+ /**
+  * Persistent cache manager for package registry data.
+  * Stores cache on disk for fast repeated runs across CLI invocations.
+  */
+ class PersistentCacheManager {
+     constructor() {
+         this.index = null;
+         this.dirty = false;
+         const paths = (0, env_paths_1.default)('inup');
+         this.cacheDir = (0, path_1.join)(paths.cache, 'registry');
+         this.indexPath = (0, path_1.join)(this.cacheDir, 'index.json');
+     }
+     /**
+      * Ensure cache directory exists
+      */
+     ensureCacheDir() {
+         if (!(0, fs_1.existsSync)(this.cacheDir)) {
+             (0, fs_1.mkdirSync)(this.cacheDir, { recursive: true });
+         }
+     }
+     /**
+      * Load cache index from disk
+      */
+     loadIndex() {
+         if (this.index) {
+             return this.index;
+         }
+         try {
+             if ((0, fs_1.existsSync)(this.indexPath)) {
+                 const content = (0, fs_1.readFileSync)(this.indexPath, 'utf-8');
+                 const parsed = JSON.parse(content);
+                 // Check cache version - invalidate if outdated
+                 if (parsed.version !== CACHE_VERSION) {
+                     this.clearCache();
+                     this.index = { version: CACHE_VERSION, entries: {} };
+                     return this.index;
+                 }
+                 this.index = parsed;
+                 return this.index;
+             }
+         }
+         catch {
+             // Corrupted index, start fresh
+         }
+         this.index = { version: CACHE_VERSION, entries: {} };
+         return this.index;
+     }
+     /**
+      * Save cache index to disk
+      */
+     saveIndex() {
+         if (!this.dirty || !this.index) {
+             return;
+         }
+         try {
+             this.ensureCacheDir();
+             (0, fs_1.writeFileSync)(this.indexPath, JSON.stringify(this.index), 'utf-8');
+             this.dirty = false;
+         }
+         catch {
+             // Silently fail - cache is not critical
+         }
+     }
+     /**
+      * Generate a safe filename for a package name
+      */
+     getFilename(packageName) {
+         // Handle scoped packages: @scope/name -> scope__name
+         const safeName = packageName.replace(/^@/, '').replace(/\//g, '__');
+         return `${safeName}.json`;
+     }
+     /**
+      * Get cached data for a package
+      */
+     get(packageName) {
+         const index = this.loadIndex();
+         const entry = index.entries[packageName];
+         if (!entry) {
+             return null;
+         }
+         // Check TTL
+         if (Date.now() - entry.timestamp > DISK_CACHE_TTL) {
+             // Expired, remove from index
+             delete index.entries[packageName];
+             this.dirty = true;
+             return null;
+         }
+         // Read the actual cache file
+         try {
+             const filePath = (0, path_1.join)(this.cacheDir, entry.file);
+             if (!(0, fs_1.existsSync)(filePath)) {
+                 delete index.entries[packageName];
+                 this.dirty = true;
+                 return null;
+             }
+             const content = (0, fs_1.readFileSync)(filePath, 'utf-8');
+             const cached = JSON.parse(content);
+             return {
+                 latestVersion: cached.latestVersion,
+                 allVersions: cached.allVersions,
+             };
+         }
+         catch {
+             // Corrupted cache file, remove from index
+             delete index.entries[packageName];
+             this.dirty = true;
+             return null;
+         }
+     }
+     /**
+      * Store data for a package
+      */
+     set(packageName, data) {
+         const index = this.loadIndex();
+         // Evict old entries if cache is too large
+         const entryCount = Object.keys(index.entries).length;
+         if (entryCount >= MAX_CACHE_ENTRIES) {
+             this.evictOldest(Math.floor(MAX_CACHE_ENTRIES * 0.1)); // Evict 10%
+         }
+         const filename = this.getFilename(packageName);
+         const entry = {
+             ...data,
+             timestamp: Date.now(),
+         };
+         try {
+             this.ensureCacheDir();
+             const filePath = (0, path_1.join)(this.cacheDir, filename);
+             (0, fs_1.writeFileSync)(filePath, JSON.stringify(entry), 'utf-8');
+             index.entries[packageName] = {
+                 file: filename,
+                 timestamp: Date.now(),
+             };
+             this.dirty = true;
+         }
+         catch {
+             // Silently fail - cache is not critical
+         }
+     }
+     /**
+      * Batch get multiple packages (returns map of found entries)
+      */
+     getMany(packageNames) {
+         const results = new Map();
+         for (const name of packageNames) {
+             const cached = this.get(name);
+             if (cached) {
+                 results.set(name, cached);
+             }
+         }
+         return results;
+     }
+     /**
+      * Batch set multiple packages
+      */
+     setMany(entries) {
+         for (const [name, data] of entries) {
+             this.set(name, data);
+         }
+         this.flush();
+     }
+     /**
+      * Evict oldest cache entries
+      */
+     evictOldest(count) {
+         const index = this.loadIndex();
+         const entries = Object.entries(index.entries);
+         // Sort by timestamp (oldest first)
+         entries.sort((a, b) => a[1].timestamp - b[1].timestamp);
+         // Remove oldest entries
+         const toRemove = entries.slice(0, count);
+         for (const [packageName, entry] of toRemove) {
+             try {
+                 const filePath = (0, path_1.join)(this.cacheDir, entry.file);
+                 if ((0, fs_1.existsSync)(filePath)) {
+                     (0, fs_1.unlinkSync)(filePath);
+                 }
+             }
+             catch {
+                 // Ignore deletion errors
+             }
+             delete index.entries[packageName];
+         }
+         this.dirty = true;
+     }
+     /**
+      * Clear all cache
+      */
+     clearCache() {
+         try {
+             if ((0, fs_1.existsSync)(this.cacheDir)) {
+                 const files = (0, fs_1.readdirSync)(this.cacheDir);
+                 for (const file of files) {
+                     try {
+                         (0, fs_1.unlinkSync)((0, path_1.join)(this.cacheDir, file));
+                     }
+                     catch {
+                         // Ignore
+                     }
+                 }
+             }
+         }
+         catch {
+             // Ignore
+         }
+         this.index = { version: CACHE_VERSION, entries: {} };
+         this.dirty = true;
+     }
+     /**
+      * Flush pending changes to disk
+      */
+     flush() {
+         this.saveIndex();
+     }
+     /**
+      * Get cache statistics
+      */
+     getStats() {
+         const index = this.loadIndex();
+         return {
+             entries: Object.keys(index.entries).length,
+             cacheDir: this.cacheDir,
+         };
+     }
+ }
+ // Export singleton instance
+ exports.persistentCache = new PersistentCacheManager();
+ //# sourceMappingURL=persistent-cache.js.map
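A minimal usage sketch of the `persistentCache` singleton above; the require path and the sample version data are hypothetical, and the cache location depends on what `env-paths('inup')` resolves to on the host platform.

```js
// Sketch: write an entry, flush the index, then read it back on a later run.
const { persistentCache } = require('inup/dist/services/persistent-cache'); // assumed import path

// Hypothetical data in the { latestVersion, allVersions } shape stored by the registries.
persistentCache.set('left-pad', { latestVersion: '1.3.0', allVersions: ['1.3.0', '1.2.0'] });
persistentCache.flush(); // persists index.json under env-paths('inup').cache/registry

// Subsequent CLI runs within the 24-hour DISK_CACHE_TTL read it back without a network call.
console.log(persistentCache.get('left-pad')); // { latestVersion: '1.3.0', allVersions: [...] }
console.log(persistentCache.getStats());      // { entries: <count>, cacheDir: '<platform cache dir>/registry' }
```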
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "inup",
-   "version": "1.4.3",
+   "version": "1.4.5",
    "description": "Interactive CLI tool for upgrading dependencies with ease. Auto-detects and works with npm, yarn, pnpm, and bun. Inspired by yarn upgrade-interactive. Supports monorepos, workspaces, and batch upgrades.",
    "main": "dist/index.js",
    "bin": {