@socketsecurity/cli-with-sentry 1.1.7 → 1.1.9
This diff reflects the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +13 -0
- package/README.md +3 -3
- package/dist/cli.js +96 -51
- package/dist/cli.js.map +1 -1
- package/dist/constants.js +8 -5
- package/dist/constants.js.map +1 -1
- package/dist/shadow-npm-inject.js +6 -8
- package/dist/shadow-npm-inject.js.map +1 -1
- package/dist/tsconfig.dts.tsbuildinfo +1 -1
- package/dist/types/commands/fix/cmd-fix.d.mts.map +1 -1
- package/dist/types/commands/fix/handle-fix.d.mts +5 -0
- package/dist/types/commands/fix/handle-fix.d.mts.map +1 -1
- package/dist/types/commands/package/output-purls-shallow-score.d.mts.map +1 -1
- package/dist/types/commands/patch/handle-patch.d.mts.map +1 -1
- package/dist/types/constants.d.mts +3 -1
- package/dist/types/constants.d.mts.map +1 -1
- package/dist/types/shadow/npm/arborist-helpers.d.mts.map +1 -1
- package/dist/types/utils/api.d.mts +22 -1
- package/dist/types/utils/api.d.mts.map +1 -1
- package/dist/types/utils/cve-to-ghsa.d.mts +6 -0
- package/dist/types/utils/cve-to-ghsa.d.mts.map +1 -0
- package/dist/types/utils/github.d.mts.map +1 -1
- package/dist/types/utils/output-formatting.d.mts.map +1 -1
- package/dist/types/utils/purl-to-ghsa.d.mts +6 -0
- package/dist/types/utils/purl-to-ghsa.d.mts.map +1 -0
- package/dist/types/utils/requirements.d.mts +4 -0
- package/dist/types/utils/requirements.d.mts.map +1 -1
- package/dist/types/utils/semver.d.mts +1 -2
- package/dist/types/utils/semver.d.mts.map +1 -1
- package/dist/utils.js +248 -54
- package/dist/utils.js.map +1 -1
- package/dist/vendor.js +15 -18
- package/external/@socketsecurity/registry/lib/url.js +13 -12
- package/package.json +7 -7
- package/requirements.json +2 -2
package/CHANGELOG.md
CHANGED

@@ -4,6 +4,19 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
 
+## [1.1.9](https://github.com/SocketDev/socket-cli/releases/tag/v1.1.9) - 2025-09-11
+
+### Added
+
+- Enhanced `socket fix --id` to accept CVE IDs and PURLs in addition to GHSA IDs
+
+### Fixed
+
+- Correct SOCKET_CLI_API_TIMEOUT environment variable lookup
+
+## [1.1.8](https://github.com/SocketDev/socket-cli/releases/tag/v1.1.8) - 2025-09-11
+
+### Changed
+
+- Made insufficient permissions errors more helpful
+
 ## [1.1.7](https://github.com/SocketDev/socket-cli/releases/tag/v1.1.7) - 2025-09-11
 
 ### Added
package/README.md
CHANGED

@@ -111,8 +111,8 @@ npm exec socket
 <br/>
 <div align="center">
 <picture>
-<source media="(prefers-color-scheme: dark)" srcset="logo-
-<source media="(prefers-color-scheme: light)" srcset="logo-
-<img width="324" height="108" alt="Socket Logo" src="logo-
+<source media="(prefers-color-scheme: dark)" srcset="logo-dark.png">
+<source media="(prefers-color-scheme: light)" srcset="logo-light.png">
+<img width="324" height="108" alt="Socket Logo" src="logo-light.png">
 </picture>
 </div>
package/dist/cli.js
CHANGED

@@ -3710,6 +3710,59 @@ async function outputFixResult(result, outputKind) {
   logger.logger.success('Finished!');
 }
 
+const GHSA_FORMAT_REGEXP = /^GHSA-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}$/;
+const CVE_FORMAT_REGEXP = /^CVE-\d{4}-\d{4,}$/;
+/**
+ * Converts mixed CVE/GHSA/PURL IDs to GHSA IDs only.
+ * Filters out invalid IDs and logs conversion results.
+ */
+async function convertIdsToGhsas(ids) {
+  const validGhsas = [];
+  const errors = [];
+  for (const id of ids) {
+    const trimmedId = id.trim();
+    if (trimmedId.startsWith('GHSA-')) {
+      // Already a GHSA ID, validate format
+      if (GHSA_FORMAT_REGEXP.test(trimmedId)) {
+        validGhsas.push(trimmedId);
+      } else {
+        errors.push(`Invalid GHSA format: ${trimmedId}`);
+      }
+    } else if (trimmedId.startsWith('CVE-')) {
+      // Convert CVE to GHSA
+      if (!CVE_FORMAT_REGEXP.test(trimmedId)) {
+        errors.push(`Invalid CVE format: ${trimmedId}`);
+        continue;
+      }
+
+      // eslint-disable-next-line no-await-in-loop
+      const conversionResult = await utils.convertCveToGhsa(trimmedId);
+      if (conversionResult.ok) {
+        validGhsas.push(conversionResult.data);
+        logger.logger.info(`Converted ${trimmedId} to ${conversionResult.data}`);
+      } else {
+        errors.push(`${trimmedId}: ${conversionResult.message}`);
+      }
+    } else if (trimmedId.startsWith('pkg:')) {
+      // Convert PURL to GHSAs
+      // eslint-disable-next-line no-await-in-loop
+      const conversionResult = await utils.convertPurlToGhsas(trimmedId);
+      if (conversionResult.ok && conversionResult.data.length) {
+        validGhsas.push(...conversionResult.data);
+        logger.logger.info(`Converted ${trimmedId} to ${conversionResult.data.length} GHSA(s): ${conversionResult.data.join(', ')}`);
+      } else {
+        errors.push(`${trimmedId}: ${conversionResult.message || 'No GHSAs found'}`);
+      }
+    } else {
+      // Neither CVE, GHSA, nor PURL, skip
+      errors.push(`Unsupported ID format (expected CVE, GHSA, or PURL): ${trimmedId}`);
+    }
+  }
+  if (errors.length) {
+    logger.logger.warn(`Skipped ${errors.length} invalid IDs:\n${errors.map(e => ` - ${e}`).join('\n')}`);
+  }
+  return validGhsas;
+}
 async function handleFix({
   autopilot,
   cwd,
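
The new convertIdsToGhsas() helper routes each identifier by prefix: well-formed GHSA IDs pass through unchanged, CVE IDs are looked up via utils.convertCveToGhsa, and PURLs are expanded via utils.convertPurlToGhsas, which may yield several advisories. A minimal standalone sketch of just the format checks, reusing the regular expressions shown above (classifyId is an illustrative name, not part of the package):

const GHSA_FORMAT_REGEXP = /^GHSA-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}$/;
const CVE_FORMAT_REGEXP = /^CVE-\d{4}-\d{4,}$/;

// Classify a raw identifier the same way the dispatch above does.
function classifyId(id) {
  const trimmed = id.trim();
  if (trimmed.startsWith('GHSA-')) {
    return GHSA_FORMAT_REGEXP.test(trimmed) ? 'ghsa' : 'invalid-ghsa';
  }
  if (trimmed.startsWith('CVE-')) {
    return CVE_FORMAT_REGEXP.test(trimmed) ? 'cve' : 'invalid-cve';
  }
  if (trimmed.startsWith('pkg:')) {
    return 'purl';
  }
  return 'unsupported';
}

// classifyId('GHSA-aaaa-bbbb-cccc')    -> 'ghsa'
// classifyId('CVE-2024-12345')         -> 'cve'
// classifyId('pkg:npm/lodash@4.17.21') -> 'purl'
// classifyId('not-an-id')              -> 'unsupported'
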
@@ -3726,7 +3779,8 @@ async function handleFix({
   await outputFixResult(await coanaFix({
     autopilot,
     cwd,
-
+    // Convert mixed CVE/GHSA/PURL inputs to GHSA IDs only
+    ghsas: await convertIdsToGhsas(ghsas),
     limit,
     orgSlug,
     rangeStyle,

@@ -3753,7 +3807,11 @@ const generalFlags$2 = {
   id: {
     type: 'string',
     default: [],
-    description: `Provide a list of
+    description: `Provide a list of vulnerability identifiers to compute fixes for:
+      - ${vendor.terminalLinkExports('GHSA IDs', 'https://docs.github.com/en/code-security/security-advisories/working-with-global-security-advisories-from-the-github-advisory-database/about-the-github-advisory-database#about-ghsa-ids')} (e.g., GHSA-xxxx-xxxx-xxxx)
+      - ${vendor.terminalLinkExports('CVE IDs', 'https://cve.mitre.org/cve/identifiers/')} (e.g., CVE-${new Date().getFullYear()}-1234) - automatically converted to GHSA
+      - ${vendor.terminalLinkExports('PURLs', 'https://github.com/package-url/purl-spec')} (e.g., pkg:npm/package@1.0.0) - automatically converted to GHSA
+      Can be provided as comma separated values or as multiple flags`,
     isMultiple: true
   },
   limit: {
@@ -3767,14 +3825,8 @@ const generalFlags$2 = {
     description: `
 Define how dependency version ranges are updated in package.json (default 'preserve').
 Available styles:
-  * caret - Use ^ range for compatible updates (e.g. ^1.2.3)
-  * gt - Use > to allow any newer version (e.g. >1.2.3)
-  * gte - Use >= to allow any newer version (e.g. >=1.2.3)
-  * lt - Use < to allow only lower versions (e.g. <1.2.3)
-  * lte - Use <= to allow only lower versions (e.g. <=1.2.3)
   * pin - Use the exact version (e.g. 1.2.3)
   * preserve - Retain the existing version range style as-is
-  * tilde - Use ~ range for patch/minor updates (e.g. ~1.2.3)
 `.trim()
   }
 };
@@ -3875,23 +3927,6 @@ async function run$I(argv, importMeta, {
   } = cli.flags;
   const dryRun = !!cli.flags['dryRun'];
   const minSatisfying = cli.flags['minSatisfying'] || !maxSatisfying;
-  const rawPurls = utils.cmdFlagValueToArray(cli.flags['purl']);
-  const purls = [];
-  for (const purl of rawPurls) {
-    const version = utils.getPurlObject(purl, {
-      throws: false
-    })?.version;
-    if (version) {
-      purls.push(purl);
-    } else {
-      logger.logger.warn(`--purl ${purl} is missing a version and will be ignored.`);
-    }
-  }
-  if (rawPurls.length !== purls.length && !purls.length) {
-    process.exitCode = 1;
-    logger.logger.fail('No valid --purl values provided.');
-    return;
-  }
   const outputKind = utils.getOutputKind(json, markdown);
   const wasValidInput = utils.checkCommandInput(outputKind, {
     test: utils.RangeStyles.includes(rangeStyle),
@@ -3924,7 +3959,7 @@ async function run$I(argv, importMeta, {
   const {
     spinner
   } = constants.default;
-  const ghsas = arrays.arrayUnique([...utils.cmdFlagValueToArray(cli.flags['id']), ...utils.cmdFlagValueToArray(cli.flags['ghsa'])]);
+  const ghsas = arrays.arrayUnique([...utils.cmdFlagValueToArray(cli.flags['id']), ...utils.cmdFlagValueToArray(cli.flags['ghsa']), ...utils.cmdFlagValueToArray(cli.flags['purl'])]);
   await handleFix({
     autopilot,
     cwd,
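
With this change the values of --id, --ghsa, and --purl are merged into a single list and deduplicated before convertIdsToGhsas() normalizes them to GHSA IDs. A rough sketch of that merge step, with a plain Set standing in for arrays.arrayUnique (assumed here to behave as an order-preserving dedupe); the flag values are hypothetical:

// Hypothetical flag values, merged and deduplicated as the command does above.
const idFlag = ['GHSA-aaaa-bbbb-cccc', 'CVE-2024-1234'];
const ghsaFlag = ['GHSA-aaaa-bbbb-cccc'];
const purlFlag = ['pkg:npm/lodash@4.17.21'];
const ghsas = [...new Set([...idFlag, ...ghsaFlag, ...purlFlag])];
// -> ['GHSA-aaaa-bbbb-cccc', 'CVE-2024-1234', 'pkg:npm/lodash@4.17.21']
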
@@ -8407,12 +8442,12 @@ function getAlertString(alerts, options) {
 
   // We need to create the no-color string regardless because the actual string
   // contains a bunch of invisible ANSI chars which would screw up length checks.
-  const colorless = `- Alerts (${bad.length}/${mid.length
+  const colorless = `- Alerts (${bad.length}/${mid.length}/${low.length}):`;
   const padding = ` ${' '.repeat(Math.max(0, 20 - colorless.length))}`;
   if (colorize) {
-    return `- Alerts (${vendor.yoctocolorsCjsExports.red(bad.length
+    return `- Alerts (${vendor.yoctocolorsCjsExports.red(bad.length)}/${vendor.yoctocolorsCjsExports.yellow(mid.length)}/${low.length}):${padding}${arrays.joinAnd([...bad.map(a => vendor.yoctocolorsCjsExports.red(`${vendor.yoctocolorsCjsExports.dim(`[${a.severity}] `)}${a.type}`)), ...mid.map(a => vendor.yoctocolorsCjsExports.yellow(`${vendor.yoctocolorsCjsExports.dim(`[${a.severity}] `)}${a.type}`)), ...low.map(a => `${vendor.yoctocolorsCjsExports.dim(`[${a.severity}] `)}${a.type}`)])}`;
   }
-  return colorless
+  return `${colorless}${padding}${arrays.joinAnd([...bad.map(a => `[${a.severity}] ${a.type}`), ...mid.map(a => `[${a.severity}] ${a.type}`), ...low.map(a => `[${a.severity}] ${a.type}`)])}`;
 }
 function preProcess(artifacts, requestedPurls) {
   // Dedupe results (for example, pypi will emit one package for each system release (win/mac/cpu) even if it's
@@ -8845,14 +8880,26 @@ async function applyNpmPatches(socketDir, patches, options) {
   }
   return result;
 }
+
+/**
+ * Compute SHA256 hash of file contents.
+ */
 async function computeSHA256(filepath) {
   try {
     const content = await fs$1.promises.readFile(filepath);
     const hash = require$$0$1.createHash('sha256');
     hash.update(content);
-    return
-
-
+    return {
+      ok: true,
+      data: hash.digest('hex')
+    };
+  } catch (e) {
+    return {
+      ok: false,
+      message: 'Failed to compute file hash',
+      cause: `Unable to read file ${filepath}: ${e instanceof Error ? e.message : 'Unknown error'}`
+    };
+  }
 }
 async function findNodeModulesPaths(cwd) {
   const rootNmPath = await utils.findUp(constants.NODE_MODULES, {
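
computeSHA256 now returns a result object, { ok: true, data } on success and { ok: false, message, cause } on failure, rather than a bare value, which lets callers such as processFilePatch report why hashing failed. A minimal standalone sketch of the same pattern using only Node built-ins (sha256File is an illustrative name, not the package's helper):

const { createHash } = require('node:crypto');
const { promises: fsp } = require('node:fs');

// Hash a file and wrap the outcome in an ok/data vs ok/message/cause object.
async function sha256File(filepath) {
  try {
    const content = await fsp.readFile(filepath);
    return { ok: true, data: createHash('sha256').update(content).digest('hex') };
  } catch (e) {
    return {
      ok: false,
      message: 'Failed to compute file hash',
      cause: `Unable to read file ${filepath}: ${e instanceof Error ? e.message : 'Unknown error'}`
    };
  }
}

// Usage: const result = await sha256File('package.json');
// if (result.ok) { console.log(result.data); } else { console.error(result.cause); }
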
@@ -8888,29 +8935,29 @@ async function processFilePatch(pkgPath, fileName, fileInfo, socketDir, options)
     }
     return false;
   }
-  const
-  if (!
-    logger.logger.log(`Failed to compute hash for: ${fileName}`);
+  const currentHashResult = await computeSHA256(filepath);
+  if (!currentHashResult.ok) {
+    logger.logger.log(`Failed to compute hash for: ${fileName}: ${currentHashResult.cause || currentHashResult.message}`);
     if (wasSpinning) {
       spinner?.start();
     }
     return false;
   }
-  if (
+  if (currentHashResult.data === fileInfo.afterHash) {
     logger.logger.success(`File already patched: ${fileName}`);
     logger.logger.group();
-    logger.logger.log(`Current hash: ${
+    logger.logger.log(`Current hash: ${currentHashResult.data}`);
     logger.logger.groupEnd();
     if (wasSpinning) {
       spinner?.start();
     }
     return true;
   }
-  if (
+  if (currentHashResult.data !== fileInfo.beforeHash) {
     logger.logger.fail(`File hash mismatch: ${fileName}`);
     logger.logger.group();
     logger.logger.log(`Expected: ${fileInfo.beforeHash}`);
-    logger.logger.log(`Current: ${
+    logger.logger.log(`Current: ${currentHashResult.data}`);
     logger.logger.log(`Target: ${fileInfo.afterHash}`);
     logger.logger.groupEnd();
     if (wasSpinning) {

@@ -8920,7 +8967,7 @@ async function processFilePatch(pkgPath, fileName, fileInfo, socketDir, options)
   }
   logger.logger.success(`File matches expected hash: ${fileName}`);
   logger.logger.group();
-  logger.logger.log(`Current hash: ${
+  logger.logger.log(`Current hash: ${currentHashResult.data}`);
   logger.logger.log(`Ready to patch to: ${fileInfo.afterHash}`);
   logger.logger.group();
   if (dryRun) {
@@ -9118,13 +9165,11 @@ async function run$k(argv, importMeta, {
   cwd = path.resolve(process.cwd(), cwd);
   const dotSocketDirPath = path.join(cwd, constants.DOT_SOCKET);
   if (!fs$1.existsSync(dotSocketDirPath)) {
-
-    return;
+    throw new utils.InputError(`No ${constants.DOT_SOCKET} directory found in current directory`);
   }
   const manifestPath = path.join(dotSocketDirPath, constants.MANIFEST_JSON);
   if (!fs$1.existsSync(manifestPath)) {
-
-    return;
+    throw new utils.InputError(`No ${constants.MANIFEST_JSON} found in ${constants.DOT_SOCKET} directory`);
   }
   const {
     spinner
@@ -11151,7 +11196,7 @@ async function createScanFromGithub({
   repos
 }) {
   let targetRepos = repos.trim().split(',').map(r => r.trim()).filter(Boolean);
-  if (all || targetRepos.length
+  if (all || !targetRepos.length) {
     // Fetch from Socket API
     const result = await fetchListAllRepos(orgSlug, {
       direction: 'asc',
@@ -11522,10 +11567,10 @@ async function streamDownloadWithFetch(localPath, downloadUrl) {
       ok: true,
       data: localPath
     };
-  } catch (
+  } catch (e) {
     logger.logger.fail('An error was thrown while trying to download a manifest file... url:', downloadUrl);
     require$$9.debugDir('inspect', {
-      error
+      error: e
     });
 
     // If an error occurs and fileStream was created, attempt to clean up.

@@ -11539,10 +11584,10 @@ async function streamDownloadWithFetch(localPath, downloadUrl) {
       });
     }
     // Construct a more informative error message
-    let detailedError = `Error during download of ${downloadUrl}: ${
-    if (
+    let detailedError = `Error during download of ${downloadUrl}: ${e.message}`;
+    if (e.cause) {
       // Include cause if available (e.g., from network errors)
-      detailedError += `\nCause: ${
+      detailedError += `\nCause: ${e.cause}`;
     }
     if (response && !response.ok) {
       // If error was due to bad HTTP status

@@ -14395,5 +14440,5 @@ void (async () => {
     await utils.captureException(e);
   }
 })();
-//# debugId=
+//# debugId=712a8ff2-24bd-4ae4-981f-0c05a45a4d0f
 //# sourceMappingURL=cli.js.map