socket 1.1.1 → 1.1.2

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -25,6 +25,7 @@ var registry = require('../external/@socketsecurity/registry');
  var packages = require('../external/@socketsecurity/registry/lib/packages');
  var require$$12 = require('../external/@socketsecurity/registry/lib/promises');
  var regexps = require('../external/@socketsecurity/registry/lib/regexps');
+ var require$$0$1 = require('node:crypto');
  var require$$1 = require('node:util');
  var os = require('node:os');
  var promises = require('node:stream/promises');
@@ -3716,68 +3717,27 @@ const cmdFix = {
  hidden: hidden$q,
  run: run$I
  };
- async function run$I(argv, importMeta, {
- parentName
- }) {
- const config = {
- commandName: CMD_NAME$r,
- description: description$x,
- hidden: hidden$q,
- flags: {
- ...flags.commonFlags,
- ...flags.outputFlags,
- autoMerge: {
- type: 'boolean',
- default: false,
- description: `Enable auto-merge for pull requests that Socket opens.\nSee ${vendor.terminalLinkExports('GitHub documentation', 'https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/configuring-pull-request-merges/managing-auto-merge-for-pull-requests-in-your-repository')} for managing auto-merge for pull requests in your repository.`
- },
- autopilot: {
- type: 'boolean',
- default: false,
- description: `Shorthand for --auto-merge --test`,
- hidden: true
- },
- id: {
- type: 'string',
- default: [],
- description: `Provide a list of ${vendor.terminalLinkExports('GHSA IDs', 'https://docs.github.com/en/code-security/security-advisories/working-with-global-security-advisories-from-the-github-advisory-database/about-the-github-advisory-database#about-ghsa-ids')} to compute fixes for, as either a comma separated value or as multiple flags`,
- isMultiple: true
- },
- limit: {
- type: 'number',
- default: DEFAULT_LIMIT,
- description: `The number of fixes to attempt at a time (default ${DEFAULT_LIMIT})`
- },
- maxSatisfying: {
- type: 'boolean',
- default: true,
- description: 'Use the maximum satisfying version for dependency updates',
- hidden: true
- },
- minSatisfying: {
- type: 'boolean',
- default: false,
- description: 'Constrain dependency updates to the minimum satisfying version',
- hidden: true
- },
- prCheck: {
- type: 'boolean',
- default: true,
- description: 'Check for an existing PR before attempting a fix',
- hidden: true
- },
- purl: {
- type: 'string',
- default: [],
- description: `Provide a list of ${vendor.terminalLinkExports('PURLs', 'https://github.com/package-url/purl-spec?tab=readme-ov-file#purl')} to compute fixes for, as either a comma separated value or as\nmultiple flags, instead of querying the Socket API`,
- isMultiple: true,
- shortFlag: 'p',
- hidden: true
- },
- rangeStyle: {
- type: 'string',
- default: 'preserve',
- description: `
+ const generalFlags$2 = {
+ autoMerge: {
+ type: 'boolean',
+ default: false,
+ description: `Enable auto-merge for pull requests that Socket opens.\nSee ${vendor.terminalLinkExports('GitHub documentation', 'https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/configuring-pull-request-merges/managing-auto-merge-for-pull-requests-in-your-repository')} for managing auto-merge for pull requests in your repository.`
+ },
+ id: {
+ type: 'string',
+ default: [],
+ description: `Provide a list of ${vendor.terminalLinkExports('GHSA IDs', 'https://docs.github.com/en/code-security/security-advisories/working-with-global-security-advisories-from-the-github-advisory-database/about-the-github-advisory-database#about-ghsa-ids')} to compute fixes for, as either a comma separated value or as multiple flags`,
+ isMultiple: true
+ },
+ limit: {
+ type: 'number',
+ default: DEFAULT_LIMIT,
+ description: `The number of fixes to attempt at a time (default ${DEFAULT_LIMIT})`
+ },
+ rangeStyle: {
+ type: 'string',
+ default: 'preserve',
+ description: `
  Define how dependency version ranges are updated in package.json (default 'preserve').
  Available styles:
  * caret - Use ^ range for compatible updates (e.g. ^1.2.3)
@@ -3789,19 +3749,70 @@ Available styles:
  * preserve - Retain the existing version range style as-is
  * tilde - Use ~ range for patch/minor updates (e.g. ~1.2.3)
  `.trim()
- },
- test: {
- type: 'boolean',
- default: false,
- description: 'Verify the fix by running unit tests',
- hidden: true
- },
- testScript: {
- type: 'string',
- default: 'test',
- description: "The test script to run for fix attempts (default 'test')",
- hidden: true
- }
+ }
+ };
+ const hiddenFlags = {
+ autopilot: {
+ type: 'boolean',
+ default: false,
+ description: `Shorthand for --auto-merge --test`,
+ hidden: true
+ },
+ ghsa: {
+ ...generalFlags$2['id'],
+ hidden: true
+ },
+ maxSatisfying: {
+ type: 'boolean',
+ default: true,
+ description: 'Use the maximum satisfying version for dependency updates',
+ hidden: true
+ },
+ minSatisfying: {
+ type: 'boolean',
+ default: false,
+ description: 'Constrain dependency updates to the minimum satisfying version',
+ hidden: true
+ },
+ prCheck: {
+ type: 'boolean',
+ default: true,
+ description: 'Check for an existing PR before attempting a fix',
+ hidden: true
+ },
+ purl: {
+ type: 'string',
+ default: [],
+ description: `Provide a list of ${vendor.terminalLinkExports('PURLs', 'https://github.com/package-url/purl-spec?tab=readme-ov-file#purl')} to compute fixes for, as either a comma separated value or as\nmultiple flags, instead of querying the Socket API`,
+ isMultiple: true,
+ shortFlag: 'p',
+ hidden: true
+ },
+ test: {
+ type: 'boolean',
+ default: false,
+ description: 'Verify the fix by running unit tests',
+ hidden: true
+ },
+ testScript: {
+ type: 'string',
+ default: 'test',
+ description: "The test script to run for fix attempts (default 'test')",
+ hidden: true
+ }
+ };
+ async function run$I(argv, importMeta, {
+ parentName
+ }) {
+ const config = {
+ commandName: CMD_NAME$r,
+ description: description$x,
+ hidden: hidden$q,
+ flags: {
+ ...flags.commonFlags,
+ ...flags.outputFlags,
+ ...generalFlags$2,
+ ...hiddenFlags
  },
  help: (command, config) => `
  Usage
@@ -3889,7 +3900,7 @@ Available styles:
  // We patched in this feature with `npx custompatch meow` at
  // socket-cli/patches/meow#13.2.0.patch.
  const unknownFlags = cli.unknownFlags ?? [];
- const ghsas = utils.cmdFlagValueToArray(cli.flags['ghsa']);
+ const ghsas = arrays.arrayUnique([...utils.cmdFlagValueToArray(cli.flags['id']), ...utils.cmdFlagValueToArray(cli.flags['ghsa'])]);
  const limit = Number(cli.flags['limit']) || DEFAULT_LIMIT;
  const maxSatisfying = Boolean(cli.flags['maxSatisfying']);
  const minSatisfying = Boolean(cli.flags['minSatisfying']) || !maxSatisfying;
@@ -8662,6 +8673,30 @@ const cmdPackage = {
  }
  };

+ const PatchRecordSchema = vendor.object({
+ exportedAt: vendor.string(),
+ files: vendor.record(vendor.string(),
+ // File path
+ vendor.object({
+ beforeHash: vendor.string(),
+ afterHash: vendor.string()
+ })),
+ vulnerabilities: vendor.record(vendor.string(),
+ // Vulnerability ID like "GHSA-jrhj-2j3q-xf3v"
+ vendor.object({
+ cves: vendor.array(vendor.string()),
+ summary: vendor.string(),
+ severity: vendor.string(),
+ description: vendor.string(),
+ patchExplanation: vendor.string()
+ }))
+ });
+ const PatchManifestSchema = vendor.object({
+ patches: vendor.record(
+ // Package identifier like "npm:simplehttpserver@0.0.6".
+ vendor.string(), PatchRecordSchema)
+ });
+
  async function outputPatchResult(result, outputKind) {
  if (!result.ok) {
  process.exitCode = result.code ?? 1;
@@ -8689,21 +8724,220 @@ async function outputPatchResult(result, outputKind) {
  logger.logger.success('Patch command completed!');
  }

+ async function applyNPMPatches(patches, dryRun, socketDir, packages) {
+ const patchLookup = new Map();
+ for (const patchInfo of patches) {
+ const {
+ purl
+ } = patchInfo;
+ const fullName = purl.namespace ? `@${purl.namespace}/${purl.name}` : purl.name;
+ const lookupKey = `${fullName}@${purl.version}`;
+ patchLookup.set(lookupKey, patchInfo);
+ }
+ const nodeModulesFolders = await findNodeModulesFolders(process.cwd());
+ logger.logger.log(`Found ${nodeModulesFolders.length} node_modules folders`);
+ for (const nodeModulesPath of nodeModulesFolders) {
+ try {
+ // eslint-disable-next-line no-await-in-loop
+ const entries = await fs$1.promises.readdir(nodeModulesPath);
+ for (const entry of entries) {
+ const entryPath = path.join(nodeModulesPath, entry);
+ if (entry.startsWith('@')) {
+ try {
+ // eslint-disable-next-line no-await-in-loop
+ const scopedEntries = await fs$1.promises.readdir(entryPath);
+ for (const scopedEntry of scopedEntries) {
+ const packagePath = path.join(entryPath, scopedEntry);
+ // eslint-disable-next-line no-await-in-loop
+ const pkg = await readPackageJson(packagePath);
+ if (pkg) {
+ // Skip if specific packages requested and this isn't one of them
+ if (packages.length > 0 && !packages.includes(pkg.name)) {
+ continue;
+ }
+ const lookupKey = `${pkg.name}@${pkg.version}`;
+ const patchInfo = patchLookup.get(lookupKey);
+ if (patchInfo) {
+ logger.logger.log(`Found match: ${pkg.name}@${pkg.version} at ${packagePath}`);
+ logger.logger.log(` Patch key: ${patchInfo.key}`);
+ logger.logger.log(` Processing files:`);
+ for (const [fileName, fileInfo] of Object.entries(patchInfo.patch.files)) {
+ // eslint-disable-next-line no-await-in-loop
+ await processFilePatch(packagePath, fileName, fileInfo, dryRun, socketDir);
+ }
+ }
+ }
+ }
+ } catch {
+ // Ignore errors reading scoped packages
+ }
+ } else {
+ // eslint-disable-next-line no-await-in-loop
+ const pkg = await readPackageJson(entryPath);
+ if (pkg) {
+ // Skip if specific packages requested and this isn't one of them
+ if (packages.length > 0 && !packages.includes(pkg.name)) {
+ continue;
+ }
+ const lookupKey = `${pkg.name}@${pkg.version}`;
+ const patchInfo = patchLookup.get(lookupKey);
+ if (patchInfo) {
+ logger.logger.log(`Found match: ${pkg.name}@${pkg.version} at ${entryPath}`);
+ logger.logger.log(` Patch key: ${patchInfo.key}`);
+ logger.logger.log(` Processing files:`);
+ for (const [fileName, fileInfo] of Object.entries(patchInfo.patch.files)) {
+ // eslint-disable-next-line no-await-in-loop
+ await processFilePatch(entryPath, fileName, fileInfo, dryRun, socketDir);
+ }
+ }
+ }
+ }
+ }
+ } catch (error) {
+ logger.logger.error(`Error processing ${nodeModulesPath}:`, error);
+ }
+ }
+ }
+ async function computeSHA256(filePath) {
+ try {
+ const content = await fs$1.promises.readFile(filePath);
+ const hash = require$$0$1.createHash('sha256');
+ hash.update(content);
+ return hash.digest('hex');
+ } catch {
+ return null;
+ }
+ }
+ async function findNodeModulesFolders(rootDir) {
+ const nodeModulesPaths = [];
+ async function searchDir(dir) {
+ try {
+ const entries = await fs$1.promises.readdir(dir);
+ for (const entry of entries) {
+ if (entry.startsWith('.') || entry === 'dist' || entry === 'build') {
+ continue;
+ }
+ const fullPath = path.join(dir, entry);
+ // eslint-disable-next-line no-await-in-loop
+ const stats = await fs$1.promises.stat(fullPath);
+ if (stats.isDirectory()) {
+ if (entry === 'node_modules') {
+ nodeModulesPaths.push(fullPath);
+ } else {
+ // eslint-disable-next-line no-await-in-loop
+ await searchDir(fullPath);
+ }
+ }
+ }
+ } catch (error) {
+ // Ignore permission errors or missing directories
+ }
+ }
+ await searchDir(rootDir);
+ return nodeModulesPaths;
+ }
+ function parsePURL(purlString) {
+ const [ecosystem, rest] = purlString.split(':', 2);
+ const [nameAndNamespace, version] = (rest ?? '').split('@', 2);
+ let namespace;
+ let name;
+ if (ecosystem === 'npm' && nameAndNamespace?.startsWith('@')) {
+ const parts = nameAndNamespace.split('/');
+ namespace = parts[0]?.substring(1);
+ name = parts.slice(1).join('/');
+ } else {
+ name = nameAndNamespace ?? '';
+ }
+ return {
+ type: ecosystem ?? 'unknown',
+ namespace: namespace ?? '',
+ name: name ?? '',
+ version: version ?? '0.0.0'
+ };
+ }
+ async function processFilePatch(packagePath, fileName, fileInfo, dryRun, socketDir) {
+ const filePath = path.join(packagePath, fileName);
+ if (!fs$1.existsSync(filePath)) {
+ logger.logger.log(`File not found: ${fileName}`);
+ return;
+ }
+ const currentHash = await computeSHA256(filePath);
+ if (!currentHash) {
+ logger.logger.log(`Failed to compute hash for: ${fileName}`);
+ return;
+ }
+ if (currentHash === fileInfo.beforeHash) {
+ logger.logger.success(`File matches expected hash: ${fileName}`);
+ logger.logger.log(`Current hash: ${currentHash}`);
+ logger.logger.log(`Ready to patch to: ${fileInfo.afterHash}`);
+ {
+ const blobPath = path.join(socketDir, 'blobs', fileInfo.afterHash);
+ if (!fs$1.existsSync(blobPath)) {
+ logger.logger.fail(`Error: Patch file not found at ${blobPath}`);
+ return;
+ }
+ try {
+ await fs$1.promises.copyFile(blobPath, filePath);
+ logger.logger.success(`Patch applied successfully`);
+ } catch (error) {
+ logger.logger.log(`Error applying patch: ${error}`);
+ }
+ }
+ } else if (currentHash === fileInfo.afterHash) {
+ logger.logger.success(`File already patched: ${fileName}`);
+ logger.logger.log(`Current hash: ${currentHash}`);
+ } else {
+ logger.logger.fail(`File hash mismatch: ${fileName}`);
+ logger.logger.log(`Expected: ${fileInfo.beforeHash}`);
+ logger.logger.log(`Current: ${currentHash}`);
+ logger.logger.log(`Target: ${fileInfo.afterHash}`);
+ }
+ }
+ async function readPackageJson(packagePath) {
+ const pkgJsonPath = path.join(packagePath, 'package.json');
+ const pkg = await fs$2.readJson(pkgJsonPath, {
+ throws: false
+ });
+ if (pkg) {
+ return {
+ name: pkg.name || '',
+ version: pkg.version || ''
+ };
+ }
+ return null;
+ }
  async function handlePatch({
+ cwd,
  outputKind,
  packages,
  spinner
  }) {
- spinner.start('Analyzing dependencies for security patches...');
+ const dryRun = false; // TODO: Add dryRun support via config
+
  try {
- // TODO: Implement actual patch logic
- // This is a stub implementation
- const result = {
- ok: true,
- data: {
- patchedPackages: packages.length > 0 ? packages : ['example-package']
+ const dotSocketDirPath = path.join(cwd, '.socket');
+ const manifestPath = path.join(dotSocketDirPath, 'manifest.json');
+
+ // Read the manifest file
+ const manifestContent = await fs$1.promises.readFile(manifestPath, 'utf-8');
+ const manifestData = JSON.parse(manifestContent);
+
+ // Validate the schema
+ const validated = PatchManifestSchema.parse(manifestData);
+
+ // Parse PURLs and group by ecosystem
+ const patchesByEcosystem = {};
+ for (const [key, patch] of Object.entries(validated.patches)) {
+ const purl = parsePURL(key);
+ if (!patchesByEcosystem[purl.type]) {
+ patchesByEcosystem[purl.type] = [];
  }
- };
+ patchesByEcosystem[purl.type]?.push({
+ key,
+ purl,
+ patch
+ });
+ }
  spinner.stop();
  logger.logger.log('');
  if (packages.length > 0) {
@@ -8712,14 +8946,32 @@ async function handlePatch({
  logger.logger.info('Scanning all dependencies for available patches');
  }
  logger.logger.log('');
+ if (patchesByEcosystem['npm']) {
+ await applyNPMPatches(patchesByEcosystem['npm'], dryRun, dotSocketDirPath, packages);
+ }
+ const result = {
+ ok: true,
+ data: {
+ patchedPackages: packages.length > 0 ? packages : ['patched successfully']
+ }
+ };
  await outputPatchResult(result, outputKind);
  } catch (e) {
  spinner.stop();
+ let message = 'Failed to apply patches';
+ let cause = e?.message || 'Unknown error';
+ if (e instanceof SyntaxError) {
+ message = 'Invalid JSON in manifest.json';
+ cause = e.message;
+ } else if (e instanceof Error && 'issues' in e) {
+ message = 'Schema validation failed';
+ cause = String(e);
+ }
  const result = {
  ok: false,
  code: 1,
- message: 'Failed to apply patches',
- cause: e?.message || 'Unknown error'
+ message,
+ cause
  };
  await outputPatchResult(result, outputKind);
  }
@@ -8793,11 +9045,21 @@ async function run$k(argv, importMeta, {
  // Note: path.resolve vs .join:
  // If given path is absolute then cwd should not affect it.
  cwd = path.resolve(process.cwd(), cwd);
+ const dotSocketDirPath = path.join(cwd, '.socket');
+ if (!fs$1.existsSync(dotSocketDirPath)) {
+ logger.logger.error('Error: No .socket directory found in current directory');
+ return;
+ }
+ const manifestPath = path.join(dotSocketDirPath, 'manifest.json');
+ if (!fs$1.existsSync(manifestPath)) {
+ logger.logger.error('Error: No manifest.json found in .socket directory');
+ }
  const {
  spinner
  } = constants;
- const packages = Array.isArray(cli.flags['package']) ? cli.flags['package'].flatMap(p => String(p).split(',')) : String(cli.flags['package'] || '').split(',').filter(Boolean);
+ const packages = utils.cmdFlagValueToArray(cli.flags['package']);
  await handlePatch({
+ cwd,
  outputKind,
  packages,
  spinner
@@ -14002,5 +14264,5 @@ void (async () => {
  await utils.captureException(e);
  }
  })();
- //# debugId=11a3cbfe-6b5a-4bf7-afd9-6885b9deef59
+ //# debugId=41b305c0-5c97-4685-a7f1-88a4cbb5e41c
  //# sourceMappingURL=cli.js.map