@socketsecurity/cli-with-sentry 1.1.4 → 1.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59):
  1. package/CHANGELOG.md +20 -14
  2. package/dist/cli.js +517 -351
  3. package/dist/cli.js.map +1 -1
  4. package/dist/constants.js +34 -13
  5. package/dist/constants.js.map +1 -1
  6. package/dist/flags.js +15 -9
  7. package/dist/flags.js.map +1 -1
  8. package/dist/tsconfig.dts.tsbuildinfo +1 -1
  9. package/dist/types/commands/analytics/cmd-analytics.d.mts.map +1 -1
  10. package/dist/types/commands/analytics/handle-analytics.d.mts +6 -5
  11. package/dist/types/commands/analytics/handle-analytics.d.mts.map +1 -1
  12. package/dist/types/commands/analytics/output-analytics.d.mts +6 -5
  13. package/dist/types/commands/analytics/output-analytics.d.mts.map +1 -1
  14. package/dist/types/commands/audit-log/cmd-audit-log.d.mts.map +1 -1
  15. package/dist/types/commands/fix/cmd-fix.d.mts.map +1 -1
  16. package/dist/types/commands/fix/coana-fix.d.mts.map +1 -1
  17. package/dist/types/commands/fix/handle-fix.d.mts +1 -1
  18. package/dist/types/commands/fix/handle-fix.d.mts.map +1 -1
  19. package/dist/types/commands/fix/pull-request.d.mts +2 -2
  20. package/dist/types/commands/fix/pull-request.d.mts.map +1 -1
  21. package/dist/types/commands/fix/types.d.mts +1 -4
  22. package/dist/types/commands/fix/types.d.mts.map +1 -1
  23. package/dist/types/commands/login/cmd-login.d.mts.map +1 -1
  24. package/dist/types/commands/organization/fetch-organization-list.d.mts +1 -1
  25. package/dist/types/commands/organization/fetch-organization-list.d.mts.map +1 -1
  26. package/dist/types/commands/patch/cmd-patch.d.mts.map +1 -1
  27. package/dist/types/commands/patch/handle-patch.d.mts +3 -9
  28. package/dist/types/commands/patch/handle-patch.d.mts.map +1 -1
  29. package/dist/types/commands/patch/output-patch-result.d.mts.map +1 -1
  30. package/dist/types/commands/repository/cmd-repository-create.d.mts.map +1 -1
  31. package/dist/types/commands/repository/cmd-repository-del.d.mts.map +1 -1
  32. package/dist/types/commands/repository/cmd-repository-update.d.mts.map +1 -1
  33. package/dist/types/commands/repository/cmd-repository-view.d.mts.map +1 -1
  34. package/dist/types/commands/scan/cmd-scan-create.d.mts.map +1 -1
  35. package/dist/types/commands/scan/cmd-scan-github.d.mts.map +1 -1
  36. package/dist/types/commands/scan/cmd-scan-list.d.mts.map +1 -1
  37. package/dist/types/commands/scan/cmd-scan-reach.d.mts.map +1 -1
  38. package/dist/types/commands/scan/generate-report.d.mts.map +1 -1
  39. package/dist/types/commands/scan/output-scan-report.d.mts.map +1 -1
  40. package/dist/types/commands/scan/perform-reachability-analysis.d.mts.map +1 -1
  41. package/dist/types/constants.d.mts +21 -7
  42. package/dist/types/constants.d.mts.map +1 -1
  43. package/dist/types/flags.d.mts.map +1 -1
  44. package/dist/types/utils/api.d.mts +8 -7
  45. package/dist/types/utils/api.d.mts.map +1 -1
  46. package/dist/types/utils/config.d.mts.map +1 -1
  47. package/dist/types/utils/git.d.mts +1 -0
  48. package/dist/types/utils/git.d.mts.map +1 -1
  49. package/dist/types/utils/meow-with-subcommands.d.mts.map +1 -1
  50. package/dist/types/utils/purl.d.mts +34 -19
  51. package/dist/types/utils/purl.d.mts.map +1 -1
  52. package/dist/types/utils/socket-package-alert.d.mts.map +1 -1
  53. package/dist/types/utils/spec.d.mts +1 -1
  54. package/dist/types/utils/spec.d.mts.map +1 -1
  55. package/dist/utils.js +94 -66
  56. package/dist/utils.js.map +1 -1
  57. package/dist/vendor.js +378 -378
  58. package/external/@socketsecurity/registry/lib/constants/skip-tests-by-ecosystem.js +0 -1
  59. package/package.json +9 -8
package/dist/cli.js CHANGED
@@ -26,6 +26,7 @@ var packages = require('../external/@socketsecurity/registry/lib/packages');
26
26
  var require$$12 = require('../external/@socketsecurity/registry/lib/promises');
27
27
  var regexps = require('../external/@socketsecurity/registry/lib/regexps');
28
28
  var require$$0$1 = require('node:crypto');
29
+ var registryConstants = require('../external/@socketsecurity/registry/lib/constants');
29
30
  var require$$1 = require('node:util');
30
31
  var os = require('node:os');
31
32
  var promises = require('node:stream/promises');
@@ -44,7 +45,7 @@ async function fetchOrgAnalyticsData(time, options) {
44
45
  }
45
46
  const sockSdk = sockSdkCResult.data;
46
47
  return await utils.handleApiCall(sockSdk.getOrgAnalytics(time.toString()), {
47
- desc: 'analytics data'
48
+ description: 'analytics data'
48
49
  });
49
50
  }
50
51
 
@@ -61,7 +62,7 @@ async function fetchRepoAnalyticsData(repo, time, options) {
61
62
  }
62
63
  const sockSdk = sockSdkCResult.data;
63
64
  return await utils.handleApiCall(sockSdk.getRepoAnalytics(repo, time.toString()), {
64
- desc: 'analytics data'
65
+ description: 'analytics data'
65
66
  });
66
67
  }
67
68
 
@@ -73,7 +74,7 @@ const METRICS = ['total_critical_alerts', 'total_high_alerts', 'total_medium_ale
73
74
  // Note: This maps `new Date(date).getMonth()` to English three letters
74
75
  const Months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
75
76
  async function outputAnalytics(result, {
76
- filePath,
77
+ filepath,
77
78
  outputKind,
78
79
  repo,
79
80
  scope,
@@ -92,10 +93,10 @@ async function outputAnalytics(result, {
92
93
  }
93
94
  if (outputKind === 'json') {
94
95
  const serialized = utils.serializeResultJson(result);
95
- if (filePath) {
96
+ if (filepath) {
96
97
  try {
97
- await fs.writeFile(filePath, serialized, 'utf8');
98
- logger.logger.success(`Data successfully written to ${filePath}`);
98
+ await fs.writeFile(filepath, serialized, 'utf8');
99
+ logger.logger.success(`Data successfully written to ${filepath}`);
99
100
  } catch (e) {
100
101
  process.exitCode = 1;
101
102
  logger.logger.log(utils.serializeResultJson({
@@ -114,10 +115,10 @@ async function outputAnalytics(result, {
114
115
  const serialized = renderMarkdown(fdata, time, repo);
115
116
 
116
117
  // TODO: Do we want to write to file even if there was an error...?
117
- if (filePath) {
118
+ if (filepath) {
118
119
  try {
119
- await fs.writeFile(filePath, serialized, 'utf8');
120
- logger.logger.success(`Data successfully written to ${filePath}`);
120
+ await fs.writeFile(filepath, serialized, 'utf8');
121
+ logger.logger.success(`Data successfully written to ${filepath}`);
121
122
  } catch (e) {
122
123
  logger.logger.error(e);
123
124
  }
@@ -208,7 +209,10 @@ function formatDataRepo(data) {
208
209
  }
209
210
  }
210
211
  const topFiveAlertEntries = Object.entries(totalTopAlerts).sort(([_keya, a], [_keyb, b]) => b - a).slice(0, 5);
211
- for (const [key, value] of topFiveAlertEntries) {
212
+ for (const {
213
+ 0: key,
214
+ 1: value
215
+ } of topFiveAlertEntries) {
212
216
  sortedTopFiveAlerts[key] = value;
213
217
  }
214
218
  return {
@@ -246,7 +250,10 @@ function formatDataOrg(data) {
246
250
  }
247
251
  }
248
252
  const topFiveAlertEntries = Object.entries(totalTopAlerts).sort(([_keya, a], [_keyb, b]) => b - a).slice(0, 5);
249
- for (const [key, value] of topFiveAlertEntries) {
253
+ for (const {
254
+ 0: key,
255
+ 1: value
256
+ } of topFiveAlertEntries) {
250
257
  sortedTopFiveAlerts[key] = value;
251
258
  }
252
259
  return {
@@ -283,7 +290,7 @@ function renderLineCharts(grid, screen, title, coords, data) {
283
290
  }
284
291
 
285
292
  async function handleAnalytics({
286
- filePath,
293
+ filepath,
287
294
  outputKind,
288
295
  repo,
289
296
  scope,
@@ -308,7 +315,7 @@ async function handleAnalytics({
308
315
  };
309
316
  }
310
317
  await outputAnalytics(result, {
311
- filePath,
318
+ filepath,
312
319
  outputKind,
313
320
  repo,
314
321
  scope,
@@ -336,6 +343,7 @@ async function run$Q(argv, importMeta, {
336
343
  ...flags.outputFlags,
337
344
  file: {
338
345
  type: 'string',
346
+ default: '',
339
347
  description: 'Path to store result, only valid with --json/--markdown'
340
348
  }
341
349
  },
@@ -397,7 +405,7 @@ async function run$Q(argv, importMeta, {
397
405
  time = cli.input[0];
398
406
  }
399
407
  const {
400
- file,
408
+ file: filepath,
401
409
  json,
402
410
  markdown
403
411
  } = cli.flags;
@@ -408,7 +416,7 @@ async function run$Q(argv, importMeta, {
408
416
  const wasValidInput = utils.checkCommandInput(outputKind, {
409
417
  nook: true,
410
418
  test: noLegacy,
411
- message: 'Legacy flags are no longer supported. See v1 migration guide.',
419
+ message: `Legacy flags are no longer supported. See ${vendor.terminalLinkExports('v1 migration guide', constants.V1_MIGRATION_GUIDE_URL)}.`,
412
420
  fail: `received legacy flags`
413
421
  }, {
414
422
  nook: true,
@@ -426,7 +434,7 @@ async function run$Q(argv, importMeta, {
426
434
  fail: 'invalid range set, see --help for command arg details.'
427
435
  }, {
428
436
  nook: true,
429
- test: !file || !!json || !!markdown,
437
+ test: !filepath || !!json || !!markdown,
430
438
  message: 'The `--file` flag is only valid when using `--json` or `--markdown`',
431
439
  fail: 'bad'
432
440
  }, {
@@ -448,11 +456,11 @@ async function run$Q(argv, importMeta, {
448
456
  return;
449
457
  }
450
458
  return await handleAnalytics({
451
- scope,
452
- time: time === '90' ? 90 : time === '30' ? 30 : 7,
453
- repo: repoName,
459
+ filepath,
454
460
  outputKind,
455
- filePath: String(file || '')
461
+ repo: repoName,
462
+ scope,
463
+ time: time === '90' ? 90 : time === '30' ? 30 : 7
456
464
  });
457
465
  }
458
466
 
@@ -488,7 +496,7 @@ async function fetchAuditLog(config, options) {
488
496
  page: String(page),
489
497
  per_page: String(perPage)
490
498
  }), {
491
- desc: `audit log for ${orgSlug}`
499
+ description: `audit log for ${orgSlug}`
492
500
  });
493
501
  }
494
502
 
@@ -503,7 +511,7 @@ async function outputAuditLog(result, {
503
511
  if (!result.ok) {
504
512
  process.exitCode = result.code ?? 1;
505
513
  }
506
- if (outputKind === 'json') {
514
+ if (outputKind === constants.OUTPUT_JSON) {
507
515
  logger.logger.log(await outputAsJson(result, {
508
516
  logType,
509
517
  orgSlug,
@@ -515,7 +523,7 @@ async function outputAuditLog(result, {
515
523
  logger.logger.fail(utils.failMsgWithBadge(result.message, result.cause));
516
524
  return;
517
525
  }
518
- if (outputKind === 'markdown') {
526
+ if (outputKind === constants.OUTPUT_MARKDOWN) {
519
527
  logger.logger.log(await outputAsMarkdown(result.data, {
520
528
  logType,
521
529
  orgSlug,
@@ -814,6 +822,7 @@ async function run$P(argv, importMeta, {
814
822
  parentName
815
823
  });
816
824
  const {
825
+ interactive,
817
826
  json,
818
827
  markdown,
819
828
  org: orgFlag,
@@ -821,17 +830,18 @@ async function run$P(argv, importMeta, {
821
830
  perPage
822
831
  } = cli.flags;
823
832
  const dryRun = !!cli.flags['dryRun'];
824
- const interactive = !!cli.flags['interactive'];
825
833
  const noLegacy = !cli.flags['type'];
826
834
  let [typeFilter = ''] = cli.input;
827
835
  typeFilter = String(typeFilter);
828
836
  const hasApiToken = utils.hasDefaultApiToken();
829
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
837
+ const {
838
+ 0: orgSlug
839
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
830
840
  const outputKind = utils.getOutputKind(json, markdown);
831
841
  const wasValidInput = utils.checkCommandInput(outputKind, {
832
842
  nook: true,
833
843
  test: noLegacy,
834
- message: 'Legacy flags are no longer supported. See v1 migration guide.',
844
+ message: `Legacy flags are no longer supported. See ${vendor.terminalLinkExports('v1 migration guide', constants.V1_MIGRATION_GUIDE_URL)}.`,
835
845
  fail: `received legacy flags`
836
846
  }, {
837
847
  nook: true,
@@ -918,7 +928,7 @@ async function fetchCreateOrgFullScan(packagePaths, orgSlug, config, options) {
918
928
  set_as_pending_head: String(pendingHead),
919
929
  tmp: String(tmp)
920
930
  }), {
921
- desc: 'to create a scan'
931
+ description: 'to create a scan'
922
932
  });
923
933
  }
924
934
 
@@ -936,7 +946,7 @@ async function fetchSupportedScanFileNames(options) {
936
946
  }
937
947
  const sockSdk = sockSdkCResult.data;
938
948
  return await utils.handleApiCall(sockSdk.getSupportedScanFiles(), {
939
- desc: 'supported scan file types',
949
+ description: 'supported scan file types',
940
950
  spinner
941
951
  });
942
952
  }
@@ -981,12 +991,12 @@ async function fetchScanData(orgSlug, scanId, options) {
981
991
  const {
982
992
  spinner
983
993
  } = constants.default;
984
- function updateScan(desc) {
985
- scanStatus = desc;
994
+ function updateScan(status) {
995
+ scanStatus = status;
986
996
  updateProgress();
987
997
  }
988
- function updatePolicy(desc) {
989
- policyStatus = desc;
998
+ function updatePolicy(status) {
999
+ policyStatus = status;
990
1000
  updateProgress();
991
1001
  }
992
1002
  function updateProgress() {
@@ -1081,8 +1091,6 @@ async function fetchScanData(orgSlug, scanId, options) {
1081
1091
  };
1082
1092
  }
1083
1093
 
1084
- const UNKNOWN_VALUE = '<unknown>';
1085
-
1086
1094
  // Note: The returned cResult will only be ok:false when the generation
1087
1095
  // failed. It won't reflect the healthy state.
1088
1096
  function generateReport(scan, securityPolicy, {
@@ -1131,9 +1139,9 @@ function generateReport(scan, securityPolicy, {
1131
1139
  scan.forEach(artifact => {
1132
1140
  const {
1133
1141
  alerts,
1134
- name: pkgName = UNKNOWN_VALUE,
1142
+ name: pkgName = constants.UNKNOWN_VALUE,
1135
1143
  type: ecosystem,
1136
- version = UNKNOWN_VALUE
1144
+ version = constants.UNKNOWN_VALUE
1137
1145
  } = artifact;
1138
1146
  alerts?.forEach(alert => {
1139
1147
  const alertName = alert.type; // => policy[type]
@@ -1244,7 +1252,7 @@ function addAlert(art, violations, fold, ecosystem, pkgName, version, alert, pol
1244
1252
  if (!pkgMap.has(version)) {
1245
1253
  pkgMap.set(version, new Map());
1246
1254
  }
1247
- const file = alert.file || UNKNOWN_VALUE;
1255
+ const file = alert.file || constants.UNKNOWN_VALUE;
1248
1256
  const verMap = pkgMap.get(version);
1249
1257
  if (fold === constants.default.FOLD_SETTING_FILE) {
1250
1258
  const existing = verMap.get(file);
@@ -1315,7 +1323,7 @@ async function outputScanReport(result, {
1315
1323
  process.exitCode = result.code ?? 1;
1316
1324
  }
1317
1325
  if (!result.ok) {
1318
- if (outputKind === constants.JSON) {
1326
+ if (outputKind === constants.OUTPUT_JSON) {
1319
1327
  logger.logger.log(utils.serializeResultJson(result));
1320
1328
  return;
1321
1329
  }
@@ -1331,11 +1339,11 @@ async function outputScanReport(result, {
1331
1339
  spinner: constants.default.spinner
1332
1340
  });
1333
1341
  if (!scanReport.ok) {
1334
- // Note: this means generation failed, it does not reflect the healthy state
1342
+ // Note: This means generation failed, it does not reflect the healthy state.
1335
1343
  process.exitCode = scanReport.code ?? 1;
1336
1344
 
1337
1345
  // If report generation somehow failed then .data should not be set.
1338
- if (outputKind === constants.JSON) {
1346
+ if (outputKind === constants.OUTPUT_JSON) {
1339
1347
  logger.logger.log(utils.serializeResultJson(scanReport));
1340
1348
  return;
1341
1349
  }
@@ -1343,13 +1351,13 @@ async function outputScanReport(result, {
1343
1351
  return;
1344
1352
  }
1345
1353
 
1346
- // I don't think we emit the default error message with banner for an unhealhty report, do we?
1347
- // if (!scanReport.data.healhty) {
1354
+ // I don't think we emit the default error message with banner for an unhealthy report, do we?
1355
+ // if (!scanReport.data.healthy) {
1348
1356
  // logger.fail(failMsgWithBadge(scanReport.message, scanReport.cause))
1349
1357
  // return
1350
1358
  // }
1351
1359
 
1352
- if (outputKind === constants.JSON || outputKind === constants.TEXT && filepath && filepath.endsWith(constants.EXT_JSON)) {
1360
+ if (outputKind === constants.OUTPUT_JSON || outputKind === constants.OUTPUT_TEXT && filepath && filepath.endsWith(constants.EXT_JSON)) {
1353
1361
  const json = short ? utils.serializeResultJson(scanReport) : toJsonReport(scanReport.data, includeLicensePolicy);
1354
1362
  if (filepath && filepath !== '-') {
1355
1363
  logger.logger.log('Writing json report to', filepath);
@@ -1359,9 +1367,9 @@ async function outputScanReport(result, {
1359
1367
  return;
1360
1368
  }
1361
1369
  if (outputKind === 'markdown' || filepath && filepath.endsWith('.md')) {
1362
- const md = short ? `healthy = ${scanReport.data.healthy}` : toMarkdownReport(scanReport.data,
1363
- // not short so must be regular report
1364
- includeLicensePolicy);
1370
+ const md = short ? `healthy = ${scanReport.data.healthy}` : toMarkdownReport(
1371
+ // Not short so must be a regular report.
1372
+ scanReport.data, includeLicensePolicy);
1365
1373
  if (filepath && filepath !== '-') {
1366
1374
  logger.logger.log('Writing markdown report to', filepath);
1367
1375
  return await fs.writeFile(filepath, md);
@@ -1550,7 +1558,7 @@ async function performReachabilityAnalysis(options) {
1550
1558
  ...options
1551
1559
  };
1552
1560
 
1553
- // Check if user has enterprise plan for reachability analysis
1561
+ // Check if user has enterprise plan for reachability analysis.
1554
1562
  const orgsCResult = await utils.fetchOrganization();
1555
1563
  if (!orgsCResult.ok) {
1556
1564
  return {
@@ -1569,6 +1577,7 @@ async function performReachabilityAnalysis(options) {
1569
1577
  cause: `Please ${vendor.terminalLinkExports('upgrade your plan', 'https://socket.dev/pricing')}. This feature is only available for organizations with an enterprise plan.`
1570
1578
  };
1571
1579
  }
1580
+ const wasSpinning = !!spinner?.isSpinning;
1572
1581
  let tarHash;
1573
1582
  if (uploadManifests && orgSlug && packagePaths) {
1574
1583
  // Setup SDK for uploading manifests
@@ -1577,14 +1586,13 @@ async function performReachabilityAnalysis(options) {
1577
1586
  return sockSdkCResult;
1578
1587
  }
1579
1588
  const sockSdk = sockSdkCResult.data;
1580
- const wasSpinning = !!spinner?.isSpinning;
1581
1589
 
1582
1590
  // Exclude any .socket.facts.json files that happen to be in the scan
1583
1591
  // folder before the analysis was run.
1584
1592
  const filepathsToUpload = packagePaths.filter(p => path.basename(p).toLowerCase() !== constants.default.DOT_SOCKET_DOT_FACTS_JSON);
1585
1593
  spinner?.start('Uploading manifests for reachability analysis...');
1586
1594
  const uploadCResult = await utils.handleApiCall(sockSdk.uploadManifestFiles(orgSlug, filepathsToUpload), {
1587
- desc: 'upload manifests',
1595
+ description: 'upload manifests',
1588
1596
  spinner
1589
1597
  });
1590
1598
  spinner?.stop();
@@ -1634,7 +1642,6 @@ async function performReachabilityAnalysis(options) {
1634
1642
  spinner,
1635
1643
  stdio: 'inherit'
1636
1644
  });
1637
- const wasSpinning = !!spinner?.isSpinning;
1638
1645
  if (wasSpinning) {
1639
1646
  spinner.start();
1640
1647
  }
@@ -2638,7 +2645,10 @@ async function run$N(argv, importMeta, {
2638
2645
  $ ${command} defaultOrg
2639
2646
 
2640
2647
  Keys:
2641
- ${utils.getSupportedConfigEntries().map(([key, desc]) => ` - ${key} -- ${desc}`).join('\n')}
2648
+ ${utils.getSupportedConfigEntries().map(({
2649
+ 0: key,
2650
+ 1: description
2651
+ }) => ` - ${key} -- ${description}`).join('\n')}
2642
2652
  `
2643
2653
  };
2644
2654
  const cli = utils.meowOrExit({
@@ -2735,7 +2745,10 @@ const config$j = {
2735
2745
 
2736
2746
  KEY is an enum. Valid keys:
2737
2747
 
2738
- ${utils.getSupportedConfigEntries().map(([key, desc]) => ` - ${key} -- ${desc}`).join('\n')}
2748
+ ${utils.getSupportedConfigEntries().map(({
2749
+ 0: key,
2750
+ 1: description
2751
+ }) => ` - ${key} -- ${description}`).join('\n')}
2739
2752
 
2740
2753
  Examples
2741
2754
  $ ${command} defaultOrg
@@ -2993,7 +3006,10 @@ async function run$K(argv, importMeta, {
2993
3006
 
2994
3007
  Keys:
2995
3008
 
2996
- ${utils.getSupportedConfigEntries().map(([key, desc]) => ` - ${key} -- ${desc}`).join('\n')}
3009
+ ${utils.getSupportedConfigEntries().map(({
3010
+ 0: key,
3011
+ 1: description
3012
+ }) => ` - ${key} -- ${description}`).join('\n')}
2997
3013
 
2998
3014
  Examples
2999
3015
  $ ${command} apiProxy https://example.com
@@ -3111,7 +3127,10 @@ async function run$J(argv, importMeta, {
3111
3127
 
3112
3128
  Keys:
3113
3129
 
3114
- ${utils.getSupportedConfigEntries().map(([key, desc]) => ` - ${key} -- ${desc}`).join('\n')}
3130
+ ${utils.getSupportedConfigEntries().map(({
3131
+ 0: key,
3132
+ 1: description
3133
+ }) => ` - ${key} -- ${description}`).join('\n')}
3115
3134
 
3116
3135
  Examples
3117
3136
  $ ${command} defaultOrg
@@ -3246,10 +3265,10 @@ async function openSocketFixPr(owner, repo, branch, ghsaIds, options) {
3246
3265
  }
3247
3266
  return null;
3248
3267
  }
3249
- async function getSocketPrs(owner, repo, options) {
3250
- return (await getSocketPrsWithContext(owner, repo, options)).map(d => d.match);
3268
+ async function getSocketFixPrs(owner, repo, options) {
3269
+ return (await getSocketFixPrsWithContext(owner, repo, options)).map(d => d.match);
3251
3270
  }
3252
- async function getSocketPrsWithContext(owner, repo, options) {
3271
+ async function getSocketFixPrsWithContext(owner, repo, options) {
3253
3272
  const {
3254
3273
  author,
3255
3274
  ghsaId,
@@ -3260,117 +3279,101 @@ async function getSocketPrsWithContext(owner, repo, options) {
3260
3279
  };
3261
3280
  const branchPattern = getSocketFixBranchPattern(ghsaId);
3262
3281
  const checkAuthor = strings.isNonEmptyString(author);
3263
- const octokit = utils.getOctokit();
3264
3282
  const octokitGraphql = utils.getOctokitGraphql();
3265
3283
  const contextualMatches = [];
3266
- const states = (typeof statesValue === 'string' ? statesValue.toLowerCase() === 'all' ? ['OPEN', 'CLOSED', 'MERGED'] : [statesValue] : statesValue).map(s => s.toUpperCase());
3284
+ const states = (typeof statesValue === 'string' ? statesValue.toLowerCase() === 'all' ? [constants.GQL_PR_STATE_OPEN, constants.GQL_PR_STATE_CLOSED, constants.GQL_PR_STATE_MERGED] : [statesValue] : statesValue).map(s => s.toUpperCase());
3267
3285
  try {
3268
- // Optimistically fetch only the first 50 open PRs using GraphQL to minimize
3269
- // API quota usage. Fallback to REST if no matching PRs are found.
3286
+ let hasNextPage = true;
3287
+ let cursor = null;
3288
+ let pageIndex = 0;
3270
3289
  const gqlCacheKey = `${repo}-pr-graphql-snapshot`;
3271
- const gqlResp = await utils.cacheFetch(gqlCacheKey, () => octokitGraphql(`
3272
- query($owner: String!, $repo: String!, $states: [PullRequestState!]) {
3273
- repository(owner: $owner, name: $repo) {
3274
- pullRequests(first: 50, states: $states, orderBy: {field: CREATED_AT, direction: DESC}) {
3275
- nodes {
3276
- author {
3277
- login
3290
+ while (hasNextPage) {
3291
+ // eslint-disable-next-line no-await-in-loop
3292
+ const gqlResp = await utils.cacheFetch(`${gqlCacheKey}-page-${pageIndex}`, () => octokitGraphql(`
3293
+ query($owner: String!, $repo: String!, $states: [PullRequestState!], $after: String) {
3294
+ repository(owner: $owner, name: $repo) {
3295
+ pullRequests(first: 100, states: $states, after: $after, orderBy: {field: CREATED_AT, direction: DESC}) {
3296
+ pageInfo {
3297
+ hasNextPage
3298
+ endCursor
3299
+ }
3300
+ nodes {
3301
+ author {
3302
+ login
3303
+ }
3304
+ baseRefName
3305
+ headRefName
3306
+ mergeStateStatus
3307
+ number
3308
+ state
3309
+ title
3310
+ }
3278
3311
  }
3279
- baseRefName
3280
- headRefName
3281
- mergeStateStatus
3282
- number
3283
- state
3284
- title
3285
3312
  }
3286
3313
  }
3314
+ `, {
3315
+ owner,
3316
+ repo,
3317
+ states,
3318
+ after: cursor
3319
+ }));
3320
+ const {
3321
+ nodes,
3322
+ pageInfo
3323
+ } = gqlResp?.repository?.pullRequests ?? {
3324
+ nodes: [],
3325
+ pageInfo: {
3326
+ hasNextPage: false,
3327
+ endCursor: null
3328
+ }
3329
+ };
3330
+ for (let i = 0, {
3331
+ length
3332
+ } = nodes; i < length; i += 1) {
3333
+ const node = nodes[i];
3334
+ const login = node.author?.login;
3335
+ const matchesAuthor = checkAuthor ? login === author : true;
3336
+ const matchesBranch = branchPattern.test(node.headRefName);
3337
+ if (matchesAuthor && matchesBranch) {
3338
+ contextualMatches.push({
3339
+ context: {
3340
+ apiType: 'graphql',
3341
+ cacheKey: `${gqlCacheKey}-page-${pageIndex}`,
3342
+ data: gqlResp,
3343
+ entry: node,
3344
+ index: i,
3345
+ parent: nodes
3346
+ },
3347
+ match: {
3348
+ ...node,
3349
+ author: login ?? constants.UNKNOWN_VALUE
3287
3350
  }
3288
- }
3289
- `, {
3290
- owner,
3291
- repo,
3292
- states
3293
- }));
3294
- const nodes = gqlResp?.repository?.pullRequests?.nodes ?? [];
3295
- for (let i = 0, {
3296
- length
3297
- } = nodes; i < length; i += 1) {
3298
- const node = nodes[i];
3299
- const login = node.author?.login;
3300
- const matchesAuthor = checkAuthor ? login === author : true;
3301
- const matchesBranch = branchPattern.test(node.headRefName);
3302
- if (matchesAuthor && matchesBranch) {
3303
- contextualMatches.push({
3304
- context: {
3305
- apiType: 'graphql',
3306
- cacheKey: gqlCacheKey,
3307
- data: gqlResp,
3308
- entry: node,
3309
- index: i,
3310
- parent: nodes
3311
- },
3312
- match: {
3313
- ...node,
3314
- author: login ?? '<unknown>'
3315
- }
3316
- });
3351
+ });
3352
+ }
3317
3353
  }
3318
- }
3319
- } catch {}
3320
- if (contextualMatches.length) {
3321
- return contextualMatches;
3322
- }
3323
3354
 
3324
- // Fallback to REST if GraphQL found no matching PRs.
3325
- let allPrs;
3326
- const cacheKey = `${repo}-pull-requests`;
3327
- try {
3328
- allPrs = await utils.cacheFetch(cacheKey, async () => await octokit.paginate(octokit.pulls.list, {
3329
- owner,
3330
- repo,
3331
- state: 'all',
3332
- per_page: 100
3333
- }));
3334
- } catch {}
3335
- if (!allPrs) {
3336
- return contextualMatches;
3337
- }
3338
- for (let i = 0, {
3339
- length
3340
- } = allPrs; i < length; i += 1) {
3341
- const pr = allPrs[i];
3342
- const login = pr.user?.login;
3343
- const headRefName = pr.head.ref;
3344
- const matchesAuthor = checkAuthor ? login === author : true;
3345
- const matchesBranch = branchPattern.test(headRefName);
3346
- if (matchesAuthor && matchesBranch) {
3347
- // Upper cased mergeable_state is equivalent to mergeStateStatus.
3348
- // https://docs.github.com/en/rest/pulls/pulls?apiVersion=2022-11-28#get-a-pull-request
3349
- const mergeStateStatus = pr.mergeable_state?.toUpperCase?.() ?? 'UNKNOWN';
3350
- // The REST API does not have a distinct merged state for pull requests.
3351
- // Instead, a merged pull request is represented as a closed pull request
3352
- // with a non-null merged_at timestamp.
3353
- const state = pr.merged_at ? 'MERGED' : pr.state.toUpperCase();
3354
- contextualMatches.push({
3355
- context: {
3356
- apiType: 'rest',
3357
- cacheKey,
3358
- data: allPrs,
3359
- entry: pr,
3360
- index: i,
3361
- parent: allPrs
3362
- },
3363
- match: {
3364
- author: login ?? '<unknown>',
3365
- baseRefName: pr.base.ref,
3366
- headRefName,
3367
- mergeStateStatus,
3368
- number: pr.number,
3369
- state,
3370
- title: pr.title
3371
- }
3372
- });
3355
+ // Continue to next page.
3356
+ hasNextPage = pageInfo.hasNextPage;
3357
+ cursor = pageInfo.endCursor;
3358
+ pageIndex += 1;
3359
+
3360
+ // Safety limit to prevent infinite loops.
3361
+ if (pageIndex === constants.GQL_PAGE_SENTINEL) {
3362
+ require$$9.debugFn('warn', `GraphQL pagination reached safety limit (${constants.GQL_PAGE_SENTINEL} pages) for ${owner}/${repo}`);
3363
+ break;
3364
+ }
3365
+
3366
+ // Early exit optimization: if we found matches and only looking for specific GHSA,
3367
+ // we can stop pagination since we likely found what we need.
3368
+ if (contextualMatches.length > 0 && ghsaId) {
3369
+ break;
3370
+ }
3373
3371
  }
3372
+ } catch (e) {
3373
+ require$$9.debugFn('error', `GraphQL pagination failed for ${owner}/${repo}`);
3374
+ require$$9.debugDir('inspect', {
3375
+ error: e
3376
+ });
3374
3377
  }
3375
3378
  return contextualMatches;
3376
3379
  }
@@ -3418,7 +3421,7 @@ async function getFixEnv() {
3418
3421
  }
3419
3422
  repoInfo = await utils.getRepoInfo();
3420
3423
  }
3421
- const prs = isCi && repoInfo ? await getSocketPrs(repoInfo.owner, repoInfo.repo, {
3424
+ const prs = isCi && repoInfo ? await getSocketFixPrs(repoInfo.owner, repoInfo.repo, {
3422
3425
  author: gitUser,
3423
3426
  states: 'all'
3424
3427
  }) : [];
@@ -3435,7 +3438,7 @@ async function getFixEnv() {
3435
3438
 
3436
3439
  async function coanaFix(fixConfig) {
3437
3440
  const {
3438
- autoMerge,
3441
+ autopilot,
3439
3442
  cwd,
3440
3443
  ghsas,
3441
3444
  limit,
@@ -3463,7 +3466,7 @@ async function coanaFix(fixConfig) {
3463
3466
  cwd
3464
3467
  });
3465
3468
  const uploadCResult = await utils.handleApiCall(sockSdk.uploadManifestFiles(orgSlug, scanFilepaths), {
3466
- desc: 'upload manifests',
3469
+ description: 'upload manifests',
3467
3470
  spinner
3468
3471
  });
3469
3472
  if (!uploadCResult.ok) {
@@ -3504,18 +3507,40 @@ async function coanaFix(fixConfig) {
3504
3507
  }
3505
3508
  } : fixCResult;
3506
3509
  }
3510
+
3511
+ // Adjust limit based on open Socket Fix PRs.
3512
+ let adjustedLimit = limit;
3513
+ if (shouldOpenPrs && fixEnv.repoInfo) {
3514
+ try {
3515
+ const openPrs = await getSocketFixPrs(fixEnv.repoInfo.owner, fixEnv.repoInfo.repo, {
3516
+ states: constants.GQL_PR_STATE_OPEN
3517
+ });
3518
+ const openPrCount = openPrs.length;
3519
+ // Reduce limit by number of open PRs to avoid creating too many.
3520
+ adjustedLimit = Math.max(0, limit - openPrCount);
3521
+ if (openPrCount > 0) {
3522
+ require$$9.debugFn('notice', `limit: adjusted from ${limit} to ${adjustedLimit} (${openPrCount} open Socket Fix PRs)`);
3523
+ }
3524
+ } catch (e) {
3525
+ require$$9.debugFn('warn', 'Failed to count open PRs, using original limit');
3526
+ require$$9.debugDir('inspect', {
3527
+ error: e
3528
+ });
3529
+ }
3530
+ }
3531
+ const shouldSpawnCoana = adjustedLimit > 0;
3507
3532
  let ids;
3508
- if (isAll) {
3533
+ if (shouldSpawnCoana && isAll) {
3509
3534
  const foundCResult = await utils.spawnCoana(['compute-fixes-and-upgrade-purls', cwd, '--manifests-tar-hash', tarHash, ...(fixConfig.rangeStyle ? ['--range-style', fixConfig.rangeStyle] : []), ...fixConfig.unknownFlags], fixConfig.orgSlug, {
3510
3535
  cwd,
3511
3536
  spinner
3512
3537
  });
3513
3538
  if (foundCResult.ok) {
3514
3539
  const foundIds = utils.cmdFlagValueToArray(/(?<=Vulnerabilities found:).*/.exec(foundCResult.data));
3515
- ids = foundIds.slice(0, limit);
3540
+ ids = foundIds.slice(0, adjustedLimit);
3516
3541
  }
3517
- } else {
3518
- ids = ghsas.slice(0, limit);
3542
+ } else if (shouldSpawnCoana) {
3543
+ ids = ghsas.slice(0, adjustedLimit);
3519
3544
  }
3520
3545
  if (!ids?.length) {
3521
3546
  require$$9.debugFn('notice', 'miss: no GHSA IDs to process');
@@ -3539,7 +3564,7 @@ async function coanaFix(fixConfig) {
3539
3564
  let count = 0;
3540
3565
  let overallFixed = false;
3541
3566
 
3542
- // Process each GHSA ID individually, similar to npm-fix/pnpm-fix.
3567
+ // Process each GHSA ID individually.
3543
3568
  ghsaLoop: for (let i = 0, {
3544
3569
  length
3545
3570
  } = ids; i < length; i += 1) {
@@ -3554,7 +3579,7 @@ async function coanaFix(fixConfig) {
3554
3579
  stdio: 'inherit'
3555
3580
  });
3556
3581
  if (!fixCResult.ok) {
3557
- logger.logger.error(`Update failed for ${ghsaId}: ${fixCResult.message || 'Unknown error'}`);
3582
+ logger.logger.error(`Update failed for ${ghsaId}: ${fixCResult.message || constants.UNKNOWN_ERROR}`);
3558
3583
  continue ghsaLoop;
3559
3584
  }
3560
3585
 
@@ -3620,7 +3645,7 @@ async function coanaFix(fixConfig) {
3620
3645
  } = prResponse;
3621
3646
  const prRef = `PR #${data.number}`;
3622
3647
  logger.logger.success(`Opened ${prRef} for ${ghsaId}.`);
3623
- if (autoMerge) {
3648
+ if (autopilot) {
3624
3649
  logger.logger.indent();
3625
3650
  spinner?.indent();
3626
3651
  // eslint-disable-next-line no-await-in-loop
@@ -3655,8 +3680,8 @@ async function coanaFix(fixConfig) {
3655
3680
  await utils.gitCheckoutBranch(fixEnv.baseBranch, cwd);
3656
3681
  }
3657
3682
  count += 1;
3658
- require$$9.debugFn('notice', `increment: count ${count}/${Math.min(limit, ids.length)}`);
3659
- if (count >= limit) {
3683
+ require$$9.debugFn('notice', `increment: count ${count}/${Math.min(adjustedLimit, ids.length)}`);
3684
+ if (count >= adjustedLimit) {
3660
3685
  break ghsaLoop;
3661
3686
  }
3662
3687
  }
@@ -3686,7 +3711,7 @@ async function outputFixResult(result, outputKind) {
3686
3711
  }
3687
3712
 
3688
3713
  async function handleFix({
3689
- autoMerge,
3714
+ autopilot,
3690
3715
  cwd,
3691
3716
  ghsas,
3692
3717
  limit,
@@ -3694,15 +3719,12 @@ async function handleFix({
3694
3719
  orgSlug,
3695
3720
  outputKind,
3696
3721
  prCheck,
3697
- purls,
3698
3722
  rangeStyle,
3699
3723
  spinner,
3700
- test,
3701
- testScript,
3702
3724
  unknownFlags
3703
3725
  }) {
3704
3726
  await outputFixResult(await coanaFix({
3705
- autoMerge,
3727
+ autopilot,
3706
3728
  cwd,
3707
3729
  ghsas,
3708
3730
  limit,
@@ -3723,7 +3745,7 @@ const cmdFix = {
3723
3745
  run: run$I
3724
3746
  };
3725
3747
  const generalFlags$2 = {
3726
- autoMerge: {
3748
+ autopilot: {
3727
3749
  type: 'boolean',
3728
3750
  default: false,
3729
3751
  description: `Enable auto-merge for pull requests that Socket opens.\nSee ${vendor.terminalLinkExports('GitHub documentation', 'https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/configuring-pull-request-merges/managing-auto-merge-for-pull-requests-in-your-repository')} for managing auto-merge for pull requests in your repository.`
@@ -3757,10 +3779,8 @@ Available styles:
3757
3779
  }
3758
3780
  };
3759
3781
  const hiddenFlags = {
3760
- autopilot: {
3761
- type: 'boolean',
3762
- default: false,
3763
- description: `Shorthand for --auto-merge --test`,
3782
+ autoMerge: {
3783
+ ...generalFlags$2['autopilot'],
3764
3784
  hidden: true
3765
3785
  },
3766
3786
  ghsa: {
@@ -3841,11 +3861,20 @@ async function run$I(argv, importMeta, {
3841
3861
  importMeta,
3842
3862
  parentName
3843
3863
  });
3864
+ const {
3865
+ autopilot,
3866
+ json,
3867
+ limit,
3868
+ markdown,
3869
+ maxSatisfying,
3870
+ prCheck,
3871
+ rangeStyle,
3872
+ // We patched in this feature with `npx custompatch meow` at
3873
+ // socket-cli/patches/meow#13.2.0.patch.
3874
+ unknownFlags = []
3875
+ } = cli.flags;
3844
3876
  const dryRun = !!cli.flags['dryRun'];
3845
- let rangeStyle = cli.flags['rangeStyle'];
3846
- if (!rangeStyle) {
3847
- rangeStyle = 'preserve';
3848
- }
3877
+ const minSatisfying = cli.flags['minSatisfying'] || !maxSatisfying;
3849
3878
  const rawPurls = utils.cmdFlagValueToArray(cli.flags['purl']);
3850
3879
  const purls = [];
3851
3880
  for (const purl of rawPurls) {
@@ -3863,14 +3892,14 @@ async function run$I(argv, importMeta, {
3863
3892
  logger.logger.fail('No valid --purl values provided.');
3864
3893
  return;
3865
3894
  }
3866
- const outputKind = utils.getOutputKind(cli.flags['json'], cli.flags['markdown']);
3895
+ const outputKind = utils.getOutputKind(json, markdown);
3867
3896
  const wasValidInput = utils.checkCommandInput(outputKind, {
3868
3897
  test: utils.RangeStyles.includes(rangeStyle),
3869
3898
  message: `Expecting range style of ${arrays.joinOr(utils.RangeStyles)}`,
3870
3899
  fail: 'invalid'
3871
3900
  }, {
3872
3901
  nook: true,
3873
- test: !cli.flags['json'] || !cli.flags['markdown'],
3902
+ test: !json || !markdown,
3874
3903
  message: 'The json and markdown flags cannot be both set, pick one',
3875
3904
  fail: 'omit one'
3876
3905
  });
@@ -3892,26 +3921,12 @@ async function run$I(argv, importMeta, {
3892
3921
  // Note: path.resolve vs .join:
3893
3922
  // If given path is absolute then cwd should not affect it.
3894
3923
  cwd = path.resolve(process.cwd(), cwd);
3895
- let autoMerge = Boolean(cli.flags['autoMerge']);
3896
- let test = Boolean(cli.flags['test']);
3897
- if (cli.flags['autopilot']) {
3898
- autoMerge = true;
3899
- test = true;
3900
- }
3901
3924
  const {
3902
3925
  spinner
3903
3926
  } = constants.default;
3904
- // We patched in this feature with `npx custompatch meow` at
3905
- // socket-cli/patches/meow#13.2.0.patch.
3906
- const unknownFlags = cli.unknownFlags ?? [];
3907
3927
  const ghsas = arrays.arrayUnique([...utils.cmdFlagValueToArray(cli.flags['id']), ...utils.cmdFlagValueToArray(cli.flags['ghsa'])]);
3908
- const limit = Number(cli.flags['limit']) || DEFAULT_LIMIT;
3909
- const maxSatisfying = Boolean(cli.flags['maxSatisfying']);
3910
- const minSatisfying = Boolean(cli.flags['minSatisfying']) || !maxSatisfying;
3911
- const prCheck = Boolean(cli.flags['prCheck']);
3912
- const testScript = String(cli.flags['testScript'] || 'test');
3913
3928
  await handleFix({
3914
- autoMerge,
3929
+ autopilot,
3915
3930
  cwd,
3916
3931
  ghsas,
3917
3932
  limit,
@@ -3919,11 +3934,8 @@ async function run$I(argv, importMeta, {
3919
3934
  prCheck,
3920
3935
  orgSlug,
3921
3936
  outputKind,
3922
- purls,
3923
3937
  rangeStyle,
3924
3938
  spinner,
3925
- test,
3926
- testScript,
3927
3939
  unknownFlags
3928
3940
  });
3929
3941
  }
@@ -4216,7 +4228,7 @@ async function attemptLogin(apiBaseUrl, apiProxy) {
4216
4228
  }
4217
4229
  const sockSdk = sockSdkCResult.data;
4218
4230
  const orgsCResult = await utils.fetchOrganization({
4219
- desc: 'token verification',
4231
+ description: 'token verification',
4220
4232
  sdk: sockSdk
4221
4233
  });
4222
4234
  if (!orgsCResult.ok) {
@@ -4339,10 +4351,12 @@ async function run$F(argv, importMeta, {
4339
4351
  ...flags.commonFlags,
4340
4352
  apiBaseUrl: {
4341
4353
  type: 'string',
4354
+ default: '',
4342
4355
  description: 'API server to connect to for login'
4343
4356
  },
4344
4357
  apiProxy: {
4345
4358
  type: 'string',
4359
+ default: '',
4346
4360
  description: 'Proxy to use when making connection to API server'
4347
4361
  }
4348
4362
  },
@@ -4377,8 +4391,10 @@ async function run$F(argv, importMeta, {
4377
4391
  if (!vendor.isInteractiveExports()) {
4378
4392
  throw new utils.InputError('Cannot prompt for credentials in a non-interactive shell. Use SOCKET_CLI_API_TOKEN environment variable instead');
4379
4393
  }
4380
- const apiBaseUrl = cli.flags['apiBaseUrl'];
4381
- const apiProxy = cli.flags['apiProxy'];
4394
+ const {
4395
+ apiBaseUrl,
4396
+ apiProxy
4397
+ } = cli.flags;
4382
4398
  await attemptLogin(apiBaseUrl, apiProxy);
4383
4399
  }
4384
4400
 
@@ -7227,7 +7243,7 @@ async function fetchDependencies(config, options) {
7227
7243
  limit,
7228
7244
  offset
7229
7245
  }), {
7230
- desc: 'organization dependencies'
7246
+ description: 'organization dependencies'
7231
7247
  });
7232
7248
  }
7233
7249
 
@@ -7404,7 +7420,7 @@ async function fetchLicensePolicy(orgSlug, options) {
7404
7420
  }
7405
7421
  const sockSdk = sockSdkCResult.data;
7406
7422
  return await utils.handleApiCall(sockSdk.getOrgLicensePolicy(orgSlug), {
7407
- desc: 'organization license policy'
7423
+ description: 'organization license policy'
7408
7424
  });
7409
7425
  }
7410
7426
 
@@ -7501,7 +7517,9 @@ async function run$q(argv, importMeta, {
7501
7517
  const dryRun = !!cli.flags['dryRun'];
7502
7518
  const interactive = !!cli.flags['interactive'];
7503
7519
  const hasApiToken = utils.hasDefaultApiToken();
7504
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
7520
+ const {
7521
+ 0: orgSlug
7522
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
7505
7523
  const outputKind = utils.getOutputKind(json, markdown);
7506
7524
  const wasValidInput = utils.checkCommandInput(outputKind, {
7507
7525
  nook: true,
@@ -7537,7 +7555,7 @@ async function fetchSecurityPolicy(orgSlug, options) {
7537
7555
  }
7538
7556
  const sockSdk = sockSdkCResult.data;
7539
7557
  return await utils.handleApiCall(sockSdk.getOrgSecurityPolicy(orgSlug), {
7540
- desc: 'organization security policy'
7558
+ description: 'organization security policy'
7541
7559
  });
7542
7560
  }
7543
7561
 
@@ -7635,7 +7653,9 @@ async function run$p(argv, importMeta, {
7635
7653
  const dryRun = !!cli.flags['dryRun'];
7636
7654
  const interactive = !!cli.flags['interactive'];
7637
7655
  const hasApiToken = utils.hasDefaultApiToken();
7638
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
7656
+ const {
7657
+ 0: orgSlug
7658
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
7639
7659
  const outputKind = utils.getOutputKind(json, markdown);
7640
7660
  const wasValidInput = utils.checkCommandInput(outputKind, {
7641
7661
  nook: true,
@@ -7816,7 +7836,7 @@ async function fetchQuota(options) {
7816
7836
  }
7817
7837
  const sockSdk = sockSdkCResult.data;
7818
7838
  return await utils.handleApiCall(sockSdk.getQuota(), {
7819
- desc: 'token quota'
7839
+ description: 'token quota'
7820
7840
  });
7821
7841
  }
7822
7842
 
@@ -8286,7 +8306,7 @@ async function fetchPurlsShallowScore(purls, options) {
8286
8306
  }, {
8287
8307
  alerts: 'true'
8288
8308
  }), {
8289
- desc: 'looking up package'
8309
+ description: 'looking up package'
8290
8310
  });
8291
8311
  if (!batchPackageCResult.ok) {
8292
8312
  return batchPackageCResult;
@@ -8705,7 +8725,7 @@ async function outputPatchResult(result, outputKind) {
8705
8725
  if (!result.ok) {
8706
8726
  process.exitCode = result.code ?? 1;
8707
8727
  }
8708
- if (outputKind === 'json') {
8728
+ if (outputKind === constants.OUTPUT_JSON) {
8709
8729
  logger.logger.log(utils.serializeResultJson(result));
8710
8730
  return;
8711
8731
  }
@@ -8716,27 +8736,45 @@ async function outputPatchResult(result, outputKind) {
8716
8736
  const {
8717
8737
  patched
8718
8738
  } = result.data;
8739
+ logger.logger.log('');
8719
8740
  if (patched.length) {
8720
- logger.logger.group(`Successfully processed patches for ${patched.length} package(s):`);
8741
+ logger.logger.group(`Successfully processed patches for ${patched.length} ${words.pluralize('package', patched.length)}:`);
8721
8742
  for (const pkg of patched) {
8722
8743
  logger.logger.success(pkg);
8723
8744
  }
8724
8745
  logger.logger.groupEnd();
8725
8746
  } else {
8726
- logger.logger.info('No packages found requiring patches');
8747
+ logger.logger.warn('No packages found requiring patches');
8727
8748
  }
8728
8749
  logger.logger.log('');
8729
8750
  logger.logger.success('Patch command completed!');
8730
8751
  }
8731
8752
 
8732
- async function applyNPMPatches(patches, purlObjs, socketDir, dryRun) {
8753
+ async function applyNpmPatches(socketDir, patches, options) {
8754
+ const {
8755
+ cwd = process.cwd(),
8756
+ dryRun = false,
8757
+ purlObjs,
8758
+ spinner
8759
+ } = {
8760
+ __proto__: null,
8761
+ ...options
8762
+ };
8763
+ const wasSpinning = !!spinner?.isSpinning;
8764
+ spinner?.start();
8733
8765
  const patchLookup = new Map();
8734
8766
  for (const patchInfo of patches) {
8735
- const key = getLookupKey(patchInfo.purlObj);
8736
- patchLookup.set(key, patchInfo);
8767
+ patchLookup.set(patchInfo.purl, patchInfo);
8737
8768
  }
8738
- const nmPaths = await findNodeModulesPaths(process.cwd());
8739
- logger.logger.log(`Found ${nmPaths.length} node_modules ${words.pluralize('folder', nmPaths.length)}`);
8769
+ const nmPaths = await findNodeModulesPaths(cwd);
8770
+ spinner?.stop();
8771
+ logger.logger.log(`Found ${nmPaths.length} ${constants.NODE_MODULES} ${words.pluralize('folder', nmPaths.length)}`);
8772
+ logger.logger.group('');
8773
+ spinner?.start();
8774
+ const result = {
8775
+ passed: [],
8776
+ failed: []
8777
+ };
8740
8778
  for (const nmPath of nmPaths) {
8741
8779
  // eslint-disable-next-line no-await-in-loop
8742
8780
  const dirNames = await fs$2.readDirNames(nmPath);
@@ -8746,44 +8784,66 @@ async function applyNPMPatches(patches, purlObjs, socketDir, dryRun) {
8746
8784
  const pkgSubNames = isScoped ?
8747
8785
  // eslint-disable-next-line no-await-in-loop
8748
8786
  await fs$2.readDirNames(pkgPath) : [dirName];
8749
- try {
8750
- for (const pkgSubName of pkgSubNames) {
8751
- const dirFullName = isScoped ? `${dirName}/${pkgSubName}` : pkgSubName;
8752
- const pkgPath = path.join(nmPath, dirFullName);
8787
+ for (const pkgSubName of pkgSubNames) {
8788
+ const dirFullName = isScoped ? `${dirName}/${pkgSubName}` : pkgSubName;
8789
+ const pkgPath = path.join(nmPath, dirFullName);
8790
+ // eslint-disable-next-line no-await-in-loop
8791
+ const pkgJson = await packages.readPackageJson(pkgPath, {
8792
+ throws: false
8793
+ });
8794
+ if (!strings.isNonEmptyString(pkgJson?.name) || !strings.isNonEmptyString(pkgJson?.version)) {
8795
+ continue;
8796
+ }
8797
+ const purl = `pkg:npm/${pkgJson.name}@${pkgJson.version}`;
8798
+ const purlObj = utils.getPurlObject(purl, {
8799
+ throws: false
8800
+ });
8801
+ if (!purlObj) {
8802
+ continue;
8803
+ }
8804
+
8805
+ // Skip if specific packages requested and this isn't one of them
8806
+ if (purlObjs?.length && purlObjs.findIndex(p => p.type === constants.NPM && p.namespace === purlObj.namespace && p.name === purlObj.name) === -1) {
8807
+ continue;
8808
+ }
8809
+ const patchInfo = patchLookup.get(purl);
8810
+ if (!patchInfo) {
8811
+ continue;
8812
+ }
8813
+ spinner?.stop();
8814
+ logger.logger.log(`Found match: ${pkgJson.name}@${pkgJson.version} at ${pkgPath}`);
8815
+ logger.logger.log(`Patch key: ${patchInfo.key}`);
8816
+ logger.logger.group(`Processing files:`);
8817
+ spinner?.start();
8818
+ let passed = true;
8819
+ for (const {
8820
+ 0: fileName,
8821
+ 1: fileInfo
8822
+ } of Object.entries(patchInfo.patch.files)) {
8753
8823
  // eslint-disable-next-line no-await-in-loop
8754
- const pkgJson = await packages.readPackageJson(pkgPath, {
8755
- throws: false
8824
+ const filePatchPassed = await processFilePatch(pkgPath, fileName, fileInfo, socketDir, {
8825
+ dryRun,
8826
+ spinner
8756
8827
  });
8757
- if (!strings.isNonEmptyString(pkgJson?.name) || !strings.isNonEmptyString(pkgJson?.version)) {
8758
- continue;
8759
- }
8760
- const pkgFullName = pkgJson.name;
8761
- const purlObj = utils.getPurlObject(`pkg:npm/${pkgFullName}`);
8762
- // Skip if specific packages requested and this isn't one of them
8763
- if (purlObjs.findIndex(p => p.type === 'npm' && p.namespace === purlObj.namespace && p.name === purlObj.name) === -1) {
8764
- continue;
8765
- }
8766
- const patchInfo = patchLookup.get(getLookupKey(purlObj));
8767
- if (!patchInfo) {
8768
- continue;
8769
- }
8770
- logger.logger.log(`Found match: ${pkgFullName}@${pkgJson.version} at ${pkgPath}`);
8771
- logger.logger.log(`Patch key: ${patchInfo.key}`);
8772
- logger.logger.group(`Processing files:`);
8773
- for (const {
8774
- 0: fileName,
8775
- 1: fileInfo
8776
- } of Object.entries(patchInfo.patch.files)) {
8777
- // eslint-disable-next-line no-await-in-loop
8778
- await processFilePatch(pkgPath, fileName, fileInfo, dryRun, socketDir);
8828
+ if (!filePatchPassed) {
8829
+ passed = false;
8779
8830
  }
8780
- logger.logger.groupEnd();
8781
8831
  }
8782
- } catch (error) {
8783
- logger.logger.error(`Error processing ${nmPath}:`, error);
8832
+ logger.logger.groupEnd();
8833
+ if (passed) {
8834
+ result.passed.push(purl);
8835
+ } else {
8836
+ result.failed.push(purl);
8837
+ }
8784
8838
  }
8785
8839
  }
8786
8840
  }
8841
+ spinner?.stop();
8842
+ logger.logger.groupEnd();
8843
+ if (wasSpinning) {
8844
+ spinner.start();
8845
+ }
8846
+ return result;
8787
8847
  }
8788
8848
  async function computeSHA256(filepath) {
8789
8849
  try {
@@ -8805,52 +8865,101 @@ async function findNodeModulesPaths(cwd) {
8805
8865
  return await vendor.outExports.glob([`**/${constants.NODE_MODULES}`], {
8806
8866
  absolute: true,
8807
8867
  cwd: path.dirname(rootNmPath),
8868
+ dot: true,
8808
8869
  onlyDirectories: true
8809
8870
  });
8810
8871
  }
8811
- function getLookupKey(purlObj) {
8812
- const fullName = purlObj.namespace ? `${purlObj.namespace}/${purlObj.name}` : purlObj.name;
8813
- return `${fullName}@${purlObj.version}`;
8814
- }
8815
- async function processFilePatch(pkgPath, fileName, fileInfo, dryRun, socketDir) {
8872
+ async function processFilePatch(pkgPath, fileName, fileInfo, socketDir, options) {
8873
+ const {
8874
+ dryRun,
8875
+ spinner
8876
+ } = {
8877
+ __proto__: null,
8878
+ ...options
8879
+ };
8880
+ const wasSpinning = !!spinner?.isSpinning;
8881
+ spinner?.stop();
8816
8882
  const filepath = path.join(pkgPath, fileName);
8817
8883
  if (!fs$1.existsSync(filepath)) {
8818
8884
  logger.logger.log(`File not found: ${fileName}`);
8819
- return;
8885
+ if (wasSpinning) {
8886
+ spinner?.start();
8887
+ }
8888
+ return false;
8820
8889
  }
8821
8890
  const currentHash = await computeSHA256(filepath);
8822
8891
  if (!currentHash) {
8823
8892
  logger.logger.log(`Failed to compute hash for: ${fileName}`);
8824
- return;
8825
- }
8826
- if (currentHash === fileInfo.beforeHash) {
8827
- logger.logger.success(`File matches expected hash: ${fileName}`);
8828
- logger.logger.log(`Current hash: ${currentHash}`);
8829
- logger.logger.log(`Ready to patch to: ${fileInfo.afterHash}`);
8830
- if (dryRun) {
8831
- logger.logger.log(`(dry run - no changes made)`);
8832
- } else {
8833
- const blobPath = path.join(socketDir, 'blobs', fileInfo.afterHash);
8834
- if (!fs$1.existsSync(blobPath)) {
8835
- logger.logger.fail(`Error: Patch file not found at ${blobPath}`);
8836
- return;
8837
- }
8838
- try {
8839
- await fs$1.promises.copyFile(blobPath, filepath);
8840
- logger.logger.success(`Patch applied successfully`);
8841
- } catch (error) {
8842
- logger.logger.error('Error applying patch:', error);
8843
- }
8893
+ if (wasSpinning) {
8894
+ spinner?.start();
8844
8895
  }
8845
- } else if (currentHash === fileInfo.afterHash) {
8896
+ return false;
8897
+ }
8898
+ if (currentHash === fileInfo.afterHash) {
8846
8899
  logger.logger.success(`File already patched: ${fileName}`);
8900
+ logger.logger.group();
8847
8901
  logger.logger.log(`Current hash: ${currentHash}`);
8848
- } else {
8902
+ logger.logger.groupEnd();
8903
+ if (wasSpinning) {
8904
+ spinner?.start();
8905
+ }
8906
+ return true;
8907
+ }
8908
+ if (currentHash !== fileInfo.beforeHash) {
8849
8909
  logger.logger.fail(`File hash mismatch: ${fileName}`);
8910
+ logger.logger.group();
8850
8911
  logger.logger.log(`Expected: ${fileInfo.beforeHash}`);
8851
8912
  logger.logger.log(`Current: ${currentHash}`);
8852
8913
  logger.logger.log(`Target: ${fileInfo.afterHash}`);
8914
+ logger.logger.groupEnd();
8915
+ if (wasSpinning) {
8916
+ spinner?.start();
8917
+ }
8918
+ return false;
8853
8919
  }
8920
+ logger.logger.success(`File matches expected hash: ${fileName}`);
8921
+ logger.logger.group();
8922
+ logger.logger.log(`Current hash: ${currentHash}`);
8923
+ logger.logger.log(`Ready to patch to: ${fileInfo.afterHash}`);
8924
+ logger.logger.group();
8925
+ if (dryRun) {
8926
+ logger.logger.log(`(dry run - no changes made)`);
8927
+ logger.logger.groupEnd();
8928
+ logger.logger.groupEnd();
8929
+ if (wasSpinning) {
8930
+ spinner?.start();
8931
+ }
8932
+ return false;
8933
+ }
8934
+ const blobPath = path.join(socketDir, 'blobs', fileInfo.afterHash);
8935
+ if (!fs$1.existsSync(blobPath)) {
8936
+ logger.logger.fail(`Error: Patch file not found at ${blobPath}`);
8937
+ logger.logger.groupEnd();
8938
+ logger.logger.groupEnd();
8939
+ if (wasSpinning) {
8940
+ spinner?.start();
8941
+ }
8942
+ return false;
8943
+ }
8944
+ spinner?.start();
8945
+ let result = true;
8946
+ try {
8947
+ await fs$1.promises.copyFile(blobPath, filepath);
8948
+ logger.logger.success(`Patch applied successfully`);
8949
+ } catch (e) {
8950
+ logger.logger.error('Error applying patch');
8951
+ require$$9.debugDir('inspect', {
8952
+ error: e
8953
+ });
8954
+ result = false;
8955
+ }
8956
+ logger.logger.groupEnd();
8957
+ logger.logger.groupEnd();
8958
+ spinner?.stop();
8959
+ if (wasSpinning) {
8960
+ spinner?.start();
8961
+ }
8962
+ return result;
8854
8963
  }
8855
8964
  async function handlePatch({
8856
8965
  cwd,
@@ -8860,7 +8969,7 @@ async function handlePatch({
8860
8969
  spinner
8861
8970
  }) {
8862
8971
  try {
8863
- const dotSocketDirPath = path.join(cwd, '.socket');
8972
+ const dotSocketDirPath = path.join(cwd, constants.DOT_SOCKET);
8864
8973
  const manifestPath = path.join(dotSocketDirPath, 'manifest.json');
8865
8974
  const manifestContent = await fs$1.promises.readFile(manifestPath, 'utf-8');
8866
8975
  const manifestData = JSON.parse(manifestContent);
@@ -8873,7 +8982,11 @@ async function handlePatch({
8873
8982
  0: key,
8874
8983
  1: patch
8875
8984
  } of Object.entries(validated.patches)) {
8876
- const purlObj = utils.getPurlObject(key, {
8985
+ const purl = utils.normalizePurl(key);
8986
+ if (purls.length && !purls.includes(purl)) {
8987
+ continue;
8988
+ }
8989
+ const purlObj = utils.getPurlObject(purl, {
8877
8990
  throws: false
8878
8991
  });
8879
8992
  if (!purlObj) {
@@ -8887,46 +9000,50 @@ async function handlePatch({
8887
9000
  patches.push({
8888
9001
  key,
8889
9002
  patch,
9003
+ purl,
8890
9004
  purlObj
8891
9005
  });
8892
9006
  }
8893
- spinner.stop();
8894
- logger.logger.log('');
8895
- if (purlObjs.length) {
8896
- logger.logger.info(`Checking patches for: ${arrays.joinAnd(purls)}`);
9007
+ if (purls.length) {
9008
+ spinner.start(`Checking patches for: ${arrays.joinAnd(purls)}`);
8897
9009
  } else {
8898
- logger.logger.info('Scanning all dependencies for available patches');
9010
+ spinner.start('Scanning all dependencies for available patches');
8899
9011
  }
8900
- logger.logger.log('');
9012
+ const patched = [];
8901
9013
  const npmPatches = patchesByEcosystem.get(constants.NPM);
8902
9014
  if (npmPatches) {
8903
- await applyNPMPatches(npmPatches, purlObjs, dotSocketDirPath, dryRun);
9015
+ const patchingResults = await applyNpmPatches(dotSocketDirPath, npmPatches, {
9016
+ cwd,
9017
+ dryRun,
9018
+ purlObjs,
9019
+ spinner
9020
+ });
9021
+ patched.push(...patchingResults.passed);
8904
9022
  }
8905
- const result = {
9023
+ spinner.stop();
9024
+ await outputPatchResult({
8906
9025
  ok: true,
8907
9026
  data: {
8908
- patched: purls.length ? purls : ['patched successfully']
9027
+ patched
8909
9028
  }
8910
- };
8911
- await outputPatchResult(result, outputKind);
9029
+ }, outputKind);
8912
9030
  } catch (e) {
8913
9031
  spinner.stop();
8914
9032
  let message = 'Failed to apply patches';
8915
- let cause = e?.message || 'Unknown error';
9033
+ let cause = e?.message || constants.UNKNOWN_ERROR;
8916
9034
  if (e instanceof SyntaxError) {
8917
- message = 'Invalid JSON in manifest.json';
9035
+ message = `Invalid JSON in ${registryConstants.MANIFEST_JSON}`;
8918
9036
  cause = e.message;
8919
9037
  } else if (e instanceof Error && 'issues' in e) {
8920
9038
  message = 'Schema validation failed';
8921
9039
  cause = String(e);
8922
9040
  }
8923
- const result = {
9041
+ await outputPatchResult({
8924
9042
  ok: false,
8925
9043
  code: 1,
8926
9044
  message,
8927
9045
  cause
8928
- };
8929
- await outputPatchResult(result, outputKind);
9046
+ }, outputKind);
8930
9047
  }
8931
9048
  }
8932
9049
 
@@ -8994,14 +9111,15 @@ async function run$k(argv, importMeta, {
8994
9111
  // Note: path.resolve vs .join:
8995
9112
  // If given path is absolute then cwd should not affect it.
8996
9113
  cwd = path.resolve(process.cwd(), cwd);
8997
- const dotSocketDirPath = path.join(cwd, '.socket');
9114
+ const dotSocketDirPath = path.join(cwd, constants.DOT_SOCKET);
8998
9115
  if (!fs$1.existsSync(dotSocketDirPath)) {
8999
- logger.logger.error('Error: No .socket directory found in current directory');
9116
+ logger.logger.error(`Error: No ${constants.DOT_SOCKET} directory found in current directory`);
9000
9117
  return;
9001
9118
  }
9002
- const manifestPath = path.join(dotSocketDirPath, 'manifest.json');
9119
+ const manifestPath = path.join(dotSocketDirPath, constants.MANIFEST_JSON);
9003
9120
  if (!fs$1.existsSync(manifestPath)) {
9004
- logger.logger.error('Error: No manifest.json found in .socket directory');
9121
+ logger.logger.error(`Error: No ${constants.MANIFEST_JSON} found in ${constants.DOT_SOCKET} directory`);
9122
+ return;
9005
9123
  }
9006
9124
  const {
9007
9125
  spinner
@@ -9171,7 +9289,7 @@ async function fetchCreateRepo(config, options) {
9171
9289
  name: repoName,
9172
9290
  visibility
9173
9291
  }), {
9174
- desc: 'to create a repository'
9292
+ description: 'to create a repository'
9175
9293
  });
9176
9294
  }
9177
9295
 
@@ -9293,7 +9411,9 @@ async function run$h(argv, importMeta, {
9293
9411
  const noLegacy = !cli.flags['repoName'];
9294
9412
  const [repoName = ''] = cli.input;
9295
9413
  const hasApiToken = utils.hasDefaultApiToken();
9296
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
9414
+ const {
9415
+ 0: orgSlug
9416
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
9297
9417
  const outputKind = utils.getOutputKind(json, markdown);
9298
9418
  const wasValidInput = utils.checkCommandInput(outputKind, {
9299
9419
  nook: true,
@@ -9303,7 +9423,7 @@ async function run$h(argv, importMeta, {
9303
9423
  }, {
9304
9424
  nook: true,
9305
9425
  test: noLegacy,
9306
- message: 'Legacy flags are no longer supported. See v1 migration guide.',
9426
+ message: `Legacy flags are no longer supported. See ${vendor.terminalLinkExports('v1 migration guide', constants.V1_MIGRATION_GUIDE_URL)}.`,
9307
9427
  fail: `received legacy flags`
9308
9428
  }, {
9309
9429
  test: !!repoName,
@@ -9345,7 +9465,7 @@ async function fetchDeleteRepo(orgSlug, repoName, options) {
9345
9465
  }
9346
9466
  const sockSdk = sockSdkCResult.data;
9347
9467
  return await utils.handleApiCall(sockSdk.deleteOrgRepo(orgSlug, repoName), {
9348
- desc: 'to delete a repository'
9468
+ description: 'to delete a repository'
9349
9469
  });
9350
9470
  }
9351
9471
 
@@ -9427,12 +9547,14 @@ async function run$g(argv, importMeta, {
9427
9547
  const noLegacy = !cli.flags['repoName'];
9428
9548
  const [repoName = ''] = cli.input;
9429
9549
  const hasApiToken = utils.hasDefaultApiToken();
9430
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
9550
+ const {
9551
+ 0: orgSlug
9552
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
9431
9553
  const outputKind = utils.getOutputKind(json, markdown);
9432
9554
  const wasValidInput = utils.checkCommandInput(outputKind, {
9433
9555
  nook: true,
9434
9556
  test: noLegacy,
9435
- message: 'Legacy flags are no longer supported. See v1 migration guide.',
9557
+ message: `Legacy flags are no longer supported. See ${vendor.terminalLinkExports('v1 migration guide', constants.V1_MIGRATION_GUIDE_URL)}.`,
9436
9558
  fail: `received legacy flags`
9437
9559
  }, {
9438
9560
  nook: true,
@@ -9492,7 +9614,7 @@ async function fetchListAllRepos(orgSlug, options) {
9492
9614
  // max
9493
9615
  page: String(nextPage)
9494
9616
  }), {
9495
- desc: 'list of repositories'
9617
+ description: 'list of repositories'
9496
9618
  });
9497
9619
  if (!orgRepoListCResult.ok) {
9498
9620
  return orgRepoListCResult;
@@ -9537,7 +9659,7 @@ async function fetchListRepos(config, options) {
9537
9659
  per_page: String(perPage),
9538
9660
  page: String(page)
9539
9661
  }), {
9540
- desc: 'list of repositories'
9662
+ description: 'list of repositories'
9541
9663
  });
9542
9664
  }
9543
9665
 
@@ -9717,7 +9839,9 @@ async function run$f(argv, importMeta, {
9717
9839
  const dryRun = !!cli.flags['dryRun'];
9718
9840
  const interactive = !!cli.flags['interactive'];
9719
9841
  const hasApiToken = utils.hasDefaultApiToken();
9720
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
9842
+ const {
9843
+ 0: orgSlug
9844
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
9721
9845
  const outputKind = utils.getOutputKind(json, markdown);
9722
9846
  const wasValidInput = utils.checkCommandInput(outputKind, {
9723
9847
  nook: true,
@@ -9789,7 +9913,7 @@ async function fetchUpdateRepo(config, options) {
9789
9913
  orgSlug,
9790
9914
  visibility
9791
9915
  }), {
9792
- desc: 'to update a repository'
9916
+ description: 'to update a repository'
9793
9917
  });
9794
9918
  }
9795
9919
 
@@ -9910,12 +10034,14 @@ async function run$e(argv, importMeta, {
9910
10034
  const noLegacy = !cli.flags['repoName'];
9911
10035
  const [repoName = ''] = cli.input;
9912
10036
  const hasApiToken = utils.hasDefaultApiToken();
9913
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
10037
+ const {
10038
+ 0: orgSlug
10039
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
9914
10040
  const outputKind = utils.getOutputKind(json, markdown);
9915
10041
  const wasValidInput = utils.checkCommandInput(outputKind, {
9916
10042
  nook: true,
9917
10043
  test: noLegacy,
9918
- message: 'Legacy flags are no longer supported. See v1 migration guide.',
10044
+ message: `Legacy flags are no longer supported. See ${vendor.terminalLinkExports('v1 migration guide', constants.V1_MIGRATION_GUIDE_URL)}.`,
9919
10045
  fail: `received legacy flags`
9920
10046
  }, {
9921
10047
  nook: true,
@@ -9962,7 +10088,7 @@ async function fetchViewRepo(orgSlug, repoName, options) {
9962
10088
  }
9963
10089
  const sockSdk = sockSdkCResult.data;
9964
10090
  return await utils.handleApiCall(sockSdk.getOrgRepo(orgSlug, repoName), {
9965
- desc: 'repository data'
10091
+ description: 'repository data'
9966
10092
  });
9967
10093
  }
9968
10094
 
@@ -10070,12 +10196,14 @@ async function run$d(argv, importMeta, {
10070
10196
  const noLegacy = !cli.flags['repoName'];
10071
10197
  const [repoName = ''] = cli.input;
10072
10198
  const hasApiToken = utils.hasDefaultApiToken();
10073
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
10199
+ const {
10200
+ 0: orgSlug
10201
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
10074
10202
  const outputKind = utils.getOutputKind(json, markdown);
10075
10203
  const wasValidInput = utils.checkCommandInput(outputKind, {
10076
10204
  nook: true,
10077
10205
  test: noLegacy,
10078
- message: 'Legacy flags are no longer supported. See v1 migration guide.',
10206
+ message: `Legacy flags are no longer supported. See ${vendor.terminalLinkExports('v1 migration guide', constants.V1_MIGRATION_GUIDE_URL)}.`,
10079
10207
  fail: `received legacy flags`
10080
10208
  }, {
10081
10209
  nook: true,
@@ -10191,29 +10319,31 @@ const generalFlags$1 = {
10191
10319
  },
10192
10320
  branch: {
10193
10321
  type: 'string',
10194
- shortFlag: 'b',
10195
- description: 'Branch name'
10322
+ default: '',
10323
+ description: 'Branch name',
10324
+ shortFlag: 'b'
10196
10325
  },
10197
10326
  commitHash: {
10198
10327
  type: 'string',
10199
- shortFlag: 'ch',
10200
10328
  default: '',
10201
- description: 'Commit hash'
10329
+ description: 'Commit hash',
10330
+ shortFlag: 'ch'
10202
10331
  },
10203
10332
  commitMessage: {
10204
10333
  type: 'string',
10205
- shortFlag: 'm',
10206
10334
  default: '',
10207
- description: 'Commit message'
10335
+ description: 'Commit message',
10336
+ shortFlag: 'm'
10208
10337
  },
10209
10338
  committers: {
10210
10339
  type: 'string',
10211
- shortFlag: 'c',
10212
10340
  default: '',
10213
- description: 'Committers'
10341
+ description: 'Committers',
10342
+ shortFlag: 'c'
10214
10343
  },
10215
10344
  cwd: {
10216
10345
  type: 'string',
10346
+ default: '',
10217
10347
  description: 'working directory, defaults to process.cwd()'
10218
10348
  },
10219
10349
  defaultBranch: {
@@ -10228,11 +10358,13 @@ const generalFlags$1 = {
10228
10358
  },
10229
10359
  pullRequest: {
10230
10360
  type: 'number',
10231
- shortFlag: 'pr',
10232
- description: 'Pull request number'
10361
+ default: 0,
10362
+ description: 'Pull request number',
10363
+ shortFlag: 'pr'
10233
10364
  },
10234
10365
  org: {
10235
10366
  type: 'string',
10367
+ default: '',
10236
10368
  description: 'Force override the organization slug, overrides the default org from config'
10237
10369
  },
10238
10370
  reach: {
@@ -10262,14 +10394,14 @@ const generalFlags$1 = {
10262
10394
  setAsAlertsPage: {
10263
10395
  type: 'boolean',
10264
10396
  default: true,
10265
- aliases: ['pendingHead'],
10266
- description: 'When true and if this is the "default branch" then this Scan will be the one reflected on your alerts page. See help for details. Defaults to true.'
10397
+ description: 'When true and if this is the "default branch" then this Scan will be the one reflected on your alerts page. See help for details. Defaults to true.',
10398
+ aliases: ['pendingHead']
10267
10399
  },
10268
10400
  tmp: {
10269
10401
  type: 'boolean',
10270
- shortFlag: 't',
10271
10402
  default: false,
10272
- description: 'Set the visibility (true/false) of the scan in your dashboard.'
10403
+ description: 'Set the visibility (true/false) of the scan in your dashboard.',
10404
+ shortFlag: 't'
10273
10405
  }
10274
10406
  };
10275
10407
  const cmdScanCreate = {
@@ -10383,9 +10515,11 @@ async function run$c(argv, importMeta, {
10383
10515
  repo: repoName,
10384
10516
  report
10385
10517
  } = cli.flags;
10386
- let [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
10518
+ let {
10519
+ 0: orgSlug
10520
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
10387
10521
  const processCwd = process.cwd();
10388
- const cwd = cwdOverride && cwdOverride !== processCwd ? path.resolve(processCwd, String(cwdOverride)) : processCwd;
10522
+ const cwd = cwdOverride && cwdOverride !== '.' && cwdOverride !== processCwd ? path.resolve(processCwd, cwdOverride) : processCwd;
10389
10523
  const sockJson = utils.readOrDefaultSocketJson(cwd);
10390
10524
 
10391
10525
  // Note: This needs meow booleanDefault=undefined.
@@ -10571,7 +10705,7 @@ async function fetchDeleteOrgFullScan(orgSlug, scanId, options) {
10571
10705
  }
10572
10706
  const sockSdk = sockSdkCResult.data;
10573
10707
  return await utils.handleApiCall(sockSdk.deleteOrgFullScan(orgSlug, scanId), {
10574
- desc: 'to delete a scan'
10708
+ description: 'to delete a scan'
10575
10709
  });
10576
10710
  }
10577
10711
 
@@ -10820,7 +10954,10 @@ async function handleMarkdown(data) {
10820
10954
  logger.logger.log('');
10821
10955
  logger.logger.log('This Scan was considered to be the "base" / "from" / "before" Scan.');
10822
10956
  logger.logger.log('');
10823
- for (const [key, value] of Object.entries(data.before)) {
10957
+ for (const {
10958
+ 0: key,
10959
+ 1: value
10960
+ } of Object.entries(data.before)) {
10824
10961
  if (key === 'pull_request' && !value) {
10825
10962
  continue;
10826
10963
  }
@@ -10834,7 +10971,10 @@ async function handleMarkdown(data) {
10834
10971
  logger.logger.log('');
10835
10972
  logger.logger.log('This Scan was considered to be the "head" / "to" / "after" Scan.');
10836
10973
  logger.logger.log('');
10837
- for (const [key, value] of Object.entries(data.after)) {
10974
+ for (const {
10975
+ 0: key,
10976
+ 1: value
10977
+ } of Object.entries(data.after)) {
10838
10978
  if (key === 'pull_request' && !value) {
10839
10979
  continue;
10840
10980
  }
@@ -10954,7 +11094,9 @@ async function run$a(argv, importMeta, {
10954
11094
  id2 = id2.slice(SOCKET_SBOM_URL_PREFIX_LENGTH);
10955
11095
  }
10956
11096
  const hasApiToken = utils.hasDefaultApiToken();
10957
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
11097
+ const {
11098
+ 0: orgSlug
11099
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
10958
11100
  const outputKind = utils.getOutputKind(json, markdown);
10959
11101
  const wasValidInput = utils.checkCommandInput(outputKind, {
10960
11102
  test: !!(id1 && id2),
@@ -11677,10 +11819,12 @@ async function run$9(argv, importMeta, {
11677
11819
  },
11678
11820
  githubToken: {
11679
11821
  type: 'string',
11822
+ default: constants.default.ENV.SOCKET_CLI_GITHUB_TOKEN,
11680
11823
  description: 'Required GitHub token for authentication.\nMay set environment variable GITHUB_TOKEN or SOCKET_CLI_GITHUB_TOKEN instead.'
11681
11824
  },
11682
11825
  githubApiUrl: {
11683
11826
  type: 'string',
11827
+ default: DEFAULT_GITHUB_URL,
11684
11828
  description: `Base URL of the GitHub API (default: ${DEFAULT_GITHUB_URL})`
11685
11829
  },
11686
11830
  interactive: {
@@ -11690,14 +11834,17 @@ async function run$9(argv, importMeta, {
11690
11834
  },
11691
11835
  org: {
11692
11836
  type: 'string',
11837
+ default: '',
11693
11838
  description: 'Force override the organization slug, overrides the default org from config'
11694
11839
  },
11695
11840
  orgGithub: {
11696
11841
  type: 'string',
11842
+ default: '',
11697
11843
  description: 'Alternate GitHub Org if the name is different than the Socket Org'
11698
11844
  },
11699
11845
  repos: {
11700
11846
  type: 'string',
11847
+ default: '',
11701
11848
  description: 'List of repos to target in a comma-separated format (e.g., repo1,repo2). If not specified, the script will pull the list from Socket and ask you to pick one. Use --all to use them all.'
11702
11849
  }
11703
11850
  },
@@ -11752,7 +11899,9 @@ async function run$9(argv, importMeta, {
11752
11899
  // Note: path.resolve vs .join:
11753
11900
  // If given path is absolute then cwd should not affect it.
11754
11901
  cwd = path.resolve(process.cwd(), cwd);
11755
- let [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
11902
+ let {
11903
+ 0: orgSlug
11904
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
11756
11905
  const sockJson = utils.readOrDefaultSocketJson(cwd);
11757
11906
  if (all === undefined) {
11758
11907
  if (sockJson.defaults?.scan?.github?.all !== undefined) {
@@ -11886,7 +12035,7 @@ async function fetchOrgFullScanList(config, options) {
11886
12035
  page: String(page),
11887
12036
  per_page: String(perPage)
11888
12037
  }), {
11889
- desc: 'list of scans'
12038
+ description: 'list of scans'
11890
12039
  });
11891
12040
  }
11892
12041
 
@@ -12066,12 +12215,14 @@ async function run$8(argv, importMeta, {
12066
12215
  const [repo = '', branchArg = ''] = cli.input;
12067
12216
  const branch = String(branchFlag || branchArg || '');
12068
12217
  const hasApiToken = utils.hasDefaultApiToken();
12069
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
12218
+ const {
12219
+ 0: orgSlug
12220
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
12070
12221
  const outputKind = utils.getOutputKind(json, markdown);
12071
12222
  const wasValidInput = utils.checkCommandInput(outputKind, {
12072
12223
  nook: true,
12073
12224
  test: noLegacy,
12074
- message: 'Legacy flags are no longer supported. See v1 migration guide.',
12225
+ message: `Legacy flags are no longer supported. See ${vendor.terminalLinkExports('v1 migration guide', constants.V1_MIGRATION_GUIDE_URL)}.`,
12075
12226
  fail: `received legacy flags`
12076
12227
  }, {
12077
12228
  nook: true,
@@ -12127,7 +12278,7 @@ async function fetchScanMetadata(orgSlug, scanId, options) {
12127
12278
  }
12128
12279
  const sockSdk = sockSdkCResult.data;
12129
12280
  return await utils.handleApiCall(sockSdk.getOrgFullScanMetadata(orgSlug, scanId), {
12130
- desc: 'meta data for a full scan'
12281
+ description: 'meta data for a full scan'
12131
12282
  });
12132
12283
  }
12133
12284
 
@@ -12147,7 +12298,10 @@ async function outputScanMetadata(result, scanId, outputKind) {
12147
12298
  logger.logger.log('# Scan meta data\n');
12148
12299
  }
12149
12300
  logger.logger.log(`Scan ID: ${scanId}\n`);
12150
- for (const [key, value] of Object.entries(result.data)) {
12301
+ for (const {
12302
+ 0: key,
12303
+ 1: value
12304
+ } of Object.entries(result.data)) {
12151
12305
  if (['id', 'updated_at', 'organization_id', 'repository_id', 'commit_hash', 'html_report_url'].includes(key)) {
12152
12306
  continue;
12153
12307
  }
@@ -12223,7 +12377,9 @@ async function run$7(argv, importMeta, {
12223
12377
  const interactive = !!cli.flags['interactive'];
12224
12378
  const [scanId = ''] = cli.input;
12225
12379
  const hasApiToken = utils.hasDefaultApiToken();
12226
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
12380
+ const {
12381
+ 0: orgSlug
12382
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
12227
12383
  const outputKind = utils.getOutputKind(json, markdown);
12228
12384
  const wasValidInput = utils.checkCommandInput(outputKind, {
12229
12385
  nook: true,
@@ -12338,10 +12494,12 @@ const generalFlags = {
12338
12494
  ...flags.outputFlags,
12339
12495
  cwd: {
12340
12496
  type: 'string',
12497
+ default: '',
12341
12498
  description: 'working directory, defaults to process.cwd()'
12342
12499
  },
12343
12500
  org: {
12344
12501
  type: 'string',
12502
+ default: '',
12345
12503
  description: 'Force override the organization slug, overrides the default org from config'
12346
12504
  }
12347
12505
  };
@@ -12420,7 +12578,7 @@ async function run$6(argv, importMeta, {
12420
12578
  reachEcosystems.push(ecosystem);
12421
12579
  }
12422
12580
  const processCwd = process.cwd();
12423
- const cwd = cwdOverride && cwdOverride !== processCwd ? path.resolve(processCwd, String(cwdOverride)) : processCwd;
12581
+ const cwd = cwdOverride && cwdOverride !== '.' && cwdOverride !== processCwd ? path.resolve(processCwd, cwdOverride) : processCwd;
12424
12582
 
12425
12583
  // Accept zero or more paths. Default to cwd() if none given.
12426
12584
  let targets = cli.input || [cwd];
@@ -12429,7 +12587,9 @@ async function run$6(argv, importMeta, {
12429
12587
  if (!targets.length && !dryRun && interactive) {
12430
12588
  targets = await suggestTarget();
12431
12589
  }
12432
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
12590
+ const {
12591
+ 0: orgSlug
12592
+ } = await utils.determineOrgSlug(orgFlag, interactive, dryRun);
12433
12593
  const hasApiToken = utils.hasDefaultApiToken();
12434
12594
  const outputKind = utils.getOutputKind(json, markdown);
12435
12595
  const wasValidInput = utils.checkCommandInput(outputKind, {
@@ -12579,7 +12739,9 @@ async function run$5(argv, importMeta, {
12579
12739
  const short = !!cli.flags['short'];
12580
12740
  const [scanId = '', filepath = ''] = cli.input;
12581
12741
  const hasApiToken = utils.hasDefaultApiToken();
12582
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
12742
+ const {
12743
+ 0: orgSlug
12744
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
12583
12745
  const outputKind = utils.getOutputKind(json, markdown);
12584
12746
  const wasValidInput = utils.checkCommandInput(outputKind, {
12585
12747
  nook: true,
@@ -13106,7 +13268,7 @@ async function streamScan(orgSlug, scanId, options) {
13106
13268
 
13107
13269
  // Note: this will write to stdout or target file. It's not a noop
13108
13270
  return await utils.handleApiCall(sockSdk.getOrgFullScan(orgSlug, scanId, file === '-' ? undefined : file), {
13109
- desc: 'a scan'
13271
+ description: 'a scan'
13110
13272
  });
13111
13273
  }
13112
13274
 
@@ -13176,7 +13338,9 @@ async function run$3(argv, importMeta, {
13176
13338
  const interactive = !!cli.flags['interactive'];
13177
13339
  const [scanId = '', file = ''] = cli.input;
13178
13340
  const hasApiToken = utils.hasDefaultApiToken();
13179
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
13341
+ const {
13342
+ 0: orgSlug
13343
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
13180
13344
  const outputKind = utils.getOutputKind(json, markdown);
13181
13345
  const wasValidInput = utils.checkCommandInput(outputKind, {
13182
13346
  nook: true,
@@ -13611,7 +13775,9 @@ async function run$2(argv, importMeta, {
13611
13775
  logger.logger.info(`Warning: ignoring these excessive args: ${Array.from(argSet).join(', ')}`);
13612
13776
  }
13613
13777
  const hasApiToken = utils.hasDefaultApiToken();
13614
- const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
13778
+ const {
13779
+ 0: orgSlug
13780
+ } = await utils.determineOrgSlug(String(orgFlag || ''), interactive, dryRun);
13615
13781
  const outputKind = utils.getOutputKind(json, markdown);
13616
13782
  const wasValidInput = utils.checkCommandInput(outputKind, {
13617
13783
  nook: true,
@@ -14224,5 +14390,5 @@ void (async () => {
14224
14390
  await utils.captureException(e);
14225
14391
  }
14226
14392
  })();
14227
- //# debugId=b4ee2d73-3b07-422f-bbc3-db4f36cb62dc
14393
+ //# debugId=d759edd3-a3fb-4517-b02a-4526b3195d3
14228
14394
  //# sourceMappingURL=cli.js.map