@socketsecurity/cli-with-sentry 1.0.79 → 1.0.81

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -3,7 +3,7 @@

  var require$$0 = require('node:url');
  var vendor = require('./vendor.js');
- var debug = require('../external/@socketsecurity/registry/lib/debug');
+ var require$$6 = require('../external/@socketsecurity/registry/lib/debug');
  var logger = require('../external/@socketsecurity/registry/lib/logger');
  var utils = require('./utils.js');
  var fs = require('node:fs/promises');
@@ -15,6 +15,7 @@ var prompts = require('../external/@socketsecurity/registry/lib/prompts');
  var fs$1 = require('node:fs');
  var path = require('node:path');
  var spawn = require('../external/@socketsecurity/registry/lib/spawn');
+ var fs$2 = require('../external/@socketsecurity/registry/lib/fs');
  var strings = require('../external/@socketsecurity/registry/lib/strings');
  var arrays = require('../external/@socketsecurity/registry/lib/arrays');
  var registry = require('../external/@socketsecurity/registry');
@@ -22,7 +23,6 @@ var npm = require('../external/@socketsecurity/registry/lib/npm');
  var packages = require('../external/@socketsecurity/registry/lib/packages');
  var sorts = require('../external/@socketsecurity/registry/lib/sorts');
  var regexps = require('../external/@socketsecurity/registry/lib/regexps');
- var fs$2 = require('../external/@socketsecurity/registry/lib/fs');
  var shadowNpmInject = require('./shadow-npm-inject.js');
  var require$$7 = require('../external/@socketsecurity/registry/lib/objects');
  var shadowNpmBin = require('./shadow-npm-bin.js');
@@ -619,8 +619,8 @@ ${table}
  } catch (e) {
  process.exitCode = 1;
  logger.logger.fail('There was a problem converting the logs to Markdown, please try the `--json` flag');
- debug.debugFn('error', 'caught: unexpected error');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'caught: unexpected error');
+ require$$6.debugDir('inspect', {
  error: e
  });
  return 'Failed to generate the markdown report';
@@ -1016,8 +1016,8 @@ async function fetchScanData(orgSlug, scanId, options) {
  return JSON.parse(line);
  } catch {
  ok = false;
- debug.debugFn('error', 'fail: parse NDJSON');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'fail: parse NDJSON');
+ require$$6.debugDir('inspect', {
  line
  });
  return;
@@ -1519,28 +1519,28 @@ sockJson, cwd = process.cwd()) {
  sbt: false
  };
  if (sockJson?.defaults?.manifest?.sbt?.disabled) {
- debug.debugLog('notice', '[DEBUG] - sbt auto-detection is disabled in socket.json');
+ require$$6.debugLog('notice', '[DEBUG] - sbt auto-detection is disabled in socket.json');
  } else if (fs$1.existsSync(path.join(cwd, 'build.sbt'))) {
- debug.debugLog('notice', '[DEBUG] - Detected a Scala sbt build file');
+ require$$6.debugLog('notice', '[DEBUG] - Detected a Scala sbt build file');
  output.sbt = true;
  output.count += 1;
  }
  if (sockJson?.defaults?.manifest?.gradle?.disabled) {
- debug.debugLog('notice', '[DEBUG] - gradle auto-detection is disabled in socket.json');
+ require$$6.debugLog('notice', '[DEBUG] - gradle auto-detection is disabled in socket.json');
  } else if (fs$1.existsSync(path.join(cwd, 'gradlew'))) {
- debug.debugLog('notice', '[DEBUG] - Detected a gradle build file');
+ require$$6.debugLog('notice', '[DEBUG] - Detected a gradle build file');
  output.gradle = true;
  output.count += 1;
  }
  if (sockJson?.defaults?.manifest?.conda?.disabled) {
- debug.debugLog('notice', '[DEBUG] - conda auto-detection is disabled in socket.json');
+ require$$6.debugLog('notice', '[DEBUG] - conda auto-detection is disabled in socket.json');
  } else {
  const envyml = path.join(cwd, 'environment.yml');
  const hasEnvyml = fs$1.existsSync(envyml);
  const envyaml = path.join(cwd, 'environment.yaml');
  const hasEnvyaml = !hasEnvyml && fs$1.existsSync(envyaml);
  if (hasEnvyml || hasEnvyaml) {
- debug.debugLog('notice', '[DEBUG] - Detected an environment.yml Conda file');
+ require$$6.debugLog('notice', '[DEBUG] - Detected an environment.yml Conda file');
  output.conda = true;
  output.count += 1;
  }
@@ -1716,7 +1716,7 @@ async function convertSbtToMaven({
  // TODO: maybe we can add an option to target a specific file to dump to stdout
  if (out === '-' && poms.length === 1) {
  logger.logger.log('Result:\n```');
- logger.logger.log(await utils.safeReadFile(poms[0]));
+ logger.logger.log(await fs$2.safeReadFile(poms[0]));
  logger.logger.log('```');
  logger.logger.success(`OK`);
  } else if (out === '-') {
@@ -2043,7 +2043,7 @@ async function handleCreateNewScan({
  return;
  }
  logger.logger.success(`Found ${packagePaths.length} local ${words.pluralize('file', packagePaths.length)}`);
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  packagePaths
  });
  if (readOnly) {
@@ -3044,33 +3044,20 @@ async function coanaFix(fixConfig) {
  spinner?.stop();
  return lastCResult;
  }
- const spawnOptions = {
+ const isAllOrAuto = ghsas.length === 1 && (ghsas[0] === 'all' || ghsas[0] === 'auto');
+ const ids = isAllOrAuto ? ['all'] : ghsas;
+ const fixCResult = ids.length ? await utils.spawnCoana(['compute-fixes-and-upgrade-purls', cwd, '--manifests-tar-hash', tarHash, '--apply-fixes-to', ...ids, ...fixConfig.unknownFlags], {
  cwd,
  spinner,
  env: {
  SOCKET_ORG_SLUG: orgSlug
  }
- };
- let ids = ghsas;
- if (ids.length === 1 && ids[0] === 'auto') {
- debug.debugFn('notice', 'resolve: GitHub security alerts.');
- const foundIdsCResult = tarHash ? await utils.spawnCoana(['compute-fixes-and-upgrade-purls', cwd, '--manifests-tar-hash', tarHash], spawnOptions) : undefined;
- if (foundIdsCResult) {
- lastCResult = foundIdsCResult;
- }
- if (foundIdsCResult?.ok) {
- ids = utils.cmdFlagValueToArray(/(?<=Vulnerabilities found: )[^\n]+/.exec(foundIdsCResult.data)?.[0]);
- debug.debugDir('inspect', {
- GitHubSecurityAlerts: ids
- });
- }
- }
- const fixCResult = ids.length ? await utils.spawnCoana(['compute-fixes-and-upgrade-purls', cwd, '--manifests-tar-hash', tarHash, '--apply-fixes-to', ...ids, ...fixConfig.unknownFlags], spawnOptions) : undefined;
+ }) : undefined;
  if (fixCResult) {
  lastCResult = fixCResult;
  }
  spinner?.stop();
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  lastCResult
  });
  return lastCResult.ok ? {
@@ -3182,15 +3169,15 @@ function getPrsForPurl(fixEnv, partialPurl) {
  prs.push(pr);
  }
  }
- if (debug.isDebug('notice,silly')) {
+ if (require$$6.isDebug('notice,silly')) {
  const fullName = packages.resolvePackageName(partialPurlObj);
  if (prs.length) {
- debug.debugFn('notice', `found: ${prs.length} PRs for ${fullName}`);
- debug.debugDir('silly', {
+ require$$6.debugFn('notice', `found: ${prs.length} PRs for ${fullName}`);
+ require$$6.debugDir('silly', {
  prs
  });
  } else if (fixEnv.prs.length) {
- debug.debugFn('notice', `miss: 0 PRs found for ${fullName}`);
+ require$$6.debugFn('notice', `miss: 0 PRs found for ${fullName}`);
  }
  }
  return prs;
@@ -3204,14 +3191,14 @@ function getOctokit() {
  SOCKET_CLI_GITHUB_TOKEN
  } = constants.ENV;
  if (!SOCKET_CLI_GITHUB_TOKEN) {
- debug.debugFn('notice', 'miss: SOCKET_CLI_GITHUB_TOKEN env var');
+ require$$6.debugFn('notice', 'miss: SOCKET_CLI_GITHUB_TOKEN env var');
  }
  const octokitOptions = {
  auth: SOCKET_CLI_GITHUB_TOKEN,
  // Lazily access constants.ENV.GITHUB_API_URL.
  baseUrl: constants.ENV.GITHUB_API_URL
  };
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  octokitOptions
  });
  _octokit = new vendor.Octokit(octokitOptions);
@@ -3226,7 +3213,7 @@ function getOctokitGraphql() {
  SOCKET_CLI_GITHUB_TOKEN
  } = constants.ENV;
  if (!SOCKET_CLI_GITHUB_TOKEN) {
- debug.debugFn('notice', 'miss: SOCKET_CLI_GITHUB_TOKEN env var');
+ require$$6.debugFn('notice', 'miss: SOCKET_CLI_GITHUB_TOKEN env var');
  }
  _octokitGraphql = vendor.graphql2.defaults({
  headers: {
@@ -3254,7 +3241,7 @@ async function readCache(key,
  ttlMs = 5 * 60 * 1000) {
  // Lazily access constants.githubCachePath.
  const cacheJsonPath = path.join(constants.githubCachePath, `${key}.json`);
- const stat = utils.safeStatsSync(cacheJsonPath);
+ const stat = fs$2.safeStatsSync(cacheJsonPath);
  if (stat) {
  const isExpired = Date.now() - stat.mtimeMs > ttlMs;
  if (!isExpired) {
@@ -3310,14 +3297,14 @@ async function cleanupPrs(owner, repo, options) {
  pull_number: prNum,
  state: 'closed'
  });
- debug.debugFn('notice', `pr: closing ${prRef} for ${prToVersion}`);
+ require$$6.debugFn('notice', `pr: closing ${prRef} for ${prToVersion}`);
  // Remove entry from parent object.
  context.parent.splice(context.index, 1);
  // Mark cache to be saved.
  cachesToSave.set(context.cacheKey, context.data);
  return null;
  } catch (e) {
- debug.debugFn('error', `pr: failed to close ${prRef} for ${prToVersion}\n`, e?.message || 'unknown error');
+ require$$6.debugFn('error', `pr: failed to close ${prRef} for ${prToVersion}\n`, e?.message || 'unknown error');
  }
  }
  // Update stale PRs.
@@ -3330,7 +3317,7 @@ async function cleanupPrs(owner, repo, options) {
  base: match.headRefName,
  head: match.baseRefName
  });
- debug.debugFn('notice', `pr: updating stale ${prRef}`);
+ require$$6.debugFn('notice', `pr: updating stale ${prRef}`);
  // Update entry entry.
  if (context.apiType === 'graphql') {
  context.entry.mergeStateStatus = 'CLEAN';
@@ -3341,7 +3328,7 @@ async function cleanupPrs(owner, repo, options) {
  cachesToSave.set(context.cacheKey, context.data);
  } catch (e) {
  const message = e?.message || 'Unknown error';
- debug.debugFn('error', `pr: failed to update ${prRef} - ${message}`);
+ require$$6.debugFn('error', `pr: failed to update ${prRef} - ${message}`);
  }
  }
  return match;
@@ -3542,7 +3529,7 @@ async function openPr(owner, repo, branch, purl, newVersion, options) {
  base: baseBranch,
  body: getSocketPullRequestBody(purlObj, newVersion, workspace)
  };
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  octokitPullsCreateParams
  });
  return await octokit.pulls.create(octokitPullsCreateParams);
@@ -3553,7 +3540,7 @@ async function openPr(owner, repo, branch, purl, newVersion, options) {
  const details = errors.map(d => `- ${d.message?.trim() ?? `${d.resource}.${d.field} (${d.code})`}`).join('\n');
  message += `:\n${details}`;
  }
- debug.debugFn('error', message);
+ require$$6.debugFn('error', message);
  }
  return null;
  }
@@ -3564,16 +3551,16 @@ async function setGitRemoteGithubRepoUrl(owner, repo, token, cwd = process.cwd()
  const url = `https://x-access-token:${token}@${host}/${owner}/${repo}`;
  const stdioIgnoreOptions = {
  cwd,
- stdio: debug.isDebug('stdio') ? 'inherit' : 'ignore'
+ stdio: require$$6.isDebug('stdio') ? 'inherit' : 'ignore'
  };
  const quotedCmd = `\`git remote set-url origin ${url}\``;
- debug.debugFn('stdio', `spawn: ${quotedCmd}`);
+ require$$6.debugFn('stdio', `spawn: ${quotedCmd}`);
  try {
  await spawn.spawn('git', ['remote', 'set-url', 'origin', url], stdioIgnoreOptions);
  return true;
  } catch (e) {
- debug.debugFn('error', `caught: ${quotedCmd} failed`);
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', `caught: ${quotedCmd} failed`);
+ require$$6.debugDir('inspect', {
  error: e
  });
  }
@@ -3586,7 +3573,7 @@ function ciRepoInfo() {
  GITHUB_REPOSITORY
  } = constants.ENV;
  if (!GITHUB_REPOSITORY) {
- debug.debugFn('notice', 'miss: GITHUB_REPOSITORY env var');
+ require$$6.debugFn('notice', 'miss: GITHUB_REPOSITORY env var');
  }
  const ownerSlashRepo = GITHUB_REPOSITORY;
  const slashIndex = ownerSlashRepo.indexOf('/');
@@ -3610,9 +3597,9 @@ async function getFixEnv() {
  // but some CI checks are passing,
  constants.ENV.CI || gitEmail || gitUser || githubToken) &&
  // then log about it when in debug mode.
- debug.isDebug('notice')) {
+ require$$6.isDebug('notice')) {
  const envVars = [...(constants.ENV.CI ? [] : ['process.env.CI']), ...(gitEmail ? [] : ['process.env.SOCKET_CLI_GIT_USER_EMAIL']), ...(gitUser ? [] : ['process.env.SOCKET_CLI_GIT_USER_NAME']), ...(githubToken ? [] : ['process.env.GITHUB_TOKEN'])];
- debug.debugFn('notice', `miss: fixEnv.isCi is false, expected ${arrays.joinAnd(envVars)} to be set`);
+ require$$6.debugFn('notice', `miss: fixEnv.isCi is false, expected ${arrays.joinAnd(envVars)} to be set`);
  }
  let repoInfo = null;
  if (isCi) {
@@ -3620,7 +3607,7 @@ async function getFixEnv() {
  }
  if (!repoInfo) {
  if (isCi) {
- debug.debugFn('notice', 'falling back to `git remote get-url origin`');
+ require$$6.debugFn('notice', 'falling back to `git remote get-url origin`');
  }
  repoInfo = await utils.getRepoInfo();
  }
@@ -3744,7 +3731,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  pkgPath: rootPath
  } = pkgEnvDetails;
  const fixEnv = await getFixEnv();
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  fixEnv
  });
  const {
@@ -3768,11 +3755,11 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  spinner?.stop();
  logger.logger.info('No fixable vulns found.');
  if (alertsMap.size) {
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  alertsMap
  });
  } else {
- debug.debugFn('inspect', '{ alertsMap: Map(0) {} }');
+ require$$6.debugFn('inspect', '{ alertsMap: Map(0) {} }');
  }
  return {
  ok: true,
@@ -3781,14 +3768,14 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  }
  };
  }
- if (debug.isDebug('notice,inspect')) {
+ if (require$$6.isDebug('notice,inspect')) {
  spinner?.stop();
  const partialPurls = Array.from(infoByPartialPurl.keys());
  const {
  length: purlsCount
  } = partialPurls;
- debug.debugFn('notice', `found: ${purlsCount} ${words.pluralize('PURL', purlsCount)} with CVEs`);
- debug.debugDir('inspect', {
+ require$$6.debugFn('notice', `found: ${purlsCount} ${words.pluralize('PURL', purlsCount)} with CVEs`);
+ require$$6.debugDir('inspect', {
  partialPurls
  });
  spinner?.start();
@@ -3837,14 +3824,14 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  const name = packages.resolvePackageName(partialPurlObj);
  const infos = Array.from(infoEntry[1].values());
  if (!infos.length) {
- debug.debugFn('notice', `miss: CVEs expected, but not found, for ${name}`);
+ require$$6.debugFn('notice', `miss: CVEs expected, but not found, for ${name}`);
  continue infoEntriesLoop;
  }
  logger.logger.log(`Processing '${name}'`);
  logger.logger.indent();
  spinner?.indent();
  if (registry.getManifestData(partialPurlObj.type, name)) {
- debug.debugFn('notice', `found: Socket Optimize variant for ${name}`);
+ require$$6.debugFn('notice', `found: Socket Optimize variant for ${name}`);
  }
  // eslint-disable-next-line no-await-in-loop
  const packument = await packages.fetchPackagePackument(name);
@@ -3854,7 +3841,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  // Skip to next package.
  continue infoEntriesLoop;
  }
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  infos
  });
  const availableVersions = Object.keys(packument.versions);
@@ -3894,7 +3881,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  }
  const oldVersions = arrays.arrayUnique(shadowNpmInject.findPackageNodes(actualTree, name).map(n => n.version).filter(Boolean));
  if (!oldVersions.length) {
- debug.debugFn('notice', `skip: ${name} not found`);
+ require$$6.debugFn('notice', `skip: ${name} not found`);
  cleanupInfoEntriesLoop();
  // Skip to next package.
  continue infoEntriesLoop;
@@ -3910,8 +3897,8 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  const seenVersions = new Set();
  let hasAnnouncedWorkspace = false;
  let workspaceLogCallCount = logger.logger.logCallCount;
- if (debug.isDebug('notice')) {
- debug.debugFn('notice', `check: workspace ${workspace}`);
+ if (require$$6.isDebug('notice')) {
+ require$$6.debugFn('notice', `check: workspace ${workspace}`);
  hasAnnouncedWorkspace = true;
  workspaceLogCallCount = logger.logger.logCallCount;
  }
@@ -3920,7 +3907,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  const oldPurl = utils.idToPurl(oldId, partialPurlObj.type);
  const node = shadowNpmInject.findPackageNode(actualTree, name, oldVersion);
  if (!node) {
- debug.debugFn('notice', `skip: ${oldId} not found`);
+ require$$6.debugFn('notice', `skip: ${oldId} not found`);
  continue oldVersionsLoop;
  }
  infosLoop: for (const {
@@ -3940,7 +3927,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  continue infosLoop;
  }
  if (vendor.semverExports.gte(oldVersion, newVersion)) {
- debug.debugFn('silly', `skip: ${oldId} is >= ${newVersion}`);
+ require$$6.debugFn('silly', `skip: ${oldId} is >= ${newVersion}`);
  continue infosLoop;
  }
  const branch = getSocketBranchName(oldPurl, newVersion, workspace);
@@ -3949,14 +3936,14 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  }
  const pr = prCheck ? prs.find(p => p.headRefName === branch) : undefined;
  if (pr) {
- debug.debugFn('notice', `skip: PR #${pr.number} for ${name}@${newVersion} exists`);
+ require$$6.debugFn('notice', `skip: PR #${pr.number} for ${name}@${newVersion} exists`);
  seenBranches.add(branch);
  continue infosLoop;
  }
  if (fixEnv.isCi && (
  // eslint-disable-next-line no-await-in-loop
  await utils.gitRemoteBranchExists(branch, cwd))) {
- debug.debugFn('notice', `skip: remote branch "${branch}" for ${name}@${newVersion} exists`);
+ require$$6.debugFn('notice', `skip: remote branch "${branch}" for ${name}@${newVersion} exists`);
  seenBranches.add(branch);
  continue infosLoop;
  }
@@ -3985,7 +3972,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {

  // eslint-disable-next-line no-await-in-loop
  if (!(await hasModifiedFiles(cwd))) {
- debug.debugFn('notice', `skip: no changes for ${name}@${newVersion}`);
+ require$$6.debugFn('notice', `skip: no changes for ${name}@${newVersion}`);
  seenVersions.add(newVersion);
  // Reset things just in case.
  if (fixEnv.isCi) {
@@ -4036,7 +4023,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {

  // Check repoInfo to make TypeScript happy.
  if (!errored && fixEnv.isCi && fixEnv.repoInfo) {
- debug.debugFn('notice', 'pr: creating');
+ require$$6.debugFn('notice', 'pr: creating');
  try {
  const pushed =
  // eslint-disable-next-line no-await-in-loop
@@ -4116,7 +4103,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  errored = true;
  }
  } else if (fixEnv.isCi) {
- debug.debugFn('notice', 'skip: PR creation');
+ require$$6.debugFn('notice', 'skip: PR creation');
  }
  if (fixEnv.isCi) {
  spinner?.start();
@@ -4161,12 +4148,12 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  return {
  ok: false,
  message: 'Update failed',
- cause: `Update failed for ${oldId} in ${workspace}${error ? '; ' + error : ''}`
+ cause: `Update failed for ${oldId} in ${workspace}${error ? `; ${error}` : ''}`
  };
  } else {
  changed = true;
  }
- debug.debugFn('notice', 'increment: count', count + 1);
+ require$$6.debugFn('notice', 'increment: count', count + 1);
  if (++count >= limit) {
  cleanupInfoEntriesLoop();
  // Exit main loop.
@@ -4226,7 +4213,7 @@ async function install$1(pkgEnvDetails, options) {
  __proto__: null,
  ...options
  };
- const useDebug = debug.isDebug('stdio');
+ const useDebug = require$$6.isDebug('stdio');
  const args = [
  // If "true", npm does not run scripts specified in package.json files.
  // Note that commands explicitly intended to run a particular script, such
@@ -4255,7 +4242,7 @@ async function install$1(pkgEnvDetails, options) {
  // https://docs.npmjs.com/cli/v8/using-npm/config#loglevel
  ...(useDebug ? [] : ['--silent']), ...(extraArgs ?? [])];
  const quotedCmd = `\`${pkgEnvDetails.agent} install ${args.join(' ')}\``;
- debug.debugFn('stdio', `spawn: ${quotedCmd}`);
+ require$$6.debugFn('stdio', `spawn: ${quotedCmd}`);
  const isSpinning = spinner?.isSpinning;
  spinner?.stop();
  let errored = false;
@@ -4266,8 +4253,8 @@ async function install$1(pkgEnvDetails, options) {
  stdio: useDebug ? 'inherit' : 'ignore'
  });
  } catch (e) {
- debug.debugFn('error', `caught: ${quotedCmd} failed`);
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', `caught: ${quotedCmd} failed`);
+ require$$6.debugDir('inspect', {
  error: e
  });
  errored = true;
@@ -4277,8 +4264,8 @@ async function install$1(pkgEnvDetails, options) {
  try {
  actualTree = await getActualTree(cwd);
  } catch (e) {
- debug.debugFn('error', 'caught: Arborist error');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'caught: Arborist error');
+ require$$6.debugDir('inspect', {
  error: e
  });
  }
@@ -4315,8 +4302,8 @@ async function npmFix(pkgEnvDetails, fixConfig) {
  }
  } catch (e) {
  spinner?.stop();
- debug.debugFn('error', 'caught: PURL API');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'caught: PURL API');
+ require$$6.debugDir('inspect', {
  error: e
  });
  return {
@@ -4421,7 +4408,7 @@ async function install(pkgEnvDetails, options) {
  // https://github.com/pnpm/pnpm/issues/6778
  '--config.confirmModulesPurge=false', ...(extraArgs ?? [])];
  const quotedCmd = `\`${pkgEnvDetails.agent} install ${args.join(' ')}\``;
- debug.debugFn('stdio', `spawn: ${quotedCmd}`);
+ require$$6.debugFn('stdio', `spawn: ${quotedCmd}`);
  const isSpinning = spinner?.isSpinning;
  spinner?.stop();
  let errored = false;
@@ -4429,11 +4416,11 @@ async function install(pkgEnvDetails, options) {
  await utils.runAgentInstall(pkgEnvDetails, {
  args,
  spinner,
- stdio: debug.isDebug('stdio') ? 'inherit' : 'ignore'
+ stdio: require$$6.isDebug('stdio') ? 'inherit' : 'ignore'
  });
  } catch (e) {
- debug.debugFn('error', `caught: ${quotedCmd} failed`);
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', `caught: ${quotedCmd} failed`);
+ require$$6.debugDir('inspect', {
  error: e
  });
  errored = true;
@@ -4443,8 +4430,8 @@ async function install(pkgEnvDetails, options) {
  try {
  actualTree = await getActualTree(cwd);
  } catch (e) {
- debug.debugFn('error', 'caught: Arborist error');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'caught: Arborist error');
+ require$$6.debugDir('inspect', {
  error: e
  });
  }
@@ -4500,8 +4487,8 @@ async function pnpmFix(pkgEnvDetails, fixConfig) {
  alertsMap = purls.length ? await utils.getAlertsMapFromPurls(purls, getFixAlertsMapOptions()) : await utils.getAlertsMapFromPnpmLockfile(lockfile, getFixAlertsMapOptions());
  } catch (e) {
  spinner?.stop();
- debug.debugFn('error', 'caught: PURL API');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'caught: PURL API');
+ require$$6.debugDir('inspect', {
  error: e
  });
  return {
@@ -4657,7 +4644,7 @@ async function handleFix({
  }, outputKind);
  return;
  }
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  pkgEnvDetails
  });

@@ -4700,6 +4687,7 @@ async function handleFix({
  const {
  DRY_RUN_NOT_SAVING
  } = constants;
+ const DEFAULT_LIMIT = 10;
  const config$H = {
  commandName: 'fix',
  description: 'Update dependencies with "fixable" Socket alerts',
@@ -4719,14 +4707,14 @@ const config$H = {
  ghsa: {
  type: 'string',
  default: [],
- description: `Provide a list of ${vendor.terminalLinkExports('GHSA IDs', 'https://docs.github.com/en/code-security/security-advisories/working-with-global-security-advisories-from-the-github-advisory-database/about-the-github-advisory-database#about-ghsa-ids')} to compute fixes for, as either a comma separated value or as multiple flags.\n Use '--ghsa auto' to automatically lookup GHSA IDs and compute fixes for them.`,
+ description: `Provide a list of ${vendor.terminalLinkExports('GHSA IDs', 'https://docs.github.com/en/code-security/security-advisories/working-with-global-security-advisories-from-the-github-advisory-database/about-the-github-advisory-database#about-ghsa-ids')} to compute fixes for, as either a comma separated value or as multiple flags.\n Use '--ghsa all' to lookup all GHSA IDs and compute fixes for them.`,
  isMultiple: true,
  hidden: true
  },
  limit: {
  type: 'number',
- default: Infinity,
- description: 'The number of fixes to attempt at a time'
+ default: DEFAULT_LIMIT,
+ description: `The number of fixes to attempt at a time (default ${DEFAULT_LIMIT})`
  },
  maxSatisfying: {
  type: 'boolean',
@@ -4748,7 +4736,7 @@ const config$H = {
  purl: {
  type: 'string',
  default: [],
- description: `Provide a list of ${vendor.terminalLinkExports('PURLs', 'https://github.com/package-url/purl-spec?tab=readme-ov-file#purl')} to compute fixes for, as either a comma separated value or as multiple flags,\n instead of querying the Socket API`,
+ description: `Provide a list of ${vendor.terminalLinkExports('PURLs', 'https://github.com/package-url/purl-spec?tab=readme-ov-file#purl')} to compute fixes for, as either a comma separated value or as\n multiple flags, instead of querying the Socket API`,
  isMultiple: true,
  shortFlag: 'p'
  },
@@ -4756,7 +4744,7 @@ const config$H = {
  type: 'string',
  default: 'preserve',
  description: `
- Define how updated dependency versions should be written in package.json.
+ Define how dependency version ranges are updated in package.json (default 'preserve').
  Available styles:
  * caret - Use ^ range for compatible updates (e.g. ^1.2.3)
  * gt - Use > to allow any newer version (e.g. >1.2.3)
@@ -4776,7 +4764,7 @@ const config$H = {
  testScript: {
  type: 'string',
  default: 'test',
- description: 'The test script to run for each fix attempt'
+ description: "The test script to run for fix attempts (default 'test')"
  }
  },
  help: (command, config) => `
@@ -4868,7 +4856,7 @@ async function run$H(argv, importMeta, {
  // socket-cli/patches/meow#13.2.0.patch.
  const unknownFlags = cli.unknownFlags ?? [];
  const ghsas = utils.cmdFlagValueToArray(cli.flags['ghsa']);
- const limit = (cli.flags['limit'] ? parseInt(String(cli.flags['limit'] || ''), 10) : Infinity) || Infinity;
+ const limit = Number(cli.flags['limit']) || DEFAULT_LIMIT;
  const maxSatisfying = Boolean(cli.flags['maxSatisfying']);
  const minSatisfying = Boolean(cli.flags['minSatisfying']) || !maxSatisfying;
  const prCheck = Boolean(cli.flags['prCheck']);
@@ -4936,9 +4924,9 @@ async function setupTabCompletion(targetName) {

  // Target dir is something like ~/.local/share/socket/settings/completion (linux)
  const targetDir = path.dirname(targetPath);
- debug.debugFn('notice', 'target: path + dir', targetPath, targetDir);
+ require$$6.debugFn('notice', 'target: path + dir', targetPath, targetDir);
  if (!fs$1.existsSync(targetDir)) {
- debug.debugFn('notice', 'create: target dir');
+ require$$6.debugFn('notice', 'create: target dir');
  fs$1.mkdirSync(targetDir, {
  recursive: true
  });
@@ -5096,14 +5084,14 @@ async function outputCmdJson(cwd) {
  process.exitCode = 1;
  return;
  }
- if (!utils.safeStatsSync(sockJsonPath)?.isFile()) {
+ if (!fs$2.safeStatsSync(sockJsonPath)?.isFile()) {
  logger.logger.fail(`This is not a regular file (maybe a directory?): ${tildeSockJsonPath}`);
  process.exitCode = 1;
  return;
  }
  logger.logger.success(`This is the contents of ${tildeSockJsonPath}:`);
  logger.logger.error('');
- const data = utils.safeReadFileSync(sockJsonPath);
+ const data = fs$2.safeReadFileSync(sockJsonPath);
  logger.logger.log(data);
  }

@@ -5821,7 +5809,7 @@ async function run$B(argv, importMeta, {
  }
  const sockJson = await utils.readOrDefaultSocketJson(cwd);
  const detected = await detectManifestActions(sockJson, cwd);
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  detected
  });
  if (cli.flags['dryRun']) {
@@ -6088,7 +6076,7 @@ async function run$z(argv, importMeta, {
  // If given path is absolute then cwd should not affect it.
  cwd = path.resolve(process.cwd(), cwd);
  const sockJson = await utils.readOrDefaultSocketJson(cwd);
- debug.debugFn('inspect', 'override: socket.json gradle', sockJson?.defaults?.manifest?.gradle);
+ require$$6.debugFn('inspect', 'override: socket.json gradle', sockJson?.defaults?.manifest?.gradle);

  // Set defaults for any flag/arg that is not given. Check socket.json first.
  if (!bin) {
@@ -6245,7 +6233,7 @@ async function run$y(argv, importMeta, {
  // If given path is absolute then cwd should not affect it.
  cwd = path.resolve(process.cwd(), cwd);
  const sockJson = await utils.readOrDefaultSocketJson(cwd);
- debug.debugFn('inspect', 'override: socket.json gradle', sockJson?.defaults?.manifest?.gradle);
+ require$$6.debugFn('inspect', 'override: socket.json gradle', sockJson?.defaults?.manifest?.gradle);

  // Set defaults for any flag/arg that is not given. Check socket.json first.
  if (!bin) {
@@ -6411,7 +6399,7 @@ async function run$x(argv, importMeta, {
  // If given path is absolute then cwd should not affect it.
  cwd = path.resolve(process.cwd(), cwd);
  const sockJson = await utils.readOrDefaultSocketJson(cwd);
- debug.debugFn('inspect', 'override: socket.json sbt', sockJson?.defaults?.manifest?.sbt);
+ require$$6.debugFn('inspect', 'override: socket.json sbt', sockJson?.defaults?.manifest?.sbt);

  // Set defaults for any flag/arg that is not given. Check socket.json first.
  if (!bin) {
@@ -6505,7 +6493,7 @@ async function outputManifestSetup(result) {

  async function setupManifestConfig(cwd, defaultOnReadError = false) {
  const detected = await detectManifestActions(null, cwd);
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  detected
  });

@@ -7679,7 +7667,7 @@ async function addOverrides(pkgEnvDetails, pkgPath, options) {
  let loggedAddingText = false;

  // Chunk package names to process them in parallel 3 at a time.
- await require$$8.pEach(manifestEntries, 3, async ({
+ await require$$8.pEach(manifestEntries, async ({
  1: data
  }) => {
  const {
@@ -7733,7 +7721,7 @@ async function addOverrides(pkgEnvDetails, pkgPath, options) {
  npmExecPath
  });
  // Chunk package names to process them in parallel 3 at a time.
- await require$$8.pEach(overridesDataObjects, 3, async ({
+ await require$$8.pEach(overridesDataObjects, async ({
  overrides,
  type
  }) => {
@@ -7782,12 +7770,16 @@ async function addOverrides(pkgEnvDetails, pkgPath, options) {
  }
  }
  }
+ }, {
+ concurrency: 3
  });
  }
+ }, {
+ concurrency: 3
  });
  if (isWorkspace) {
  // Chunk package names to process them in parallel 3 at a time.
- await require$$8.pEach(workspacePkgJsonPaths, 3, async workspacePkgJsonPath => {
+ await require$$8.pEach(workspacePkgJsonPaths, async workspacePkgJsonPath => {
  const otherState = await addOverrides(pkgEnvDetails, path.dirname(workspacePkgJsonPath), {
  logger,
  pin,
@@ -7799,6 +7791,8 @@ async function addOverrides(pkgEnvDetails, pkgPath, options) {
  state[key].add(value);
  }
  }
+ }, {
+ concurrency: 3
  });
  }
  if (state.added.size > 0 || state.updated.size > 0) {
@@ -7843,8 +7837,8 @@ async function updateLockfile(pkgEnvDetails, options) {
  }
  } catch (e) {
  spinner?.stop();
- debug.debugFn('error', 'fail: update');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'fail: update');
+ require$$6.debugDir('inspect', {
  error: e
  });
  return {
@@ -7898,10 +7892,10 @@ async function applyOptimization(pkgEnvDetails, {
  ok: true,
  data: {
  addedCount,
- updatedCount,
+ addedInWorkspaces: state.addedInWorkspaces.size,
  pkgJsonChanged,
- updatedInWorkspaces: state.updatedInWorkspaces.size,
- addedInWorkspaces: state.addedInWorkspaces.size
+ updatedCount,
+ updatedInWorkspaces: state.updatedInWorkspaces.size
  }
  };
  }
@@ -7971,7 +7965,7 @@ async function handleOptimize({
  await outputOptimizeResult({
  ok: false,
  message: 'Unsupported',
- cause: utils.cmdPrefixMessage(CMD_NAME, `${agent} v${agentVersion} does not support overrides. Soon, though ⚡`)
+ cause: utils.cmdPrefixMessage(CMD_NAME, `${agent} v${agentVersion} does not support overrides.`)
  }, outputKind);
  return;
  }
@@ -9209,7 +9203,7 @@ function formatReportCard(artifact, color) {
  };
  const alertString = getAlertString(artifact.alerts, !color);
  if (!artifact.ecosystem) {
- debug.debugFn('notice', 'miss: artifact ecosystem', artifact);
+ require$$6.debugFn('notice', 'miss: artifact ecosystem', artifact);
  }
  const purl = `pkg:${artifact.ecosystem}/${artifact.name}${artifact.version ? '@' + artifact.version : ''}`;
  return ['Package: ' + (color ? vendor.yoctocolorsCjsExports.bold(purl) : purl), '', ...Object.entries(scoreResult).map(score => `- ${score[0]}:`.padEnd(20, ' ') + ` ${formatScore(score[1], !color, true)}`), alertString].join('\n');
@@ -10018,8 +10012,8 @@ async function fetchListAllRepos(orgSlug, options) {
  desc: 'list of repositories'
  });
  if (!orgRepoListCResult.ok) {
- debug.debugFn('error', 'fail: fetch repo');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'fail: fetch repo');
+ require$$6.debugDir('inspect', {
  orgRepoListCResult
  });
  return orgRepoListCResult;
@@ -11604,7 +11598,7 @@ async function scanOneRepo(repoSlug, {
  };
  }
  const tmpDir = fs$1.mkdtempSync(path.join(os.tmpdir(), repoSlug));
- debug.debugFn('notice', 'init: temp dir for scan root', tmpDir);
+ require$$6.debugFn('notice', 'init: temp dir for scan root', tmpDir);
  const downloadResult = await testAndDownloadManifestFiles({
  files,
  tmpDir,
@@ -11717,11 +11711,11 @@ async function testAndDownloadManifestFile({
  repoApiUrl,
  tmpDir
  }) {
- debug.debugFn('notice', 'testing: file', file);
+ require$$6.debugFn('notice', 'testing: file', file);
  const supportedFilesCResult = await fetchSupportedScanFileNames();
  const supportedFiles = supportedFilesCResult.ok ? supportedFilesCResult.data : undefined;
  if (!supportedFiles || !utils.isReportSupportedFile(file, supportedFiles)) {
- debug.debugFn('notice', ' - skip: not a known pattern');
+ require$$6.debugFn('notice', ' - skip: not a known pattern');
  // Not an error.
  return {
  ok: true,
@@ -11730,7 +11724,7 @@ async function testAndDownloadManifestFile({
  }
  };
  }
- debug.debugFn('notice', 'found: manifest file, going to attempt to download it;', file);
+ require$$6.debugFn('notice', 'found: manifest file, going to attempt to download it;', file);
  const result = await downloadManifestFile({
  file,
  tmpDir,
@@ -11752,9 +11746,9 @@ async function downloadManifestFile({
  repoApiUrl,
  tmpDir
  }) {
- debug.debugFn('notice', 'request: download url from GitHub');
+ require$$6.debugFn('notice', 'request: download url from GitHub');
  const fileUrl = `${repoApiUrl}/contents/${file}?ref=${defaultBranch}`;
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  fileUrl
  });
  const downloadUrlResponse = await fetch(fileUrl, {
@@ -11763,9 +11757,9 @@ async function downloadManifestFile({
  Authorization: `Bearer ${githubToken}`
  }
  });
- debug.debugFn('notice', 'complete: request');
+ require$$6.debugFn('notice', 'complete: request');
  const downloadUrlText = await downloadUrlResponse.text();
- debug.debugFn('inspect', 'response: raw download url', downloadUrlText);
+ require$$6.debugFn('inspect', 'response: raw download url', downloadUrlText);
  let downloadUrl;
  try {
  downloadUrl = JSON.parse(downloadUrlText).download_url;
@@ -11778,7 +11772,7 @@ async function downloadManifestFile({
  };
  }
  const localPath = path.join(tmpDir, file);
- debug.debugFn('notice', 'download: manifest file started', downloadUrl, '->', localPath);
+ require$$6.debugFn('notice', 'download: manifest file started', downloadUrl, '->', localPath);

  // Now stream the file to that file...
  const result = await streamDownloadWithFetch(localPath, downloadUrl);
@@ -11787,7 +11781,7 @@ async function downloadManifestFile({
  logger.logger.fail(`Failed to download manifest file, skipping to next file. File: ${file}`);
  return result;
  }
- debug.debugFn('notice', 'download: manifest file completed');
+ require$$6.debugFn('notice', 'download: manifest file completed');
  return {
  ok: true,
  data: undefined
@@ -11839,7 +11833,7 @@ async function streamDownloadWithFetch(localPath, downloadUrl) {
  };
  } catch (error) {
  logger.logger.fail('An error was thrown while trying to download a manifest file... url:', downloadUrl);
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  error
  });

@@ -11863,7 +11857,7 @@ async function streamDownloadWithFetch(localPath, downloadUrl) {
  // If error was due to bad HTTP status
  detailedError += ` (HTTP Status: ${response.status} ${response.statusText})`;
  }
- debug.debugFn('error', detailedError);
+ require$$6.debugFn('error', detailedError);
  return {
  ok: false,
  message: 'Download Failed',
@@ -11880,14 +11874,14 @@ async function getLastCommitDetails({
  }) {
  logger.logger.info(`Requesting last commit for default branch ${defaultBranch} for ${orgGithub}/${repoSlug}...`);
  const commitApiUrl = `${repoApiUrl}/commits?sha=${defaultBranch}&per_page=1`;
- debug.debugFn('inspect', 'url: commit', commitApiUrl);
+ require$$6.debugFn('inspect', 'url: commit', commitApiUrl);
  const commitResponse = await fetch(commitApiUrl, {
  headers: {
  Authorization: `Bearer ${githubToken}`
  }
  });
  const commitText = await commitResponse.text();
- debug.debugFn('inspect', 'response: commit', commitText);
+ require$$6.debugFn('inspect', 'response: commit', commitText);
  let lastCommit;
  try {
  lastCommit = JSON.parse(commitText)?.[0];
@@ -11974,7 +11968,7 @@ async function getRepoDetails({
  repoSlug
  }) {
  const repoApiUrl = `${githubApiUrl}/repos/${orgGithub}/${repoSlug}`;
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  repoApiUrl
  });
  const repoDetailsResponse = await fetch(repoApiUrl, {
@@ -11985,7 +11979,7 @@ async function getRepoDetails({
  });
  logger.logger.success(`Request completed.`);
  const repoDetailsText = await repoDetailsResponse.text();
- debug.debugFn('inspect', 'response: repo', repoDetailsText);
+ require$$6.debugFn('inspect', 'response: repo', repoDetailsText);
  let repoDetails;
  try {
  repoDetails = JSON.parse(repoDetailsText);
@@ -12024,7 +12018,7 @@ async function getRepoBranchTree({
  }) {
  logger.logger.info(`Requesting default branch file tree; branch \`${defaultBranch}\`, repo \`${orgGithub}/${repoSlug}\`...`);
  const treeApiUrl = `${repoApiUrl}/git/trees/${defaultBranch}?recursive=1`;
- debug.debugFn('inspect', 'url: tree', treeApiUrl);
+ require$$6.debugFn('inspect', 'url: tree', treeApiUrl);
  const treeResponse = await fetch(treeApiUrl, {
  method: 'GET',
  headers: {
@@ -12032,7 +12026,7 @@ async function getRepoBranchTree({
  }
  });
  const treeText = await treeResponse.text();
- debug.debugFn('inspect', 'response: tree', treeText);
+ require$$6.debugFn('inspect', 'response: tree', treeText);
  let treeDetails;
  try {
  treeDetails = JSON.parse(treeText);
@@ -12061,7 +12055,7 @@ async function getRepoBranchTree({
  };
  }
  if (!treeDetails.tree || !Array.isArray(treeDetails.tree)) {
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  treeDetails: {
  tree: treeDetails.tree
  }
@@ -13373,8 +13367,8 @@ async function fetchScan(orgSlug, scanId) {
  return JSON.parse(line);
  } catch {
  ok = false;
- debug.debugFn('error', 'fail: parse NDJSON');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'fail: parse NDJSON');
+ require$$6.debugDir('inspect', {
  line
  });
  return null;
@@ -14267,8 +14261,8 @@ Do you want to install "safe npm" (this will create an alias to the socket-npm c
  }
  }
  } catch (e) {
- debug.debugFn('error', 'fail: setup tab completion');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'fail: setup tab completion');
+ require$$6.debugDir('inspect', {
  error: e
  });
  // Ignore. Skip tab completion setup.
@@ -14583,8 +14577,8 @@ void (async () => {
  });
  } catch (e) {
  process.exitCode = 1;
- debug.debugFn('error', 'Uncaught error (BAD!):');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'Uncaught error (BAD!):');
+ require$$6.debugDir('inspect', {
  error: e
  });
  let errorBody;
@@ -14628,7 +14622,7 @@ void (async () => {
  logger.logger.error('\n'); // Any-spinner-newline
  logger.logger.fail(utils.failMsgWithBadge(errorTitle, errorMessage));
  if (errorBody) {
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  errorBody
  });
  }
@@ -14636,5 +14630,5 @@ void (async () => {
  await utils.captureException(e);
  }
  })();
- //# debugId=17c5b5ba-1b60-498d-8e4a-b6943d431dd2
+ //# debugId=8579bbdd-d381-460b-9333-6759b277974a
  //# sourceMappingURL=cli.js.map