@socketsecurity/cli-with-sentry 1.0.79 → 1.0.80

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -3,7 +3,7 @@

  var require$$0 = require('node:url');
  var vendor = require('./vendor.js');
- var debug = require('../external/@socketsecurity/registry/lib/debug');
+ var require$$6 = require('../external/@socketsecurity/registry/lib/debug');
  var logger = require('../external/@socketsecurity/registry/lib/logger');
  var utils = require('./utils.js');
  var fs = require('node:fs/promises');
@@ -15,6 +15,7 @@ var prompts = require('../external/@socketsecurity/registry/lib/prompts');
  var fs$1 = require('node:fs');
  var path = require('node:path');
  var spawn = require('../external/@socketsecurity/registry/lib/spawn');
+ var fs$2 = require('../external/@socketsecurity/registry/lib/fs');
  var strings = require('../external/@socketsecurity/registry/lib/strings');
  var arrays = require('../external/@socketsecurity/registry/lib/arrays');
  var registry = require('../external/@socketsecurity/registry');
@@ -22,7 +23,6 @@ var npm = require('../external/@socketsecurity/registry/lib/npm');
  var packages = require('../external/@socketsecurity/registry/lib/packages');
  var sorts = require('../external/@socketsecurity/registry/lib/sorts');
  var regexps = require('../external/@socketsecurity/registry/lib/regexps');
- var fs$2 = require('../external/@socketsecurity/registry/lib/fs');
  var shadowNpmInject = require('./shadow-npm-inject.js');
  var require$$7 = require('../external/@socketsecurity/registry/lib/objects');
  var shadowNpmBin = require('./shadow-npm-bin.js');
@@ -619,8 +619,8 @@ ${table}
  } catch (e) {
  process.exitCode = 1;
  logger.logger.fail('There was a problem converting the logs to Markdown, please try the `--json` flag');
- debug.debugFn('error', 'caught: unexpected error');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'caught: unexpected error');
+ require$$6.debugDir('inspect', {
  error: e
  });
  return 'Failed to generate the markdown report';
@@ -1016,8 +1016,8 @@ async function fetchScanData(orgSlug, scanId, options) {
  return JSON.parse(line);
  } catch {
  ok = false;
- debug.debugFn('error', 'fail: parse NDJSON');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'fail: parse NDJSON');
+ require$$6.debugDir('inspect', {
  line
  });
  return;
@@ -1519,28 +1519,28 @@ sockJson, cwd = process.cwd()) {
  sbt: false
  };
  if (sockJson?.defaults?.manifest?.sbt?.disabled) {
- debug.debugLog('notice', '[DEBUG] - sbt auto-detection is disabled in socket.json');
+ require$$6.debugLog('notice', '[DEBUG] - sbt auto-detection is disabled in socket.json');
  } else if (fs$1.existsSync(path.join(cwd, 'build.sbt'))) {
- debug.debugLog('notice', '[DEBUG] - Detected a Scala sbt build file');
+ require$$6.debugLog('notice', '[DEBUG] - Detected a Scala sbt build file');
  output.sbt = true;
  output.count += 1;
  }
  if (sockJson?.defaults?.manifest?.gradle?.disabled) {
- debug.debugLog('notice', '[DEBUG] - gradle auto-detection is disabled in socket.json');
+ require$$6.debugLog('notice', '[DEBUG] - gradle auto-detection is disabled in socket.json');
  } else if (fs$1.existsSync(path.join(cwd, 'gradlew'))) {
- debug.debugLog('notice', '[DEBUG] - Detected a gradle build file');
+ require$$6.debugLog('notice', '[DEBUG] - Detected a gradle build file');
  output.gradle = true;
  output.count += 1;
  }
  if (sockJson?.defaults?.manifest?.conda?.disabled) {
- debug.debugLog('notice', '[DEBUG] - conda auto-detection is disabled in socket.json');
+ require$$6.debugLog('notice', '[DEBUG] - conda auto-detection is disabled in socket.json');
  } else {
  const envyml = path.join(cwd, 'environment.yml');
  const hasEnvyml = fs$1.existsSync(envyml);
  const envyaml = path.join(cwd, 'environment.yaml');
  const hasEnvyaml = !hasEnvyml && fs$1.existsSync(envyaml);
  if (hasEnvyml || hasEnvyaml) {
- debug.debugLog('notice', '[DEBUG] - Detected an environment.yml Conda file');
+ require$$6.debugLog('notice', '[DEBUG] - Detected an environment.yml Conda file');
  output.conda = true;
  output.count += 1;
  }
@@ -1716,7 +1716,7 @@ async function convertSbtToMaven({
  // TODO: maybe we can add an option to target a specific file to dump to stdout
  if (out === '-' && poms.length === 1) {
  logger.logger.log('Result:\n```');
- logger.logger.log(await utils.safeReadFile(poms[0]));
+ logger.logger.log(await fs$2.safeReadFile(poms[0]));
  logger.logger.log('```');
  logger.logger.success(`OK`);
  } else if (out === '-') {
@@ -2043,7 +2043,7 @@ async function handleCreateNewScan({
  return;
  }
  logger.logger.success(`Found ${packagePaths.length} local ${words.pluralize('file', packagePaths.length)}`);
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  packagePaths
  });
  if (readOnly) {
@@ -3044,33 +3044,20 @@ async function coanaFix(fixConfig) {
  spinner?.stop();
  return lastCResult;
  }
- const spawnOptions = {
+ const isAuto = ghsas.length === 1 && (ghsas[0] === 'all' || ghsas[0] === 'auto');
+ const ids = isAuto ? ['all'] : ghsas;
+ const fixCResult = ids.length ? await utils.spawnCoana(['compute-fixes-and-upgrade-purls', cwd, '--manifests-tar-hash', tarHash, '--apply-fixes-to', ...ids, ...fixConfig.unknownFlags], {
  cwd,
  spinner,
  env: {
  SOCKET_ORG_SLUG: orgSlug
  }
- };
- let ids = ghsas;
- if (ids.length === 1 && ids[0] === 'auto') {
- debug.debugFn('notice', 'resolve: GitHub security alerts.');
- const foundIdsCResult = tarHash ? await utils.spawnCoana(['compute-fixes-and-upgrade-purls', cwd, '--manifests-tar-hash', tarHash], spawnOptions) : undefined;
- if (foundIdsCResult) {
- lastCResult = foundIdsCResult;
- }
- if (foundIdsCResult?.ok) {
- ids = utils.cmdFlagValueToArray(/(?<=Vulnerabilities found: )[^\n]+/.exec(foundIdsCResult.data)?.[0]);
- debug.debugDir('inspect', {
- GitHubSecurityAlerts: ids
- });
- }
- }
- const fixCResult = ids.length ? await utils.spawnCoana(['compute-fixes-and-upgrade-purls', cwd, '--manifests-tar-hash', tarHash, '--apply-fixes-to', ...ids, ...fixConfig.unknownFlags], spawnOptions) : undefined;
+ }) : undefined;
  if (fixCResult) {
  lastCResult = fixCResult;
  }
  spinner?.stop();
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  lastCResult
  });
  return lastCResult.ok ? {
@@ -3182,15 +3169,15 @@ function getPrsForPurl(fixEnv, partialPurl) {
  prs.push(pr);
  }
  }
- if (debug.isDebug('notice,silly')) {
+ if (require$$6.isDebug('notice,silly')) {
  const fullName = packages.resolvePackageName(partialPurlObj);
  if (prs.length) {
- debug.debugFn('notice', `found: ${prs.length} PRs for ${fullName}`);
- debug.debugDir('silly', {
+ require$$6.debugFn('notice', `found: ${prs.length} PRs for ${fullName}`);
+ require$$6.debugDir('silly', {
  prs
  });
  } else if (fixEnv.prs.length) {
- debug.debugFn('notice', `miss: 0 PRs found for ${fullName}`);
+ require$$6.debugFn('notice', `miss: 0 PRs found for ${fullName}`);
  }
  }
  return prs;
@@ -3204,14 +3191,14 @@ function getOctokit() {
  SOCKET_CLI_GITHUB_TOKEN
  } = constants.ENV;
  if (!SOCKET_CLI_GITHUB_TOKEN) {
- debug.debugFn('notice', 'miss: SOCKET_CLI_GITHUB_TOKEN env var');
+ require$$6.debugFn('notice', 'miss: SOCKET_CLI_GITHUB_TOKEN env var');
  }
  const octokitOptions = {
  auth: SOCKET_CLI_GITHUB_TOKEN,
  // Lazily access constants.ENV.GITHUB_API_URL.
  baseUrl: constants.ENV.GITHUB_API_URL
  };
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  octokitOptions
  });
  _octokit = new vendor.Octokit(octokitOptions);
@@ -3226,7 +3213,7 @@ function getOctokitGraphql() {
  SOCKET_CLI_GITHUB_TOKEN
  } = constants.ENV;
  if (!SOCKET_CLI_GITHUB_TOKEN) {
- debug.debugFn('notice', 'miss: SOCKET_CLI_GITHUB_TOKEN env var');
+ require$$6.debugFn('notice', 'miss: SOCKET_CLI_GITHUB_TOKEN env var');
  }
  _octokitGraphql = vendor.graphql2.defaults({
  headers: {
@@ -3254,7 +3241,7 @@ async function readCache(key,
  ttlMs = 5 * 60 * 1000) {
  // Lazily access constants.githubCachePath.
  const cacheJsonPath = path.join(constants.githubCachePath, `${key}.json`);
- const stat = utils.safeStatsSync(cacheJsonPath);
+ const stat = fs$2.safeStatsSync(cacheJsonPath);
  if (stat) {
  const isExpired = Date.now() - stat.mtimeMs > ttlMs;
  if (!isExpired) {
@@ -3310,14 +3297,14 @@ async function cleanupPrs(owner, repo, options) {
  pull_number: prNum,
  state: 'closed'
  });
- debug.debugFn('notice', `pr: closing ${prRef} for ${prToVersion}`);
+ require$$6.debugFn('notice', `pr: closing ${prRef} for ${prToVersion}`);
  // Remove entry from parent object.
  context.parent.splice(context.index, 1);
  // Mark cache to be saved.
  cachesToSave.set(context.cacheKey, context.data);
  return null;
  } catch (e) {
- debug.debugFn('error', `pr: failed to close ${prRef} for ${prToVersion}\n`, e?.message || 'unknown error');
+ require$$6.debugFn('error', `pr: failed to close ${prRef} for ${prToVersion}\n`, e?.message || 'unknown error');
  }
  }
  // Update stale PRs.
@@ -3330,7 +3317,7 @@ async function cleanupPrs(owner, repo, options) {
  base: match.headRefName,
  head: match.baseRefName
  });
- debug.debugFn('notice', `pr: updating stale ${prRef}`);
+ require$$6.debugFn('notice', `pr: updating stale ${prRef}`);
  // Update entry entry.
  if (context.apiType === 'graphql') {
  context.entry.mergeStateStatus = 'CLEAN';
@@ -3341,7 +3328,7 @@ async function cleanupPrs(owner, repo, options) {
  cachesToSave.set(context.cacheKey, context.data);
  } catch (e) {
  const message = e?.message || 'Unknown error';
- debug.debugFn('error', `pr: failed to update ${prRef} - ${message}`);
+ require$$6.debugFn('error', `pr: failed to update ${prRef} - ${message}`);
  }
  }
  return match;
@@ -3542,7 +3529,7 @@ async function openPr(owner, repo, branch, purl, newVersion, options) {
  base: baseBranch,
  body: getSocketPullRequestBody(purlObj, newVersion, workspace)
  };
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  octokitPullsCreateParams
  });
  return await octokit.pulls.create(octokitPullsCreateParams);
@@ -3553,7 +3540,7 @@ async function openPr(owner, repo, branch, purl, newVersion, options) {
  const details = errors.map(d => `- ${d.message?.trim() ?? `${d.resource}.${d.field} (${d.code})`}`).join('\n');
  message += `:\n${details}`;
  }
- debug.debugFn('error', message);
+ require$$6.debugFn('error', message);
  }
  return null;
  }
@@ -3564,16 +3551,16 @@ async function setGitRemoteGithubRepoUrl(owner, repo, token, cwd = process.cwd()
  const url = `https://x-access-token:${token}@${host}/${owner}/${repo}`;
  const stdioIgnoreOptions = {
  cwd,
- stdio: debug.isDebug('stdio') ? 'inherit' : 'ignore'
+ stdio: require$$6.isDebug('stdio') ? 'inherit' : 'ignore'
  };
  const quotedCmd = `\`git remote set-url origin ${url}\``;
- debug.debugFn('stdio', `spawn: ${quotedCmd}`);
+ require$$6.debugFn('stdio', `spawn: ${quotedCmd}`);
  try {
  await spawn.spawn('git', ['remote', 'set-url', 'origin', url], stdioIgnoreOptions);
  return true;
  } catch (e) {
- debug.debugFn('error', `caught: ${quotedCmd} failed`);
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', `caught: ${quotedCmd} failed`);
+ require$$6.debugDir('inspect', {
  error: e
  });
  }
@@ -3586,7 +3573,7 @@ function ciRepoInfo() {
  GITHUB_REPOSITORY
  } = constants.ENV;
  if (!GITHUB_REPOSITORY) {
- debug.debugFn('notice', 'miss: GITHUB_REPOSITORY env var');
+ require$$6.debugFn('notice', 'miss: GITHUB_REPOSITORY env var');
  }
  const ownerSlashRepo = GITHUB_REPOSITORY;
  const slashIndex = ownerSlashRepo.indexOf('/');
@@ -3610,9 +3597,9 @@ async function getFixEnv() {
  // but some CI checks are passing,
  constants.ENV.CI || gitEmail || gitUser || githubToken) &&
  // then log about it when in debug mode.
- debug.isDebug('notice')) {
+ require$$6.isDebug('notice')) {
  const envVars = [...(constants.ENV.CI ? [] : ['process.env.CI']), ...(gitEmail ? [] : ['process.env.SOCKET_CLI_GIT_USER_EMAIL']), ...(gitUser ? [] : ['process.env.SOCKET_CLI_GIT_USER_NAME']), ...(githubToken ? [] : ['process.env.GITHUB_TOKEN'])];
- debug.debugFn('notice', `miss: fixEnv.isCi is false, expected ${arrays.joinAnd(envVars)} to be set`);
+ require$$6.debugFn('notice', `miss: fixEnv.isCi is false, expected ${arrays.joinAnd(envVars)} to be set`);
  }
  let repoInfo = null;
  if (isCi) {
@@ -3620,7 +3607,7 @@ async function getFixEnv() {
  }
  if (!repoInfo) {
  if (isCi) {
- debug.debugFn('notice', 'falling back to `git remote get-url origin`');
+ require$$6.debugFn('notice', 'falling back to `git remote get-url origin`');
  }
  repoInfo = await utils.getRepoInfo();
  }
@@ -3744,7 +3731,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  pkgPath: rootPath
  } = pkgEnvDetails;
  const fixEnv = await getFixEnv();
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  fixEnv
  });
  const {
@@ -3768,11 +3755,11 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  spinner?.stop();
  logger.logger.info('No fixable vulns found.');
  if (alertsMap.size) {
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  alertsMap
  });
  } else {
- debug.debugFn('inspect', '{ alertsMap: Map(0) {} }');
+ require$$6.debugFn('inspect', '{ alertsMap: Map(0) {} }');
  }
  return {
  ok: true,
@@ -3781,14 +3768,14 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  }
  };
  }
- if (debug.isDebug('notice,inspect')) {
+ if (require$$6.isDebug('notice,inspect')) {
  spinner?.stop();
  const partialPurls = Array.from(infoByPartialPurl.keys());
  const {
  length: purlsCount
  } = partialPurls;
- debug.debugFn('notice', `found: ${purlsCount} ${words.pluralize('PURL', purlsCount)} with CVEs`);
- debug.debugDir('inspect', {
+ require$$6.debugFn('notice', `found: ${purlsCount} ${words.pluralize('PURL', purlsCount)} with CVEs`);
+ require$$6.debugDir('inspect', {
  partialPurls
  });
  spinner?.start();
@@ -3837,14 +3824,14 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  const name = packages.resolvePackageName(partialPurlObj);
  const infos = Array.from(infoEntry[1].values());
  if (!infos.length) {
- debug.debugFn('notice', `miss: CVEs expected, but not found, for ${name}`);
+ require$$6.debugFn('notice', `miss: CVEs expected, but not found, for ${name}`);
  continue infoEntriesLoop;
  }
  logger.logger.log(`Processing '${name}'`);
  logger.logger.indent();
  spinner?.indent();
  if (registry.getManifestData(partialPurlObj.type, name)) {
- debug.debugFn('notice', `found: Socket Optimize variant for ${name}`);
+ require$$6.debugFn('notice', `found: Socket Optimize variant for ${name}`);
  }
  // eslint-disable-next-line no-await-in-loop
  const packument = await packages.fetchPackagePackument(name);
@@ -3854,7 +3841,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  // Skip to next package.
  continue infoEntriesLoop;
  }
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  infos
  });
  const availableVersions = Object.keys(packument.versions);
@@ -3894,7 +3881,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  }
  const oldVersions = arrays.arrayUnique(shadowNpmInject.findPackageNodes(actualTree, name).map(n => n.version).filter(Boolean));
  if (!oldVersions.length) {
- debug.debugFn('notice', `skip: ${name} not found`);
+ require$$6.debugFn('notice', `skip: ${name} not found`);
  cleanupInfoEntriesLoop();
  // Skip to next package.
  continue infoEntriesLoop;
@@ -3910,8 +3897,8 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  const seenVersions = new Set();
  let hasAnnouncedWorkspace = false;
  let workspaceLogCallCount = logger.logger.logCallCount;
- if (debug.isDebug('notice')) {
- debug.debugFn('notice', `check: workspace ${workspace}`);
+ if (require$$6.isDebug('notice')) {
+ require$$6.debugFn('notice', `check: workspace ${workspace}`);
  hasAnnouncedWorkspace = true;
  workspaceLogCallCount = logger.logger.logCallCount;
  }
@@ -3920,7 +3907,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  const oldPurl = utils.idToPurl(oldId, partialPurlObj.type);
  const node = shadowNpmInject.findPackageNode(actualTree, name, oldVersion);
  if (!node) {
- debug.debugFn('notice', `skip: ${oldId} not found`);
+ require$$6.debugFn('notice', `skip: ${oldId} not found`);
  continue oldVersionsLoop;
  }
  infosLoop: for (const {
@@ -3940,7 +3927,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  continue infosLoop;
  }
  if (vendor.semverExports.gte(oldVersion, newVersion)) {
- debug.debugFn('silly', `skip: ${oldId} is >= ${newVersion}`);
+ require$$6.debugFn('silly', `skip: ${oldId} is >= ${newVersion}`);
  continue infosLoop;
  }
  const branch = getSocketBranchName(oldPurl, newVersion, workspace);
@@ -3949,14 +3936,14 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  }
  const pr = prCheck ? prs.find(p => p.headRefName === branch) : undefined;
  if (pr) {
- debug.debugFn('notice', `skip: PR #${pr.number} for ${name}@${newVersion} exists`);
+ require$$6.debugFn('notice', `skip: PR #${pr.number} for ${name}@${newVersion} exists`);
  seenBranches.add(branch);
  continue infosLoop;
  }
  if (fixEnv.isCi && (
  // eslint-disable-next-line no-await-in-loop
  await utils.gitRemoteBranchExists(branch, cwd))) {
- debug.debugFn('notice', `skip: remote branch "${branch}" for ${name}@${newVersion} exists`);
+ require$$6.debugFn('notice', `skip: remote branch "${branch}" for ${name}@${newVersion} exists`);
  seenBranches.add(branch);
  continue infosLoop;
  }
@@ -3985,7 +3972,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {

  // eslint-disable-next-line no-await-in-loop
  if (!(await hasModifiedFiles(cwd))) {
- debug.debugFn('notice', `skip: no changes for ${name}@${newVersion}`);
+ require$$6.debugFn('notice', `skip: no changes for ${name}@${newVersion}`);
  seenVersions.add(newVersion);
  // Reset things just in case.
  if (fixEnv.isCi) {
@@ -4036,7 +4023,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {

  // Check repoInfo to make TypeScript happy.
  if (!errored && fixEnv.isCi && fixEnv.repoInfo) {
- debug.debugFn('notice', 'pr: creating');
+ require$$6.debugFn('notice', 'pr: creating');
  try {
  const pushed =
  // eslint-disable-next-line no-await-in-loop
@@ -4116,7 +4103,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  errored = true;
  }
  } else if (fixEnv.isCi) {
- debug.debugFn('notice', 'skip: PR creation');
+ require$$6.debugFn('notice', 'skip: PR creation');
  }
  if (fixEnv.isCi) {
  spinner?.start();
@@ -4166,7 +4153,7 @@ async function agentFix(pkgEnvDetails, actualTree, alertsMap, installer, {
  } else {
  changed = true;
  }
- debug.debugFn('notice', 'increment: count', count + 1);
+ require$$6.debugFn('notice', 'increment: count', count + 1);
  if (++count >= limit) {
  cleanupInfoEntriesLoop();
  // Exit main loop.
@@ -4226,7 +4213,7 @@ async function install$1(pkgEnvDetails, options) {
  __proto__: null,
  ...options
  };
- const useDebug = debug.isDebug('stdio');
+ const useDebug = require$$6.isDebug('stdio');
  const args = [
  // If "true", npm does not run scripts specified in package.json files.
  // Note that commands explicitly intended to run a particular script, such
@@ -4255,7 +4242,7 @@ async function install$1(pkgEnvDetails, options) {
  // https://docs.npmjs.com/cli/v8/using-npm/config#loglevel
  ...(useDebug ? [] : ['--silent']), ...(extraArgs ?? [])];
  const quotedCmd = `\`${pkgEnvDetails.agent} install ${args.join(' ')}\``;
- debug.debugFn('stdio', `spawn: ${quotedCmd}`);
+ require$$6.debugFn('stdio', `spawn: ${quotedCmd}`);
  const isSpinning = spinner?.isSpinning;
  spinner?.stop();
  let errored = false;
@@ -4266,8 +4253,8 @@ async function install$1(pkgEnvDetails, options) {
  stdio: useDebug ? 'inherit' : 'ignore'
  });
  } catch (e) {
- debug.debugFn('error', `caught: ${quotedCmd} failed`);
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', `caught: ${quotedCmd} failed`);
+ require$$6.debugDir('inspect', {
  error: e
  });
  errored = true;
@@ -4277,8 +4264,8 @@ async function install$1(pkgEnvDetails, options) {
  try {
  actualTree = await getActualTree(cwd);
  } catch (e) {
- debug.debugFn('error', 'caught: Arborist error');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'caught: Arborist error');
+ require$$6.debugDir('inspect', {
  error: e
  });
  }
@@ -4315,8 +4302,8 @@ async function npmFix(pkgEnvDetails, fixConfig) {
  }
  } catch (e) {
  spinner?.stop();
- debug.debugFn('error', 'caught: PURL API');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'caught: PURL API');
+ require$$6.debugDir('inspect', {
  error: e
  });
  return {
@@ -4421,7 +4408,7 @@ async function install(pkgEnvDetails, options) {
  // https://github.com/pnpm/pnpm/issues/6778
  '--config.confirmModulesPurge=false', ...(extraArgs ?? [])];
  const quotedCmd = `\`${pkgEnvDetails.agent} install ${args.join(' ')}\``;
- debug.debugFn('stdio', `spawn: ${quotedCmd}`);
+ require$$6.debugFn('stdio', `spawn: ${quotedCmd}`);
  const isSpinning = spinner?.isSpinning;
  spinner?.stop();
  let errored = false;
@@ -4429,11 +4416,11 @@ async function install(pkgEnvDetails, options) {
  await utils.runAgentInstall(pkgEnvDetails, {
  args,
  spinner,
- stdio: debug.isDebug('stdio') ? 'inherit' : 'ignore'
+ stdio: require$$6.isDebug('stdio') ? 'inherit' : 'ignore'
  });
  } catch (e) {
- debug.debugFn('error', `caught: ${quotedCmd} failed`);
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', `caught: ${quotedCmd} failed`);
+ require$$6.debugDir('inspect', {
  error: e
  });
  errored = true;
@@ -4443,8 +4430,8 @@ async function install(pkgEnvDetails, options) {
  try {
  actualTree = await getActualTree(cwd);
  } catch (e) {
- debug.debugFn('error', 'caught: Arborist error');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'caught: Arborist error');
+ require$$6.debugDir('inspect', {
  error: e
  });
  }
@@ -4500,8 +4487,8 @@ async function pnpmFix(pkgEnvDetails, fixConfig) {
  alertsMap = purls.length ? await utils.getAlertsMapFromPurls(purls, getFixAlertsMapOptions()) : await utils.getAlertsMapFromPnpmLockfile(lockfile, getFixAlertsMapOptions());
  } catch (e) {
  spinner?.stop();
- debug.debugFn('error', 'caught: PURL API');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'caught: PURL API');
+ require$$6.debugDir('inspect', {
  error: e
  });
  return {
@@ -4657,7 +4644,7 @@ async function handleFix({
  }, outputKind);
  return;
  }
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  pkgEnvDetails
  });

@@ -4719,7 +4706,7 @@ const config$H = {
  ghsa: {
  type: 'string',
  default: [],
- description: `Provide a list of ${vendor.terminalLinkExports('GHSA IDs', 'https://docs.github.com/en/code-security/security-advisories/working-with-global-security-advisories-from-the-github-advisory-database/about-the-github-advisory-database#about-ghsa-ids')} to compute fixes for, as either a comma separated value or as multiple flags.\n Use '--ghsa auto' to automatically lookup GHSA IDs and compute fixes for them.`,
+ description: `Provide a list of ${vendor.terminalLinkExports('GHSA IDs', 'https://docs.github.com/en/code-security/security-advisories/working-with-global-security-advisories-from-the-github-advisory-database/about-the-github-advisory-database#about-ghsa-ids')} to compute fixes for, as either a comma separated value or as multiple flags.\n Use '--ghsa all' to lookup all GHSA IDs and compute fixes for them.`,
  isMultiple: true,
  hidden: true
  },
@@ -4936,9 +4923,9 @@ async function setupTabCompletion(targetName) {

  // Target dir is something like ~/.local/share/socket/settings/completion (linux)
  const targetDir = path.dirname(targetPath);
- debug.debugFn('notice', 'target: path + dir', targetPath, targetDir);
+ require$$6.debugFn('notice', 'target: path + dir', targetPath, targetDir);
  if (!fs$1.existsSync(targetDir)) {
- debug.debugFn('notice', 'create: target dir');
+ require$$6.debugFn('notice', 'create: target dir');
  fs$1.mkdirSync(targetDir, {
  recursive: true
  });
@@ -5096,14 +5083,14 @@ async function outputCmdJson(cwd) {
  process.exitCode = 1;
  return;
  }
- if (!utils.safeStatsSync(sockJsonPath)?.isFile()) {
+ if (!fs$2.safeStatsSync(sockJsonPath)?.isFile()) {
  logger.logger.fail(`This is not a regular file (maybe a directory?): ${tildeSockJsonPath}`);
  process.exitCode = 1;
  return;
  }
  logger.logger.success(`This is the contents of ${tildeSockJsonPath}:`);
  logger.logger.error('');
- const data = utils.safeReadFileSync(sockJsonPath);
+ const data = fs$2.safeReadFileSync(sockJsonPath);
  logger.logger.log(data);
  }

@@ -5821,7 +5808,7 @@ async function run$B(argv, importMeta, {
  }
  const sockJson = await utils.readOrDefaultSocketJson(cwd);
  const detected = await detectManifestActions(sockJson, cwd);
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  detected
  });
  if (cli.flags['dryRun']) {
@@ -6088,7 +6075,7 @@ async function run$z(argv, importMeta, {
  // If given path is absolute then cwd should not affect it.
  cwd = path.resolve(process.cwd(), cwd);
  const sockJson = await utils.readOrDefaultSocketJson(cwd);
- debug.debugFn('inspect', 'override: socket.json gradle', sockJson?.defaults?.manifest?.gradle);
+ require$$6.debugFn('inspect', 'override: socket.json gradle', sockJson?.defaults?.manifest?.gradle);

  // Set defaults for any flag/arg that is not given. Check socket.json first.
  if (!bin) {
@@ -6245,7 +6232,7 @@ async function run$y(argv, importMeta, {
  // If given path is absolute then cwd should not affect it.
  cwd = path.resolve(process.cwd(), cwd);
  const sockJson = await utils.readOrDefaultSocketJson(cwd);
- debug.debugFn('inspect', 'override: socket.json gradle', sockJson?.defaults?.manifest?.gradle);
+ require$$6.debugFn('inspect', 'override: socket.json gradle', sockJson?.defaults?.manifest?.gradle);

  // Set defaults for any flag/arg that is not given. Check socket.json first.
  if (!bin) {
@@ -6411,7 +6398,7 @@ async function run$x(argv, importMeta, {
  // If given path is absolute then cwd should not affect it.
  cwd = path.resolve(process.cwd(), cwd);
  const sockJson = await utils.readOrDefaultSocketJson(cwd);
- debug.debugFn('inspect', 'override: socket.json sbt', sockJson?.defaults?.manifest?.sbt);
+ require$$6.debugFn('inspect', 'override: socket.json sbt', sockJson?.defaults?.manifest?.sbt);

  // Set defaults for any flag/arg that is not given. Check socket.json first.
  if (!bin) {
@@ -6505,7 +6492,7 @@ async function outputManifestSetup(result) {

  async function setupManifestConfig(cwd, defaultOnReadError = false) {
  const detected = await detectManifestActions(null, cwd);
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  detected
  });

@@ -7679,7 +7666,7 @@ async function addOverrides(pkgEnvDetails, pkgPath, options) {
  let loggedAddingText = false;

  // Chunk package names to process them in parallel 3 at a time.
- await require$$8.pEach(manifestEntries, 3, async ({
+ await require$$8.pEach(manifestEntries, async ({
  1: data
  }) => {
  const {
@@ -7733,7 +7720,7 @@ async function addOverrides(pkgEnvDetails, pkgPath, options) {
  npmExecPath
  });
  // Chunk package names to process them in parallel 3 at a time.
- await require$$8.pEach(overridesDataObjects, 3, async ({
+ await require$$8.pEach(overridesDataObjects, async ({
  overrides,
  type
  }) => {
@@ -7782,12 +7769,16 @@ async function addOverrides(pkgEnvDetails, pkgPath, options) {
  }
  }
  }
+ }, {
+ concurrency: 3
  });
  }
+ }, {
+ concurrency: 3
  });
  if (isWorkspace) {
  // Chunk package names to process them in parallel 3 at a time.
- await require$$8.pEach(workspacePkgJsonPaths, 3, async workspacePkgJsonPath => {
+ await require$$8.pEach(workspacePkgJsonPaths, async workspacePkgJsonPath => {
  const otherState = await addOverrides(pkgEnvDetails, path.dirname(workspacePkgJsonPath), {
  logger,
  pin,
@@ -7799,6 +7790,8 @@ async function addOverrides(pkgEnvDetails, pkgPath, options) {
  state[key].add(value);
  }
  }
+ }, {
+ concurrency: 3
  });
  }
  if (state.added.size > 0 || state.updated.size > 0) {
@@ -7843,8 +7836,8 @@ async function updateLockfile(pkgEnvDetails, options) {
  }
  } catch (e) {
  spinner?.stop();
- debug.debugFn('error', 'fail: update');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'fail: update');
+ require$$6.debugDir('inspect', {
  error: e
  });
  return {
@@ -9209,7 +9202,7 @@ function formatReportCard(artifact, color) {
  };
  const alertString = getAlertString(artifact.alerts, !color);
  if (!artifact.ecosystem) {
- debug.debugFn('notice', 'miss: artifact ecosystem', artifact);
+ require$$6.debugFn('notice', 'miss: artifact ecosystem', artifact);
  }
  const purl = `pkg:${artifact.ecosystem}/${artifact.name}${artifact.version ? '@' + artifact.version : ''}`;
  return ['Package: ' + (color ? vendor.yoctocolorsCjsExports.bold(purl) : purl), '', ...Object.entries(scoreResult).map(score => `- ${score[0]}:`.padEnd(20, ' ') + ` ${formatScore(score[1], !color, true)}`), alertString].join('\n');
@@ -10018,8 +10011,8 @@ async function fetchListAllRepos(orgSlug, options) {
  desc: 'list of repositories'
  });
  if (!orgRepoListCResult.ok) {
- debug.debugFn('error', 'fail: fetch repo');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'fail: fetch repo');
+ require$$6.debugDir('inspect', {
  orgRepoListCResult
  });
  return orgRepoListCResult;
@@ -11604,7 +11597,7 @@ async function scanOneRepo(repoSlug, {
  };
  }
  const tmpDir = fs$1.mkdtempSync(path.join(os.tmpdir(), repoSlug));
- debug.debugFn('notice', 'init: temp dir for scan root', tmpDir);
+ require$$6.debugFn('notice', 'init: temp dir for scan root', tmpDir);
  const downloadResult = await testAndDownloadManifestFiles({
  files,
  tmpDir,
@@ -11717,11 +11710,11 @@ async function testAndDownloadManifestFile({
  repoApiUrl,
  tmpDir
  }) {
- debug.debugFn('notice', 'testing: file', file);
+ require$$6.debugFn('notice', 'testing: file', file);
  const supportedFilesCResult = await fetchSupportedScanFileNames();
  const supportedFiles = supportedFilesCResult.ok ? supportedFilesCResult.data : undefined;
  if (!supportedFiles || !utils.isReportSupportedFile(file, supportedFiles)) {
- debug.debugFn('notice', ' - skip: not a known pattern');
+ require$$6.debugFn('notice', ' - skip: not a known pattern');
  // Not an error.
  return {
  ok: true,
@@ -11730,7 +11723,7 @@ async function testAndDownloadManifestFile({
  }
  };
  }
- debug.debugFn('notice', 'found: manifest file, going to attempt to download it;', file);
+ require$$6.debugFn('notice', 'found: manifest file, going to attempt to download it;', file);
  const result = await downloadManifestFile({
  file,
  tmpDir,
@@ -11752,9 +11745,9 @@ async function downloadManifestFile({
  repoApiUrl,
  tmpDir
  }) {
- debug.debugFn('notice', 'request: download url from GitHub');
+ require$$6.debugFn('notice', 'request: download url from GitHub');
  const fileUrl = `${repoApiUrl}/contents/${file}?ref=${defaultBranch}`;
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  fileUrl
  });
  const downloadUrlResponse = await fetch(fileUrl, {
@@ -11763,9 +11756,9 @@ async function downloadManifestFile({
  Authorization: `Bearer ${githubToken}`
  }
  });
- debug.debugFn('notice', 'complete: request');
+ require$$6.debugFn('notice', 'complete: request');
  const downloadUrlText = await downloadUrlResponse.text();
- debug.debugFn('inspect', 'response: raw download url', downloadUrlText);
+ require$$6.debugFn('inspect', 'response: raw download url', downloadUrlText);
  let downloadUrl;
  try {
  downloadUrl = JSON.parse(downloadUrlText).download_url;
@@ -11778,7 +11771,7 @@ async function downloadManifestFile({
  };
  }
  const localPath = path.join(tmpDir, file);
- debug.debugFn('notice', 'download: manifest file started', downloadUrl, '->', localPath);
+ require$$6.debugFn('notice', 'download: manifest file started', downloadUrl, '->', localPath);

  // Now stream the file to that file...
  const result = await streamDownloadWithFetch(localPath, downloadUrl);
@@ -11787,7 +11780,7 @@ async function downloadManifestFile({
  logger.logger.fail(`Failed to download manifest file, skipping to next file. File: ${file}`);
  return result;
  }
- debug.debugFn('notice', 'download: manifest file completed');
+ require$$6.debugFn('notice', 'download: manifest file completed');
  return {
  ok: true,
  data: undefined
@@ -11839,7 +11832,7 @@ async function streamDownloadWithFetch(localPath, downloadUrl) {
  };
  } catch (error) {
  logger.logger.fail('An error was thrown while trying to download a manifest file... url:', downloadUrl);
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  error
  });

@@ -11863,7 +11856,7 @@ async function streamDownloadWithFetch(localPath, downloadUrl) {
  // If error was due to bad HTTP status
  detailedError += ` (HTTP Status: ${response.status} ${response.statusText})`;
  }
- debug.debugFn('error', detailedError);
+ require$$6.debugFn('error', detailedError);
  return {
  ok: false,
  message: 'Download Failed',
@@ -11880,14 +11873,14 @@ async function getLastCommitDetails({
  }) {
  logger.logger.info(`Requesting last commit for default branch ${defaultBranch} for ${orgGithub}/${repoSlug}...`);
  const commitApiUrl = `${repoApiUrl}/commits?sha=${defaultBranch}&per_page=1`;
- debug.debugFn('inspect', 'url: commit', commitApiUrl);
+ require$$6.debugFn('inspect', 'url: commit', commitApiUrl);
  const commitResponse = await fetch(commitApiUrl, {
  headers: {
  Authorization: `Bearer ${githubToken}`
  }
  });
  const commitText = await commitResponse.text();
- debug.debugFn('inspect', 'response: commit', commitText);
+ require$$6.debugFn('inspect', 'response: commit', commitText);
  let lastCommit;
  try {
  lastCommit = JSON.parse(commitText)?.[0];
@@ -11974,7 +11967,7 @@ async function getRepoDetails({
  repoSlug
  }) {
  const repoApiUrl = `${githubApiUrl}/repos/${orgGithub}/${repoSlug}`;
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  repoApiUrl
  });
  const repoDetailsResponse = await fetch(repoApiUrl, {
@@ -11985,7 +11978,7 @@ async function getRepoDetails({
  });
  logger.logger.success(`Request completed.`);
  const repoDetailsText = await repoDetailsResponse.text();
- debug.debugFn('inspect', 'response: repo', repoDetailsText);
+ require$$6.debugFn('inspect', 'response: repo', repoDetailsText);
  let repoDetails;
  try {
  repoDetails = JSON.parse(repoDetailsText);
@@ -12024,7 +12017,7 @@ async function getRepoBranchTree({
  }) {
  logger.logger.info(`Requesting default branch file tree; branch \`${defaultBranch}\`, repo \`${orgGithub}/${repoSlug}\`...`);
  const treeApiUrl = `${repoApiUrl}/git/trees/${defaultBranch}?recursive=1`;
- debug.debugFn('inspect', 'url: tree', treeApiUrl);
+ require$$6.debugFn('inspect', 'url: tree', treeApiUrl);
  const treeResponse = await fetch(treeApiUrl, {
  method: 'GET',
  headers: {
@@ -12032,7 +12025,7 @@ async function getRepoBranchTree({
  }
  });
  const treeText = await treeResponse.text();
- debug.debugFn('inspect', 'response: tree', treeText);
+ require$$6.debugFn('inspect', 'response: tree', treeText);
  let treeDetails;
  try {
  treeDetails = JSON.parse(treeText);
@@ -12061,7 +12054,7 @@ async function getRepoBranchTree({
  };
  }
  if (!treeDetails.tree || !Array.isArray(treeDetails.tree)) {
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  treeDetails: {
  tree: treeDetails.tree
  }
@@ -13373,8 +13366,8 @@ async function fetchScan(orgSlug, scanId) {
  return JSON.parse(line);
  } catch {
  ok = false;
- debug.debugFn('error', 'fail: parse NDJSON');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'fail: parse NDJSON');
+ require$$6.debugDir('inspect', {
  line
  });
  return null;
@@ -14267,8 +14260,8 @@ Do you want to install "safe npm" (this will create an alias to the socket-npm c
  }
  }
  } catch (e) {
- debug.debugFn('error', 'fail: setup tab completion');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'fail: setup tab completion');
+ require$$6.debugDir('inspect', {
  error: e
  });
  // Ignore. Skip tab completion setup.
@@ -14583,8 +14576,8 @@ void (async () => {
  });
  } catch (e) {
  process.exitCode = 1;
- debug.debugFn('error', 'Uncaught error (BAD!):');
- debug.debugDir('inspect', {
+ require$$6.debugFn('error', 'Uncaught error (BAD!):');
+ require$$6.debugDir('inspect', {
  error: e
  });
  let errorBody;
@@ -14628,7 +14621,7 @@ void (async () => {
  logger.logger.error('\n'); // Any-spinner-newline
  logger.logger.fail(utils.failMsgWithBadge(errorTitle, errorMessage));
  if (errorBody) {
- debug.debugDir('inspect', {
+ require$$6.debugDir('inspect', {
  errorBody
  });
  }
@@ -14636,5 +14629,5 @@ void (async () => {
  await utils.captureException(e);
  }
  })();
- //# debugId=17c5b5ba-1b60-498d-8e4a-b6943d431dd2
+ //# debugId=6fe85e04-7139-46a4-be64-93d64600371
  //# sourceMappingURL=cli.js.map