@socketsecurity/cli-with-sentry 0.15.14 → 0.15.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. package/dist/.config/tsconfig.dts.tsbuildinfo +1 -1
  2. package/dist/cli.js +848 -677
  3. package/dist/cli.js.map +1 -1
  4. package/dist/constants.js +3 -3
  5. package/dist/constants.js.map +1 -1
  6. package/dist/types/commands/fix/npm-fix.d.mts.map +1 -1
  7. package/dist/types/commands/fix/open-pr.d.mts.map +1 -1
  8. package/dist/types/commands/fix/pnpm-fix.d.mts.map +1 -1
  9. package/dist/types/commands/manifest/cmd-manifest-auto.d.mts.map +1 -1
  10. package/dist/types/commands/manifest/detect-manifest-actions.d.mts +7 -0
  11. package/dist/types/commands/manifest/detect-manifest-actions.d.mts.map +1 -0
  12. package/dist/types/commands/repos/cmd-repos-list.d.mts.map +1 -1
  13. package/dist/types/commands/repos/fetch-list-all-repos.d.mts +8 -0
  14. package/dist/types/commands/repos/fetch-list-all-repos.d.mts.map +1 -0
  15. package/dist/types/commands/repos/handle-list-repos.d.mts +3 -2
  16. package/dist/types/commands/repos/handle-list-repos.d.mts.map +1 -1
  17. package/dist/types/commands/repos/output-list-repos.d.mts +1 -1
  18. package/dist/types/commands/repos/output-list-repos.d.mts.map +1 -1
  19. package/dist/types/commands/scan/cmd-scan-create.d.mts.map +1 -1
  20. package/external/@socketsecurity/registry/lib/logger.d.ts +6 -1
  21. package/external/@socketsecurity/registry/lib/logger.js +39 -3
  22. package/external/@socketsecurity/registry/lib/spinner.js +13 -2
  23. package/external/@socketsecurity/registry/lib/strings.d.ts +7 -2
  24. package/external/@socketsecurity/registry/lib/strings.js +6 -0
  25. package/external/@socketsecurity/registry/package.json +1 -1
  26. package/package.json +3 -2
package/dist/cli.js CHANGED
@@ -3516,7 +3516,7 @@ async function enablePrAutoMerge({
  if (error instanceof vendor.GraphqlResponseError && error.errors) {
  const details = error.errors.map(({
  message
- }) => ` - ${message.trim()}`).join('\n');
+ }) => ` - ${message.trim()}`).join('\n').trim();
  message += `:\n${details}`;
  }
  logger.logger.error(message);
@@ -3686,11 +3686,18 @@ async function npmFix(pkgEnvDetails, {
  // Calling arb.reify() creates the arb.diff object, nulls-out arb.idealTree,
  // and populates arb.actualTree.
  let actualTree = await arb.reify();
- const alertsMap = purls.length ? await utils.getAlertsMapFromPurls(purls, getAlertMapOptions({
- limit
- })) : await shadowInject.getAlertsMapFromArborist(arb, getAlertMapOptions({
- limit
- }));
+ let alertsMap;
+ try {
+ alertsMap = purls.length ? await utils.getAlertsMapFromPurls(purls, getAlertMapOptions({
+ limit
+ })) : await shadowInject.getAlertsMapFromArborist(arb, getAlertMapOptions({
+ limit
+ }));
+ } catch (e) {
+ spinner?.stop();
+ logger.logger.error(e?.message || 'Unknown Socket batch PURL API error');
+ return;
+ }
  const infoByPkgName = utils.getCveInfoFromAlertsMap(alertsMap, {
  limit
  });
@@ -3713,11 +3720,11 @@ async function npmFix(pkgEnvDetails, {
  infoEntriesLoop: for (let i = 0, {
  length
  } = sortedInfoEntries; i < length; i += 1) {
+ const isLastInfoEntry = i === length - 1;
  const {
  0: name,
  1: infos
  } = sortedInfoEntries[i];
- const isLastInfoEntry = i === length - 1;
  logger.logger.log(`Processing vulnerable package: ${name}`);
  logger.logger.indent();
  spinner?.indent();
@@ -3736,17 +3743,14 @@ async function npmFix(pkgEnvDetails, {
  const warningsForAfter = new Set();
 
  // eslint-disable-next-line no-unused-labels
- for (const pkgJsonPath of pkgJsonPaths) {
+ for (let j = 0, {
+ length: length_j
+ } = pkgJsonPaths; j < length_j; j += 1) {
+ const isLastPkgJsonPath = j === length_j - 1;
+ const pkgJsonPath = pkgJsonPaths[j];
  const pkgPath = path.dirname(pkgJsonPath);
  const isWorkspaceRoot = pkgJsonPath === pkgEnvDetails.editablePkgJson.filename;
  const workspaceName = isWorkspaceRoot ? 'root' : path.relative(rootPath, pkgPath);
- logger.logger.log(`Checking workspace: ${workspaceName}`);
- const workspaceLogCallCount = logger.logger.logCallCount;
-
- // eslint-disable-next-line no-await-in-loop
- actualTree = await install$1(arb, {
- cwd
- });
  const oldVersions = arrays.arrayUnique(shadowInject.findPackageNodes(actualTree, name).map(n => n.target?.version ?? n.version).filter(Boolean));
  if (!oldVersions.length) {
  logger.logger.warn(`Unexpected condition: Lockfile entries not found for ${name}.\n`);
@@ -3762,12 +3766,21 @@ async function npmFix(pkgEnvDetails, {
  const editablePkgJson = await packages.readPackageJson(pkgJsonPath, {
  editable: true
  });
+ let hasAnnouncedWorkspace = false;
+ let workspaceLogCallCount = logger.logger.logCallCount;
+ if (debug.isDebug()) {
+ debug.debugLog(`Checking workspace: ${workspaceName}`);
+ hasAnnouncedWorkspace = true;
+ workspaceLogCallCount = logger.logger.logCallCount;
+ }
  oldVersionsLoop: for (const oldVersion of oldVersions) {
  const oldId = `${name}@${oldVersion}`;
  const oldPurl = utils.idToPurl(oldId);
  const node = shadowInject.findPackageNode(actualTree, name, oldVersion);
  if (!node) {
- logger.logger.warn(`Unexpected condition: Arborist node not found, skipping ${oldId}`);
+ if (hasAnnouncedWorkspace) {
+ logger.logger.warn(`Unexpected condition: Arborist node not found, skipping ${oldId}`);
+ }
  continue oldVersionsLoop;
  }
  infosLoop: for (const {
@@ -3807,18 +3820,18 @@ async function npmFix(pkgEnvDetails, {
  if (!(await editablePkgJson.save({
  ignoreWhitespace: true
  }))) {
- logger.logger.info(`${workspaceName}/package.json not changed, skipping`);
+ debug.debugLog(`${workspaceName}/package.json not changed, skipping`);
  // Reset things just in case.
  if (isCi) {
  // eslint-disable-next-line no-await-in-loop
  await gitResetAndClean(baseBranch, cwd);
- // eslint-disable-next-line no-await-in-loop
- actualTree = await install$1(arb, {
- cwd
- });
  }
  continue infosLoop;
  }
+ if (!hasAnnouncedWorkspace) {
+ hasAnnouncedWorkspace = true;
+ workspaceLogCallCount = logger.logger.logCallCount;
+ }
  spinner?.start();
  spinner?.info(`Installing ${newId} in ${workspaceName}`);
  let error;
@@ -3836,14 +3849,25 @@ async function npmFix(pkgEnvDetails, {
  stdio: 'ignore'
  });
  }
- spinner?.successAndStop(`Fixed ${name} in ${workspaceName}`);
+ spinner?.success(`Fixed ${name} in ${workspaceName}`);
  } catch (e) {
  errored = true;
  error = e;
  }
+ spinner?.stop();
  if (!errored && isCi) {
  const branch = getSocketBranchName(oldPurl, newVersion, workspaceName);
  try {
+ const moddedFilepaths =
+ // eslint-disable-next-line no-await-in-loop
+ (await gitUnstagedModifiedFiles(cwd)).filter(p => {
+ const basename = path.basename(p);
+ return basename === 'package.json' || basename === 'package-lock.json';
+ });
+ if (!moddedFilepaths.length) {
+ logger.logger.warn('Unexpected condition: Nothing to commit, skipping PR creation.');
+ continue infosLoop;
+ }
  const {
  owner,
  repo
@@ -3851,27 +3875,35 @@ async function npmFix(pkgEnvDetails, {
  // eslint-disable-next-line no-await-in-loop
  if (await prExistForBranch(owner, repo, branch)) {
  debug.debugLog(`Branch "${branch}" exists, skipping PR creation.`);
+ // eslint-disable-next-line no-await-in-loop
+ await gitResetAndClean(baseBranch, cwd);
+ // eslint-disable-next-line no-await-in-loop
+ actualTree = await install$1(arb, {
+ cwd
+ });
  continue infosLoop;
  }
  // eslint-disable-next-line no-await-in-loop
  if (await gitRemoteBranchExists(branch, cwd)) {
  debug.debugLog(`Remote branch "${branch}" exists, skipping PR creation.`);
- continue infosLoop;
- }
- const moddedFilepaths =
- // eslint-disable-next-line no-await-in-loop
- (await gitUnstagedModifiedFiles(cwd)).filter(p => {
- const basename = path.basename(p);
- return basename === 'package.json' || basename === 'package-lock.json';
- });
- if (!moddedFilepaths.length) {
- logger.logger.warn('Unexpected condition: Nothing to commit, skipping PR creation.');
+ // eslint-disable-next-line no-await-in-loop
+ await gitResetAndClean(baseBranch, cwd);
+ // eslint-disable-next-line no-await-in-loop
+ actualTree = await install$1(arb, {
+ cwd
+ });
  continue infosLoop;
  }
  if (
  // eslint-disable-next-line no-await-in-loop
  !(await gitCreateAndPushBranch(branch, getSocketCommitMessage(oldPurl, newVersion, workspaceName), moddedFilepaths, cwd))) {
  logger.logger.warn('Unexpected condition: Push failed, skipping PR creation.');
+ // eslint-disable-next-line no-await-in-loop
+ await gitResetAndClean(baseBranch, cwd);
+ // eslint-disable-next-line no-await-in-loop
+ actualTree = await install$1(arb, {
+ cwd
+ });
  continue infosLoop;
  }
  // eslint-disable-next-line no-await-in-loop
@@ -3888,10 +3920,14 @@ async function npmFix(pkgEnvDetails, {
  const {
  data
  } = prResponse;
- logger.logger.info(`Opened PR #${data.number}.`);
+ logger.logger.success(`Opened PR #${data.number}.`);
  if (autoMerge) {
+ logger.logger.indent();
+ spinner?.indent();
  // eslint-disable-next-line no-await-in-loop
  await enablePrAutoMerge(data);
+ logger.logger.dedent();
+ spinner?.dedent();
  }
  }
  } catch (e) {
@@ -3909,6 +3945,7 @@ async function npmFix(pkgEnvDetails, {
  }
  if (errored) {
  if (!isCi) {
+ spinner?.start();
  editablePkgJson.update(revertData);
  // eslint-disable-next-line no-await-in-loop
  await Promise.all([utils.removeNodeModules(cwd), editablePkgJson.save({
@@ -3918,8 +3955,9 @@ async function npmFix(pkgEnvDetails, {
  actualTree = await install$1(arb, {
  cwd
  });
+ spinner?.stop();
  }
- spinner?.failAndStop(`Update failed for ${oldId} in ${workspaceName}`, error);
+ logger.logger.fail(`Update failed for ${oldId} in ${workspaceName}`, error);
  }
  if (++count >= limit) {
  logger.logger.dedent();
@@ -3928,15 +3966,15 @@ async function npmFix(pkgEnvDetails, {
  }
  }
  }
- if (logger.logger.logCallCount > workspaceLogCallCount) {
- logger.logger.log('');
+ if (!isLastPkgJsonPath && logger.logger.logCallCount > workspaceLogCallCount) {
+ logger.logger.logNewline();
  }
  }
  for (const warningText of warningsForAfter) {
  logger.logger.warn(warningText);
  }
  if (!isLastInfoEntry) {
- logger.logger.log('');
+ logger.logger.logNewline();
  }
  logger.logger.dedent();
  spinner?.dedent();
@@ -4026,17 +4064,25 @@ async function pnpmFix(pkgEnvDetails, {
  });
  lockfile = await utils.readPnpmLockfile(lockfilePath);
  }
+
  // Exit early if pnpm-lock.yaml is not found.
  if (!lockfile) {
  spinner?.stop();
  logger.logger.error('Required pnpm-lock.yaml not found.');
  return;
  }
- const alertsMap = purls.length ? await utils.getAlertsMapFromPurls(purls, getAlertMapOptions({
- limit
- })) : await utils.getAlertsMapFromPnpmLockfile(lockfile, getAlertMapOptions({
- limit
- }));
+ let alertsMap;
+ try {
+ alertsMap = purls.length ? await utils.getAlertsMapFromPurls(purls, getAlertMapOptions({
+ limit
+ })) : await utils.getAlertsMapFromPnpmLockfile(lockfile, getAlertMapOptions({
+ limit
+ }));
+ } catch (e) {
+ spinner?.stop();
+ logger.logger.error(e?.message || 'Unknown Socket batch PURL API error');
+ return;
+ }
  const infoByPkgName = utils.getCveInfoFromAlertsMap(alertsMap, {
  limit
  });
@@ -4059,11 +4105,11 @@ async function pnpmFix(pkgEnvDetails, {
  infoEntriesLoop: for (let i = 0, {
  length
  } = sortedInfoEntries; i < length; i += 1) {
+ const isLastInfoEntry = i === length - 1;
  const {
  0: name,
  1: infos
  } = sortedInfoEntries[i];
- const isLastInfoEntry = i === length - 1;
  logger.logger.log(`Processing vulnerable package: ${name}`);
  logger.logger.indent();
  spinner?.indent();
@@ -4082,18 +4128,26 @@ async function pnpmFix(pkgEnvDetails, {
  const warningsForAfter = new Set();
 
  // eslint-disable-next-line no-unused-labels
- for (const pkgJsonPath of pkgJsonPaths) {
+ for (let j = 0, {
+ length: length_j
+ } = pkgJsonPaths; j < length_j; j += 1) {
+ const isLastPkgJsonPath = j === length_j - 1;
+ const pkgJsonPath = pkgJsonPaths[j];
  const pkgPath = path.dirname(pkgJsonPath);
  const isWorkspaceRoot = pkgJsonPath === pkgEnvDetails.editablePkgJson.filename;
  const workspaceName = isWorkspaceRoot ? 'root' : path.relative(rootPath, pkgPath);
- logger.logger.log(`Checking workspace: ${workspaceName}`);
- const workspaceLogCallCount = logger.logger.logCallCount;
 
- // eslint-disable-next-line no-await-in-loop
- actualTree = await install(pkgEnvDetails, {
- cwd,
- spinner
- });
+ // actualTree may not be defined on the first iteration of pkgJsonPathsLoop.
+ if (!actualTree) {
+ actualTree = fs$1.existsSync(path.join(rootPath, 'node_modules')) ?
+ // eslint-disable-next-line no-await-in-loop
+ await getActualTree(cwd) :
+ // eslint-disable-next-line no-await-in-loop
+ await install(pkgEnvDetails, {
+ cwd,
+ spinner
+ });
+ }
  const oldVersions = arrays.arrayUnique(shadowInject.findPackageNodes(actualTree, name).map(n => n.version).filter(Boolean));
  if (!oldVersions.length) {
  logger.logger.warn(`Unexpected condition: Lockfile entries not found for ${name}.\n`);
@@ -4109,15 +4163,24 @@ async function pnpmFix(pkgEnvDetails, {
  const editablePkgJson = await packages.readPackageJson(pkgJsonPath, {
  editable: true
  });
- // Get current overrides for revert logic
+ // Get current overrides for revert logic.
  const oldPnpmSection = editablePkgJson.content[PNPM$7];
  const oldOverrides = oldPnpmSection?.[OVERRIDES$2];
+ let hasAnnouncedWorkspace = false;
+ let workspaceLogCallCount = logger.logger.logCallCount;
+ if (debug.isDebug()) {
+ debug.debugLog(`Checking workspace: ${workspaceName}`);
+ hasAnnouncedWorkspace = true;
+ workspaceLogCallCount = logger.logger.logCallCount;
+ }
  oldVersionsLoop: for (const oldVersion of oldVersions) {
  const oldId = `${name}@${oldVersion}`;
  const oldPurl = utils.idToPurl(oldId);
  const node = shadowInject.findPackageNode(actualTree, name, oldVersion);
  if (!node) {
- logger.logger.warn(`Unexpected condition: Arborist node not found, skipping ${oldId}`);
+ if (hasAnnouncedWorkspace) {
+ logger.logger.warn(`Unexpected condition: Arborist node not found, skipping ${oldId}`);
+ }
  continue oldVersionsLoop;
  }
  infosLoop: for (const {
@@ -4176,19 +4239,18 @@ async function pnpmFix(pkgEnvDetails, {
  if (!(await editablePkgJson.save({
  ignoreWhitespace: true
  }))) {
- logger.logger.info(`${workspaceName}/package.json not changed, skipping`);
+ debug.debugLog(`${workspaceName}/package.json not changed, skipping`);
  // Reset things just in case.
  if (isCi) {
  // eslint-disable-next-line no-await-in-loop
  await gitResetAndClean(baseBranch, cwd);
- // eslint-disable-next-line no-await-in-loop
- actualTree = await install(pkgEnvDetails, {
- cwd,
- spinner
- });
  }
  continue infosLoop;
  }
+ if (!hasAnnouncedWorkspace) {
+ hasAnnouncedWorkspace = true;
+ workspaceLogCallCount = logger.logger.logCallCount;
+ }
  spinner?.start();
  spinner?.info(`Installing ${newId} in ${workspaceName}`);
  let error;
@@ -4207,15 +4269,25 @@ async function pnpmFix(pkgEnvDetails, {
  stdio: 'ignore'
  });
  }
- spinner?.successAndStop(`Fixed ${name} in ${workspaceName}`);
+ spinner?.success(`Fixed ${name} in ${workspaceName}`);
  } catch (e) {
  error = e;
  errored = true;
- spinner?.stop();
  }
+ spinner?.stop();
  if (!errored && isCi) {
  const branch = getSocketBranchName(oldPurl, newVersion, workspaceName);
  try {
+ const moddedFilepaths =
+ // eslint-disable-next-line no-await-in-loop
+ (await gitUnstagedModifiedFiles(cwd)).filter(p => {
+ const basename = path.basename(p);
+ return basename === 'package.json' || basename === 'pnpm-lock.yaml';
+ });
+ if (!moddedFilepaths.length) {
+ logger.logger.warn('Unexpected condition: Nothing to commit, skipping PR creation.');
+ continue infosLoop;
+ }
  const {
  owner,
  repo
@@ -4223,27 +4295,38 @@ async function pnpmFix(pkgEnvDetails, {
  // eslint-disable-next-line no-await-in-loop
  if (await prExistForBranch(owner, repo, branch)) {
  debug.debugLog(`Branch "${branch}" exists, skipping PR creation.`);
+ // eslint-disable-next-line no-await-in-loop
+ await gitResetAndClean(baseBranch, cwd);
+ // eslint-disable-next-line no-await-in-loop
+ actualTree = await install(pkgEnvDetails, {
+ cwd,
+ spinner
+ });
  continue infosLoop;
  }
  // eslint-disable-next-line no-await-in-loop
  if (await gitRemoteBranchExists(branch, cwd)) {
  debug.debugLog(`Remote branch "${branch}" exists, skipping PR creation.`);
- continue infosLoop;
- }
- const moddedFilepaths =
- // eslint-disable-next-line no-await-in-loop
- (await gitUnstagedModifiedFiles(cwd)).filter(p => {
- const basename = path.basename(p);
- return basename === 'package.json' || basename === 'pnpm-lock.yaml';
- });
- if (!moddedFilepaths.length) {
- logger.logger.warn('Unexpected condition: Nothing to commit, skipping PR creation.');
+ // eslint-disable-next-line no-await-in-loop
+ await gitResetAndClean(baseBranch, cwd);
+ // eslint-disable-next-line no-await-in-loop
+ actualTree = await install(pkgEnvDetails, {
+ cwd,
+ spinner
+ });
  continue infosLoop;
  }
  if (
  // eslint-disable-next-line no-await-in-loop
  !(await gitCreateAndPushBranch(branch, getSocketCommitMessage(oldPurl, newVersion, workspaceName), moddedFilepaths, cwd))) {
  logger.logger.warn('Unexpected condition: Push failed, skipping PR creation.');
+ // eslint-disable-next-line no-await-in-loop
+ await gitResetAndClean(baseBranch, cwd);
+ // eslint-disable-next-line no-await-in-loop
+ actualTree = await install(pkgEnvDetails, {
+ cwd,
+ spinner
+ });
  continue infosLoop;
  }
  // eslint-disable-next-line no-await-in-loop
@@ -4260,10 +4343,14 @@ async function pnpmFix(pkgEnvDetails, {
  const {
  data
  } = prResponse;
- logger.logger.info(`Opened PR #${data.number}.`);
+ logger.logger.success(`Opened PR #${data.number}.`);
  if (autoMerge) {
+ logger.logger.indent();
+ spinner?.indent();
  // eslint-disable-next-line no-await-in-loop
  await enablePrAutoMerge(data);
+ logger.logger.dedent();
+ spinner?.dedent();
  }
  }
  } catch (e) {
@@ -4282,6 +4369,7 @@ async function pnpmFix(pkgEnvDetails, {
  }
  if (errored) {
  if (!isCi) {
+ spinner?.start();
  editablePkgJson.update(revertData);
  // eslint-disable-next-line no-await-in-loop
  await Promise.all([utils.removeNodeModules(cwd), editablePkgJson.save({
@@ -4292,8 +4380,9 @@ async function pnpmFix(pkgEnvDetails, {
  cwd,
  spinner
  });
+ spinner?.stop();
  }
- spinner?.failAndStop(`Update failed for ${oldId} in ${workspaceName}`, error);
+ logger.logger.fail(`Update failed for ${oldId} in ${workspaceName}`, error);
  }
  if (++count >= limit) {
  logger.logger.dedent();
@@ -4302,15 +4391,15 @@ async function pnpmFix(pkgEnvDetails, {
  }
  }
  }
- if (logger.logger.logCallCount > workspaceLogCallCount) {
- logger.logger.log('');
+ if (!isLastPkgJsonPath && logger.logger.logCallCount > workspaceLogCallCount) {
+ logger.logger.logNewline();
  }
  }
  for (const warningText of warningsForAfter) {
  logger.logger.warn(warningText);
  }
  if (!isLastInfoEntry) {
- logger.logger.log('');
+ logger.logger.logNewline();
  }
  logger.logger.dedent();
  spinner?.dedent();
@@ -5091,80 +5180,321 @@ async function run$z(argv, importMeta, {
5091
5180
  attemptLogout();
5092
5181
  }
5093
5182
 
5094
- async function convertCondaToRequirements(target, cwd, verbose) {
5095
- let contents;
5096
- if (target === '-') {
5097
- if (verbose) {
5098
- logger.logger.info(`[VERBOSE] reading input from stdin`);
5183
+ async function convertGradleToMaven(target, bin, cwd, verbose, gradleOpts) {
5184
+ // TODO: impl json/md
5185
+ if (verbose) {
5186
+ logger.logger.log('[VERBOSE] Resolving:', [cwd, bin]);
5187
+ }
5188
+ const rbin = path.resolve(cwd, bin);
5189
+ if (verbose) {
5190
+ logger.logger.log('[VERBOSE] Resolving:', [cwd, target]);
5191
+ }
5192
+ const rtarget = path.resolve(cwd, target);
5193
+ const binExists = fs$1.existsSync(rbin);
5194
+ const targetExists = fs$1.existsSync(rtarget);
5195
+ logger.logger.group('gradle2maven:');
5196
+ if (verbose || debug.isDebug()) {
5197
+ logger.logger.log(`[VERBOSE] - Absolute bin path: \`${rbin}\` (${binExists ? 'found' : vendor.yoctocolorsCjsExports.red('not found!')})`);
5198
+ logger.logger.log(`[VERBOSE] - Absolute target path: \`${rtarget}\` (${targetExists ? 'found' : vendor.yoctocolorsCjsExports.red('not found!')})`);
5199
+ } else {
5200
+ logger.logger.log(`- executing: \`${rbin}\``);
5201
+ if (!binExists) {
5202
+ logger.logger.warn('Warning: It appears the executable could not be found at this location. An error might be printed later because of that.');
5099
5203
  }
5100
- const buf = [];
5101
- contents = await new Promise((resolve, reject) => {
5102
- process.stdin.on('data', chunk => {
5103
- const input = chunk.toString();
5104
- buf.push(input);
5105
- });
5106
- process.stdin.on('end', () => {
5107
- resolve(buf.join(''));
5108
- });
5109
- process.stdin.on('error', e => {
5110
- if (verbose) {
5111
- logger.logger.error('Unexpected error while reading from stdin:', e);
5112
- }
5113
- reject(e);
5114
- });
5115
- process.stdin.on('close', () => {
5116
- if (buf.length === 0) {
5117
- if (verbose) {
5118
- logger.logger.error('stdin closed explicitly without data received');
5119
- }
5120
- reject(new Error('No data received from stdin'));
5121
- } else {
5122
- if (verbose) {
5123
- logger.logger.error('warning: stdin closed explicitly with some data received');
5124
- }
5125
- resolve(buf.join(''));
5126
- }
5127
- });
5128
- });
5129
- if (!contents) {
5130
- return {
5131
- ok: false,
5132
- message: 'Manifest Generation Failed',
5133
- cause: 'No data received from stdin'
5134
- };
5204
+ logger.logger.log(`- src dir: \`${rtarget}\``);
5205
+ if (!targetExists) {
5206
+ logger.logger.warn('Warning: It appears the src dir could not be found at this location. An error might be printed later because of that.');
5135
5207
  }
5136
- } else {
5137
- const f = path.resolve(cwd, target);
5208
+ }
5209
+ logger.logger.groupEnd();
5210
+ try {
5211
+ // Run gradlew with the init script we provide which should yield zero or more
5212
+ // pom files. We have to figure out where to store those pom files such that
5213
+ // we can upload them and predict them through the GitHub API. We could do a
5214
+ // .socket folder. We could do a socket.pom.gz with all the poms, although
5215
+ // I'd prefer something plain-text if it is to be committed.
5216
+
5217
+ // Note: init.gradle will be exported by .config/rollup.dist.config.mjs
5218
+ const initLocation = path.join(constants.distPath, 'init.gradle');
5219
+ const commandArgs = ['--init-script', initLocation, ...gradleOpts, 'pom'];
5138
5220
  if (verbose) {
5139
- logger.logger.info(`[VERBOSE] target file: ${f}`);
5221
+ logger.logger.log('[VERBOSE] Executing:', [bin], ', args:', commandArgs);
5140
5222
  }
5141
- if (!fs$1.existsSync(f)) {
5142
- return {
5143
- ok: false,
5144
- message: 'Manifest Generation Failed',
5145
- cause: `Input file not found at ${f}`
5146
- };
5223
+ logger.logger.log(`Converting gradle to maven from \`${bin}\` on \`${target}\` ...`);
5224
+ const output = await execGradleWithSpinner(rbin, commandArgs, rtarget, cwd);
5225
+ if (verbose) {
5226
+ logger.logger.group('[VERBOSE] gradle stdout:');
5227
+ logger.logger.log(output);
5228
+ logger.logger.groupEnd();
5147
5229
  }
5148
- contents = fs$1.readFileSync(target, 'utf8');
5149
- if (!contents) {
5150
- return {
5151
- ok: false,
5152
- message: 'Manifest Generation Failed',
5153
- cause: 'File is empty'
5154
- };
5230
+ if (output.code !== 0) {
5231
+ process.exitCode = 1;
5232
+ logger.logger.fail(`Gradle exited with exit code ${output.code}`);
5233
+ // (In verbose mode, stderr was printed above, no need to repeat it)
5234
+ if (!verbose) {
5235
+ logger.logger.group('stderr:');
5236
+ logger.logger.error(output.stderr);
5237
+ logger.logger.groupEnd();
5238
+ }
5239
+ return;
5240
+ }
5241
+ logger.logger.success('Executed gradle successfully');
5242
+ logger.logger.log('Reported exports:');
5243
+ output.stdout.replace(/^POM file copied to: (.*)/gm, (_all, fn) => {
5244
+ logger.logger.log('- ', fn);
5245
+ return fn;
5246
+ });
5247
+ logger.logger.log('');
5248
+ logger.logger.log('Next step is to generate a Scan by running the `socket scan create` command on the same directory');
5249
+ } catch (e) {
5250
+ process.exitCode = 1;
5251
+ logger.logger.fail('There was an unexpected error while generating manifests' + (verbose ? '' : ' (use --verbose for details)'));
5252
+ if (verbose) {
5253
+ logger.logger.group('[VERBOSE] error:');
5254
+ logger.logger.log(e);
5255
+ logger.logger.groupEnd();
5155
5256
  }
5156
5257
  }
5157
- return {
5158
- ok: true,
5159
- data: {
5160
- contents,
5161
- pip: convertCondaToRequirementsFromInput(contents)
5258
+ }
5259
+ async function execGradleWithSpinner(bin, commandArgs, target, cwd) {
5260
+ // Lazily access constants.spinner.
5261
+ const {
5262
+ spinner
5263
+ } = constants;
5264
+ let pass = false;
5265
+ try {
5266
+ spinner.start(`Running gradlew... (this can take a while, it depends on how long gradlew has to run)`);
5267
+ const output = await spawn.spawn(bin, commandArgs, {
5268
+ // We can pipe the output through to have the user see the result
5269
+ // of running gradlew, but then we can't (easily) gather the output
5270
+ // to discover the generated files... probably a flag we should allow?
5271
+ // stdio: isDebug() ? 'inherit' : undefined,
5272
+ cwd: target || cwd
5273
+ });
5274
+ pass = true;
5275
+ const {
5276
+ code,
5277
+ stderr,
5278
+ stdout
5279
+ } = output;
5280
+ return {
5281
+ code,
5282
+ stdout,
5283
+ stderr
5284
+ };
5285
+ } finally {
5286
+ if (pass) {
5287
+ spinner.successAndStop('Completed gradlew execution');
5288
+ } else {
5289
+ spinner.failAndStop('There was an error while trying to run gradlew.');
5162
5290
  }
5163
- };
5291
+ }
5164
5292
  }
5165
5293
 
5166
- // Just extract the first pip block, if one exists at all.
5167
- function convertCondaToRequirementsFromInput(input) {
5294
+ async function convertSbtToMaven(target, bin, out, verbose, sbtOpts) {
5295
+ // TODO: impl json/md
5296
+
5297
+ // Lazily access constants.spinner.
5298
+ const {
5299
+ spinner
5300
+ } = constants;
5301
+ const rbin = path.resolve(bin);
5302
+ const rtarget = path.resolve(target);
5303
+ if (verbose) {
5304
+ logger.logger.group('sbt2maven:');
5305
+ logger.logger.log(`[VERBOSE] - Absolute bin path: \`${rbin}\``);
5306
+ logger.logger.log(`[VERBOSE] - Absolute target path: \`${rtarget}\``);
5307
+ // logger.log(`[VERBOSE] - Absolute out path: \`${rout}\``)
5308
+ logger.logger.groupEnd();
5309
+ } else {
5310
+ logger.logger.group('sbt2maven:');
5311
+ logger.logger.log(`- executing: \`${bin}\``);
5312
+ logger.logger.log(`- src dir: \`${target}\``);
5313
+ // logger.log(`- dst dir: \`${out}\``)
5314
+ logger.logger.groupEnd();
5315
+ }
5316
+ try {
5317
+ spinner.start(`Converting sbt to maven from \`${bin}\` on \`${target}\`...`);
5318
+
5319
+ // Run sbt with the init script we provide which should yield zero or more
5320
+ // pom files. We have to figure out where to store those pom files such that
5321
+ // we can upload them and predict them through the GitHub API. We could do a
5322
+ // .socket folder. We could do a socket.pom.gz with all the poms, although
5323
+ // I'd prefer something plain-text if it is to be committed.
5324
+ const output = await spawn.spawn(bin, ['makePom'].concat(sbtOpts), {
5325
+ cwd: target || '.'
5326
+ });
5327
+ spinner.stop();
5328
+ if (verbose) {
5329
+ logger.logger.group('[VERBOSE] sbt stdout:');
5330
+ logger.logger.log(output);
5331
+ logger.logger.groupEnd();
5332
+ }
5333
+ if (output.stderr) {
5334
+ process.exitCode = 1;
5335
+ logger.logger.fail('There were errors while running sbt');
5336
+ // (In verbose mode, stderr was printed above, no need to repeat it)
5337
+ if (!verbose) {
5338
+ logger.logger.group('[VERBOSE] stderr:');
5339
+ logger.logger.error(output.stderr);
5340
+ logger.logger.groupEnd();
5341
+ }
5342
+ return;
5343
+ }
5344
+ const poms = [];
5345
+ output.stdout.replace(/Wrote (.*?.pom)\n/g, (_all, fn) => {
5346
+ poms.push(fn);
5347
+ return fn;
5348
+ });
5349
+ if (!poms.length) {
5350
+ process.exitCode = 1;
5351
+ logger.logger.fail('There were no errors from sbt but it seems to not have generated any poms either');
5352
+ return;
5353
+ }
5354
+ // Move the pom file to ...? initial cwd? loc will be an absolute path, or dump to stdout
5355
+ // TODO: what to do with multiple output files? Do we want to dump them to stdout? Raw or with separators or ?
5356
+ // TODO: maybe we can add an option to target a specific file to dump to stdout
5357
+ if (out === '-' && poms.length === 1) {
5358
+ logger.logger.log('Result:\n```');
5359
+ logger.logger.log(await utils.safeReadFile(poms[0]));
5360
+ logger.logger.log('```');
5361
+ logger.logger.success(`OK`);
5362
+ } else if (out === '-') {
5363
+ process.exitCode = 1;
5364
+ logger.logger.fail('Requested out target was stdout but there are multiple generated files');
5365
+ poms.forEach(fn => logger.logger.error('-', fn));
5366
+ logger.logger.info('Exiting now...');
5367
+ return;
5368
+ } else {
5369
+ // if (verbose) {
5370
+ // logger.log(
5371
+ // `Moving manifest file from \`${loc.replace(/^\/home\/[^/]*?\//, '~/')}\` to \`${out}\``
5372
+ // )
5373
+ // } else {
5374
+ // logger.log('Moving output pom file')
5375
+ // }
5376
+ // TODO: do we prefer fs-extra? renaming can be gnarly on windows and fs-extra's version is better
5377
+ // await renamep(loc, out)
5378
+ logger.logger.success(`Generated ${poms.length} pom files`);
5379
+ poms.forEach(fn => logger.logger.log('-', fn));
5380
+ logger.logger.success(`OK`);
5381
+ }
5382
+ } catch (e) {
5383
+ process.exitCode = 1;
5384
+ spinner.stop();
5385
+ logger.logger.fail('There was an unexpected error while running this' + (verbose ? '' : ' (use --verbose for details)'));
5386
+ if (verbose) {
5387
+ logger.logger.group('[VERBOSE] error:');
5388
+ logger.logger.log(e);
5389
+ logger.logger.groupEnd();
5390
+ }
5391
+ }
5392
+ }
5393
+
5394
+ // The point here is to attempt to detect the various supported manifest files
5395
+ // the CLI can generate. This would be environments that we can't do server side
5396
+
5397
+ async function detectManifestActions(cwd = process.cwd()) {
5398
+ const output = {
5399
+ cdxgen: false,
5400
+ // TODO
5401
+ conda: false,
5402
+ gradle: false,
5403
+ sbt: false
5404
+ };
5405
+ if (fs$1.existsSync(path.join(cwd, 'build.sbt'))) {
5406
+ debug.debugLog('Detected a Scala sbt build, running default Scala generator...');
5407
+ output.sbt = true;
5408
+ }
5409
+ if (fs$1.existsSync(path.join(cwd, 'gradlew'))) {
5410
+ debug.debugLog('Detected a gradle build, running default gradle generator...');
5411
+ output.gradle = true;
5412
+ }
5413
+ const envyml = path.join(cwd, 'environment.yml');
5414
+ const hasEnvyml = fs$1.existsSync(envyml);
5415
+ const envyaml = path.join(cwd, 'environment.yaml');
5416
+ const hasEnvyaml = !hasEnvyml && fs$1.existsSync(envyaml);
5417
+ if (hasEnvyml || hasEnvyaml) {
5418
+ debug.debugLog('Detected an environment.yml file, running default Conda generator...');
5419
+ output.conda = true;
5420
+ }
5421
+ return output;
5422
+ }
5423
+
5424
+ async function convertCondaToRequirements(target, cwd, verbose) {
5425
+ let contents;
5426
+ if (target === '-') {
5427
+ if (verbose) {
5428
+ logger.logger.info(`[VERBOSE] reading input from stdin`);
5429
+ }
5430
+ const buf = [];
5431
+ contents = await new Promise((resolve, reject) => {
5432
+ process.stdin.on('data', chunk => {
5433
+ const input = chunk.toString();
5434
+ buf.push(input);
5435
+ });
5436
+ process.stdin.on('end', () => {
5437
+ resolve(buf.join(''));
5438
+ });
5439
+ process.stdin.on('error', e => {
5440
+ if (verbose) {
5441
+ logger.logger.error('Unexpected error while reading from stdin:', e);
5442
+ }
5443
+ reject(e);
5444
+ });
5445
+ process.stdin.on('close', () => {
5446
+ if (buf.length === 0) {
5447
+ if (verbose) {
5448
+ logger.logger.error('stdin closed explicitly without data received');
5449
+ }
5450
+ reject(new Error('No data received from stdin'));
5451
+ } else {
5452
+ if (verbose) {
5453
+ logger.logger.error('warning: stdin closed explicitly with some data received');
5454
+ }
5455
+ resolve(buf.join(''));
5456
+ }
5457
+ });
5458
+ });
5459
+ if (!contents) {
5460
+ return {
5461
+ ok: false,
5462
+ message: 'Manifest Generation Failed',
5463
+ cause: 'No data received from stdin'
5464
+ };
5465
+ }
5466
+ } else {
5467
+ const f = path.resolve(cwd, target);
5468
+ if (verbose) {
5469
+ logger.logger.info(`[VERBOSE] target file: ${f}`);
5470
+ }
5471
+ if (!fs$1.existsSync(f)) {
5472
+ return {
5473
+ ok: false,
5474
+ message: 'Manifest Generation Failed',
5475
+ cause: `Input file not found at ${f}`
5476
+ };
5477
+ }
5478
+ contents = fs$1.readFileSync(target, 'utf8');
5479
+ if (!contents) {
5480
+ return {
5481
+ ok: false,
5482
+ message: 'Manifest Generation Failed',
5483
+ cause: 'File is empty'
5484
+ };
5485
+ }
5486
+ }
5487
+ return {
5488
+ ok: true,
5489
+ data: {
5490
+ contents,
5491
+ pip: convertCondaToRequirementsFromInput(contents)
5492
+ }
5493
+ };
5494
+ }
5495
+
5496
+ // Just extract the first pip block, if one exists at all.
5497
+ function convertCondaToRequirementsFromInput(input) {
5168
5498
  const keeping = [];
5169
5499
  let collecting = false;
5170
5500
  let delim = '-';
@@ -5269,48 +5599,34 @@ const {
5269
5599
  DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$v
5270
5600
  } = constants;
5271
5601
  const config$y = {
5272
- commandName: 'conda',
5273
- description: '[beta] Convert a Conda environment.yml file to a python requirements.txt',
5602
+ commandName: 'auto',
5603
+ description: 'Auto-detect build and attempt to generate manifest file',
5274
5604
  hidden: false,
5275
5605
  flags: {
5276
5606
  ...utils.commonFlags,
5277
- ...utils.outputFlags,
5278
5607
  cwd: {
5279
5608
  type: 'string',
5280
5609
  description: 'Set the cwd, defaults to process.cwd()'
5281
5610
  },
5282
- out: {
5283
- type: 'string',
5284
- default: '-',
5285
- description: 'Output target (use `-` or omit to print to stdout)'
5286
- },
5287
5611
  verbose: {
5288
5612
  type: 'boolean',
5289
- description: 'Print debug messages'
5613
+ default: false,
5614
+ description: 'Enable debug output, may help when running into errors'
5290
5615
  }
5291
5616
  },
5292
5617
  help: (command, config) => `
5293
5618
  Usage
5294
- $ ${command} FILE
5295
-
5296
- Warning: While we don't support Conda necessarily, this tool extracts the pip
5297
- block from an environment.yml and outputs it as a requirements.txt
5298
- which you can scan as if it were a pypi package.
5299
-
5300
- USE AT YOUR OWN RISK
5301
-
5302
- Note: FILE can be a dash (-) to indicate stdin. This way you can pipe the
5303
- contents of a file to have it processed.
5619
+ $ ${command}
5304
5620
 
5305
5621
  Options
5306
5622
  ${utils.getFlagListOutput(config.flags, 6)}
5307
5623
 
5308
- Examples
5309
-
5310
- $ ${command} ./environment.yml
5624
+ Tries to figure out what language your current repo uses. If it finds a
5625
+ supported case then it will try to generate the manifest file for that
5626
+ language with the default or detected settings.
5311
5627
  `
5312
5628
  };
5313
- const cmdManifestConda = {
5629
+ const cmdManifestAuto = {
5314
5630
  description: config$y.description,
5315
5631
  hidden: config$y.hidden,
5316
5632
  run: run$y
@@ -5325,189 +5641,71 @@ async function run$y(argv, importMeta, {
5325
5641
  parentName
5326
5642
  });
5327
5643
  const {
5328
- cwd = process.cwd(),
5329
- json = false,
5330
- markdown = false,
5331
- out = '-',
5332
- verbose = false
5644
+ cwd: cwdFlag,
5645
+ json,
5646
+ markdown,
5647
+ verbose: verboseFlag
5333
5648
  } = cli.flags;
5334
5649
  const outputKind = utils.getOutputKind(json, markdown); // TODO: impl json/md further
5335
-
5336
- const [target = ''] = cli.input;
5650
+ const cwd = String(cwdFlag || process.cwd());
5651
+ const verbose = !!verboseFlag;
5337
5652
  if (verbose) {
5338
5653
  logger.logger.group('- ', parentName, config$y.commandName, ':');
5339
5654
  logger.logger.group('- flags:', cli.flags);
5340
5655
  logger.logger.groupEnd();
5341
- logger.logger.log('- target:', target);
5342
- logger.logger.log('- output:', out);
5656
+ logger.logger.log('- input:', cli.input);
5657
+ logger.logger.log('- cwd:', cwd);
5343
5658
  logger.logger.groupEnd();
5344
5659
  }
5345
- const wasValidInput = utils.checkCommandInput(outputKind, {
5346
- test: !!target,
5347
- message: 'The FILE arg is required',
5348
- pass: 'ok',
5349
- fail: 'missing'
5350
- }, {
5351
- nook: true,
5352
- test: cli.input.length <= 1,
5353
- message: 'Can only accept one DIR (make sure to escape spaces!)',
5354
- pass: 'ok',
5355
- fail: 'received ' + cli.input.length
5356
- }, {
5357
- nook: true,
5358
- test: !json || !markdown,
5359
- message: 'The `--json` and `--markdown` flags can not be used at the same time',
5360
- pass: 'ok',
5361
- fail: 'bad'
5362
- });
5363
- if (!wasValidInput) {
5364
- return;
5365
- }
5366
- logger.logger.warn('Warning: This will approximate your Conda dependencies using PyPI. We do not yet officially support Conda. Use at your own risk.');
5660
+ const result = await detectManifestActions(String(cwd));
5661
+ debug.debugLog(result);
5367
5662
  if (cli.flags['dryRun']) {
5368
5663
  logger.logger.log(DRY_RUN_BAILING_NOW$v);
5369
5664
  return;
5370
5665
  }
5371
- await handleManifestConda(target, String(out || ''), json ? 'json' : markdown ? 'markdown' : 'text', String(cwd), Boolean(verbose));
5372
- }
5373
-
5374
- async function convertGradleToMaven(target, bin, cwd, verbose, gradleOpts) {
5375
- // TODO: impl json/md
5376
- if (verbose) {
5377
- logger.logger.log('[VERBOSE] Resolving:', [cwd, bin]);
5378
- }
5379
- const rbin = path.resolve(cwd, bin);
5380
- if (verbose) {
5381
- logger.logger.log('[VERBOSE] Resolving:', [cwd, target]);
5382
- }
5383
- const rtarget = path.resolve(cwd, target);
5384
- const binExists = fs$1.existsSync(rbin);
5385
- const targetExists = fs$1.existsSync(rtarget);
5386
- logger.logger.group('gradle2maven:');
5387
- if (verbose || debug.isDebug()) {
5388
- logger.logger.log(`[VERBOSE] - Absolute bin path: \`${rbin}\` (${binExists ? 'found' : vendor.yoctocolorsCjsExports.red('not found!')})`);
5389
- logger.logger.log(`[VERBOSE] - Absolute target path: \`${rtarget}\` (${targetExists ? 'found' : vendor.yoctocolorsCjsExports.red('not found!')})`);
5390
- } else {
5391
- logger.logger.log(`- executing: \`${rbin}\``);
5392
- if (!binExists) {
5393
- logger.logger.warn('Warning: It appears the executable could not be found at this location. An error might be printed later because of that.');
5394
- }
5395
- logger.logger.log(`- src dir: \`${rtarget}\``);
5396
- if (!targetExists) {
5397
- logger.logger.warn('Warning: It appears the src dir could not be found at this location. An error might be printed later because of that.');
5398
- }
5399
- }
5400
- logger.logger.groupEnd();
5401
- try {
5402
- // Run gradlew with the init script we provide which should yield zero or more
5403
- // pom files. We have to figure out where to store those pom files such that
5404
- // we can upload them and predict them through the GitHub API. We could do a
5405
- // .socket folder. We could do a socket.pom.gz with all the poms, although
5406
- // I'd prefer something plain-text if it is to be committed.
5407
-
5408
- // Note: init.gradle will be exported by .config/rollup.dist.config.mjs
5409
- const initLocation = path.join(constants.distPath, 'init.gradle');
5410
- const commandArgs = ['--init-script', initLocation, ...gradleOpts, 'pom'];
5411
- if (verbose) {
5412
- logger.logger.log('[VERBOSE] Executing:', [bin], ', args:', commandArgs);
5413
- }
5414
- logger.logger.log(`Converting gradle to maven from \`${bin}\` on \`${target}\` ...`);
5415
- const output = await execGradleWithSpinner(rbin, commandArgs, rtarget, cwd);
5416
- if (verbose) {
5417
- logger.logger.group('[VERBOSE] gradle stdout:');
5418
- logger.logger.log(output);
5419
- logger.logger.groupEnd();
5420
- }
5421
- if (output.code !== 0) {
5422
- process.exitCode = 1;
5423
- logger.logger.fail(`Gradle exited with exit code ${output.code}`);
5424
- // (In verbose mode, stderr was printed above, no need to repeat it)
5425
- if (!verbose) {
5426
- logger.logger.group('stderr:');
5427
- logger.logger.error(output.stderr);
5428
- logger.logger.groupEnd();
5429
- }
5430
- return;
5431
- }
5432
- logger.logger.success('Executed gradle successfully');
5433
- logger.logger.log('Reported exports:');
5434
- output.stdout.replace(/^POM file copied to: (.*)/gm, (_all, fn) => {
5435
- logger.logger.log('- ', fn);
5436
- return fn;
5437
- });
5666
+ const found = Object.values(result).reduce((sum, now) => now ? sum + 1 : sum, 0);
5667
+ if (!found) {
5668
+ logger.logger.fail(' Was unable to discover any targets for which we can generate manifest files...');
5438
5669
  logger.logger.log('');
5439
- logger.logger.log('Next step is to generate a Scan by running the `socket scan create` command on the same directory');
5440
- } catch (e) {
5670
+ logger.logger.log('- Make sure this script would work with your target build (see `socket manifest --help` for your target).');
5671
+ logger.logger.log('- Make sure to run it from the correct dir (use --cwd to target another dir)');
5672
+ logger.logger.log('- Make sure the necessary build tools are available (`PATH`)');
5441
5673
  process.exitCode = 1;
5442
- logger.logger.fail('There was an unexpected error while generating manifests' + (verbose ? '' : ' (use --verbose for details)'));
5443
- if (verbose) {
5444
- logger.logger.group('[VERBOSE] error:');
5445
- logger.logger.log(e);
5446
- logger.logger.groupEnd();
5447
- }
5674
+ return;
5448
5675
  }
5449
- }
5450
- async function execGradleWithSpinner(bin, commandArgs, target, cwd) {
5451
- // Lazily access constants.spinner.
5452
- const {
5453
- spinner
5454
- } = constants;
5455
- let pass = false;
5456
- try {
5457
- spinner.start(`Running gradlew... (this can take a while, it depends on how long gradlew has to run)`);
5458
- const output = await spawn.spawn(bin, commandArgs, {
5459
- // We can pipe the output through to have the user see the result
5460
- // of running gradlew, but then we can't (easily) gather the output
5461
- // to discover the generated files... probably a flag we should allow?
5462
- // stdio: isDebug() ? 'inherit' : undefined,
5463
- cwd: target || cwd
5464
- });
5465
- pass = true;
5466
- const {
5467
- code,
5468
- stderr,
5469
- stdout
5470
- } = output;
5471
- return {
5472
- code,
5473
- stdout,
5474
- stderr
5475
- };
5476
- } finally {
5477
- if (pass) {
5478
- spinner.successAndStop('Completed gradlew execution');
5479
- } else {
5480
- spinner.failAndStop('There was an error while trying to run gradlew.');
5481
- }
5676
+ if (result.sbt) {
5677
+ logger.logger.log('Detected a Scala sbt build, generating pom files with sbt...');
5678
+ await convertSbtToMaven(cwd, 'sbt', './socket.sbt.pom.xml', verbose, []);
5679
+ }
5680
+ if (result.gradle) {
5681
+ logger.logger.log('Detected a gradle build (Gradle, Kotlin, Scala), running default gradle generator...');
5682
+ await convertGradleToMaven(cwd, path.join(cwd, 'gradlew'), cwd, verbose, []);
5683
+ }
5684
+ if (result.conda) {
5685
+ logger.logger.log('Detected an environment.yml file, running default Conda generator...');
5686
+ await handleManifestConda(cwd, '', outputKind, cwd, verbose);
5482
5687
  }
5688
+ logger.logger.success(`Finished. Should have attempted to generate manifest files for ${found} targets.`);
5483
5689
  }
5484
5690
 
5485
5691
  const {
5486
5692
  DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$u
5487
5693
  } = constants;
5488
5694
  const config$x = {
5489
- commandName: 'gradle',
5490
- description: '[beta] Use Gradle to generate a manifest file (`pom.xml`) for a Gradle/Java/Kotlin/etc project',
5695
+ commandName: 'conda',
5696
+ description: '[beta] Convert a Conda environment.yml file to a python requirements.txt',
5491
5697
  hidden: false,
5492
5698
  flags: {
5493
5699
  ...utils.commonFlags,
5494
- bin: {
5495
- type: 'string',
5496
- description: 'Location of gradlew binary to use, default: CWD/gradlew'
5497
- },
5700
+ ...utils.outputFlags,
5498
5701
  cwd: {
5499
5702
  type: 'string',
5500
5703
  description: 'Set the cwd, defaults to process.cwd()'
5501
5704
  },
5502
- gradleOpts: {
5503
- type: 'string',
5504
- default: '',
5505
- description: 'Additional options to pass on to ./gradlew, see `./gradlew --help`'
5506
- },
5507
- task: {
5705
+ out: {
5508
5706
  type: 'string',
5509
- default: 'all',
5510
- description: 'Task to target. By default targets all'
5707
+ default: '-',
5708
+ description: 'Output target (use `-` or omit to print to stdout)'
5511
5709
  },
5512
5710
  verbose: {
5513
5711
  type: 'boolean',
@@ -5516,38 +5714,26 @@ const config$x = {
5516
5714
  },
5517
5715
  help: (command, config) => `
5518
5716
  Usage
5519
- $ ${command} [--bin=path/to/gradle/binary] [--out=path/to/result] DIR
5520
-
5521
- Options
5522
- ${utils.getFlagListOutput(config.flags, 6)}
5523
-
5524
- Uses gradle, preferably through your local project \`gradlew\`, to generate a
5525
- \`pom.xml\` file for each task. If you have no \`gradlew\` you can try the
5526
- global \`gradle\` binary but that may not work (hard to predict).
5527
-
5528
- The \`pom.xml\` is a manifest file similar to \`package.json\` for npm or
5529
- or requirements.txt for PyPi), but specifically for Maven, which is Java's
5530
- dependency repository. Languages like Kotlin and Scala piggy back on it too.
5531
-
5532
- There are some caveats with the gradle to \`pom.xml\` conversion:
5717
+ $ ${command} FILE
5533
5718
 
5534
- - each task will generate its own xml file and by default it generates one xml
5535
- for every task.
5719
+ Warning: While we don't support Conda necessarily, this tool extracts the pip
5720
+ block from an environment.yml and outputs it as a requirements.txt
5721
+ which you can scan as if it were a pypi package.
5536
5722
 
5537
- - it's possible certain features don't translate well into the xml. If you
5538
- think something is missing that could be supported please reach out.
5723
+ USE AT YOUR OWN RISK
5539
5724
 
5540
- - it works with your \`gradlew\` from your repo and local settings and config
5725
+ Note: FILE can be a dash (-) to indicate stdin. This way you can pipe the
5726
+ contents of a file to have it processed.
5541
5727
 
5542
- Support is beta. Please report issues or give us feedback on what's missing.
5728
+ Options
5729
+ ${utils.getFlagListOutput(config.flags, 6)}
5543
5730
 
5544
5731
  Examples
5545
5732
 
5546
- $ ${command} .
5547
- $ ${command} --bin=../gradlew .
5733
+ $ ${command} ./environment.yml
5548
5734
  `
5549
5735
  };
5550
- const cmdManifestGradle = {
5736
+ const cmdManifestConda = {
5551
5737
  description: config$x.description,
5552
5738
  hidden: config$x.hidden,
5553
5739
  run: run$x
@@ -5561,193 +5747,79 @@ async function run$x(argv, importMeta, {
5561
5747
  importMeta,
5562
5748
  parentName
5563
5749
  });
5564
- const verbose = Boolean(cli.flags['verbose']);
5565
5750
  const {
5566
- json,
5567
- markdown
5751
+ cwd = process.cwd(),
5752
+ json = false,
5753
+ markdown = false,
5754
+ out = '-',
5755
+ verbose = false
5568
5756
  } = cli.flags;
5569
5757
  const outputKind = utils.getOutputKind(json, markdown); // TODO: impl json/md further
5570
5758
 
5759
+ const [target = ''] = cli.input;
5571
5760
  if (verbose) {
5572
5761
  logger.logger.group('- ', parentName, config$x.commandName, ':');
5573
5762
  logger.logger.group('- flags:', cli.flags);
5574
5763
  logger.logger.groupEnd();
5575
- logger.logger.log('- input:', cli.input);
5764
+ logger.logger.log('- target:', target);
5765
+ logger.logger.log('- output:', out);
5576
5766
  logger.logger.groupEnd();
5577
5767
  }
5578
- const [target = ''] = cli.input;
5579
-
5580
- // TODO: I'm not sure it's feasible to parse source file from stdin. We could
5581
- // try, store contents in a file in some folder, target that folder... what
5582
- // would the file name be?
5583
-
5584
5768
  const wasValidInput = utils.checkCommandInput(outputKind, {
5585
- test: !!target && target !== '-',
5586
- message: 'The DIR arg is required',
5769
+ test: !!target,
5770
+ message: 'The FILE arg is required',
5587
5771
  pass: 'ok',
5588
- fail: target === '-' ? 'stdin is not supported' : 'missing'
5772
+ fail: 'missing'
5589
5773
  }, {
5590
5774
  nook: true,
5591
5775
  test: cli.input.length <= 1,
5592
5776
  message: 'Can only accept one DIR (make sure to escape spaces!)',
5593
5777
  pass: 'ok',
5594
5778
  fail: 'received ' + cli.input.length
5779
+ }, {
5780
+ nook: true,
5781
+ test: !json || !markdown,
5782
+ message: 'The `--json` and `--markdown` flags can not be used at the same time',
5783
+ pass: 'ok',
5784
+ fail: 'bad'
5595
5785
  });
5596
5786
  if (!wasValidInput) {
5597
5787
  return;
5598
5788
  }
5599
- const {
5600
- bin = path.join(target, 'gradlew'),
5601
- cwd = process.cwd()
5602
- } = cli.flags;
5603
- if (verbose) {
5604
- logger.logger.group();
5605
- logger.logger.log('- target:', target);
5606
- logger.logger.log('- gradle bin:', bin);
5607
- logger.logger.groupEnd();
5608
- }
5609
- let gradleOpts = [];
5610
- if (cli.flags['gradleOpts']) {
5611
- gradleOpts = cli.flags['gradleOpts'].split(' ').map(s => s.trim()).filter(Boolean);
5612
- }
5789
+ logger.logger.warn('Warning: This will approximate your Conda dependencies using PyPI. We do not yet officially support Conda. Use at your own risk.');
5613
5790
  if (cli.flags['dryRun']) {
5614
5791
  logger.logger.log(DRY_RUN_BAILING_NOW$u);
5615
5792
  return;
5616
5793
  }
5617
- await convertGradleToMaven(target, String(bin), String(cwd), verbose, gradleOpts);
5618
- }
5619
-
5620
- async function convertSbtToMaven(target, bin, out, verbose, sbtOpts) {
5621
- // TODO: impl json/md
5622
-
5623
- // Lazily access constants.spinner.
5624
- const {
5625
- spinner
5626
- } = constants;
5627
- const rbin = path.resolve(bin);
5628
- const rtarget = path.resolve(target);
5629
- if (verbose) {
5630
- logger.logger.group('sbt2maven:');
5631
- logger.logger.log(`[VERBOSE] - Absolute bin path: \`${rbin}\``);
5632
- logger.logger.log(`[VERBOSE] - Absolute target path: \`${rtarget}\``);
5633
- // logger.log(`[VERBOSE] - Absolute out path: \`${rout}\``)
5634
- logger.logger.groupEnd();
5635
- } else {
5636
- logger.logger.group('sbt2maven:');
5637
- logger.logger.log(`- executing: \`${bin}\``);
5638
- logger.logger.log(`- src dir: \`${target}\``);
5639
- // logger.log(`- dst dir: \`${out}\``)
5640
- logger.logger.groupEnd();
5641
- }
5642
- try {
5643
- spinner.start(`Converting sbt to maven from \`${bin}\` on \`${target}\`...`);
5644
-
5645
- // Run sbt with the init script we provide which should yield zero or more
5646
- // pom files. We have to figure out where to store those pom files such that
5647
- // we can upload them and predict them through the GitHub API. We could do a
5648
- // .socket folder. We could do a socket.pom.gz with all the poms, although
5649
- // I'd prefer something plain-text if it is to be committed.
5650
- const output = await spawn.spawn(bin, ['makePom'].concat(sbtOpts), {
5651
- cwd: target || '.'
5652
- });
5653
- spinner.stop();
5654
- if (verbose) {
5655
- logger.logger.group('[VERBOSE] sbt stdout:');
5656
- logger.logger.log(output);
5657
- logger.logger.groupEnd();
5658
- }
5659
- if (output.stderr) {
5660
- process.exitCode = 1;
5661
- logger.logger.fail('There were errors while running sbt');
5662
- // (In verbose mode, stderr was printed above, no need to repeat it)
5663
- if (!verbose) {
5664
- logger.logger.group('[VERBOSE] stderr:');
5665
- logger.logger.error(output.stderr);
5666
- logger.logger.groupEnd();
5667
- }
5668
- return;
5669
- }
5670
- const poms = [];
5671
- output.stdout.replace(/Wrote (.*?.pom)\n/g, (_all, fn) => {
5672
- poms.push(fn);
5673
- return fn;
5674
- });
5675
- if (!poms.length) {
5676
- process.exitCode = 1;
5677
- logger.logger.fail('There were no errors from sbt but it seems to not have generated any poms either');
5678
- return;
5679
- }
5680
- // Move the pom file to ...? initial cwd? loc will be an absolute path, or dump to stdout
5681
- // TODO: what to do with multiple output files? Do we want to dump them to stdout? Raw or with separators or ?
5682
- // TODO: maybe we can add an option to target a specific file to dump to stdout
5683
- if (out === '-' && poms.length === 1) {
5684
- logger.logger.log('Result:\n```');
5685
- logger.logger.log(await utils.safeReadFile(poms[0]));
5686
- logger.logger.log('```');
5687
- logger.logger.success(`OK`);
5688
- } else if (out === '-') {
5689
- process.exitCode = 1;
5690
- logger.logger.fail('Requested out target was stdout but there are multiple generated files');
5691
- poms.forEach(fn => logger.logger.error('-', fn));
5692
- logger.logger.info('Exiting now...');
5693
- return;
5694
- } else {
5695
- // if (verbose) {
5696
- // logger.log(
5697
- // `Moving manifest file from \`${loc.replace(/^\/home\/[^/]*?\//, '~/')}\` to \`${out}\``
5698
- // )
5699
- // } else {
5700
- // logger.log('Moving output pom file')
5701
- // }
5702
- // TODO: do we prefer fs-extra? renaming can be gnarly on windows and fs-extra's version is better
5703
- // await renamep(loc, out)
5704
- logger.logger.success(`Generated ${poms.length} pom files`);
5705
- poms.forEach(fn => logger.logger.log('-', fn));
5706
- logger.logger.success(`OK`);
5707
- }
5708
- } catch (e) {
5709
- process.exitCode = 1;
5710
- spinner.stop();
5711
- logger.logger.fail('There was an unexpected error while running this' + (verbose ? '' : ' (use --verbose for details)'));
5712
- if (verbose) {
5713
- logger.logger.group('[VERBOSE] error:');
5714
- logger.logger.log(e);
5715
- logger.logger.groupEnd();
5716
- }
5717
- }
5794
+ await handleManifestConda(target, String(out || ''), json ? 'json' : markdown ? 'markdown' : 'text', String(cwd), Boolean(verbose));
5718
5795
  }
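
For reference, the output kind handed to handleManifestConda above is just a ternary over the --json/--markdown flags. A minimal stand-alone sketch of that selection (the helper name below is hypothetical, the flag names are the ones shown in the diff):

// Hypothetical helper; the real command also validates that json and markdown are not combined.
function pickOutputKind({ json = false, markdown = false }) {
  return json ? 'json' : markdown ? 'markdown' : 'text';
}

console.log(pickOutputKind({ json: true }));     // 'json'
console.log(pickOutputKind({ markdown: true })); // 'markdown'
console.log(pickOutputKind({}));                 // 'text'
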
5719
5796
 
5720
5797
  const {
5721
5798
  DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$t
5722
5799
  } = constants;
5723
5800
  const config$w = {
5724
- commandName: 'scala',
5725
- description: "[beta] Generate a manifest file (`pom.xml`) from Scala's `build.sbt` file",
5801
+ commandName: 'gradle',
5802
+ description: '[beta] Use Gradle to generate a manifest file (`pom.xml`) for a Gradle/Java/Kotlin/etc project',
5726
5803
  hidden: false,
5727
5804
  flags: {
5728
5805
  ...utils.commonFlags,
5729
5806
  bin: {
5730
5807
  type: 'string',
5731
- default: 'sbt',
5732
- description: 'Location of sbt binary to use'
5808
+ description: 'Location of gradlew binary to use, default: CWD/gradlew'
5733
5809
  },
5734
5810
  cwd: {
5735
5811
  type: 'string',
5736
5812
  description: 'Set the cwd, defaults to process.cwd()'
5737
5813
  },
5738
- out: {
5814
+ gradleOpts: {
5739
5815
  type: 'string',
5740
- default: './socket.pom.xml',
5741
- description: 'Path of output file; where to store the resulting manifest, see also --stdout'
5742
- },
5743
- stdout: {
5744
- type: 'boolean',
5745
- description: 'Print resulting pom.xml to stdout (supersedes --out)'
5816
+ default: '',
5817
+ description: 'Additional options to pass on to ./gradlew, see `./gradlew --help`'
5746
5818
  },
5747
- sbtOpts: {
5819
+ task: {
5748
5820
  type: 'string',
5749
- default: '',
5750
- description: 'Additional options to pass on to sbt, as per `sbt --help`'
5821
+ default: 'all',
5822
+ description: 'Task to target. By default targets all'
5751
5823
  },
5752
5824
  verbose: {
5753
5825
  type: 'boolean',
@@ -5756,43 +5828,38 @@ const config$w = {
5756
5828
  },
5757
5829
  help: (command, config) => `
5758
5830
  Usage
5759
- $ ${command} [--bin=path/to/sbt/binary] [--out=path/to/result] FILE|DIR
5831
+ $ ${command} [--bin=path/to/gradle/binary] [--out=path/to/result] DIR
5760
5832
 
5761
5833
  Options
5762
5834
  ${utils.getFlagListOutput(config.flags, 6)}
5763
5835
 
5764
- Uses \`sbt makePom\` to generate a \`pom.xml\` from your \`build.sbt\` file.
5765
- This xml file is the dependency manifest (like a package.json
5766
- for Node.js or requirements.txt for PyPi), but specifically for Scala.
5767
-
5768
- There are some caveats with \`build.sbt\` to \`pom.xml\` conversion:
5836
+ Uses gradle, preferably through your local project \`gradlew\`, to generate a
5837
+ \`pom.xml\` file for each task. If you have no \`gradlew\` you can try the
5838
+ global \`gradle\` binary but that may not work (hard to predict).
5769
5839
 
5770
- - the xml is exported as socket.pom.xml as to not confuse existing build tools
5771
- but it will first hit your /target/sbt<version> folder (as a different name)
5840
+ The \`pom.xml\` is a manifest file similar to \`package.json\` for npm
5841
+ or requirements.txt for PyPI, but specifically for Maven, which is Java's
5842
+ dependency repository. Languages like Kotlin and Scala piggyback on it too.
5772
5843
 
5773
- - the pom.xml format (standard by Scala) does not support certain sbt features
5774
- - \`excludeAll()\`, \`dependencyOverrides\`, \`force()\`, \`relativePath\`
5775
- - For details: https://www.scala-sbt.org/1.x/docs/Library-Management.html
5844
+ There are some caveats with the gradle to \`pom.xml\` conversion:
5776
5845
 
5777
- - it uses your sbt settings and local configuration verbatim
5846
+ - each task will generate its own xml file and by default it generates one xml
5847
+ for every task.
5778
5848
 
5779
- - it can only export one target per run, so if you have multiple targets like
5780
- development and production, you must run them separately.
5849
+ - it's possible certain features don't translate well into the xml. If you
5850
+ think something is missing that could be supported please reach out.
5781
5851
 
5782
- You can optionally configure the path to the \`sbt\` bin to invoke.
5852
+ - it works with your \`gradlew\` from your repo and local settings and config
5783
5853
 
5784
5854
  Support is beta. Please report issues or give us feedback on what's missing.
5785
5855
 
5786
- This is only for SBT. If your Scala setup uses gradle, please see the help
5787
- sections for \`socket manifest gradle\` or \`socket cdxgen\`.
5788
-
5789
5856
  Examples
5790
5857
 
5791
- $ ${command} ./build.sbt
5792
- $ ${command} --bin=/usr/bin/sbt ./build.sbt
5858
+ $ ${command} .
5859
+ $ ${command} --bin=../gradlew .
5793
5860
  `
5794
5861
  };
5795
- const cmdManifestScala = {
5862
+ const cmdManifestGradle = {
5796
5863
  description: config$w.description,
5797
5864
  hidden: config$w.hidden,
5798
5865
  run: run$w
@@ -5841,68 +5908,99 @@ async function run$w(argv, importMeta, {
5841
5908
  if (!wasValidInput) {
5842
5909
  return;
5843
5910
  }
5844
- let bin = 'sbt';
5845
- if (cli.flags['bin']) {
5846
- bin = cli.flags['bin'];
5847
- }
5848
- let out = './socket.pom.xml';
5849
- if (cli.flags['out']) {
5850
- out = cli.flags['out'];
5851
- }
5852
- if (cli.flags['stdout']) {
5853
- out = '-';
5854
- }
5911
+ const {
5912
+ bin = path.join(target, 'gradlew'),
5913
+ cwd = process.cwd()
5914
+ } = cli.flags;
5855
5915
  if (verbose) {
5856
5916
  logger.logger.group();
5857
5917
  logger.logger.log('- target:', target);
5858
5918
  logger.logger.log('- gradle bin:', bin);
5859
- logger.logger.log('- out:', out);
5860
5919
  logger.logger.groupEnd();
5861
5920
  }
5862
- let sbtOpts = [];
5863
- if (cli.flags['sbtOpts']) {
5864
- sbtOpts = cli.flags['sbtOpts'].split(' ').map(s => s.trim()).filter(Boolean);
5921
+ let gradleOpts = [];
5922
+ if (cli.flags['gradleOpts']) {
5923
+ gradleOpts = cli.flags['gradleOpts'].split(' ').map(s => s.trim()).filter(Boolean);
5865
5924
  }
5866
5925
  if (cli.flags['dryRun']) {
5867
5926
  logger.logger.log(DRY_RUN_BAILING_NOW$t);
5868
5927
  return;
5869
5928
  }
5870
- await convertSbtToMaven(target, bin, out, verbose, sbtOpts);
5929
+ await convertGradleToMaven(target, String(bin), String(cwd), verbose, gradleOpts);
5871
5930
  }
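
A small self-contained sketch of the defaulting and option-splitting the gradle runner above performs, assuming Node's path module and a meow-style flags object (the literal flag values are made up for illustration):

const path = require('node:path');

// Hypothetical parsed flags, mirroring the shape the CLI's flag parser produces.
const flags = { bin: undefined, cwd: undefined, gradleOpts: '--no-daemon  --stacktrace' };
const target = './my-project';

// Defaults: <target>/gradlew for the binary, process.cwd() for the working directory.
const { bin = path.join(target, 'gradlew'), cwd = process.cwd() } = flags;

// Whitespace-separated opts become an argv array; empty fragments are dropped.
const gradleOpts = flags.gradleOpts
  ? flags.gradleOpts.split(' ').map(s => s.trim()).filter(Boolean)
  : [];

console.log(bin, cwd, gradleOpts);
// e.g. my-project/gradlew <current dir> [ '--no-daemon', '--stacktrace' ]
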
5872
5931
 
5873
5932
  const {
5874
5933
  DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$s
5875
5934
  } = constants;
5935
+
5936
+ // TODO: we may want to dedupe some pieces for all gradle languages. I think it
5937
+ // makes sense to have separate commands for them and I think it makes
5938
+ // sense for the help panels to note the requested language, rather than
5939
+ // `socket manifest kotlin` to print help screens with `gradle` as the
5940
+ // command. Room for improvement.
5876
5941
  const config$v = {
5877
- commandName: 'auto',
5878
- description: 'Auto-detect build and attempt to generate manifest file',
5942
+ commandName: 'kotlin',
5943
+ description: '[beta] Use Gradle to generate a manifest file (`pom.xml`) for a Kotlin project',
5879
5944
  hidden: false,
5880
5945
  flags: {
5881
5946
  ...utils.commonFlags,
5947
+ bin: {
5948
+ type: 'string',
5949
+ description: 'Location of gradlew binary to use, default: CWD/gradlew'
5950
+ },
5882
5951
  cwd: {
5883
5952
  type: 'string',
5884
5953
  description: 'Set the cwd, defaults to process.cwd()'
5885
5954
  },
5955
+ gradleOpts: {
5956
+ type: 'string',
5957
+ default: '',
5958
+ description: 'Additional options to pass on to ./gradlew, see `./gradlew --help`'
5959
+ },
5960
+ task: {
5961
+ type: 'string',
5962
+ default: 'all',
5963
+ description: 'Task to target. By default targets all'
5964
+ },
5886
5965
  verbose: {
5887
5966
  type: 'boolean',
5888
- default: false,
5889
- description: 'Enable debug output, may help when running into errors'
5967
+ description: 'Print debug messages'
5890
5968
  }
5891
- // TODO: support output flags
5892
5969
  },
5893
5970
  help: (command, config) => `
5894
5971
  Usage
5895
- $ ${command}
5972
+ $ ${command} [--bin=path/to/gradle/binary] [--out=path/to/result] DIR
5973
+
5974
+ Options
5975
+ ${utils.getFlagListOutput(config.flags, 6)}
5976
+
5977
+ Uses gradle, preferably through your local project \`gradlew\`, to generate a
5978
+ \`pom.xml\` file for each task. If you have no \`gradlew\` you can try the
5979
+ global \`gradle\` binary but that may not work (hard to predict).
5980
+
5981
+ The \`pom.xml\` is a manifest file similar to \`package.json\` for npm
5982
+ or requirements.txt for PyPI, but specifically for Maven, which is Java's
5983
+ dependency repository. Languages like Kotlin and Scala piggyback on it too.
5984
+
5985
+ There are some caveats with the gradle to \`pom.xml\` conversion:
5986
+
5987
+ - each task will generate its own xml file and by default it generates one xml
5988
+ for every task. (This may be a good thing!)
5896
5989
 
5897
- Options
5898
- ${utils.getFlagListOutput(config.flags, 6)}
5990
+ - it's possible certain features don't translate well into the xml. If you
5991
+ think something is missing that could be supported please reach out.
5899
5992
 
5900
- Tries to figure out what language your current repo uses. If it finds a
5901
- supported case then it will try to generate the manifest file for that
5902
- language with the default or detected settings.
5993
+ - it works with your \`gradlew\` from your repo and local settings and config
5994
+
5995
+ Support is beta. Please report issues or give us feedback on what's missing.
5996
+
5997
+ Examples
5998
+
5999
+ $ ${command} .
6000
+ $ ${command} --bin=../gradlew .
5903
6001
  `
5904
6002
  };
5905
- const cmdManifestAuto = {
6003
+ const cmdManifestKotlin = {
5906
6004
  description: config$v.description,
5907
6005
  hidden: config$v.hidden,
5908
6006
  run: run$v
@@ -5916,127 +6014,93 @@ async function run$v(argv, importMeta, {
5916
6014
  importMeta,
5917
6015
  parentName
5918
6016
  });
5919
- const verbose = !!cli.flags['verbose'];
5920
- const cwd = cli.flags['cwd'] ?? process.cwd();
5921
- // TODO: impl json/md
6017
+ const verbose = Boolean(cli.flags['verbose']);
6018
+ const {
6019
+ json,
6020
+ markdown
6021
+ } = cli.flags;
6022
+ const outputKind = utils.getOutputKind(json, markdown); // TODO: impl json/md further
5922
6023
 
5923
6024
  if (verbose) {
5924
6025
  logger.logger.group('- ', parentName, config$v.commandName, ':');
5925
6026
  logger.logger.group('- flags:', cli.flags);
5926
6027
  logger.logger.groupEnd();
5927
6028
  logger.logger.log('- input:', cli.input);
5928
- logger.logger.log('- cwd:', cwd);
5929
6029
  logger.logger.groupEnd();
5930
6030
  }
5931
- const subArgs = [];
5932
- if (verbose) {
5933
- subArgs.push('--verbose');
5934
- }
5935
- const dir = cwd;
5936
- if (fs$1.existsSync(path.join(dir, 'build.sbt'))) {
5937
- logger.logger.log('Detected a Scala sbt build, running default Scala generator...');
5938
- if (cwd) {
5939
- subArgs.push('--cwd', cwd);
5940
- }
5941
- subArgs.push(dir);
5942
- if (cli.flags['dryRun']) {
5943
- logger.logger.log(DRY_RUN_BAILING_NOW$s);
5944
- return;
5945
- }
5946
- await cmdManifestScala.run(subArgs, importMeta, {
5947
- parentName
5948
- });
6031
+ const [target = ''] = cli.input;
6032
+
6033
+ // TODO: I'm not sure it's feasible to parse source file from stdin. We could
6034
+ // try, store contents in a file in some folder, target that folder... what
6035
+ // would the file name be?
6036
+
6037
+ const wasValidInput = utils.checkCommandInput(outputKind, {
6038
+ test: !!target && target !== '-',
6039
+ message: 'The DIR arg is required',
6040
+ pass: 'ok',
6041
+ fail: target === '-' ? 'stdin is not supported' : 'missing'
6042
+ }, {
6043
+ nook: true,
6044
+ test: cli.input.length <= 1,
6045
+ message: 'Can only accept one DIR (make sure to escape spaces!)',
6046
+ pass: 'ok',
6047
+ fail: 'received ' + cli.input.length
6048
+ });
6049
+ if (!wasValidInput) {
5949
6050
  return;
5950
6051
  }
5951
- if (fs$1.existsSync(path.join(dir, 'gradlew'))) {
5952
- logger.logger.log('Detected a gradle build, running default gradle generator...');
5953
- if (cwd) {
5954
- // This command takes the cwd as first arg.
5955
- subArgs.push(cwd);
5956
- }
5957
- if (cli.flags['dryRun']) {
5958
- logger.logger.log(DRY_RUN_BAILING_NOW$s);
5959
- return;
5960
- }
5961
- await cmdManifestGradle.run(subArgs, importMeta, {
5962
- parentName
5963
- });
5964
- return;
6052
+ const {
6053
+ bin = path.join(target, 'gradlew'),
6054
+ cwd = process.cwd()
6055
+ } = cli.flags;
6056
+ if (verbose) {
6057
+ logger.logger.group();
6058
+ logger.logger.log('- target:', target);
6059
+ logger.logger.log('- gradle bin:', bin);
6060
+ logger.logger.groupEnd();
5965
6061
  }
5966
- const envyml = path.join(dir, 'environment.yml');
5967
- const hasEnvyml = fs$1.existsSync(envyml);
5968
- const envyaml = path.join(dir, 'environment.yaml');
5969
- const hasEnvyaml = !hasEnvyml && fs$1.existsSync(envyaml);
5970
- if (hasEnvyml || hasEnvyaml) {
5971
- logger.logger.log('Detected an environment.yml file, running default Conda generator...');
5972
- // This command takes the TARGET as first arg.
5973
- subArgs.push(hasEnvyml ? envyml : hasEnvyaml ? envyaml : '');
5974
- if (cli.flags['dryRun']) {
5975
- logger.logger.log(DRY_RUN_BAILING_NOW$s);
5976
- return;
5977
- }
5978
- await cmdManifestConda.run(subArgs, importMeta, {
5979
- parentName
5980
- });
5981
- return;
6062
+ let gradleOpts = [];
6063
+ if (cli.flags['gradleOpts']) {
6064
+ gradleOpts = cli.flags['gradleOpts'].split(' ').map(s => s.trim()).filter(Boolean);
5982
6065
  }
5983
6066
  if (cli.flags['dryRun']) {
5984
6067
  logger.logger.log(DRY_RUN_BAILING_NOW$s);
5985
6068
  return;
5986
6069
  }
5987
-
5988
- // Show new help screen and exit.
5989
- vendor.meow(`
5990
- $ ${parentName} ${config$v.commandName}
5991
-
5992
- Unfortunately this script did not discover a supported language in the
5993
- current folder.
5994
-
5995
- - Make sure this script would work with your target build
5996
- - Make sure to run it from the correct folder
5997
- - Make sure the necessary build tools are available (\`PATH\`)
5998
-
5999
- If that doesn't work, see \`${parentName} <lang> --help\` for config details for
6000
- your target language.
6001
- `, {
6002
- argv: [],
6003
- description: config$v.description,
6004
- importMeta
6005
- }).showHelp();
6070
+ await convertGradleToMaven(target, String(bin), String(cwd), verbose, gradleOpts);
6006
6071
  }
6007
6072
 
6008
6073
  const {
6009
6074
  DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$r
6010
6075
  } = constants;
6011
-
6012
- // TODO: we may want to dedupe some pieces for all gradle languages. I think it
6013
- // makes sense to have separate commands for them and I think it makes
6014
- // sense for the help panels to note the requested language, rather than
6015
- // `socket manifest kotlin` to print help screens with `gradle` as the
6016
- // command. Room for improvement.
6017
6076
  const config$u = {
6018
- commandName: 'kotlin',
6019
- description: '[beta] Use Gradle to generate a manifest file (`pom.xml`) for a Kotlin project',
6077
+ commandName: 'scala',
6078
+ description: "[beta] Generate a manifest file (`pom.xml`) from Scala's `build.sbt` file",
6020
6079
  hidden: false,
6021
6080
  flags: {
6022
6081
  ...utils.commonFlags,
6023
6082
  bin: {
6024
6083
  type: 'string',
6025
- description: 'Location of gradlew binary to use, default: CWD/gradlew'
6084
+ default: 'sbt',
6085
+ description: 'Location of sbt binary to use'
6026
6086
  },
6027
6087
  cwd: {
6028
6088
  type: 'string',
6029
6089
  description: 'Set the cwd, defaults to process.cwd()'
6030
6090
  },
6031
- gradleOpts: {
6091
+ out: {
6032
6092
  type: 'string',
6033
- default: '',
6034
- description: 'Additional options to pass on to ./gradlew, see `./gradlew --help`'
6093
+ default: './socket.pom.xml',
6094
+ description: 'Path of output file; where to store the resulting manifest, see also --stdout'
6035
6095
  },
6036
- task: {
6096
+ stdout: {
6097
+ type: 'boolean',
6098
+ description: 'Print resulting pom.xml to stdout (supersedes --out)'
6099
+ },
6100
+ sbtOpts: {
6037
6101
  type: 'string',
6038
- default: 'all',
6039
- description: 'Task to target. By default targets all'
6102
+ default: '',
6103
+ description: 'Additional options to pass on to sbt, as per `sbt --help`'
6040
6104
  },
6041
6105
  verbose: {
6042
6106
  type: 'boolean',
@@ -6045,38 +6109,43 @@ const config$u = {
6045
6109
  },
6046
6110
  help: (command, config) => `
6047
6111
  Usage
6048
- $ ${command} [--bin=path/to/gradle/binary] [--out=path/to/result] DIR
6112
+ $ ${command} [--bin=path/to/sbt/binary] [--out=path/to/result] FILE|DIR
6049
6113
 
6050
6114
  Options
6051
6115
  ${utils.getFlagListOutput(config.flags, 6)}
6052
6116
 
6053
- Uses gradle, preferably through your local project \`gradlew\`, to generate a
6054
- \`pom.xml\` file for each task. If you have no \`gradlew\` you can try the
6055
- global \`gradle\` binary but that may not work (hard to predict).
6117
+ Uses \`sbt makePom\` to generate a \`pom.xml\` from your \`build.sbt\` file.
6118
+ This xml file is the dependency manifest (like a package.json
6119
+ for Node.js or requirements.txt for PyPi), but specifically for Scala.
6056
6120
 
6057
- The \`pom.xml\` is a manifest file similar to \`package.json\` for npm or
6058
- or requirements.txt for PyPi), but specifically for Maven, which is Java's
6059
- dependency repository. Languages like Kotlin and Scala piggy back on it too.
6121
+ There are some caveats with \`build.sbt\` to \`pom.xml\` conversion:
6060
6122
 
6061
- There are some caveats with the gradle to \`pom.xml\` conversion:
6123
+ - the xml is exported as socket.pom.xml as to not confuse existing build tools
6124
+ but it will first hit your /target/sbt<version> folder (as a different name)
6062
6125
 
6063
- - each task will generate its own xml file and by default it generates one xml
6064
- for every task. (This may be a good thing!)
6126
+ - the pom.xml format (standard by Scala) does not support certain sbt features
6127
+ - \`excludeAll()\`, \`dependencyOverrides\`, \`force()\`, \`relativePath\`
6128
+ - For details: https://www.scala-sbt.org/1.x/docs/Library-Management.html
6065
6129
 
6066
- - it's possible certain features don't translate well into the xml. If you
6067
- think something is missing that could be supported please reach out.
6130
+ - it uses your sbt settings and local configuration verbatim
6068
6131
 
6069
- - it works with your \`gradlew\` from your repo and local settings and config
6132
+ - it can only export one target per run, so if you have multiple targets like
6133
+ development and production, you must run them separately.
6134
+
6135
+ You can optionally configure the path to the \`sbt\` bin to invoke.
6070
6136
 
6071
6137
  Support is beta. Please report issues or give us feedback on what's missing.
6072
6138
 
6139
+ This is only for SBT. If your Scala setup uses gradle, please see the help
6140
+ sections for \`socket manifest gradle\` or \`socket cdxgen\`.
6141
+
6073
6142
  Examples
6074
6143
 
6075
- $ ${command} .
6076
- $ ${command} --bin=../gradlew .
6144
+ $ ${command} ./build.sbt
6145
+ $ ${command} --bin=/usr/bin/sbt ./build.sbt
6077
6146
  `
6078
6147
  };
6079
- const cmdManifestKotlin = {
6148
+ const cmdManifestScala = {
6080
6149
  description: config$u.description,
6081
6150
  hidden: config$u.hidden,
6082
6151
  run: run$u
@@ -6125,25 +6194,33 @@ async function run$u(argv, importMeta, {
6125
6194
  if (!wasValidInput) {
6126
6195
  return;
6127
6196
  }
6128
- const {
6129
- bin = path.join(target, 'gradlew'),
6130
- cwd = process.cwd()
6131
- } = cli.flags;
6197
+ let bin = 'sbt';
6198
+ if (cli.flags['bin']) {
6199
+ bin = cli.flags['bin'];
6200
+ }
6201
+ let out = './socket.pom.xml';
6202
+ if (cli.flags['out']) {
6203
+ out = cli.flags['out'];
6204
+ }
6205
+ if (cli.flags['stdout']) {
6206
+ out = '-';
6207
+ }
6132
6208
  if (verbose) {
6133
6209
  logger.logger.group();
6134
6210
  logger.logger.log('- target:', target);
6135
6211
  logger.logger.log('- gradle bin:', bin);
6212
+ logger.logger.log('- out:', out);
6136
6213
  logger.logger.groupEnd();
6137
6214
  }
6138
- let gradleOpts = [];
6139
- if (cli.flags['gradleOpts']) {
6140
- gradleOpts = cli.flags['gradleOpts'].split(' ').map(s => s.trim()).filter(Boolean);
6215
+ let sbtOpts = [];
6216
+ if (cli.flags['sbtOpts']) {
6217
+ sbtOpts = cli.flags['sbtOpts'].split(' ').map(s => s.trim()).filter(Boolean);
6141
6218
  }
6142
6219
  if (cli.flags['dryRun']) {
6143
6220
  logger.logger.log(DRY_RUN_BAILING_NOW$r);
6144
6221
  return;
6145
6222
  }
6146
- await convertGradleToMaven(target, String(bin), String(cwd), verbose, gradleOpts);
6223
+ await convertSbtToMaven(target, bin, out, verbose, sbtOpts);
6147
6224
  }
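
For context, the convertSbtToMaven body shown as removed earlier in this section collects the generated pom paths by scanning sbt's stdout for `Wrote <path>.pom` lines. A self-contained sketch of that extraction (the stdout text is illustrative):

// Sample output in the shape sbt's makePom task reports written files (illustrative only).
const stdout = [
  '[info] Wrote /repo/target/scala-2.13/myapp_2.13-0.1.0.pom',
  '[success] Total time: 3 s'
].join('\n') + '\n';

const poms = [];
// Same pattern as the CLI: capture everything up to ".pom" on "Wrote ..." lines.
stdout.replace(/Wrote (.*?.pom)\n/g, (_all, fn) => {
  poms.push(fn);
  return fn;
});
console.log(poms); // [ '/repo/target/scala-2.13/myapp_2.13-0.1.0.pom' ]
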
6148
6225
 
6149
6226
  const config$t = {
@@ -8681,6 +8758,50 @@ async function run$d(argv, importMeta, {
8681
8758
  await handleDeleteRepo(orgSlug, repoName, outputKind);
8682
8759
  }
8683
8760
 
8761
+ async function fetchListAllRepos({
8762
+ direction,
8763
+ orgSlug,
8764
+ sort
8765
+ }) {
8766
+ const sockSdkResult = await utils.setupSdk();
8767
+ if (!sockSdkResult.ok) {
8768
+ return sockSdkResult;
8769
+ }
8770
+ const sockSdk = sockSdkResult.data;
8771
+ const rows = [];
8772
+ let protection = 0;
8773
+ let nextPage = 0;
8774
+ while (nextPage >= 0) {
8775
+ if (++protection > 100) {
8776
+ return {
8777
+ ok: false,
8778
+ message: 'Infinite loop detected',
8779
+ cause: `Either there are over 100 pages of results or the fetch has run into an infinite loop. Breaking it off now. nextPage=${nextPage}`
8780
+ };
8781
+ }
8782
+ // eslint-disable-next-line no-await-in-loop
8783
+ const result = await utils.handleApiCall(sockSdk.getOrgRepoList(orgSlug, {
8784
+ sort,
8785
+ direction,
8786
+ per_page: String(100),
8787
+ // max
8788
+ page: String(nextPage)
8789
+ }), 'list of repositories');
8790
+ if (!result.ok) {
8791
+ return result;
8792
+ }
8793
+ result.data.results.forEach(row => rows.push(row));
8794
+ nextPage = result.data.nextPage ?? -1;
8795
+ }
8796
+ return {
8797
+ ok: true,
8798
+ data: {
8799
+ results: rows,
8800
+ nextPage: null
8801
+ }
8802
+ };
8803
+ }
8804
+
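
The new fetchListAllRepos accumulates rows across pages until the server stops reporting a nextPage, with a 100-iteration guard against runaway loops. A stripped-down, runnable sketch of that pattern; fetchPage is a made-up stand-in for the SDK call:

// Hypothetical page fetcher standing in for the SDK call: resolves to
// { results, nextPage } where nextPage is null on the last page.
async function fetchPage(page) {
  const pages = [[{ id: 1 }, { id: 2 }], [{ id: 3 }]];
  return { results: pages[page] ?? [], nextPage: page + 1 < pages.length ? page + 1 : null };
}

async function fetchAllRows() {
  const rows = [];
  let nextPage = 0;
  let protection = 0;
  while (nextPage >= 0) {
    if (++protection > 100) {
      return { ok: false, message: 'Infinite loop detected' };
    }
    // Pages are fetched sequentially, as in the await-in-loop above.
    const { results, nextPage: np } = await fetchPage(nextPage);
    rows.push(...results);
    nextPage = np ?? -1; // a null/undefined nextPage ends the loop
  }
  return { ok: true, data: { results: rows, nextPage: null } };
}

fetchAllRows().then(r => console.log(r.data.results.length)); // 3
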
8684
8805
  async function fetchListRepos({
8685
8806
  direction,
8686
8807
  orgSlug,
@@ -8702,18 +8823,33 @@ async function fetchListRepos({
8702
8823
  }
8703
8824
 
8704
8825
  // @ts-ignore
8705
- async function outputListRepos(result, outputKind) {
8826
+ async function outputListRepos(result, outputKind, page, nextPage, sort, perPage, direction) {
8706
8827
  if (!result.ok) {
8707
8828
  process.exitCode = result.code ?? 1;
8708
8829
  }
8709
8830
  if (outputKind === 'json') {
8710
- logger.logger.log(utils.serializeResultJson(result));
8831
+ if (result.ok) {
8832
+ logger.logger.log(utils.serializeResultJson({
8833
+ ok: true,
8834
+ data: {
8835
+ data: result.data,
8836
+ direction,
8837
+ nextPage: nextPage ?? 0,
8838
+ page,
8839
+ perPage,
8840
+ sort
8841
+ }
8842
+ }));
8843
+ } else {
8844
+ logger.logger.log(utils.serializeResultJson(result));
8845
+ }
8711
8846
  return;
8712
8847
  }
8713
8848
  if (!result.ok) {
8714
8849
  logger.logger.fail(utils.failMsgWithBadge(result.message, result.cause));
8715
8850
  return;
8716
8851
  }
8852
+ logger.logger.log(`Result page: ${page}, results per page: ${perPage === Infinity ? 'all' : perPage}, sorted by: ${sort}, direction: ${direction}`);
8717
8853
  const options = {
8718
8854
  columns: [{
8719
8855
  field: 'id',
@@ -8733,9 +8869,18 @@ async function outputListRepos(result, outputKind) {
8733
8869
  }]
8734
8870
  };
8735
8871
  logger.logger.log(vendor.srcExports(options, result.data.results));
8872
+ if (nextPage) {
8873
+ logger.logger.info(`This is page ${page}. Server indicated there are more results available on page ${nextPage}...`);
8874
+ logger.logger.info(`(Hint: you can use \`socket repos list --page ${nextPage}\`)`);
8875
+ } else if (perPage === Infinity) {
8876
+ logger.logger.info(`This should be the entire list available on the server.`);
8877
+ } else {
8878
+ logger.logger.info(`This is page ${page}. Server indicated this is the last page with results.`);
8879
+ }
8736
8880
  }
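
Under --json, the success path of outputListRepos now wraps the fetch result in a paging envelope instead of printing it raw. An illustrative shape (field values are examples, not real output):

// Illustrative only: the real code builds this via utils.serializeResultJson, and
// data.data is whatever the repo fetch returned (its results plus the server's nextPage).
const envelope = {
  ok: true,
  data: {
    data: { results: [/* repo rows */], nextPage: 2 },
    direction: 'desc',
    nextPage: 2, // falls back to 0 when the server reports none
    page: 1,
    perPage: 30,
    sort: 'created_at'
  }
};
console.log(JSON.stringify(envelope, null, 2));
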
8737
8881
 
8738
8882
  async function handleListRepos({
8883
+ all,
8739
8884
  direction,
8740
8885
  orgSlug,
8741
8886
  outputKind,
@@ -8743,14 +8888,28 @@ async function handleListRepos({
8743
8888
  per_page,
8744
8889
  sort
8745
8890
  }) {
8746
- const data = await fetchListRepos({
8747
- direction,
8748
- orgSlug,
8749
- page,
8750
- per_page,
8751
- sort
8752
- });
8753
- await outputListRepos(data, outputKind);
8891
+ if (all) {
8892
+ const data = await fetchListAllRepos({
8893
+ direction,
8894
+ orgSlug,
8895
+ sort
8896
+ });
8897
+ await outputListRepos(data, outputKind, 0, 0, sort, Infinity, direction);
8898
+ } else {
8899
+ const data = await fetchListRepos({
8900
+ direction,
8901
+ orgSlug,
8902
+ page,
8903
+ per_page,
8904
+ sort
8905
+ });
8906
+ if (!data.ok) {
8907
+ await outputListRepos(data, outputKind, 0, 0, '', 0, direction);
8908
+ } else {
8909
+ // Note: nextPage defaults to 0, is null when there's no next page
8910
+ await outputListRepos(data, outputKind, page, data.data.nextPage, sort, per_page, direction);
8911
+ }
8912
+ }
8754
8913
  }
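
handleListRepos now dispatches on --all: the whole-list fetch is rendered with page 0 and Infinity as the per-page sentinel, which the output layer prints as "all". A reduced, runnable sketch with stand-ins for the real helpers:

// Stand-ins for fetchListAllRepos, fetchListRepos and outputListRepos.
const fetchAllRepos = async () => ({ ok: true, data: { results: [], nextPage: null } });
const fetchReposPage = async (page, perPage) => ({ ok: true, data: { results: [], nextPage: page + 1 } });
const render = (result, page, nextPage, perPage) =>
  console.log(`page ${page}, per page ${perPage === Infinity ? 'all' : perPage}, next ${nextPage}`);

async function listRepos({ all, page = 1, perPage = 30 }) {
  if (all) {
    // Whole list at once: page 0 and Infinity tell the renderer this was not paged.
    return render(await fetchAllRepos(), 0, 0, Infinity);
  }
  const result = await fetchReposPage(page, perPage);
  // nextPage only matters on success; failures render with placeholder paging info.
  return render(result, page, result.ok ? result.data.nextPage : 0, perPage);
}

listRepos({ all: true });  // page 0, per page all, next 0
listRepos({ all: false }); // page 1, per page 30, next 2
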
8755
8914
 
8756
8915
  const {
@@ -8763,11 +8922,10 @@ const config$c = {
8763
8922
  flags: {
8764
8923
  ...utils.commonFlags,
8765
8924
  ...utils.outputFlags,
8766
- sort: {
8767
- type: 'string',
8768
- shortFlag: 's',
8769
- default: 'created_at',
8770
- description: 'Sorting option'
8925
+ all: {
8926
+ type: 'boolean',
8927
+ default: false,
8928
+ description: 'By default the view shows the last n repos. This flag allows you to fetch the entire list. It ignores --page and --perPage.'
8771
8929
  },
8772
8930
  direction: {
8773
8931
  type: 'string',
@@ -8794,6 +8952,12 @@ const config$c = {
8794
8952
  shortFlag: 'p',
8795
8953
  default: 1,
8796
8954
  description: 'Page number'
8955
+ },
8956
+ sort: {
8957
+ type: 'string',
8958
+ shortFlag: 's',
8959
+ default: 'created_at',
8960
+ description: 'Sorting option'
8797
8961
  }
8798
8962
  },
8799
8963
  help: (command, config) => `
@@ -8826,15 +8990,15 @@ async function run$c(argv, importMeta, {
8826
8990
  parentName
8827
8991
  });
8828
8992
  const {
8829
- json,
8830
- markdown
8831
- } = cli.flags;
8832
- const outputKind = utils.getOutputKind(json, markdown);
8833
- const {
8993
+ all,
8994
+ direction = 'desc',
8834
8995
  dryRun,
8835
8996
  interactive,
8997
+ json,
8998
+ markdown,
8836
8999
  org: orgFlag
8837
9000
  } = cli.flags;
9001
+ const outputKind = utils.getOutputKind(json, markdown);
8838
9002
  const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), cli.input[0] || '', !!interactive, !!dryRun);
8839
9003
  const hasApiToken = utils.hasDefaultToken();
8840
9004
  const wasValidInput = utils.checkCommandInput(outputKind, {
@@ -8855,6 +9019,12 @@ async function run$c(argv, importMeta, {
8855
9019
  message: 'You need to be logged in to use this command. See `socket login`.',
8856
9020
  pass: 'ok',
8857
9021
  fail: 'missing API token'
9022
+ }, {
9023
+ nook: true,
9024
+ test: direction === 'asc' || direction === 'desc',
9025
+ message: 'The --direction value must be "asc" or "desc"',
9026
+ pass: 'ok',
9027
+ fail: 'unexpected value'
8858
9028
  });
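
The added --direction check uses the same { test, message, pass, fail } shape as the surrounding input checks. A toy validator, not the real utils.checkCommandInput, showing how a failing check would block the command:

// Toy stand-in for utils.checkCommandInput: logs failures and reports overall validity.
function checkInput(...checks) {
  let ok = true;
  for (const { test, message, fail } of checks) {
    if (!test) {
      console.error(`${message} (${fail})`);
      ok = false;
    }
  }
  return ok;
}

const direction = 'sideways';
const wasValidInput = checkInput({
  test: direction === 'asc' || direction === 'desc',
  message: 'The --direction value must be "asc" or "desc"',
  pass: 'ok',
  fail: 'unexpected value'
});
console.log(wasValidInput); // false, so the command would return before calling the API
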
8859
9029
  if (!wasValidInput) {
8860
9030
  return;
@@ -8864,7 +9034,8 @@ async function run$c(argv, importMeta, {
8864
9034
  return;
8865
9035
  }
8866
9036
  await handleListRepos({
8867
- direction: cli.flags['direction'] === 'asc' ? 'asc' : 'desc',
9037
+ all: Boolean(all),
9038
+ direction: direction === 'asc' ? 'asc' : 'desc',
8868
9039
  orgSlug,
8869
9040
  outputKind,
8870
9041
  page: Number(cli.flags['page']) || 1,
@@ -9312,11 +9483,6 @@ const config$9 = {
9312
9483
  default: true,
9313
9484
  description: 'Allow for interactive elements, asking for input. Use --no-interactive to prevent any input questions, defaulting them to cancel/no.'
9314
9485
  },
9315
- pendingHead: {
9316
- type: 'boolean',
9317
- default: true,
9318
- description: 'Designate this full-scan as the latest scan of a given branch. This must be set to have it show up in the dashboard.'
9319
- },
9320
9486
  pullRequest: {
9321
9487
  type: 'number',
9322
9488
  shortFlag: 'pr',
@@ -9342,11 +9508,17 @@ const config$9 = {
9342
9508
  default: false,
9343
9509
  description: 'Wait for the scan creation to complete, then basically run `socket scan report` on it'
9344
9510
  },
9511
+ setAsAlertsPage: {
9512
+ type: 'boolean',
9513
+ default: true,
9514
+ aliases: ['pendingHead'],
9515
+ description: 'When true and if this is the "default branch" then this Scan will be the one reflected on your alerts page. See help for details. Defaults to true.'
9516
+ },
9345
9517
  tmp: {
9346
9518
  type: 'boolean',
9347
9519
  shortFlag: 't',
9348
9520
  default: false,
9349
- description: 'Set the visibility (true/false) of the scan in your dashboard. Can not be used when --pendingHead is set.'
9521
+ description: 'Set the visibility (true/false) of the scan in your dashboard.'
9350
9522
  }
9351
9523
  },
9352
9524
  // TODO: your project's "socket.yml" file's "projectIgnorePaths"
@@ -9378,8 +9550,12 @@ const config$9 = {
9378
9550
  Note: for a first run you probably want to set --defaultBranch to indicate
9379
9551
  the default branch name, like "main" or "master".
9380
9552
 
9381
- Note: --pendingHead is enabled by default and makes a scan show up in your
9382
- dashboard. You can use \`--no-pendingHead\` to have it not show up.
9553
+ The "alerts page" (https://socket.dev/dashboard/org/YOURORG/alerts) will show
9554
+ the results from the last scan designated as the "pending head" on the branch
9555
+ configured on Socket to be the "default branch". When creating a scan the
9556
+ --setAsAlertsPage flag will default to true to update this. You can prevent
9557
+ this by using --no-setAsAlertsPage. This flag is ignored for any branch that
9558
+ is not designated as the "default branch". It is disabled when using --tmp.
9383
9559
 
9384
9560
  Options
9385
9561
  ${utils.getFlagListOutput(config.flags, 6)}
@@ -9415,14 +9591,15 @@ async function run$9(argv, importMeta, {
9415
9591
  json,
9416
9592
  markdown,
9417
9593
  org: orgFlag,
9418
- pendingHead,
9419
9594
  pullRequest,
9420
9595
  readOnly,
9421
9596
  repo: repoName = 'socket-default-repository',
9422
9597
  report,
9598
+ setAsAlertsPage: pendingHeadFlag,
9423
9599
  tmp
9424
9600
  } = cli.flags;
9425
9601
  const outputKind = utils.getOutputKind(json, markdown);
9602
+ const pendingHead = tmp ? false : pendingHeadFlag;
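
A tiny sketch of how that line resolves the flag; the resolver function is hypothetical, but the behavior mirrors the statement above and the aliases: ['pendingHead'] entry in the flag definition:

// Hypothetical resolver: --tmp always wins, and the old --pendingHead spelling
// is accepted as an alias for --setAsAlertsPage.
function resolvePendingHead({ setAsAlertsPage = true, tmp = false }) {
  return tmp ? false : setAsAlertsPage;
}

console.log(resolvePendingHead({}));                          // true  (default)
console.log(resolvePendingHead({ tmp: true }));               // false (--tmp disables it)
console.log(resolvePendingHead({ setAsAlertsPage: false }));  // false (--no-setAsAlertsPage)
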
9426
9603
  let [orgSlug, defaultOrgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), cli.input[0] || '', interactive, dryRun);
9427
9604
  if (!defaultOrgSlug) {
9428
9605
  // Tmp. just for TS. will drop this later.
@@ -9486,12 +9663,6 @@ async function run$9(argv, importMeta, {
9486
9663
  message: 'This command requires an API token for access',
9487
9664
  pass: 'ok',
9488
9665
  fail: 'missing (try `socket login`)'
9489
- }, {
9490
- nook: true,
9491
- test: !pendingHead || !tmp,
9492
- message: 'Can not use --pendingHead and --tmp at the same time',
9493
- pass: 'ok',
9494
- fail: 'remove at least one flag'
9495
9666
  }, {
9496
9667
  nook: true,
9497
9668
  test: !pendingHead || !!branchName,
@@ -11535,5 +11706,5 @@ void (async () => {
11535
11706
  await utils.captureException(e);
11536
11707
  }
11537
11708
  })();
11538
- //# debugId=f795e75b-2414-4f4b-8612-273af979480d
11709
+ //# debugId=58ee5340-de19-42ee-8faf-fef7b5047b56
11539
11710
  //# sourceMappingURL=cli.js.map