@socketsecurity/cli-with-sentry 0.15.15 → 0.15.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/.config/tsconfig.dts.tsbuildinfo +1 -1
- package/dist/cli.js +1035 -886
- package/dist/cli.js.map +1 -1
- package/dist/constants.js +3 -3
- package/dist/constants.js.map +1 -1
- package/dist/socket-completion.bash +3 -3
- package/dist/types/commands/ci/cmd-ci.d.mts.map +1 -1
- package/dist/types/commands/ci/handle-ci.d.mts +1 -1
- package/dist/types/commands/ci/handle-ci.d.mts.map +1 -1
- package/dist/types/commands/fix/npm-fix.d.mts.map +1 -1
- package/dist/types/commands/fix/open-pr.d.mts.map +1 -1
- package/dist/types/commands/fix/pnpm-fix.d.mts.map +1 -1
- package/dist/types/commands/manifest/cmd-manifest-auto.d.mts.map +1 -1
- package/dist/types/commands/manifest/detect-manifest-actions.d.mts +9 -0
- package/dist/types/commands/manifest/detect-manifest-actions.d.mts.map +1 -0
- package/dist/types/commands/manifest/generate_auto_manifest.d.mts +4 -0
- package/dist/types/commands/manifest/generate_auto_manifest.d.mts.map +1 -0
- package/dist/types/commands/repos/cmd-repos-list.d.mts.map +1 -1
- package/dist/types/commands/repos/fetch-list-all-repos.d.mts +8 -0
- package/dist/types/commands/repos/fetch-list-all-repos.d.mts.map +1 -0
- package/dist/types/commands/repos/handle-list-repos.d.mts +3 -2
- package/dist/types/commands/repos/handle-list-repos.d.mts.map +1 -1
- package/dist/types/commands/repos/output-list-repos.d.mts +1 -1
- package/dist/types/commands/repos/output-list-repos.d.mts.map +1 -1
- package/dist/types/commands/scan/cmd-scan-create.d.mts.map +1 -1
- package/dist/types/commands/scan/handle-create-new-scan.d.mts +2 -1
- package/dist/types/commands/scan/handle-create-new-scan.d.mts.map +1 -1
- package/external/@socketsecurity/registry/lib/constants/skip-tests-by-ecosystem.js +1 -5
- package/external/@socketsecurity/registry/lib/logger.d.ts +6 -1
- package/external/@socketsecurity/registry/lib/logger.js +39 -3
- package/external/@socketsecurity/registry/lib/spinner.js +13 -2
- package/external/@socketsecurity/registry/lib/strings.d.ts +7 -2
- package/external/@socketsecurity/registry/lib/strings.js +6 -0
- package/external/@socketsecurity/registry/manifest.json +2 -2
- package/external/@socketsecurity/registry/package.json +2 -2
- package/package.json +3 -2
package/dist/cli.js
CHANGED
@@ -14,6 +14,7 @@ var fs$1 = require('node:fs');
 var path = require('node:path');
 var shadowBin = require('./shadow-bin.js');
 var prompts = require('../external/@socketsecurity/registry/lib/prompts');
+var spawn = require('../external/@socketsecurity/registry/lib/spawn');
 var util = require('node:util');
 var arrays = require('../external/@socketsecurity/registry/lib/arrays');
 var registry = require('../external/@socketsecurity/registry');
@@ -22,7 +23,6 @@ var packages = require('../external/@socketsecurity/registry/lib/packages');
 var sorts = require('../external/@socketsecurity/registry/lib/sorts');
 var path$1 = require('../external/@socketsecurity/registry/lib/path');
 var regexps = require('../external/@socketsecurity/registry/lib/regexps');
-var spawn = require('../external/@socketsecurity/registry/lib/spawn');
 var fs$2 = require('../external/@socketsecurity/registry/lib/fs');
 var shadowInject = require('./shadow-inject.js');
 var objects = require('../external/@socketsecurity/registry/lib/objects');
@@ -1769,165 +1769,621 @@ async function outputCreateNewScan(result, outputKind, interactive) {
 }
 }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-tmp
-}) {
-const supportedFileNames = await fetchSupportedScanFileNames();
-if (!supportedFileNames.ok) {
-await outputCreateNewScan(supportedFileNames, outputKind, interactive);
-return;
-}
-const packagePaths = await utils.getPackageFilesForScan(cwd, targets, supportedFileNames.data);
-const wasValidInput = utils.checkCommandInput(outputKind, {
-nook: true,
-test: packagePaths.length > 0,
-pass: 'ok',
-fail: 'found no eligible files to scan',
-message: 'TARGET (file/dir) must contain matching / supported file types for a scan'
-});
-if (!wasValidInput) {
-return;
+// The point here is to attempt to detect the various supported manifest files
+// the CLI can generate. This would be environments that we can't do server side
+
+async function detectManifestActions(cwd = process.cwd()) {
+const output = {
+cdxgen: false,
+// TODO
+count: 0,
+conda: false,
+gradle: false,
+sbt: false
+};
+if (fs$1.existsSync(path.join(cwd, 'build.sbt'))) {
+debug.debugLog('Detected a Scala sbt build, running default Scala generator...');
+output.sbt = true;
+output.count += 1;
 }
-if (
-
-
+if (fs$1.existsSync(path.join(cwd, 'gradlew'))) {
+debug.debugLog('Detected a gradle build, running default gradle generator...');
+output.gradle = true;
+output.count += 1;
 }
-const
-
-
-
-
-
-
-
-if (data.ok && report) {
-if (data.data?.id) {
-await handleScanReport({
-filePath: '-',
-fold: 'version',
-includeLicensePolicy: true,
-orgSlug,
-outputKind,
-reportLevel: 'error',
-scanId: data.data.id,
-short: false
-});
-} else {
-await outputCreateNewScan({
-ok: false,
-message: 'Missing Scan ID',
-cause: 'Server did not respond with a scan ID',
-data: data.data
-}, outputKind, interactive);
-}
-} else {
-await outputCreateNewScan(data, outputKind, interactive);
+const envyml = path.join(cwd, 'environment.yml');
+const hasEnvyml = fs$1.existsSync(envyml);
+const envyaml = path.join(cwd, 'environment.yaml');
+const hasEnvyaml = !hasEnvyml && fs$1.existsSync(envyaml);
+if (hasEnvyml || hasEnvyaml) {
+debug.debugLog('Detected an environment.yml file, running default Conda generator...');
+output.conda = true;
+output.count += 1;
 }
+return output;
 }
 
-async function
-//
-
-
-// }
-const result = await getDefaultOrgSlug();
-if (!result.ok) {
-process.exitCode = result.code ?? 1;
-// Always assume json mode
-logger.logger.log(utils.serializeResultJson(result));
-return;
+async function convertGradleToMaven(target, bin, cwd, verbose, gradleOpts) {
+// TODO: impl json/md
+if (verbose) {
+logger.logger.log('[VERBOSE] Resolving:', [cwd, bin]);
 }
+const rbin = path.resolve(cwd, bin);
+if (verbose) {
+logger.logger.log('[VERBOSE] Resolving:', [cwd, target]);
+}
+const rtarget = path.resolve(cwd, target);
+const binExists = fs$1.existsSync(rbin);
+const targetExists = fs$1.existsSync(rtarget);
+logger.logger.group('gradle2maven:');
+if (verbose || debug.isDebug()) {
+logger.logger.log(`[VERBOSE] - Absolute bin path: \`${rbin}\` (${binExists ? 'found' : vendor.yoctocolorsCjsExports.red('not found!')})`);
+logger.logger.log(`[VERBOSE] - Absolute target path: \`${rtarget}\` (${targetExists ? 'found' : vendor.yoctocolorsCjsExports.red('not found!')})`);
+} else {
+logger.logger.log(`- executing: \`${rbin}\``);
+if (!binExists) {
+logger.logger.warn('Warning: It appears the executable could not be found at this location. An error might be printed later because of that.');
+}
+logger.logger.log(`- src dir: \`${rtarget}\``);
+if (!targetExists) {
+logger.logger.warn('Warning: It appears the src dir could not be found at this location. An error might be printed later because of that.');
+}
+}
+logger.logger.groupEnd();
+try {
+// Run gradlew with the init script we provide which should yield zero or more
+// pom files. We have to figure out where to store those pom files such that
+// we can upload them and predict them through the GitHub API. We could do a
+// .socket folder. We could do a socket.pom.gz with all the poms, although
+// I'd prefer something plain-text if it is to be committed.
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
-
-
-
-
-
-
-
-
-
-}
-
-
-
-
-
-
-
-
-It will use the default org for the set API token.
-`
-};
-const cmdCI = {
-description: config$L.description,
-hidden: config$L.hidden,
-run: run$L
-};
-async function run$L(argv, importMeta, {
-parentName
-}) {
-const cli = utils.meowOrExit({
-argv,
-config: config$L,
-importMeta,
-parentName
-});
-if (cli.flags['dryRun']) {
-logger.logger.log(DRY_RUN_BAILING_NOW$H);
-return;
+// Note: init.gradle will be exported by .config/rollup.dist.config.mjs
+const initLocation = path.join(constants.distPath, 'init.gradle');
+const commandArgs = ['--init-script', initLocation, ...gradleOpts, 'pom'];
+if (verbose) {
+logger.logger.log('[VERBOSE] Executing:', [bin], ', args:', commandArgs);
+}
+logger.logger.log(`Converting gradle to maven from \`${bin}\` on \`${target}\` ...`);
+const output = await execGradleWithSpinner(rbin, commandArgs, rtarget, cwd);
+if (verbose) {
+logger.logger.group('[VERBOSE] gradle stdout:');
+logger.logger.log(output);
+logger.logger.groupEnd();
+}
+if (output.code !== 0) {
+process.exitCode = 1;
+logger.logger.fail(`Gradle exited with exit code ${output.code}`);
+// (In verbose mode, stderr was printed above, no need to repeat it)
+if (!verbose) {
+logger.logger.group('stderr:');
+logger.logger.error(output.stderr);
+logger.logger.groupEnd();
+}
+return;
+}
+logger.logger.success('Executed gradle successfully');
+logger.logger.log('Reported exports:');
+output.stdout.replace(/^POM file copied to: (.*)/gm, (_all, fn) => {
+logger.logger.log('- ', fn);
+return fn;
+});
+logger.logger.log('');
+logger.logger.log('Next step is to generate a Scan by running the `socket scan create` command on the same directory');
+} catch (e) {
+process.exitCode = 1;
+logger.logger.fail('There was an unexpected error while generating manifests' + (verbose ? '' : ' (use --verbose for details)'));
+if (verbose) {
+logger.logger.group('[VERBOSE] error:');
+logger.logger.log(e);
+logger.logger.groupEnd();
+}
 }
-await handleCI();
 }
-
-
-
-
-
-
-
+async function execGradleWithSpinner(bin, commandArgs, target, cwd) {
+// Lazily access constants.spinner.
+const {
+spinner
+} = constants;
+let pass = false;
+try {
+spinner.start(`Running gradlew... (this can take a while, it depends on how long gradlew has to run)`);
+const output = await spawn.spawn(bin, commandArgs, {
+// We can pipe the output through to have the user see the result
+// of running gradlew, but then we can't (easily) gather the output
+// to discover the generated files... probably a flag we should allow?
+// stdio: isDebug() ? 'inherit' : undefined,
+cwd: target || cwd
+});
+pass = true;
+const {
+code,
+stderr,
+stdout
+} = output;
 return {
-
-
-
+code,
+stdout,
+stderr
+};
+} finally {
+if (pass) {
+spinner.successAndStop('Completed gradlew execution');
+} else {
+spinner.failAndStop('There was an error while trying to run gradlew.');
+}
+}
+}
+
+async function convertSbtToMaven(target, bin, out, verbose, sbtOpts) {
+// TODO: impl json/md
+
+// Lazily access constants.spinner.
+const {
+spinner
+} = constants;
+const rbin = path.resolve(bin);
+const rtarget = path.resolve(target);
+if (verbose) {
+logger.logger.group('sbt2maven:');
+logger.logger.log(`[VERBOSE] - Absolute bin path: \`${rbin}\``);
+logger.logger.log(`[VERBOSE] - Absolute target path: \`${rtarget}\``);
+// logger.log(`[VERBOSE] - Absolute out path: \`${rout}\``)
+logger.logger.groupEnd();
+} else {
+logger.logger.group('sbt2maven:');
+logger.logger.log(`- executing: \`${bin}\``);
+logger.logger.log(`- src dir: \`${target}\``);
+// logger.log(`- dst dir: \`${out}\``)
+logger.logger.groupEnd();
+}
+try {
+spinner.start(`Converting sbt to maven from \`${bin}\` on \`${target}\`...`);
+
+// Run sbt with the init script we provide which should yield zero or more
+// pom files. We have to figure out where to store those pom files such that
+// we can upload them and predict them through the GitHub API. We could do a
+// .socket folder. We could do a socket.pom.gz with all the poms, although
+// I'd prefer something plain-text if it is to be committed.
+const output = await spawn.spawn(bin, ['makePom'].concat(sbtOpts), {
+cwd: target || '.'
+});
+spinner.stop();
+if (verbose) {
+logger.logger.group('[VERBOSE] sbt stdout:');
+logger.logger.log(output);
+logger.logger.groupEnd();
+}
+if (output.stderr) {
+process.exitCode = 1;
+logger.logger.fail('There were errors while running sbt');
+// (In verbose mode, stderr was printed above, no need to repeat it)
+if (!verbose) {
+logger.logger.group('[VERBOSE] stderr:');
+logger.logger.error(output.stderr);
+logger.logger.groupEnd();
+}
+return;
+}
+const poms = [];
+output.stdout.replace(/Wrote (.*?.pom)\n/g, (_all, fn) => {
+poms.push(fn);
+return fn;
+});
+if (!poms.length) {
+process.exitCode = 1;
+logger.logger.fail('There were no errors from sbt but it seems to not have generated any poms either');
+return;
+}
+// Move the pom file to ...? initial cwd? loc will be an absolute path, or dump to stdout
+// TODO: what to do with multiple output files? Do we want to dump them to stdout? Raw or with separators or ?
+// TODO: maybe we can add an option to target a specific file to dump to stdout
+if (out === '-' && poms.length === 1) {
+logger.logger.log('Result:\n```');
+logger.logger.log(await utils.safeReadFile(poms[0]));
+logger.logger.log('```');
+logger.logger.success(`OK`);
+} else if (out === '-') {
+process.exitCode = 1;
+logger.logger.fail('Requested out target was stdout but there are multiple generated files');
+poms.forEach(fn => logger.logger.error('-', fn));
+logger.logger.info('Exiting now...');
+return;
+} else {
+// if (verbose) {
+// logger.log(
+// `Moving manifest file from \`${loc.replace(/^\/home\/[^/]*?\//, '~/')}\` to \`${out}\``
+// )
+// } else {
+// logger.log('Moving output pom file')
+// }
+// TODO: do we prefer fs-extra? renaming can be gnarly on windows and fs-extra's version is better
+// await renamep(loc, out)
+logger.logger.success(`Generated ${poms.length} pom files`);
+poms.forEach(fn => logger.logger.log('-', fn));
+logger.logger.success(`OK`);
+}
+} catch (e) {
+process.exitCode = 1;
+spinner.stop();
+logger.logger.fail('There was an unexpected error while running this' + (verbose ? '' : ' (use --verbose for details)'));
+if (verbose) {
+logger.logger.group('[VERBOSE] error:');
+logger.logger.log(e);
+logger.logger.groupEnd();
+}
+}
+}
+
+async function convertCondaToRequirements(target, cwd, verbose) {
+let contents;
+if (target === '-') {
+if (verbose) {
+logger.logger.info(`[VERBOSE] reading input from stdin`);
+}
+const buf = [];
+contents = await new Promise((resolve, reject) => {
+process.stdin.on('data', chunk => {
+const input = chunk.toString();
+buf.push(input);
+});
+process.stdin.on('end', () => {
+resolve(buf.join(''));
+});
+process.stdin.on('error', e => {
+if (verbose) {
+logger.logger.error('Unexpected error while reading from stdin:', e);
+}
+reject(e);
+});
+process.stdin.on('close', () => {
+if (buf.length === 0) {
+if (verbose) {
+logger.logger.error('stdin closed explicitly without data received');
+}
+reject(new Error('No data received from stdin'));
+} else {
+if (verbose) {
+logger.logger.error('warning: stdin closed explicitly with some data received');
+}
+resolve(buf.join(''));
+}
+});
+});
+if (!contents) {
+return {
+ok: false,
+message: 'Manifest Generation Failed',
+cause: 'No data received from stdin'
+};
+}
+} else {
+const f = path.resolve(cwd, target);
+if (verbose) {
+logger.logger.info(`[VERBOSE] target file: ${f}`);
+}
+if (!fs$1.existsSync(f)) {
+return {
+ok: false,
+message: 'Manifest Generation Failed',
+cause: `Input file not found at ${f}`
+};
+}
+contents = fs$1.readFileSync(target, 'utf8');
+if (!contents) {
+return {
+ok: false,
+message: 'Manifest Generation Failed',
+cause: 'File is empty'
+};
+}
+}
+return {
+ok: true,
+data: {
+contents,
+pip: convertCondaToRequirementsFromInput(contents)
+}
+};
+}
+
+// Just extract the first pip block, if one exists at all.
+function convertCondaToRequirementsFromInput(input) {
+const keeping = [];
+let collecting = false;
+let delim = '-';
+let indent = '';
+input.split('\n').some(line => {
+if (!line) {
+// Ignore empty lines
+return;
+}
+if (collecting) {
+if (line.startsWith('#')) {
+// Ignore comment lines (keep?)
+return;
+}
+if (line.startsWith(delim)) {
+// In this case we have a line with the same indentation as the
+// `- pip:` line, so we have reached the end of the pip block.
+return true; // the end
+} else {
+if (!indent) {
+// Store the indentation of the block
+if (line.trim().startsWith('-')) {
+indent = line.split('-')[0] + '-';
+if (indent.length <= delim.length) {
+// The first line after the `pip:` line does not indent further
+// than that so the block is empty?
+return true;
+}
+}
+}
+if (line.startsWith(indent)) {
+keeping.push(line.slice(indent.length).trim());
+} else {
+// Unexpected input. bail.
+return true;
+}
+}
+} else {
+// Note: the line may end with a line comment so don't === it.
+if (line.trim().startsWith('- pip:')) {
+delim = line.split('-')[0] + '-';
+collecting = true;
+}
+}
+});
+return keeping.join('\n');
+}
+
+async function outputRequirements(result, outputKind, out) {
+if (!result.ok) {
+process.exitCode = result.code ?? 1;
+}
+if (!result.ok) {
+if (outputKind === 'json') {
+logger.logger.log(utils.serializeResultJson(result));
+return;
+}
+logger.logger.fail(utils.failMsgWithBadge(result.message, result.cause));
+return;
+}
+if (outputKind === 'json') {
+const json = utils.serializeResultJson(result);
+if (out === '-') {
+logger.logger.log(json);
+} else {
+fs$1.writeFileSync(out, json, 'utf8');
+}
+return;
+}
+if (outputKind === 'markdown') {
+const arr = [];
+arr.push('# Converted Conda file');
+arr.push('');
+arr.push('This is the Conda `environment.yml` file converted to python `requirements.txt`:');
+arr.push('');
+arr.push('```file=requirements.txt');
+arr.push(result.data.pip);
+arr.push('```');
+arr.push('');
+const md = arr.join('\n');
+if (out === '-') {
+logger.logger.log(md);
+} else {
+fs$1.writeFileSync(out, md, 'utf8');
+}
+return;
+}
+if (out === '-') {
+logger.logger.log(result.data.pip);
+logger.logger.log('');
+} else {
+fs$1.writeFileSync(out, result.data.pip, 'utf8');
+}
+}
+
+async function handleManifestConda(target, out, outputKind, cwd, verbose) {
+const data = await convertCondaToRequirements(target, cwd, verbose);
+await outputRequirements(data, outputKind, out);
+}
+
+async function generateAutoManifest(detected, cwd, verbose, outputKind) {
+if (detected.sbt) {
+logger.logger.log('Detected a Scala sbt build, generating pom files with sbt...');
+await convertSbtToMaven(cwd, 'sbt', './socket.sbt.pom.xml', verbose, []);
+}
+if (detected.gradle) {
+logger.logger.log('Detected a gradle build (Gradle, Kotlin, Scala), running default gradle generator...');
+await convertGradleToMaven(cwd, path.join(cwd, 'gradlew'), cwd, verbose, []);
+}
+if (detected.conda) {
+logger.logger.log('Detected an environment.yml file, running default Conda generator...');
+await handleManifestConda(cwd, '', outputKind, cwd, verbose);
+}
+}
+
+async function handleCreateNewScan({
+autoManifest,
+branchName,
+commitHash,
+commitMessage,
+committers,
+cwd,
+defaultBranch,
+interactive,
+orgSlug,
+outputKind,
+pendingHead,
+pullRequest,
+readOnly,
+repoName,
+report,
+targets,
+tmp
+}) {
+if (autoManifest) {
+logger.logger.info('Auto generating manifest files ...');
+const detected = await detectManifestActions(cwd);
+await generateAutoManifest(detected, cwd, false, outputKind);
+logger.logger.info('Auto generation finished. Proceeding with Scan creation.');
+}
+const supportedFileNames = await fetchSupportedScanFileNames();
+if (!supportedFileNames.ok) {
+await outputCreateNewScan(supportedFileNames, outputKind, interactive);
+return;
+}
+const packagePaths = await utils.getPackageFilesForScan(cwd, targets, supportedFileNames.data);
+const wasValidInput = utils.checkCommandInput(outputKind, {
+nook: true,
+test: packagePaths.length > 0,
+pass: 'ok',
+fail: 'found no eligible files to scan',
+message: 'TARGET (file/dir) must contain matching / supported file types for a scan'
+});
+if (!wasValidInput) {
+return;
+}
+if (readOnly) {
+logger.logger.log('[ReadOnly] Bailing now');
+return;
+}
+const data = await fetchCreateOrgFullScan(packagePaths, orgSlug, defaultBranch, pendingHead, tmp, cwd, {
+commitHash,
+commitMessage,
+committers,
+pullRequest,
+repoName,
+branchName
+});
+if (data.ok && report) {
+if (data.data?.id) {
+await handleScanReport({
+filePath: '-',
+fold: 'version',
+includeLicensePolicy: true,
+orgSlug,
+outputKind,
+reportLevel: 'error',
+scanId: data.data.id,
+short: false
+});
+} else {
+await outputCreateNewScan({
+ok: false,
+message: 'Missing Scan ID',
+cause: 'Server did not respond with a scan ID',
+data: data.data
+}, outputKind, interactive);
+}
+} else {
+await outputCreateNewScan(data, outputKind, interactive);
+}
+}
+
+async function handleCI(autoManifest) {
+// ci: {
+// description: 'Alias for "report create --view --strict"',
+// argv: ['report', 'create', '--view', '--strict']
+// }
+const result = await getDefaultOrgSlug();
+if (!result.ok) {
+process.exitCode = result.code ?? 1;
+// Always assume json mode
+logger.logger.log(utils.serializeResultJson(result));
+return;
+}
+
+// TODO: does it make sense to discover the commit details from local git?
+// TODO: does it makes sense to use custom branch/repo names here? probably socket.yml, right
+await handleCreateNewScan({
+autoManifest,
+branchName: 'socket-default-branch',
+commitMessage: '',
+commitHash: '',
+committers: '',
+cwd: process.cwd(),
+defaultBranch: false,
+interactive: false,
+orgSlug: result.data,
+outputKind: 'json',
+pendingHead: true,
+// when true, requires branch name set, tmp false
+pullRequest: 0,
+repoName: 'socket-default-repository',
+readOnly: false,
+report: true,
+targets: ['.'],
+tmp: false // don't set when pendingHead is true
+});
+}
+
+const {
+DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$H
+} = constants;
+const config$L = {
+commandName: 'ci',
+description: 'Create a new scan and report whether it passes your security policy',
+hidden: true,
+flags: {
+...utils.commonFlags,
+autoManifest: {
+type: 'boolean',
+default: false,
+// dev tools is not likely to be set up so this is safer
+description: 'Auto generate manifest files where detected? See autoManifest flag in `socket scan create`'
+}
+},
+help: (parentName, _config) => `
+Usage
+$ ${parentName}
+
+Options
+${utils.getFlagListOutput(config$L.flags, 6)}
+
+This command is intended to use in CI runs to allow automated systems to
+accept or reject a current build. When the scan does not pass your security
+policy, the exit code will be non-zero.
+
+It will use the default org for the set API token.
+
+The --autoManifest flag does the same as the one from \`socket scan create\`
+but is not enabled by default since the CI is less likely to be set up with
+all the necessary dev tooling. Enable it if you want the scan to include
+locally generated manifests like for gradle and sbt.
+`
+};
+const cmdCI = {
+description: config$L.description,
+hidden: config$L.hidden,
+run: run$L
+};
+async function run$L(argv, importMeta, {
+parentName
+}) {
+const cli = utils.meowOrExit({
+argv,
+config: config$L,
+importMeta,
+parentName
+});
+if (cli.flags['dryRun']) {
+logger.logger.log(DRY_RUN_BAILING_NOW$H);
+return;
+}
+await handleCI(Boolean(cli.flags['autoManifest']));
+}
+
+async function discoverConfigValue(key) {
+// This will have to be a specific implementation per key because certain
+// keys should request information from particular API endpoints while
+// others should simply return their default value, like endpoint URL.
+
+if (!utils.supportedConfigKeys.has(key)) {
+return {
+ok: false,
+message: 'Auto discover failed',
+cause: 'Requested key is not a valid config key.'
 };
 }
 if (key === 'apiBaseUrl') {
@@ -3516,7 +3972,7 @@ async function enablePrAutoMerge({
 if (error instanceof vendor.GraphqlResponseError && error.errors) {
 const details = error.errors.map(({
 message
-}) => ` - ${message.trim()}`).join('\n');
+}) => ` - ${message.trim()}`).join('\n').trim();
 message += `:\n${details}`;
 }
 logger.logger.error(message);
@@ -3686,11 +4142,18 @@ async function npmFix(pkgEnvDetails, {
 // Calling arb.reify() creates the arb.diff object, nulls-out arb.idealTree,
 // and populates arb.actualTree.
 let actualTree = await arb.reify();
-
-
-
-
-
+let alertsMap;
+try {
+alertsMap = purls.length ? await utils.getAlertsMapFromPurls(purls, getAlertMapOptions({
+limit
+})) : await shadowInject.getAlertsMapFromArborist(arb, getAlertMapOptions({
+limit
+}));
+} catch (e) {
+spinner?.stop();
+logger.logger.error(e?.message || 'Unknown Socket batch PURL API error.');
+return;
+}
 const infoByPkgName = utils.getCveInfoFromAlertsMap(alertsMap, {
 limit
 });
@@ -3842,11 +4305,12 @@ async function npmFix(pkgEnvDetails, {
 stdio: 'ignore'
 });
 }
-spinner?.
+spinner?.success(`Fixed ${name} in ${workspaceName}`);
 } catch (e) {
 errored = true;
 error = e;
 }
+spinner?.stop();
 if (!errored && isCi) {
 const branch = getSocketBranchName(oldPurl, newVersion, workspaceName);
 try {
@@ -3912,10 +4376,14 @@ async function npmFix(pkgEnvDetails, {
 const {
 data
 } = prResponse;
-logger.logger.
+logger.logger.success(`Opened PR #${data.number}.`);
 if (autoMerge) {
+logger.logger.indent();
+spinner?.indent();
 // eslint-disable-next-line no-await-in-loop
 await enablePrAutoMerge(data);
+logger.logger.dedent();
+spinner?.dedent();
 }
 }
 } catch (e) {
@@ -3933,6 +4401,7 @@ async function npmFix(pkgEnvDetails, {
 }
 if (errored) {
 if (!isCi) {
+spinner?.start();
 editablePkgJson.update(revertData);
 // eslint-disable-next-line no-await-in-loop
 await Promise.all([utils.removeNodeModules(cwd), editablePkgJson.save({
@@ -3942,8 +4411,9 @@ async function npmFix(pkgEnvDetails, {
 actualTree = await install$1(arb, {
 cwd
 });
+spinner?.stop();
 }
-
+logger.logger.fail(`Update failed for ${oldId} in ${workspaceName}`, error);
 }
 if (++count >= limit) {
 logger.logger.dedent();
@@ -3953,14 +4423,14 @@ async function npmFix(pkgEnvDetails, {
 }
 }
 if (!isLastPkgJsonPath && logger.logger.logCallCount > workspaceLogCallCount) {
-logger.logger.
+logger.logger.logNewline();
 }
 }
 for (const warningText of warningsForAfter) {
 logger.logger.warn(warningText);
 }
 if (!isLastInfoEntry) {
-logger.logger.
+logger.logger.logNewline();
 }
 logger.logger.dedent();
 spinner?.dedent();
@@ -4057,11 +4527,18 @@ async function pnpmFix(pkgEnvDetails, {
 logger.logger.error('Required pnpm-lock.yaml not found.');
 return;
 }
-
-
-
-
-
+let alertsMap;
+try {
+alertsMap = purls.length ? await utils.getAlertsMapFromPurls(purls, getAlertMapOptions({
+limit
+})) : await utils.getAlertsMapFromPnpmLockfile(lockfile, getAlertMapOptions({
+limit
+}));
+} catch (e) {
+spinner?.stop();
+logger.logger.error(e?.message || 'Unknown Socket batch PURL API error.');
+return;
+}
 const infoByPkgName = utils.getCveInfoFromAlertsMap(alertsMap, {
 limit
 });
@@ -4248,12 +4725,12 @@ async function pnpmFix(pkgEnvDetails, {
 stdio: 'ignore'
 });
 }
-spinner?.
+spinner?.success(`Fixed ${name} in ${workspaceName}`);
 } catch (e) {
 error = e;
 errored = true;
-spinner?.stop();
 }
+spinner?.stop();
 if (!errored && isCi) {
 const branch = getSocketBranchName(oldPurl, newVersion, workspaceName);
 try {
@@ -4322,10 +4799,14 @@ async function pnpmFix(pkgEnvDetails, {
 const {
 data
 } = prResponse;
-logger.logger.
+logger.logger.success(`Opened PR #${data.number}.`);
 if (autoMerge) {
+logger.logger.indent();
+spinner?.indent();
 // eslint-disable-next-line no-await-in-loop
 await enablePrAutoMerge(data);
+logger.logger.dedent();
+spinner?.dedent();
 }
 }
 } catch (e) {
@@ -4344,6 +4825,7 @@ async function pnpmFix(pkgEnvDetails, {
 }
 if (errored) {
 if (!isCi) {
+spinner?.start();
 editablePkgJson.update(revertData);
 // eslint-disable-next-line no-await-in-loop
 await Promise.all([utils.removeNodeModules(cwd), editablePkgJson.save({
@@ -4354,8 +4836,9 @@ async function pnpmFix(pkgEnvDetails, {
 cwd,
 spinner
 });
+spinner?.stop();
 }
-
+logger.logger.fail(`Update failed for ${oldId} in ${workspaceName}`, error);
 }
 if (++count >= limit) {
 logger.logger.dedent();
@@ -4365,14 +4848,14 @@ async function pnpmFix(pkgEnvDetails, {
 }
 }
 if (!isLastPkgJsonPath && logger.logger.logCallCount > workspaceLogCallCount) {
-logger.logger.
+logger.logger.logNewline();
 }
 }
 for (const warningText of warningsForAfter) {
 logger.logger.warn(warningText);
 }
 if (!isLastInfoEntry) {
-logger.logger.
+logger.logger.logNewline();
 }
 logger.logger.dedent();
 spinner?.dedent();
@@ -5149,230 +5632,42 @@ async function run$z(argv, importMeta, {
 if (cli.flags['dryRun']) {
 logger.logger.log(DRY_RUN_BAILING_NOW$w);
 return;
-}
-attemptLogout();
-}
-
-async function convertCondaToRequirements(target, cwd, verbose) {
-let contents;
-if (target === '-') {
-if (verbose) {
-logger.logger.info(`[VERBOSE] reading input from stdin`);
-}
-const buf = [];
-contents = await new Promise((resolve, reject) => {
-process.stdin.on('data', chunk => {
-const input = chunk.toString();
-buf.push(input);
-});
-process.stdin.on('end', () => {
-resolve(buf.join(''));
-});
-process.stdin.on('error', e => {
-if (verbose) {
-logger.logger.error('Unexpected error while reading from stdin:', e);
-}
-reject(e);
-});
-process.stdin.on('close', () => {
-if (buf.length === 0) {
-if (verbose) {
-logger.logger.error('stdin closed explicitly without data received');
-}
-reject(new Error('No data received from stdin'));
-} else {
-if (verbose) {
-logger.logger.error('warning: stdin closed explicitly with some data received');
-}
-resolve(buf.join(''));
-}
-});
-});
-if (!contents) {
-return {
-ok: false,
-message: 'Manifest Generation Failed',
-cause: 'No data received from stdin'
-};
-}
-} else {
-const f = path.resolve(cwd, target);
-if (verbose) {
-logger.logger.info(`[VERBOSE] target file: ${f}`);
-}
-if (!fs$1.existsSync(f)) {
-return {
-ok: false,
-message: 'Manifest Generation Failed',
-cause: `Input file not found at ${f}`
-};
-}
-contents = fs$1.readFileSync(target, 'utf8');
-if (!contents) {
-return {
-ok: false,
-message: 'Manifest Generation Failed',
-cause: 'File is empty'
-};
-}
-}
-return {
-ok: true,
-data: {
-contents,
-pip: convertCondaToRequirementsFromInput(contents)
-}
-};
-}
-
-// Just extract the first pip block, if one exists at all.
-function convertCondaToRequirementsFromInput(input) {
-const keeping = [];
-let collecting = false;
-let delim = '-';
-let indent = '';
-input.split('\n').some(line => {
-if (!line) {
-// Ignore empty lines
-return;
-}
-if (collecting) {
-if (line.startsWith('#')) {
-// Ignore comment lines (keep?)
-return;
-}
-if (line.startsWith(delim)) {
-// In this case we have a line with the same indentation as the
-// `- pip:` line, so we have reached the end of the pip block.
-return true; // the end
-} else {
-if (!indent) {
-// Store the indentation of the block
-if (line.trim().startsWith('-')) {
-indent = line.split('-')[0] + '-';
-if (indent.length <= delim.length) {
-// The first line after the `pip:` line does not indent further
-// than that so the block is empty?
-return true;
-}
-}
-}
-if (line.startsWith(indent)) {
-keeping.push(line.slice(indent.length).trim());
-} else {
-// Unexpected input. bail.
-return true;
-}
-}
-} else {
-// Note: the line may end with a line comment so don't === it.
-if (line.trim().startsWith('- pip:')) {
-delim = line.split('-')[0] + '-';
-collecting = true;
-}
-}
-});
-return keeping.join('\n');
-}
-
-async function outputRequirements(result, outputKind, out) {
-if (!result.ok) {
-process.exitCode = result.code ?? 1;
-}
-if (!result.ok) {
-if (outputKind === 'json') {
-logger.logger.log(utils.serializeResultJson(result));
-return;
-}
-logger.logger.fail(utils.failMsgWithBadge(result.message, result.cause));
-return;
-}
-if (outputKind === 'json') {
-const json = utils.serializeResultJson(result);
-if (out === '-') {
-logger.logger.log(json);
-} else {
-fs$1.writeFileSync(out, json, 'utf8');
-}
-return;
-}
-if (outputKind === 'markdown') {
-const arr = [];
-arr.push('# Converted Conda file');
-arr.push('');
-arr.push('This is the Conda `environment.yml` file converted to python `requirements.txt`:');
-arr.push('');
-arr.push('```file=requirements.txt');
-arr.push(result.data.pip);
-arr.push('```');
-arr.push('');
-const md = arr.join('\n');
-if (out === '-') {
-logger.logger.log(md);
-} else {
-fs$1.writeFileSync(out, md, 'utf8');
-}
-return;
-}
-if (out === '-') {
-logger.logger.log(result.data.pip);
-logger.logger.log('');
-} else {
-fs$1.writeFileSync(out, result.data.pip, 'utf8');
-}
-}
-
-async function handleManifestConda(target, out, outputKind, cwd, verbose) {
-const data = await convertCondaToRequirements(target, cwd, verbose);
-await outputRequirements(data, outputKind, out);
+}
+attemptLogout();
 }
 
 const {
 DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$v
 } = constants;
 const config$y = {
-commandName: '
-description: '
+commandName: 'auto',
+description: 'Auto-detect build and attempt to generate manifest file',
 hidden: false,
 flags: {
 ...utils.commonFlags,
-...utils.outputFlags,
 cwd: {
 type: 'string',
 description: 'Set the cwd, defaults to process.cwd()'
 },
-out: {
-type: 'string',
-default: '-',
-description: 'Output target (use `-` or omit to print to stdout)'
-},
 verbose: {
 type: 'boolean',
-
+default: false,
+description: 'Enable debug output, may help when running into errors'
 }
 },
 help: (command, config) => `
 Usage
-$ ${command}
-
-Warning: While we don't support Conda necessarily, this tool extracts the pip
-block from an environment.yml and outputs it as a requirements.txt
-which you can scan as if it were a pypi package.
-
-USE AT YOUR OWN RISK
-
-Note: FILE can be a dash (-) to indicate stdin. This way you can pipe the
-contents of a file to have it processed.
+$ ${command}
 
 Options
 ${utils.getFlagListOutput(config.flags, 6)}
 
-
-
-
+Tries to figure out what language your current repo uses. If it finds a
+supported case then it will try to generate the manifest file for that
+language with the default or detected settings.
 `
 };
-const
+const cmdManifestAuto = {
 description: config$y.description,
 hidden: config$y.hidden,
 run: run$y
@@ -5387,189 +5682,59 @@ async function run$y(argv, importMeta, {
 parentName
 });
 const {
-cwd
-json
-markdown
-
-verbose = false
+cwd: cwdFlag,
+json,
+markdown,
+verbose: verboseFlag
 } = cli.flags;
 const outputKind = utils.getOutputKind(json, markdown); // TODO: impl json/md further
-
-const
+const cwd = String(cwdFlag || process.cwd());
+const verbose = !!verboseFlag;
 if (verbose) {
 logger.logger.group('- ', parentName, config$y.commandName, ':');
 logger.logger.group('- flags:', cli.flags);
 logger.logger.groupEnd();
-logger.logger.log('-
-logger.logger.log('-
+logger.logger.log('- input:', cli.input);
+logger.logger.log('- cwd:', cwd);
 logger.logger.groupEnd();
 }
-const
-
-message: 'The FILE arg is required',
-pass: 'ok',
-fail: 'missing'
-}, {
-nook: true,
-test: cli.input.length <= 1,
-message: 'Can only accept one DIR (make sure to escape spaces!)',
-pass: 'ok',
-fail: 'received ' + cli.input.length
-}, {
-nook: true,
-test: !json || !markdown,
-message: 'The `--json` and `--markdown` flags can not be used at the same time',
-pass: 'ok',
-fail: 'bad'
-});
-if (!wasValidInput) {
-return;
-}
-logger.logger.warn('Warning: This will approximate your Conda dependencies using PyPI. We do not yet officially support Conda. Use at your own risk.');
+const detected = await detectManifestActions(String(cwd));
+debug.debugLog(detected);
 if (cli.flags['dryRun']) {
 logger.logger.log(DRY_RUN_BAILING_NOW$v);
 return;
 }
-
-
-
-async function convertGradleToMaven(target, bin, cwd, verbose, gradleOpts) {
-// TODO: impl json/md
-if (verbose) {
-logger.logger.log('[VERBOSE] Resolving:', [cwd, bin]);
-}
-const rbin = path.resolve(cwd, bin);
-if (verbose) {
-logger.logger.log('[VERBOSE] Resolving:', [cwd, target]);
-}
-const rtarget = path.resolve(cwd, target);
-const binExists = fs$1.existsSync(rbin);
-const targetExists = fs$1.existsSync(rtarget);
-logger.logger.group('gradle2maven:');
-if (verbose || debug.isDebug()) {
-logger.logger.log(`[VERBOSE] - Absolute bin path: \`${rbin}\` (${binExists ? 'found' : vendor.yoctocolorsCjsExports.red('not found!')})`);
-logger.logger.log(`[VERBOSE] - Absolute target path: \`${rtarget}\` (${targetExists ? 'found' : vendor.yoctocolorsCjsExports.red('not found!')})`);
-} else {
-logger.logger.log(`- executing: \`${rbin}\``);
-if (!binExists) {
-logger.logger.warn('Warning: It appears the executable could not be found at this location. An error might be printed later because of that.');
-}
-logger.logger.log(`- src dir: \`${rtarget}\``);
-if (!targetExists) {
-logger.logger.warn('Warning: It appears the src dir could not be found at this location. An error might be printed later because of that.');
-}
-}
-logger.logger.groupEnd();
-try {
-// Run gradlew with the init script we provide which should yield zero or more
-// pom files. We have to figure out where to store those pom files such that
-// we can upload them and predict them through the GitHub API. We could do a
-// .socket folder. We could do a socket.pom.gz with all the poms, although
-// I'd prefer something plain-text if it is to be committed.
-
-// Note: init.gradle will be exported by .config/rollup.dist.config.mjs
-const initLocation = path.join(constants.distPath, 'init.gradle');
-const commandArgs = ['--init-script', initLocation, ...gradleOpts, 'pom'];
-if (verbose) {
-logger.logger.log('[VERBOSE] Executing:', [bin], ', args:', commandArgs);
-}
-logger.logger.log(`Converting gradle to maven from \`${bin}\` on \`${target}\` ...`);
-const output = await execGradleWithSpinner(rbin, commandArgs, rtarget, cwd);
-if (verbose) {
-logger.logger.group('[VERBOSE] gradle stdout:');
-logger.logger.log(output);
-logger.logger.groupEnd();
-}
-if (output.code !== 0) {
-process.exitCode = 1;
-logger.logger.fail(`Gradle exited with exit code ${output.code}`);
-// (In verbose mode, stderr was printed above, no need to repeat it)
-if (!verbose) {
-logger.logger.group('stderr:');
-logger.logger.error(output.stderr);
-logger.logger.groupEnd();
-}
-return;
-}
-logger.logger.success('Executed gradle successfully');
-logger.logger.log('Reported exports:');
-output.stdout.replace(/^POM file copied to: (.*)/gm, (_all, fn) => {
-logger.logger.log('- ', fn);
-return fn;
-});
+if (!detected.count) {
+logger.logger.fail('Was unable to discover any targets for which we can generate manifest files...');
 logger.logger.log('');
-logger.logger.log('
-
+logger.logger.log('- Make sure this script would work with your target build (see `socket manifest --help` for your target).');
+logger.logger.log('- Make sure to run it from the correct dir (use --cwd to target another dir)');
+logger.logger.log('- Make sure the necessary build tools are available (`PATH`)');
 process.exitCode = 1;
-
-if (verbose) {
-logger.logger.group('[VERBOSE] error:');
-logger.logger.log(e);
-logger.logger.groupEnd();
-}
-}
-}
-async function execGradleWithSpinner(bin, commandArgs, target, cwd) {
-// Lazily access constants.spinner.
-const {
-spinner
-} = constants;
-let pass = false;
-try {
-spinner.start(`Running gradlew... (this can take a while, it depends on how long gradlew has to run)`);
-const output = await spawn.spawn(bin, commandArgs, {
-// We can pipe the output through to have the user see the result
-// of running gradlew, but then we can't (easily) gather the output
-// to discover the generated files... probably a flag we should allow?
-// stdio: isDebug() ? 'inherit' : undefined,
-cwd: target || cwd
-});
-pass = true;
-const {
-code,
-stderr,
-stdout
-} = output;
-return {
-code,
-stdout,
-stderr
-};
-} finally {
-if (pass) {
-spinner.successAndStop('Completed gradlew execution');
-} else {
-spinner.failAndStop('There was an error while trying to run gradlew.');
-}
+return;
 }
+await generateAutoManifest(detected, cwd, verbose, outputKind);
+logger.logger.success(`Finished. Should have attempted to generate manifest files for ${detected.count} targets.`);
 }
 
 const {
 DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$u
 } = constants;
 const config$x = {
-commandName: '
-description: '[beta]
+commandName: 'conda',
+description: '[beta] Convert a Conda environment.yml file to a python requirements.txt',
 hidden: false,
 flags: {
 ...utils.commonFlags,
-
-type: 'string',
-description: 'Location of gradlew binary to use, default: CWD/gradlew'
-},
+...utils.outputFlags,
 cwd: {
 type: 'string',
 description: 'Set the cwd, defaults to process.cwd()'
 },
-
-type: 'string',
-default: '',
-description: 'Additional options to pass on to ./gradlew, see `./gradlew --help`'
-},
-task: {
+out: {
 type: 'string',
-default: '
-description: '
+default: '-',
+description: 'Output target (use `-` or omit to print to stdout)'
 },
 verbose: {
 type: 'boolean',
@@ -5578,38 +5743,26 @@ const config$x = {
|
|
|
5578
5743
|
},
|
|
5579
5744
|
help: (command, config) => `
|
|
5580
5745
|
Usage
|
|
5581
|
-
$ ${command}
|
|
5582
|
-
|
|
5583
|
-
Options
|
|
5584
|
-
${utils.getFlagListOutput(config.flags, 6)}
|
|
5585
|
-
|
|
5586
|
-
Uses gradle, preferably through your local project \`gradlew\`, to generate a
|
|
5587
|
-
\`pom.xml\` file for each task. If you have no \`gradlew\` you can try the
|
|
5588
|
-
global \`gradle\` binary but that may not work (hard to predict).
|
|
5589
|
-
|
|
5590
|
-
The \`pom.xml\` is a manifest file similar to \`package.json\` for npm or
|
|
5591
|
-
or requirements.txt for PyPi), but specifically for Maven, which is Java's
|
|
5592
|
-
dependency repository. Languages like Kotlin and Scala piggy back on it too.
|
|
5593
|
-
|
|
5594
|
-
There are some caveats with the gradle to \`pom.xml\` conversion:
|
|
5746
|
+
$ ${command} FILE
|
|
5595
5747
|
|
|
5596
|
-
|
|
5597
|
-
|
|
5748
|
+
Warning: While we don't support Conda necessarily, this tool extracts the pip
|
|
5749
|
+
block from an environment.yml and outputs it as a requirements.txt
|
|
5750
|
+
which you can scan as if it were a pypi package.
|
|
5598
5751
|
|
|
5599
|
-
|
|
5600
|
-
think something is missing that could be supported please reach out.
|
|
5752
|
+
USE AT YOUR OWN RISK
|
|
5601
5753
|
|
|
5602
|
-
|
|
5754
|
+
Note: FILE can be a dash (-) to indicate stdin. This way you can pipe the
|
|
5755
|
+
contents of a file to have it processed.
|
|
5603
5756
|
|
|
5604
|
-
|
|
5757
|
+
Options
|
|
5758
|
+
${utils.getFlagListOutput(config.flags, 6)}
|
|
5605
5759
|
|
|
5606
5760
|
Examples
|
|
5607
5761
|
|
|
5608
|
-
$ ${command} .
|
|
5609
|
-
$ ${command} --bin=../gradlew .
|
|
5762
|
+
$ ${command} ./environment.yml
|
|
5610
5763
|
`
|
|
5611
5764
|
};
|
|
5612
|
-
const
|
|
5765
|
+
const cmdManifestConda = {
|
|
5613
5766
|
description: config$x.description,
|
|
5614
5767
|
hidden: config$x.hidden,
|
|
5615
5768
|
run: run$x
|
|
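The pip-block extraction that the new conda help text describes is implemented by `handleManifestConda`, which is not part of this hunk. As an illustrative sketch only (assuming a conventional `environment.yml` where pip requirements sit as a nested list under a `- pip:` entry in `dependencies:`), the core idea is roughly:

    // Illustrative sketch, not the bundled handleManifestConda implementation.
    const fs = require('node:fs');

    function extractPipBlock(envYmlText) {
      const lines = envYmlText.split('\n');
      const pipIndex = lines.findIndex(line => /^\s*-\s*pip\s*:\s*$/.test(line));
      if (pipIndex === -1) return '';
      const pipIndent = lines[pipIndex].match(/^\s*/)[0].length;
      const requirements = [];
      for (let i = pipIndex + 1; i < lines.length; i += 1) {
        const indent = lines[i].match(/^\s*/)[0].length;
        const item = /^\s*-\s+(.+)$/.exec(lines[i]);
        if (!item || indent <= pipIndent) break; // left the nested pip list
        requirements.push(item[1]);
      }
      return requirements.join('\n');
    }

    // Print a requirements.txt-style list to stdout.
    console.log(extractPipBlock(fs.readFileSync('environment.yml', 'utf8')));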
@@ -5623,193 +5776,79 @@ async function run$x(argv, importMeta, {
 importMeta,
 parentName
 });
-const verbose = Boolean(cli.flags['verbose']);
 const {
-
-
+cwd = process.cwd(),
+json = false,
+markdown = false,
+out = '-',
+verbose = false
 } = cli.flags;
 const outputKind = utils.getOutputKind(json, markdown); // TODO: impl json/md further
 
+const [target = ''] = cli.input;
 if (verbose) {
 logger.logger.group('- ', parentName, config$x.commandName, ':');
 logger.logger.group('- flags:', cli.flags);
 logger.logger.groupEnd();
-logger.logger.log('-
+logger.logger.log('- target:', target);
+logger.logger.log('- output:', out);
 logger.logger.groupEnd();
 }
-const [target = ''] = cli.input;
-
-// TODO: I'm not sure it's feasible to parse source file from stdin. We could
-// try, store contents in a file in some folder, target that folder... what
-// would the file name be?
-
 const wasValidInput = utils.checkCommandInput(outputKind, {
-test: !!target
-message: 'The
+test: !!target,
+message: 'The FILE arg is required',
 pass: 'ok',
-fail:
+fail: 'missing'
 }, {
 nook: true,
 test: cli.input.length <= 1,
 message: 'Can only accept one DIR (make sure to escape spaces!)',
 pass: 'ok',
 fail: 'received ' + cli.input.length
+}, {
+nook: true,
+test: !json || !markdown,
+message: 'The `--json` and `--markdown` flags can not be used at the same time',
+pass: 'ok',
+fail: 'bad'
 });
 if (!wasValidInput) {
 return;
 }
-
-bin = path.join(target, 'gradlew'),
-cwd = process.cwd()
-} = cli.flags;
-if (verbose) {
-logger.logger.group();
-logger.logger.log('- target:', target);
-logger.logger.log('- gradle bin:', bin);
-logger.logger.groupEnd();
-}
-let gradleOpts = [];
-if (cli.flags['gradleOpts']) {
-gradleOpts = cli.flags['gradleOpts'].split(' ').map(s => s.trim()).filter(Boolean);
-}
+logger.logger.warn('Warning: This will approximate your Conda dependencies using PyPI. We do not yet officially support Conda. Use at your own risk.');
 if (cli.flags['dryRun']) {
 logger.logger.log(DRY_RUN_BAILING_NOW$u);
 return;
 }
-await
-}
-
-async function convertSbtToMaven(target, bin, out, verbose, sbtOpts) {
-// TODO: impl json/md
-
-// Lazily access constants.spinner.
-const {
-spinner
-} = constants;
-const rbin = path.resolve(bin);
-const rtarget = path.resolve(target);
-if (verbose) {
-logger.logger.group('sbt2maven:');
-logger.logger.log(`[VERBOSE] - Absolute bin path: \`${rbin}\``);
-logger.logger.log(`[VERBOSE] - Absolute target path: \`${rtarget}\``);
-// logger.log(`[VERBOSE] - Absolute out path: \`${rout}\``)
-logger.logger.groupEnd();
-} else {
-logger.logger.group('sbt2maven:');
-logger.logger.log(`- executing: \`${bin}\``);
-logger.logger.log(`- src dir: \`${target}\``);
-// logger.log(`- dst dir: \`${out}\``)
-logger.logger.groupEnd();
-}
-try {
-spinner.start(`Converting sbt to maven from \`${bin}\` on \`${target}\`...`);
-
-// Run sbt with the init script we provide which should yield zero or more
-// pom files. We have to figure out where to store those pom files such that
-// we can upload them and predict them through the GitHub API. We could do a
-// .socket folder. We could do a socket.pom.gz with all the poms, although
-// I'd prefer something plain-text if it is to be committed.
-const output = await spawn.spawn(bin, ['makePom'].concat(sbtOpts), {
-cwd: target || '.'
-});
-spinner.stop();
-if (verbose) {
-logger.logger.group('[VERBOSE] sbt stdout:');
-logger.logger.log(output);
-logger.logger.groupEnd();
-}
-if (output.stderr) {
-process.exitCode = 1;
-logger.logger.fail('There were errors while running sbt');
-// (In verbose mode, stderr was printed above, no need to repeat it)
-if (!verbose) {
-logger.logger.group('[VERBOSE] stderr:');
-logger.logger.error(output.stderr);
-logger.logger.groupEnd();
-}
-return;
-}
-const poms = [];
-output.stdout.replace(/Wrote (.*?.pom)\n/g, (_all, fn) => {
-poms.push(fn);
-return fn;
-});
-if (!poms.length) {
-process.exitCode = 1;
-logger.logger.fail('There were no errors from sbt but it seems to not have generated any poms either');
-return;
-}
-// Move the pom file to ...? initial cwd? loc will be an absolute path, or dump to stdout
-// TODO: what to do with multiple output files? Do we want to dump them to stdout? Raw or with separators or ?
-// TODO: maybe we can add an option to target a specific file to dump to stdout
-if (out === '-' && poms.length === 1) {
-logger.logger.log('Result:\n```');
-logger.logger.log(await utils.safeReadFile(poms[0]));
-logger.logger.log('```');
-logger.logger.success(`OK`);
-} else if (out === '-') {
-process.exitCode = 1;
-logger.logger.fail('Requested out target was stdout but there are multiple generated files');
-poms.forEach(fn => logger.logger.error('-', fn));
-logger.logger.info('Exiting now...');
-return;
-} else {
-// if (verbose) {
-//   logger.log(
-//     `Moving manifest file from \`${loc.replace(/^\/home\/[^/]*?\//, '~/')}\` to \`${out}\``
-//   )
-// } else {
-//   logger.log('Moving output pom file')
-// }
-// TODO: do we prefer fs-extra? renaming can be gnarly on windows and fs-extra's version is better
-// await renamep(loc, out)
-logger.logger.success(`Generated ${poms.length} pom files`);
-poms.forEach(fn => logger.logger.log('-', fn));
-logger.logger.success(`OK`);
-}
-} catch (e) {
-process.exitCode = 1;
-spinner.stop();
-logger.logger.fail('There was an unexpected error while running this' + (verbose ? '' : ' (use --verbose for details)'));
-if (verbose) {
-logger.logger.group('[VERBOSE] error:');
-logger.logger.log(e);
-logger.logger.groupEnd();
-}
-}
+await handleManifestConda(target, String(out || ''), json ? 'json' : markdown ? 'markdown' : 'text', String(cwd), Boolean(verbose));
 }
 
 const {
 DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$t
 } = constants;
 const config$w = {
-commandName: '
-description:
+commandName: 'gradle',
+description: '[beta] Use Gradle to generate a manifest file (`pom.xml`) for a Gradle/Java/Kotlin/etc project',
 hidden: false,
 flags: {
 ...utils.commonFlags,
 bin: {
 type: 'string',
-default: '
-description: 'Location of sbt binary to use'
+description: 'Location of gradlew binary to use, default: CWD/gradlew'
 },
 cwd: {
 type: 'string',
 description: 'Set the cwd, defaults to process.cwd()'
 },
-
+gradleOpts: {
 type: 'string',
-default: '
-description: '
-},
-stdout: {
-type: 'boolean',
-description: 'Print resulting pom.xml to stdout (supersedes --out)'
+default: '',
+description: 'Additional options to pass on to ./gradlew, see `./gradlew --help`'
 },
-
+task: {
 type: 'string',
-default: '',
-description: '
+default: 'all',
+description: 'Task to target. By default targets all'
 },
 verbose: {
 type: 'boolean',
@@ -5818,43 +5857,38 @@ const config$w = {
 },
 help: (command, config) => `
 Usage
-$ ${command} [--bin=path/to/
+$ ${command} [--bin=path/to/gradle/binary] [--out=path/to/result] DIR
 
 Options
 ${utils.getFlagListOutput(config.flags, 6)}
 
-Uses
-
-
-
-There are some caveats with \`build.sbt\` to \`pom.xml\` conversion:
+Uses gradle, preferably through your local project \`gradlew\`, to generate a
+\`pom.xml\` file for each task. If you have no \`gradlew\` you can try the
+global \`gradle\` binary but that may not work (hard to predict).
 
-
-
+The \`pom.xml\` is a manifest file similar to \`package.json\` for npm or
+or requirements.txt for PyPi), but specifically for Maven, which is Java's
+dependency repository. Languages like Kotlin and Scala piggy back on it too.
 
-
-- \`excludeAll()\`, \`dependencyOverrides\`, \`force()\`, \`relativePath\`
-- For details: https://www.scala-sbt.org/1.x/docs/Library-Management.html
+There are some caveats with the gradle to \`pom.xml\` conversion:
 
--
+- each task will generate its own xml file and by default it generates one xml
+for every task.
 
-- it
-
+- it's possible certain features don't translate well into the xml. If you
+think something is missing that could be supported please reach out.
 
-
+- it works with your \`gradlew\` from your repo and local settings and config
 
 Support is beta. Please report issues or give us feedback on what's missing.
 
-This is only for SBT. If your Scala setup uses gradle, please see the help
-sections for \`socket manifest gradle\` or \`socket cdxgen\`.
-
 Examples
 
-$ ${command}
-$ ${command} --bin
+$ ${command} .
+$ ${command} --bin=../gradlew .
 `
 };
-const
+const cmdManifestGradle = {
 description: config$w.description,
 hidden: config$w.hidden,
 run: run$w
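The removed `execGradleWithSpinner` helper earlier in this diff shows roughly what the gradle conversion does under the hood: run the project's `gradlew` with the CLI's bundled `init.gradle` and a `pom` task, then read the generated pom paths from stdout. A minimal sketch of that invocation (simplified, using Node's child_process rather than the registry spawn helper; the init script location is an assumption):

    // Rough sketch based on the removed code above; not the shipped implementation.
    const path = require('node:path');
    const { spawnSync } = require('node:child_process');

    function runGradlePomExport(targetDir, gradleOpts = []) {
      const bin = path.join(targetDir, 'gradlew');
      const initScript = path.join(__dirname, 'init.gradle'); // bundled with the CLI dist (assumed path)
      const args = ['--init-script', initScript, ...gradleOpts, 'pom'];
      const result = spawnSync(bin, args, { cwd: targetDir, encoding: 'utf8' });
      if (result.status !== 0) {
        throw new Error(`gradlew exited with code ${result.status}:\n${result.stderr}`);
      }
      // The init script reports each generated manifest as "POM file copied to: <path>".
      return [...result.stdout.matchAll(/^POM file copied to: (.*)$/gm)].map(m => m[1]);
    }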
@@ -5903,68 +5937,99 @@ async function run$w(argv, importMeta, {
 if (!wasValidInput) {
 return;
 }
-
-
-
-}
-let out = './socket.pom.xml';
-if (cli.flags['out']) {
-out = cli.flags['out'];
-}
-if (cli.flags['stdout']) {
-out = '-';
-}
+const {
+bin = path.join(target, 'gradlew'),
+cwd = process.cwd()
+} = cli.flags;
 if (verbose) {
 logger.logger.group();
 logger.logger.log('- target:', target);
 logger.logger.log('- gradle bin:', bin);
-logger.logger.log('- out:', out);
 logger.logger.groupEnd();
 }
-let
-if (cli.flags['
-
+let gradleOpts = [];
+if (cli.flags['gradleOpts']) {
+gradleOpts = cli.flags['gradleOpts'].split(' ').map(s => s.trim()).filter(Boolean);
 }
 if (cli.flags['dryRun']) {
 logger.logger.log(DRY_RUN_BAILING_NOW$t);
 return;
 }
-await
+await convertGradleToMaven(target, String(bin), String(cwd), verbose, gradleOpts);
 }
 
 const {
 DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$s
 } = constants;
+
+// TODO: we may want to dedupe some pieces for all gradle languages. I think it
+// makes sense to have separate commands for them and I think it makes
+// sense for the help panels to note the requested language, rather than
+// `socket manifest kotlin` to print help screens with `gradle` as the
+// command. Room for improvement.
 const config$v = {
-commandName: '
-description: '
+commandName: 'kotlin',
+description: '[beta] Use Gradle to generate a manifest file (`pom.xml`) for a Kotlin project',
 hidden: false,
 flags: {
 ...utils.commonFlags,
+bin: {
+type: 'string',
+description: 'Location of gradlew binary to use, default: CWD/gradlew'
+},
 cwd: {
 type: 'string',
 description: 'Set the cwd, defaults to process.cwd()'
 },
+gradleOpts: {
+type: 'string',
+default: '',
+description: 'Additional options to pass on to ./gradlew, see `./gradlew --help`'
+},
+task: {
+type: 'string',
+default: 'all',
+description: 'Task to target. By default targets all'
+},
 verbose: {
 type: 'boolean',
-
-description: 'Enable debug output, may help when running into errors'
+description: 'Print debug messages'
 }
-// TODO: support output flags
 },
 help: (command, config) => `
 Usage
-$ ${command}
+$ ${command} [--bin=path/to/gradle/binary] [--out=path/to/result] DIR
+
+Options
+${utils.getFlagListOutput(config.flags, 6)}
+
+Uses gradle, preferably through your local project \`gradlew\`, to generate a
+\`pom.xml\` file for each task. If you have no \`gradlew\` you can try the
+global \`gradle\` binary but that may not work (hard to predict).
+
+The \`pom.xml\` is a manifest file similar to \`package.json\` for npm or
+or requirements.txt for PyPi), but specifically for Maven, which is Java's
+dependency repository. Languages like Kotlin and Scala piggy back on it too.
+
+There are some caveats with the gradle to \`pom.xml\` conversion:
+
+- each task will generate its own xml file and by default it generates one xml
+for every task. (This may be a good thing!)
+
+- it's possible certain features don't translate well into the xml. If you
+think something is missing that could be supported please reach out.
 
-
-${utils.getFlagListOutput(config.flags, 6)}
+- it works with your \`gradlew\` from your repo and local settings and config
 
-
-
-
+Support is beta. Please report issues or give us feedback on what's missing.
+
+Examples
+
+$ ${command} .
+$ ${command} --bin=../gradlew .
 `
 };
-const
+const cmdManifestKotlin = {
 description: config$v.description,
 hidden: config$v.hidden,
 run: run$v
@@ -5978,127 +6043,93 @@ async function run$v(argv, importMeta, {
 importMeta,
 parentName
 });
-const verbose =
-const
-
+const verbose = Boolean(cli.flags['verbose']);
+const {
+json,
+markdown
+} = cli.flags;
+const outputKind = utils.getOutputKind(json, markdown); // TODO: impl json/md further
 
 if (verbose) {
 logger.logger.group('- ', parentName, config$v.commandName, ':');
 logger.logger.group('- flags:', cli.flags);
 logger.logger.groupEnd();
 logger.logger.log('- input:', cli.input);
-logger.logger.log('- cwd:', cwd);
 logger.logger.groupEnd();
 }
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const [target = ''] = cli.input;
+
+// TODO: I'm not sure it's feasible to parse source file from stdin. We could
+// try, store contents in a file in some folder, target that folder... what
+// would the file name be?
+
+const wasValidInput = utils.checkCommandInput(outputKind, {
+test: !!target && target !== '-',
+message: 'The DIR arg is required',
+pass: 'ok',
+fail: target === '-' ? 'stdin is not supported' : 'missing'
+}, {
+nook: true,
+test: cli.input.length <= 1,
+message: 'Can only accept one DIR (make sure to escape spaces!)',
+pass: 'ok',
+fail: 'received ' + cli.input.length
+});
+if (!wasValidInput) {
 return;
 }
-
-
-
-
-
-
-
-
-
-}
-await cmdManifestGradle.run(subArgs, importMeta, {
-parentName
-});
-return;
+const {
+bin = path.join(target, 'gradlew'),
+cwd = process.cwd()
+} = cli.flags;
+if (verbose) {
+logger.logger.group();
+logger.logger.log('- target:', target);
+logger.logger.log('- gradle bin:', bin);
+logger.logger.groupEnd();
 }
-
-
-
-const hasEnvyaml = !hasEnvyml && fs$1.existsSync(envyaml);
-if (hasEnvyml || hasEnvyaml) {
-logger.logger.log('Detected an environment.yml file, running default Conda generator...');
-// This command takes the TARGET as first arg.
-subArgs.push(hasEnvyml ? envyml : hasEnvyaml ? envyaml : '');
-if (cli.flags['dryRun']) {
-logger.logger.log(DRY_RUN_BAILING_NOW$s);
-return;
-}
-await cmdManifestConda.run(subArgs, importMeta, {
-parentName
-});
-return;
+let gradleOpts = [];
+if (cli.flags['gradleOpts']) {
+gradleOpts = cli.flags['gradleOpts'].split(' ').map(s => s.trim()).filter(Boolean);
 }
 if (cli.flags['dryRun']) {
 logger.logger.log(DRY_RUN_BAILING_NOW$s);
 return;
 }
-
-// Show new help screen and exit.
-vendor.meow(`
-$ ${parentName} ${config$v.commandName}
-
-Unfortunately this script did not discover a supported language in the
-current folder.
-
-- Make sure this script would work with your target build
-- Make sure to run it from the correct folder
-- Make sure the necessary build tools are available (\`PATH\`)
-
-If that doesn't work, see \`${parentName} <lang> --help\` for config details for
-your target language.
-`, {
-argv: [],
-description: config$v.description,
-importMeta
-}).showHelp();
+await convertGradleToMaven(target, String(bin), String(cwd), verbose, gradleOpts);
 }
 
 const {
 DRY_RUN_BAILING_NOW: DRY_RUN_BAILING_NOW$r
 } = constants;
-
-// TODO: we may want to dedupe some pieces for all gradle languages. I think it
-// makes sense to have separate commands for them and I think it makes
-// sense for the help panels to note the requested language, rather than
-// `socket manifest kotlin` to print help screens with `gradle` as the
-// command. Room for improvement.
 const config$u = {
-commandName: '
-description:
+commandName: 'scala',
+description: "[beta] Generate a manifest file (`pom.xml`) from Scala's `build.sbt` file",
 hidden: false,
 flags: {
 ...utils.commonFlags,
 bin: {
 type: 'string',
-
+default: 'sbt',
+description: 'Location of sbt binary to use'
 },
 cwd: {
 type: 'string',
 description: 'Set the cwd, defaults to process.cwd()'
 },
-
+out: {
 type: 'string',
-default: '',
-description: '
+default: './socket.pom.xml',
+description: 'Path of output file; where to store the resulting manifest, see also --stdout'
 },
-
+stdout: {
+type: 'boolean',
+description: 'Print resulting pom.xml to stdout (supersedes --out)'
+},
+sbtOpts: {
 type: 'string',
-default: '
-description: '
+default: '',
+description: 'Additional options to pass on to sbt, as per `sbt --help`'
 },
 verbose: {
 type: 'boolean',
@@ -6107,38 +6138,43 @@ const config$u = {
 },
 help: (command, config) => `
 Usage
-$ ${command} [--bin=path/to/
+$ ${command} [--bin=path/to/sbt/binary] [--out=path/to/result] FILE|DIR
 
 Options
 ${utils.getFlagListOutput(config.flags, 6)}
 
-Uses
-
-
+Uses \`sbt makePom\` to generate a \`pom.xml\` from your \`build.sbt\` file.
+This xml file is the dependency manifest (like a package.json
+for Node.js or requirements.txt for PyPi), but specifically for Scala.
 
-
-or requirements.txt for PyPi), but specifically for Maven, which is Java's
-dependency repository. Languages like Kotlin and Scala piggy back on it too.
+There are some caveats with \`build.sbt\` to \`pom.xml\` conversion:
 
-
+- the xml is exported as socket.pom.xml as to not confuse existing build tools
+but it will first hit your /target/sbt<version> folder (as a different name)
 
--
-
+- the pom.xml format (standard by Scala) does not support certain sbt features
+- \`excludeAll()\`, \`dependencyOverrides\`, \`force()\`, \`relativePath\`
+- For details: https://www.scala-sbt.org/1.x/docs/Library-Management.html
 
-- it
-think something is missing that could be supported please reach out.
+- it uses your sbt settings and local configuration verbatim
 
-- it
+- it can only export one target per run, so if you have multiple targets like
+development and production, you must run them separately.
+
+You can optionally configure the path to the \`sbt\` bin to invoke.
 
 Support is beta. Please report issues or give us feedback on what's missing.
 
+This is only for SBT. If your Scala setup uses gradle, please see the help
+sections for \`socket manifest gradle\` or \`socket cdxgen\`.
+
 Examples
 
-$ ${command} .
-$ ${command} --bin
+$ ${command} ./build.sbt
+$ ${command} --bin=/usr/bin/sbt ./build.sbt
 `
 };
-const
+const cmdManifestScala = {
 description: config$u.description,
 hidden: config$u.hidden,
 run: run$u
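The scala help text above boils down to the same flow the removed `convertSbtToMaven` implemented: run `sbt makePom` in the target dir and scrape the generated pom paths from stdout. A hedged sketch of that core step (simplified from the removed code; error handling and the --out/--stdout plumbing omitted):

    // Sketch of the `sbt makePom` step, based on the removed convertSbtToMaven above.
    const { spawnSync } = require('node:child_process');

    function runSbtMakePom(targetDir, bin = 'sbt', sbtOpts = []) {
      const result = spawnSync(bin, ['makePom', ...sbtOpts], { cwd: targetDir, encoding: 'utf8' });
      if (result.stderr) {
        // The removed code treated any stderr output as a failure.
        throw new Error(`sbt reported errors:\n${result.stderr}`);
      }
      // sbt prints "Wrote /abs/path/to/<project>.pom" for each generated pom.
      return [...result.stdout.matchAll(/Wrote (.*?\.pom)$/gm)].map(m => m[1]);
    }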
@@ -6187,25 +6223,33 @@ async function run$u(argv, importMeta, {
 if (!wasValidInput) {
 return;
 }
-
-
-
-}
+let bin = 'sbt';
+if (cli.flags['bin']) {
+bin = cli.flags['bin'];
+}
+let out = './socket.pom.xml';
+if (cli.flags['out']) {
+out = cli.flags['out'];
+}
+if (cli.flags['stdout']) {
+out = '-';
+}
 if (verbose) {
 logger.logger.group();
 logger.logger.log('- target:', target);
 logger.logger.log('- gradle bin:', bin);
+logger.logger.log('- out:', out);
 logger.logger.groupEnd();
 }
-let
-if (cli.flags['
-
+let sbtOpts = [];
+if (cli.flags['sbtOpts']) {
+sbtOpts = cli.flags['sbtOpts'].split(' ').map(s => s.trim()).filter(Boolean);
 }
 if (cli.flags['dryRun']) {
 logger.logger.log(DRY_RUN_BAILING_NOW$r);
 return;
 }
-await
+await convertSbtToMaven(target, bin, out, verbose, sbtOpts);
 }
 
 const config$t = {
@@ -8743,6 +8787,50 @@ async function run$d(argv, importMeta, {
 await handleDeleteRepo(orgSlug, repoName, outputKind);
 }
 
+async function fetchListAllRepos({
+direction,
+orgSlug,
+sort
+}) {
+const sockSdkResult = await utils.setupSdk();
+if (!sockSdkResult.ok) {
+return sockSdkResult;
+}
+const sockSdk = sockSdkResult.data;
+const rows = [];
+let protection = 0;
+let nextPage = 0;
+while (nextPage >= 0) {
+if (++protection > 100) {
+return {
+ok: false,
+message: 'Infinite loop detected',
+cause: `Either there are over 100 pages of results or the fetch has run into an infinite loop. Breaking it off now. nextPage=${nextPage}`
+};
+}
+// eslint-disable-next-line no-await-in-loop
+const result = await utils.handleApiCall(sockSdk.getOrgRepoList(orgSlug, {
+sort,
+direction,
+per_page: String(100),
+// max
+page: String(nextPage)
+}), 'list of repositories');
+if (!result.ok) {
+return result;
+}
+result.data.results.forEach(row => rows.push(row));
+nextPage = result.data.nextPage ?? -1;
+}
+return {
+ok: true,
+data: {
+results: rows,
+nextPage: null
+}
+};
+}
+
 async function fetchListRepos({
 direction,
 orgSlug,
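The new `fetchListAllRepos` keeps requesting pages of 100 until the API stops reporting a `nextPage` (with a 100-page safety cap), then returns one merged result. A small, illustrative sketch of how a caller might consume that shape, mirroring what `handleListRepos` does further down for the `--all` flag (the `id` field is the only row property shown in this diff, so the sketch sticks to it):

    // Illustrative consumer; assumes fetchListAllRepos is in scope, as in cli.js.
    async function printAllRepoIds(orgSlug) {
      const result = await fetchListAllRepos({ direction: 'desc', orgSlug, sort: 'created_at' });
      if (!result.ok) {
        console.error(result.message, result.cause);
        return;
      }
      // result.data.results holds every row from every page; nextPage is null.
      for (const repo of result.data.results) {
        console.log(repo.id);
      }
    }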
@@ -8764,18 +8852,33 @@ async function fetchListRepos({
 }
 
 // @ts-ignore
-async function outputListRepos(result, outputKind) {
+async function outputListRepos(result, outputKind, page, nextPage, sort, perPage, direction) {
 if (!result.ok) {
 process.exitCode = result.code ?? 1;
 }
 if (outputKind === 'json') {
-
+if (result.ok) {
+logger.logger.log(utils.serializeResultJson({
+ok: true,
+data: {
+data: result.data,
+direction,
+nextPage: nextPage ?? 0,
+page,
+perPage,
+sort
+}
+}));
+} else {
+logger.logger.log(utils.serializeResultJson(result));
+}
 return;
 }
 if (!result.ok) {
 logger.logger.fail(utils.failMsgWithBadge(result.message, result.cause));
 return;
 }
+logger.logger.log(`Result page: ${page}, results per page: ${perPage === Infinity ? 'all' : perPage}, sorted by: ${sort}, direction: ${direction}`);
 const options = {
 columns: [{
 field: 'id',
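With `--json`, the success branch above wraps the repo list in a small paging envelope. Roughly what that serialized output looks like (all field values here are illustrative placeholders, not real API output):

    // Approximate shape of the JSON emitted by the success branch above.
    const example = {
      ok: true,
      data: {
        data: { results: [/* repo rows */], nextPage: 2 },
        direction: 'desc',
        nextPage: 2,
        page: 1,
        perPage: 30,
        sort: 'created_at'
      }
    };
    console.log(JSON.stringify(example, null, 2));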
@@ -8795,9 +8898,18 @@ async function outputListRepos(result, outputKind) {
 }]
 };
 logger.logger.log(vendor.srcExports(options, result.data.results));
+if (nextPage) {
+logger.logger.info(`This is page ${page}. Server indicated there are more results available on page ${nextPage}...`);
+logger.logger.info(`(Hint: you can use \`socket repos list --page ${nextPage}\`)`);
+} else if (perPage === Infinity) {
+logger.logger.info(`This should be the entire list available on the server.`);
+} else {
+logger.logger.info(`This is page ${page}. Server indicated this is the last page with results.`);
+}
 }
 
 async function handleListRepos({
+all,
 direction,
 orgSlug,
 outputKind,
@@ -8805,14 +8917,28 @@ async function handleListRepos({
 per_page,
 sort
 }) {
-
-
-
-
-
-
-
-
+if (all) {
+const data = await fetchListAllRepos({
+direction,
+orgSlug,
+sort
+});
+await outputListRepos(data, outputKind, 0, 0, sort, Infinity, direction);
+} else {
+const data = await fetchListRepos({
+direction,
+orgSlug,
+page,
+per_page,
+sort
+});
+if (!data.ok) {
+await outputListRepos(data, outputKind, 0, 0, '', 0, direction);
+} else {
+// Note: nextPage defaults to 0, is null when there's no next page
+await outputListRepos(data, outputKind, page, data.data.nextPage, sort, per_page, direction);
+}
+}
 }
 
 const {
@@ -8825,11 +8951,10 @@ const config$c = {
 flags: {
 ...utils.commonFlags,
 ...utils.outputFlags,
-
-type: '
-
-
-description: 'Sorting option'
+all: {
+type: 'boolean',
+default: false,
+description: 'By default view shows the last n repos. This flag allows you to fetch the entire list. Will ignore --page and --perPage.'
 },
 direction: {
 type: 'string',
@@ -8856,6 +8981,12 @@ const config$c = {
 shortFlag: 'p',
 default: 1,
 description: 'Page number'
+},
+sort: {
+type: 'string',
+shortFlag: 's',
+default: 'created_at',
+description: 'Sorting option'
 }
 },
 help: (command, config) => `
@@ -8888,15 +9019,15 @@ async function run$c(argv, importMeta, {
 parentName
 });
 const {
-
-
-} = cli.flags;
-const outputKind = utils.getOutputKind(json, markdown);
-const {
+all,
+direction = 'desc',
 dryRun,
 interactive,
+json,
+markdown,
 org: orgFlag
 } = cli.flags;
+const outputKind = utils.getOutputKind(json, markdown);
 const [orgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), cli.input[0] || '', !!interactive, !!dryRun);
 const hasApiToken = utils.hasDefaultToken();
 const wasValidInput = utils.checkCommandInput(outputKind, {
@@ -8917,6 +9048,12 @@ async function run$c(argv, importMeta, {
 message: 'You need to be logged in to use this command. See `socket login`.',
 pass: 'ok',
 fail: 'missing API token'
+}, {
+nook: true,
+test: direction === 'asc' || direction === 'desc',
+message: 'The --direction value must be "asc" or "desc"',
+pass: 'ok',
+fail: 'unexpected value'
 });
 if (!wasValidInput) {
 return;
@@ -8926,7 +9063,8 @@ async function run$c(argv, importMeta, {
 return;
 }
 await handleListRepos({
-
+all: Boolean(all),
+direction: direction === 'asc' ? 'asc' : 'desc',
 orgSlug,
 outputKind,
 page: Number(cli.flags['page']) || 1,
@@ -9336,6 +9474,11 @@ const config$9 = {
 flags: {
 ...utils.commonFlags,
 ...utils.outputFlags,
+autoManifest: {
+type: 'boolean',
+default: false,
+description: 'Run `socket manifest auto` before collecting manifest files? This would be necessary for languages like Scala, Gradle, and Kotlin, See `socket manifest auto --help`.'
+},
 branch: {
 type: 'string',
 shortFlag: 'b',
@@ -9374,11 +9517,6 @@ const config$9 = {
 default: true,
 description: 'Allow for interactive elements, asking for input. Use --no-interactive to prevent any input questions, defaulting them to cancel/no.'
 },
-pendingHead: {
-type: 'boolean',
-default: true,
-description: 'Designate this full-scan as the latest scan of a given branch. This must be set to have it show up in the dashboard.'
-},
 pullRequest: {
 type: 'number',
 shortFlag: 'pr',
@@ -9404,11 +9542,17 @@ const config$9 = {
 default: false,
 description: 'Wait for the scan creation to complete, then basically run `socket scan report` on it'
 },
+setAsAlertsPage: {
+type: 'boolean',
+default: true,
+aliases: ['pendingHead'],
+description: 'When true and if this is the "default branch" then this Scan will be the one reflected on your alerts page. See help for details. Defaults to true.'
+},
 tmp: {
 type: 'boolean',
 shortFlag: 't',
 default: false,
-description: 'Set the visibility (true/false) of the scan in your dashboard.
+description: 'Set the visibility (true/false) of the scan in your dashboard.'
 }
 },
 // TODO: your project's "socket.yml" file's "projectIgnorePaths"
@@ -9440,8 +9584,12 @@ const config$9 = {
 Note: for a first run you probably want to set --defaultBranch to indicate
 the default branch name, like "main" or "master".
 
-
-
+The "alerts page" (https://socket.dev/dashboard/org/YOURORG/alerts) will show
+the results from the last scan designated as the "pending head" on the branch
+configured on Socket to be the "default branch". When creating a scan the
+--setAsAlertsPage flag will default to true to update this. You can prevent
+this by using --no-setAsAlertsPage. This flag is ignored for any branch that
+is not designated as the "default branch". It is disabled when using --tmp.
 
 Options
 ${utils.getFlagListOutput(config.flags, 6)}
@@ -9466,6 +9614,7 @@ async function run$9(argv, importMeta, {
 parentName
 });
 const {
+autoManifest = false,
 branch: branchName = 'socket-default-branch',
 commitHash,
 commitMessage,
@@ -9477,14 +9626,15 @@ async function run$9(argv, importMeta, {
 json,
 markdown,
 org: orgFlag,
-pendingHead,
 pullRequest,
 readOnly,
 repo: repoName = 'socket-default-repository',
 report,
+setAsAlertsPage: pendingHeadFlag,
 tmp
 } = cli.flags;
 const outputKind = utils.getOutputKind(json, markdown);
+const pendingHead = tmp ? false : pendingHeadFlag;
 let [orgSlug, defaultOrgSlug] = await utils.determineOrgSlug(String(orgFlag || ''), cli.input[0] || '', interactive, dryRun);
 if (!defaultOrgSlug) {
 // Tmp. just for TS. will drop this later.
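The rename from `--pendingHead` to `--setAsAlertsPage` stays backwards compatible through the flag alias, and `--tmp` now simply overrides it instead of being rejected as a flag conflict. A sketch of the effective value, restating the logic from the hunk above:

    // Effective "pending head" value after the changes above.
    function resolvePendingHead(flags) {
      // --pendingHead still works because of aliases: ['pendingHead'] on setAsAlertsPage.
      const setAsAlertsPage = flags.setAsAlertsPage ?? true; // flag default
      return flags.tmp ? false : setAsAlertsPage;
    }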
@@ -9519,6 +9669,10 @@ async function run$9(argv, importMeta, {
 updatedInput = true;
 }
 }
+const detected = await detectManifestActions(cwd);
+if (detected.count > 0 && !autoManifest) {
+logger.logger.info(`Detected ${detected.count} manifest targets we could try to generate. Please set the --autoManifest flag if you want to include languages covered by \`socket manifest auto\` in the Scan.`);
+}
 if (updatedInput && orgSlug && targets?.length) {
 logger.logger.info('Note: You can invoke this command next time to skip the interactive questions:');
 logger.logger.info('```');
@@ -9548,12 +9702,6 @@ async function run$9(argv, importMeta, {
 message: 'This command requires an API token for access',
 pass: 'ok',
 fail: 'missing (try `socket login`)'
-}, {
-nook: true,
-test: !pendingHead || !tmp,
-message: 'Can not use --pendingHead and --tmp at the same time',
-pass: 'ok',
-fail: 'remove at least one flag'
 }, {
 nook: true,
 test: !pendingHead || !!branchName,
@@ -9577,6 +9725,7 @@ async function run$9(argv, importMeta, {
 return;
 }
 await handleCreateNewScan({
+autoManifest: Boolean(autoManifest),
 branchName: branchName,
 commitHash: commitHash && String(commitHash) || '',
 commitMessage: commitMessage && String(commitMessage) || '',
@@ -11597,5 +11746,5 @@ void (async () => {
 await utils.captureException(e);
 }
 })();
-//# debugId=
+//# debugId=a62b48d4-63b9-4b1e-b260-b363f86c80da
 //# sourceMappingURL=cli.js.map