@coana-tech/cli 15.0.8 → 15.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +245 -107
- package/package.json +1 -1
- package/repos/coana-tech/goana/bin/goana-darwin-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-darwin-arm64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-arm64.gz +0 -0
- package/repos/coana-tech/javap-service/javap-service.jar +0 -0
package/cli.mjs
CHANGED
|
@@ -16020,9 +16020,9 @@ var require_picomatch = __commonJS({
|
|
|
16020
16020
|
var utils = require_utils();
|
|
16021
16021
|
var constants4 = require_constants();
|
|
16022
16022
|
var isObject2 = (val2) => val2 && typeof val2 === "object" && !Array.isArray(val2);
|
|
16023
|
-
var
|
|
16023
|
+
var picomatch13 = (glob2, options, returnState = false) => {
|
|
16024
16024
|
if (Array.isArray(glob2)) {
|
|
16025
|
-
const fns = glob2.map((input) =>
|
|
16025
|
+
const fns = glob2.map((input) => picomatch13(input, options, returnState));
|
|
16026
16026
|
const arrayMatcher = (str) => {
|
|
16027
16027
|
for (const isMatch4 of fns) {
|
|
16028
16028
|
const state2 = isMatch4(str);
|
|
@@ -16038,16 +16038,16 @@ var require_picomatch = __commonJS({
|
|
|
16038
16038
|
}
|
|
16039
16039
|
const opts = options || {};
|
|
16040
16040
|
const posix3 = opts.windows;
|
|
16041
|
-
const regex = isState ?
|
|
16041
|
+
const regex = isState ? picomatch13.compileRe(glob2, options) : picomatch13.makeRe(glob2, options, false, true);
|
|
16042
16042
|
const state = regex.state;
|
|
16043
16043
|
delete regex.state;
|
|
16044
16044
|
let isIgnored = () => false;
|
|
16045
16045
|
if (opts.ignore) {
|
|
16046
16046
|
const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
|
|
16047
|
-
isIgnored =
|
|
16047
|
+
isIgnored = picomatch13(opts.ignore, ignoreOpts, returnState);
|
|
16048
16048
|
}
|
|
16049
16049
|
const matcher = (input, returnObject = false) => {
|
|
16050
|
-
const { isMatch: isMatch4, match: match2, output } =
|
|
16050
|
+
const { isMatch: isMatch4, match: match2, output } = picomatch13.test(input, regex, options, { glob: glob2, posix: posix3 });
|
|
16051
16051
|
const result = { glob: glob2, state, regex, posix: posix3, input, output, match: match2, isMatch: isMatch4 };
|
|
16052
16052
|
if (typeof opts.onResult === "function") {
|
|
16053
16053
|
opts.onResult(result);
|
|
@@ -16073,7 +16073,7 @@ var require_picomatch = __commonJS({
|
|
|
16073
16073
|
}
|
|
16074
16074
|
return matcher;
|
|
16075
16075
|
};
|
|
16076
|
-
|
|
16076
|
+
picomatch13.test = (input, regex, options, { glob: glob2, posix: posix3 } = {}) => {
|
|
16077
16077
|
if (typeof input !== "string") {
|
|
16078
16078
|
throw new TypeError("Expected input to be a string");
|
|
16079
16079
|
}
|
|
@@ -16090,24 +16090,24 @@ var require_picomatch = __commonJS({
|
|
|
16090
16090
|
}
|
|
16091
16091
|
if (match2 === false || opts.capture === true) {
|
|
16092
16092
|
if (opts.matchBase === true || opts.basename === true) {
|
|
16093
|
-
match2 =
|
|
16093
|
+
match2 = picomatch13.matchBase(input, regex, options, posix3);
|
|
16094
16094
|
} else {
|
|
16095
16095
|
match2 = regex.exec(output);
|
|
16096
16096
|
}
|
|
16097
16097
|
}
|
|
16098
16098
|
return { isMatch: Boolean(match2), match: match2, output };
|
|
16099
16099
|
};
|
|
16100
|
-
|
|
16101
|
-
const regex = glob2 instanceof RegExp ? glob2 :
|
|
16100
|
+
picomatch13.matchBase = (input, glob2, options) => {
|
|
16101
|
+
const regex = glob2 instanceof RegExp ? glob2 : picomatch13.makeRe(glob2, options);
|
|
16102
16102
|
return regex.test(utils.basename(input));
|
|
16103
16103
|
};
|
|
16104
|
-
|
|
16105
|
-
|
|
16106
|
-
if (Array.isArray(pattern)) return pattern.map((p3) =>
|
|
16104
|
+
picomatch13.isMatch = (str, patterns, options) => picomatch13(patterns, options)(str);
|
|
16105
|
+
picomatch13.parse = (pattern, options) => {
|
|
16106
|
+
if (Array.isArray(pattern)) return pattern.map((p3) => picomatch13.parse(p3, options));
|
|
16107
16107
|
return parse16(pattern, { ...options, fastpaths: false });
|
|
16108
16108
|
};
|
|
16109
|
-
|
|
16110
|
-
|
|
16109
|
+
picomatch13.scan = (input, options) => scan(input, options);
|
|
16110
|
+
picomatch13.compileRe = (state, options, returnOutput = false, returnState = false) => {
|
|
16111
16111
|
if (returnOutput === true) {
|
|
16112
16112
|
return state.output;
|
|
16113
16113
|
}
|
|
@@ -16118,13 +16118,13 @@ var require_picomatch = __commonJS({
|
|
|
16118
16118
|
if (state && state.negated === true) {
|
|
16119
16119
|
source = `^(?!${source}).*$`;
|
|
16120
16120
|
}
|
|
16121
|
-
const regex =
|
|
16121
|
+
const regex = picomatch13.toRegex(source, options);
|
|
16122
16122
|
if (returnState === true) {
|
|
16123
16123
|
regex.state = state;
|
|
16124
16124
|
}
|
|
16125
16125
|
return regex;
|
|
16126
16126
|
};
|
|
16127
|
-
|
|
16127
|
+
picomatch13.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
|
|
16128
16128
|
if (!input || typeof input !== "string") {
|
|
16129
16129
|
throw new TypeError("Expected a non-empty string");
|
|
16130
16130
|
}
|
|
@@ -16135,9 +16135,9 @@ var require_picomatch = __commonJS({
|
|
|
16135
16135
|
if (!parsed.output) {
|
|
16136
16136
|
parsed = parse16(input, options);
|
|
16137
16137
|
}
|
|
16138
|
-
return
|
|
16138
|
+
return picomatch13.compileRe(parsed, options, returnOutput, returnState);
|
|
16139
16139
|
};
|
|
16140
|
-
|
|
16140
|
+
picomatch13.toRegex = (source, options) => {
|
|
16141
16141
|
try {
|
|
16142
16142
|
const opts = options || {};
|
|
16143
16143
|
return new RegExp(source, opts.flags || (opts.nocase ? "i" : ""));
|
|
@@ -16146,8 +16146,8 @@ var require_picomatch = __commonJS({
|
|
|
16146
16146
|
return /$^/;
|
|
16147
16147
|
}
|
|
16148
16148
|
};
|
|
16149
|
-
|
|
16150
|
-
module2.exports =
|
|
16149
|
+
picomatch13.constants = constants4;
|
|
16150
|
+
module2.exports = picomatch13;
|
|
16151
16151
|
}
|
|
16152
16152
|
});
|
|
16153
16153
|
|
|
@@ -16157,14 +16157,14 @@ var require_picomatch2 = __commonJS({
|
|
|
16157
16157
|
"use strict";
|
|
16158
16158
|
var pico = require_picomatch();
|
|
16159
16159
|
var utils = require_utils();
|
|
16160
|
-
function
|
|
16160
|
+
function picomatch13(glob2, options, returnState = false) {
|
|
16161
16161
|
if (options && (options.windows === null || options.windows === void 0)) {
|
|
16162
16162
|
options = { ...options, windows: utils.isWindows() };
|
|
16163
16163
|
}
|
|
16164
16164
|
return pico(glob2, options, returnState);
|
|
16165
16165
|
}
|
|
16166
|
-
Object.assign(
|
|
16167
|
-
module2.exports =
|
|
16166
|
+
Object.assign(picomatch13, pico);
|
|
16167
|
+
module2.exports = picomatch13;
|
|
16168
16168
|
}
|
|
16169
16169
|
});
|
|
16170
16170
|
|
|
@@ -25038,15 +25038,15 @@ var require_file = __commonJS({
|
|
|
25038
25038
|
_incFile(callback) {
|
|
25039
25039
|
debug("_incFile", this.filename);
|
|
25040
25040
|
const ext2 = path9.extname(this._basename);
|
|
25041
|
-
const
|
|
25041
|
+
const basename14 = path9.basename(this._basename, ext2);
|
|
25042
25042
|
const tasks = [];
|
|
25043
25043
|
if (this.zippedArchive) {
|
|
25044
25044
|
tasks.push(
|
|
25045
25045
|
function(cb) {
|
|
25046
25046
|
const num = this._created > 0 && !this.tailable ? this._created : "";
|
|
25047
25047
|
this._compressFile(
|
|
25048
|
-
path9.join(this.dirname, `${
|
|
25049
|
-
path9.join(this.dirname, `${
|
|
25048
|
+
path9.join(this.dirname, `${basename14}${num}${ext2}`),
|
|
25049
|
+
path9.join(this.dirname, `${basename14}${num}${ext2}.gz`),
|
|
25050
25050
|
cb
|
|
25051
25051
|
);
|
|
25052
25052
|
}.bind(this)
|
|
@@ -25056,9 +25056,9 @@ var require_file = __commonJS({
|
|
|
25056
25056
|
function(cb) {
|
|
25057
25057
|
if (!this.tailable) {
|
|
25058
25058
|
this._created += 1;
|
|
25059
|
-
this._checkMaxFilesIncrementing(ext2,
|
|
25059
|
+
this._checkMaxFilesIncrementing(ext2, basename14, cb);
|
|
25060
25060
|
} else {
|
|
25061
|
-
this._checkMaxFilesTailable(ext2,
|
|
25061
|
+
this._checkMaxFilesTailable(ext2, basename14, cb);
|
|
25062
25062
|
}
|
|
25063
25063
|
}.bind(this)
|
|
25064
25064
|
);
|
|
@@ -25072,9 +25072,9 @@ var require_file = __commonJS({
|
|
|
25072
25072
|
*/
|
|
25073
25073
|
_getFile() {
|
|
25074
25074
|
const ext2 = path9.extname(this._basename);
|
|
25075
|
-
const
|
|
25075
|
+
const basename14 = path9.basename(this._basename, ext2);
|
|
25076
25076
|
const isRotation = this.rotationFormat ? this.rotationFormat() : this._created;
|
|
25077
|
-
return !this.tailable && this._created ? `${
|
|
25077
|
+
return !this.tailable && this._created ? `${basename14}${isRotation}${ext2}` : `${basename14}${ext2}`;
|
|
25078
25078
|
}
|
|
25079
25079
|
/**
|
|
25080
25080
|
* Increment the number of files created or checked by this instance.
|
|
@@ -25084,14 +25084,14 @@ var require_file = __commonJS({
|
|
|
25084
25084
|
* @returns {undefined}
|
|
25085
25085
|
* @private
|
|
25086
25086
|
*/
|
|
25087
|
-
_checkMaxFilesIncrementing(ext2,
|
|
25087
|
+
_checkMaxFilesIncrementing(ext2, basename14, callback) {
|
|
25088
25088
|
if (!this.maxFiles || this._created < this.maxFiles) {
|
|
25089
25089
|
return setImmediate(callback);
|
|
25090
25090
|
}
|
|
25091
25091
|
const oldest = this._created - this.maxFiles;
|
|
25092
25092
|
const isOldest = oldest !== 0 ? oldest : "";
|
|
25093
25093
|
const isZipped = this.zippedArchive ? ".gz" : "";
|
|
25094
|
-
const filePath = `${
|
|
25094
|
+
const filePath = `${basename14}${isOldest}${ext2}${isZipped}`;
|
|
25095
25095
|
const target = path9.join(this.dirname, filePath);
|
|
25096
25096
|
fs11.unlink(target, callback);
|
|
25097
25097
|
}
|
|
@@ -25106,7 +25106,7 @@ var require_file = __commonJS({
|
|
|
25106
25106
|
* @returns {undefined}
|
|
25107
25107
|
* @private
|
|
25108
25108
|
*/
|
|
25109
|
-
_checkMaxFilesTailable(ext2,
|
|
25109
|
+
_checkMaxFilesTailable(ext2, basename14, callback) {
|
|
25110
25110
|
const tasks = [];
|
|
25111
25111
|
if (!this.maxFiles) {
|
|
25112
25112
|
return;
|
|
@@ -25114,21 +25114,21 @@ var require_file = __commonJS({
|
|
|
25114
25114
|
const isZipped = this.zippedArchive ? ".gz" : "";
|
|
25115
25115
|
for (let x2 = this.maxFiles - 1; x2 > 1; x2--) {
|
|
25116
25116
|
tasks.push(function(i7, cb) {
|
|
25117
|
-
let fileName3 = `${
|
|
25117
|
+
let fileName3 = `${basename14}${i7 - 1}${ext2}${isZipped}`;
|
|
25118
25118
|
const tmppath = path9.join(this.dirname, fileName3);
|
|
25119
25119
|
fs11.exists(tmppath, (exists2) => {
|
|
25120
25120
|
if (!exists2) {
|
|
25121
25121
|
return cb(null);
|
|
25122
25122
|
}
|
|
25123
|
-
fileName3 = `${
|
|
25123
|
+
fileName3 = `${basename14}${i7}${ext2}${isZipped}`;
|
|
25124
25124
|
fs11.rename(tmppath, path9.join(this.dirname, fileName3), cb);
|
|
25125
25125
|
});
|
|
25126
25126
|
}.bind(this, x2));
|
|
25127
25127
|
}
|
|
25128
25128
|
asyncSeries(tasks, () => {
|
|
25129
25129
|
fs11.rename(
|
|
25130
|
-
path9.join(this.dirname, `${
|
|
25131
|
-
path9.join(this.dirname, `${
|
|
25130
|
+
path9.join(this.dirname, `${basename14}${ext2}${isZipped}`),
|
|
25131
|
+
path9.join(this.dirname, `${basename14}1${ext2}${isZipped}`),
|
|
25132
25132
|
callback
|
|
25133
25133
|
);
|
|
25134
25134
|
});
|
|
@@ -39550,9 +39550,9 @@ var require_picomatch3 = __commonJS({
|
|
|
39550
39550
|
var utils = require_utils3();
|
|
39551
39551
|
var constants4 = require_constants3();
|
|
39552
39552
|
var isObject2 = (val2) => val2 && typeof val2 === "object" && !Array.isArray(val2);
|
|
39553
|
-
var
|
|
39553
|
+
var picomatch13 = (glob2, options, returnState = false) => {
|
|
39554
39554
|
if (Array.isArray(glob2)) {
|
|
39555
|
-
const fns = glob2.map((input) =>
|
|
39555
|
+
const fns = glob2.map((input) => picomatch13(input, options, returnState));
|
|
39556
39556
|
const arrayMatcher = (str) => {
|
|
39557
39557
|
for (const isMatch4 of fns) {
|
|
39558
39558
|
const state2 = isMatch4(str);
|
|
@@ -39568,16 +39568,16 @@ var require_picomatch3 = __commonJS({
|
|
|
39568
39568
|
}
|
|
39569
39569
|
const opts = options || {};
|
|
39570
39570
|
const posix3 = utils.isWindows(options);
|
|
39571
|
-
const regex = isState ?
|
|
39571
|
+
const regex = isState ? picomatch13.compileRe(glob2, options) : picomatch13.makeRe(glob2, options, false, true);
|
|
39572
39572
|
const state = regex.state;
|
|
39573
39573
|
delete regex.state;
|
|
39574
39574
|
let isIgnored = () => false;
|
|
39575
39575
|
if (opts.ignore) {
|
|
39576
39576
|
const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
|
|
39577
|
-
isIgnored =
|
|
39577
|
+
isIgnored = picomatch13(opts.ignore, ignoreOpts, returnState);
|
|
39578
39578
|
}
|
|
39579
39579
|
const matcher = (input, returnObject = false) => {
|
|
39580
|
-
const { isMatch: isMatch4, match: match2, output } =
|
|
39580
|
+
const { isMatch: isMatch4, match: match2, output } = picomatch13.test(input, regex, options, { glob: glob2, posix: posix3 });
|
|
39581
39581
|
const result = { glob: glob2, state, regex, posix: posix3, input, output, match: match2, isMatch: isMatch4 };
|
|
39582
39582
|
if (typeof opts.onResult === "function") {
|
|
39583
39583
|
opts.onResult(result);
|
|
@@ -39603,7 +39603,7 @@ var require_picomatch3 = __commonJS({
|
|
|
39603
39603
|
}
|
|
39604
39604
|
return matcher;
|
|
39605
39605
|
};
|
|
39606
|
-
|
|
39606
|
+
picomatch13.test = (input, regex, options, { glob: glob2, posix: posix3 } = {}) => {
|
|
39607
39607
|
if (typeof input !== "string") {
|
|
39608
39608
|
throw new TypeError("Expected input to be a string");
|
|
39609
39609
|
}
|
|
@@ -39620,24 +39620,24 @@ var require_picomatch3 = __commonJS({
|
|
|
39620
39620
|
}
|
|
39621
39621
|
if (match2 === false || opts.capture === true) {
|
|
39622
39622
|
if (opts.matchBase === true || opts.basename === true) {
|
|
39623
|
-
match2 =
|
|
39623
|
+
match2 = picomatch13.matchBase(input, regex, options, posix3);
|
|
39624
39624
|
} else {
|
|
39625
39625
|
match2 = regex.exec(output);
|
|
39626
39626
|
}
|
|
39627
39627
|
}
|
|
39628
39628
|
return { isMatch: Boolean(match2), match: match2, output };
|
|
39629
39629
|
};
|
|
39630
|
-
|
|
39631
|
-
const regex = glob2 instanceof RegExp ? glob2 :
|
|
39630
|
+
picomatch13.matchBase = (input, glob2, options, posix3 = utils.isWindows(options)) => {
|
|
39631
|
+
const regex = glob2 instanceof RegExp ? glob2 : picomatch13.makeRe(glob2, options);
|
|
39632
39632
|
return regex.test(path9.basename(input));
|
|
39633
39633
|
};
|
|
39634
|
-
|
|
39635
|
-
|
|
39636
|
-
if (Array.isArray(pattern)) return pattern.map((p3) =>
|
|
39634
|
+
picomatch13.isMatch = (str, patterns, options) => picomatch13(patterns, options)(str);
|
|
39635
|
+
picomatch13.parse = (pattern, options) => {
|
|
39636
|
+
if (Array.isArray(pattern)) return pattern.map((p3) => picomatch13.parse(p3, options));
|
|
39637
39637
|
return parse16(pattern, { ...options, fastpaths: false });
|
|
39638
39638
|
};
|
|
39639
|
-
|
|
39640
|
-
|
|
39639
|
+
picomatch13.scan = (input, options) => scan(input, options);
|
|
39640
|
+
picomatch13.compileRe = (state, options, returnOutput = false, returnState = false) => {
|
|
39641
39641
|
if (returnOutput === true) {
|
|
39642
39642
|
return state.output;
|
|
39643
39643
|
}
|
|
@@ -39648,13 +39648,13 @@ var require_picomatch3 = __commonJS({
|
|
|
39648
39648
|
if (state && state.negated === true) {
|
|
39649
39649
|
source = `^(?!${source}).*$`;
|
|
39650
39650
|
}
|
|
39651
|
-
const regex =
|
|
39651
|
+
const regex = picomatch13.toRegex(source, options);
|
|
39652
39652
|
if (returnState === true) {
|
|
39653
39653
|
regex.state = state;
|
|
39654
39654
|
}
|
|
39655
39655
|
return regex;
|
|
39656
39656
|
};
|
|
39657
|
-
|
|
39657
|
+
picomatch13.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
|
|
39658
39658
|
if (!input || typeof input !== "string") {
|
|
39659
39659
|
throw new TypeError("Expected a non-empty string");
|
|
39660
39660
|
}
|
|
@@ -39665,9 +39665,9 @@ var require_picomatch3 = __commonJS({
|
|
|
39665
39665
|
if (!parsed.output) {
|
|
39666
39666
|
parsed = parse16(input, options);
|
|
39667
39667
|
}
|
|
39668
|
-
return
|
|
39668
|
+
return picomatch13.compileRe(parsed, options, returnOutput, returnState);
|
|
39669
39669
|
};
|
|
39670
|
-
|
|
39670
|
+
picomatch13.toRegex = (source, options) => {
|
|
39671
39671
|
try {
|
|
39672
39672
|
const opts = options || {};
|
|
39673
39673
|
return new RegExp(source, opts.flags || (opts.nocase ? "i" : ""));
|
|
@@ -39676,8 +39676,8 @@ var require_picomatch3 = __commonJS({
|
|
|
39676
39676
|
return /$^/;
|
|
39677
39677
|
}
|
|
39678
39678
|
};
|
|
39679
|
-
|
|
39680
|
-
module2.exports =
|
|
39679
|
+
picomatch13.constants = constants4;
|
|
39680
|
+
module2.exports = picomatch13;
|
|
39681
39681
|
}
|
|
39682
39682
|
});
|
|
39683
39683
|
|
|
@@ -39695,7 +39695,7 @@ var require_micromatch = __commonJS({
|
|
|
39695
39695
|
"use strict";
|
|
39696
39696
|
var util5 = __require("util");
|
|
39697
39697
|
var braces = require_braces();
|
|
39698
|
-
var
|
|
39698
|
+
var picomatch13 = require_picomatch4();
|
|
39699
39699
|
var utils = require_utils3();
|
|
39700
39700
|
var isEmptyString = (val2) => val2 === "" || val2 === "./";
|
|
39701
39701
|
var micromatch4 = (list2, patterns, options) => {
|
|
@@ -39712,7 +39712,7 @@ var require_micromatch = __commonJS({
|
|
|
39712
39712
|
}
|
|
39713
39713
|
};
|
|
39714
39714
|
for (let i7 = 0; i7 < patterns.length; i7++) {
|
|
39715
|
-
let isMatch4 =
|
|
39715
|
+
let isMatch4 = picomatch13(String(patterns[i7]), { ...options, onResult }, true);
|
|
39716
39716
|
let negated = isMatch4.state.negated || isMatch4.state.negatedExtglob;
|
|
39717
39717
|
if (negated) negatives++;
|
|
39718
39718
|
for (let item of list2) {
|
|
@@ -39740,8 +39740,8 @@ var require_micromatch = __commonJS({
|
|
|
39740
39740
|
return matches;
|
|
39741
39741
|
};
|
|
39742
39742
|
micromatch4.match = micromatch4;
|
|
39743
|
-
micromatch4.matcher = (pattern, options) =>
|
|
39744
|
-
micromatch4.isMatch = (str, patterns, options) =>
|
|
39743
|
+
micromatch4.matcher = (pattern, options) => picomatch13(pattern, options);
|
|
39744
|
+
micromatch4.isMatch = (str, patterns, options) => picomatch13(patterns, options)(str);
|
|
39745
39745
|
micromatch4.any = micromatch4.isMatch;
|
|
39746
39746
|
micromatch4.not = (list2, patterns, options = {}) => {
|
|
39747
39747
|
patterns = [].concat(patterns).map(String);
|
|
@@ -39788,7 +39788,7 @@ var require_micromatch = __commonJS({
|
|
|
39788
39788
|
micromatch4.some = (list2, patterns, options) => {
|
|
39789
39789
|
let items = [].concat(list2);
|
|
39790
39790
|
for (let pattern of [].concat(patterns)) {
|
|
39791
|
-
let isMatch4 =
|
|
39791
|
+
let isMatch4 = picomatch13(String(pattern), options);
|
|
39792
39792
|
if (items.some((item) => isMatch4(item))) {
|
|
39793
39793
|
return true;
|
|
39794
39794
|
}
|
|
@@ -39798,7 +39798,7 @@ var require_micromatch = __commonJS({
|
|
|
39798
39798
|
micromatch4.every = (list2, patterns, options) => {
|
|
39799
39799
|
let items = [].concat(list2);
|
|
39800
39800
|
for (let pattern of [].concat(patterns)) {
|
|
39801
|
-
let isMatch4 =
|
|
39801
|
+
let isMatch4 = picomatch13(String(pattern), options);
|
|
39802
39802
|
if (!items.every((item) => isMatch4(item))) {
|
|
39803
39803
|
return false;
|
|
39804
39804
|
}
|
|
@@ -39809,23 +39809,23 @@ var require_micromatch = __commonJS({
|
|
|
39809
39809
|
if (typeof str !== "string") {
|
|
39810
39810
|
throw new TypeError(`Expected a string: "${util5.inspect(str)}"`);
|
|
39811
39811
|
}
|
|
39812
|
-
return [].concat(patterns).every((p3) =>
|
|
39812
|
+
return [].concat(patterns).every((p3) => picomatch13(p3, options)(str));
|
|
39813
39813
|
};
|
|
39814
39814
|
micromatch4.capture = (glob2, input, options) => {
|
|
39815
39815
|
let posix3 = utils.isWindows(options);
|
|
39816
|
-
let regex =
|
|
39816
|
+
let regex = picomatch13.makeRe(String(glob2), { ...options, capture: true });
|
|
39817
39817
|
let match2 = regex.exec(posix3 ? utils.toPosixSlashes(input) : input);
|
|
39818
39818
|
if (match2) {
|
|
39819
39819
|
return match2.slice(1).map((v) => v === void 0 ? "" : v);
|
|
39820
39820
|
}
|
|
39821
39821
|
};
|
|
39822
|
-
micromatch4.makeRe = (...args2) =>
|
|
39823
|
-
micromatch4.scan = (...args2) =>
|
|
39822
|
+
micromatch4.makeRe = (...args2) => picomatch13.makeRe(...args2);
|
|
39823
|
+
micromatch4.scan = (...args2) => picomatch13.scan(...args2);
|
|
39824
39824
|
micromatch4.parse = (patterns, options) => {
|
|
39825
39825
|
let res = [];
|
|
39826
39826
|
for (let pattern of [].concat(patterns || [])) {
|
|
39827
39827
|
for (let str of braces(String(pattern), options)) {
|
|
39828
|
-
res.push(
|
|
39828
|
+
res.push(picomatch13.parse(str, options));
|
|
39829
39829
|
}
|
|
39830
39830
|
}
|
|
39831
39831
|
return res;
|
|
@@ -102682,7 +102682,7 @@ var require_parseParams = __commonJS({
|
|
|
102682
102682
|
var require_basename = __commonJS({
|
|
102683
102683
|
"../../node_modules/.pnpm/@fastify+busboy@2.1.1/node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
|
|
102684
102684
|
"use strict";
|
|
102685
|
-
module2.exports = function
|
|
102685
|
+
module2.exports = function basename14(path9) {
|
|
102686
102686
|
if (typeof path9 !== "string") {
|
|
102687
102687
|
return "";
|
|
102688
102688
|
}
|
|
@@ -102709,7 +102709,7 @@ var require_multipart = __commonJS({
|
|
|
102709
102709
|
var Dicer = require_Dicer();
|
|
102710
102710
|
var parseParams = require_parseParams();
|
|
102711
102711
|
var decodeText = require_decodeText();
|
|
102712
|
-
var
|
|
102712
|
+
var basename14 = require_basename();
|
|
102713
102713
|
var getLimit2 = require_getLimit();
|
|
102714
102714
|
var RE_BOUNDARY = /^boundary$/i;
|
|
102715
102715
|
var RE_FIELD = /^form-data$/i;
|
|
@@ -102826,7 +102826,7 @@ var require_multipart = __commonJS({
|
|
|
102826
102826
|
} else if (RE_FILENAME.test(parsed[i7][0])) {
|
|
102827
102827
|
filename = parsed[i7][1];
|
|
102828
102828
|
if (!preservePath) {
|
|
102829
|
-
filename =
|
|
102829
|
+
filename = basename14(filename);
|
|
102830
102830
|
}
|
|
102831
102831
|
}
|
|
102832
102832
|
}
|
|
@@ -121662,8 +121662,8 @@ var require_tmp = __commonJS({
|
|
|
121662
121662
|
if (option === "name") {
|
|
121663
121663
|
if (path9.isAbsolute(name2))
|
|
121664
121664
|
throw new Error(`${option} option must not contain an absolute path, found "${name2}".`);
|
|
121665
|
-
let
|
|
121666
|
-
if (
|
|
121665
|
+
let basename14 = path9.basename(name2);
|
|
121666
|
+
if (basename14 === ".." || basename14 === "." || basename14 !== name2)
|
|
121667
121667
|
throw new Error(`${option} option must not contain a path, found "${name2}".`);
|
|
121668
121668
|
} else {
|
|
121669
121669
|
if (path9.isAbsolute(name2) && !name2.startsWith(tmpDir)) {
|
|
@@ -147531,7 +147531,7 @@ var require_micromatch2 = __commonJS({
|
|
|
147531
147531
|
"use strict";
|
|
147532
147532
|
var util5 = __require("util");
|
|
147533
147533
|
var braces = require_braces2();
|
|
147534
|
-
var
|
|
147534
|
+
var picomatch13 = require_picomatch4();
|
|
147535
147535
|
var utils = require_utils3();
|
|
147536
147536
|
var isEmptyString = (v) => v === "" || v === "./";
|
|
147537
147537
|
var hasBraces = (v) => {
|
|
@@ -147552,7 +147552,7 @@ var require_micromatch2 = __commonJS({
|
|
|
147552
147552
|
}
|
|
147553
147553
|
};
|
|
147554
147554
|
for (let i7 = 0; i7 < patterns.length; i7++) {
|
|
147555
|
-
let isMatch4 =
|
|
147555
|
+
let isMatch4 = picomatch13(String(patterns[i7]), { ...options, onResult }, true);
|
|
147556
147556
|
let negated = isMatch4.state.negated || isMatch4.state.negatedExtglob;
|
|
147557
147557
|
if (negated) negatives++;
|
|
147558
147558
|
for (let item of list2) {
|
|
@@ -147580,8 +147580,8 @@ var require_micromatch2 = __commonJS({
|
|
|
147580
147580
|
return matches;
|
|
147581
147581
|
};
|
|
147582
147582
|
micromatch4.match = micromatch4;
|
|
147583
|
-
micromatch4.matcher = (pattern, options) =>
|
|
147584
|
-
micromatch4.isMatch = (str, patterns, options) =>
|
|
147583
|
+
micromatch4.matcher = (pattern, options) => picomatch13(pattern, options);
|
|
147584
|
+
micromatch4.isMatch = (str, patterns, options) => picomatch13(patterns, options)(str);
|
|
147585
147585
|
micromatch4.any = micromatch4.isMatch;
|
|
147586
147586
|
micromatch4.not = (list2, patterns, options = {}) => {
|
|
147587
147587
|
patterns = [].concat(patterns).map(String);
|
|
@@ -147628,7 +147628,7 @@ var require_micromatch2 = __commonJS({
|
|
|
147628
147628
|
micromatch4.some = (list2, patterns, options) => {
|
|
147629
147629
|
let items = [].concat(list2);
|
|
147630
147630
|
for (let pattern of [].concat(patterns)) {
|
|
147631
|
-
let isMatch4 =
|
|
147631
|
+
let isMatch4 = picomatch13(String(pattern), options);
|
|
147632
147632
|
if (items.some((item) => isMatch4(item))) {
|
|
147633
147633
|
return true;
|
|
147634
147634
|
}
|
|
@@ -147638,7 +147638,7 @@ var require_micromatch2 = __commonJS({
|
|
|
147638
147638
|
micromatch4.every = (list2, patterns, options) => {
|
|
147639
147639
|
let items = [].concat(list2);
|
|
147640
147640
|
for (let pattern of [].concat(patterns)) {
|
|
147641
|
-
let isMatch4 =
|
|
147641
|
+
let isMatch4 = picomatch13(String(pattern), options);
|
|
147642
147642
|
if (!items.every((item) => isMatch4(item))) {
|
|
147643
147643
|
return false;
|
|
147644
147644
|
}
|
|
@@ -147649,23 +147649,23 @@ var require_micromatch2 = __commonJS({
|
|
|
147649
147649
|
if (typeof str !== "string") {
|
|
147650
147650
|
throw new TypeError(`Expected a string: "${util5.inspect(str)}"`);
|
|
147651
147651
|
}
|
|
147652
|
-
return [].concat(patterns).every((p3) =>
|
|
147652
|
+
return [].concat(patterns).every((p3) => picomatch13(p3, options)(str));
|
|
147653
147653
|
};
|
|
147654
147654
|
micromatch4.capture = (glob2, input, options) => {
|
|
147655
147655
|
let posix3 = utils.isWindows(options);
|
|
147656
|
-
let regex =
|
|
147656
|
+
let regex = picomatch13.makeRe(String(glob2), { ...options, capture: true });
|
|
147657
147657
|
let match2 = regex.exec(posix3 ? utils.toPosixSlashes(input) : input);
|
|
147658
147658
|
if (match2) {
|
|
147659
147659
|
return match2.slice(1).map((v) => v === void 0 ? "" : v);
|
|
147660
147660
|
}
|
|
147661
147661
|
};
|
|
147662
|
-
micromatch4.makeRe = (...args2) =>
|
|
147663
|
-
micromatch4.scan = (...args2) =>
|
|
147662
|
+
micromatch4.makeRe = (...args2) => picomatch13.makeRe(...args2);
|
|
147663
|
+
micromatch4.scan = (...args2) => picomatch13.scan(...args2);
|
|
147664
147664
|
micromatch4.parse = (patterns, options) => {
|
|
147665
147665
|
let res = [];
|
|
147666
147666
|
for (let pattern of [].concat(patterns || [])) {
|
|
147667
147667
|
for (let str of braces(String(pattern), options)) {
|
|
147668
|
-
res.push(
|
|
147668
|
+
res.push(picomatch13.parse(str, options));
|
|
147669
147669
|
}
|
|
147670
147670
|
}
|
|
147671
147671
|
return res;
|
|
@@ -155160,8 +155160,8 @@ var require_pattern = __commonJS({
|
|
|
155160
155160
|
}
|
|
155161
155161
|
exports2.endsWithSlashGlobStar = endsWithSlashGlobStar;
|
|
155162
155162
|
function isAffectDepthOfReadingPattern(pattern) {
|
|
155163
|
-
const
|
|
155164
|
-
return endsWithSlashGlobStar(pattern) || isStaticPattern(
|
|
155163
|
+
const basename14 = path9.basename(pattern);
|
|
155164
|
+
return endsWithSlashGlobStar(pattern) || isStaticPattern(basename14);
|
|
155165
155165
|
}
|
|
155166
155166
|
exports2.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;
|
|
155167
155167
|
function expandPatternsWithBraceExpansion(patterns) {
|
|
@@ -199710,7 +199710,7 @@ var {
|
|
|
199710
199710
|
// dist/index.js
|
|
199711
199711
|
import { mkdir as mkdir7, mkdtemp as mkdtemp2, readFile as readFile38, rm as rm4, writeFile as writeFile17 } from "fs/promises";
|
|
199712
199712
|
import { tmpdir as tmpdir5 } from "os";
|
|
199713
|
-
import { dirname as
|
|
199713
|
+
import { dirname as dirname27, join as join35, resolve as resolve44 } from "path";
|
|
199714
199714
|
|
|
199715
199715
|
// ../../node_modules/.pnpm/remeda@2.14.0/node_modules/remeda/dist/chunk-ANXBDSUI.js
|
|
199716
199716
|
var s = { done: false, hasNext: false };
|
|
@@ -204464,6 +204464,9 @@ function extractPregeneratedSbomPatterns(supportedFiles) {
|
|
|
204464
204464
|
}
|
|
204465
204465
|
return patterns;
|
|
204466
204466
|
}
|
|
204467
|
+
async function getPregeneratedSbomMatcher() {
|
|
204468
|
+
return initializePregeneratedSbomMatcher();
|
|
204469
|
+
}
|
|
204467
204470
|
async function initializePregeneratedSbomMatcher() {
|
|
204468
204471
|
if (pregeneratedSbomMatcher) {
|
|
204469
204472
|
return pregeneratedSbomMatcher;
|
|
@@ -235059,7 +235062,7 @@ import { existsSync as existsSync30, writeFileSync as writeFileSync3 } from "fs"
|
|
|
235059
235062
|
import { mkdir as mkdir6, rm as rm3, writeFile as writeFile15 } from "fs/promises";
|
|
235060
235063
|
var import_lodash15 = __toESM(require_lodash(), 1);
|
|
235061
235064
|
import os2 from "os";
|
|
235062
|
-
var
|
|
235065
|
+
var import_picomatch12 = __toESM(require_picomatch2(), 1);
|
|
235063
235066
|
import { join as join34, relative as relative22, resolve as resolve42 } from "path";
|
|
235064
235067
|
|
|
235065
235068
|
// ../utils/src/dashboard-api/shared-api.ts
|
|
@@ -235807,13 +235810,116 @@ Alternatively, you can reduce the --memory-limit option to require fewer mapping
|
|
|
235807
235810
|
}
|
|
235808
235811
|
}
|
|
235809
235812
|
|
|
235813
|
+
// dist/internal/validate-lock-files.js
|
|
235814
|
+
var import_picomatch11 = __toESM(require_picomatch2(), 1);
|
|
235815
|
+
import { basename as basename11, dirname as dirname25 } from "path";
|
|
235816
|
+
var IGNORED_DIRS = [".git", "node_modules", "target", "build", ".gradle", "buildSrc"];
|
|
235817
|
+
var GRADLE_PROJECT_FILES = /* @__PURE__ */ new Set(["build.gradle", "build.gradle.kts"]);
|
|
235818
|
+
var SBT_PROJECT_FILE = "build.sbt";
|
|
235819
|
+
var SBT_LOCK_FILE = "build.sbt.lock";
|
|
235820
|
+
var GRADLE_LOCK_FILE = "gradle.lockfile";
|
|
235821
|
+
var GRADLE_CATALOG_FILE = "libs.versions.toml";
|
|
235822
|
+
var GRADLE_LEGACY_LOCKS_PATTERN = /(?:^|\/)gradle\/dependency-locks\/[^/]+\.lockfile$/;
|
|
235823
|
+
/**
 * Scan rootWorkingDirectory for Gradle/SBT projects that have neither a lock
 * file, nor (for Gradle) a version catalog in an ancestor directory, nor a
 * repo-wide pre-generated SBOM.
 *
 * @param {string} rootWorkingDirectory - project root to scan.
 * @param {{excludeDirs?: string[]}} [options] - picomatch globs to drop files.
 * @returns {Promise<Array<{packageManager: string, projectDir: string, expectedFiles: string[]}>>}
 *   missing-lock-file entries, sorted by package manager then project dir.
 */
async function findGradleAndSbtProjectsMissingLockFiles(rootWorkingDirectory, options = {}) {
  // Relative file listing with well-known build/VCS dirs skipped, then the
  // caller's exclude globs applied (when any were provided).
  const discovered = await getFilesRelative(rootWorkingDirectory, IGNORED_DIRS);
  const files = options.excludeDirs?.length
    ? discovered.filter((f) => !import_picomatch11.default.isMatch(f, options.excludeDirs))
    : discovered;

  // A recognized CDX/SPDX SBOM anywhere in the tree bypasses this check
  // entirely. If the pattern matcher cannot be fetched, degrade to "no bypass"
  // rather than failing the scan.
  let sbomMatcher;
  try {
    sbomMatcher = await getPregeneratedSbomMatcher();
  } catch (e) {
    const message2 = e instanceof Error ? e.message : String(e);
    logger.warn(`Could not fetch CDX/SPDX SBOM patterns from Socket API; proceeding without SBOM-based bypass for the lock-file check (${message2})`);
    sbomMatcher = () => false;
  }
  if (files.some((f) => sbomMatcher(basename11(f)))) {
    return [];
  }

  // Single pass over the listing: record where projects live and where lock
  // material (lock files / catalogs) lives.
  const sbtProjects = new Set();
  const gradleProjects = new Set();
  const sbtLocks = new Set();
  const gradleLocks = new Set();
  const gradleCatalogs = new Set();
  for (const file of files) {
    const name = basename11(file);
    const parentDir = dirname25(file);
    if (name === SBT_PROJECT_FILE) {
      sbtProjects.add(parentDir);
    } else if (GRADLE_PROJECT_FILES.has(name)) {
      gradleProjects.add(parentDir);
    }
    if (name === SBT_LOCK_FILE) {
      sbtLocks.add(parentDir);
    } else if (name === GRADLE_LOCK_FILE) {
      gradleLocks.add(parentDir);
    } else if (GRADLE_LEGACY_LOCKS_PATTERN.test(file)) {
      // Legacy locks sit two levels below the project dir:
      // <project>/gradle/dependency-locks/<cfg>.lockfile
      gradleLocks.add(dirname25(dirname25(parentDir)) || ".");
    } else if (name === GRADLE_CATALOG_FILE) {
      gradleCatalogs.add(parentDir);
      // Catalogs conventionally live under <project>/gradle/; credit the
      // project directory itself as well.
      if (basename11(parentDir) === "gradle") {
        gradleCatalogs.add(dirname25(parentDir) || ".");
      }
    }
  }

  const missing = [];
  // SBT: the lock file must sit directly next to build.sbt.
  for (const projectDir of sbtProjects) {
    if (sbtLocks.has(projectDir)) {
      continue;
    }
    missing.push({
      packageManager: "SBT",
      projectDir,
      expectedFiles: [`${SBT_LOCK_FILE} in this directory`]
    });
  }
  // Gradle: a lock file, or a catalog/lock file in this directory or any
  // ancestor, satisfies the requirement.
  for (const projectDir of gradleProjects) {
    const covered = gradleLocks.has(projectDir)
      || hasAncestorIn(projectDir, gradleCatalogs)
      || hasAncestorIn(projectDir, gradleLocks);
    if (covered) {
      continue;
    }
    missing.push({
      packageManager: "GRADLE",
      projectDir,
      expectedFiles: [
        `${GRADLE_LOCK_FILE} in this directory`,
        `a ${GRADLE_CATALOG_FILE} catalog in this directory or any ancestor`
      ]
    });
  }

  // Deterministic report order: package manager first, then project dir.
  missing.sort((left, right) => {
    return left.packageManager !== right.packageManager
      ? left.packageManager.localeCompare(right.packageManager)
      : left.projectDir.localeCompare(right.projectDir);
  });
  return missing;
}
|
|
235898
|
+
/**
 * True when `dir` itself, or any of its ancestor directories, is present in
 * `ancestorSet`. Walks upward via nextParent until the chain is exhausted.
 *
 * @param {string} dir - starting (relative) directory.
 * @param {Set<string>} ancestorSet - directories to test membership against.
 * @returns {boolean}
 */
function hasAncestorIn(dir, ancestorSet) {
  for (let candidate = dir; candidate !== undefined; candidate = nextParent(candidate)) {
    if (ancestorSet.has(candidate)) {
      return true;
    }
  }
  return false;
}
|
|
235907
|
+
/**
 * Parent directory of `dir` for the upward walk in hasAncestorIn, or
 * `undefined` once the chain is exhausted (past "." / "" or at a filesystem
 * root, where dirname becomes a fixed point).
 *
 * @param {string} dir
 * @returns {string | undefined}
 */
function nextParent(dir) {
  if (dir === "." || dir === "") {
    return undefined;
  }
  const parent = dirname25(dir);
  // dirname is a fixed point at absolute roots (e.g. "/"); stop there.
  return parent === dir ? undefined : parent;
}
|
|
235915
|
+
|
|
235810
235916
|
// dist/internal/constants.js
|
|
235811
235917
|
var DEFAULT_REPORT_FILENAME_BASE = "coana-report";
|
|
235812
235918
|
|
|
235813
235919
|
// dist/internal/exclude-dirs-from-configuration-files.js
|
|
235814
235920
|
import { existsSync as existsSync27 } from "fs";
|
|
235815
235921
|
import { readFile as readFile35 } from "fs/promises";
|
|
235816
|
-
import { basename as
|
|
235922
|
+
import { basename as basename12, resolve as resolve41 } from "path";
|
|
235817
235923
|
var import_yaml2 = __toESM(require_dist11(), 1);
|
|
235818
235924
|
async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
|
|
235819
235925
|
const socketYmlConfigFile = resolve41(rootWorkingDir, "socket.yml");
|
|
@@ -235835,7 +235941,7 @@ async function inferExcludeDirsFromSocketConfig(socketConfigFile) {
|
|
|
235835
235941
|
const ignorePaths = ignorePathsNode.items.map((item) => String(item));
|
|
235836
235942
|
if (ignorePaths.some((ignorePath) => ignorePath.includes("!")))
|
|
235837
235943
|
return void 0;
|
|
235838
|
-
logger.info(`Inferring paths to exclude based on Socket config file: ${
|
|
235944
|
+
logger.info(`Inferring paths to exclude based on Socket config file: ${basename12(socketConfigFile)}`);
|
|
235839
235945
|
return ignorePaths;
|
|
235840
235946
|
} catch (e) {
|
|
235841
235947
|
return void 0;
|
|
@@ -236045,7 +236151,7 @@ function transformToVulnChainNode(dependencyTree) {
|
|
|
236045
236151
|
}
|
|
236046
236152
|
|
|
236047
236153
|
// dist/internal/socket-mode-helpers-socket-dependency-trees.js
|
|
236048
|
-
import { basename as
|
|
236154
|
+
import { basename as basename13, dirname as dirname26, join as join32, sep as sep5 } from "path";
|
|
236049
236155
|
var REQUIREMENTS_FILES_SEARCH_DEPTH2 = 3;
|
|
236050
236156
|
var venvExcludes = [
|
|
236051
236157
|
"venv",
|
|
@@ -236073,8 +236179,8 @@ var venvExcludes = [
|
|
|
236073
236179
|
function inferWorkspaceFromManifestPath(ecosystem, manifestPath, properPythonProjects) {
|
|
236074
236180
|
switch (ecosystem) {
|
|
236075
236181
|
case "NPM": {
|
|
236076
|
-
const base =
|
|
236077
|
-
const dir =
|
|
236182
|
+
const base = basename13(manifestPath);
|
|
236183
|
+
const dir = dirname26(manifestPath);
|
|
236078
236184
|
return base === "package.json" ? dir || "." : void 0;
|
|
236079
236185
|
}
|
|
236080
236186
|
case "MAVEN": {
|
|
@@ -236084,8 +236190,8 @@ function inferWorkspaceFromManifestPath(ecosystem, manifestPath, properPythonPro
|
|
|
236084
236190
|
if (venvExcludes.some((exclude) => manifestPath.startsWith(`${exclude}/`) || manifestPath.includes(`/${exclude}/`))) {
|
|
236085
236191
|
return void 0;
|
|
236086
236192
|
}
|
|
236087
|
-
const base =
|
|
236088
|
-
const dir =
|
|
236193
|
+
const base = basename13(manifestPath);
|
|
236194
|
+
const dir = dirname26(manifestPath);
|
|
236089
236195
|
const workspaceDir = dir === "" ? "." : dir;
|
|
236090
236196
|
if (properPythonProjects.includes(workspaceDir)) {
|
|
236091
236197
|
return workspaceDir;
|
|
@@ -236108,15 +236214,15 @@ function inferWorkspaceFromManifestPath(ecosystem, manifestPath, properPythonPro
|
|
|
236108
236214
|
return ".";
|
|
236109
236215
|
}
|
|
236110
236216
|
case "RUST": {
|
|
236111
|
-
return
|
|
236217
|
+
return dirname26(manifestPath) || ".";
|
|
236112
236218
|
}
|
|
236113
236219
|
case "GO": {
|
|
236114
|
-
const base =
|
|
236115
|
-
const dir =
|
|
236220
|
+
const base = basename13(manifestPath);
|
|
236221
|
+
const dir = dirname26(manifestPath);
|
|
236116
236222
|
return base === "go.mod" ? dir || "." : void 0;
|
|
236117
236223
|
}
|
|
236118
236224
|
case "RUBYGEMS": {
|
|
236119
|
-
return
|
|
236225
|
+
return dirname26(manifestPath) || ".";
|
|
236120
236226
|
}
|
|
236121
236227
|
default: {
|
|
236122
236228
|
return ".";
|
|
@@ -236126,9 +236232,9 @@ function inferWorkspaceFromManifestPath(ecosystem, manifestPath, properPythonPro
|
|
|
236126
236232
|
function inferProjectFromManifestPath(ecosystem, manifestPath) {
|
|
236127
236233
|
switch (ecosystem) {
|
|
236128
236234
|
case "NPM": {
|
|
236129
|
-
const filename =
|
|
236235
|
+
const filename = basename13(manifestPath);
|
|
236130
236236
|
if (["package-lock.json", "pnpm-lock.yaml", "pnpm-lock.yml", "yarn.lock"].includes(filename)) {
|
|
236131
|
-
return
|
|
236237
|
+
return dirname26(manifestPath) || ".";
|
|
236132
236238
|
}
|
|
236133
236239
|
return void 0;
|
|
236134
236240
|
}
|
|
@@ -236168,8 +236274,8 @@ async function fetchArtifactsFromSocket(rootWorkingDirectory, manifestsTarHash,
|
|
|
236168
236274
|
}
|
|
236169
236275
|
const allFiles = await getFilesRelative(rootWorkingDirectory, venvExcludes);
|
|
236170
236276
|
for (const file of allFiles) {
|
|
236171
|
-
const base =
|
|
236172
|
-
const workspaceDir =
|
|
236277
|
+
const base = basename13(file);
|
|
236278
|
+
const workspaceDir = dirname26(file) || ".";
|
|
236173
236279
|
if (base === "pyproject.toml" || base === "setup.py" && await isSetupPySetuptools(join32(rootWorkingDirectory, file))) {
|
|
236174
236280
|
if (!properPythonProjects.includes(workspaceDir)) {
|
|
236175
236281
|
properPythonProjects.push(workspaceDir);
|
|
@@ -251917,7 +252023,7 @@ async function onlineScan(dependencyTree, apiKey, timeout) {
|
|
|
251917
252023
|
}
|
|
251918
252024
|
|
|
251919
252025
|
// dist/version.js
|
|
251920
|
-
var version3 = "15.0.
|
|
252026
|
+
var version3 = "15.0.9";
|
|
251921
252027
|
|
|
251922
252028
|
// dist/cli-core.js
|
|
251923
252029
|
var { mapValues, omit, partition, pickBy: pickBy2 } = import_lodash15.default;
|
|
@@ -252158,6 +252264,17 @@ var CliCore = class {
|
|
|
252158
252264
|
const ecosystems = this.filterEcosystemsByPurlTypes(getEcosystemsFromManifestFileNames(manifestFiles));
|
|
252159
252265
|
await validateExternalDependencies(ecosystems, "run", manifestFiles);
|
|
252160
252266
|
}
|
|
252267
|
+
if (!this.options.reachContinueOnMissingLockFiles && !this.options.useOnlyPregeneratedSboms) {
|
|
252268
|
+
const missingLockFiles = await findGradleAndSbtProjectsMissingLockFiles(this.rootWorkingDirectory, {
|
|
252269
|
+
excludeDirs: this.options.excludeDirs
|
|
252270
|
+
});
|
|
252271
|
+
if (missingLockFiles.length > 0) {
|
|
252272
|
+
this.logMissingLockFilesError(missingLockFiles);
|
|
252273
|
+
throw new AnalysisHaltError([
|
|
252274
|
+
`${missingLockFiles.length} Gradle/SBT ${missingLockFiles.length === 1 ? "project is" : "projects are"} missing a lock file, version catalog, or pre-generated SBOM. See the log above for the list and remediation commands.`
|
|
252275
|
+
]);
|
|
252276
|
+
}
|
|
252277
|
+
}
|
|
252161
252278
|
this.sendProgress("SCAN_FOR_VULNERABILITIES", true, ".", ".");
|
|
252162
252279
|
const { artifacts, ecosystemToWorkspaceToAnalysisData, ecosystemToWorkspaceToVulnerabilities } = await fetchArtifactsFromSocket(this.rootWorkingDirectory, this.options.manifestsTarHash, "reachability", this.options.useUnreachableFromPrecomputation, this.options.useOnlyPregeneratedSboms);
|
|
252163
252280
|
this.sendProgress("SCAN_FOR_VULNERABILITIES", false, ".", ".");
|
|
@@ -252203,7 +252320,7 @@ var CliCore = class {
|
|
|
252203
252320
|
continue;
|
|
252204
252321
|
if (includeDirs.length > 0) {
|
|
252205
252322
|
const relPath = relative22(this.rootWorkingDirectory, resolvedWorkspace);
|
|
252206
|
-
if (!
|
|
252323
|
+
if (!import_picomatch12.default.isMatch(relPath, includeDirs))
|
|
252207
252324
|
continue;
|
|
252208
252325
|
}
|
|
252209
252326
|
filteredWorkspaces[workspace] = analysisData;
|
|
@@ -252894,6 +253011,27 @@ Subproject: ${subproject}`);
|
|
|
252894
253011
|
];
|
|
252895
253012
|
logger.error(displayLines.join("\n"));
|
|
252896
253013
|
}
|
|
253014
|
+
logMissingLockFilesError(missing) {
|
|
253015
|
+
const entries = missing.slice(0, 20).map((m4) => ` - ${m4.projectDir} (${m4.packageManager}): expected ${m4.expectedFiles.join(" or ")}`);
|
|
253016
|
+
if (missing.length > 20) {
|
|
253017
|
+
entries.push(` ... and ${missing.length - 20} more`);
|
|
253018
|
+
}
|
|
253019
|
+
const displayLines = [
|
|
253020
|
+
"",
|
|
253021
|
+
kleur_default.red().bold("Missing Lock Files"),
|
|
253022
|
+
"Gradle and SBT reachability analysis requires a lock file, a Gradle version catalog, or a pre-generated CDX/SPDX SBOM. The following projects are missing all of these:",
|
|
253023
|
+
...entries,
|
|
253024
|
+
"",
|
|
253025
|
+
"To fix this, commit one of:",
|
|
253026
|
+
" Gradle: `./gradlew dependencies --write-locks`, or a `libs.versions.toml` catalog.",
|
|
253027
|
+
" SBT: `sbt dependencyLockWrite` (requires the sbt-dependency-lock plugin).",
|
|
253028
|
+
" Or (any): a CycloneDX / SPDX SBOM produced by your preferred generator (e.g. `cyclonedx-gradle-plugin`, `sbt-sbom`, `syft`, or `cdxgen`) \u2014 any Socket-recognized CDX/SPDX file anywhere under the project root covers every Gradle/SBT subproject at once.",
|
|
253029
|
+
"",
|
|
253030
|
+
"Alternatively, use --reach-continue-on-missing-lock-files to bypass this check.",
|
|
253031
|
+
""
|
|
253032
|
+
];
|
|
253033
|
+
logger.error(displayLines.join("\n"));
|
|
253034
|
+
}
|
|
252897
253035
|
logAnalysisErrors(totalErrorCount, uniqueErrors) {
|
|
252898
253036
|
const errorLines = uniqueErrors.slice(0, 20).map((msg) => ` - ${msg}`);
|
|
252899
253037
|
if (uniqueErrors.length > 20) {
|
|
@@ -253242,7 +253380,7 @@ async function writeAnalysisDebugInfo(outputFilePath, ecosystemToWorkspaceToVuln
|
|
|
253242
253380
|
handleNexeBinaryMode();
|
|
253243
253381
|
var program2 = new Command();
|
|
253244
253382
|
var run2 = new Command();
|
|
253245
|
-
run2.name("run").argument("<path>", "File system path to folder containing the project").option("-o, --output-dir <path>", "Write json report to <path>/coana-report.json").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).option("--silent-spinner", "Silence spinner", "CI" in process.env || !process.stdin.isTTY).option("-p, --print-report", "Print the report to the console", false).option("--offline-database <path>", "Path to a coana-offline-db.json file for running the CLI without internet connectivity", void 0).option("-t, --timeout <timeout>", "Set API <timeout> in milliseconds to Coana backend.", "300000").option("-a, --analysis-timeout <timeout>", "Set <timeout> in seconds for each reachability analysis run").option("--memory-limit <memoryInMB>", "Set memory limit for analysis to <memoryInMB> megabytes of memory.", "8192").option("-c, --concurrency <concurrency>", "Set the maximum number of concurrent reachability analysis runs. It's recommended to choose a concurrency level that ensures that each analysis run has at least the --memory-limit amount of memory available. NPM reachability analysis does not support concurrent execution, so the concurrency level is ignored for NPM.", "1").option("--api-key <key>", "Set the Coana dashboard API key. By setting you also enable the dashboard integration.").addOption(new Option("--write-report-to-file", "Write the report dashboard-compatible report to dashboard-report.json. This report may help the Coana team debug issues with the report insertion mechanism.").default(false).hideHelp()).option("--project-name <repoName>", "Set the name of the repository. Used for dashboard integration.").option("--repo-url <repoUrl>", "Set the URL of the repository. Used for dashboard integration.").option("--include-dirs <relativeDirs...>", "globs for directories to include from the detection of subprojects (space-separated)(use relative paths from the project root). 
Notice, projects that are not included may still be scanned if they are referenced from included projects.").option("--exclude-dirs <relativeDirs...>", "globs for directories to exclude from the detection of subprojects (space-separated)(use relative paths from the project root). Notice, excluded projects may still be scanned if they are referenced from non-excluded projects.").option("--disable-analysis-splitting", "Limits Coana to at most 1 reachability analysis run per workspace").option("--print-analysis-log-file", "Store log output from the JavaScript/TypeScript reachability analysis in the file js-analysis.log file in the root of each workspace", false).option("--entry-points <entryPoints...>", "List of files to analyze for root workspace. The reachability analysis automatically analyzes all files used by the entry points. If not provided, all JavaScript and TypeScript files are considered entry points. For non-root workspaces, all JavaScript and TypeScript files are analyzed as well.").option("--include-projects-with-no-reachability-support", "Also runs Coana on projects where we support traditional SCA, but does not yet support reachability analysis.", false).option("--ecosystems <ecosystems...>", "List of ecosystems to analyze (space-separated). Currently NPM, PIP, MAVEN, NUGET and GO are supported. Default is all supported ecosystems.").addOption(new Option("--purl-types <purlTypes...>", "List of PURL types to analyze (space-separated). Currently npm, pypi, maven, nuget, golang and cargo are supported. Default is all supported purl types.").hideHelp()).option("--changed-files <files...>", "List of files that have changed. If provided, Coana only analyzes workspaces and modules that contain changed files.").option("--disable-report-submission", "Disable the submission of the report to the Coana dashboard. 
Used by the pipeline blocking feature.", false).option("--disable-analytics-sharing", "Disable analytics sharing.", false).option("--provider-project <path>", "File system path to folder containing the provider project (Only supported for Maven, Gradle, and SBT)").option("--provider-workspaces <dirs...>", "List of workspaces that build the provided runtime environment (Only supported for Maven, Gradle, and SBT)", (paths) => paths.split(" ")).option("--lightweight-reachability", "Runs Coana in lightweight mode. This increases analysis speed but also raises the risk of Coana misclassifying the reachability of certain complex vulnerabilities. Recommended only for use with Coana Guardrail mode.", false).addOption(new Option("--run-without-docker", "Run package managers and reachability analyzers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--run-env <env>", "Specifies the environment in which the CLI is run. So far only MANAGED_SCAN and UNKNOWN are supported.").default("UNKNOWN").choices(["UNKNOWN", "MANAGED_SCAN"]).hideHelp()).addOption(new Option("--guardrail-mode", "Run Coana in guardrail mode. This mode is used to prevent new reachable vulnerabilities from being introduced into the codebase. Usually run as a CI check when pushing new commits to a pull request.")).option("--ignore-failing-workspaces", "Continue processing when a workspace fails instead of exiting. Failed workspaces will be logged at termination.", false).option("--reach-continue-on-install-errors", "Continue analysis when package installation fails, falling back to precomputed (Tier 2) reachability results. By default, the CLI halts on installation errors in socket mode.", process.env.COANA_CONTINUE_ON_INSTALL_ERRORS === "true").option("--reach-continue-on-analysis-errors", "Continue analysis when errors occur (timeouts, OOM, parse errors, etc.), falling back to precomputed (Tier 2) reachability results. 
By default, the CLI halts on analysis errors in socket mode.", false).option("--reach-continue-on-no-source-files", "Continue analysis when a workspace contains no source files for its ecosystem. By default, the CLI halts in socket mode.", false).addOption(new Option("--socket-mode <output-file>", "Run Coana in socket mode and write report to <output-file>").hideHelp()).addOption(new Option("--manifests-tar-hash <hash>", "Hash of the tarball containing all manifest files already uploaded to Socket. If provided, Socket will be used for computing dependency trees.").hideHelp()).option("--skip-cache-usage", "Do not attempt to use cached analysis configuration from previous runs", false).addOption(new Option("--lazy-mode", "Enable lazy analysis mode for JavaScript/TypeScript. This can significantly speed up analysis by only analyzing code that is actually relevant for the vulnerabilities being analyzed.").default(false).hideHelp()).addOption(new Option("--min-severity <severity>", "Set the minimum severity of vulnerabilities to analyze. Supported severities are info, low, moderate, high and critical.").choices(["info", "INFO", "low", "LOW", "moderate", "MODERATE", "high", "HIGH", "critical", "CRITICAL"])).option("--use-unreachable-from-precomputation", "Skip the reachability analysis for vulnerabilities that are already known to be unreachable from the precomputed reachability analysis (Tier 2).", false).addOption(new Option("--use-only-pregenerated-sboms", "Only include artifacts that have CDX or SPDX files in their manifest files.").default(false).hideHelp()).option("--disable-external-tool-checks", "Disable validation of external tools (npm, python, go, etc.) before running analysis.", false).version(version3).configureHelp({ sortOptions: true }).action(async (path9, options) => {
|
|
253383
|
+
run2.name("run").argument("<path>", "File system path to folder containing the project").option("-o, --output-dir <path>", "Write json report to <path>/coana-report.json").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).option("--silent-spinner", "Silence spinner", "CI" in process.env || !process.stdin.isTTY).option("-p, --print-report", "Print the report to the console", false).option("--offline-database <path>", "Path to a coana-offline-db.json file for running the CLI without internet connectivity", void 0).option("-t, --timeout <timeout>", "Set API <timeout> in milliseconds to Coana backend.", "300000").option("-a, --analysis-timeout <timeout>", "Set <timeout> in seconds for each reachability analysis run").option("--memory-limit <memoryInMB>", "Set memory limit for analysis to <memoryInMB> megabytes of memory.", "8192").option("-c, --concurrency <concurrency>", "Set the maximum number of concurrent reachability analysis runs. It's recommended to choose a concurrency level that ensures that each analysis run has at least the --memory-limit amount of memory available. NPM reachability analysis does not support concurrent execution, so the concurrency level is ignored for NPM.", "1").option("--api-key <key>", "Set the Coana dashboard API key. By setting you also enable the dashboard integration.").addOption(new Option("--write-report-to-file", "Write the report dashboard-compatible report to dashboard-report.json. This report may help the Coana team debug issues with the report insertion mechanism.").default(false).hideHelp()).option("--project-name <repoName>", "Set the name of the repository. Used for dashboard integration.").option("--repo-url <repoUrl>", "Set the URL of the repository. Used for dashboard integration.").option("--include-dirs <relativeDirs...>", "globs for directories to include from the detection of subprojects (space-separated)(use relative paths from the project root). 
Notice, projects that are not included may still be scanned if they are referenced from included projects.").option("--exclude-dirs <relativeDirs...>", "globs for directories to exclude from the detection of subprojects (space-separated)(use relative paths from the project root). Notice, excluded projects may still be scanned if they are referenced from non-excluded projects.").option("--disable-analysis-splitting", "Limits Coana to at most 1 reachability analysis run per workspace").option("--print-analysis-log-file", "Store log output from the JavaScript/TypeScript reachability analysis in the file js-analysis.log file in the root of each workspace", false).option("--entry-points <entryPoints...>", "List of files to analyze for root workspace. The reachability analysis automatically analyzes all files used by the entry points. If not provided, all JavaScript and TypeScript files are considered entry points. For non-root workspaces, all JavaScript and TypeScript files are analyzed as well.").option("--include-projects-with-no-reachability-support", "Also runs Coana on projects where we support traditional SCA, but does not yet support reachability analysis.", false).option("--ecosystems <ecosystems...>", "List of ecosystems to analyze (space-separated). Currently NPM, PIP, MAVEN, NUGET and GO are supported. Default is all supported ecosystems.").addOption(new Option("--purl-types <purlTypes...>", "List of PURL types to analyze (space-separated). Currently npm, pypi, maven, nuget, golang and cargo are supported. Default is all supported purl types.").hideHelp()).option("--changed-files <files...>", "List of files that have changed. If provided, Coana only analyzes workspaces and modules that contain changed files.").option("--disable-report-submission", "Disable the submission of the report to the Coana dashboard. 
Used by the pipeline blocking feature.", false).option("--disable-analytics-sharing", "Disable analytics sharing.", false).option("--provider-project <path>", "File system path to folder containing the provider project (Only supported for Maven, Gradle, and SBT)").option("--provider-workspaces <dirs...>", "List of workspaces that build the provided runtime environment (Only supported for Maven, Gradle, and SBT)", (paths) => paths.split(" ")).option("--lightweight-reachability", "Runs Coana in lightweight mode. This increases analysis speed but also raises the risk of Coana misclassifying the reachability of certain complex vulnerabilities. Recommended only for use with Coana Guardrail mode.", false).addOption(new Option("--run-without-docker", "Run package managers and reachability analyzers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--run-env <env>", "Specifies the environment in which the CLI is run. So far only MANAGED_SCAN and UNKNOWN are supported.").default("UNKNOWN").choices(["UNKNOWN", "MANAGED_SCAN"]).hideHelp()).addOption(new Option("--guardrail-mode", "Run Coana in guardrail mode. This mode is used to prevent new reachable vulnerabilities from being introduced into the codebase. Usually run as a CI check when pushing new commits to a pull request.")).option("--ignore-failing-workspaces", "Continue processing when a workspace fails instead of exiting. Failed workspaces will be logged at termination.", false).option("--reach-continue-on-install-errors", "Continue analysis when package installation fails, falling back to precomputed (Tier 2) reachability results. By default, the CLI halts on installation errors in socket mode.", process.env.COANA_CONTINUE_ON_INSTALL_ERRORS === "true").option("--reach-continue-on-analysis-errors", "Continue analysis when errors occur (timeouts, OOM, parse errors, etc.), falling back to precomputed (Tier 2) reachability results. 
By default, the CLI halts on analysis errors in socket mode.", false).option("--reach-continue-on-no-source-files", "Continue analysis when a workspace contains no source files for its ecosystem. By default, the CLI halts in socket mode.", false).option("--reach-continue-on-missing-lock-files", "Continue analysis when a Gradle or SBT project is missing its lock file (or Gradle version catalog / pre-generated SBOM). By default, the CLI halts in socket mode.", process.env.COANA_REACH_CONTINUE_ON_MISSING_LOCK_FILES === "true").addOption(new Option("--socket-mode <output-file>", "Run Coana in socket mode and write report to <output-file>").hideHelp()).addOption(new Option("--manifests-tar-hash <hash>", "Hash of the tarball containing all manifest files already uploaded to Socket. If provided, Socket will be used for computing dependency trees.").hideHelp()).option("--skip-cache-usage", "Do not attempt to use cached analysis configuration from previous runs", false).addOption(new Option("--lazy-mode", "Enable lazy analysis mode for JavaScript/TypeScript. This can significantly speed up analysis by only analyzing code that is actually relevant for the vulnerabilities being analyzed.").default(false).hideHelp()).addOption(new Option("--min-severity <severity>", "Set the minimum severity of vulnerabilities to analyze. Supported severities are info, low, moderate, high and critical.").choices(["info", "INFO", "low", "LOW", "moderate", "MODERATE", "high", "HIGH", "critical", "CRITICAL"])).option("--use-unreachable-from-precomputation", "Skip the reachability analysis for vulnerabilities that are already known to be unreachable from the precomputed reachability analysis (Tier 2).", false).addOption(new Option("--use-only-pregenerated-sboms", "Only include artifacts that have CDX or SPDX files in their manifest files.").default(false).hideHelp()).option("--disable-external-tool-checks", "Disable validation of external tools (npm, python, go, etc.) 
before running analysis.", false).version(version3).configureHelp({ sortOptions: true }).action(async (path9, options) => {
|
|
253246
253384
|
process.env.DOCKER_IMAGE_TAG ??= version3;
|
|
253247
253385
|
options.ecosystems = options.ecosystems?.map((e) => e.toUpperCase());
|
|
253248
253386
|
options.minSeverity = options.minSeverity?.toUpperCase();
|
|
@@ -253293,7 +253431,7 @@ computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument(
|
|
|
253293
253431
|
const output = await computeFixesAndUpgradePurls(path9, optionsToUse, logFile);
|
|
253294
253432
|
if (options.outputFile) {
|
|
253295
253433
|
const outputFile = resolve44(options.outputFile);
|
|
253296
|
-
await mkdir7(
|
|
253434
|
+
await mkdir7(dirname27(outputFile), { recursive: true });
|
|
253297
253435
|
await writeFile17(outputFile, JSON.stringify(output, null, 2));
|
|
253298
253436
|
logger.info(`Result written to: ${outputFile}`);
|
|
253299
253437
|
}
|
package/package.json
CHANGED
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|