trawly 0.0.2 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,13 +1,446 @@
1
1
  // src/scanner.ts
2
- import { existsSync as existsSync2, statSync } from "fs";
3
- import { basename, resolve as resolve4, join as join2 } from "path";
2
+ import { existsSync as existsSync4, statSync as statSync2 } from "fs";
3
+ import { dirname as dirname3, resolve as resolve7, join as join4 } from "path";
4
+
5
+ // src/baseline.ts
6
+ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
7
+ import { dirname, resolve } from "path";
8
+ var BaselineError = class extends Error {
9
+ constructor(message) {
10
+ super(message);
11
+ this.name = "BaselineError";
12
+ }
13
+ };
14
+ function applyBaseline(findings, cwd, baselinePath) {
15
+ if (!baselinePath) return void 0;
16
+ const absolute = resolve(cwd, baselinePath);
17
+ const loaded = readBaseline(absolute);
18
+ const fingerprints = new Set(loaded.findings);
19
+ let existing = 0;
20
+ let fresh = 0;
21
+ const marked = findings.map((finding) => {
22
+ if (fingerprints.has(finding.fingerprint)) {
23
+ existing++;
24
+ return { ...finding, baseline: "existing" };
25
+ }
26
+ fresh++;
27
+ return { ...finding, baseline: "new" };
28
+ });
29
+ return {
30
+ result: {
31
+ path: absolute,
32
+ loaded: true,
33
+ total: findings.length,
34
+ existing,
35
+ new: fresh
36
+ },
37
+ findings: marked
38
+ };
39
+ }
40
+ function writeBaseline(findings, cwd, baselinePath, existing) {
41
+ const absolute = resolve(cwd, baselinePath);
42
+ const unique = [...new Set(findings.map((f) => f.fingerprint))].sort();
43
+ const payload = {
44
+ version: 1,
45
+ generatedAt: (/* @__PURE__ */ new Date()).toISOString(),
46
+ findings: unique
47
+ };
48
+ mkdirSync(dirname(absolute), { recursive: true });
49
+ writeFileSync(absolute, `${JSON.stringify(payload, null, 2)}
50
+ `);
51
+ return {
52
+ path: existing?.path,
53
+ loaded: existing?.loaded ?? false,
54
+ written: absolute,
55
+ total: findings.length,
56
+ existing: existing?.existing ?? 0,
57
+ new: existing?.new ?? findings.length
58
+ };
59
+ }
60
+ function readBaseline(path) {
61
+ if (!existsSync(path)) {
62
+ throw new BaselineError(`Baseline file does not exist: ${path}`);
63
+ }
64
+ let parsed;
65
+ try {
66
+ parsed = JSON.parse(readFileSync(path, "utf8"));
67
+ } catch (err) {
68
+ throw new BaselineError(
69
+ `Failed to parse baseline ${path}: ${err.message}`
70
+ );
71
+ }
72
+ if (!isRecord(parsed) || parsed.version !== 1) {
73
+ throw new BaselineError(`${path}: unsupported baseline format.`);
74
+ }
75
+ if (!Array.isArray(parsed.findings)) {
76
+ throw new BaselineError(`${path}: findings must be an array.`);
77
+ }
78
+ const findings = parsed.findings.filter((v) => typeof v === "string");
79
+ return {
80
+ version: 1,
81
+ generatedAt: typeof parsed.generatedAt === "string" ? parsed.generatedAt : "",
82
+ findings
83
+ };
84
+ }
85
+ function isRecord(value) {
86
+ return typeof value === "object" && value !== null && !Array.isArray(value);
87
+ }
88
+
89
+ // src/config.ts
90
+ import { existsSync as existsSync2, readFileSync as readFileSync2 } from "fs";
91
+ import { resolve as resolve2, join } from "path";
92
+ import { parse as parseToml } from "smol-toml";
93
+ var CONFIG_NAME = "trawly.toml";
94
+ var FAIL_ON_VALUES = /* @__PURE__ */ new Set([
95
+ "critical",
96
+ "high",
97
+ "moderate",
98
+ "low",
99
+ "none"
100
+ ]);
101
+ var POLICY_VALUES = /* @__PURE__ */ new Set([
102
+ "ci",
103
+ "strict",
104
+ "library",
105
+ "app"
106
+ ]);
107
+ var ConfigError = class extends Error {
108
+ constructor(message) {
109
+ super(message);
110
+ this.name = "ConfigError";
111
+ }
112
+ };
113
+ function loadConfig(cwd, explicitPath) {
114
+ const configPath = explicitPath ? resolve2(cwd, explicitPath) : findConfig(cwd);
115
+ if (!configPath) return { config: { ignore: [] } };
116
+ if (!existsSync2(configPath)) {
117
+ throw new ConfigError(`Config file does not exist: ${configPath}`);
118
+ }
119
+ let raw;
120
+ try {
121
+ raw = parseToml(readFileSync2(configPath, "utf8"));
122
+ } catch (err) {
123
+ throw new ConfigError(
124
+ `Failed to parse ${configPath}: ${err.message}`
125
+ );
126
+ }
127
+ return { path: configPath, config: normalizeConfig(raw, configPath) };
128
+ }
129
+ function findConfig(cwd) {
130
+ const candidate = join(cwd, CONFIG_NAME);
131
+ return existsSync2(candidate) ? candidate : void 0;
132
+ }
133
+ function normalizeConfig(raw, path) {
134
+ if (!isRecord2(raw)) throw new ConfigError(`${path} must be a TOML table.`);
135
+ const failOn = optionalString(raw.failOn, "failOn", path);
136
+ if (failOn !== void 0 && !FAIL_ON_VALUES.has(failOn)) {
137
+ throw new ConfigError(
138
+ `${path}: failOn must be one of ${[...FAIL_ON_VALUES].join(", ")}.`
139
+ );
140
+ }
141
+ const policy = optionalString(raw.policy, "policy", path);
142
+ if (policy !== void 0 && !POLICY_VALUES.has(policy)) {
143
+ throw new ConfigError(
144
+ `${path}: policy must be one of ${[...POLICY_VALUES].join(", ")}.`
145
+ );
146
+ }
147
+ const risk = optionalBoolean(raw.risk, "risk", path);
148
+ const env = optionalBoolean(raw.env, "env", path);
149
+ const allowedRegistries = normalizeStringArray(
150
+ raw.allowedRegistries,
151
+ "allowedRegistries",
152
+ path
153
+ );
154
+ if (raw.ignore !== void 0 && raw.IgnoredVulns !== void 0) {
155
+ console.warn(
156
+ `${path}: both "ignore" and legacy "IgnoredVulns" are defined; using "ignore".`
157
+ );
158
+ }
159
+ const ignore = normalizeIgnore(raw.ignore ?? raw.IgnoredVulns ?? [], path);
160
+ return {
161
+ failOn,
162
+ policy,
163
+ risk,
164
+ env,
165
+ allowedRegistries,
166
+ ignore
167
+ };
168
+ }
169
+ function normalizeIgnore(raw, path) {
170
+ if (raw === void 0) return [];
171
+ if (!Array.isArray(raw)) {
172
+ throw new ConfigError(`${path}: ignore must be an array of tables.`);
173
+ }
174
+ return raw.map((item, idx) => {
175
+ if (!isRecord2(item)) {
176
+ throw new ConfigError(`${path}: ignore[${idx}] must be a table.`);
177
+ }
178
+ const id = requiredString(item.id, `ignore[${idx}].id`, path);
179
+ const expires = requiredDateString(
180
+ item.expires,
181
+ `ignore[${idx}].expires`,
182
+ path
183
+ );
184
+ const reason = requiredString(item.reason, `ignore[${idx}].reason`, path);
185
+ return {
186
+ id,
187
+ expires,
188
+ reason,
189
+ package: optionalString(item.package, `ignore[${idx}].package`, path),
190
+ ecosystem: optionalString(item.ecosystem, `ignore[${idx}].ecosystem`, path),
191
+ version: optionalString(item.version, `ignore[${idx}].version`, path)
192
+ };
193
+ });
194
+ }
195
+ function normalizeStringArray(raw, field, path) {
196
+ if (raw === void 0) return void 0;
197
+ if (!Array.isArray(raw) || raw.some((v) => typeof v !== "string")) {
198
+ throw new ConfigError(`${path}: ${field} must be an array of strings.`);
199
+ }
200
+ return raw;
201
+ }
202
+ function requiredDateString(raw, field, path) {
203
+ const value = requiredString(raw, field, path);
204
+ if (!isIsoDate(value)) {
205
+ throw new ConfigError(`${path}: ${field} must be YYYY-MM-DD.`);
206
+ }
207
+ return value;
208
+ }
209
+ function requiredString(raw, field, path) {
210
+ if (typeof raw !== "string" || raw.trim() === "") {
211
+ throw new ConfigError(`${path}: ${field} is required.`);
212
+ }
213
+ return raw;
214
+ }
215
+ function optionalString(raw, field, path) {
216
+ if (raw === void 0) return void 0;
217
+ if (typeof raw !== "string") {
218
+ throw new ConfigError(`${path}: ${field} must be a string.`);
219
+ }
220
+ return raw;
221
+ }
222
+ function optionalBoolean(raw, field, path) {
223
+ if (raw === void 0) return void 0;
224
+ if (typeof raw !== "boolean") {
225
+ throw new ConfigError(`${path}: ${field} must be true or false.`);
226
+ }
227
+ return raw;
228
+ }
229
+ function isIsoDate(s) {
230
+ if (!/^\d{4}-\d{2}-\d{2}$/.test(s)) return false;
231
+ const date = /* @__PURE__ */ new Date(`${s}T00:00:00.000Z`);
232
+ return !Number.isNaN(date.getTime()) && date.toISOString().startsWith(s);
233
+ }
234
+ function isRecord2(value) {
235
+ return typeof value === "object" && value !== null && !Array.isArray(value);
236
+ }
237
+
238
+ // src/env.ts
239
+ import {
240
+ lstatSync,
241
+ readdirSync,
242
+ readFileSync as readFileSync3,
243
+ statSync
244
+ } from "fs";
245
+ import { join as join2, relative } from "path";
246
+
247
+ // src/fingerprint.ts
248
+ import { createHash } from "crypto";
249
+ function fingerprintFinding(input) {
250
+ return stableHash([
251
+ input.source,
252
+ input.type,
253
+ input.id,
254
+ input.ecosystem,
255
+ input.packageName,
256
+ input.installedVersion
257
+ ]);
258
+ }
259
+ function packageKey(pkg) {
260
+ return pkg.purl ?? `${pkg.ecosystem}:${pkg.name}@${pkg.version}`;
261
+ }
262
+ function stableHash(parts) {
263
+ return createHash("sha256").update(parts.join("\0")).digest("hex");
264
+ }
265
+
266
+ // src/env.ts
267
+ var MAX_ENV_FILE_BYTES = 1024 * 1024;
268
+ var SKIP_DIRS = /* @__PURE__ */ new Set([
269
+ ".git",
270
+ ".hg",
271
+ ".svn",
272
+ "coverage",
273
+ "dist",
274
+ "node_modules",
275
+ "vendor"
276
+ ]);
277
+ var SAFE_ENV_SUFFIXES = /* @__PURE__ */ new Set([
278
+ "default",
279
+ "defaults",
280
+ "dist",
281
+ "example",
282
+ "sample",
283
+ "template"
284
+ ]);
285
+ var SECRET_KEY_RE = /(?:^|_)(?:SECRET|TOKEN|PASSWORD|PASS|PWD|PRIVATE_KEY|API_KEY|ACCESS_KEY|AUTH|CREDENTIAL|DATABASE_URL|DB_URL|REDIS_URL|MONGO_URI|CONNECTION_STRING|WEBHOOK|CLIENT_SECRET)(?:$|_)/i;
286
+ var PRIVATE_KEY_RE = /PRIVATE_KEY|BEGIN_[A-Z0-9_]+_PRIVATE_KEY/i;
287
+ var PLACEHOLDER_RE = /^(?:|changeme|change_me|change-me|example|example-value|placeholder|replace_me|replace-me|todo|test|dummy|your_.+|<.+>|\$\{.+\}|x+)$/i;
288
+ function scanEnvFiles(cwd) {
289
+ const warnings = [];
290
+ const findings = [];
291
+ let filesScanned = 0;
292
+ for (const file of findEnvFiles(cwd)) {
293
+ let raw;
294
+ try {
295
+ const stat = statSync(file);
296
+ if (stat.size > MAX_ENV_FILE_BYTES) {
297
+ warnings.push(
298
+ `Skipped env file ${relative(cwd, file)} because it is larger than 1 MiB.`
299
+ );
300
+ continue;
301
+ }
302
+ raw = readFileSync3(file, "utf8");
303
+ } catch (err) {
304
+ warnings.push(
305
+ `Could not read env file ${relative(cwd, file)}: ${err.message}`
306
+ );
307
+ continue;
308
+ }
309
+ filesScanned++;
310
+ const rel = normalizePath(relative(cwd, file));
311
+ findings.push(envFileFinding(file, rel));
312
+ for (const assignment of parseEnvAssignments(raw)) {
313
+ if (!isSensitiveAssignment(assignment)) continue;
314
+ findings.push(envSecretFinding(file, rel, assignment));
315
+ }
316
+ }
317
+ return { findings, warnings, filesScanned };
318
+ }
319
+ function findEnvFiles(root) {
320
+ const out = [];
321
+ const stack = [root];
322
+ while (stack.length > 0) {
323
+ const dir = stack.pop();
324
+ let entries;
325
+ try {
326
+ entries = readdirSync(dir);
327
+ } catch {
328
+ continue;
329
+ }
330
+ for (const entry of entries) {
331
+ const path = join2(dir, entry);
332
+ let stat;
333
+ try {
334
+ stat = lstatSync(path);
335
+ } catch {
336
+ continue;
337
+ }
338
+ if (stat.isSymbolicLink()) continue;
339
+ if (stat.isDirectory()) {
340
+ if (!SKIP_DIRS.has(entry)) stack.push(path);
341
+ continue;
342
+ }
343
+ if (stat.isFile() && isEnvFile(entry)) out.push(path);
344
+ }
345
+ }
346
+ return out.sort();
347
+ }
348
+ function isEnvFile(name) {
349
+ if (name === ".env") return true;
350
+ if (!name.startsWith(".env.")) return false;
351
+ const suffixes = name.slice(".env.".length).toLowerCase().split(".").filter(Boolean);
352
+ return !suffixes.some((suffix) => SAFE_ENV_SUFFIXES.has(suffix));
353
+ }
354
+ function parseEnvAssignments(raw) {
355
+ const out = [];
356
+ raw.split(/\r?\n/).forEach((line, index) => {
357
+ const trimmed = line.trim();
358
+ if (!trimmed || trimmed.startsWith("#")) return;
359
+ const match = /^(?:export\s+)?([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)$/.exec(
360
+ trimmed
361
+ );
362
+ if (!match) return;
363
+ const key = match[1];
364
+ const value = unquote(match[2].trim());
365
+ out.push({ key, value, line: index + 1 });
366
+ });
367
+ return out;
368
+ }
369
+ function isSensitiveAssignment(assignment) {
370
+ if (!SECRET_KEY_RE.test(assignment.key)) return false;
371
+ return !PLACEHOLDER_RE.test(assignment.value.trim());
372
+ }
373
+ function envFileFinding(sourceFile, rel) {
374
+ const id = "TRAWLY-ENV-FILE";
375
+ return {
376
+ id,
377
+ source: "trawly",
378
+ type: "secret",
379
+ severity: "moderate",
380
+ ecosystem: "env",
381
+ packageName: ".env file",
382
+ installedVersion: rel,
383
+ summary: "Committed env file detected. Verify it does not contain secrets and prefer committing an example/template file instead.",
384
+ fixedVersions: [],
385
+ affectedPaths: [rel],
386
+ fingerprint: fingerprintFinding({
387
+ source: "trawly",
388
+ type: "secret",
389
+ id,
390
+ ecosystem: "env",
391
+ packageName: ".env file",
392
+ installedVersion: rel
393
+ }),
394
+ aliases: [],
395
+ sourceFile,
396
+ line: 1
397
+ };
398
+ }
399
+ function envSecretFinding(sourceFile, rel, assignment) {
400
+ const id = "TRAWLY-ENV-SECRET";
401
+ return {
402
+ id,
403
+ source: "trawly",
404
+ type: "secret",
405
+ severity: PRIVATE_KEY_RE.test(assignment.key) ? "critical" : "high",
406
+ ecosystem: "env",
407
+ packageName: assignment.key,
408
+ installedVersion: rel,
409
+ summary: "Committed env file contains a secret-like variable. The value is intentionally omitted from this report.",
410
+ fixedVersions: [],
411
+ affectedPaths: [rel],
412
+ fingerprint: fingerprintFinding({
413
+ source: "trawly",
414
+ type: "secret",
415
+ id,
416
+ ecosystem: "env",
417
+ packageName: assignment.key,
418
+ installedVersion: rel
419
+ }),
420
+ aliases: [],
421
+ sourceFile,
422
+ line: assignment.line
423
+ };
424
+ }
425
+ function unquote(value) {
426
+ if (value.startsWith('"') && value.endsWith('"') || value.startsWith("'") && value.endsWith("'")) {
427
+ return value.slice(1, -1);
428
+ }
429
+ return value;
430
+ }
431
+ function normalizePath(path) {
432
+ return path.split(/[\\/]/).join("/");
433
+ }
434
+
435
+ // src/extractors/lockfile.ts
436
+ import { basename as basename2 } from "path";
4
437
 
5
438
  // src/extractors/npm-package-lock.ts
6
- import { readFileSync } from "fs";
7
- import { resolve } from "path";
439
+ import { readFileSync as readFileSync4 } from "fs";
440
+ import { resolve as resolve3 } from "path";
8
441
  function parseNpmPackageLock(filePath) {
9
- const absolute = resolve(filePath);
10
- const raw = readFileSync(absolute, "utf8");
442
+ const absolute = resolve3(filePath);
443
+ const raw = readFileSync4(absolute, "utf8");
11
444
  let parsed;
12
445
  try {
13
446
  parsed = JSON.parse(raw);
@@ -45,8 +478,14 @@ function parseNpmPackageLock(filePath) {
45
478
  direct: directDeps.has(name) && isTopLevelInstance(path),
46
479
  dev: Boolean(entry.dev || entry.devOptional),
47
480
  optional: Boolean(entry.optional || entry.devOptional),
481
+ inputKind: "lockfile",
482
+ sourceFile: absolute,
483
+ line: lineOf(raw, JSON.stringify(path)),
484
+ manager: "npm",
48
485
  resolved: entry.resolved,
49
- integrity: entry.integrity
486
+ integrity: entry.integrity,
487
+ registry: registryFromResolved(entry.resolved),
488
+ hasInstallScript: Boolean(entry.hasInstallScript)
50
489
  });
51
490
  }
52
491
  return instances;
@@ -85,277 +524,856 @@ function isTopLevelInstance(path) {
85
524
  if (first === -1) return false;
86
525
  return path.indexOf("node_modules/", first + 1) === -1;
87
526
  }
527
+ function registryFromResolved(resolved) {
528
+ if (!resolved) return void 0;
529
+ try {
530
+ const url = new URL(resolved);
531
+ return `${url.protocol}//${url.host}`;
532
+ } catch {
533
+ return void 0;
534
+ }
535
+ }
536
+ function lineOf(raw, needle) {
537
+ const idx = raw.indexOf(needle);
538
+ if (idx === -1) return void 0;
539
+ return raw.slice(0, idx).split(/\r?\n/).length;
540
+ }
541
+
542
+ // src/extractors/pnpm-lock.ts
543
+ import { readFileSync as readFileSync6 } from "fs";
544
+ import { resolve as resolve4 } from "path";
545
+ import { parse as parseYaml } from "yaml";
546
+
547
+ // src/extractors/package-json.ts
548
+ import { existsSync as existsSync3, readFileSync as readFileSync5 } from "fs";
549
+ import { dirname as dirname2, join as join3 } from "path";
550
+ function readPackageJsonInfoFrom(filePath) {
551
+ return readPackageJsonInfo(dirname2(filePath));
552
+ }
553
+ function readPackageJsonInfo(cwd) {
554
+ const info = {
555
+ dependencies: /* @__PURE__ */ new Set(),
556
+ devDependencies: /* @__PURE__ */ new Set(),
557
+ optionalDependencies: /* @__PURE__ */ new Set(),
558
+ allDirect: /* @__PURE__ */ new Set()
559
+ };
560
+ const path = join3(cwd, "package.json");
561
+ if (!existsSync3(path)) return info;
562
+ try {
563
+ const raw = JSON.parse(readFileSync5(path, "utf8"));
564
+ collect(raw.dependencies, info.dependencies, info.allDirect);
565
+ collect(raw.devDependencies, info.devDependencies, info.allDirect);
566
+ collect(raw.optionalDependencies, info.optionalDependencies, info.allDirect);
567
+ collect(raw.peerDependencies, info.dependencies, info.allDirect);
568
+ } catch {
569
+ return info;
570
+ }
571
+ return info;
572
+ }
573
+ function collect(value, target, allDirect) {
574
+ if (typeof value !== "object" || value === null || Array.isArray(value)) return;
575
+ for (const name of Object.keys(value)) {
576
+ target.add(name);
577
+ allDirect.add(name);
578
+ }
579
+ }
88
580
 
89
581
  // src/extractors/pnpm-lock.ts
90
- import { readFileSync as readFileSync2 } from "fs";
91
- import { resolve as resolve2 } from "path";
92
- import yaml from "js-yaml";
93
582
  var SUPPORTED_MAJOR_VERSIONS = /* @__PURE__ */ new Set([6, 9]);
94
583
  function parsePnpmLock(filePath) {
95
- const absolute = resolve2(filePath);
96
- const raw = readFileSync2(absolute, "utf8");
584
+ const absolute = resolve4(filePath);
585
+ const raw = readFileSync6(absolute, "utf8");
97
586
  let parsed;
98
587
  try {
99
- parsed = yaml.load(raw) ?? {};
588
+ parsed = parseYaml(raw);
100
589
  } catch (err) {
101
590
  throw new Error(
102
591
  `Failed to parse ${absolute}: ${err.message}`
103
592
  );
104
593
  }
105
- const versionRaw = parsed.lockfileVersion;
106
- const major = parseLockfileMajor(versionRaw);
594
+ const major = parseLockfileMajor(parsed.lockfileVersion);
107
595
  if (major === null || !SUPPORTED_MAJOR_VERSIONS.has(major)) {
108
596
  throw new Error(
109
- `Unsupported pnpm lockfileVersion ${String(versionRaw)} in ${absolute}. Supported: 6.x, 9.x.`
597
+ `Unsupported pnpm lockfileVersion ${String(parsed.lockfileVersion)} in ${absolute}. Supported: 6.x, 9.x.`
110
598
  );
111
599
  }
112
- const packages = parsed.packages;
113
- if (!packages || typeof packages !== "object") {
114
- throw new Error(
115
- `Lockfile ${absolute} has no "packages" map; cannot extract installed versions.`
116
- );
600
+ if (!parsed.packages || typeof parsed.packages !== "object") {
601
+ throw new Error(`Lockfile ${absolute} has no "packages" map.`);
117
602
  }
118
- const importers = parsed.importers ?? {
119
- ".": {
120
- dependencies: parsed.dependencies,
121
- devDependencies: parsed.devDependencies,
122
- optionalDependencies: parsed.optionalDependencies
123
- }
124
- };
125
- const direct = collectDirectFromImporters(importers);
603
+ const rootInfo = readPackageJsonInfoFrom(absolute);
604
+ const importerDirect = collectImporterDirect(parsed);
605
+ const directDeps = importerDirect.all.size > 0 ? importerDirect.all : rootInfo.allDirect;
606
+ const devDeps = importerDirect.dev.size > 0 ? importerDirect.dev : rootInfo.devDependencies;
607
+ const optionalDeps = importerDirect.optional.size > 0 ? importerDirect.optional : rootInfo.optionalDependencies;
126
608
  const instances = [];
127
- for (const [key, entry] of Object.entries(packages)) {
128
- const parsed2 = parsePnpmPackageKey(key);
129
- if (!parsed2) continue;
130
- const name = entry.name ?? parsed2.name;
131
- const version = entry.version ?? parsed2.version;
132
- if (!name || !version) continue;
133
- const isDirect = direct.prod.has(name) || direct.dev.has(name) || direct.optional.has(name);
134
- const onlyDev = direct.dev.has(name) && !direct.prod.has(name);
135
- const onlyOptional = direct.optional.has(name) && !direct.prod.has(name) && !direct.dev.has(name);
609
+ for (const [key, entry] of Object.entries(parsed.packages)) {
610
+ const parsedKey = parsePnpmPackageKey(key);
611
+ if (!parsedKey) continue;
612
+ const direct = directDeps.has(parsedKey.name);
136
613
  instances.push({
137
- name,
138
- version,
614
+ name: parsedKey.name,
615
+ version: parsedKey.version,
139
616
  ecosystem: "npm",
140
617
  path: key,
141
- direct: isDirect,
142
- dev: Boolean(entry.dev) || onlyDev,
143
- optional: Boolean(entry.optional) || onlyOptional,
618
+ direct,
619
+ dev: direct ? devDeps.has(parsedKey.name) : Boolean(entry.dev),
620
+ optional: direct ? optionalDeps.has(parsedKey.name) : Boolean(entry.optional),
621
+ inputKind: "lockfile",
622
+ sourceFile: absolute,
623
+ line: lineOf2(raw, key),
624
+ manager: "pnpm",
144
625
  resolved: entry.resolution?.tarball,
145
- integrity: entry.resolution?.integrity
626
+ integrity: entry.resolution?.integrity,
627
+ registry: registryFromResolved2(entry.resolution?.tarball),
628
+ hasInstallScript: Boolean(entry.requiresBuild)
146
629
  });
147
630
  }
148
631
  return instances;
149
632
  }
633
+ function parsePnpmPackageKey(key) {
634
+ let normalized = key.replace(/^\/+/, "");
635
+ const peerStart = normalized.indexOf("(");
636
+ if (peerStart !== -1) normalized = normalized.slice(0, peerStart);
637
+ normalized = normalized.split("_")[0] ?? normalized;
638
+ const at = normalized.lastIndexOf("@");
639
+ if (at <= 0) return null;
640
+ const name = normalized.slice(0, at);
641
+ const version = normalized.slice(at + 1);
642
+ if (!name || !version) return null;
643
+ return { name, version };
644
+ }
645
+ function collectImporterDirect(lock) {
646
+ const all = /* @__PURE__ */ new Set();
647
+ const dev = /* @__PURE__ */ new Set();
648
+ const optional = /* @__PURE__ */ new Set();
649
+ const importers = lock.importers ?? {
650
+ ".": {
651
+ dependencies: lock.dependencies,
652
+ devDependencies: lock.devDependencies,
653
+ optionalDependencies: lock.optionalDependencies
654
+ }
655
+ };
656
+ for (const importer of Object.values(importers)) {
657
+ addKeys(importer.dependencies, all);
658
+ addKeys(importer.devDependencies, all, dev);
659
+ addKeys(importer.optionalDependencies, all, optional);
660
+ }
661
+ return { all, dev, optional };
662
+ }
150
663
  function parseLockfileMajor(value) {
151
664
  if (typeof value === "number") return Math.trunc(value);
152
665
  if (typeof value === "string") {
153
- const num = parseInt(value, 10);
154
- return Number.isNaN(num) ? null : num;
666
+ const major = Number.parseInt(value, 10);
667
+ return Number.isNaN(major) ? null : major;
155
668
  }
156
669
  return null;
157
670
  }
158
- function collectDirectFromImporters(importers) {
159
- const prod = /* @__PURE__ */ new Set();
160
- const dev = /* @__PURE__ */ new Set();
161
- const optional = /* @__PURE__ */ new Set();
162
- for (const importer of Object.values(importers)) {
163
- if (!importer) continue;
164
- addDepNames(importer.dependencies, prod);
165
- addDepNames(importer.devDependencies, dev);
166
- addDepNames(importer.optionalDependencies, optional);
671
+ function addKeys(value, all, bucket) {
672
+ if (!value) return;
673
+ for (const name of Object.keys(value)) {
674
+ all.add(name);
675
+ bucket?.add(name);
676
+ }
677
+ }
678
+ function registryFromResolved2(resolved) {
679
+ if (!resolved) return void 0;
680
+ try {
681
+ const url = new URL(resolved);
682
+ return `${url.protocol}//${url.host}`;
683
+ } catch {
684
+ return void 0;
685
+ }
686
+ }
687
+ function lineOf2(raw, needle) {
688
+ const idx = raw.indexOf(needle);
689
+ if (idx === -1) return void 0;
690
+ return raw.slice(0, idx).split(/\r?\n/).length;
691
+ }
692
+
693
+ // src/extractors/yarn-lock.ts
694
+ import { readFileSync as readFileSync7 } from "fs";
695
+ import { resolve as resolve5 } from "path";
696
+ import { parse as parseYaml2 } from "yaml";
697
+ import * as yarnClassicModule from "@yarnpkg/lockfile";
698
+ var yarnClassic = "parse" in yarnClassicModule ? yarnClassicModule : yarnClassicModule.default;
699
+ var LOCAL_YARN_PROTOCOLS = ["workspace:", "patch:", "portal:", "file:"];
700
+ function parseYarnLock(filePath) {
701
+ const absolute = resolve5(filePath);
702
+ const raw = readFileSync7(absolute, "utf8");
703
+ return isBerryLock(raw) ? parseYarnBerryLock(absolute, raw) : parseYarnClassicLock(absolute, raw);
704
+ }
705
+ function parseYarnClassicLock(absolute, raw) {
706
+ const parsed = yarnClassic.parse(raw);
707
+ if (parsed.type === "conflict") {
708
+ throw new Error(`Yarn lockfile ${absolute} contains merge conflicts.`);
709
+ }
710
+ const rootInfo = readPackageJsonInfoFrom(absolute);
711
+ const instances = [];
712
+ for (const [descriptor, value] of Object.entries(parsed.object)) {
713
+ if (!isRecord3(value)) continue;
714
+ const entry = value;
715
+ if (!entry.version) continue;
716
+ const name = parseYarnDescriptorName(descriptor);
717
+ if (!name) continue;
718
+ const direct = rootInfo.allDirect.has(name);
719
+ instances.push({
720
+ name,
721
+ version: entry.version,
722
+ ecosystem: "npm",
723
+ path: descriptor,
724
+ direct,
725
+ dev: direct ? rootInfo.devDependencies.has(name) : false,
726
+ optional: direct ? rootInfo.optionalDependencies.has(name) : false,
727
+ inputKind: "lockfile",
728
+ sourceFile: absolute,
729
+ line: lineOf3(raw, descriptor),
730
+ manager: "yarn",
731
+ resolved: entry.resolved,
732
+ integrity: entry.integrity,
733
+ registry: registryFromResolved3(entry.resolved),
734
+ hasInstallScript: false
735
+ });
736
+ }
737
+ return dedupeInstances(instances);
738
+ }
739
+ function parseYarnBerryLock(absolute, raw) {
740
+ let parsed;
741
+ try {
742
+ parsed = parseYaml2(raw);
743
+ } catch (err) {
744
+ throw new Error(
745
+ `Failed to parse ${absolute}: ${err.message}`
746
+ );
747
+ }
748
+ const rootInfo = readPackageJsonInfoFrom(absolute);
749
+ const instances = [];
750
+ for (const [descriptor, value] of Object.entries(parsed)) {
751
+ if (descriptor === "__metadata" || !isRecord3(value)) continue;
752
+ const entry = value;
753
+ if (!entry.version) continue;
754
+ const resolution = entry.resolution ?? descriptor;
755
+ if (hasLocalYarnProtocol(resolution) || hasLocalYarnProtocol(descriptor)) {
756
+ continue;
757
+ }
758
+ const name = parseYarnDescriptorName(resolution) ?? parseYarnDescriptorName(descriptor);
759
+ if (!name) continue;
760
+ const direct = rootInfo.allDirect.has(name);
761
+ instances.push({
762
+ name,
763
+ version: entry.version,
764
+ ecosystem: "npm",
765
+ path: descriptor,
766
+ direct,
767
+ dev: direct ? rootInfo.devDependencies.has(name) : false,
768
+ optional: direct ? rootInfo.optionalDependencies.has(name) : false,
769
+ inputKind: "lockfile",
770
+ sourceFile: absolute,
771
+ line: lineOf3(raw, descriptor),
772
+ manager: "yarn",
773
+ integrity: entry.checksum,
774
+ hasInstallScript: false
775
+ });
776
+ }
777
+ return dedupeInstances(instances);
778
+ }
779
+ function hasLocalYarnProtocol(value) {
780
+ const normalized = value.trim().replace(/^"|"$/g, "");
781
+ return LOCAL_YARN_PROTOCOLS.some(
782
+ (protocol) => normalized.startsWith(protocol) || normalized.includes(`@${protocol}`)
783
+ );
784
+ }
785
+ function parseYarnDescriptorName(descriptor) {
786
+ const first = descriptor.split(",")[0]?.trim().replace(/^"|"$/g, "");
787
+ if (!first) return null;
788
+ for (const marker of ["@npm:", "@patch:", "@workspace:", "@portal:", "@file:"]) {
789
+ const idx = first.lastIndexOf(marker);
790
+ if (idx > 0) return first.slice(0, idx);
791
+ }
792
+ if (first.startsWith("@")) {
793
+ const slash = first.indexOf("/");
794
+ if (slash === -1) return null;
795
+ const at2 = first.indexOf("@", slash + 1);
796
+ return at2 === -1 ? first : first.slice(0, at2);
797
+ }
798
+ const at = first.indexOf("@");
799
+ return at === -1 ? first : first.slice(0, at);
800
+ }
801
+ function isBerryLock(raw) {
802
+ return raw.includes("__metadata:") || raw.includes("cacheKey:");
803
+ }
804
+ function dedupeInstances(instances) {
805
+ const seen = /* @__PURE__ */ new Set();
806
+ const out = [];
807
+ for (const instance of instances) {
808
+ const key = `${instance.name}@${instance.version}`;
809
+ if (seen.has(key)) continue;
810
+ seen.add(key);
811
+ out.push(instance);
812
+ }
813
+ return out;
814
+ }
815
+ function registryFromResolved3(resolved) {
816
+ if (!resolved) return void 0;
817
+ try {
818
+ const url = new URL(resolved);
819
+ return `${url.protocol}//${url.host}`;
820
+ } catch {
821
+ return void 0;
822
+ }
823
+ }
824
+ function lineOf3(raw, needle) {
825
+ const idx = raw.indexOf(needle);
826
+ if (idx === -1) return void 0;
827
+ return raw.slice(0, idx).split(/\r?\n/).length;
828
+ }
829
+ function isRecord3(value) {
830
+ return typeof value === "object" && value !== null && !Array.isArray(value);
831
+ }
832
+
833
+ // src/extractors/lockfile.ts
834
+ function parseLockfile(filePath) {
835
+ const file = basename2(filePath);
836
+ if (file === "package-lock.json" || file === "npm-shrinkwrap.json") {
837
+ return parseNpmPackageLock(filePath);
838
+ }
839
+ if (file === "pnpm-lock.yaml") return parsePnpmLock(filePath);
840
+ if (file === "yarn.lock") return parseYarnLock(filePath);
841
+ throw new Error(
842
+ `Unsupported lockfile ${filePath}. Supported: package-lock.json, npm-shrinkwrap.json, pnpm-lock.yaml, yarn.lock.`
843
+ );
844
+ }
845
+
846
+ // src/extractors/sbom.ts
847
+ import { readFileSync as readFileSync8 } from "fs";
848
+ import { basename as basename3, resolve as resolve6 } from "path";
849
+ import { XMLParser } from "fast-xml-parser";
850
+ import { PackageURL } from "packageurl-js";
851
+ function parseSbom(filePath) {
852
+ const absolute = resolve6(filePath);
853
+ const raw = readFileSync8(absolute, "utf8");
854
+ const trimmed = raw.trimStart();
855
+ if (trimmed.startsWith("{")) return parseJsonSbom(absolute, raw);
856
+ if (trimmed.startsWith("<")) return parseCycloneDxXml(absolute, raw);
857
+ return parseSpdxTagValue(absolute, raw);
858
+ }
859
+ function parseJsonSbom(absolute, raw) {
860
+ let parsed;
861
+ try {
862
+ parsed = JSON.parse(raw);
863
+ } catch (err) {
864
+ throw new Error(
865
+ `Failed to parse ${absolute}: ${err.message}`
866
+ );
867
+ }
868
+ if (!isRecord4(parsed)) {
869
+ throw new Error(`SBOM ${absolute} must contain a JSON object.`);
870
+ }
871
+ if (Array.isArray(parsed.components)) {
872
+ return parseCycloneDxJson(absolute, raw, parsed.components);
873
+ }
874
+ if (Array.isArray(parsed.packages)) {
875
+ return parseSpdxJson(absolute, raw, parsed.packages);
876
+ }
877
+ throw new Error(
878
+ `Could not detect SBOM format for ${absolute}; expected CycloneDX or SPDX.`
879
+ );
880
+ }
881
+ function parseCycloneDxJson(absolute, raw, components) {
882
+ const instances = [];
883
+ for (const component of components) {
884
+ if (!isRecord4(component) || typeof component.purl !== "string") continue;
885
+ const pkg = parsePurlPackage(component.purl);
886
+ if (!pkg) continue;
887
+ instances.push(sbomPackage(pkg, absolute, raw, component.purl));
888
+ }
889
+ return dedupe(instances);
890
+ }
891
+ function parseCycloneDxXml(absolute, raw) {
892
+ const parser = new XMLParser({
893
+ ignoreAttributes: false,
894
+ attributeNamePrefix: "",
895
+ textNodeName: "#text"
896
+ });
897
+ let parsed;
898
+ try {
899
+ parsed = parser.parse(raw);
900
+ } catch (err) {
901
+ throw new Error(
902
+ `Failed to parse ${absolute}: ${err.message}`
903
+ );
904
+ }
905
+ const bom = isRecord4(parsed) ? parsed.bom : void 0;
906
+ const components = isRecord4(bom) && isRecord4(bom.components) ? arrayify(bom.components.component) : [];
907
+ const instances = [];
908
+ for (const component of components) {
909
+ if (!isRecord4(component) || typeof component.purl !== "string") continue;
910
+ const pkg = parsePurlPackage(component.purl);
911
+ if (!pkg) continue;
912
+ instances.push(sbomPackage(pkg, absolute, raw, component.purl));
913
+ }
914
+ return dedupe(instances);
915
+ }
916
+ function parseSpdxJson(absolute, raw, packages) {
917
+ const instances = [];
918
+ for (const pkgRecord of packages) {
919
+ if (!isRecord4(pkgRecord)) continue;
920
+ const externalRefs = Array.isArray(pkgRecord.externalRefs) ? pkgRecord.externalRefs : [];
921
+ for (const ref of externalRefs) {
922
+ if (!isRecord4(ref)) continue;
923
+ const locator = ref.referenceLocator;
924
+ const type = String(ref.referenceType ?? "").toLowerCase();
925
+ if (type !== "purl" || typeof locator !== "string") continue;
926
+ const pkg = parsePurlPackage(locator);
927
+ if (!pkg) continue;
928
+ instances.push(sbomPackage(pkg, absolute, raw, locator));
929
+ }
930
+ }
931
+ return dedupe(instances);
932
+ }
933
+ function parseSpdxTagValue(absolute, raw) {
934
+ if (!raw.includes("SPDXVersion:")) {
935
+ throw new Error(
936
+ `Could not detect SBOM format for ${absolute}; expected CycloneDX or SPDX.`
937
+ );
938
+ }
939
+ const instances = [];
940
+ for (const line of raw.split(/\r?\n/)) {
941
+ const match = line.match(/^ExternalRef:\s+PACKAGE-MANAGER\s+purl\s+(\S+)/);
942
+ if (!match?.[1]) continue;
943
+ const pkg = parsePurlPackage(match[1]);
944
+ if (!pkg) continue;
945
+ instances.push(sbomPackage(pkg, absolute, raw, match[1]));
946
+ }
947
+ return dedupe(instances);
948
+ }
949
+ function parsePurlPackage(purl) {
950
+ let parsed;
951
+ try {
952
+ parsed = PackageURL.fromString(purl);
953
+ } catch {
954
+ return null;
955
+ }
956
+ if (!parsed.version) return null;
957
+ const ecosystem = purlTypeToOsvEcosystem(parsed.type);
958
+ if (!ecosystem) return null;
959
+ return {
960
+ name: purlName(parsed),
961
+ version: parsed.version,
962
+ ecosystem,
963
+ purl
964
+ };
965
+ }
966
+ function sbomPackage(pkg, absolute, raw, needle) {
967
+ return {
968
+ name: pkg.name,
969
+ version: pkg.version,
970
+ ecosystem: pkg.ecosystem,
971
+ path: `${basename3(absolute)}:${pkg.purl}`,
972
+ direct: false,
973
+ dev: false,
974
+ optional: false,
975
+ inputKind: "sbom",
976
+ purl: pkg.purl,
977
+ sourceFile: absolute,
978
+ line: lineOf4(raw, needle),
979
+ manager: "sbom"
980
+ };
981
+ }
982
+ function purlTypeToOsvEcosystem(type) {
983
+ switch (type.toLowerCase()) {
984
+ case "npm":
985
+ return "npm";
986
+ case "pypi":
987
+ return "PyPI";
988
+ case "maven":
989
+ return "Maven";
990
+ case "gem":
991
+ return "RubyGems";
992
+ case "nuget":
993
+ return "NuGet";
994
+ case "golang":
995
+ case "go":
996
+ return "Go";
997
+ case "cargo":
998
+ return "crates.io";
999
+ case "composer":
1000
+ return "Packagist";
1001
+ case "deb":
1002
+ return "Debian";
1003
+ case "apk":
1004
+ return "Alpine";
1005
+ default:
1006
+ return null;
1007
+ }
1008
+ }
1009
+ function purlName(purl) {
1010
+ if (purl.type.toLowerCase() === "npm" && purl.namespace) {
1011
+ const scope = purl.namespace.startsWith("@") ? purl.namespace : `@${purl.namespace}`;
1012
+ return `${scope}/${purl.name}`;
1013
+ }
1014
+ if (purl.type.toLowerCase() === "maven" && purl.namespace) {
1015
+ return `${purl.namespace}:${purl.name}`;
1016
+ }
1017
+ return purl.namespace ? `${purl.namespace}/${purl.name}` : purl.name;
1018
+ }
1019
+ function dedupe(instances) {
1020
+ const seen = /* @__PURE__ */ new Set();
1021
+ const out = [];
1022
+ for (const instance of instances) {
1023
+ const key = instance.purl ?? `${instance.ecosystem}:${instance.name}@${instance.version}`;
1024
+ if (seen.has(key)) continue;
1025
+ seen.add(key);
1026
+ out.push(instance);
1027
+ }
1028
+ return out;
1029
+ }
1030
+ function arrayify(value) {
1031
+ if (value === void 0) return [];
1032
+ return Array.isArray(value) ? value : [value];
1033
+ }
1034
+ function lineOf4(raw, needle) {
1035
+ const idx = raw.indexOf(needle);
1036
+ if (idx === -1) return void 0;
1037
+ return raw.slice(0, idx).split(/\r?\n/).length;
1038
+ }
1039
+ function isRecord4(value) {
1040
+ return typeof value === "object" && value !== null && !Array.isArray(value);
1041
+ }
1042
+
1043
+ // src/ignore.ts
1044
+ function applyIgnores(findings, ignores, now) {
1045
+ if (ignores.length === 0) return { active: findings, ignored: [], warnings: [] };
1046
+ const warnings = [];
1047
+ const activeIgnores = ignores.filter((entry) => {
1048
+ const expires = /* @__PURE__ */ new Date(`${entry.expires}T23:59:59.999Z`);
1049
+ if (Number.isNaN(expires.getTime()) || expires < now) {
1050
+ warnings.push(
1051
+ `Ignore for ${entry.id} expired on ${entry.expires} and was not applied.`
1052
+ );
1053
+ return false;
1054
+ }
1055
+ return true;
1056
+ });
1057
+ const active = [];
1058
+ const ignored = [];
1059
+ for (const finding of findings) {
1060
+ const matched = activeIgnores.some((entry) => matchesIgnore(finding, entry));
1061
+ if (matched) {
1062
+ ignored.push({ ...finding, ignored: true });
1063
+ } else {
1064
+ active.push(finding);
1065
+ }
1066
+ }
1067
+ return { active, ignored, warnings };
1068
+ }
1069
+ function matchesIgnore(finding, entry) {
1070
+ const ids = /* @__PURE__ */ new Set([finding.id, ...finding.aliases]);
1071
+ if (!ids.has(entry.id)) return false;
1072
+ if (entry.package && entry.package !== finding.packageName) return false;
1073
+ if (entry.ecosystem && entry.ecosystem !== finding.ecosystem) return false;
1074
+ if (entry.version && entry.version !== finding.installedVersion) return false;
1075
+ return true;
1076
+ }
1077
+
1078
+ // src/policy.ts
1079
+ var POLICY_PRESETS = {
1080
+ ci: {
1081
+ failOn: "high",
1082
+ risk: true,
1083
+ env: false,
1084
+ includeDev: true
1085
+ },
1086
+ strict: {
1087
+ failOn: "moderate",
1088
+ risk: true,
1089
+ env: true,
1090
+ includeDev: true
1091
+ },
1092
+ library: {
1093
+ failOn: "moderate",
1094
+ risk: true,
1095
+ env: false,
1096
+ includeDev: false
1097
+ },
1098
+ app: {
1099
+ failOn: "high",
1100
+ risk: true,
1101
+ env: true,
1102
+ includeDev: true
1103
+ }
1104
+ };
1105
+ function resolvePolicy(requested, configured) {
1106
+ const name = requested ?? configured;
1107
+ return name ? POLICY_PRESETS[name] : void 0;
1108
+ }
1109
+
1110
+ // src/risk.ts
1111
+ var REGISTRY_URL = "https://registry.npmjs.org";
1112
+ var REGISTRY_ENV = "TRAWLY_NPM_REGISTRY_URL";
1113
+ var REQUEST_TIMEOUT_MS = 15e3;
1114
+ var NEW_VERSION_DAYS = 30;
1115
+ var NEW_PACKAGE_DAYS = 90;
1116
+ var PACKUMENT_CONCURRENCY = 8;
1117
+ var PACKUMENT_MAX_RETRIES = 3;
1118
+ var PACKUMENT_BACKOFF_MS = 250;
1119
+ async function collectRiskSignals(packages, options) {
1120
+ if (!options.enabled) return { findings: [], warnings: [] };
1121
+ const findings = [];
1122
+ const warnings = [];
1123
+ for (const pkg of packages) {
1124
+ if (pkg.hasInstallScript) {
1125
+ findings.push(riskFinding(pkg, {
1126
+ id: "TRAWLY-INSTALL-SCRIPT",
1127
+ severity: "moderate",
1128
+ summary: `${pkg.name}@${pkg.version} declares install-time scripts or requires a build step.`
1129
+ }));
1130
+ }
1131
+ const registry = normalizeRegistry(pkg.registry);
1132
+ if (registry && !isAllowedRegistry(registry, options.allowedRegistries)) {
1133
+ findings.push(riskFinding(pkg, {
1134
+ id: "TRAWLY-UNEXPECTED-REGISTRY",
1135
+ severity: "moderate",
1136
+ summary: `${pkg.name}@${pkg.version} was resolved from unexpected registry ${registry}.`
1137
+ }));
1138
+ }
1139
+ }
1140
+ const npmPackageGroups = groupNpmPackages(packages);
1141
+ const fetchImpl = options.fetchImpl ?? fetch;
1142
+ const groupsByName = /* @__PURE__ */ new Map();
1143
+ for (const group of npmPackageGroups) {
1144
+ const name = group[0]?.name;
1145
+ if (!name) continue;
1146
+ const list = groupsByName.get(name) ?? [];
1147
+ list.push(group);
1148
+ groupsByName.set(name, list);
1149
+ }
1150
+ await mapWithConcurrency(
1151
+ [...groupsByName.entries()],
1152
+ PACKUMENT_CONCURRENCY,
1153
+ async ([name, groups]) => {
1154
+ let packument;
1155
+ try {
1156
+ packument = await fetchPackument(fetchImpl, name);
1157
+ } catch (err) {
1158
+ warnings.push(
1159
+ `Could not fetch npm publish metadata for ${name}: ${err.message}`
1160
+ );
1161
+ return;
1162
+ }
1163
+ const createdAt = parseDate(packument.time?.created);
1164
+ const isNewPackage = !!createdAt && daysBetween(createdAt, options.now) < NEW_PACKAGE_DAYS;
1165
+ for (const group of groups) {
1166
+ const representative = group[0];
1167
+ if (!representative) continue;
1168
+ const versionAt = parseDate(packument.time?.[representative.version]);
1169
+ const deprecated = packument.versions?.[representative.version]?.deprecated;
1170
+ if (isNewPackage) {
1171
+ for (const pkg of group) {
1172
+ findings.push(
1173
+ riskFinding(pkg, {
1174
+ id: "TRAWLY-NEW-PACKAGE",
1175
+ severity: "moderate",
1176
+ summary: `${pkg.name} was first published less than ${NEW_PACKAGE_DAYS} days ago.`
1177
+ })
1178
+ );
1179
+ }
1180
+ }
1181
+ if (deprecated) {
1182
+ for (const pkg of group) {
1183
+ findings.push(
1184
+ riskFinding(pkg, {
1185
+ id: "TRAWLY-DEPRECATED-PACKAGE",
1186
+ severity: "moderate",
1187
+ summary: `${pkg.name}@${pkg.version} is deprecated: ${deprecated}`
1188
+ })
1189
+ );
1190
+ }
1191
+ }
1192
+ if (versionAt && daysBetween(versionAt, options.now) < NEW_VERSION_DAYS) {
1193
+ for (const pkg of group) {
1194
+ findings.push(
1195
+ riskFinding(pkg, {
1196
+ id: "TRAWLY-NEW-VERSION",
1197
+ severity: "low",
1198
+ summary: `${pkg.name}@${pkg.version} was published less than ${NEW_VERSION_DAYS} days ago.`
1199
+ })
1200
+ );
1201
+ }
1202
+ }
1203
+ }
1204
+ }
1205
+ );
1206
+ return { findings, warnings };
1207
+ }
1208
+ function riskFinding(pkg, input) {
1209
+ return {
1210
+ id: input.id,
1211
+ source: "trawly",
1212
+ type: "risk-signal",
1213
+ severity: input.severity,
1214
+ ecosystem: pkg.ecosystem,
1215
+ packageName: pkg.name,
1216
+ installedVersion: pkg.version,
1217
+ summary: input.summary,
1218
+ fixedVersions: [],
1219
+ affectedPaths: [pkg.path],
1220
+ fingerprint: fingerprintFinding({
1221
+ source: "trawly",
1222
+ type: "risk-signal",
1223
+ id: input.id,
1224
+ ecosystem: pkg.ecosystem,
1225
+ packageName: pkg.name,
1226
+ installedVersion: pkg.version
1227
+ }),
1228
+ aliases: [],
1229
+ sourceFile: pkg.sourceFile,
1230
+ line: pkg.line
1231
+ };
1232
+ }
1233
+ function groupNpmPackages(packages) {
1234
+ const groups = /* @__PURE__ */ new Map();
1235
+ for (const pkg of packages) {
1236
+ if (pkg.ecosystem !== "npm") continue;
1237
+ const key = `${pkg.name}@${pkg.version}`;
1238
+ const group = groups.get(key) ?? [];
1239
+ group.push(pkg);
1240
+ groups.set(key, group);
1241
+ }
1242
+ return [...groups.values()];
1243
+ }
1244
+ async function fetchPackument(fetchImpl, name) {
1245
+ const registry = (process.env[REGISTRY_ENV] ?? REGISTRY_URL).replace(/\/+$/, "");
1246
+ const url = `${registry}/${encodePackageName(name)}`;
1247
+ let lastErr;
1248
+ for (let attempt = 0; attempt <= PACKUMENT_MAX_RETRIES; attempt++) {
1249
+ const controller = new AbortController();
1250
+ const timer = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS);
1251
+ try {
1252
+ const res = await fetchImpl(url, {
1253
+ signal: controller.signal,
1254
+ headers: { accept: "application/json" }
1255
+ });
1256
+ if (res.ok) return await res.json();
1257
+ const err = new RegistryHttpError(
1258
+ `registry ${res.status}: ${res.statusText}`,
1259
+ res.status,
1260
+ retryAfterMs(res.headers)
1261
+ );
1262
+ if (!isRetryableRegistryError(err) || attempt === PACKUMENT_MAX_RETRIES) {
1263
+ throw err;
1264
+ }
1265
+ lastErr = err;
1266
+ await sleep(retryDelayMs(err, attempt));
1267
+ } catch (err) {
1268
+ if (err instanceof RegistryHttpError) throw err;
1269
+ lastErr = err;
1270
+ if (attempt === PACKUMENT_MAX_RETRIES) break;
1271
+ await sleep(retryDelayMs(void 0, attempt));
1272
+ } finally {
1273
+ clearTimeout(timer);
1274
+ }
1275
+ }
1276
+ throw lastErr;
1277
+ }
1278
+ async function mapWithConcurrency(items, concurrency, worker) {
1279
+ let next = 0;
1280
+ const workers = Array.from(
1281
+ { length: Math.min(concurrency, items.length) },
1282
+ async () => {
1283
+ while (next < items.length) {
1284
+ const item = items[next++];
1285
+ if (item !== void 0) await worker(item);
1286
+ }
1287
+ }
1288
+ );
1289
+ await Promise.all(workers);
1290
+ }
1291
+ var RegistryHttpError = class extends Error {
1292
+ constructor(message, status, retryAfterMs3) {
1293
+ super(message);
1294
+ this.status = status;
1295
+ this.retryAfterMs = retryAfterMs3;
167
1296
  }
168
- return { prod, dev, optional };
1297
+ status;
1298
+ retryAfterMs;
1299
+ };
1300
+ function isRetryableRegistryError(err) {
1301
+ return err.status === 429 || err.status >= 500;
169
1302
  }
170
- function addDepNames(block, into) {
171
- if (!block) return;
172
- for (const name of Object.keys(block)) into.add(name);
1303
+ function retryDelayMs(err, attempt) {
1304
+ if (err?.retryAfterMs !== void 0) return err.retryAfterMs;
1305
+ const base = PACKUMENT_BACKOFF_MS * 2 ** attempt;
1306
+ return base + Math.floor(Math.random() * Math.min(base, 100));
173
1307
  }
174
- function parsePnpmPackageKey(key) {
175
- let working = key.startsWith("/") ? key.slice(1) : key;
176
- const parenIdx = working.indexOf("(");
177
- if (parenIdx !== -1) working = working.slice(0, parenIdx);
178
- const startSearch = working.startsWith("@") ? 1 : 0;
179
- const atIdx = working.indexOf("@", startSearch);
180
- if (atIdx <= 0) return null;
181
- const name = working.slice(0, atIdx);
182
- const version = working.slice(atIdx + 1);
183
- if (!name || !version) return null;
184
- return { name, version };
1308
+ function retryAfterMs(headers) {
1309
+ const value = headers.get("retry-after");
1310
+ if (!value) return void 0;
1311
+ const seconds = Number(value);
1312
+ if (Number.isFinite(seconds) && seconds >= 0) return seconds * 1e3;
1313
+ const date = Date.parse(value);
1314
+ if (Number.isNaN(date)) return void 0;
1315
+ return Math.max(0, date - Date.now());
185
1316
  }
186
-
187
- // src/extractors/yarn-lock.ts
188
- import { existsSync, readFileSync as readFileSync3 } from "fs";
189
- import { dirname, join, resolve as resolve3 } from "path";
190
- import yaml2 from "js-yaml";
191
- function parseYarnLock(filePath) {
192
- const absolute = resolve3(filePath);
193
- const content = readFileSync3(absolute, "utf8");
194
- const projectDir = dirname(absolute);
195
- const directs = readDirectDepsFromPackageJson(projectDir);
196
- const isBerry = /^__metadata:/m.test(content);
197
- return isBerry ? parseBerry(absolute, content, directs) : parseClassic(absolute, content, directs);
198
- }
199
- function readDirectDepsFromPackageJson(projectDir) {
200
- const result = {
201
- prod: /* @__PURE__ */ new Set(),
202
- dev: /* @__PURE__ */ new Set(),
203
- optional: /* @__PURE__ */ new Set(),
204
- any: /* @__PURE__ */ new Set()
205
- };
206
- const pkgPath = join(projectDir, "package.json");
207
- if (!existsSync(pkgPath)) return result;
208
- try {
209
- const pkg = JSON.parse(readFileSync3(pkgPath, "utf8"));
210
- for (const name of Object.keys(pkg.dependencies ?? {})) result.prod.add(name);
211
- for (const name of Object.keys(pkg.devDependencies ?? {})) result.dev.add(name);
212
- for (const name of Object.keys(pkg.optionalDependencies ?? {}))
213
- result.optional.add(name);
214
- for (const set of [result.prod, result.dev, result.optional]) {
215
- for (const n of set) result.any.add(n);
216
- }
217
- for (const name of Object.keys(pkg.peerDependencies ?? {})) result.any.add(name);
218
- } catch {
219
- }
220
- return result;
1317
+ function sleep(ms) {
1318
+ return new Promise((resolve11) => setTimeout(resolve11, ms));
221
1319
  }
222
- function parseClassic(absolute, content, directs) {
223
- const entries = parseClassicEntries(content);
224
- const instances = [];
225
- for (const entry of entries) {
226
- const version = entry.fields.version;
227
- if (!version) continue;
228
- const names = uniq(entry.specs.map((s) => parseYarnSpec(s).name).filter(Boolean));
229
- const name = names[0];
230
- if (!name) continue;
231
- const isDirect = names.some((n) => directs.any.has(n));
232
- const inProd = names.some((n) => directs.prod.has(n));
233
- const inDev = names.some((n) => directs.dev.has(n));
234
- const inOpt = names.some((n) => directs.optional.has(n));
235
- instances.push({
236
- name,
237
- version,
238
- ecosystem: "npm",
239
- path: `${name}@${version}`,
240
- direct: isDirect,
241
- dev: inDev && !inProd,
242
- optional: inOpt && !inProd && !inDev,
243
- resolved: entry.fields.resolved,
244
- integrity: entry.fields.integrity
245
- });
246
- }
247
- void absolute;
248
- return instances;
1320
+ function isAllowedRegistry(registry, allowed) {
1321
+ const normalizedAllowed = allowed.map(normalizeRegistry).filter(isString);
1322
+ return normalizedAllowed.includes(registry);
249
1323
  }
250
- function parseClassicEntries(content) {
251
- const lines = content.split(/\r?\n/);
252
- const entries = [];
253
- let current = null;
254
- for (const rawLine of lines) {
255
- if (rawLine === "" || rawLine.trimStart().startsWith("#")) continue;
256
- if (!/^\s/.test(rawLine)) {
257
- if (current) entries.push(current);
258
- const header = rawLine.replace(/:\s*$/, "");
259
- current = { specs: splitClassicSpecs(header), fields: {} };
260
- continue;
261
- }
262
- if (!current) continue;
263
- const indent = rawLine.match(/^ +/)?.[0].length ?? 0;
264
- if (indent !== 2) continue;
265
- const trimmed = rawLine.trim();
266
- const m = trimmed.match(/^([^\s"]+)\s+"((?:[^"\\]|\\.)*)"$/) ?? trimmed.match(/^([^\s"]+)\s+(\S+)$/);
267
- if (m && m[1] !== void 0 && m[2] !== void 0) {
268
- current.fields[m[1]] = m[2];
269
- }
1324
+ function normalizeRegistry(value) {
1325
+ if (!value) return void 0;
1326
+ try {
1327
+ const url = new URL(value);
1328
+ return `${url.protocol}//${url.host}`;
1329
+ } catch {
1330
+ return value.replace(/\/+$/, "");
270
1331
  }
271
- if (current) entries.push(current);
272
- return entries;
273
1332
  }
274
- function splitClassicSpecs(header) {
275
- const out = [];
276
- let cur = "";
277
- let inQuote = false;
278
- for (const ch of header) {
279
- if (ch === '"') {
280
- inQuote = !inQuote;
281
- continue;
282
- }
283
- if (ch === "," && !inQuote) {
284
- const spec = cur.trim();
285
- if (spec) out.push(spec);
286
- cur = "";
287
- continue;
1333
+ function encodePackageName(name) {
1334
+ if (name.startsWith("@")) {
1335
+ const slash = name.indexOf("/");
1336
+ if (slash !== -1) {
1337
+ return `${encodeURIComponent(name.slice(0, slash))}%2F${encodeURIComponent(name.slice(slash + 1))}`;
288
1338
  }
289
- cur += ch;
290
1339
  }
291
- const last = cur.trim();
292
- if (last) out.push(last);
293
- return out;
1340
+ return encodeURIComponent(name);
294
1341
  }
295
- function parseBerry(absolute, content, directs) {
296
- let parsed;
297
- try {
298
- parsed = yaml2.load(content) ?? {};
299
- } catch (err) {
300
- throw new Error(
301
- `Failed to parse ${absolute}: ${err.message}`
302
- );
303
- }
304
- const instances = [];
305
- for (const [key, value] of Object.entries(parsed)) {
306
- if (key === "__metadata") continue;
307
- if (!value || typeof value !== "object") continue;
308
- const entry = value;
309
- if (!entry.version) continue;
310
- const specs = splitClassicSpecs(key);
311
- const names = uniq(specs.map((s) => parseYarnSpec(s).name).filter(Boolean));
312
- const name = names[0];
313
- if (!name) continue;
314
- const isDirect = names.some((n) => directs.any.has(n));
315
- const inProd = names.some((n) => directs.prod.has(n));
316
- const inDev = names.some((n) => directs.dev.has(n));
317
- const inOpt = names.some((n) => directs.optional.has(n));
318
- instances.push({
319
- name,
320
- version: entry.version,
321
- ecosystem: "npm",
322
- path: `${name}@${entry.version}`,
323
- direct: isDirect,
324
- dev: inDev && !inProd,
325
- optional: inOpt && !inProd && !inDev,
326
- resolved: entry.resolution,
327
- integrity: entry.checksum
328
- });
329
- }
330
- return instances;
1342
+ function parseDate(value) {
1343
+ if (!value) return void 0;
1344
+ const date = new Date(value);
1345
+ return Number.isNaN(date.getTime()) ? void 0 : date;
331
1346
  }
332
- function parseYarnSpec(spec) {
333
- const startSearch = spec.startsWith("@") ? 1 : 0;
334
- const atIdx = spec.indexOf("@", startSearch);
335
- if (atIdx <= 0) return { name: spec, selector: "" };
336
- const name = spec.slice(0, atIdx);
337
- let selector = spec.slice(atIdx + 1);
338
- if (selector.startsWith("npm:")) selector = selector.slice(4);
339
- return { name, selector };
1347
+ function daysBetween(a, b) {
1348
+ return (b.getTime() - a.getTime()) / 864e5;
340
1349
  }
341
- function uniq(values) {
342
- return Array.from(new Set(values));
1350
+ function isString(value) {
1351
+ return typeof value === "string";
343
1352
  }
344
1353
 
345
1354
  // src/sources/osv.ts
346
1355
  var OSV_QUERYBATCH_URL = "https://api.osv.dev/v1/querybatch";
347
1356
  var OSV_VULN_URL = "https://api.osv.dev/v1/vulns";
348
1357
  var QUERY_CHUNK_SIZE = 500;
349
- var REQUEST_TIMEOUT_MS = 15e3;
1358
+ var REQUEST_TIMEOUT_MS2 = 15e3;
350
1359
  var MAX_RETRIES = 2;
1360
+ var DETAIL_CONCURRENCY = 8;
351
1361
  function dedupeForQuery(packages) {
352
1362
  const seen = /* @__PURE__ */ new Set();
353
1363
  const out = [];
354
1364
  for (const pkg of packages) {
355
- const key = `${pkg.name}@${pkg.version}`;
1365
+ const key = packageKey(pkg);
356
1366
  if (seen.has(key)) continue;
357
1367
  seen.add(key);
358
- out.push({ name: pkg.name, version: pkg.version });
1368
+ if (pkg.purl) out.push({ name: pkg.name, version: pkg.version, purl: pkg.purl });
1369
+ else if (pkg.ecosystem === "npm") out.push({ name: pkg.name, version: pkg.version });
1370
+ else {
1371
+ out.push({
1372
+ name: pkg.name,
1373
+ version: pkg.version,
1374
+ ecosystem: pkg.ecosystem
1375
+ });
1376
+ }
359
1377
  }
360
1378
  return out;
361
1379
  }
@@ -365,33 +1383,14 @@ async function queryOsv(packages, deps = {}) {
365
1383
  if (unique.length === 0) return [];
366
1384
  const idsByPackage = /* @__PURE__ */ new Map();
367
1385
  for (const chunk of chunked(unique, QUERY_CHUNK_SIZE)) {
368
- const body = {
369
- queries: chunk.map((q) => ({
370
- package: { ecosystem: "npm", name: q.name },
371
- version: q.version
372
- }))
373
- };
374
- const res = await postJson(
375
- fetchImpl,
376
- OSV_QUERYBATCH_URL,
377
- body
378
- );
379
- res.results.forEach((result, i) => {
380
- const q = chunk[i];
381
- if (!q) return;
382
- const key = `${q.name}@${q.version}`;
383
- if (!result.vulns || result.vulns.length === 0) return;
384
- const ids = idsByPackage.get(key) ?? /* @__PURE__ */ new Set();
385
- for (const v of result.vulns) ids.add(v.id);
386
- idsByPackage.set(key, ids);
387
- });
1386
+ await queryBatchWithPagination(fetchImpl, chunk, idsByPackage);
388
1387
  }
389
1388
  const allIds = /* @__PURE__ */ new Set();
390
1389
  for (const ids of idsByPackage.values()) {
391
1390
  for (const id of ids) allIds.add(id);
392
1391
  }
393
1392
  const detailsById = /* @__PURE__ */ new Map();
394
- for (const id of allIds) {
1393
+ await mapWithConcurrency2([...allIds], DETAIL_CONCURRENCY, async (id) => {
395
1394
  try {
396
1395
  const detail = await getJson(
397
1396
  fetchImpl,
@@ -400,10 +1399,10 @@ async function queryOsv(packages, deps = {}) {
400
1399
  detailsById.set(id, detail);
401
1400
  } catch {
402
1401
  }
403
- }
1402
+ });
404
1403
  const findings = [];
405
1404
  for (const pkg of packages) {
406
- const key = `${pkg.name}@${pkg.version}`;
1405
+ const key = packageKey(pkg);
407
1406
  const ids = idsByPackage.get(key);
408
1407
  if (!ids) continue;
409
1408
  for (const id of ids) {
@@ -413,28 +1412,88 @@ async function queryOsv(packages, deps = {}) {
413
1412
  }
414
1413
  return findings;
415
1414
  }
1415
+ async function queryBatchWithPagination(fetchImpl, initial, idsByPackage) {
1416
+ let pending = initial;
1417
+ const pageTokens = /* @__PURE__ */ new Map();
1418
+ while (pending.length > 0) {
1419
+ const res = await postJson(
1420
+ fetchImpl,
1421
+ OSV_QUERYBATCH_URL,
1422
+ { queries: pending.map((q) => toOsvQuery(q, pageTokens.get(queryKey(q)))) }
1423
+ );
1424
+ const next = [];
1425
+ res.results.forEach((result, i) => {
1426
+ const q = pending[i];
1427
+ if (!q) return;
1428
+ const key = queryKey(q);
1429
+ if (result.vulns && result.vulns.length > 0) {
1430
+ const ids = idsByPackage.get(key) ?? /* @__PURE__ */ new Set();
1431
+ for (const v of result.vulns) ids.add(v.id);
1432
+ idsByPackage.set(key, ids);
1433
+ }
1434
+ if (result.next_page_token) {
1435
+ pageTokens.set(key, result.next_page_token);
1436
+ next.push(q);
1437
+ } else {
1438
+ pageTokens.delete(key);
1439
+ }
1440
+ });
1441
+ pending = next;
1442
+ }
1443
+ }
1444
+ function toOsvQuery(q, pageToken) {
1445
+ const query = q.purl ? { package: { purl: q.purl } } : {
1446
+ package: { ecosystem: q.ecosystem ?? "npm", name: q.name },
1447
+ version: q.version
1448
+ };
1449
+ return pageToken ? { ...query, page_token: pageToken } : query;
1450
+ }
1451
+ function queryKey(q) {
1452
+ return q.purl ?? `${q.ecosystem ?? "npm"}:${q.name}@${q.version}`;
1453
+ }
416
1454
  function buildFinding(pkg, id, detail) {
417
- const severity = detail ? parseSeverity(detail) : "unknown";
1455
+ const severity = detail ? parseSeverity(detail, pkg.name) : "unknown";
418
1456
  const summary = detail?.summary ?? detail?.details ?? id;
1457
+ const aliases = detail?.aliases ?? [];
1458
+ const fingerprint = fingerprintFinding({
1459
+ source: "osv",
1460
+ type: "vulnerability",
1461
+ id,
1462
+ ecosystem: pkg.ecosystem,
1463
+ packageName: pkg.name,
1464
+ installedVersion: pkg.version
1465
+ });
419
1466
  return {
420
1467
  id,
421
1468
  source: "osv",
422
1469
  type: "vulnerability",
423
1470
  severity,
1471
+ ecosystem: pkg.ecosystem,
424
1472
  packageName: pkg.name,
425
1473
  installedVersion: pkg.version,
426
1474
  summary: truncate(summary, 240),
427
1475
  url: pickAdvisoryUrl(detail) ?? `https://osv.dev/vulnerability/${id}`,
428
1476
  fixedVersions: detail ? collectFixedVersions(detail, pkg.name) : [],
429
- affectedPaths: [pkg.path]
1477
+ affectedPaths: [pkg.path],
1478
+ fingerprint,
1479
+ aliases,
1480
+ sourceFile: pkg.sourceFile,
1481
+ line: pkg.line
430
1482
  };
431
1483
  }
432
- function parseSeverity(detail) {
1484
+ function parseSeverity(detail, packageName) {
433
1485
  const dbSpecific = detail.database_specific?.severity?.toLowerCase();
434
1486
  if (dbSpecific === "critical" || dbSpecific === "high" || dbSpecific === "moderate" || dbSpecific === "low") {
435
1487
  return dbSpecific;
436
1488
  }
437
1489
  if (dbSpecific === "medium") return "moderate";
1490
+ for (const aff of matchingAffected(detail, packageName)) {
1491
+ const ecosystemSeverity = aff.ecosystem_specific?.severity?.toLowerCase();
1492
+ if (ecosystemSeverity === "critical" || ecosystemSeverity === "high" || ecosystemSeverity === "moderate" || ecosystemSeverity === "low") {
1493
+ return ecosystemSeverity;
1494
+ }
1495
+ if (ecosystemSeverity === "medium") return "moderate";
1496
+ }
438
1497
  const cvss = detail.severity?.find((s) => s.type?.startsWith("CVSS_"));
439
1498
  if (cvss) {
440
1499
  const score = parseCvssScore(cvss.score);
@@ -458,8 +1517,7 @@ function pickAdvisoryUrl(detail) {
458
1517
  }
459
1518
  function collectFixedVersions(detail, packageName) {
460
1519
  const out = /* @__PURE__ */ new Set();
461
- for (const aff of detail.affected ?? []) {
462
- if (aff.package?.name && aff.package.name !== packageName) continue;
1520
+ for (const aff of matchingAffected(detail, packageName)) {
463
1521
  for (const range of aff.ranges ?? []) {
464
1522
  for (const event of range.events ?? []) {
465
1523
  if (event.fixed) out.add(event.fixed);
@@ -468,15 +1526,35 @@ function collectFixedVersions(detail, packageName) {
468
1526
  }
469
1527
  return [...out];
470
1528
  }
1529
+ function matchingAffected(detail, packageName) {
1530
+ if (!packageName) return detail.affected ?? [];
1531
+ return (detail.affected ?? []).filter((aff) => {
1532
+ const affectedName = aff.package?.name;
1533
+ return !affectedName || affectedName === packageName;
1534
+ });
1535
+ }
471
1536
  function* chunked(items, size) {
472
1537
  for (let i = 0; i < items.length; i += size) {
473
1538
  yield items.slice(i, i + size);
474
1539
  }
475
1540
  }
1541
+ async function mapWithConcurrency2(items, concurrency, worker) {
1542
+ let next = 0;
1543
+ const workers = Array.from(
1544
+ { length: Math.min(concurrency, items.length) },
1545
+ async () => {
1546
+ while (next < items.length) {
1547
+ const item = items[next++];
1548
+ if (item !== void 0) await worker(item);
1549
+ }
1550
+ }
1551
+ );
1552
+ await Promise.all(workers);
1553
+ }
476
1554
  async function postJson(fetchImpl, url, body) {
477
1555
  return withRetry(async () => {
478
1556
  const controller = new AbortController();
479
- const timer = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS);
1557
+ const timer = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS2);
480
1558
  try {
481
1559
  const res = await fetchImpl(url, {
482
1560
  method: "POST",
@@ -485,7 +1563,11 @@ async function postJson(fetchImpl, url, body) {
485
1563
  signal: controller.signal
486
1564
  });
487
1565
  if (!res.ok) {
488
- throw new HttpError(`OSV ${res.status}: ${res.statusText}`, res.status);
1566
+ throw new HttpError(
1567
+ `OSV ${res.status}: ${res.statusText}`,
1568
+ res.status,
1569
+ retryAfterMs2(res.headers)
1570
+ );
489
1571
  }
490
1572
  return await res.json();
491
1573
  } finally {
@@ -496,11 +1578,15 @@ async function postJson(fetchImpl, url, body) {
496
1578
  async function getJson(fetchImpl, url) {
497
1579
  return withRetry(async () => {
498
1580
  const controller = new AbortController();
499
- const timer = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS);
1581
+ const timer = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS2);
500
1582
  try {
501
1583
  const res = await fetchImpl(url, { signal: controller.signal });
502
1584
  if (!res.ok) {
503
- throw new HttpError(`OSV ${res.status}: ${res.statusText}`, res.status);
1585
+ throw new HttpError(
1586
+ `OSV ${res.status}: ${res.statusText}`,
1587
+ res.status,
1588
+ retryAfterMs2(res.headers)
1589
+ );
504
1590
  }
505
1591
  return await res.json();
506
1592
  } finally {
@@ -509,11 +1595,13 @@ async function getJson(fetchImpl, url) {
509
1595
  });
510
1596
  }
511
1597
  var HttpError = class extends Error {
512
- constructor(message, status) {
1598
+ constructor(message, status, retryAfterMs3) {
513
1599
  super(message);
514
1600
  this.status = status;
1601
+ this.retryAfterMs = retryAfterMs3;
515
1602
  }
516
1603
  status;
1604
+ retryAfterMs;
517
1605
  };
518
1606
  async function withRetry(fn) {
519
1607
  let lastErr;
@@ -523,15 +1611,25 @@ async function withRetry(fn) {
523
1611
  } catch (err) {
524
1612
  lastErr = err;
525
1613
  if (!isRetryable(err) || attempt === MAX_RETRIES) break;
526
- await new Promise((r) => setTimeout(r, 250 * 2 ** attempt));
1614
+ const delay = err instanceof HttpError && err.retryAfterMs !== void 0 ? err.retryAfterMs : 250 * 2 ** attempt;
1615
+ await new Promise((r) => setTimeout(r, delay));
527
1616
  }
528
1617
  }
529
1618
  throw lastErr;
530
1619
  }
531
1620
// Decides whether a failed OSV request should be retried. HTTP errors are
// retryable only for 429 (rate limited) and 5xx (server-side) statuses;
// any other error (network failure, abort, parse error) is always retried.
function isRetryable(err) {
  if (!(err instanceof HttpError)) return true;
  const { status } = err;
  return status === 429 || status >= 500;
}
1624
// Parses an HTTP Retry-After response header into a delay in milliseconds.
// Both forms of the header are supported: a non-negative delta in seconds,
// or an HTTP-date (the delay is then the time remaining until that date,
// clamped at zero). Returns undefined when the header is absent or cannot
// be parsed either way.
function retryAfterMs2(headers) {
  const raw = headers.get("retry-after");
  if (!raw) return void 0;
  const asSeconds = Number(raw);
  if (Number.isFinite(asSeconds) && asSeconds >= 0) {
    return asSeconds * 1e3;
  }
  const asDate = Date.parse(raw);
  if (Number.isNaN(asDate)) return void 0;
  return Math.max(0, asDate - Date.now());
}
535
1633
// Clips `s` to at most `max` characters; when it does not fit, the final
// character of the budget is replaced with a single ellipsis (U+2026).
function truncate(s, max) {
  return s.length <= max ? s : `${s.slice(0, max - 1)}\u2026`;
}
@@ -547,80 +1645,125 @@ var SEVERITY_RANK = {
547
1645
  };
548
1646
 
549
1647
  // src/scanner.ts
1648
// Registry origins treated as trustworthy for the risk-signal check when
// neither options.allowedRegistries nor the loaded config overrides them
// (see scanLockfile, which falls back to this list).
var DEFAULT_ALLOWED_REGISTRIES = [
  "https://registry.npmjs.org",
  "https://registry.yarnpkg.com"
];
550
1652
/**
 * High-level scan entry point. Resolves the working directory, loads the
 * project config and policy preset, discovers lockfiles/SBOMs when none
 * were passed explicitly, and delegates to scanLockfile().
 *
 * Option precedence throughout is: explicit option > loaded config >
 * policy preset > hard default.
 *
 * @param {object} [options] - cwd, lockfile, sbom, config, policy,
 *   baseline, writeBaseline, risk, env, allowedRegistries, includeDev,
 *   prodOnly, fetchImpl, now.
 * @returns {Promise<object>} the scan result from scanLockfile().
 * @throws {ScanInputError} when no lockfile or SBOM is found and the env
 *   scan is disabled (i.e. there is nothing at all to scan).
 */
async function scanProject(options = {}) {
  const cwd = resolve7(options.cwd ?? process.cwd());
  // NOTE(review): loadConfig/resolvePolicy are defined elsewhere in this
  // bundle; presumably they return `{ config }` and a policy preset object.
  const loadedConfig = loadConfig(cwd, options.config);
  const policy = resolvePolicy(options.policy, loadedConfig.config.policy);
  // Explicit --lockfile wins; otherwise auto-detect lockfiles in cwd.
  const lockfilePaths = options.lockfile ? normalizePaths(cwd, options.lockfile) : detectLockfiles(cwd);
  const sbomPaths = normalizePaths(cwd, options.sbom);
  const envEnabled = options.env ?? loadedConfig.config.env ?? policy?.env ?? false;
  // With the env scan disabled, an empty input set means there is nothing
  // to do — fail fast with an actionable message.
  if (lockfilePaths.length === 0 && sbomPaths.length === 0 && !envEnabled) {
    throw new ScanInputError(
      `No supported lockfile or SBOM found in ${cwd}. Pass --lockfile/--sbom or run in a directory with package-lock.json, pnpm-lock.yaml, or yarn.lock.`
    );
  }
  // Raw options.config/options.policy are forwarded so scanLockfile can
  // re-resolve them against the (possibly re-derived) cwd.
  return scanLockfile({
    lockfilePath: lockfilePaths,
    sbom: sbomPaths,
    cwd,
    config: options.config,
    policy: options.policy,
    baseline: options.baseline,
    writeBaseline: options.writeBaseline,
    risk: options.risk ?? loadedConfig.config.risk ?? policy?.risk,
    env: envEnabled,
    allowedRegistries: options.allowedRegistries ?? loadedConfig.config.allowedRegistries,
    includeDev: options.includeDev ?? policy?.includeDev,
    prodOnly: options.prodOnly,
    fetchImpl: options.fetchImpl,
    now: options.now
  });
}
565
1681
/**
 * Core scan pipeline: parse inputs, filter instances, gather findings from
 * the env scan / OSV / risk signals, apply config ignores, then apply or
 * write the baseline. Network and risk failures are collected into
 * `errors`/`warnings` rather than thrown, so a partial result is returned.
 *
 * @param {object} options - lockfilePath (string|string[]), sbom, cwd,
 *   config, policy, baseline, writeBaseline, risk, env, allowedRegistries,
 *   includeDev, prodOnly, fetchImpl, now.
 * @returns {Promise<object>} scan report: scannedAt, packagesScanned,
 *   findings, ignoredFindings, summary, errors, warnings, baseline.
 * @throws {ScanInputError} via validateFile() for missing/non-file inputs.
 */
async function scanLockfile(options) {
  const initialCwd = resolve7(options.cwd ?? process.cwd());
  const lockfilePaths = normalizePaths(initialCwd, options.lockfilePath);
  const sbomPaths = normalizePaths(initialCwd, options.sbom);
  // When cwd wasn't given, anchor config/baseline/env lookups next to the
  // first input file rather than wherever the process happens to run.
  const cwd = options.cwd ? initialCwd : deriveCwdFromInputs(lockfilePaths, sbomPaths, initialCwd);
  // Single clock reading reused for risk signals, ignores, and scannedAt,
  // keeping the report internally consistent (and injectable for tests).
  const now = options.now ?? /* @__PURE__ */ new Date();
  const loadedConfig = loadConfig(cwd, options.config);
  const policy = resolvePolicy(options.policy, loadedConfig.config.policy);
  const envEnabled = options.env ?? loadedConfig.config.env ?? policy?.env ?? false;
  // Validate every input up front so we fail before any parsing work.
  for (const path of [...lockfilePaths, ...sbomPaths]) validateFile(path);
  const allInstances = [
    ...lockfilePaths.flatMap((path) => parseLockfile(path)),
    ...sbomPaths.flatMap((path) => parseSbom(path))
  ];
  const instances = filterInstances(allInstances, {
    ...options,
    includeDev: options.includeDev ?? policy?.includeDev
  });
  const errors = [];
  const warnings = [];
  const envResult = envEnabled ? scanEnvFiles(cwd) : { findings: [], warnings: [], filesScanned: 0 };
  warnings.push(...envResult.warnings);
  let findings = [...envResult.findings];
  try {
    findings.push(...await queryOsv(instances, { fetchImpl: options.fetchImpl }));
  } catch (err) {
    // OSV being unreachable degrades the scan but must not abort it.
    errors.push({
      message: "Failed to query OSV advisory database",
      cause: err.message
    });
  }
  // Risk signals default ON unless explicitly disabled somewhere.
  const riskEnabled = options.risk ?? loadedConfig.config.risk ?? policy?.risk ?? true;
  const risk = await collectRiskSignals(instances, {
    enabled: riskEnabled,
    allowedRegistries: options.allowedRegistries ?? loadedConfig.config.allowedRegistries ?? DEFAULT_ALLOWED_REGISTRIES,
    fetchImpl: options.fetchImpl,
    now
  });
  findings.push(...risk.findings);
  warnings.push(...risk.warnings);
  // Config [[ignore]] entries split findings into active vs. ignored.
  const ignoreResult = applyIgnores(
    findings,
    loadedConfig.config.ignore,
    now
  );
  warnings.push(...ignoreResult.warnings);
  findings = ignoreResult.active;
  findings.sort(compareFindings);
  ignoreResult.ignored.sort(compareFindings);
  // Baseline marking happens after sorting/ignoring so only surviving
  // findings are classified as existing/new.
  const appliedBaseline = applyBaseline(findings, cwd, options.baseline);
  let baseline = appliedBaseline?.result;
  if (appliedBaseline) {
    findings = appliedBaseline.findings;
  }
  if (options.writeBaseline) {
    baseline = writeBaseline(findings, cwd, options.writeBaseline, baseline);
  }
  return {
    scannedAt: now.toISOString(),
    packagesScanned: instances.length,
    findings,
    ignoredFindings: ignoreResult.ignored,
    summary: summarize(findings),
    errors,
    warnings,
    baseline
  };
}
1749
// Chooses an effective working directory when the caller did not supply
// one: the directory containing the first lockfile (or, failing that, the
// first SBOM). With no input files at all, `fallback` is returned.
function deriveCwdFromInputs(lockfilePaths, sbomPaths, fallback) {
  const first = lockfilePaths[0] ?? sbomPaths[0];
  if (!first) return fallback;
  return dirname3(first);
}
595
1753
  var ScanInputError = class extends Error {
596
1754
  constructor(message) {
597
1755
  super(message);
598
1756
  this.name = "ScanInputError";
599
1757
  }
600
1758
  };
601
- var LOCKFILE_CANDIDATES = [
602
- "pnpm-lock.yaml",
603
- "yarn.lock",
604
- "package-lock.json",
605
- "npm-shrinkwrap.json"
606
- ];
607
- function detectLockfile(cwd) {
608
- for (const file of LOCKFILE_CANDIDATES) {
609
- const candidate = join2(cwd, file);
610
- if (existsSync2(candidate)) return candidate;
611
- }
612
- return void 0;
613
- }
614
- function parseLockfile(lockfilePath) {
615
- const name = basename(lockfilePath);
616
- if (name === "pnpm-lock.yaml") return parsePnpmLock(lockfilePath);
617
- if (name === "yarn.lock") return parseYarnLock(lockfilePath);
618
- if (name === "package-lock.json" || name === "npm-shrinkwrap.json") {
619
- return parseNpmPackageLock(lockfilePath);
620
- }
621
- throw new ScanInputError(
622
- `Unsupported lockfile name: ${name}. Supported: package-lock.json, npm-shrinkwrap.json, pnpm-lock.yaml, yarn.lock.`
623
- );
1759
// Returns the absolute path of every supported lockfile present directly
// in `cwd`, in a fixed priority order (npm lock, shrinkwrap, pnpm, yarn).
// All matches are returned — multi-lockfile projects are scanned in full.
function detectLockfiles(cwd) {
  const names = [
    "package-lock.json",
    "npm-shrinkwrap.json",
    "pnpm-lock.yaml",
    "yarn.lock"
  ];
  const found = [];
  for (const name of names) {
    const fullPath = join4(cwd, name);
    if (existsSync4(fullPath)) found.push(fullPath);
  }
  return found;
}
625
1768
  function filterInstances(instances, options) {
626
1769
  const includeDev = options.prodOnly ? false : options.includeDev !== false;
@@ -630,6 +1773,7 @@ function filterInstances(instances, options) {
630
1773
  function compareFindings(a, b) {
631
1774
  const sev = SEVERITY_RANK[b.severity] - SEVERITY_RANK[a.severity];
632
1775
  if (sev !== 0) return sev;
1776
+ if (a.source !== b.source) return a.source.localeCompare(b.source);
633
1777
  if (a.packageName !== b.packageName) {
634
1778
  return a.packageName.localeCompare(b.packageName);
635
1779
  }
@@ -652,20 +1796,603 @@ function summarize(findings) {
652
1796
// True when at least one NEW finding (i.e. not suppressed as "existing"
// by a baseline) sits at or above the given severity threshold. The
// special threshold "none" disables failing entirely.
function meetsThreshold(findings, threshold) {
  if (threshold === "none") return false;
  const floor = SEVERITY_RANK[threshold];
  for (const finding of findings) {
    if (finding.baseline === "existing") continue;
    if (SEVERITY_RANK[finding.severity] >= floor) return true;
  }
  return false;
}
1803
// Normalizes a path option (a single string, an array of strings, or
// nothing) into a deduplicated list of absolute paths resolved against
// `cwd`, preserving first-seen order.
function normalizePaths(cwd, value) {
  if (!value) return [];
  const list = Array.isArray(value) ? value : [value];
  const seen = new Set();
  for (const entry of list) {
    seen.add(resolve7(cwd, entry));
  }
  return [...seen];
}
1808
// Asserts that `path` names an existing regular file; throws a
// ScanInputError with a path-specific message otherwise.
function validateFile(path) {
  if (!existsSync4(path)) {
    throw new ScanInputError(`Input file does not exist: ${path}`);
  }
  if (!statSync2(path).isFile()) {
    throw new ScanInputError(`Input path is not a file: ${path}`);
  }
}
1817
+
1818
+ // src/env-scan.ts
1819
+ import { spawn } from "child_process";
1820
+ import { existsSync as existsSync5, readdirSync as readdirSync2, readFileSync as readFileSync9 } from "fs";
1821
+ import { join as join5, relative as relative2, resolve as resolve8, sep } from "path";
1822
// Directories never descended into while discovering .env files:
// dependency trees, VCS metadata, and common build/cache output folders.
var DEFAULT_SKIP_DIRS = /* @__PURE__ */ new Set([
  "node_modules",
  ".git",
  "dist",
  "build",
  ".next",
  ".nuxt",
  ".svelte-kit",
  ".turbo",
  ".cache",
  "coverage",
  "out",
  ".vercel",
  ".output"
]);
// Full-basename match for template env files like ".env.example" or
// ".env.production.sample" (any dot segments plus an example-ish suffix).
var EXAMPLE_NAME = /^\.env(\.[^.]+)*\.(example|sample|template|dist)$/i;
// Looser check applied to just the final dot segment of the basename
// (note: intentionally narrower than EXAMPLE_NAME — no "dist" here).
var EXAMPLE_SUFFIX_NAME = /(\.|^)(example|sample|template)$/i;
1839
/**
 * Standalone env-file audit: discovers .env files under `cwd`, then flags
 * files tracked by git, files not covered by .gitignore, files that would
 * end up in a published npm tarball, and real-looking secrets inside
 * example/template files.
 *
 * @param {object} [options] - cwd, skipDirs (string[] overriding
 *   DEFAULT_SKIP_DIRS), maxDepth (default 6).
 * @returns {Promise<object>} { scannedAt, cwd, envFiles, issues, summary,
 *   errors } — issues sorted by severity/file/kind.
 */
async function scanEnv(options = {}) {
  const cwd = resolve8(options.cwd ?? process.cwd());
  const skipDirs = options.skipDirs ? new Set(options.skipDirs) : DEFAULT_SKIP_DIRS;
  const maxDepth = options.maxDepth ?? 6;
  const errors = [];
  const envFiles = discoverEnvFiles(cwd, skipDirs, maxDepth);
  const inGit = isGitRepo(cwd);
  const gitignorePath = join5(cwd, ".gitignore");
  const hasGitignore = existsSync5(gitignorePath);
  const exampleFiles = envFiles.filter(isExampleFile);
  const realEnvFiles = envFiles.filter((f) => !isExampleFile(f));
  // The four checks are independent, so they run concurrently. Outside a
  // git repo the git-based checks are replaced with cheap local fallbacks.
  // A failure in any check degrades to empty results rather than throwing.
  const [trackedSet, ignoredMap, publishCheck, exampleSecrets] = await Promise.all([
    inGit ? gitTracked(cwd, envFiles) : Promise.resolve(/* @__PURE__ */ new Set()),
    inGit ? gitCheckIgnore(cwd, envFiles) : Promise.resolve(fallbackIgnoreMap(cwd, envFiles)),
    checkPublishExposure(cwd, realEnvFiles),
    scanExampleFilesForSecrets(cwd, exampleFiles)
  ]).catch((err) => {
    errors.push({
      message: "env scan: parallel checks failed",
      cause: err.message
    });
    return [
      /* @__PURE__ */ new Set(),
      /* @__PURE__ */ new Map(),
      [],
      []
    ];
  });
  const issues = [];
  if (envFiles.length > 0 && !hasGitignore) {
    issues.push({
      kind: "no-gitignore",
      severity: "moderate",
      file: ".gitignore",
      message: "Project has env files but no .gitignore.",
      detail: "Add a .gitignore that includes .env and .env.* before committing."
    });
  }
  for (const file of realEnvFiles) {
    // Tracked-by-git is the worst case and supersedes the ignore check.
    if (trackedSet.has(file)) {
      issues.push({
        kind: "tracked-by-git",
        severity: "critical",
        file,
        message: `${file} is tracked by git.`,
        detail: "This file is committed to the repo. Run `git rm --cached` and rotate any secrets that were exposed."
      });
      continue;
    }
    // Only flag "not ignored" when a .gitignore actually exists and the
    // ignore map gave a definitive `false` for this file.
    const ignored = ignoredMap.get(file);
    if (hasGitignore && ignored === false) {
      issues.push({
        kind: "not-gitignored",
        severity: "high",
        file,
        message: `${file} exists but is not covered by .gitignore.`,
        detail: "Add a matching pattern (e.g. `.env*`) to .gitignore."
      });
    }
  }
  for (const f of publishCheck) {
    issues.push({
      kind: "would-be-published",
      severity: "critical",
      file: f.file,
      message: `${f.file} would be included in the published npm tarball.`,
      detail: f.reason
    });
  }
  for (const f of exampleSecrets) {
    issues.push({
      kind: "secret-in-example",
      severity: "high",
      file: f.file,
      message: `${f.file} appears to contain a real secret.`,
      detail: `Matched pattern: ${f.pattern} on key \`${f.key}\`. Example files should hold placeholder values only.`
    });
  }
  issues.sort(compareIssues);
  return {
    scannedAt: (/* @__PURE__ */ new Date()).toISOString(),
    cwd,
    envFiles,
    issues,
    summary: summarizeIssues(issues),
    errors
  };
}
1927
// Sort comparator for env issues: highest severity first, then by file
// path, then by issue kind (string ties broken with localeCompare).
function compareIssues(a, b) {
  const rank = {
    critical: 4,
    high: 3,
    moderate: 2,
    low: 1,
    unknown: 0
  };
  const bySeverity = rank[b.severity] - rank[a.severity];
  if (bySeverity !== 0) return bySeverity;
  const byFile = a.file === b.file ? 0 : a.file.localeCompare(b.file);
  if (byFile !== 0) return byFile;
  return a.kind.localeCompare(b.kind);
}
1940
// Tallies env issues into per-severity counts. Every severity key is
// present in the result (zeroed), so consumers need no existence checks.
function summarizeIssues(issues) {
  const counts = {
    critical: 0,
    high: 0,
    moderate: 0,
    low: 0,
    unknown: 0
  };
  for (const issue of issues) {
    counts[issue.severity] += 1;
  }
  return counts;
}
1951
/**
 * Recursively finds env files under `cwd`, returning root-relative,
 * forward-slash paths in sorted order. Directories listed in `skipDirs`
 * or whose name starts with ".git" are not entered; recursion stops past
 * `maxDepth`. Unreadable directories are silently skipped (best-effort).
 */
function discoverEnvFiles(cwd, skipDirs, maxDepth) {
  const found = [];
  walk(cwd, cwd, 0);
  found.sort();
  return found;
  function walk(dir, root, depth) {
    if (depth > maxDepth) return;
    let entries;
    try {
      entries = readdirSync2(dir, { withFileTypes: true });
    } catch {
      // Permission errors etc. — skip the directory rather than fail.
      return;
    }
    for (const entry of entries) {
      if (entry.isDirectory()) {
        if (skipDirs.has(entry.name) || entry.name.startsWith(".git")) continue;
        walk(join5(dir, entry.name), root, depth + 1);
        continue;
      }
      // Symlinked env files count too; other entry types are ignored.
      if (!entry.isFile() && !entry.isSymbolicLink()) continue;
      if (isEnvFilename(entry.name)) {
        found.push(toPosix(relative2(root, join5(dir, entry.name))));
      }
    }
  }
}
1977
// True for ".env" itself and any ".env.<suffix>" variant. Other dotfiles
// such as ".envrc" do not qualify.
function isEnvFilename(name) {
  return name === ".env" || name.startsWith(".env.");
}
1982
// Decides whether a discovered env file is a template/example rather than
// a real secrets file, judging only by its basename: either the full
// ".env.*.example"-style pattern, or an example-ish final dot segment.
function isExampleFile(file) {
  const slashParts = file.split("/");
  const base = slashParts[slashParts.length - 1] ?? file;
  if (EXAMPLE_NAME.test(base)) return true;
  const dotParts = base.split(".");
  const finalSegment = dotParts[dotParts.length - 1] ?? "";
  return EXAMPLE_SUFFIX_NAME.test(finalSegment);
}
1989
// Converts a platform-native path to forward-slash form; a no-op on POSIX
// where the separator is already "/".
function toPosix(p) {
  if (sep === "/") return p;
  return p.split(sep).join("/");
}
1992
// Walks upward from `cwd` looking for a ".git" entry (file or directory),
// capped at 32 hops as a safety bound. Returns true if one is found
// before reaching the filesystem root.
function isGitRepo(cwd) {
  let current = cwd;
  for (let hop = 0; hop < 32; hop++) {
    if (existsSync5(join5(current, ".git"))) return true;
    const parent = resolve8(current, "..");
    if (parent === current) return false;
    current = parent;
  }
  return false;
}
2002
/**
 * Returns the subset of `files` that git currently tracks, as a Set of
 * posix-style paths. Falls back to an empty set when git is unavailable
 * or exits non-zero (best effort — absence of data means "not tracked").
 */
async function gitTracked(cwd, files) {
  if (files.length === 0) return /* @__PURE__ */ new Set();
  // -z gives NUL-separated output, immune to quoting/space issues.
  const { stdout, code } = await runGit(cwd, ["ls-files", "-z", "--", ...files]);
  if (code !== 0) return /* @__PURE__ */ new Set();
  const tracked = stdout.split("\0").filter(Boolean).map(toPosix);
  return new Set(tracked);
}
/**
 * Maps each file to whether git's ignore rules cover it, via
 * `git check-ignore --no-index -v -n -z --stdin`. With -v/-n/-z every
 * queried path is echoed back as a <source, linenum, pattern, pathname>
 * quadruple; a non-empty source means some ignore rule matched.
 * NOTE(review): exit code 1 just means "nothing matched" and is treated
 * as success; any other failure marks every file as not ignored.
 */
async function gitCheckIgnore(cwd, files) {
  const result = /* @__PURE__ */ new Map();
  if (files.length === 0) return result;
  const stdin = `${files.join("\0")}\0`;
  const { stdout, code } = await runGit(
    cwd,
    ["check-ignore", "--no-index", "-v", "-n", "-z", "--stdin"],
    stdin
  );
  if (code !== 0 && code !== 1) {
    for (const f of files) result.set(f, false);
    return result;
  }
  const parts = stdout.split("\0");
  for (let i = 0; i + 3 < parts.length; i += 4) {
    const source = parts[i];
    const pathname = toPosix(parts[i + 3] ?? "");
    if (!pathname) continue;
    result.set(pathname, source !== "");
  }
  // Anything git didn't echo back defaults to "not ignored".
  for (const f of files) {
    if (!result.has(f)) result.set(f, false);
  }
  return result;
}
/**
 * Non-git fallback for gitCheckIgnore: evaluates each file against the
 * patterns read from <cwd>/.gitignore using the local glob matcher.
 */
function fallbackIgnoreMap(cwd, files) {
  const result = /* @__PURE__ */ new Map();
  const patterns = readIgnoreFile(join5(cwd, ".gitignore"));
  for (const f of files) result.set(f, matchesAny(f, patterns));
  return result;
}
/**
 * Spawns `git` with the given args, optionally feeding `stdin`, and
 * resolves with { stdout, stderr, code }. Never rejects: spawn failures
 * (e.g. git not installed) resolve with code -1 so callers degrade
 * gracefully.
 */
function runGit(cwd, args, stdin) {
  return new Promise((resolveP) => {
    const child = spawn("git", args, {
      cwd,
      stdio: [stdin === void 0 ? "ignore" : "pipe", "pipe", "pipe"]
    });
    let stdout = "";
    let stderr = "";
    child.stdout?.on("data", (d) => {
      stdout += d.toString("utf8");
    });
    child.stderr?.on("data", (d) => {
      stderr += d.toString("utf8");
    });
    child.on("error", () => resolveP({ stdout, stderr, code: -1 }));
    child.on(
      "close",
      (code) => resolveP({ stdout, stderr, code: code ?? -1 })
    );
    if (stdin !== void 0 && child.stdin) {
      child.stdin.end(stdin);
    }
  });
}
2064
/**
 * Determines which env files would land in a published npm tarball,
 * mirroring npm's packing rules: a "files" allowlist in package.json wins
 * outright; otherwise .npmignore (or .gitignore as fallback) must exclude
 * the file. Private packages and projects without a parseable
 * package.json produce no findings.
 *
 * @returns {Promise<Array<{file: string, reason: string}>>}
 */
async function checkPublishExposure(cwd, envFiles) {
  if (envFiles.length === 0) return [];
  const pkgPath = join5(cwd, "package.json");
  if (!existsSync5(pkgPath)) return [];
  let pkg;
  try {
    pkg = JSON.parse(readFileSync9(pkgPath, "utf8"));
  } catch {
    // Unparseable manifest — cannot reason about publishing; stay silent.
    return [];
  }
  if (pkg.private === true) return [];
  const findings = [];
  // Allowlist mode: only paths matching "files" are published, so a match
  // IS the exposure. Ignore files are irrelevant in this mode.
  if (Array.isArray(pkg.files)) {
    const allowList = pkg.files.filter((x) => typeof x === "string");
    for (const file of envFiles) {
      if (matchesAny(file, allowList)) {
        findings.push({
          file,
          reason: `package.json "files" allowlist matches this path. Remove the entry or move the file.`
        });
      }
    }
    return findings;
  }
  // Ignore-file mode: .npmignore takes precedence over .gitignore,
  // matching npm's own behavior.
  const npmignorePath = join5(cwd, ".npmignore");
  const ignorePath = existsSync5(npmignorePath) ? npmignorePath : join5(cwd, ".gitignore");
  const ignoreSource = existsSync5(ignorePath) ? ignorePath === npmignorePath ? ".npmignore" : ".gitignore" : null;
  const patterns = ignoreSource ? readIgnoreFile(ignorePath) : [];
  for (const file of envFiles) {
    if (!matchesAny(file, patterns)) {
      findings.push({
        file,
        reason: ignoreSource ? `Not matched by any pattern in ${ignoreSource}. Add an entry like \`.env*\`.` : `No .npmignore or .gitignore present, so npm will include this file in the published tarball. Add an .npmignore.`
      });
    }
  }
  return findings;
}
2102
// Reads an ignore file (.gitignore / .npmignore) and returns its trimmed
// pattern lines, dropping blanks and "#" comments. A missing or
// unreadable file yields an empty pattern list.
function readIgnoreFile(path) {
  if (!existsSync5(path)) return [];
  let text;
  try {
    text = readFileSync9(path, "utf8");
  } catch {
    return [];
  }
  const patterns = [];
  for (const line of text.split(/\r?\n/)) {
    const trimmed = line.trim();
    if (trimmed.length === 0 || trimmed.startsWith("#")) continue;
    patterns.push(trimmed);
  }
  return patterns;
}
2110
// ---- minimal .gitignore-style pattern matching ------------------------
// Evaluates `file` against an ordered pattern list, honoring "!" negation
// as .gitignore does: the LAST matching pattern decides the outcome.
// Trailing "/" (directory markers) is stripped before matching. Returns
// true when the file ends up matched/ignored.
function matchesAny(file, patterns) {
  let outcome = false;
  for (const raw of patterns) {
    const negated = raw.startsWith("!");
    let body = negated ? raw.slice(1) : raw;
    if (body.endsWith("/")) body = body.slice(0, -1);
    if (matchesPattern(file, body)) outcome = !negated;
  }
  return outcome;
}
// Tests a single pattern against a file path. A leading "/" anchors the
// pattern to the root (full-path match only); a pattern containing "/"
// also matches the full path only; a bare pattern matches either the full
// path or the basename, mirroring .gitignore semantics.
function matchesPattern(file, pattern) {
  if (!pattern) return false;
  const rooted = pattern.startsWith("/");
  const body = rooted ? pattern.slice(1) : pattern;
  let candidates;
  if (rooted || body.includes("/")) {
    candidates = [file];
  } else {
    candidates = [file, file.split("/").pop() ?? file];
  }
  const matcher = globToRegex(body);
  return candidates.some((candidate) => matcher.test(candidate));
}
// Translates a glob into an anchored RegExp: "**" (optionally followed by
// "/") crosses directory separators, "*" and "?" stay within a single
// path segment, "." and regex metacharacters are escaped literally.
function globToRegex(pattern) {
  let source = "^";
  for (let i = 0; i < pattern.length; i++) {
    const ch = pattern[i];
    switch (ch) {
      case "*":
        if (pattern[i + 1] === "*") {
          source += ".*";
          i++;
          if (pattern[i + 1] === "/") i++;
        } else {
          source += "[^/]*";
        }
        break;
      case "?":
        source += "[^/]";
        break;
      case ".":
        source += "\\.";
        break;
      default:
        if (/[\\^$+()=!|{}[\]]/.test(ch ?? "")) {
          source += `\\${ch}`;
        } else {
          source += ch;
        }
    }
  }
  return new RegExp(`${source}$`);
}
2158
// Regexes for well-known credential formats. A value in an example/template
// env file matching one of these is reported as a probable real secret.
var SECRET_PATTERNS = [
  { name: "AWS access key id", re: /\bAKIA[0-9A-Z]{16}\b/ },
  { name: "GitHub token", re: /\bgh[pousr]_[A-Za-z0-9]{36,}\b/ },
  { name: "Slack token", re: /\bxox[abprs]-[A-Za-z0-9-]{10,}\b/ },
  { name: "Stripe live key", re: /\bsk_live_[A-Za-z0-9]{16,}\b/ },
  { name: "Google API key", re: /\bAIza[0-9A-Za-z_-]{35}\b/ },
  { name: "Generic JWT", re: /\beyJ[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_-]{10,}\b/ },
  { name: "Private key block", re: /-----BEGIN (RSA |EC |OPENSSH |DSA |PGP )?PRIVATE KEY-----/ }
];
// Values treated as harmless placeholders (empty, "xxx", "<token>",
// "{value}", "changeme", "your-api-key", ...) — these never trigger a
// secret finding even if they would match a pattern above.
var PLACEHOLDER_RE2 = /^(?:|x+|y+|<.*>|\{.*\}|change[-_ ]?me|todo|placeholder|your[-_ ].+|example|dummy|fake|test)$/i;
2168
/**
 * Scans example/template env files for values that look like real
 * credentials. Each file is parsed line-by-line as KEY=VALUE pairs;
 * quoted values are unwrapped, unquoted values have trailing " #"
 * comments stripped, placeholders (PLACEHOLDER_RE2) are skipped, and the
 * remainder is checked against SECRET_PATTERNS (first match per line
 * wins). Unreadable files are skipped silently.
 *
 * @returns {Promise<Array<{file: string, key: string, pattern: string}>>}
 */
async function scanExampleFilesForSecrets(cwd, files) {
  const findings = [];
  // Reads are synchronous; Promise.all here just matches the async shape
  // expected by scanEnv's parallel check list.
  await Promise.all(
    files.map(async (file) => {
      let content;
      try {
        content = readFileSync9(join5(cwd, file), "utf8");
      } catch {
        return;
      }
      const lines = content.split(/\r?\n/);
      for (const line of lines) {
        const trimmed = line.trim();
        if (!trimmed || trimmed.startsWith("#")) continue;
        const eq = trimmed.indexOf("=");
        if (eq <= 0) continue;
        const key = trimmed.slice(0, eq).trim();
        let value = trimmed.slice(eq + 1).trim();
        if (value.startsWith('"') && value.endsWith('"') || value.startsWith("'") && value.endsWith("'")) {
          // Quoted value: take the content verbatim (inline "#" is data).
          value = value.slice(1, -1);
        } else {
          // Unquoted value: drop an inline comment introduced by " #".
          const hashIdx = value.indexOf(" #");
          if (hashIdx >= 0) value = value.slice(0, hashIdx).trim();
        }
        if (!value || PLACEHOLDER_RE2.test(value)) continue;
        for (const pat of SECRET_PATTERNS) {
          if (pat.re.test(value)) {
            findings.push({ file, key, pattern: pat.name });
            break;
          }
        }
      }
    })
  );
  return findings;
}
2204
// True when any env issue sits at or above the given severity threshold;
// "none" disables threshold failures entirely. Uses a local rank table so
// the env scan is independent of the package-finding SEVERITY_RANK.
function envIssuesMeetThreshold(issues, threshold) {
  if (threshold === "none") return false;
  const rank = {
    critical: 4,
    high: 3,
    moderate: 2,
    low: 1,
    unknown: 0
  };
  const floor = rank[threshold];
  return issues.some((issue) => rank[issue.severity] >= floor);
}
2216
+
2217
+ // src/init.ts
2218
+ import { existsSync as existsSync6, writeFileSync as writeFileSync2 } from "fs";
2219
+ import { resolve as resolve9 } from "path";
2220
/**
 * `trawly init`: writes a starter trawly.toml for the chosen policy
 * preset (default "ci") and, unless disabled, runs an initial scan that
 * records current findings into a baseline file.
 *
 * @param {object} [options] - cwd, policy, config, baseline, overwrite,
 *   writeBaseline (false to skip the initial scan), risk, env, fetchImpl.
 * @returns {Promise<object>} { configPath, configWritten, baselinePath,
 *   baselineWritten, scan, warnings }.
 */
async function initProject(options = {}) {
  const cwd = resolve9(options.cwd ?? process.cwd());
  const policy = options.policy ?? "ci";
  const configPath = resolve9(cwd, options.config ?? "trawly.toml");
  const baselinePath = options.baseline ?? "trawly-baseline.json";
  const warnings = [];
  let configWritten = false;
  // Never clobber an existing config unless explicitly asked to.
  if (options.overwrite || !existsSync6(configPath)) {
    writeFileSync2(configPath, renderConfig(policy, baselinePath));
    configWritten = true;
  } else {
    warnings.push(`${configPath} already exists; leaving it unchanged.`);
  }
  let scan;
  let baselineWritten = false;
  if (options.writeBaseline !== false) {
    try {
      scan = await scanProject({
        cwd,
        config: configPath,
        policy,
        risk: options.risk,
        env: options.env,
        writeBaseline: baselinePath,
        fetchImpl: options.fetchImpl
      });
      // NOTE(review): the baseline results visible in this bundle
      // ({ path, loaded, total, existing, new }) carry no `written` field
      // — presumably writeBaseline() adds one; confirm, otherwise this is
      // always false even after a successful write.
      baselineWritten = scan.baseline?.written !== void 0;
    } catch (err) {
      // "Nothing to scan" is a soft failure during init; anything else
      // propagates.
      if (err instanceof ScanInputError) {
        warnings.push(
          "No supported lockfile or SBOM was found, so no baseline was written."
        );
      } else {
        throw err;
      }
    }
  }
  return {
    configPath,
    configWritten,
    baselinePath: resolve9(cwd, baselinePath),
    baselineWritten,
    scan,
    warnings
  };
}
2266
// Renders the starter trawly.toml for a policy preset: the preset's
// effective settings, followed by a commented-out example of an expiring
// [[ignore]] entry.
function renderConfig(policy, baselinePath) {
  const preset = POLICY_PRESETS[policy];
  const settings = [
    `policy = "${policy}"`,
    `failOn = "${preset.failOn}"`,
    `risk = ${String(preset.risk)}`,
    `env = ${String(preset.env)}`,
    'allowedRegistries = ["https://registry.npmjs.org", "https://registry.yarnpkg.com"]'
  ];
  const ignoreExample = [
    `# Existing findings are tracked in ${baselinePath}.`,
    "# Ignore entries must expire.",
    "# [[ignore]]",
    '# id = "GHSA-example"',
    '# package = "example-package"',
    '# expires = "2026-06-30"',
    '# reason = "Not reachable in this application"'
  ];
  return [...settings, "", ...ignoreExample, ""].join("\n");
}
2285
+
2286
+ // src/why.ts
2287
+ import { existsSync as existsSync7 } from "fs";
2288
+ import { join as join6, resolve as resolve10 } from "path";
2289
/**
 * `trawly why <package>`: parses the project's lockfiles (explicit via
 * options.lockfile, else auto-detected in cwd) and reports every instance
 * of `packageName`, each with a best-effort dependency chain and an
 * explanatory note. Matches are sorted by source lockfile, then by the
 * instance's lockfile path, for stable output.
 *
 * @returns {{packageName: string, lockfiles: string[], matches: object[]}}
 */
function explainWhy(packageName, options = {}) {
  const cwd = resolve10(options.cwd ?? process.cwd());
  const lockfiles = options.lockfile ? normalizePaths2(cwd, options.lockfile) : detectLockfiles2(cwd);
  const packages = lockfiles.flatMap((path) => parseLockfile(path));
  const matches = packages.filter((pkg) => pkg.name === packageName).map((pkg) => ({
    package: pkg,
    chain: inferChain(pkg),
    note: graphNote(pkg)
  })).sort((a, b) => {
    // sourceFile may be absent; missing values sort as empty strings.
    const source = (a.package.sourceFile ?? "").localeCompare(
      b.package.sourceFile ?? ""
    );
    if (source !== 0) return source;
    return a.package.path.localeCompare(b.package.path);
  });
  return { packageName, lockfiles, matches };
}
2306
// Best-effort dependency chain for a matched package. npm lockfile
// entries carry a node_modules path that encodes the nesting chain;
// entries from other managers just yield the package itself.
function inferChain(pkg) {
  if (pkg.manager === "npm") {
    const chain = packageNamesFromNodeModulesPath(pkg.path);
    if (chain.length > 0) return chain;
  }
  return [pkg.name];
}
// Explanatory note shown alongside a match. npm entries need none (their
// chain is real); direct dependencies are labelled as such; everything
// else gets a caveat that full graph data is not available.
function graphNote(pkg) {
  if (pkg.manager === "npm") return void 0;
  if (pkg.direct) return "direct dependency";
  return `${pkg.manager ?? "lockfile"} lock entry; full parent chain is not available yet`;
}
// Extracts the ordered package names from an npm node_modules path, e.g.
// "node_modules/a/node_modules/@s/b" -> ["a", "@s/b"]. Scoped packages
// consume two path segments after each "node_modules" marker.
function packageNamesFromNodeModulesPath(path) {
  const segments = path.split("/");
  const names = [];
  let i = 0;
  while (i < segments.length) {
    if (segments[i] !== "node_modules") {
      i += 1;
      continue;
    }
    const head = segments[i + 1];
    if (!head) {
      i += 1;
      continue;
    }
    if (head.startsWith("@")) {
      const tail = segments[i + 2];
      if (!tail) {
        i += 1;
        continue;
      }
      names.push(`${head}/${tail}`);
      i += 3;
      continue;
    }
    names.push(head);
    i += 2;
  }
  return names;
}
2337
// Lockfile discovery for `trawly why`, mirroring the scanner's
// detectLockfiles(): every supported lockfile directly under cwd, in a
// fixed priority order.
function detectLockfiles2(cwd) {
  const names = [
    "package-lock.json",
    "npm-shrinkwrap.json",
    "pnpm-lock.yaml",
    "yarn.lock"
  ];
  return names.map((name) => join6(cwd, name)).filter((p) => existsSync7(p));
}
// Coerces a lockfile option (string or string[]) into a deduplicated list
// of absolute paths resolved against cwd.
function normalizePaths2(cwd, value) {
  if (!value) return [];
  const list = Array.isArray(value) ? value : [value];
  const resolved = list.map((entry) => resolve10(cwd, entry));
  return [...new Set(resolved)];
}
2351
+
2352
// src/version.ts
import { readFileSync as readFileSync10 } from "fs";
// Version string reported by the CLI. Read once at module load from the
// package.json adjacent to the bundle; falls back to a pinned constant
// when the manifest is missing, unreadable, or malformed.
var FALLBACK_VERSION = "0.1.0";
var TRAWLY_VERSION = readPackageVersion();
function readPackageVersion() {
  let manifest;
  try {
    const raw = readFileSync10(new URL("../package.json", import.meta.url), "utf8");
    manifest = JSON.parse(raw);
  } catch {
    return FALLBACK_VERSION;
  }
  const { version } = manifest;
  return typeof version === "string" ? version : FALLBACK_VERSION;
}
657
2366
// Public API of the bundle: scanners (scanProject/scanLockfile/scanEnv),
// lockfile & SBOM parsers, baseline and config helpers, policy presets,
// CLI-support entry points (initProject, explainWhy), and the shared
// error types and severity utilities.
export {
  BaselineError,
  ConfigError,
  POLICY_PRESETS,
  SEVERITY_RANK,
  ScanInputError,
  TRAWLY_VERSION,
  applyBaseline,
  compareFindings,
  dedupeForQuery,
  envIssuesMeetThreshold,
  explainWhy,
  initProject,
  loadConfig,
  meetsThreshold,
  parseLockfile,
  parseNpmPackageLock,
  parsePnpmLock,
  parsePnpmPackageKey,
  parsePurlPackage,
  parseSbom,
  parseYarnDescriptorName,
  parseYarnLock,
  queryOsv,
  resolvePolicy,
  scanEnv,
  scanEnvFiles,
  scanLockfile,
  scanProject,
  summarize,
  writeBaseline
};
671
2398
  //# sourceMappingURL=index.js.map