aura-security 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +446 -0
- package/deploy/AWS-DEPLOYMENT.md +358 -0
- package/deploy/terraform/main.tf +362 -0
- package/deploy/terraform/terraform.tfvars.example +6 -0
- package/dist/agents/base.d.ts +44 -0
- package/dist/agents/base.js +96 -0
- package/dist/agents/index.d.ts +14 -0
- package/dist/agents/index.js +17 -0
- package/dist/agents/policy/evaluator.d.ts +15 -0
- package/dist/agents/policy/evaluator.js +183 -0
- package/dist/agents/policy/index.d.ts +12 -0
- package/dist/agents/policy/index.js +15 -0
- package/dist/agents/policy/validator.d.ts +15 -0
- package/dist/agents/policy/validator.js +182 -0
- package/dist/agents/scanners/gitleaks.d.ts +14 -0
- package/dist/agents/scanners/gitleaks.js +155 -0
- package/dist/agents/scanners/grype.d.ts +14 -0
- package/dist/agents/scanners/grype.js +109 -0
- package/dist/agents/scanners/index.d.ts +15 -0
- package/dist/agents/scanners/index.js +27 -0
- package/dist/agents/scanners/npm-audit.d.ts +13 -0
- package/dist/agents/scanners/npm-audit.js +129 -0
- package/dist/agents/scanners/semgrep.d.ts +14 -0
- package/dist/agents/scanners/semgrep.js +131 -0
- package/dist/agents/scanners/trivy.d.ts +14 -0
- package/dist/agents/scanners/trivy.js +122 -0
- package/dist/agents/types.d.ts +137 -0
- package/dist/agents/types.js +91 -0
- package/dist/auditor/index.d.ts +3 -0
- package/dist/auditor/index.js +2 -0
- package/dist/auditor/pipeline.d.ts +19 -0
- package/dist/auditor/pipeline.js +240 -0
- package/dist/auditor/validator.d.ts +17 -0
- package/dist/auditor/validator.js +58 -0
- package/dist/aura/client.d.ts +29 -0
- package/dist/aura/client.js +125 -0
- package/dist/aura/index.d.ts +4 -0
- package/dist/aura/index.js +2 -0
- package/dist/aura/server.d.ts +45 -0
- package/dist/aura/server.js +343 -0
- package/dist/cli.d.ts +17 -0
- package/dist/cli.js +1433 -0
- package/dist/client/index.d.ts +41 -0
- package/dist/client/index.js +170 -0
- package/dist/compliance/index.d.ts +40 -0
- package/dist/compliance/index.js +292 -0
- package/dist/database/index.d.ts +77 -0
- package/dist/database/index.js +395 -0
- package/dist/index.d.ts +25 -0
- package/dist/index.js +762 -0
- package/dist/integrations/aura-scanner.d.ts +69 -0
- package/dist/integrations/aura-scanner.js +155 -0
- package/dist/integrations/aws-scanner.d.ts +63 -0
- package/dist/integrations/aws-scanner.js +624 -0
- package/dist/integrations/config.d.ts +69 -0
- package/dist/integrations/config.js +212 -0
- package/dist/integrations/github.d.ts +45 -0
- package/dist/integrations/github.js +201 -0
- package/dist/integrations/gitlab.d.ts +36 -0
- package/dist/integrations/gitlab.js +110 -0
- package/dist/integrations/index.d.ts +11 -0
- package/dist/integrations/index.js +11 -0
- package/dist/integrations/local-scanner.d.ts +146 -0
- package/dist/integrations/local-scanner.js +1654 -0
- package/dist/integrations/notifications.d.ts +99 -0
- package/dist/integrations/notifications.js +305 -0
- package/dist/integrations/scanners.d.ts +57 -0
- package/dist/integrations/scanners.js +217 -0
- package/dist/integrations/slop-scanner.d.ts +69 -0
- package/dist/integrations/slop-scanner.js +155 -0
- package/dist/integrations/webhook.d.ts +37 -0
- package/dist/integrations/webhook.js +256 -0
- package/dist/orchestrator/index.d.ts +72 -0
- package/dist/orchestrator/index.js +187 -0
- package/dist/output/index.d.ts +152 -0
- package/dist/output/index.js +399 -0
- package/dist/pipeline/index.d.ts +72 -0
- package/dist/pipeline/index.js +313 -0
- package/dist/sbom/index.d.ts +94 -0
- package/dist/sbom/index.js +298 -0
- package/dist/schemas/index.d.ts +2 -0
- package/dist/schemas/index.js +2 -0
- package/dist/schemas/input.schema.d.ts +87 -0
- package/dist/schemas/input.schema.js +44 -0
- package/dist/schemas/output.schema.d.ts +115 -0
- package/dist/schemas/output.schema.js +64 -0
- package/dist/serve-visualizer.d.ts +2 -0
- package/dist/serve-visualizer.js +78 -0
- package/dist/slop/client.d.ts +29 -0
- package/dist/slop/client.js +125 -0
- package/dist/slop/index.d.ts +4 -0
- package/dist/slop/index.js +2 -0
- package/dist/slop/server.d.ts +45 -0
- package/dist/slop/server.js +343 -0
- package/dist/types/events.d.ts +62 -0
- package/dist/types/events.js +2 -0
- package/dist/types/index.d.ts +1 -0
- package/dist/types/index.js +1 -0
- package/dist/visualizer/index.d.ts +4 -0
- package/dist/visualizer/index.js +181 -0
- package/dist/websocket/index.d.ts +88 -0
- package/dist/websocket/index.js +195 -0
- package/dist/zones/index.d.ts +7 -0
- package/dist/zones/index.js +7 -0
- package/dist/zones/manager.d.ts +101 -0
- package/dist/zones/manager.js +304 -0
- package/dist/zones/types.d.ts +78 -0
- package/dist/zones/types.js +33 -0
- package/package.json +84 -0
- package/visualizer/app.js +0 -0
- package/visualizer/index-minimal.html +1771 -0
- package/visualizer/index.html +2933 -0
- package/visualizer/landing.html +1328 -0
- package/visualizer/styles.css +0 -0
|
@@ -0,0 +1,313 @@
|
|
|
1
|
+
// Pipeline Framework - Extensible security analysis pipeline
|
|
2
|
+
// Compose multiple analysis stages with pluggable rules
|
|
3
|
+
// Built-in analysis stages
|
|
4
|
+
// Detects hardcoded credentials introduced by a change. Scans only the
// added ('+') lines of the unified diff against a fixed set of secret
// patterns and emits one critical escalation event when anything matches.
export class SecretsDetectionStage {
    name = 'secrets-detection';
    description = 'Detect hardcoded secrets and credentials in code changes';
    // Each entry pairs a human-readable name with a detection regex. The /g
    // regexes are stateful (lastIndex), so they are reset before every test.
    patterns = [
        { name: 'API Key', regex: /api[_-]?key\s*[=:]\s*['"][^'"]{8,}['"]/gi },
        { name: 'Secret', regex: /secret\s*[=:]\s*['"][^'"]{8,}['"]/gi },
        { name: 'Password', regex: /password\s*[=:]\s*['"][^'"]{4,}['"]/gi },
        { name: 'Bearer Token', regex: /bearer\s+[a-z0-9_-]{20,}/gi },
        { name: 'Private Key', regex: /-----BEGIN\s+(RSA|EC|OPENSSH|PGP)\s+PRIVATE\s+KEY-----/g },
        { name: 'AWS Key', regex: /AKIA[0-9A-Z]{16}/g },
        { name: 'GitHub Token', regex: /gh[pousr]_[A-Za-z0-9_]{36,}/g },
        { name: 'JWT', regex: /eyJ[A-Za-z0-9_-]*\.eyJ[A-Za-z0-9_-]*\.[A-Za-z0-9_-]*/g }
    ];
    /**
     * Scan the change diff and push a single critical escalation event onto
     * ctx.events when at least one pattern matches an added line.
     * @param {object} ctx - pipeline context ({ input, events, ... })
     */
    analyze(ctx) {
        const { diff } = ctx.input.change_event;
        // Bug fix: change events may omit the diff; previously this threw a
        // TypeError on diff.split.
        if (typeof diff !== 'string' || diff.length === 0)
            return;
        const lines = diff.split('\n');
        const findings = [];
        for (let i = 0; i < lines.length; i++) {
            const line = lines[i];
            // Only inspect added lines. '+++' is the unified-diff file header,
            // not an addition, so skip it explicitly.
            if (!line.startsWith('+') || line.startsWith('+++'))
                continue;
            for (const { name, regex } of this.patterns) {
                regex.lastIndex = 0; // /g regexes advance lastIndex statefully
                if (regex.test(line)) {
                    findings.push({ pattern: name, line: i + 1 });
                }
            }
        }
        if (findings.length > 0) {
            ctx.events.push(this.createEvent({
                severity: 'critical',
                claim: `Detected ${findings.length} potential secret(s): ${[...new Set(findings.map(f => f.pattern))].join(', ')}`,
                attackPath: [
                    'Credentials committed to repository',
                    'Secrets exposed in version control history',
                    'Attacker extracts credentials from git history or logs'
                ],
                affectedAssets: findings.map(f => `line:${f.line}`),
                evidenceRefs: findings.map(f => ({ type: 'diff', pointer: `line:${f.line}` })),
                assuranceBreak: ['integrity', 'access_control'],
                confidence: 0.95
            }));
        }
    }
    // Wrap an analysis result in the shared pipeline event envelope
    // (snake_case payload keys as emitted by every other stage).
    createEvent(result) {
        return {
            event_type: 'escalation_triggered',
            target: 'self',
            payload: {
                severity: result.severity,
                claim: result.claim,
                attack_path: result.attackPath,
                affected_assets: result.affectedAssets,
                evidence_refs: result.evidenceRefs,
                assurance_break: result.assuranceBreak,
                confidence: result.confidence
            },
            timestamp: new Date().toISOString()
        };
    }
}
|
|
65
|
+
// Interprets the free-text vulnerability scan stored in the evidence
// bundle and raises events for critical and high severity counts.
export class VulnerabilityScanStage {
    name = 'vulnerability-scan';
    description = 'Analyze vulnerability scan results from evidence bundle';
    /**
     * Parse the scan text and push events: an escalation when any critical
     * vulnerabilities are present, a finding when any high ones are.
     * @param {object} ctx - pipeline context ({ input, events, ... })
     */
    analyze(ctx) {
        const scanText = ctx.input.evidence_bundle.vuln_scan;
        if (!scanText)
            return;
        const counts = this.parseVulnScan(scanText);
        if (counts.critical > 0) {
            ctx.events.push({
                event_type: 'escalation_triggered',
                target: 'self',
                payload: {
                    severity: 'critical',
                    claim: `${counts.critical} critical vulnerabilities detected in dependencies`,
                    attack_path: [
                        'Known CVE present in deployed dependencies',
                        'Attacker identifies vulnerable component version',
                        'Exploit executed against production system'
                    ],
                    affected_assets: ['dependencies'],
                    evidence_refs: [{ type: 'scan', pointer: 'vuln_scan:critical' }],
                    assurance_break: ['integrity'],
                    confidence: 0.95
                },
                timestamp: new Date().toISOString()
            });
        }
        if (counts.high > 0) {
            ctx.events.push({
                event_type: 'finding_raised',
                target: 'self',
                payload: {
                    severity: 'high',
                    claim: `${counts.high} high severity vulnerabilities detected`,
                    attack_path: [
                        'Known vulnerability in dependency chain',
                        'Attacker chains vulnerability with other weaknesses',
                        'System compromise achieved'
                    ],
                    affected_assets: ['dependencies'],
                    evidence_refs: [{ type: 'scan', pointer: 'vuln_scan:high' }],
                    assurance_break: ['integrity'],
                    confidence: 0.85
                },
                timestamp: new Date().toISOString()
            });
        }
    }
    /**
     * Extract per-severity counts from scan text such as "critical: 2 high: 5".
     * A missing label yields 0 for that severity.
     * @param {string} scan - raw scanner output
     * @returns {{critical: number, high: number, medium: number, low: number}}
     */
    parseVulnScan(scan) {
        const countFor = (label) => {
            const match = scan.match(new RegExp(`${label}[:\\s]+(\\d+)`, 'i'));
            return match ? parseInt(match[1], 10) : 0;
        };
        return {
            critical: countFor('critical'),
            high: countFor('high'),
            medium: countFor('medium'),
            low: countFor('low')
        };
    }
}
|
|
127
|
+
// Flags changes touching files whose paths contain a configured critical
// asset substring (case-insensitive), with severity adjusted by the
// policy's risk tolerance.
export class CriticalAssetStage {
    name = 'critical-asset-monitor';
    description = 'Monitor changes to critical asset paths';
    /**
     * Raise one finding listing every changed file that matches a critical
     * asset substring from the policy context.
     * @param {object} ctx - pipeline context ({ input, events, ... })
     */
    analyze(ctx) {
        const { files_changed } = ctx.input.change_event;
        const { critical_assets, risk_tolerance } = ctx.input.policy_context;
        const assetNeedles = critical_assets.map(a => a.toLowerCase());
        const criticalChanges = files_changed.filter(file => {
            const lowered = file.toLowerCase();
            return assetNeedles.some(needle => lowered.includes(needle));
        });
        if (criticalChanges.length === 0)
            return;
        ctx.events.push({
            event_type: 'finding_raised',
            target: 'self',
            payload: {
                severity: this.adjustSeverity('high', risk_tolerance),
                claim: `${criticalChanges.length} critical asset file(s) modified`,
                attack_path: [
                    'Attacker gains commit access or compromises developer',
                    'Modifies critical asset configuration or code',
                    'Changes bypass review due to file location complexity'
                ],
                affected_assets: criticalChanges,
                evidence_refs: criticalChanges.map(f => ({ type: 'diff', pointer: f })),
                assurance_break: ['integrity', 'access_control'],
                confidence: 0.85
            },
            timestamp: new Date().toISOString()
        });
    }
    /**
     * Shift severity one step up when tolerance is 'low', one step down when
     * it is 'high'; any other tolerance leaves the base severity unchanged.
     * @param {string} base - one of 'low' | 'medium' | 'high' | 'critical'
     * @param {string} tolerance - policy risk tolerance
     * @returns {string} adjusted severity
     */
    adjustSeverity(base, tolerance) {
        const escalate = { low: 'medium', medium: 'high', high: 'critical' };
        const relax = { critical: 'high', high: 'medium', medium: 'low' };
        if (tolerance === 'low')
            return escalate[base] || base;
        if (tolerance === 'high')
            return relax[base] || base;
        return base;
    }
}
|
|
185
|
+
// Flags infrastructure-as-code changes for manual review. Triggers on
// explicit infra_change events, and also on other change types whose file
// list matches common IaC filename patterns (recording an uncertainty).
export class InfrastructureChangeStage {
    name = 'infrastructure-change';
    description = 'Analyze infrastructure-as-code changes';
    // Filename patterns that suggest infrastructure-as-code content.
    iacPatterns = [
        /\.tf$/, // Terraform
        /\.tfvars$/,
        /cloudformation/i,
        /\.yaml$/,
        /\.yml$/,
        /kubernetes/i,
        /helm/i,
        /docker-compose/i,
        /Dockerfile$/i
    ];
    /**
     * Push a medium-severity finding for any change that is (or looks like)
     * an infrastructure change, and record the human-approval assumption.
     * @param {object} ctx - pipeline context ({ input, events, ... })
     */
    analyze(ctx) {
        const { type, files_changed } = ctx.input.change_event;
        if (type !== 'infra_change') {
            // Not declared as infra - bail out unless a file looks like IaC.
            const looksLikeIac = files_changed.some(f => this.iacPatterns.some(p => p.test(f)));
            if (!looksLikeIac)
                return;
            ctx.uncertainties.push('Detected potential IaC files but change type is not infra_change');
        }
        ctx.events.push({
            event_type: 'finding_raised',
            target: 'self',
            payload: {
                severity: 'medium',
                claim: 'Infrastructure change requires manual security review',
                attack_path: [
                    'IaC misconfiguration introduced',
                    'Security boundaries or network rules modified',
                    'Attacker gains access to previously protected resources'
                ],
                affected_assets: ['infrastructure'],
                evidence_refs: files_changed.map(f => ({ type: 'diff', pointer: f })),
                assurance_break: ['isolation', 'access_control'],
                confidence: 0.6
            },
            timestamp: new Date().toISOString()
        });
        ctx.assumptions.push('Infrastructure changes require human approval regardless of automated checks');
    }
}
|
|
229
|
+
// Raises a finding whenever a deploy event targets the 'prod' environment,
// as a reminder to verify staging validation was completed first.
export class ProductionDeployStage {
    name = 'production-deploy-guard';
    description = 'Guard against direct production deployments';
    /**
     * Push a medium-severity finding for deploy events aimed at prod;
     * everything else is ignored.
     * @param {object} ctx - pipeline context ({ input, events, ... })
     */
    analyze(ctx) {
        const { type, environment, commit } = ctx.input.change_event;
        if (type !== 'deploy' || environment !== 'prod')
            return;
        ctx.events.push({
            event_type: 'finding_raised',
            target: 'self',
            payload: {
                severity: 'medium',
                claim: 'Direct production deployment detected - verify staging validation completed',
                attack_path: [
                    'Code deployed directly to production',
                    'Untested changes reach production environment',
                    'Runtime errors or vulnerabilities exposed to users'
                ],
                affected_assets: ['production-environment'],
                evidence_refs: [{ type: 'diff', pointer: commit }],
                assurance_break: ['isolation'],
                confidence: 0.7
            },
            timestamp: new Date().toISOString()
        });
        ctx.assumptions.push('Production deployments should pass through staging first');
    }
}
|
|
257
|
+
// Pipeline executor
|
|
258
|
+
// Pipeline executor: runs each registered stage in sequence against one
// input, aggregating events, assumptions and uncertainties into a context.
export class SecurityPipeline {
    stages = [];
    constructor() {
        // Register the built-in stages in their default order.
        this.stages = [
            new SecretsDetectionStage(),
            new VulnerabilityScanStage(),
            new CriticalAssetStage(),
            new InfrastructureChangeStage(),
            new ProductionDeployStage()
        ];
    }
    // Append a custom stage after the currently registered ones.
    addStage(stage) {
        this.stages.push(stage);
    }
    // Remove every registered stage with the given name.
    removeStage(name) {
        this.stages = this.stages.filter(s => s.name !== name);
    }
    // Snapshot copy so callers cannot mutate the internal list.
    getStages() {
        return this.stages.slice();
    }
    /**
     * Run the pipeline. A failing stage never aborts the run; its error is
     * recorded as an uncertainty and the remaining stages still execute.
     * @param {object} input - { change_event, evidence_bundle, policy_context }
     * @returns {Promise<object>} the populated pipeline context
     */
    async execute(input) {
        const context = {
            input,
            events: [],
            assumptions: [],
            uncertainties: [],
            metadata: new Map()
        };
        const startedEvent = {
            event_type: 'analysis_started',
            target: 'self',
            payload: {
                severity: 'low',
                claim: `Pipeline started: ${this.stages.length} stages`,
                attack_path: ['Initiated security analysis'],
                affected_assets: [],
                evidence_refs: [{ type: 'diff', pointer: input.change_event.id }],
                assurance_break: [],
                confidence: 1.0
            },
            timestamp: new Date().toISOString()
        };
        context.events.push(startedEvent);
        for (const stage of this.stages) {
            try {
                await stage.analyze(context);
            }
            catch (err) {
                context.uncertainties.push(`Stage ${stage.name} failed: ${err}`);
            }
        }
        return context;
    }
}
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
// Type declarations for the SBOM (Software Bill of Materials) generator.
// Describes the CycloneDX 1.5 and SPDX 2.3 document shapes produced by
// generateCycloneDX / generateSPDX.
import type { LocalScanResult } from '../integrations/local-scanner.js';
/** One component (dependency, application, container, ...) in a CycloneDX BOM. */
export interface CycloneDXComponent {
    type: 'library' | 'application' | 'framework' | 'file' | 'container' | 'operating-system';
    name: string;
    version: string;
    // Package URL, e.g. "pkg:npm/lodash@4.17.21".
    purl?: string;
    licenses?: Array<{
        license: {
            id?: string;
            name?: string;
        };
    }>;
    hashes?: Array<{
        alg: string;
        content: string;
    }>;
}
/** A vulnerability record attached to a CycloneDX BOM. */
export interface CycloneDXVulnerability {
    // Vulnerability identifier (e.g. a CVE or advisory id from the scanner).
    id: string;
    source?: {
        name: string;
        url?: string;
    };
    ratings?: Array<{
        severity: 'critical' | 'high' | 'medium' | 'low' | 'info' | 'none' | 'unknown';
        method?: string;
        score?: number;
    }>;
    description?: string;
    recommendation?: string;
    // References (by purl) to the affected components.
    affects?: Array<{
        ref: string;
    }>;
}
/** Root CycloneDX 1.5 document. */
export interface CycloneDXDocument {
    bomFormat: 'CycloneDX';
    specVersion: '1.5';
    // Serial number URN, e.g. "urn:uuid:<random uuid>".
    serialNumber: string;
    version: number;
    metadata: {
        timestamp: string;
        tools?: Array<{
            vendor: string;
            name: string;
            version: string;
        }>;
        // The application this BOM describes.
        component?: CycloneDXComponent;
    };
    components: CycloneDXComponent[];
    // Present only when vulnerabilities were requested and found.
    vulnerabilities?: CycloneDXVulnerability[];
}
/** One package entry in an SPDX 2.3 document. */
export interface SPDXPackage {
    SPDXID: string;
    name: string;
    versionInfo: string;
    downloadLocation: string;
    filesAnalyzed: boolean;
    licenseConcluded?: string;
    licenseDeclared?: string;
    copyrightText?: string;
    // External references, e.g. a PACKAGE-MANAGER purl locator.
    externalRefs?: Array<{
        referenceCategory: string;
        referenceType: string;
        referenceLocator: string;
    }>;
}
/** Root SPDX 2.3 document. */
export interface SPDXDocument {
    spdxVersion: 'SPDX-2.3';
    dataLicense: 'CC0-1.0';
    SPDXID: 'SPDXRef-DOCUMENT';
    name: string;
    documentNamespace: string;
    creationInfo: {
        created: string;
        creators: string[];
        licenseListVersion?: string;
    };
    packages: SPDXPackage[];
    // DESCRIBES / DEPENDS_ON edges between SPDX elements.
    relationships: Array<{
        spdxElementId: string;
        relationshipType: string;
        relatedSpdxElement: string;
    }>;
}
/** Options shared by the SBOM generation entry points. */
export interface SBOMOptions {
    format: 'cyclonedx' | 'spdx';
    // Attach vulnerability records from the scan result when true.
    includeVulnerabilities?: boolean;
    includeLicenses?: boolean;
    projectName?: string;
    projectVersion?: string;
}
/** Generate a CycloneDX 1.5 SBOM for the given path. */
export declare function generateCycloneDX(targetPath: string, scanResult?: LocalScanResult, options?: SBOMOptions): CycloneDXDocument;
/** Generate an SPDX 2.3 SBOM for the given path. */
export declare function generateSPDX(targetPath: string, scanResult?: LocalScanResult, options?: SBOMOptions): SPDXDocument;
/** Generate an SBOM in the format selected by options.format (CycloneDX by default). */
export declare function generateSBOM(targetPath: string, scanResult?: LocalScanResult, options?: SBOMOptions): CycloneDXDocument | SPDXDocument;
|
|
@@ -0,0 +1,298 @@
|
|
|
1
|
+
// SBOM (Software Bill of Materials) Generator
|
|
2
|
+
// Supports CycloneDX 1.5 and SPDX 2.3 formats
|
|
3
|
+
import { spawnSync } from 'child_process';
import { randomUUID } from 'crypto';
import { existsSync, readFileSync } from 'fs';
import { join } from 'path';
|
|
6
|
+
// Probe for an external CLI tool by invoking `<tool> --version`.
// Any spawn failure or non-zero exit means the tool is unavailable.
function isToolAvailable(tool) {
    let probe;
    try {
        probe = spawnSync(tool, ['--version'], { encoding: 'utf-8', timeout: 5000 });
    }
    catch {
        return false;
    }
    return probe.status === 0;
}
|
|
15
|
+
// Try to use Syft for better SBOM generation
|
|
16
|
+
// Run Syft against targetPath and return its stdout in the requested
// output format, or null when Syft is missing or the run fails.
function runSyft(targetPath, format) {
    if (!isToolAvailable('syft')) {
        return null;
    }
    try {
        const run = spawnSync('syft', [targetPath, '-o', format], {
            encoding: 'utf-8',
            timeout: 120000,
            maxBuffer: 50 * 1024 * 1024 // SBOMs for large trees can be big
        });
        if (run.status === 0 && run.stdout) {
            return run.stdout;
        }
    }
    catch { /* treat any spawn failure as "no Syft output" */ }
    return null;
}
|
|
33
|
+
// Generate package URL (purl)
|
|
34
|
+
/**
 * Build a package URL (purl) for a dependency.
 * Unknown ecosystem types fall back to the 'generic' purl type.
 * @param {string} name - package name (may contain a namespace, e.g. "@scope/pkg")
 * @param {string} version - package version string
 * @param {string} type - ecosystem key (npm, pip, go, cargo, gem, composer, maven)
 * @returns {string} purl such as "pkg:npm/%40scope/pkg@1.0.0"
 */
function generatePurl(name, version, type) {
    const typeMap = {
        npm: 'npm',
        pip: 'pypi',
        go: 'golang',
        cargo: 'cargo',
        gem: 'gem',
        composer: 'composer',
        maven: 'maven'
    };
    const purlType = typeMap[type] || 'generic';
    // Bug fix: encode each path segment separately. Encoding the whole name
    // turned scoped npm packages into "%40scope%2Fpkg", but the purl spec
    // keeps '/' as the namespace/name separator ("pkg:npm/%40scope/pkg").
    const encodedName = name.split('/').map(encodeURIComponent).join('/');
    return `pkg:${purlType}/${encodedName}@${encodeURIComponent(version)}`;
}
|
|
47
|
+
// Read package.json dependencies
|
|
48
|
+
// Collect {name, version} pairs for a project's npm dependencies.
// Prefers package-lock.json (exact resolved versions); otherwise falls
// back to package.json's declared ranges with the ^/~ prefix stripped.
// Returns [] when neither file exists or parsing fails.
function readNpmDependencies(targetPath) {
    const deps = [];
    const lockPath = join(targetPath, 'package-lock.json');
    const pkgPath = join(targetPath, 'package.json');
    if (existsSync(lockPath)) {
        try {
            const lock = JSON.parse(readFileSync(lockPath, 'utf-8'));
            for (const [entryPath, info] of Object.entries(lock.packages || {})) {
                // '' is the root project; anything outside node_modules/ is
                // a workspace path, not an installed dependency.
                if (entryPath === '' || !entryPath.includes('node_modules/'))
                    continue;
                // Keep only the segment after the last node_modules/ so
                // nested installs resolve to the actual package name.
                const name = entryPath.replace(/^node_modules\//, '').replace(/.*node_modules\//, '');
                if (name && info.version) {
                    deps.push({ name, version: info.version });
                }
            }
        }
        catch { /* unreadable lock file: return what we have */ }
    }
    else if (existsSync(pkgPath)) {
        try {
            const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'));
            const declared = { ...pkg.dependencies, ...pkg.devDependencies };
            for (const [name, range] of Object.entries(declared)) {
                deps.push({ name, version: String(range).replace(/^[\^~]/, '') });
            }
        }
        catch { /* malformed package.json: ignore */ }
    }
    return deps;
}
|
|
80
|
+
// Read Python requirements
|
|
81
|
+
// Parse requirements.txt into {name, version} pairs. Lines that pin no
// numeric version get version 'unknown'; comments and blanks are skipped.
// Returns [] when the file is absent or unreadable.
function readPythonDependencies(targetPath) {
    const reqPath = join(targetPath, 'requirements.txt');
    const deps = [];
    if (!existsSync(reqPath)) {
        return deps;
    }
    try {
        for (const rawLine of readFileSync(reqPath, 'utf-8').split('\n')) {
            const trimmed = rawLine.trim();
            if (trimmed === '' || trimmed.startsWith('#'))
                continue;
            // name, optional comparison operators, optional numeric version
            const match = trimmed.match(/^([a-zA-Z0-9_-]+)[=<>~!]*=*([0-9.]+)?/);
            if (match) {
                deps.push({ name: match[1], version: match[2] || 'unknown' });
            }
        }
    }
    catch { /* unreadable file: treat as no dependencies */ }
    return deps;
}
|
|
105
|
+
// Generate CycloneDX SBOM
|
|
106
|
+
/**
 * Generate a CycloneDX 1.5 SBOM for targetPath.
 * Uses Syft output when the tool is installed; otherwise builds the
 * component list from npm/Python dependency manifests.
 * @param {string} targetPath - project root to analyze
 * @param {object} [scanResult] - LocalScanResult whose .packages supply vulnerabilities
 * @param {object} [options] - { includeVulnerabilities, projectName, projectVersion, ... }
 * @returns {object} CycloneDX JSON document
 */
export function generateCycloneDX(targetPath, scanResult, options = { format: 'cyclonedx' }) {
    // Try Syft first - it produces richer component metadata.
    const syftOutput = runSyft(targetPath, 'cyclonedx-json');
    if (syftOutput) {
        try {
            const sbom = JSON.parse(syftOutput);
            // Attach vulnerabilities from our own scan results when requested.
            if (options.includeVulnerabilities && scanResult) {
                sbom.vulnerabilities = scanResult.packages
                    .filter(p => p.vulnId)
                    .map(p => ({
                        id: p.vulnId,
                        ratings: [{
                            severity: p.severity,
                            method: 'other'
                        }],
                        description: p.title,
                        affects: [{
                            ref: `pkg:npm/${p.name}@${p.version}`
                        }]
                    }));
            }
            return sbom;
        }
        catch { /* invalid Syft JSON: fall back to manual generation */ }
    }
    // Manual generation from dependency manifests.
    const components = [];
    for (const dep of readNpmDependencies(targetPath)) {
        components.push({
            type: 'library',
            name: dep.name,
            version: dep.version,
            purl: generatePurl(dep.name, dep.version, 'npm')
        });
    }
    for (const dep of readPythonDependencies(targetPath)) {
        components.push({
            type: 'library',
            name: dep.name,
            version: dep.version,
            purl: generatePurl(dep.name, dep.version, 'pip')
        });
    }
    const vulnerabilities = [];
    if (options.includeVulnerabilities && scanResult) {
        for (const pkg of scanResult.packages) {
            if (!pkg.vulnId)
                continue;
            vulnerabilities.push({
                id: pkg.vulnId,
                ratings: [{
                    severity: pkg.severity,
                    method: 'other'
                }],
                description: pkg.title,
                recommendation: pkg.fixedVersion ? `Upgrade to ${pkg.fixedVersion}` : undefined,
                affects: [{
                    // Prefer the purl of a known component; otherwise a generic ref.
                    ref: components.find(c => c.name === pkg.name)?.purl || `pkg:generic/${pkg.name}@${pkg.version}`
                }]
            });
        }
    }
    // Bug fix: the previous code called the bare `crypto` global without an
    // import; that global only exists on newer Node runtimes, so this threw
    // a ReferenceError elsewhere. Use the explicitly imported randomUUID.
    const serialNumber = `urn:uuid:${randomUUID()}`;
    return {
        bomFormat: 'CycloneDX',
        specVersion: '1.5',
        serialNumber,
        version: 1,
        metadata: {
            timestamp: new Date().toISOString(),
            tools: [{
                vendor: 'AuraSecurity',
                name: 'aura-security',
                version: '0.2.0'
            }],
            component: {
                type: 'application',
                name: options.projectName || 'unknown',
                version: options.projectVersion || '0.0.0'
            }
        },
        components,
        vulnerabilities: vulnerabilities.length > 0 ? vulnerabilities : undefined
    };
}
|
|
197
|
+
// Generate SPDX SBOM
|
|
198
|
+
/**
 * Generate an SPDX 2.3 SBOM for targetPath.
 * Uses Syft output when the tool is installed; otherwise builds a root
 * package plus DEPENDS_ON edges from npm/Python dependency manifests.
 * @param {string} targetPath - project root to analyze
 * @param {object} [scanResult] - unused in manual generation (SPDX carries no vulns here)
 * @param {object} [options] - { projectName, projectVersion, ... }
 * @returns {object} SPDX JSON document
 */
export function generateSPDX(targetPath, scanResult, options = { format: 'spdx' }) {
    // Prefer Syft output when available.
    const syftOutput = runSyft(targetPath, 'spdx-json');
    if (syftOutput) {
        try {
            return JSON.parse(syftOutput);
        }
        catch { /* invalid Syft JSON: fall back to manual generation */ }
    }
    const packages = [];
    const relationships = [];
    // Root package describing the project itself.
    const rootId = 'SPDXRef-Package-root';
    packages.push({
        SPDXID: rootId,
        name: options.projectName || 'unknown',
        versionInfo: options.projectVersion || '0.0.0',
        downloadLocation: 'NOASSERTION',
        filesAnalyzed: false,
        licenseConcluded: 'NOASSERTION',
        copyrightText: 'NOASSERTION'
    });
    relationships.push({
        spdxElementId: 'SPDXRef-DOCUMENT',
        relationshipType: 'DESCRIBES',
        relatedSpdxElement: rootId
    });
    // Shared builder: one dependency package plus its DEPENDS_ON edge.
    const addDependency = (dep, ecosystem, index, downloadLocation) => {
        const id = `SPDXRef-Package-${ecosystem}-${index}`;
        packages.push({
            SPDXID: id,
            name: dep.name,
            versionInfo: dep.version,
            downloadLocation,
            filesAnalyzed: false,
            licenseConcluded: 'NOASSERTION',
            copyrightText: 'NOASSERTION',
            externalRefs: [{
                referenceCategory: 'PACKAGE-MANAGER',
                referenceType: 'purl',
                referenceLocator: generatePurl(dep.name, dep.version, ecosystem)
            }]
        });
        relationships.push({
            spdxElementId: rootId,
            relationshipType: 'DEPENDS_ON',
            relatedSpdxElement: id
        });
    };
    readNpmDependencies(targetPath).forEach((dep, i) =>
        addDependency(dep, 'npm', i, `https://www.npmjs.com/package/${dep.name}`));
    readPythonDependencies(targetPath).forEach((dep, i) =>
        addDependency(dep, 'pip', i, `https://pypi.org/project/${dep.name}/`));
    const namespace = `https://spdx.org/spdxdocs/${options.projectName || 'project'}-${Date.now()}`;
    return {
        spdxVersion: 'SPDX-2.3',
        dataLicense: 'CC0-1.0',
        SPDXID: 'SPDXRef-DOCUMENT',
        name: options.projectName || 'unknown',
        documentNamespace: namespace,
        creationInfo: {
            created: new Date().toISOString(),
            creators: ['Tool: aura-security-0.2.0'],
            licenseListVersion: '3.19'
        },
        packages,
        relationships
    };
}
|
|
292
|
+
// Main SBOM generation function
|
|
293
|
+
/**
 * Main SBOM entry point: dispatch on options.format.
 * 'spdx' yields an SPDX 2.3 document; anything else yields CycloneDX 1.5.
 * @param {string} targetPath - project root to analyze
 * @param {object} [scanResult] - optional local scan result
 * @param {object} [options] - SBOM options; format defaults to 'cyclonedx'
 * @returns {object} the generated SBOM document
 */
export function generateSBOM(targetPath, scanResult, options = { format: 'cyclonedx' }) {
    return options.format === 'spdx'
        ? generateSPDX(targetPath, scanResult, options)
        : generateCycloneDX(targetPath, scanResult, options);
}
|