ship-safe 4.0.0 → 4.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -92,6 +92,7 @@ npx ship-safe audit .
92
92
  - `--html [file]` — custom HTML report path (default: `ship-safe-report.html`)
93
93
  - `--no-deps` — skip dependency audit
94
94
  - `--no-ai` — skip AI classification
95
+ - `--no-cache` — force full rescan (ignore cached results)
95
96
 
96
97
  ---
97
98
 
@@ -181,6 +182,54 @@ npx ship-safe mcp
181
182
 
182
183
  ---
183
184
 
185
+ ## Claude Code Plugin
186
+
187
+ Use Ship Safe directly inside Claude Code — no CLI needed:
188
+
189
+ ```bash
190
+ claude plugin add github:asamassekou10/ship-safe
191
+ ```
192
+
193
+ | Command | Description |
194
+ |---------|-------------|
195
+ | `/ship-safe` | Full security audit — 12 agents, remediation plan, auto-fix |
196
+ | `/ship-safe-scan` | Quick scan for leaked secrets |
197
+ | `/ship-safe-score` | Security health score (0-100) |
198
+
199
+ Claude interprets the results, explains findings in plain language, and can fix issues directly in your codebase.
200
+
201
+ ---
202
+
203
+ ## Incremental Scanning
204
+
205
+ Ship Safe caches file hashes and findings in `.ship-safe/context.json`. On subsequent runs, only changed files are re-scanned — unchanged files reuse cached results.
206
+
207
+ ```
208
+ ✔ [Phase 1/4] Secrets: 41 found (0 changed, 313 cached)
209
+ ```
210
+
211
+ - **~40% faster** on repeated scans
212
+ - **Auto-invalidation** — cache expires after 24 hours or when ship-safe updates
213
+ - **`--no-cache`** — force a full rescan anytime
214
+
215
+ The cache is stored in `.ship-safe/`, which is automatically excluded from scans.
216
+
217
+ ---
218
+
219
+ ## Smart `.gitignore` Handling
220
+
221
+ Ship Safe respects your `.gitignore` for build output, caches, and vendor directories — but **always scans security-sensitive files** even if gitignored:
222
+
223
+ | Skipped (gitignore respected) | Always scanned (gitignore overridden) |
224
+ |-------------------------------|---------------------------------------|
225
+ | `node_modules/`, `dist/`, `build/` | `.env`, `.env.local`, `.env.production` |
226
+ | `*.log`, `*.pkl`, vendor dirs | `*.pem`, `*.key`, `*.p12` |
227
+ | Cache directories, IDE files | `credentials.json`, `*.secret` |
228
+
229
+ Why? Files like `.env` are gitignored *because* they contain secrets — which is exactly what a security scanner should catch.
230
+
231
+ ---
232
+
184
233
  ## Multi-LLM Support
185
234
 
186
235
  Ship Safe supports multiple AI providers for classification:
@@ -18,7 +18,7 @@
18
18
  import fs from 'fs';
19
19
  import path from 'path';
20
20
  import fg from 'fast-glob';
21
- import { SKIP_DIRS, SKIP_EXTENSIONS, MAX_FILE_SIZE } from '../utils/patterns.js';
21
+ import { SKIP_DIRS, SKIP_EXTENSIONS, MAX_FILE_SIZE, loadGitignorePatterns } from '../utils/patterns.js';
22
22
 
23
23
  // =============================================================================
24
24
  // FINDING FACTORY
@@ -95,6 +95,10 @@ export class BaseAgent {
95
95
  async discoverFiles(rootPath, extraGlobs = ['**/*']) {
96
96
  const globIgnore = Array.from(SKIP_DIRS).map(dir => `**/${dir}/**`);
97
97
 
98
+ // Respect .gitignore patterns
99
+ const gitignoreGlobs = loadGitignorePatterns(rootPath);
100
+ globIgnore.push(...gitignoreGlobs);
101
+
98
102
  // Load .ship-safeignore patterns
99
103
  const ignorePatterns = this._loadIgnorePatterns(rootPath);
100
104
  for (const p of ignorePatterns) {
@@ -145,6 +149,15 @@ export class BaseAgent {
145
149
  }
146
150
  }
147
151
 
152
+ /**
153
+ * Get the files this agent should scan.
154
+ * If incremental scanning is active (changedFiles in context), returns only changed files.
155
+ * Otherwise returns all files. Agents that need the full file list can use context.files directly.
156
+ */
157
+ getFilesToScan(context) {
158
+ return context.changedFiles || context.files;
159
+ }
160
+
148
161
  /**
149
162
  * Read a file safely, returning null on failure.
150
163
  */
@@ -81,6 +81,11 @@ export class Orchestrator {
81
81
 
82
82
  // ── 4. Run each agent ─────────────────────────────────────────────────────
83
83
  const context = { rootPath: absolutePath, files, recon, options };
84
+
85
+ // Pass changedFiles for incremental scanning (agents can use this to scope analysis)
86
+ if (options.changedFiles) {
87
+ context.changedFiles = options.changedFiles;
88
+ }
84
89
  const agentResults = [];
85
90
  let allFindings = [];
86
91
 
@@ -82,6 +82,7 @@ program
82
82
  .option('--json', 'Output results as JSON (useful for CI)')
83
83
  .option('--sarif', 'Output results in SARIF format (for GitHub Code Scanning)')
84
84
  .option('--include-tests', 'Also scan test files (excluded by default to reduce false positives)')
85
+ .option('--no-cache', 'Force full rescan (ignore cached results)')
85
86
  .action(scanCommand);
86
87
 
87
88
  // -----------------------------------------------------------------------------
@@ -190,6 +191,7 @@ program
190
191
  .option('--html [file]', 'HTML report path (default: ship-safe-report.html)')
191
192
  .option('--no-deps', 'Skip dependency audit')
192
193
  .option('--no-ai', 'Skip AI classification')
194
+ .option('--no-cache', 'Force full rescan (ignore cached results)')
193
195
  .option('-v, --verbose', 'Verbose output')
194
196
  .action(auditCommand);
195
197
 
@@ -29,9 +29,11 @@ import {
29
29
  SECURITY_PATTERNS,
30
30
  SKIP_DIRS,
31
31
  SKIP_EXTENSIONS,
32
- MAX_FILE_SIZE
32
+ MAX_FILE_SIZE,
33
+ loadGitignorePatterns
33
34
  } from '../utils/patterns.js';
34
35
  import { isHighEntropyMatch, getConfidence } from '../utils/entropy.js';
36
+ import { CacheManager } from '../utils/cache-manager.js';
35
37
 
36
38
  // =============================================================================
37
39
  // CONSTANTS
@@ -84,16 +86,36 @@ export async function auditCommand(targetPath = '.', options = {}) {
84
86
  console.log();
85
87
  }
86
88
 
89
+ // ── Cache Layer ──────────────────────────────────────────────────────────
90
+ const useCache = options.cache !== false;
91
+ const cache = new CacheManager(absolutePath);
92
+ let cacheData = useCache ? cache.load() : null;
93
+ let cacheDiff = null;
94
+ let allFiles = [];
95
+
87
96
  // ── Phase 1: Secret Scan ──────────────────────────────────────────────────
88
97
  const secretSpinner = machineOutput ? null : ora({ text: chalk.white('[Phase 1/4] Scanning for secrets...'), color: 'cyan' }).start();
89
98
  let secretFindings = [];
90
99
  let filesScanned = 0;
91
100
 
92
101
  try {
93
- const files = await findFiles(absolutePath);
94
- filesScanned = files.length;
102
+ allFiles = await findFiles(absolutePath);
103
+ filesScanned = allFiles.length;
104
+
105
+ // Determine which files need scanning (incremental if cache exists)
106
+ let filesToScan = allFiles;
107
+ let cachedSecretFindings = [];
108
+
109
+ if (cacheData) {
110
+ cacheDiff = cache.diff(allFiles);
111
+ filesToScan = cacheDiff.changedFiles;
112
+ // Reuse cached findings for unchanged files (secrets only)
113
+ cachedSecretFindings = cacheDiff.cachedFindings.filter(
114
+ f => f.category === 'secrets' || f.category === 'secret'
115
+ );
116
+ }
95
117
 
96
- for (const file of files) {
118
+ for (const file of filesToScan) {
97
119
  const fileResults = scanFileForSecrets(file);
98
120
  for (const f of fileResults) {
99
121
  secretFindings.push({
@@ -112,10 +134,17 @@ export async function auditCommand(targetPath = '.', options = {}) {
112
134
  }
113
135
  }
114
136
 
137
+ // Merge with cached findings for unchanged files
138
+ secretFindings = [...secretFindings, ...cachedSecretFindings];
139
+
140
+ const cacheNote = cacheDiff && cacheDiff.changedFiles.length < allFiles.length
141
+ ? ` (${cacheDiff.changedFiles.length} changed, ${cacheDiff.unchangedCount} cached)`
142
+ : '';
143
+
115
144
  if (secretSpinner) secretSpinner.succeed(
116
145
  secretFindings.length === 0
117
- ? chalk.green('[Phase 1/4] Secrets: clean')
118
- : chalk.red(`[Phase 1/4] Secrets: ${secretFindings.length} found`)
146
+ ? chalk.green(`[Phase 1/4] Secrets: clean${cacheNote}`)
147
+ : chalk.red(`[Phase 1/4] Secrets: ${secretFindings.length} found${cacheNote}`)
119
148
  );
120
149
  } catch (err) {
121
150
  if (secretSpinner) secretSpinner.fail(chalk.red(`[Phase 1/4] Secret scan failed: ${err.message}`));
@@ -130,7 +159,12 @@ export async function auditCommand(targetPath = '.', options = {}) {
130
159
  try {
131
160
  const orchestrator = buildOrchestrator();
132
161
  // Suppress individual agent spinners by using quiet mode
133
- const results = await orchestrator.runAll(absolutePath, { quiet: true });
162
+ // Pass changedFiles for incremental scanning if cache is valid
163
+ const orchestratorOpts = { quiet: true };
164
+ if (cacheDiff && cacheDiff.changedFiles.length < allFiles.length) {
165
+ orchestratorOpts.changedFiles = cacheDiff.changedFiles;
166
+ }
167
+ const results = await orchestrator.runAll(absolutePath, orchestratorOpts);
134
168
  recon = results.recon;
135
169
  agentFindings = results.findings;
136
170
  agentResults = results.agentResults;
@@ -207,6 +241,21 @@ export async function auditCommand(targetPath = '.', options = {}) {
207
241
  }
208
242
  }
209
243
 
244
+ // ── Save Cache ──────────────────────────────────────────────────────────
245
+ if (useCache) {
246
+ try {
247
+ // Merge agent findings back for cache (secret + agent findings from changed files)
248
+ // plus cached findings from unchanged files
249
+ const cachedAgentFindings = cacheData && cacheDiff
250
+ ? cacheDiff.cachedFindings.filter(f => f.category !== 'secrets' && f.category !== 'secret')
251
+ : [];
252
+ const allFindingsForCache = [...secretFindings, ...agentFindings, ...cachedAgentFindings];
253
+ cache.save(allFiles, deduplicateFindings(allFindingsForCache), recon, scoreResult);
254
+ } catch {
255
+ // Silent — caching should never break a scan
256
+ }
257
+ }
258
+
210
259
  // ── Build Remediation Plan ────────────────────────────────────────────────
211
260
  const remediationPlan = buildRemediationPlan(filteredFindings, depVulns, absolutePath);
212
261
 
@@ -230,26 +279,28 @@ export async function auditCommand(targetPath = '.', options = {}) {
230
279
  console.log(chalk.cyan(` Full report: ${chalk.white.bold(htmlPath)}`));
231
280
  }
232
281
 
233
- // ── Policy Violations ────────────────────────────────────────────────────
234
- const violations = policy.evaluate(scoreResult, filteredFindings);
235
- if (violations.length > 0) {
236
- console.log();
237
- console.log(chalk.red.bold(' Policy Violations:'));
238
- for (const v of violations.slice(0, 5)) {
239
- console.log(chalk.red(` ✗ ${v.message}`));
282
+ if (!machineOutput) {
283
+ // ── Policy Violations ──────────────────────────────────────────────────
284
+ const violations = policy.evaluate(scoreResult, filteredFindings);
285
+ if (violations.length > 0) {
286
+ console.log();
287
+ console.log(chalk.red.bold(' Policy Violations:'));
288
+ for (const v of violations.slice(0, 5)) {
289
+ console.log(chalk.red(` ✗ ${v.message}`));
290
+ }
240
291
  }
241
- }
242
292
 
243
- // ── Trend ─────────────────────────────────────────────────────────────────
244
- const trend = scoringEngine.getTrend(absolutePath, scoreResult.score);
245
- if (trend) {
246
- const arrow = trend.diff > 0 ? chalk.green('↑') : trend.diff < 0 ? chalk.red('↓') : chalk.gray('→');
247
- console.log(chalk.gray(` Trend: ${trend.previousScore} → ${trend.currentScore} ${arrow} (${trend.diff > 0 ? '+' : ''}${trend.diff})`));
248
- }
293
+ // ── Trend ───────────────────────────────────────────────────────────────
294
+ const trend = scoringEngine.getTrend(absolutePath, scoreResult.score);
295
+ if (trend) {
296
+ const arrow = trend.diff > 0 ? chalk.green('↑') : trend.diff < 0 ? chalk.red('↓') : chalk.gray('→');
297
+ console.log(chalk.gray(` Trend: ${trend.previousScore} → ${trend.currentScore} ${arrow} (${trend.diff > 0 ? '+' : ''}${trend.diff})`));
298
+ }
249
299
 
250
- console.log();
251
- console.log(chalk.cyan('═'.repeat(60)));
252
- console.log();
300
+ console.log();
301
+ console.log(chalk.cyan('═'.repeat(60)));
302
+ console.log();
303
+ }
253
304
 
254
305
  process.exit(scoreResult.score >= 75 ? 0 : 1);
255
306
  }
@@ -495,6 +546,10 @@ function outputSARIF(findings, rootPath) {
495
546
  async function findFiles(rootPath) {
496
547
  const globIgnore = Array.from(SKIP_DIRS).map(dir => `**/${dir}/**`);
497
548
 
549
+ // Respect .gitignore patterns
550
+ const gitignoreGlobs = loadGitignorePatterns(rootPath);
551
+ globIgnore.push(...gitignoreGlobs);
552
+
498
553
  // Load .ship-safeignore
499
554
  const ignorePath = path.join(rootPath, '.ship-safeignore');
500
555
  if (fs.existsSync(ignorePath)) {
@@ -30,10 +30,12 @@ import {
30
30
  SKIP_DIRS,
31
31
  SKIP_EXTENSIONS,
32
32
  TEST_FILE_PATTERNS,
33
- MAX_FILE_SIZE
33
+ MAX_FILE_SIZE,
34
+ loadGitignorePatterns
34
35
  } from '../utils/patterns.js';
35
36
  import { isHighEntropyMatch, getConfidence } from '../utils/entropy.js';
36
37
  import * as output from '../utils/output.js';
38
+ import { CacheManager } from '../utils/cache-manager.js';
37
39
 
38
40
  // =============================================================================
39
41
  // CUSTOM PATTERNS (.ship-safe.json)
@@ -110,13 +112,49 @@ export async function scanCommand(targetPath = '.', options = {}) {
110
112
  try {
111
113
  // Find all files
112
114
  const files = await findFiles(absolutePath, ignorePatterns, options);
113
- spinner.text = `Scanning ${files.length} files...`;
115
+
116
+ // Cache: determine which files changed
117
+ const useCache = options.cache !== false;
118
+ const cache = new CacheManager(absolutePath);
119
+ const cacheData = useCache ? cache.load() : null;
120
+ let filesToScan = files;
121
+ let cacheDiff = null;
122
+ const cachedResults = [];
123
+
124
+ if (cacheData) {
125
+ cacheDiff = cache.diff(files);
126
+ filesToScan = cacheDiff.changedFiles;
127
+
128
+ // Group cached findings by file
129
+ const cachedByFile = {};
130
+ for (const f of cacheDiff.cachedFindings) {
131
+ if (!cachedByFile[f.file]) cachedByFile[f.file] = [];
132
+ cachedByFile[f.file].push({
133
+ line: f.line,
134
+ column: f.column,
135
+ matched: f.matched,
136
+ patternName: f.rule || f.title,
137
+ severity: f.severity,
138
+ confidence: f.confidence,
139
+ description: f.description,
140
+ category: f.category,
141
+ });
142
+ }
143
+ for (const [file, findings] of Object.entries(cachedByFile)) {
144
+ cachedResults.push({ file, findings });
145
+ }
146
+ }
147
+
148
+ const cacheNote = cacheDiff && filesToScan.length < files.length
149
+ ? ` (${filesToScan.length} changed, ${cacheDiff.unchangedCount} cached)`
150
+ : '';
151
+ spinner.text = `Scanning ${filesToScan.length} files${cacheNote}...`;
114
152
 
115
153
  // Scan each file
116
154
  const results = [];
117
155
  let scannedCount = 0;
118
156
 
119
- for (const file of files) {
157
+ for (const file of filesToScan) {
120
158
  const findings = await scanFile(file, allPatterns);
121
159
  if (findings.length > 0) {
122
160
  results.push({ file, findings });
@@ -124,7 +162,36 @@ export async function scanCommand(targetPath = '.', options = {}) {
124
162
 
125
163
  scannedCount++;
126
164
  if (options.verbose) {
127
- spinner.text = `Scanned ${scannedCount}/${files.length}: ${path.relative(absolutePath, file)}`;
165
+ spinner.text = `Scanned ${scannedCount}/${filesToScan.length}: ${path.relative(absolutePath, file)}`;
166
+ }
167
+ }
168
+
169
+ // Merge with cached results
170
+ const allResults = [...results, ...cachedResults];
171
+
172
+ // Save cache
173
+ if (useCache) {
174
+ try {
175
+ const allFindings = [];
176
+ for (const { file, findings } of allResults) {
177
+ for (const f of findings) {
178
+ allFindings.push({
179
+ file,
180
+ line: f.line,
181
+ column: f.column,
182
+ severity: f.severity,
183
+ category: f.category || 'secrets',
184
+ rule: f.patternName,
185
+ title: f.patternName,
186
+ description: f.description,
187
+ matched: f.matched,
188
+ confidence: f.confidence,
189
+ });
190
+ }
191
+ }
192
+ cache.save(files, allFindings, null, null);
193
+ } catch {
194
+ // Silent
128
195
  }
129
196
  }
130
197
 
@@ -132,15 +199,15 @@ export async function scanCommand(targetPath = '.', options = {}) {
132
199
 
133
200
  // Output results
134
201
  if (options.sarif) {
135
- outputSARIF(results, absolutePath);
202
+ outputSARIF(allResults, absolutePath);
136
203
  } else if (options.json) {
137
- outputJSON(results, files.length);
204
+ outputJSON(allResults, files.length);
138
205
  } else {
139
- outputPretty(results, files.length, absolutePath);
206
+ outputPretty(allResults, files.length, absolutePath);
140
207
  }
141
208
 
142
209
  // Exit with appropriate code
143
- const hasFindings = results.length > 0;
210
+ const hasFindings = allResults.length > 0;
144
211
  process.exit(hasFindings ? 1 : 0);
145
212
 
146
213
  } catch (err) {
@@ -204,6 +271,10 @@ async function findFiles(rootPath, ignorePatterns, options = {}) {
204
271
  // Build ignore patterns from SKIP_DIRS
205
272
  const globIgnore = Array.from(SKIP_DIRS).map(dir => `**/${dir}/**`);
206
273
 
274
+ // Respect .gitignore patterns
275
+ const gitignoreGlobs = loadGitignorePatterns(rootPath);
276
+ globIgnore.push(...gitignoreGlobs);
277
+
207
278
  // Find all files
208
279
  const files = await fg('**/*', {
209
280
  cwd: rootPath,
package/cli/index.js CHANGED
@@ -46,5 +46,8 @@ export { SBOMGenerator } from './agents/sbom-generator.js';
46
46
  export { PolicyEngine } from './agents/policy-engine.js';
47
47
  export { HTMLReporter } from './agents/html-reporter.js';
48
48
 
49
+ // ── Caching ──────────────────────────────────────────────────────────────────
50
+ export { CacheManager } from './utils/cache-manager.js';
51
+
49
52
  // ── LLM Providers ─────────────────────────────────────────────────────────────
50
53
  export { createProvider, autoDetectProvider } from './providers/llm-provider.js';
@@ -0,0 +1,258 @@
1
+ /**
2
+ * Cache Manager
3
+ * =============
4
+ *
5
+ * Provides incremental scanning by caching file hashes and findings.
6
+ * On subsequent runs, only changed files are re-scanned.
7
+ *
8
+ * Cache location: .ship-safe/context.json
9
+ *
10
+ * USAGE:
11
+ * import { CacheManager } from './cache-manager.js';
12
+ * const cache = new CacheManager(rootPath);
13
+ * const { changedFiles, cachedFindings } = await cache.getChangedFiles(currentFiles);
14
+ * // ... scan only changedFiles ...
15
+ * cache.save(allFiles, allFindings, recon, scoreResult);
16
+ */
17
+
18
+ import fs from 'fs';
19
+ import path from 'path';
20
+ import crypto from 'crypto';
21
+ import { readFileSync } from 'fs';
22
+ import { fileURLToPath } from 'url';
23
+ import { dirname, join } from 'path';
24
+
25
+ // Read version from package.json
26
+ const __filename = fileURLToPath(import.meta.url);
27
+ const __dirname = dirname(__filename);
28
+ const PACKAGE_VERSION = JSON.parse(readFileSync(join(__dirname, '../../package.json'), 'utf8')).version;
29
+
30
+ // Cache TTL: 24 hours
31
+ const CACHE_TTL_MS = 24 * 60 * 60 * 1000;
32
+
33
+ export class CacheManager {
34
+ /**
35
+ * @param {string} rootPath — Absolute path to project root
36
+ */
37
+ constructor(rootPath) {
38
+ this.rootPath = rootPath;
39
+ this.cacheDir = path.join(rootPath, '.ship-safe');
40
+ this.cachePath = path.join(this.cacheDir, 'context.json');
41
+ this.cache = null;
42
+ }
43
+
44
+ /**
45
+ * Load the cache from disk. Returns null if cache is missing, expired, or invalid.
46
+ */
47
+ load() {
48
+ try {
49
+ if (!fs.existsSync(this.cachePath)) return null;
50
+
51
+ const raw = fs.readFileSync(this.cachePath, 'utf-8');
52
+ const cache = JSON.parse(raw);
53
+
54
+ // Version mismatch — patterns may have changed
55
+ if (cache.version !== PACKAGE_VERSION) return null;
56
+
57
+ // TTL expired
58
+ const age = Date.now() - new Date(cache.generatedAt).getTime();
59
+ if (age > CACHE_TTL_MS) return null;
60
+
61
+ this.cache = cache;
62
+ return cache;
63
+ } catch {
64
+ return null;
65
+ }
66
+ }
67
+
68
+ /**
69
+ * Compute SHA-256 hash of a file's contents.
70
+ */
71
+ hashFile(filePath) {
72
+ try {
73
+ const content = fs.readFileSync(filePath);
74
+ return crypto.createHash('sha256').update(content).digest('hex');
75
+ } catch {
76
+ return null;
77
+ }
78
+ }
79
+
80
+ /**
81
+ * Compare current files against cached file index to find what changed.
82
+ *
83
+ * @param {string[]} currentFiles — Array of absolute file paths
84
+ * @returns {{ changedFiles: string[], cachedFindings: object[], unchangedCount: number, newCount: number, modifiedCount: number, deletedCount: number }}
85
+ */
86
+ diff(currentFiles) {
87
+ if (!this.cache || !this.cache.fileIndex) {
88
+ return {
89
+ changedFiles: currentFiles,
90
+ cachedFindings: [],
91
+ unchangedCount: 0,
92
+ newCount: currentFiles.length,
93
+ modifiedCount: 0,
94
+ deletedCount: 0,
95
+ };
96
+ }
97
+
98
+ const cachedIndex = this.cache.fileIndex;
99
+ const cachedFindings = this.cache.lastFindings || {};
100
+ const changedFiles = [];
101
+ const reusedFindings = [];
102
+ let unchangedCount = 0;
103
+ let newCount = 0;
104
+ let modifiedCount = 0;
105
+
106
+ const currentSet = new Set(currentFiles);
107
+
108
+ for (const file of currentFiles) {
109
+ const relPath = path.relative(this.rootPath, file).replace(/\\/g, '/');
110
+ const cached = cachedIndex[relPath];
111
+
112
+ if (!cached) {
113
+ // New file — needs scanning
114
+ changedFiles.push(file);
115
+ newCount++;
116
+ continue;
117
+ }
118
+
119
+ // Quick size check before expensive hash
120
+ try {
121
+ const stats = fs.statSync(file);
122
+ if (stats.size !== cached.size) {
123
+ changedFiles.push(file);
124
+ modifiedCount++;
125
+ continue;
126
+ }
127
+ } catch {
128
+ changedFiles.push(file);
129
+ modifiedCount++;
130
+ continue;
131
+ }
132
+
133
+ // Hash check
134
+ const currentHash = this.hashFile(file);
135
+ if (currentHash !== cached.hash) {
136
+ changedFiles.push(file);
137
+ modifiedCount++;
138
+ continue;
139
+ }
140
+
141
+ // File unchanged — reuse cached findings
142
+ unchangedCount++;
143
+ if (cachedFindings[relPath]) {
144
+ // Restore absolute paths for cached findings
145
+ for (const finding of cachedFindings[relPath]) {
146
+ reusedFindings.push({ ...finding, file });
147
+ }
148
+ }
149
+ }
150
+
151
+ // Count deleted files (in cache but not in current)
152
+ const currentRelPaths = new Set(
153
+ currentFiles.map(f => path.relative(this.rootPath, f).replace(/\\/g, '/'))
154
+ );
155
+ const deletedCount = Object.keys(cachedIndex).filter(p => !currentRelPaths.has(p)).length;
156
+
157
+ return {
158
+ changedFiles,
159
+ cachedFindings: reusedFindings,
160
+ unchangedCount,
161
+ newCount,
162
+ modifiedCount,
163
+ deletedCount,
164
+ };
165
+ }
166
+
167
+ /**
168
+ * Save the cache to disk.
169
+ *
170
+ * @param {string[]} allFiles — All scanned file paths
171
+ * @param {object[]} allFindings — All findings from the scan
172
+ * @param {object} recon — ReconAgent output
173
+ * @param {object} [scoreResult] — Optional score result
174
+ */
175
+ save(allFiles, allFindings, recon, scoreResult) {
176
+ try {
177
+ // Ensure .ship-safe directory exists
178
+ if (!fs.existsSync(this.cacheDir)) {
179
+ fs.mkdirSync(this.cacheDir, { recursive: true });
180
+ }
181
+
182
+ // Build file index with hashes
183
+ const fileIndex = {};
184
+ for (const file of allFiles) {
185
+ const relPath = path.relative(this.rootPath, file).replace(/\\/g, '/');
186
+ const hash = this.hashFile(file);
187
+ if (hash) {
188
+ try {
189
+ const stats = fs.statSync(file);
190
+ fileIndex[relPath] = {
191
+ hash,
192
+ size: stats.size,
193
+ lastScanned: new Date().toISOString(),
194
+ };
195
+ } catch {
196
+ // Skip files we can't stat
197
+ }
198
+ }
199
+ }
200
+
201
+ // Group findings by file (relative paths)
202
+ const lastFindings = {};
203
+ for (const f of allFindings) {
204
+ const relPath = path.relative(this.rootPath, f.file).replace(/\\/g, '/');
205
+ if (!lastFindings[relPath]) lastFindings[relPath] = [];
206
+ // Store a lightweight copy (no absolute paths)
207
+ lastFindings[relPath].push({
208
+ line: f.line,
209
+ column: f.column,
210
+ severity: f.severity,
211
+ category: f.category,
212
+ rule: f.rule,
213
+ title: f.title,
214
+ description: f.description,
215
+ matched: f.matched,
216
+ confidence: f.confidence,
217
+ cwe: f.cwe,
218
+ owasp: f.owasp,
219
+ fix: f.fix,
220
+ });
221
+ }
222
+
223
+ const cache = {
224
+ version: PACKAGE_VERSION,
225
+ generatedAt: new Date().toISOString(),
226
+ rootPath: this.rootPath,
227
+ recon: recon || null,
228
+ fileIndex,
229
+ lastFindings,
230
+ stats: {
231
+ totalFiles: allFiles.length,
232
+ totalFindings: allFindings.length,
233
+ lastScore: scoreResult?.score ?? null,
234
+ lastGrade: scoreResult?.grade?.letter ?? null,
235
+ },
236
+ };
237
+
238
+ fs.writeFileSync(this.cachePath, JSON.stringify(cache, null, 2));
239
+ } catch {
240
+ // Silent failure — caching should never break a scan
241
+ }
242
+ }
243
+
244
+ /**
245
+ * Delete the cache file.
246
+ */
247
+ invalidate() {
248
+ try {
249
+ if (fs.existsSync(this.cachePath)) {
250
+ fs.unlinkSync(this.cachePath);
251
+ }
252
+ } catch {
253
+ // Silent
254
+ }
255
+ }
256
+ }
257
+
258
+ export default CacheManager;
@@ -1,3 +1,6 @@
1
+ import fs from 'fs';
2
+ import path from 'path';
3
+
1
4
  /**
2
5
  * Secret Detection Patterns
3
6
  * =========================
@@ -783,6 +786,7 @@ export const SKIP_DIRS = new Set([
783
786
  '.expo',
784
787
  '.docusaurus',
785
788
  '.storybook',
789
+ '.ship-safe',
786
790
  ]);
787
791
 
788
792
  export const SKIP_EXTENSIONS = new Set([
@@ -809,6 +813,97 @@ export const SKIP_EXTENSIONS = new Set([
809
813
  // Maximum file size to scan (1MB)
810
814
  export const MAX_FILE_SIZE = 1_000_000;
811
815
 
816
+ // =============================================================================
817
+ // .GITIGNORE LOADING
818
+ // =============================================================================
819
+
820
+ // Gitignore patterns that should NEVER be skipped by a security scanner.
821
+ // These files are gitignored precisely because they contain secrets or
822
+ // sensitive config — which is exactly what we want to detect.
823
+ const SECURITY_SENSITIVE_PATTERNS = new Set([
824
+ '.env',
825
+ '.env.local',
826
+ '.env.development',
827
+ '.env.development.local',
828
+ '.env.test',
829
+ '.env.test.local',
830
+ '.env.production',
831
+ '.env.production.local',
832
+ '.env.staging',
833
+ '*.pem',
834
+ '*.key',
835
+ '*.p12',
836
+ '*.pfx',
837
+ '*.jks',
838
+ '*.keystore',
839
+ '*.crt',
840
+ '*.cer',
841
+ 'credentials.json',
842
+ 'service-account.json',
843
+ 'serviceAccountKey.json',
844
+ '*.secret',
845
+ 'htpasswd',
846
+ '.htpasswd',
847
+ 'id_rsa',
848
+ 'id_ed25519',
849
+ '*.sqlite',
850
+ '*.db',
851
+ ]);
852
+
853
+ /**
854
+ * Load patterns from .gitignore file in the project root.
855
+ * Returns an array of glob-compatible ignore patterns.
856
+ *
857
+ * Smart filtering: skips gitignored build output, caches, and vendor dirs,
858
+ * but ALWAYS scans security-sensitive files (.env, *.key, *.pem, etc.)
859
+ * even if they appear in .gitignore.
860
+ */
861
+ export function loadGitignorePatterns(rootPath) {
862
+ const gitignorePath = path.join(rootPath, '.gitignore');
863
+ try {
864
+ if (!fs.existsSync(gitignorePath)) return [];
865
+ return fs.readFileSync(gitignorePath, 'utf-8')
866
+ .split('\n')
867
+ .map(l => l.trim())
868
+ .filter(l => l && !l.startsWith('#') && !l.startsWith('!'))
869
+ .filter(p => !isSecuritySensitive(p))
870
+ .map(p => {
871
+ // Convert .gitignore patterns to fast-glob ignore patterns
872
+ if (p.startsWith('/')) {
873
+ // Rooted pattern: /build → build/**
874
+ return p.slice(1) + (p.endsWith('/') ? '**' : '');
875
+ }
876
+ if (p.endsWith('/')) {
877
+ // Directory pattern: logs/ → **/logs/**
878
+ return `**/${p}**`;
879
+ }
880
+ // General pattern: *.log → **/*.log, dist → **/dist, **/dist/**
881
+ if (!p.includes('/') && !p.includes('*')) {
882
+ return [`**/${p}`, `**/${p}/**`];
883
+ }
884
+ return `**/${p}`;
885
+ })
886
+ .flat();
887
+ } catch {
888
+ return [];
889
+ }
890
+ }
891
+
892
+ /**
893
+ * Check if a .gitignore pattern targets security-sensitive files.
894
+ * These should always be scanned regardless of .gitignore.
895
+ */
896
+ function isSecuritySensitive(pattern) {
897
+ const cleaned = pattern.replace(/^\//, '').replace(/\/$/, '');
898
+ if (SECURITY_SENSITIVE_PATTERNS.has(cleaned)) return true;
899
+ // Check wildcard patterns like *.pem, *.key
900
+ for (const sensitive of SECURITY_SENSITIVE_PATTERNS) {
901
+ if (sensitive.startsWith('*') && cleaned.endsWith(sensitive.slice(1))) return true;
902
+ if (cleaned === sensitive || cleaned.endsWith('/' + sensitive)) return true;
903
+ }
904
+ return false;
905
+ }
906
+
812
907
  // =============================================================================
813
908
  // SECURITY VULNERABILITY PATTERNS
814
909
  // =============================================================================
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ship-safe",
3
- "version": "4.0.0",
3
+ "version": "4.1.0",
4
4
  "description": "AI-powered multi-agent security platform. 12 agents scan 50+ attack classes. Red team your code before attackers do.",
5
5
  "main": "cli/index.js",
6
6
  "bin": {