@nebzdev/bun-security-scanner 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +238 -0
- package/package.json +37 -0
- package/src/cache.ts +48 -0
- package/src/client.ts +72 -0
- package/src/config.ts +11 -0
- package/src/display.ts +24 -0
- package/src/index.ts +21 -0
- package/src/osv.ts +61 -0
- package/src/scanner.ts +81 -0
- package/src/severity.ts +22 -0
- package/src/snyk/client.ts +137 -0
- package/src/snyk/config.ts +15 -0
- package/src/snyk/index.ts +38 -0
- package/src/snyk/severity.ts +10 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Muneeb Samuels
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
# bun-security-scanner
|
|
2
|
+
|
|
3
|
+
[](https://badge.fury.io/js/%40nebzdev%2Fbun-security-scanner)
|
|
4
|
+
[](https://opensource.org/licenses/MIT)
|
|
5
|
+
[](https://osv.dev)
|
|
6
|
+
[](https://snyk.io)
|
|
7
|
+
|
|
8
|
+
A [Bun security scanner](https://bun.com/docs/pm/security-scanner-api) that checks your dependencies against vulnerability databases before they get installed. Uses [Google's OSV database](https://osv.dev) by default โ no API keys required.
|
|
9
|
+
|
|
10
|
+
- ๐ **Automatic scanning**: runs transparently on every `bun install`
|
|
11
|
+
- โก **Fast**: 24-hour per-package lockfile cache means repeat installs skip the network entirely
|
|
12
|
+
- ๐ **Two backends**: OSV (free, no setup) or Snyk (commercial, broader coverage)
|
|
13
|
+
- ๐ **Fail-open by default**: a downed API never blocks your install
|
|
14
|
+
- ๐ฏ **CVSS fallback**: uses score-based severity when a label isn't available
|
|
15
|
+
- ๐ ๏ธ **Configurable**: tune behaviour via environment variables
|
|
16
|
+
|
|
17
|
+
---
|
|
18
|
+
|
|
19
|
+
## ๐ฆ Installation
|
|
20
|
+
|
|
21
|
+
```sh
|
|
22
|
+
bun add -d @nebzdev/bun-security-scanner
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
Then register it in your project's `bunfig.toml`:
|
|
26
|
+
|
|
27
|
+
```toml
|
|
28
|
+
[install.security]
|
|
29
|
+
scanner = "@nebzdev/bun-security-scanner"
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
That's it. The scanner runs automatically on the next `bun install`.
|
|
33
|
+
|
|
34
|
+
### Local development
|
|
35
|
+
|
|
36
|
+
Point `bunfig.toml` directly at the entry file using an absolute or relative path:
|
|
37
|
+
|
|
38
|
+
```toml
|
|
39
|
+
[install.security]
|
|
40
|
+
scanner = "../bun-osv-scanner/src/index.ts"
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
---
|
|
44
|
+
|
|
45
|
+
## ๐ Backends
|
|
46
|
+
|
|
47
|
+
The scanner ships with two backends, controlled by the `SCANNER_BACKEND` environment variable.
|
|
48
|
+
|
|
49
|
+
### OSV (default)
|
|
50
|
+
|
|
51
|
+
Queries [Google's OSV database](https://osv.dev) โ free, no credentials required.
|
|
52
|
+
|
|
53
|
+
```toml
|
|
54
|
+
[install.security]
|
|
55
|
+
scanner = "@nebzdev/bun-security-scanner"
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
### Snyk
|
|
59
|
+
|
|
60
|
+
Queries [Snyk's vulnerability database](https://security.snyk.io) โ commercial, often surfaces issues earlier. Requires a Snyk account.
|
|
61
|
+
|
|
62
|
+
```toml
|
|
63
|
+
# bunfig.toml
|
|
64
|
+
[install.security]
|
|
65
|
+
scanner = "@nebzdev/bun-security-scanner"
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
```sh
|
|
69
|
+
# .env
|
|
70
|
+
SCANNER_BACKEND=snyk
|
|
71
|
+
SNYK_TOKEN=your-token
|
|
72
|
+
SNYK_ORG_ID=your-org-id
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
---
|
|
76
|
+
|
|
77
|
+
## ๐ก๏ธ How it works
|
|
78
|
+
|
|
79
|
+
When `bun install` runs, Bun calls the scanner with the full list of packages to be installed. The scanner:
|
|
80
|
+
|
|
81
|
+
1. **Filters** non-resolvable versions โ workspace, git, file, and path dependencies are skipped
|
|
82
|
+
2. **Checks the cache** โ packages seen within the last 24 hours skip the network entirely
|
|
83
|
+
3. **Queries the backend** for any uncached packages
|
|
84
|
+
4. **Returns advisories** to Bun, which surfaces them as warnings or fatal errors
|
|
85
|
+
|
|
86
|
+
---
|
|
87
|
+
|
|
88
|
+
## โ ๏ธ Advisory levels
|
|
89
|
+
|
|
90
|
+
| Level | Trigger | Bun behaviour |
|
|
91
|
+
|-------|---------|---------------|
|
|
92
|
+
| `fatal` | CRITICAL or HIGH severity; or CVSS score โฅ 7.0 | Installation halts |
|
|
93
|
+
| `warn` | MODERATE or LOW severity; or CVSS score < 7.0 | User is prompted; auto-cancelled in CI |
|
|
94
|
+
|
|
95
|
+
---
|
|
96
|
+
|
|
97
|
+
## โ๏ธ Configuration
|
|
98
|
+
|
|
99
|
+
All options are set via environment variables โ in your shell, or in a `.env` file at the project root (Bun loads it automatically).
|
|
100
|
+
|
|
101
|
+
### Shared
|
|
102
|
+
|
|
103
|
+
| Variable | Default | Description |
|
|
104
|
+
|----------|---------|-------------|
|
|
105
|
+
| `SCANNER_BACKEND` | `osv` | Backend to use: `osv` or `snyk` |
|
|
106
|
+
|
|
107
|
+
### OSV backend
|
|
108
|
+
|
|
109
|
+
| Variable | Default | Description |
|
|
110
|
+
|----------|---------|-------------|
|
|
111
|
+
| `OSV_FAIL_CLOSED` | `false` | Throw on network error instead of failing open |
|
|
112
|
+
| `OSV_NO_CACHE` | `false` | Always query OSV fresh, bypassing the local cache |
|
|
113
|
+
| `OSV_CACHE_FILE` | `.osv.lock` | Path to the cache file |
|
|
114
|
+
| `OSV_TIMEOUT_MS` | `10000` | Per-request timeout in milliseconds |
|
|
115
|
+
| `OSV_API_BASE` | `https://api.osv.dev/v1` | OSV API base URL |
|
|
116
|
+
|
|
117
|
+
### Snyk backend
|
|
118
|
+
|
|
119
|
+
| Variable | Default | Description |
|
|
120
|
+
|----------|---------|-------------|
|
|
121
|
+
| `SNYK_TOKEN` | โ | **Required.** Snyk API token |
|
|
122
|
+
| `SNYK_ORG_ID` | โ | **Required.** Snyk organization ID |
|
|
123
|
+
| `SNYK_FAIL_CLOSED` | `false` | Throw on network error instead of failing open |
|
|
124
|
+
| `SNYK_NO_CACHE` | `false` | Always query Snyk fresh, bypassing the local cache |
|
|
125
|
+
| `SNYK_CACHE_FILE` | `.snyk.lock` | Path to the cache file |
|
|
126
|
+
| `SNYK_TIMEOUT_MS` | `10000` | Per-request timeout in milliseconds |
|
|
127
|
+
| `SNYK_RATE_LIMIT` | `160` | Max requests per minute (hard cap: 180) |
|
|
128
|
+
| `SNYK_CONCURRENCY` | `10` | Max concurrent connections |
|
|
129
|
+
| `SNYK_API_BASE` | `https://api.snyk.io/rest` | Regional endpoint override |
|
|
130
|
+
| `SNYK_API_VERSION` | `2024-04-29` | Snyk REST API version date |
|
|
131
|
+
|
|
132
|
+
### Fail-open vs fail-closed
|
|
133
|
+
|
|
134
|
+
By default the scanner **fails open**: if the backend is unreachable the scan is skipped and installation proceeds normally. Set `OSV_FAIL_CLOSED=true` or `SNYK_FAIL_CLOSED=true` to invert this.
|
|
135
|
+
|
|
136
|
+
```sh
|
|
137
|
+
# .env โ strict mode
|
|
138
|
+
OSV_FAIL_CLOSED=true
|
|
139
|
+
```
|
|
140
|
+
|
|
141
|
+
---
|
|
142
|
+
|
|
143
|
+
## ๐๏ธ Cache
|
|
144
|
+
|
|
145
|
+
Results are cached per `package@version` in a lock file at the project root with a 24-hour TTL. Because a published package version is immutable, its vulnerability profile is stable within that window.
|
|
146
|
+
|
|
147
|
+
| Backend | Lock file |
|
|
148
|
+
|---------|-----------|
|
|
149
|
+
| OSV | `.osv.lock` |
|
|
150
|
+
| Snyk | `.snyk.lock` |
|
|
151
|
+
|
|
152
|
+
The files are designed to be **committed to git** โ similar to a lockfile, committing them means your team and CI share the cache from day one without waiting for a warm-up scan.
|
|
153
|
+
|
|
154
|
+
```sh
|
|
155
|
+
git add .osv.lock # or .snyk.lock
|
|
156
|
+
```
|
|
157
|
+
|
|
158
|
+
To force a fresh scan:
|
|
159
|
+
|
|
160
|
+
```sh
|
|
161
|
+
OSV_NO_CACHE=true bun install
|
|
162
|
+
# or
|
|
163
|
+
SNYK_NO_CACHE=true bun install
|
|
164
|
+
```
|
|
165
|
+
|
|
166
|
+
---
|
|
167
|
+
|
|
168
|
+
## ๐ ๏ธ Development
|
|
169
|
+
|
|
170
|
+
### Setup
|
|
171
|
+
|
|
172
|
+
```sh
|
|
173
|
+
git clone https://github.com/muneebs/bun-osv-scanner.git
|
|
174
|
+
cd bun-osv-scanner
|
|
175
|
+
bun install
|
|
176
|
+
```
|
|
177
|
+
|
|
178
|
+
### Commands
|
|
179
|
+
|
|
180
|
+
```sh
|
|
181
|
+
bun test # Run all tests
|
|
182
|
+
bun run lint # Lint source files
|
|
183
|
+
bun run format # Check formatting
|
|
184
|
+
bun run format:write # Auto-fix formatting
|
|
185
|
+
bun run check # Lint + format check together
|
|
186
|
+
bun run check:write # Lint + format, auto-fix what it can
|
|
187
|
+
```
|
|
188
|
+
|
|
189
|
+
### Project structure
|
|
190
|
+
|
|
191
|
+
```
|
|
192
|
+
bun-osv-scanner/
|
|
193
|
+
├── src/
|
|
194
|
+
│   ├── __tests__/ # Test suite (bun:test)
|
|
195
|
+
│   ├── snyk/ # Snyk backend
|
|
196
|
+
│   ├── cache.ts # 24h lockfile cache
|
|
197
|
+
│   ├── client.ts # OSV API client
|
|
198
|
+
│   ├── config.ts # OSV constants and env vars
|
|
199
|
+
│   ├── display.ts # TTY progress spinner
|
|
200
|
+
│   ├── index.ts # Entry point — dispatches to OSV or Snyk
|
|
201
|
+
│   ├── osv.ts # OSV scanner implementation
|
|
202
|
+
│   └── severity.ts # OSV level classification
|
|
203
|
+
├── bunfig.toml
|
|
204
|
+
└── package.json
|
|
205
|
+
```
|
|
206
|
+
|
|
207
|
+
### Backend comparison
|
|
208
|
+
|
|
209
|
+
| | OSV | Snyk |
|
|
210
|
+
|---|---|---|
|
|
211
|
+
| API key required | No | Yes |
|
|
212
|
+
| Batch endpoint | Yes (1000/req) | No (per-package, 180 req/min) |
|
|
213
|
+
| Coverage | Community feeds + GitHub Advisory | Snyk's proprietary database |
|
|
214
|
+
| Cache file | `.osv.lock` | `.snyk.lock` |
|
|
215
|
+
|
|
216
|
+
---
|
|
217
|
+
|
|
218
|
+
## โ ๏ธ Limitations
|
|
219
|
+
|
|
220
|
+
- Only scans npm packages with concrete semver versions. `workspace:`, `file:`, `git:`, and range-only specifiers are skipped.
|
|
221
|
+
- OSV aggregates GitHub Advisory, NVD, and other feeds โ coverage may lag slightly behind a vulnerability's public disclosure.
|
|
222
|
+
- The OSV batch API has a hard limit of 1,000 queries per request. Larger projects are split across multiple requests automatically.
|
|
223
|
+
- Snyk's per-package endpoint is rate-limited to 180 req/min. At that rate, a project with 2,000+ packages will take several minutes on the first scan.
|
|
224
|
+
|
|
225
|
+
---
|
|
226
|
+
|
|
227
|
+
## ๐ License
|
|
228
|
+
|
|
229
|
+
MIT ยฉ [Muneeb Samuels](https://github.com/muneebs)
|
|
230
|
+
|
|
231
|
+
---
|
|
232
|
+
|
|
233
|
+
## ๐ Links
|
|
234
|
+
|
|
235
|
+
- [๐ฆ npm](https://www.npmjs.com/package/@nebzdev/bun-security-scanner)
|
|
236
|
+
- [๐ Issue tracker](https://github.com/muneebs/bun-osv-scanner/issues)
|
|
237
|
+
- [๐ OSV database](https://osv.dev)
|
|
238
|
+
- [๐ Bun security scanner docs](https://bun.com/docs/pm/security-scanner-api)
|
package/package.json
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@nebzdev/bun-security-scanner",
|
|
3
|
+
"description": "Bun security scanner powered by Google's OSV vulnerability database",
|
|
4
|
+
"version": "1.0.1",
|
|
5
|
+
"author": "Muneeb Samuels",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"exports": {
|
|
8
|
+
"./package.json": "./package.json",
|
|
9
|
+
".": "./src/index.ts"
|
|
10
|
+
},
|
|
11
|
+
"files": [
|
|
12
|
+
"src",
|
|
13
|
+
"!src/__tests__",
|
|
14
|
+
"README.md"
|
|
15
|
+
],
|
|
16
|
+
"keywords": [
|
|
17
|
+
"bun",
|
|
18
|
+
"security",
|
|
19
|
+
"osv",
|
|
20
|
+
"snyk",
|
|
21
|
+
"scanner",
|
|
22
|
+
"vulnerability"
|
|
23
|
+
],
|
|
24
|
+
"scripts": {
|
|
25
|
+
"publish:npm": "npm publish --access public",
|
|
26
|
+
"lint": "biome lint ./src",
|
|
27
|
+
"format": "biome format ./src",
|
|
28
|
+
"format:write": "biome format --write ./src",
|
|
29
|
+
"check": "biome check ./src",
|
|
30
|
+
"check:write": "biome check --write ./src"
|
|
31
|
+
},
|
|
32
|
+
"devDependencies": {
|
|
33
|
+
"@biomejs/biome": "2.4.10",
|
|
34
|
+
"@types/bun": "latest",
|
|
35
|
+
"typescript": "^5.0.0"
|
|
36
|
+
}
|
|
37
|
+
}
|
package/src/cache.ts
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import { rename } from 'node:fs/promises';
|
|
2
|
+
|
|
3
|
+
const CACHE_TTL_MS = 24 * 60 * 60 * 1000;
|
|
4
|
+
|
|
5
|
+
export interface CacheEntry {
|
|
6
|
+
advisories: Bun.Security.Advisory[];
|
|
7
|
+
cachedAt: number;
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
type Cache = Record<string, CacheEntry>;
|
|
11
|
+
|
|
12
|
+
function isValidCache(data: unknown): data is Cache {
|
|
13
|
+
if (typeof data !== 'object' || data === null || Array.isArray(data))
|
|
14
|
+
return false;
|
|
15
|
+
return Object.values(data).every(
|
|
16
|
+
(entry) =>
|
|
17
|
+
typeof entry === 'object' &&
|
|
18
|
+
entry !== null &&
|
|
19
|
+
Array.isArray((entry as CacheEntry).advisories) &&
|
|
20
|
+
typeof (entry as CacheEntry).cachedAt === 'number'
|
|
21
|
+
);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
export async function readCache(cacheFile: string): Promise<Cache> {
|
|
25
|
+
try {
|
|
26
|
+
const data: unknown = JSON.parse(await Bun.file(cacheFile).text());
|
|
27
|
+
return isValidCache(data) ? data : {};
|
|
28
|
+
} catch {
|
|
29
|
+
return {};
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
export async function writeCache(
|
|
34
|
+
cache: Cache,
|
|
35
|
+
cacheFile: string
|
|
36
|
+
): Promise<void> {
|
|
37
|
+
try {
|
|
38
|
+
// Write to a temp file first, then rename โ prevents partial-write corruption
|
|
39
|
+
// if the process is killed or two installs run concurrently.
|
|
40
|
+
const tmp = `${cacheFile}.tmp`;
|
|
41
|
+
await Bun.write(tmp, JSON.stringify(cache, null, 2));
|
|
42
|
+
await rename(tmp, cacheFile);
|
|
43
|
+
} catch {}
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
export function isFresh(entry: CacheEntry): boolean {
|
|
47
|
+
return Date.now() - entry.cachedAt < CACHE_TTL_MS;
|
|
48
|
+
}
|
package/src/client.ts
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import { FETCH_TIMEOUT_MS, OSV_API_BASE, OSV_BATCH_SIZE } from './config';
|
|
2
|
+
|
|
3
|
+
export interface OsvBatchResponse {
|
|
4
|
+
results: Array<{
|
|
5
|
+
vulns?: Array<{ id: string; modified: string }>;
|
|
6
|
+
next_page_token?: string;
|
|
7
|
+
}>;
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
export interface OsvVulnerability {
|
|
11
|
+
id: string;
|
|
12
|
+
summary?: string;
|
|
13
|
+
references?: Array<{ type: string; url: string }>;
|
|
14
|
+
severity?: Array<{ type: string; score: string }>;
|
|
15
|
+
database_specific?: {
|
|
16
|
+
severity?: string;
|
|
17
|
+
cvss?: { score?: number };
|
|
18
|
+
[key: string]: unknown;
|
|
19
|
+
};
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
function fetchWithTimeout(
|
|
23
|
+
url: string,
|
|
24
|
+
options?: RequestInit
|
|
25
|
+
): Promise<Response> {
|
|
26
|
+
const controller = new AbortController();
|
|
27
|
+
const timer = setTimeout(() => controller.abort(), FETCH_TIMEOUT_MS);
|
|
28
|
+
return fetch(url, { ...options, signal: controller.signal }).finally(() =>
|
|
29
|
+
clearTimeout(timer)
|
|
30
|
+
);
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
// workspace:, file:, git:, and range specifiers cause a 400 for the whole batch.
|
|
34
|
+
export function isResolvable(version: string): boolean {
|
|
35
|
+
return /^v?\d+\.\d+/.test(version);
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
export async function batchQuery(
|
|
39
|
+
packages: Bun.Security.Package[]
|
|
40
|
+
): Promise<OsvBatchResponse['results']> {
|
|
41
|
+
const results: OsvBatchResponse['results'] = [];
|
|
42
|
+
|
|
43
|
+
for (let i = 0; i < packages.length; i += OSV_BATCH_SIZE) {
|
|
44
|
+
const chunk = packages.slice(i, i + OSV_BATCH_SIZE);
|
|
45
|
+
const res = await fetchWithTimeout(`${OSV_API_BASE}/querybatch`, {
|
|
46
|
+
method: 'POST',
|
|
47
|
+
headers: { 'Content-Type': 'application/json' },
|
|
48
|
+
body: JSON.stringify({
|
|
49
|
+
queries: chunk.map((p) => ({
|
|
50
|
+
version: p.version,
|
|
51
|
+
package: { name: p.name, ecosystem: 'npm' },
|
|
52
|
+
})),
|
|
53
|
+
}),
|
|
54
|
+
});
|
|
55
|
+
|
|
56
|
+
if (!res.ok) {
|
|
57
|
+
const body = await res.text().catch(() => '');
|
|
58
|
+
throw new Error(`OSV API ${res.status}: ${body || res.statusText}`);
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
const { results: chunkResults } = (await res.json()) as OsvBatchResponse;
|
|
62
|
+
results.push(...chunkResults);
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
return results;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
export async function fetchVuln(id: string): Promise<OsvVulnerability | null> {
|
|
69
|
+
const res = await fetchWithTimeout(`${OSV_API_BASE}/vulns/${id}`);
|
|
70
|
+
if (!res.ok) return null;
|
|
71
|
+
return res.json() as Promise<OsvVulnerability>;
|
|
72
|
+
}
|
package/src/config.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
export const OSV_API_BASE = Bun.env.OSV_API_BASE ?? 'https://api.osv.dev/v1';
|
|
2
|
+
// Hard limit enforced by the OSV API โ exceeding it returns 400 "Too many queries".
|
|
3
|
+
export const OSV_BATCH_SIZE = 1000;
|
|
4
|
+
export const FETCH_TIMEOUT_MS = Number(Bun.env.OSV_TIMEOUT_MS) || 10_000;
|
|
5
|
+
export const PREFERRED_REF_TYPES = ['ADVISORY', 'WEB', 'ARTICLE'] as const;
|
|
6
|
+
export const CACHE_FILE = Bun.env.OSV_CACHE_FILE ?? '.osv.lock';
|
|
7
|
+
export const CACHE_TTL_MS = 24 * 60 * 60 * 1000;
|
|
8
|
+
|
|
9
|
+
// When true, network failures throw and cancel installation rather than failing open.
|
|
10
|
+
export const FAIL_CLOSED = Bun.env.OSV_FAIL_CLOSED === 'true';
|
|
11
|
+
export const NO_CACHE = Bun.env.OSV_NO_CACHE === 'true';
|
package/src/display.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
export function startSpinner(message: string) {
|
|
2
|
+
if (!process.stderr.isTTY)
|
|
3
|
+
return { update: (_: string) => {}, stop: () => {} };
|
|
4
|
+
|
|
5
|
+
const frames = ['โ ', 'โ ', 'โ น', 'โ ธ', 'โ ผ', 'โ ด', 'โ ฆ', 'โ ง', 'โ ', 'โ '];
|
|
6
|
+
let i = 0;
|
|
7
|
+
let current = message;
|
|
8
|
+
|
|
9
|
+
process.stderr.write(`${frames[0]} ${current}`);
|
|
10
|
+
const interval = setInterval(
|
|
11
|
+
() => process.stderr.write(`\r${frames[++i % frames.length]} ${current}`),
|
|
12
|
+
80
|
|
13
|
+
);
|
|
14
|
+
|
|
15
|
+
return {
|
|
16
|
+
update(msg: string) {
|
|
17
|
+
current = msg;
|
|
18
|
+
},
|
|
19
|
+
stop() {
|
|
20
|
+
clearInterval(interval);
|
|
21
|
+
process.stderr.write('\r\x1b[2K');
|
|
22
|
+
},
|
|
23
|
+
};
|
|
24
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { backend as osvBackend } from './osv';
|
|
2
|
+
import { type Backend, createScanner } from './scanner';
|
|
3
|
+
import { backend as snykBackend } from './snyk/index';
|
|
4
|
+
|
|
5
|
+
const registry: Record<string, Backend> = {
|
|
6
|
+
osv: osvBackend,
|
|
7
|
+
snyk: snykBackend,
|
|
8
|
+
};
|
|
9
|
+
|
|
10
|
+
const backendName = (Bun.env.SCANNER_BACKEND ?? 'osv').toLowerCase();
|
|
11
|
+
const selected = registry[backendName];
|
|
12
|
+
|
|
13
|
+
if (!selected) {
|
|
14
|
+
process.stderr.write(
|
|
15
|
+
`[@nebzdev/bun-security-scanner] Unknown SCANNER_BACKEND "${backendName}", falling back to osv.\n`
|
|
16
|
+
);
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
export const scanner: Bun.Security.Scanner = createScanner(
|
|
20
|
+
selected ?? osvBackend
|
|
21
|
+
);
|
package/src/osv.ts
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import type { OsvVulnerability } from './client';
|
|
2
|
+
import { batchQuery, fetchVuln } from './client';
|
|
3
|
+
import { CACHE_FILE, FAIL_CLOSED, NO_CACHE } from './config';
|
|
4
|
+
import { type Backend, createScanner } from './scanner';
|
|
5
|
+
import { advisoryUrl, severityLevel } from './severity';
|
|
6
|
+
|
|
7
|
+
const backend: Backend = {
|
|
8
|
+
name: 'OSV',
|
|
9
|
+
cacheFile: CACHE_FILE,
|
|
10
|
+
noCache: NO_CACHE,
|
|
11
|
+
failClosed: FAIL_CLOSED,
|
|
12
|
+
|
|
13
|
+
async fetchAdvisories(packages, onStatus) {
|
|
14
|
+
const batchResults = await batchQuery(packages);
|
|
15
|
+
|
|
16
|
+
const affected: Array<{ pkg: Bun.Security.Package; vulnId: string }> = [];
|
|
17
|
+
for (let i = 0; i < packages.length; i++) {
|
|
18
|
+
for (const { id } of batchResults[i]?.vulns ?? []) {
|
|
19
|
+
affected.push({ pkg: packages[i], vulnId: id });
|
|
20
|
+
}
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
const result = new Map<string, Bun.Security.Advisory[]>();
|
|
24
|
+
for (const pkg of packages) {
|
|
25
|
+
result.set(`${pkg.name}@${pkg.version}`, []);
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
if (affected.length > 0) {
|
|
29
|
+
const uniqueIds = [...new Set(affected.map((a) => a.vulnId))];
|
|
30
|
+
const vulnById = new Map<string, OsvVulnerability>();
|
|
31
|
+
|
|
32
|
+
onStatus(
|
|
33
|
+
`Fetching details for ${uniqueIds.length} ${uniqueIds.length === 1 ? 'vulnerability' : 'vulnerabilities'}...`
|
|
34
|
+
);
|
|
35
|
+
|
|
36
|
+
await Promise.all(
|
|
37
|
+
uniqueIds.map(async (id) => {
|
|
38
|
+
const vuln = await fetchVuln(id);
|
|
39
|
+
if (vuln) vulnById.set(id, vuln);
|
|
40
|
+
})
|
|
41
|
+
);
|
|
42
|
+
|
|
43
|
+
for (const { pkg, vulnId } of affected) {
|
|
44
|
+
const vuln = vulnById.get(vulnId);
|
|
45
|
+
if (vuln) {
|
|
46
|
+
result.get(`${pkg.name}@${pkg.version}`)?.push({
|
|
47
|
+
level: severityLevel(vuln),
|
|
48
|
+
package: pkg.name,
|
|
49
|
+
url: advisoryUrl(vuln),
|
|
50
|
+
description: vuln.summary ?? vuln.id,
|
|
51
|
+
});
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
return result;
|
|
57
|
+
},
|
|
58
|
+
};
|
|
59
|
+
|
|
60
|
+
export { backend };
|
|
61
|
+
export const scanner = createScanner(backend);
|
package/src/scanner.ts
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import { isFresh, readCache, writeCache } from './cache';
|
|
2
|
+
import { isResolvable } from './client';
|
|
3
|
+
import { startSpinner } from './display';
|
|
4
|
+
|
|
5
|
+
export interface Backend {
|
|
6
|
+
readonly name: string;
|
|
7
|
+
readonly cacheFile: string;
|
|
8
|
+
readonly noCache: boolean;
|
|
9
|
+
readonly failClosed: boolean;
|
|
10
|
+
validateConfig?(): void;
|
|
11
|
+
fetchAdvisories(
|
|
12
|
+
packages: Bun.Security.Package[],
|
|
13
|
+
onStatus: (message: string) => void
|
|
14
|
+
): Promise<Map<string, Bun.Security.Advisory[]>>;
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
export function createScanner(backend: Backend): Bun.Security.Scanner {
|
|
18
|
+
return {
|
|
19
|
+
version: '1',
|
|
20
|
+
|
|
21
|
+
async scan({ packages }) {
|
|
22
|
+
backend.validateConfig?.();
|
|
23
|
+
|
|
24
|
+
const queryable = packages.filter(
|
|
25
|
+
(p) => p.name && isResolvable(p.version)
|
|
26
|
+
);
|
|
27
|
+
if (queryable.length === 0) return [];
|
|
28
|
+
|
|
29
|
+
const cache = backend.noCache ? {} : await readCache(backend.cacheFile);
|
|
30
|
+
|
|
31
|
+
const cachedAdvisories: Bun.Security.Advisory[] = [];
|
|
32
|
+
const toQuery: Bun.Security.Package[] = [];
|
|
33
|
+
|
|
34
|
+
for (const pkg of queryable) {
|
|
35
|
+
const entry = cache[`${pkg.name}@${pkg.version}`];
|
|
36
|
+
if (entry && isFresh(entry)) {
|
|
37
|
+
cachedAdvisories.push(...entry.advisories);
|
|
38
|
+
} else {
|
|
39
|
+
toQuery.push(pkg);
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
if (toQuery.length === 0) return cachedAdvisories;
|
|
44
|
+
|
|
45
|
+
const hitCount = queryable.length - toQuery.length;
|
|
46
|
+
const spinner = startSpinner(
|
|
47
|
+
hitCount > 0
|
|
48
|
+
? `Scanning ${toQuery.length} packages via ${backend.name} (${hitCount} cached)...`
|
|
49
|
+
: `Scanning ${queryable.length} packages via ${backend.name}...`
|
|
50
|
+
);
|
|
51
|
+
|
|
52
|
+
try {
|
|
53
|
+
const advisoryMap = await backend.fetchAdvisories(toQuery, (msg) =>
|
|
54
|
+
spinner.update(msg)
|
|
55
|
+
);
|
|
56
|
+
|
|
57
|
+
spinner.stop();
|
|
58
|
+
|
|
59
|
+
for (const [key, advisories] of advisoryMap) {
|
|
60
|
+
cache[key] = { advisories, cachedAt: Date.now() };
|
|
61
|
+
}
|
|
62
|
+
if (!backend.noCache) void writeCache(cache, backend.cacheFile);
|
|
63
|
+
|
|
64
|
+
return [...cachedAdvisories, ...[...advisoryMap.values()].flat()];
|
|
65
|
+
} catch (err) {
|
|
66
|
+
spinner.stop();
|
|
67
|
+
|
|
68
|
+
if (backend.failClosed) {
|
|
69
|
+
throw new Error(
|
|
70
|
+
`${backend.name} scan failed: ${err instanceof Error ? err.message : err}`
|
|
71
|
+
);
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
process.stderr.write(
|
|
75
|
+
`\n${backend.name} scan failed (${err instanceof Error ? err.message : err}), skipping.\n`
|
|
76
|
+
);
|
|
77
|
+
return cachedAdvisories;
|
|
78
|
+
}
|
|
79
|
+
},
|
|
80
|
+
};
|
|
81
|
+
}
|
package/src/severity.ts
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import type { OsvVulnerability } from './client';
|
|
2
|
+
import { PREFERRED_REF_TYPES } from './config';
|
|
3
|
+
|
|
4
|
+
export function severityLevel(vuln: OsvVulnerability): 'fatal' | 'warn' {
|
|
5
|
+
const s = vuln.database_specific?.severity?.toUpperCase();
|
|
6
|
+
if (s === 'CRITICAL' || s === 'HIGH') return 'fatal';
|
|
7
|
+
if (s === 'MODERATE' || s === 'LOW') return 'warn';
|
|
8
|
+
|
|
9
|
+
// Fallback: numeric CVSS score (โฅ7.0 = HIGH/CRITICAL threshold).
|
|
10
|
+
const score = vuln.database_specific?.cvss?.score;
|
|
11
|
+
if (typeof score === 'number') return score >= 7.0 ? 'fatal' : 'warn';
|
|
12
|
+
|
|
13
|
+
return 'warn';
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
export function advisoryUrl(vuln: OsvVulnerability): string {
|
|
17
|
+
for (const type of PREFERRED_REF_TYPES) {
|
|
18
|
+
const ref = vuln.references?.find((r) => r.type === type);
|
|
19
|
+
if (ref) return ref.url;
|
|
20
|
+
}
|
|
21
|
+
return `https://osv.dev/vulnerability/${vuln.id}`;
|
|
22
|
+
}
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
import {
|
|
2
|
+
CONCURRENCY,
|
|
3
|
+
FETCH_TIMEOUT_MS,
|
|
4
|
+
RATE_LIMIT,
|
|
5
|
+
SNYK_API_BASE,
|
|
6
|
+
SNYK_API_VERSION,
|
|
7
|
+
SNYK_ORG_ID,
|
|
8
|
+
SNYK_TOKEN,
|
|
9
|
+
} from './config';
|
|
10
|
+
|
|
11
|
+
export interface SnykIssue {
|
|
12
|
+
id: string;
|
|
13
|
+
attributes: {
|
|
14
|
+
title: string;
|
|
15
|
+
type: string;
|
|
16
|
+
effective_severity_level: 'critical' | 'high' | 'medium' | 'low';
|
|
17
|
+
description?: string;
|
|
18
|
+
problems?: Array<{ id: string; source: string }>;
|
|
19
|
+
};
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
interface SnykResponse {
|
|
23
|
+
data: SnykIssue[];
|
|
24
|
+
links?: { next?: string };
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
// Sliding window rate limiter โ tracks request timestamps within the last minute.
|
|
28
|
+
class RateLimiter {
|
|
29
|
+
private readonly timestamps: number[] = [];
|
|
30
|
+
private readonly windowMs = 60_000;
|
|
31
|
+
|
|
32
|
+
constructor(private readonly limit: number) {}
|
|
33
|
+
|
|
34
|
+
async acquire(): Promise<void> {
|
|
35
|
+
const now = Date.now();
|
|
36
|
+
while (
|
|
37
|
+
this.timestamps.length > 0 &&
|
|
38
|
+
now - this.timestamps[0] >= this.windowMs
|
|
39
|
+
) {
|
|
40
|
+
this.timestamps.shift();
|
|
41
|
+
}
|
|
42
|
+
if (this.timestamps.length < this.limit) {
|
|
43
|
+
this.timestamps.push(Date.now());
|
|
44
|
+
return;
|
|
45
|
+
}
|
|
46
|
+
// Wait until the oldest request falls outside the window, then retry.
|
|
47
|
+
const waitMs = this.windowMs - (Date.now() - this.timestamps[0]) + 10;
|
|
48
|
+
await Bun.sleep(waitMs);
|
|
49
|
+
return this.acquire();
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
const rateLimiter = new RateLimiter(RATE_LIMIT);
|
|
54
|
+
|
|
55
|
+
function fetchWithTimeout(
|
|
56
|
+
url: string,
|
|
57
|
+
options?: RequestInit
|
|
58
|
+
): Promise<Response> {
|
|
59
|
+
const controller = new AbortController();
|
|
60
|
+
const timer = setTimeout(() => controller.abort(), FETCH_TIMEOUT_MS);
|
|
61
|
+
return fetch(url, { ...options, signal: controller.signal }).finally(() =>
|
|
62
|
+
clearTimeout(timer)
|
|
63
|
+
);
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
export function validateConfig(): void {
|
|
67
|
+
if (!SNYK_TOKEN)
|
|
68
|
+
throw new Error('SNYK_TOKEN is required for the Snyk scanner');
|
|
69
|
+
if (!SNYK_ORG_ID)
|
|
70
|
+
throw new Error('SNYK_ORG_ID is required for the Snyk scanner');
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
function buildPurl(name: string, version: string): string {
|
|
74
|
+
// Scoped packages: @scope/name -> pkg:npm/scope/name@version (PURL spec ยง7)
|
|
75
|
+
if (name.startsWith('@')) {
|
|
76
|
+
const slash = name.indexOf('/', 1);
|
|
77
|
+
const scope = name.slice(1, slash);
|
|
78
|
+
const pkg = name.slice(slash + 1);
|
|
79
|
+
return `pkg:npm/${scope}/${pkg}@${version}`;
|
|
80
|
+
}
|
|
81
|
+
return `pkg:npm/${name}@${version}`;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
async function fetchPackageIssues(
|
|
85
|
+
name: string,
|
|
86
|
+
version: string,
|
|
87
|
+
retries = 3
|
|
88
|
+
): Promise<SnykIssue[]> {
|
|
89
|
+
await rateLimiter.acquire();
|
|
90
|
+
|
|
91
|
+
const purl = encodeURIComponent(buildPurl(name, version));
|
|
92
|
+
const url = `${SNYK_API_BASE}/orgs/${SNYK_ORG_ID}/packages/${purl}/issues?version=${SNYK_API_VERSION}&limit=1000`;
|
|
93
|
+
|
|
94
|
+
const res = await fetchWithTimeout(url, {
|
|
95
|
+
headers: {
|
|
96
|
+
Authorization: `token ${SNYK_TOKEN}`,
|
|
97
|
+
'Content-Type': 'application/vnd.api+json',
|
|
98
|
+
},
|
|
99
|
+
});
|
|
100
|
+
|
|
101
|
+
if (res.status === 429 && retries > 0) {
|
|
102
|
+
const retryAfter = res.headers.get('Retry-After');
|
|
103
|
+
const waitMs = retryAfter ? Number(retryAfter) * 1000 : 60_000;
|
|
104
|
+
await Bun.sleep(waitMs);
|
|
105
|
+
return fetchPackageIssues(name, version, retries - 1);
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
if (res.status === 404) return [];
|
|
109
|
+
|
|
110
|
+
if (!res.ok) {
|
|
111
|
+
const body = await res.text().catch(() => '');
|
|
112
|
+
throw new Error(`Snyk API ${res.status}: ${body || res.statusText}`);
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
const { data } = (await res.json()) as SnykResponse;
|
|
116
|
+
return data ?? [];
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
export async function batchFetchIssues(
|
|
120
|
+
packages: Bun.Security.Package[],
|
|
121
|
+
onProgress?: (completed: number, total: number) => void
|
|
122
|
+
): Promise<Map<string, SnykIssue[]>> {
|
|
123
|
+
const results = new Map<string, SnykIssue[]>();
|
|
124
|
+
let completed = 0;
|
|
125
|
+
|
|
126
|
+
for (let i = 0; i < packages.length; i += CONCURRENCY) {
|
|
127
|
+
await Promise.all(
|
|
128
|
+
packages.slice(i, i + CONCURRENCY).map(async (pkg) => {
|
|
129
|
+
const issues = await fetchPackageIssues(pkg.name, pkg.version);
|
|
130
|
+
results.set(`${pkg.name}@${pkg.version}`, issues);
|
|
131
|
+
onProgress?.(++completed, packages.length);
|
|
132
|
+
})
|
|
133
|
+
);
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
return results;
|
|
137
|
+
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
export const SNYK_API_BASE =
|
|
2
|
+
Bun.env.SNYK_API_BASE ?? 'https://api.snyk.io/rest';
|
|
3
|
+
export const SNYK_API_VERSION = Bun.env.SNYK_API_VERSION ?? '2024-04-29';
|
|
4
|
+
export const SNYK_TOKEN = Bun.env.SNYK_TOKEN;
|
|
5
|
+
export const SNYK_ORG_ID = Bun.env.SNYK_ORG_ID;
|
|
6
|
+
export const FETCH_TIMEOUT_MS = Number(Bun.env.SNYK_TIMEOUT_MS) || 10_000;
|
|
7
|
+
export const FAIL_CLOSED = Bun.env.SNYK_FAIL_CLOSED === 'true';
|
|
8
|
+
export const NO_CACHE = Bun.env.SNYK_NO_CACHE === 'true';
|
|
9
|
+
// Max concurrent connections (independent of rate limit)
|
|
10
|
+
export const CONCURRENCY = Number(Bun.env.SNYK_CONCURRENCY) || 10;
|
|
11
|
+
// Requests per minute โ hard ceiling is 180; default leaves headroom
|
|
12
|
+
export const RATE_LIMIT = Math.min(Number(Bun.env.SNYK_RATE_LIMIT) || 160, 180);
|
|
13
|
+
|
|
14
|
+
export const CACHE_FILE = Bun.env.SNYK_CACHE_FILE ?? '.snyk.lock';
|
|
15
|
+
export const CACHE_TTL_MS = 24 * 60 * 60 * 1000;
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { type Backend, createScanner } from '../scanner';
|
|
2
|
+
import { batchFetchIssues, validateConfig } from './client';
|
|
3
|
+
import { CACHE_FILE, FAIL_CLOSED, NO_CACHE } from './config';
|
|
4
|
+
import { advisoryUrl, severityLevel } from './severity';
|
|
5
|
+
|
|
6
|
+
const backend: Backend = {
|
|
7
|
+
name: 'Snyk',
|
|
8
|
+
cacheFile: CACHE_FILE,
|
|
9
|
+
noCache: NO_CACHE,
|
|
10
|
+
failClosed: FAIL_CLOSED,
|
|
11
|
+
validateConfig,
|
|
12
|
+
|
|
13
|
+
async fetchAdvisories(packages, onStatus) {
|
|
14
|
+
const issueMap = await batchFetchIssues(packages, (done, total) => {
|
|
15
|
+
onStatus(`Scanning packages via Snyk (${done}/${total})...`);
|
|
16
|
+
});
|
|
17
|
+
|
|
18
|
+
const result = new Map<string, Bun.Security.Advisory[]>();
|
|
19
|
+
for (const pkg of packages) {
|
|
20
|
+
const key = `${pkg.name}@${pkg.version}`;
|
|
21
|
+
const issues = issueMap.get(key) ?? [];
|
|
22
|
+
result.set(
|
|
23
|
+
key,
|
|
24
|
+
issues.map((issue) => ({
|
|
25
|
+
level: severityLevel(issue),
|
|
26
|
+
package: pkg.name,
|
|
27
|
+
url: advisoryUrl(issue),
|
|
28
|
+
description: issue.attributes.title,
|
|
29
|
+
}))
|
|
30
|
+
);
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
return result;
|
|
34
|
+
},
|
|
35
|
+
};
|
|
36
|
+
|
|
37
|
+
export { backend };
|
|
38
|
+
export const scanner = createScanner(backend);
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { SnykIssue } from './client';
|
|
2
|
+
|
|
3
|
+
export function severityLevel(issue: SnykIssue): 'fatal' | 'warn' {
|
|
4
|
+
const level = issue.attributes.effective_severity_level;
|
|
5
|
+
return level === 'critical' || level === 'high' ? 'fatal' : 'warn';
|
|
6
|
+
}
|
|
7
|
+
|
|
8
|
+
export function advisoryUrl(issue: SnykIssue): string {
|
|
9
|
+
return `https://security.snyk.io/vuln/${issue.id}`;
|
|
10
|
+
}
|