@preship/core 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,241 @@
1
+ type ProjectType = 'npm' | 'yarn' | 'pnpm';
2
+ interface DetectedProject {
3
+ type: ProjectType;
4
+ path: string;
5
+ lockFile: string;
6
+ framework?: string;
7
+ }
8
+ interface Dependency {
9
+ name: string;
10
+ version: string;
11
+ license: string;
12
+ licenseSource: LicenseSource;
13
+ path?: string;
14
+ isDirect: boolean;
15
+ isDevDependency: boolean;
16
+ }
17
+ type LicenseSource = 'package-json-license-field' | 'license-file' | 'package-json-licenses-field' | 'npm-registry' | 'github-api' | 'cache' | 'unknown';
18
+ interface ParsedDependency {
19
+ name: string;
20
+ version: string;
21
+ isDirect: boolean;
22
+ isDevDependency: boolean;
23
+ }
24
+ type PolicyVerdict = 'allowed' | 'warned' | 'rejected' | 'unknown';
25
+ interface PolicyResult {
26
+ dependency: Dependency;
27
+ verdict: PolicyVerdict;
28
+ reason: string;
29
+ }
30
+ interface ScanResult {
31
+ projectPath: string;
32
+ projectType: ProjectType;
33
+ framework?: string;
34
+ timestamp: string;
35
+ totalPackages: number;
36
+ allowed: PolicyResult[];
37
+ warned: PolicyResult[];
38
+ rejected: PolicyResult[];
39
+ unknown: PolicyResult[];
40
+ passed: boolean;
41
+ scanDurationMs: number;
42
+ }
43
+ type ResolverMode = 'auto' | 'online' | 'local';
44
+ interface PreshipConfig {
45
+ policy?: string;
46
+ reject?: string[];
47
+ warn?: string[];
48
+ allow?: string[];
49
+ flagUnknown?: boolean;
50
+ scanDevDependencies?: boolean;
51
+ exceptions?: ExceptionEntry[];
52
+ output?: 'table' | 'json' | 'csv';
53
+ mode?: ResolverMode;
54
+ networkTimeout?: number;
55
+ networkConcurrency?: number;
56
+ cache?: boolean;
57
+ cacheTTL?: number;
58
+ scanTimeout?: number;
59
+ }
60
+ interface ExceptionEntry {
61
+ package: string;
62
+ reason: string;
63
+ approvedBy?: string;
64
+ date?: string;
65
+ }
66
+ interface PolicyTemplate {
67
+ name: string;
68
+ description: string;
69
+ reject: string[];
70
+ warn: string[];
71
+ allow: string[];
72
+ flagUnknown: boolean;
73
+ scanDevDependencies: boolean;
74
+ }
75
+
76
+ declare function loadConfig(projectPath: string): PreshipConfig;
77
+
78
+ declare function detectProjects(rootPath: string): DetectedProject[];
79
+
80
+ declare function parseNpmLockfile(lockfilePath: string, packageJsonPath: string): ParsedDependency[];
81
+
82
+ declare function parseYarnLockfile(lockfilePath: string, packageJsonPath: string): ParsedDependency[];
83
+
84
+ declare function parsePnpmLockfile(lockfilePath: string, packageJsonPath: string): ParsedDependency[];
85
+
86
+ /**
87
+ * Encrypted Project-Level License Cache
88
+ *
89
+ * Caches resolved license data in `.preship-cache.json` (encrypted with AES-256-CBC).
90
+ * Encryption key is stored in `.preship-key` (generated once, gitignored).
91
+ *
92
+ * Cache key: "packageName@version" — license doesn't change for a given version.
93
+ * UNKNOWN results are NOT cached (failures may resolve on next scan).
94
+ */
95
+ interface CacheEntry {
96
+ license: string;
97
+ licenseSource: string;
98
+ cachedAt: number;
99
+ }
100
+ interface CacheFile {
101
+ version: 1;
102
+ entries: Record<string, CacheEntry>;
103
+ }
104
+ /**
105
+ * Load or generate the encryption key.
106
+ * Key is a 32-byte random value stored as hex in `.preship-key`.
107
+ */
108
+ declare function getOrCreateKey(projectPath: string): Buffer;
109
+ /**
110
+ * Build cache key from package name and version.
111
+ */
112
+ declare function cacheKey(packageName: string, version: string): string;
113
+ /**
114
+ * Create an empty cache file structure.
115
+ */
116
+ declare function createEmptyCache(): CacheFile;
117
+ /**
118
+ * Load cache from disk, decrypt, and parse.
119
+ * Returns empty cache if file doesn't exist, is corrupt, or key is wrong.
120
+ */
121
+ declare function loadCache(cacheFilePath: string, key: Buffer): CacheFile;
122
+ /**
123
+ * Save cache to disk: serialize → encrypt → atomic write (write .tmp then rename).
124
+ */
125
+ declare function saveCache(cacheFilePath: string, cache: CacheFile, key: Buffer): void;
126
+ /**
127
+ * Look up a single entry in cache.
128
+ * Returns null if not found or expired (past TTL).
129
+ *
130
+ * @param ttlSeconds - Cache TTL in seconds
131
+ */
132
+ declare function getCacheEntry(cache: CacheFile, packageName: string, version: string, ttlSeconds: number): CacheEntry | null;
133
+ /**
134
+ * Set a single entry in cache (in-memory).
135
+ * Call saveCache() after scan completes to persist.
136
+ */
137
+ declare function setCacheEntry(cache: CacheFile, packageName: string, version: string, license: string, licenseSource: string): void;
138
+
139
+ /**
140
+ * Adaptive Rate Limiter
141
+ *
142
+ * Reads rate-limit headers from API responses and slows down
143
+ * only when approaching limits. Shared across all concurrent
144
+ * workers in a single scan run.
145
+ *
146
+ * Tracks separate state for GitHub API and npm registry.
147
+ */
148
+ interface RateLimitState {
149
+ /** Remaining requests before limit (from X-RateLimit-Remaining) */
150
+ remaining: number | null;
151
+ /** Total request limit (from X-RateLimit-Limit) */
152
+ limit: number | null;
153
+ /** Epoch seconds when limit resets (from X-RateLimit-Reset) */
154
+ resetAt: number | null;
155
+ /** Delay in ms from Retry-After header */
156
+ retryAfterMs: number | null;
157
+ /** Hard-blocked (403 with 0 remaining) */
158
+ blocked: boolean;
159
+ }
160
+ interface RateLimiterConfig {
161
+ /** Threshold ratio at which to start slowing down (default: 0.2 = 20% remaining) */
162
+ slowdownThreshold: number;
163
+ /** Maximum delay in ms when approaching limit (default: 2000) */
164
+ maxDelay: number;
165
+ }
166
+ declare class AdaptiveRateLimiter {
167
+ private githubState;
168
+ private npmState;
169
+ private config;
170
+ constructor(config?: Partial<RateLimiterConfig>);
171
+ /**
172
+ * Update rate-limit state from response headers.
173
+ * Call after every fetch to either source.
174
+ *
175
+ * Reads: X-RateLimit-Remaining, X-RateLimit-Limit, X-RateLimit-Reset, Retry-After
176
+ */
177
+ updateFromResponse(source: 'github' | 'npm', response: Response): void;
178
+ /**
179
+ * Wait if necessary before making the next request.
180
+ *
181
+ * Returns true if OK to proceed, false if the source is hard-blocked.
182
+ *
183
+ * Delay logic:
184
+ * 1. If blocked → return false
185
+ * 2. If Retry-After is set → wait that exact duration
186
+ * 3. If remaining/limit < threshold → proportional delay
187
+ * 4. Otherwise → no delay
188
+ */
189
+ waitIfNeeded(source: 'github' | 'npm'): Promise<boolean>;
190
+ /**
191
+ * Check if a source is currently hard-blocked.
192
+ */
193
+ isBlocked(source: 'github' | 'npm'): boolean;
194
+ /**
195
+ * Get the current state for a source (for testing/inspection).
196
+ */
197
+ getState(source: 'github' | 'npm'): Readonly<RateLimitState>;
198
+ /**
199
+ * Reset all rate-limit state. Call at start of each scan run.
200
+ */
201
+ reset(): void;
202
+ private delay;
203
+ }
204
+
205
+ /**
206
+ * Scan Timeout Controller
207
+ *
208
+ * Provides a top-level abort mechanism for the entire scan.
209
+ * When the scan timeout fires, all in-flight and future network requests
210
+ * are aborted, and the resolver falls back to local resolution (auto mode)
211
+ * or UNKNOWN (online mode).
212
+ */
213
+ interface ScanTimeoutController {
214
+ /** The top-level AbortSignal — aborted when scan timeout fires */
215
+ signal: AbortSignal;
216
+ /** Check if the scan has timed out */
217
+ isTimedOut(): boolean;
218
+ /**
219
+ * Create a child AbortController for a single network request.
220
+ * The child aborts if either:
221
+ * - The per-request timeout fires, OR
222
+ * - The scan-level timeout fires
223
+ *
224
+ * Returns { controller, cleanup }.
225
+ * ALWAYS call cleanup() after the request completes to prevent memory leaks.
226
+ */
227
+ createRequestController(perRequestTimeout: number): {
228
+ controller: AbortController;
229
+ cleanup: () => void;
230
+ };
231
+ /** Cancel the scan timeout timer. Call when scan completes normally. */
232
+ dispose(): void;
233
+ }
234
+ /**
235
+ * Create a scan timeout controller.
236
+ *
237
+ * @param timeout - Total scan timeout in ms. 0 or undefined = no timeout (never aborts).
238
+ */
239
+ declare function createScanTimeoutController(timeout?: number): ScanTimeoutController;
240
+
241
+ export { AdaptiveRateLimiter, type CacheEntry, type CacheFile, type Dependency, type DetectedProject, type ExceptionEntry, type LicenseSource, type ParsedDependency, type PolicyResult, type PolicyTemplate, type PolicyVerdict, type PreshipConfig, type ProjectType, type ResolverMode, type ScanResult, type ScanTimeoutController, cacheKey, createEmptyCache, createScanTimeoutController, detectProjects, getCacheEntry, getOrCreateKey, loadCache, loadConfig, parseNpmLockfile, parsePnpmLockfile, parseYarnLockfile, saveCache, setCacheEntry };
package/dist/index.js ADDED
@@ -0,0 +1,912 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __export = (target, all) => {
9
+ for (var name in all)
10
+ __defProp(target, name, { get: all[name], enumerable: true });
11
+ };
12
+ var __copyProps = (to, from, except, desc) => {
13
+ if (from && typeof from === "object" || typeof from === "function") {
14
+ for (let key of __getOwnPropNames(from))
15
+ if (!__hasOwnProp.call(to, key) && key !== except)
16
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
+ }
18
+ return to;
19
+ };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
+
30
+ // src/index.ts
31
+ var index_exports = {};
32
+ __export(index_exports, {
33
+ AdaptiveRateLimiter: () => AdaptiveRateLimiter,
34
+ cacheKey: () => cacheKey,
35
+ createEmptyCache: () => createEmptyCache,
36
+ createScanTimeoutController: () => createScanTimeoutController,
37
+ detectProjects: () => detectProjects,
38
+ getCacheEntry: () => getCacheEntry,
39
+ getOrCreateKey: () => getOrCreateKey,
40
+ loadCache: () => loadCache,
41
+ loadConfig: () => loadConfig,
42
+ parseNpmLockfile: () => parseNpmLockfile,
43
+ parsePnpmLockfile: () => parsePnpmLockfile,
44
+ parseYarnLockfile: () => parseYarnLockfile,
45
+ saveCache: () => saveCache,
46
+ setCacheEntry: () => setCacheEntry
47
+ });
48
+ module.exports = __toCommonJS(index_exports);
49
+
50
+ // src/config.ts
51
+ var fs = __toESM(require("fs"));
52
+ var path = __toESM(require("path"));
53
+ var yaml = __toESM(require("js-yaml"));
54
+ var CONFIG_FILENAMES = [
55
+ "preship-config.yml",
56
+ "preship-config.yaml",
57
+ "preship-config.json"
58
+ ];
59
+ var VALID_POLICIES = ["commercial-safe", "strict", "permissive-only"];
60
+ var VALID_OUTPUTS = ["table", "json", "csv"];
61
+ var VALID_MODES = ["auto", "online", "local"];
62
/**
 * Validate a parsed config object against the PreshipConfig schema.
 *
 * Only fields that are present are checked (undefined fields fall back to
 * defaults later, in loadConfig). Returns the same object on success.
 *
 * @param config - Raw object parsed from a YAML/JSON config file.
 * @param filePath - Config file path, used only in error messages.
 * @returns The validated config (same reference, not a copy).
 * @throws Error with an actionable message on the first invalid field.
 */
function validateConfig(config, filePath) {
  // Every schema error shares one message shape; build it in one place
  // instead of repeating the three-line template for each field.
  const fail = (detail) => {
    throw new Error(
      `Invalid preship-config at ${filePath}:
${detail}
See https://github.com/cyfoxinc/preship for config docs.`
    );
  };
  if (config.policy !== void 0 && (typeof config.policy !== "string" || !VALID_POLICIES.includes(config.policy))) {
    fail(`'policy' must be one of: ${VALID_POLICIES.join(", ")}, got '${config.policy}'.`);
  }
  if (config.output !== void 0 && (typeof config.output !== "string" || !VALID_OUTPUTS.includes(config.output))) {
    fail(`'output' must be one of: ${VALID_OUTPUTS.join(", ")}, got '${config.output}'.`);
  }
  // reject/warn/allow are license-identifier lists: arrays of strings.
  for (const field of ["reject", "warn", "allow"]) {
    if (config[field] === void 0) continue;
    if (!Array.isArray(config[field])) {
      fail(`'${field}' must be an array of strings, got ${typeof config[field]}.`);
    }
    for (const item of config[field]) {
      if (typeof item !== "string") {
        fail(`'${field}' must contain only strings, found ${typeof item}.`);
      }
    }
  }
  if (config.flagUnknown !== void 0 && typeof config.flagUnknown !== "boolean") {
    fail(`'flagUnknown' must be a boolean, got ${typeof config.flagUnknown}.`);
  }
  if (config.scanDevDependencies !== void 0 && typeof config.scanDevDependencies !== "boolean") {
    fail(`'scanDevDependencies' must be a boolean, got ${typeof config.scanDevDependencies}.`);
  }
  if (config.exceptions !== void 0) {
    if (!Array.isArray(config.exceptions)) {
      fail(`'exceptions' must be an array, got ${typeof config.exceptions}.`);
    }
    for (const entry of config.exceptions) {
      if (typeof entry !== "object" || entry === null) {
        fail(`Each entry in 'exceptions' must be an object with 'package' and 'reason' fields.`);
      }
      if (typeof entry.package !== "string" || typeof entry.reason !== "string") {
        fail(`Each exception must have 'package' (string) and 'reason' (string) fields.`);
      }
    }
  }
  if (config.mode !== void 0 && (typeof config.mode !== "string" || !VALID_MODES.includes(config.mode))) {
    fail(`'mode' must be one of: ${VALID_MODES.join(", ")}, got '${config.mode}'.`);
  }
  if (config.networkTimeout !== void 0 && (typeof config.networkTimeout !== "number" || config.networkTimeout <= 0)) {
    fail(`'networkTimeout' must be a positive number (milliseconds), got '${config.networkTimeout}'.`);
  }
  if (config.networkConcurrency !== void 0 && (typeof config.networkConcurrency !== "number" || config.networkConcurrency <= 0 || !Number.isInteger(config.networkConcurrency))) {
    fail(`'networkConcurrency' must be a positive integer, got '${config.networkConcurrency}'.`);
  }
  if (config.cache !== void 0 && typeof config.cache !== "boolean") {
    fail(`'cache' must be a boolean, got ${typeof config.cache}.`);
  }
  if (config.cacheTTL !== void 0 && (typeof config.cacheTTL !== "number" || config.cacheTTL <= 0)) {
    fail(`'cacheTTL' must be a positive number (seconds), got '${config.cacheTTL}'.`);
  }
  if (config.scanTimeout !== void 0 && (typeof config.scanTimeout !== "number" || config.scanTimeout < 0)) {
    fail(`'scanTimeout' must be a non-negative number (milliseconds, 0 = disabled), got '${config.scanTimeout}'.`);
  }
  return config;
}
195
/**
 * Built-in defaults applied when no config file exists or fields are omitted.
 * Returns a fresh object each call so callers can mutate safely.
 */
function getDefaultConfig() {
  const defaults = {
    policy: "commercial-safe",
    flagUnknown: true,
    scanDevDependencies: false,
    output: "table",
    exceptions: [],
    mode: "auto",
    networkTimeout: 5e3,
    networkConcurrency: 10,
    cache: true,
    // 7 days in seconds
    cacheTTL: 604800,
    // 60 seconds in ms (0 = disabled)
    scanTimeout: 6e4
  };
  return defaults;
}
212
/**
 * Load the preship config for a project, trying each supported filename
 * (YAML variants first, then JSON). Returns pure defaults when no config
 * file is found or when the file parses to a non-object.
 *
 * @param projectPath - Directory to search for a config file.
 * @returns Defaults merged with the validated user config.
 * @throws on unparseable files or schema violations (via validateConfig).
 */
function loadConfig(projectPath) {
  for (const filename of CONFIG_FILENAMES) {
    const filePath = path.join(projectPath, filename);
    if (!fs.existsSync(filePath)) continue;
    const raw = fs.readFileSync(filePath, "utf-8");
    const isJson = filename.endsWith(".json");
    let parsed;
    try {
      parsed = isJson ? JSON.parse(raw) : yaml.load(raw);
    } catch {
      throw new Error(
        isJson ? `Failed to parse ${filePath}: Invalid JSON.
The config file may be corrupted. Check syntax and try again.` : `Failed to parse ${filePath}: Invalid YAML.
The config file may be corrupted. Check syntax and try again.`
      );
    }
    // An empty or scalar-valued file is treated the same as no file at all.
    if (parsed === null || parsed === void 0 || typeof parsed !== "object") {
      return getDefaultConfig();
    }
    const validated = validateConfig(parsed, filePath);
    const defaults = getDefaultConfig();
    return {
      ...defaults,
      ...validated,
      // Keep the default empty list when the user omitted 'exceptions'.
      exceptions: validated.exceptions ?? defaults.exceptions
    };
  }
  return getDefaultConfig();
}
252
+
253
+ // src/detector.ts
254
+ var fs2 = __toESM(require("fs"));
255
+ var path2 = __toESM(require("path"));
256
+ var LOCK_FILES = [
257
+ { filename: "package-lock.json", type: "npm" },
258
+ { filename: "yarn.lock", type: "yarn" },
259
+ { filename: "pnpm-lock.yaml", type: "pnpm" }
260
+ ];
261
/**
 * Best-effort framework detection from a project's package.json.
 * Looks at dependencies and devDependencies combined; returns the first
 * match in priority order, or undefined when nothing matches or the file
 * cannot be read/parsed.
 */
function detectFramework(packageJsonPath) {
  try {
    const pkg = JSON.parse(fs2.readFileSync(packageJsonPath, "utf-8"));
    const deps = {
      ...pkg.dependencies || {},
      ...pkg.devDependencies || {}
    };
    // Order matters: a Next.js app also depends on react, so check next first.
    const priority = [
      ["next", "nextjs"],
      ["react", "react"],
      ["express", "express"],
      ["fastify", "fastify"]
    ];
    for (const [dep, framework] of priority) {
      if (deps[dep]) return framework;
    }
    if (deps["@nestjs/core"] || deps["nestjs"]) return "nestjs";
    return void 0;
  } catch {
    // Missing/corrupt package.json — framework is simply unknown.
    return void 0;
  }
}
279
/**
 * Detect the package manager used by the project at rootPath.
 * Requires a package.json; then picks the FIRST lock file present in
 * LOCK_FILES priority order (npm > yarn > pnpm) and returns it as a
 * single-element array.
 *
 * @throws when package.json or any supported lock file is missing.
 */
function detectProjects(rootPath) {
  const absRoot = path2.resolve(rootPath);
  const packageJsonPath = path2.join(absRoot, "package.json");
  if (!fs2.existsSync(packageJsonPath)) {
    throw new Error(
      `No package.json found in ${absRoot}.
PreShip needs a Node.js project to scan.
Make sure you're running from the project root.`
    );
  }
  const matches = LOCK_FILES.map((entry) => ({
    entry,
    lockFile: path2.join(absRoot, entry.filename)
  })).filter(({ lockFile }) => fs2.existsSync(lockFile));
  if (matches.length === 0) {
    throw new Error(
      `No supported lock file found in ${absRoot}.
PreShip needs a lock file to scan dependencies.
Run 'npm install', 'yarn install', or 'pnpm install' first.`
    );
  }
  const { entry, lockFile } = matches[0];
  return [
    {
      type: entry.type,
      path: absRoot,
      lockFile,
      framework: detectFramework(packageJsonPath)
    }
  ];
}
310
+
311
+ // src/parsers/npm.ts
312
+ var fs3 = __toESM(require("fs"));
313
/**
 * Parse a package-lock.json into a flat dependency list.
 * Supports lockfileVersion 1 (nested "dependencies" tree) and 2/3
 * (flat "packages" map). Direct/dev classification comes from the
 * sibling package.json.
 *
 * @throws with actionable messages when either file is missing or malformed.
 */
function parseNpmLockfile(lockfilePath, packageJsonPath) {
  let lockContent;
  try {
    lockContent = fs3.readFileSync(lockfilePath, "utf-8");
  } catch {
    throw new Error(
      `Failed to read ${lockfilePath}.
The lock file may be missing or corrupted. Try running 'npm install'.`
    );
  }
  let lockfile;
  try {
    lockfile = JSON.parse(lockContent);
  } catch {
    throw new Error(
      `Failed to parse ${lockfilePath}: Invalid JSON.
The lock file may be corrupted. Try deleting it and running 'npm install' again.`
    );
  }
  let pkgJson;
  try {
    pkgJson = JSON.parse(fs3.readFileSync(packageJsonPath, "utf-8"));
  } catch {
    throw new Error(
      `Failed to read or parse ${packageJsonPath}.
Make sure package.json exists and is valid JSON.`
    );
  }
  const directDeps = new Set(Object.keys(pkgJson.dependencies || {}));
  const directDevDeps = new Set(Object.keys(pkgJson.devDependencies || {}));
  // Missing lockfileVersion means the original v1 format.
  const lockVersion = lockfile.lockfileVersion ?? 1;
  if (lockVersion >= 2 && lockfile.packages) {
    return parseV2Packages(lockfile.packages, directDeps, directDevDeps);
  }
  if (lockfile.dependencies) {
    return parseV1Dependencies(lockfile.dependencies, directDeps, directDevDeps);
  }
  return [];
}
352
/**
 * Flatten a lockfile v2/v3 "packages" map into ParsedDependency records.
 * Skips the root entry ("") and any key outside node_modules/; dedupes on
 * name@version so hoisted and nested copies appear once.
 */
function parseV2Packages(packages, directDeps, directDevDeps) {
  const out = [];
  const seen = new Set();
  for (const [key, entry] of Object.entries(packages)) {
    if (key === "" || !key.startsWith("node_modules/")) continue;
    // Nested installs look like "node_modules/a/node_modules/b" — the
    // package name (possibly "@scope/pkg") is everything after the last
    // "node_modules/" segment.
    const segments = key.split("node_modules/");
    const name = segments[segments.length - 1];
    if (!name || !entry.version) continue;
    const id = `${name}@${entry.version}`;
    if (seen.has(id)) continue;
    seen.add(id);
    out.push({
      name,
      version: entry.version,
      isDirect: directDeps.has(name) || directDevDeps.has(name),
      isDevDependency: directDevDeps.has(name) && !directDeps.has(name)
    });
  }
  return out;
}
375
/**
 * Flatten a lockfile v1 nested "dependencies" tree depth-first.
 * Note: no dedup here (matches the original traversal) — the same
 * name may be emitted for each place it appears in the tree.
 */
function parseV1Dependencies(dependencies, directDeps, directDevDeps) {
  const out = [];
  const visit = (deps) => {
    for (const [name, entry] of Object.entries(deps)) {
      out.push({
        name,
        version: entry.version,
        isDirect: directDeps.has(name) || directDevDeps.has(name),
        isDevDependency: directDevDeps.has(name) && !directDeps.has(name)
      });
      if (entry.dependencies) visit(entry.dependencies);
    }
  };
  visit(dependencies);
  return out;
}
395
+
396
+ // src/parsers/yarn.ts
397
+ var fs4 = __toESM(require("fs"));
398
/**
 * Parse a yarn.lock (classic v1 or berry v2+) into ParsedDependency records.
 * Berry lockfiles are recognized by the presence of a "__metadata:" block.
 *
 * @throws when the lock file or package.json cannot be read/parsed.
 */
function parseYarnLockfile(lockfilePath, packageJsonPath) {
  let lockContent;
  try {
    lockContent = fs4.readFileSync(lockfilePath, "utf-8");
  } catch {
    throw new Error(
      `Failed to read ${lockfilePath}.
The lock file may be missing or corrupted. Try running 'yarn install'.`
    );
  }
  let pkgJson;
  try {
    pkgJson = JSON.parse(fs4.readFileSync(packageJsonPath, "utf-8"));
  } catch {
    throw new Error(
      `Failed to read or parse ${packageJsonPath}.
Make sure package.json exists and is valid JSON.`
    );
  }
  const directDeps = new Set(Object.keys(pkgJson.dependencies || {}));
  const directDevDeps = new Set(Object.keys(pkgJson.devDependencies || {}));
  const parser = lockContent.includes("__metadata:") ? parseYarnV2 : parseYarnV1;
  return parser(lockContent, directDeps, directDevDeps);
}
425
/**
 * Strip the version/range suffix from a yarn specifier such as
 * `name@^1.0.0` or `@scope/name@^1.0.0`, returning just the package name.
 * Surrounding double quotes are removed first.
 */
function extractPackageName(specifier) {
  const spec = specifier.replace(/^"/, "").replace(/"$/, "");
  // Scoped names start with "@", so the version separator is the first
  // "@" strictly after position 0.
  const searchFrom = spec.startsWith("@") ? 1 : 0;
  const sep = spec.indexOf("@", searchFrom);
  return sep === -1 ? spec : spec.substring(0, sep);
}
434
/**
 * Berry (yarn v2+) variant of extractPackageName.
 * NOTE(review): logic is identical to extractPackageName; kept as a
 * separate function to mirror the v1/v2 parser split in this bundle.
 */
function extractPackageNameV2(specifier) {
  let spec = specifier;
  if (spec.startsWith('"')) spec = spec.slice(1);
  if (spec.endsWith('"')) spec = spec.slice(0, -1);
  const sep = spec.indexOf("@", spec.startsWith("@") ? 1 : 0);
  return sep === -1 ? spec : spec.slice(0, sep);
}
444
/**
 * Parse a yarn classic (v1) lockfile body.
 * Entry headers are unindented lines ending with ":" containing one or
 * more comma-separated specifiers; the resolved version appears on an
 * indented `version "x.y.z"` line inside the entry. Dedupes on
 * name@version.
 */
function parseYarnV1(content, directDeps, directDevDeps) {
  const results = [];
  const seen = new Set();
  const lines = content.split("\n");
  let i = 0;
  while (i < lines.length) {
    const line = lines[i];
    const isHeader = !!line && !line.startsWith("#") && line.trim() !== "" && !line.startsWith(" ") && line.endsWith(":");
    if (!isHeader) {
      i++;
      continue;
    }
    const header = line.replace(/:$/, "").trim().replace(/^"/, "").replace(/"$/, "");
    const name = extractPackageName(header.split(",")[0].trim());
    // Scan the indented entry body for the resolved version line.
    let version;
    let j = i + 1;
    for (; j < lines.length && lines[j].startsWith(" "); j++) {
      const m = lines[j].match(/^\s+version\s+"?([^"\s]+)"?/);
      if (m) {
        version = m[1];
        break;
      }
    }
    if (name && version) {
      const id = `${name}@${version}`;
      if (!seen.has(id)) {
        seen.add(id);
        results.push({
          name,
          version,
          isDirect: directDeps.has(name) || directDevDeps.has(name),
          isDevDependency: directDevDeps.has(name) && !directDeps.has(name)
        });
      }
    }
    // Resume after the lines we consumed (guard against zero progress).
    i = j > i ? j : i + 1;
  }
  return results;
}
485
/**
 * Parse a yarn berry (v2+) lockfile body.
 * Same overall shape as v1, except version lines read `version: x`
 * (YAML style) and the file begins with a "__metadata:" block that is
 * skipped wholesale. Dedupes on name@version.
 */
function parseYarnV2(content, directDeps, directDevDeps) {
  const results = [];
  const seen = new Set();
  const lines = content.split("\n");
  let i = 0;
  while (i < lines.length) {
    const line = lines[i];
    if (!line || line.startsWith("#") || line.trim() === "") {
      i++;
      continue;
    }
    if (line.startsWith("__metadata:")) {
      // Skip the metadata header and its indented body.
      i++;
      while (i < lines.length && lines[i].startsWith(" ")) i++;
      continue;
    }
    if (line.startsWith(" ") || !line.endsWith(":")) {
      i++;
      continue;
    }
    const header = line.replace(/:$/, "").trim().replace(/^"/, "").replace(/"$/, "");
    const name = extractPackageNameV2(header.split(",")[0].trim());
    let version;
    let j = i + 1;
    for (; j < lines.length && lines[j].startsWith(" "); j++) {
      const m = lines[j].match(/^\s+version:\s+"?([^"\s]+)"?/);
      if (m) {
        version = m[1];
        break;
      }
    }
    if (name && version) {
      const id = `${name}@${version}`;
      if (!seen.has(id)) {
        seen.add(id);
        results.push({
          name,
          version,
          isDirect: directDeps.has(name) || directDevDeps.has(name),
          isDevDependency: directDevDeps.has(name) && !directDeps.has(name)
        });
      }
    }
    i = j > i ? j : i + 1;
  }
  return results;
}
531
+
532
+ // src/parsers/pnpm.ts
533
+ var fs5 = __toESM(require("fs"));
534
+ var yaml2 = __toESM(require("js-yaml"));
535
/**
 * Parse a pnpm-lock.yaml into ParsedDependency records.
 * Handles both the pre-v9 "/name/version" packages-key style and the
 * v9+ "name@version" style (dispatch in parsePackageKey). Dedupes on
 * name@version.
 *
 * @throws when the lock file or package.json cannot be read/parsed.
 */
function parsePnpmLockfile(lockfilePath, packageJsonPath) {
  let lockContent;
  try {
    lockContent = fs5.readFileSync(lockfilePath, "utf-8");
  } catch {
    throw new Error(
      `Failed to read ${lockfilePath}.
The lock file may be missing or corrupted. Try running 'pnpm install'.`
    );
  }
  let lockfile;
  try {
    lockfile = yaml2.load(lockContent);
  } catch {
    throw new Error(
      `Failed to parse ${lockfilePath}: Invalid YAML.
The lock file may be corrupted. Try deleting it and running 'pnpm install' again.`
    );
  }
  // Empty or packages-less lockfile simply yields no dependencies.
  if (!lockfile || !lockfile.packages) {
    return [];
  }
  let pkgJson;
  try {
    pkgJson = JSON.parse(fs5.readFileSync(packageJsonPath, "utf-8"));
  } catch {
    throw new Error(
      `Failed to read or parse ${packageJsonPath}.
Make sure package.json exists and is valid JSON.`
    );
  }
  const directDeps = new Set(Object.keys(pkgJson.dependencies || {}));
  const directDevDeps = new Set(Object.keys(pkgJson.devDependencies || {}));
  const lockVersion = parseLockfileVersion(lockfile.lockfileVersion);
  const results = [];
  const seen = new Set();
  for (const key of Object.keys(lockfile.packages)) {
    const parsed = parsePackageKey(key, lockVersion);
    if (!parsed) continue;
    const { name, pkgVersion } = parsed;
    const id = `${name}@${pkgVersion}`;
    if (seen.has(id)) continue;
    seen.add(id);
    results.push({
      name,
      version: pkgVersion,
      isDirect: directDeps.has(name) || directDevDeps.has(name),
      isDevDependency: directDevDeps.has(name) && !directDeps.has(name)
    });
  }
  return results;
}
589
/**
 * Normalize pnpm's lockfileVersion field (number or string, e.g. "9.0")
 * to a number. Defaults to 5 when absent or unparseable.
 */
function parseLockfileVersion(version) {
  if (version === void 0) return 5;
  const parsed = typeof version === "string" ? parseFloat(version) : version;
  return isNaN(parsed) ? 5 : parsed;
}
594
/**
 * Dispatch a pnpm packages-map key to the matching parser:
 * v9+ keys look like "name@version"; older keys look like "/name/version".
 * Keys that already match the v9 shape are routed there regardless of the
 * declared lockfile version.
 */
function parsePackageKey(key, version) {
  const looksLikeV9 = !key.startsWith("/") && key.includes("@");
  return version >= 9 || looksLikeV9 ? parseV9Key(key) : parseV5Key(key);
}
600
/**
 * Parse a v9-style key: "name@version" or "@scope/name@version".
 * Returns null when either part is missing or the separator is absent.
 */
function parseV9Key(key) {
  // For scoped packages the leading "@" belongs to the name, so search
  // for the separator starting at index 1.
  const sep = key.indexOf("@", key.startsWith("@") ? 1 : 0);
  if (sep === -1) return null;
  const name = key.substring(0, sep);
  const pkgVersion = key.substring(sep + 1);
  if (!name || !pkgVersion) return null;
  return { name, pkgVersion };
}
616
/**
 * Parse a pre-v9 key: "/name/version" or "/@scope/name/version".
 * Returns null for keys that don't match the expected shape.
 */
function parseV5Key(key) {
  if (!key.startsWith("/")) return null;
  const parts = key.substring(1).split("/");
  // Scoped names consume two path segments ("@scope" + "name").
  const nameSegments = parts[0] && parts[0].startsWith("@") ? 2 : 1;
  if (parts.length < nameSegments + 1) return null;
  const name = parts.slice(0, nameSegments).join("/");
  const pkgVersion = parts[nameSegments];
  if (!name || !pkgVersion) return null;
  return { name, pkgVersion };
}
634
+
635
+ // src/cache.ts
636
+ var crypto = __toESM(require("crypto"));
637
+ var fs6 = __toESM(require("fs"));
638
+ var path3 = __toESM(require("path"));
639
// Cache encryption parameters: key filename, cipher, and IV size in bytes.
var KEY_FILENAME = ".preship-key";
var ALGORITHM = "aes-256-cbc";
var IV_LENGTH = 16;
/**
 * Load the per-project cache encryption key, minting one if absent.
 *
 * The key is persisted in <projectPath>/.preship-key as 64 hex characters
 * (32 raw bytes). Read and write failures are swallowed on purpose: a fresh
 * in-memory key is returned so the scan can proceed — the cache is
 * best-effort and simply gets rebuilt under a new key.
 */
function getOrCreateKey(projectPath) {
  const keyPath = path3.join(projectPath, KEY_FILENAME);
  try {
    if (fs6.existsSync(keyPath)) {
      const stored = fs6.readFileSync(keyPath, "utf-8").trim();
      // Only accept a well-formed 32-byte key; anything else is regenerated.
      if (stored.length === 64) return Buffer.from(stored, "hex");
    }
  } catch {
    // Unreadable key file — fall through and mint a new key.
  }
  const freshKey = crypto.randomBytes(32);
  try {
    // 384 === 0o600: readable/writable by the owner only.
    fs6.writeFileSync(keyPath, freshKey.toString("hex"), { mode: 384 });
  } catch {
    // Persisting failed (e.g. read-only dir); use the key for this run only.
  }
  return freshKey;
}
660
/**
 * Encrypt a UTF-8 string with AES-256-CBC under the given 32-byte key.
 * Output format: "<iv hex>:<ciphertext hex>", with a fresh random 16-byte IV
 * per call (cipher parameters inlined; identical to the module constants).
 */
function encrypt(data, key) {
  const iv = crypto.randomBytes(16);
  const cipher = crypto.createCipheriv("aes-256-cbc", key, iv);
  const body = Buffer.concat([cipher.update(data, "utf-8"), cipher.final()]);
  return iv.toString("hex") + ":" + body.toString("hex");
}
667
/**
 * Decrypt a payload produced by encrypt(): "<iv hex>:<ciphertext hex>".
 *
 * Throws Error("Invalid encrypted format") when the ":" separator is missing,
 * Error("Invalid IV length") when the IV is not 16 bytes, and propagates the
 * crypto error when the ciphertext does not match the key.
 */
function decrypt(encrypted, key) {
  const sep = encrypted.indexOf(":");
  if (sep === -1) {
    throw new Error("Invalid encrypted format");
  }
  const iv = Buffer.from(encrypted.substring(0, sep), "hex");
  if (iv.length !== 16) {
    throw new Error("Invalid IV length");
  }
  const decipher = crypto.createDecipheriv("aes-256-cbc", key, iv);
  const head = decipher.update(encrypted.substring(sep + 1), "hex", "utf-8");
  return head + decipher.final("utf-8");
}
683
// Composite cache lookup key: "<package>@<version>".
function cacheKey(packageName, version) {
  return packageName + "@" + version;
}
686
// A fresh, empty cache document in the current (v1) schema.
function createEmptyCache() {
  const empty = { version: 1, entries: {} };
  return empty;
}
689
/**
 * Load and decrypt the on-disk license cache.
 *
 * Any failure — missing file, empty file, failed decryption, invalid JSON,
 * or a schema mismatch — yields a fresh empty cache, because caching is
 * strictly best-effort and must never break a scan.
 */
function loadCache(cacheFilePath, key) {
  try {
    if (!fs6.existsSync(cacheFilePath)) return createEmptyCache();
    const raw = fs6.readFileSync(cacheFilePath, "utf-8").trim();
    if (!raw) return createEmptyCache();
    const cache = JSON.parse(decrypt(raw, key));
    // Reject anything that is not a v1 document with an entries map.
    const valid = cache && cache.version === 1 && typeof cache.entries === "object";
    return valid ? cache : createEmptyCache();
  } catch {
    return createEmptyCache();
  }
}
708
/**
 * Encrypt and persist the cache atomically: write to a ".tmp" sibling,
 * then rename over the target so readers never see a partial file.
 * Failures are deliberately ignored — an unsaved cache only costs re-fetches.
 */
function saveCache(cacheFilePath, cache, key) {
  const tmpPath = `${cacheFilePath}.tmp`;
  try {
    const payload = encrypt(JSON.stringify(cache), key);
    fs6.writeFileSync(tmpPath, payload, "utf-8");
    fs6.renameSync(tmpPath, cacheFilePath);
  } catch {
    // Best-effort: swallow write/rename errors.
  }
}
718
/**
 * Look up a cached license record for package@version, honoring the TTL.
 * Returns the entry, or null when absent or at least ttlSeconds old.
 */
function getCacheEntry(cache, packageName, version, ttlSeconds) {
  const entry = cache.entries[`${packageName}@${version}`];
  if (!entry) return null;
  const ageSeconds = (Date.now() - entry.cachedAt) / 1e3;
  // An entry exactly at the TTL boundary counts as expired.
  return ageSeconds >= ttlSeconds ? null : entry;
}
730
/**
 * Insert or overwrite the cache record for package@version,
 * stamping it with the current wall-clock time for TTL checks.
 */
function setCacheEntry(cache, packageName, version, license, licenseSource) {
  cache.entries[`${packageName}@${version}`] = {
    license,
    licenseSource,
    cachedAt: Date.now()
  };
}
738
+
739
+ // src/rate-limiter.ts
740
// Initial per-source rate-limit state: nothing known yet, not blocked.
function createEmptyState() {
  const state = {
    remaining: null,
    limit: null,
    resetAt: null,
    retryAfterMs: null,
    blocked: false
  };
  return state;
}
749
/**
 * Tracks npm/GitHub rate-limit headers and adaptively paces requests:
 * proportional slow-down as the remaining quota shrinks, exact waits for
 * Retry-After, and a hard stop once a source reports quota exhaustion.
 */
var AdaptiveRateLimiter = class {
  githubState;
  npmState;
  config;
  constructor(config) {
    this.githubState = this.freshState();
    this.npmState = this.freshState();
    this.config = {
      slowdownThreshold: config?.slowdownThreshold ?? 0.2,
      maxDelay: config?.maxDelay ?? 2e3
    };
  }
  // Blank per-source state: nothing known yet, not blocked.
  freshState() {
    return { remaining: null, limit: null, resetAt: null, retryAfterMs: null, blocked: false };
  }
  // Select the tracked state for a source ("github" or anything else = npm).
  stateFor(source) {
    return source === "github" ? this.githubState : this.npmState;
  }
  /**
   * Update rate-limit state from response headers. Call after every fetch
   * to either source.
   *
   * Reads: X-RateLimit-Remaining, X-RateLimit-Limit, X-RateLimit-Reset,
   * Retry-After. Absent or non-numeric headers leave the prior value intact.
   */
  updateFromResponse(source, response) {
    const state = this.stateFor(source);
    const readInt = (header) => {
      const raw = response.headers.get(header);
      if (raw === null) return null;
      const value = parseInt(raw, 10);
      return isNaN(value) ? null : value;
    };
    const remaining = readInt("X-RateLimit-Remaining");
    if (remaining !== null) state.remaining = remaining;
    const limit = readInt("X-RateLimit-Limit");
    if (limit !== null) state.limit = limit;
    const reset = readInt("X-RateLimit-Reset");
    if (reset !== null) state.resetAt = reset;
    const retryAfter = readInt("Retry-After");
    if (retryAfter !== null && retryAfter > 0) state.retryAfterMs = retryAfter * 1e3;
    // A 403 with zero remaining means the quota is exhausted — hard block.
    if (response.status === 403 && state.remaining === 0) state.blocked = true;
  }
  /**
   * Wait if necessary before making the next request to `source`.
   *
   * Returns true if OK to proceed, false if the source is hard-blocked.
   *
   * Priority: blocked → false; Retry-After set → wait exactly that long
   * (consumed once); remaining/limit below the threshold → proportional
   * delay up to maxDelay; otherwise no delay.
   */
  async waitIfNeeded(source) {
    const state = this.stateFor(source);
    if (state.blocked) return false;
    if (state.retryAfterMs !== null && state.retryAfterMs > 0) {
      const pause = state.retryAfterMs;
      state.retryAfterMs = null;
      await this.delay(pause);
      return true;
    }
    if (state.remaining !== null && state.limit !== null && state.limit > 0) {
      const ratio = state.remaining / state.limit;
      if (ratio < this.config.slowdownThreshold) {
        // Scale the pause from 0 (at the threshold) to maxDelay (at 0 left).
        const progress = 1 - ratio / this.config.slowdownThreshold;
        const delayMs = Math.round(this.config.maxDelay * progress);
        if (delayMs > 0) await this.delay(delayMs);
      }
    }
    return true;
  }
  // True when `source` has hit a hard rate-limit block.
  isBlocked(source) {
    return this.stateFor(source).blocked;
  }
  // Shallow copy of the current state for a source (testing/inspection).
  getState(source) {
    return { ...this.stateFor(source) };
  }
  // Clear all rate-limit state; call at the start of each scan run.
  reset() {
    this.githubState = this.freshState();
    this.npmState = this.freshState();
  }
  // Promise-based sleep.
  delay(ms) {
    return new Promise((resolve2) => setTimeout(resolve2, ms));
  }
};
853
+
854
+ // src/scan-timeout.ts
855
/**
 * Scan-wide timeout manager built on AbortController.
 *
 * Returns an object exposing:
 *  - signal: the scan-wide abort signal;
 *  - isTimedOut(): whether the scan deadline fired;
 *  - createRequestController(ms): a per-request controller that aborts on
 *    its own timeout or when the scan-wide signal aborts, plus a cleanup()
 *    that cancels its timer and detaches its listener;
 *  - dispose(): cancels the pending scan-wide timer.
 *
 * A falsy or non-positive `timeout` disables the scan-wide deadline.
 */
function createScanTimeoutController(timeout) {
  const scanController = new AbortController();
  let deadlineHit = false;
  let scanTimer;
  if (timeout && timeout > 0) {
    scanTimer = setTimeout(() => {
      deadlineHit = true;
      scanController.abort();
    }, timeout);
  }
  const makeRequestController = (perRequestTimeout) => {
    const requestController = new AbortController();
    const requestTimer = setTimeout(() => requestController.abort(), perRequestTimeout);
    const propagateAbort = () => {
      requestController.abort();
    };
    if (scanController.signal.aborted) {
      // Scan already aborted — abort the new request immediately.
      requestController.abort();
    } else {
      scanController.signal.addEventListener("abort", propagateAbort, { once: true });
    }
    return {
      controller: requestController,
      cleanup: () => {
        clearTimeout(requestTimer);
        scanController.signal.removeEventListener("abort", propagateAbort);
      }
    };
  };
  return {
    signal: scanController.signal,
    isTimedOut: () => deadlineHit,
    createRequestController: makeRequestController,
    dispose: () => {
      if (scanTimer !== void 0) {
        clearTimeout(scanTimer);
        scanTimer = void 0;
      }
    }
  };
}
896
// Annotate the CommonJS export names for ESM import in node:
// NOTE(review): this `0 && (...)` expression is dead code emitted by the
// bundler (presumably esbuild/tsup — confirm) so Node's static CJS
// named-export detection can discover these identifiers; it never runs.
0 && (module.exports = {
  AdaptiveRateLimiter,
  cacheKey,
  createEmptyCache,
  createScanTimeoutController,
  detectProjects,
  getCacheEntry,
  getOrCreateKey,
  loadCache,
  loadConfig,
  parseNpmLockfile,
  parsePnpmLockfile,
  parseYarnLockfile,
  saveCache,
  setCacheEntry
});
package/package.json ADDED
@@ -0,0 +1,29 @@
1
+ {
2
+ "name": "@preship/core",
3
+ "version": "1.0.0",
4
+ "description": "Core infrastructure for PreShip — types, config, detection, parsing, caching, and rate limiting",
5
+ "author": "Cyfox Inc.",
6
+ "license": "Apache-2.0",
7
+ "main": "./dist/index.js",
8
+ "types": "./dist/index.d.ts",
9
+ "exports": {
10
+ ".": {
11
+ "types": "./dist/index.d.ts",
12
+ "require": "./dist/index.js",
13
+ "default": "./dist/index.js"
14
+ }
15
+ },
16
+ "files": [
17
+ "dist"
18
+ ],
19
+ "scripts": {
20
+ "build": "tsup src/index.ts --format cjs --dts --clean",
21
+ "lint": "tsc --noEmit"
22
+ },
23
+ "dependencies": {
24
+ "glob": "^10.5.0",
25
+ "js-yaml": "^4.1.1",
26
+ "spdx-expression-parse": "^4.0.0",
27
+ "spdx-license-list": "^6.11.0"
28
+ }
29
+ }