@sanjay5114/cdx 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cdx.js +27 -0
- package/commands/auth.js +197 -0
- package/commands/config.js +165 -0
- package/commands/create.js +474 -0
- package/index.js +530 -0
- package/lib/ai.js +249 -0
- package/lib/auth.js +120 -0
- package/lib/fetch.js +46 -0
- package/lib/scanner.js +351 -0
- package/lib/store.js +83 -0
- package/lib/ui.js +477 -0
- package/package.json +35 -0
package/lib/ai.js
ADDED
|
@@ -0,0 +1,249 @@
|
|
|
1
|
+
"use strict";

// Project-local fetch wrapper (native fetch on Node 18+, node-fetch fallback).
const fetch = require("./fetch");

// Gemini models in cascade order: callers start at index 0 and fall back down
// the list when a model fails with a non-retryable error (see geminiCall).
const MODELS = [
  "gemini-2.5-pro-preview-03-25",
  "gemini-2.0-flash",
  "gemini-1.5-pro",
  "gemini-1.5-flash",
];
|
|
11
|
+
|
|
12
|
+
/** Build the Gemini generateContent REST endpoint URL for the given model id. */
function geminiUrl(model) {
  const base = "https://generativelanguage.googleapis.com/v1beta/models";
  return `${base}/${model}:generateContent`;
}
|
|
15
|
+
|
|
16
|
+
/** Promise-based delay helper: resolves after `ms` milliseconds. */
function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}
|
|
17
|
+
|
|
18
|
+
/**
 * Single non-retrying Gemini generateContent call.
 * Throws tagged errors (`isRateLimit` on 429, `isRetryable` on 503) so the
 * caller can decide whether to back off; any other non-OK status becomes a
 * plain Error with a truncated response body.
 *
 * @returns {Promise<string>} The first candidate's text.
 */
async function geminiRaw(apiKey, model, prompt, maxTokens = 8192) {
  const response = await fetch(geminiUrl(model), {
    method : "POST",
    headers: { "Content-Type": "application/json", "x-goog-api-key": apiKey },
    body   : JSON.stringify({
      contents        : [{ parts: [{ text: prompt }] }],
      generationConfig: { temperature: 0.4, maxOutputTokens: maxTokens, topP: 0.95 },
    }),
    // Hard 90s ceiling for the whole request.
    signal: AbortSignal.timeout(90_000),
  });

  if (response.status === 429) {
    const rateErr = new Error("RATE_LIMIT");
    rateErr.isRateLimit = true;
    throw rateErr;
  }
  if (response.status === 503) {
    const unavailErr = new Error("SERVICE_UNAVAILABLE");
    unavailErr.isRetryable = true;
    throw unavailErr;
  }
  if (!response.ok) {
    const bodyText = await response.text().catch(() => "");
    throw new Error(`HTTP ${response.status}: ${bodyText.slice(0, 200)}`);
  }

  const payload = await response.json();
  const text = payload?.candidates?.[0]?.content?.parts?.[0]?.text;
  if (!text) {
    // Distinguish safety blocks from plain empty completions.
    if (payload?.candidates?.[0]?.finishReason === "SAFETY") {
      throw new Error("Response blocked by safety filters.");
    }
    throw new Error("Empty response from model.");
  }
  return text;
}
|
|
44
|
+
|
|
45
|
+
/**
 * Gemini call with retry, exponential backoff, and model cascading.
 *
 * Rate-limit / 503 errors: back off (exponential + jitter, capped at 60s)
 * and retry the same model up to `maxRetries` times.
 * Other errors: cascade to the next model in MODELS (with a reduced retry
 * budget of 3); only when the last model is reached do we keep retrying here.
 *
 * @param {string} apiKey  Gemini API key.
 * @param {string} prompt  Full prompt text.
 * @param {object} opts    { maxTokens, maxRetries, baseDelay, onProgress, modelIndex }
 * @returns {Promise<string>} Model response text.
 * @throws Error with .code = "RATE_LIMIT_EXCEEDED" or "AI_ERROR" after exhaustion.
 */
async function geminiCall(apiKey, prompt, opts = {}) {
  const { maxTokens = 8192, maxRetries = 6, baseDelay = 3000, onProgress, modelIndex = 0 } = opts;
  // Progress goes to the caller's callback when provided; otherwise to stdout
  // using `\r` so the countdown redraws on a single line.
  const report = (msg) => { if (onProgress) onProgress(msg); else process.stdout.write(`\r ${msg} `); };
  const model = MODELS[Math.min(modelIndex, MODELS.length - 1)];
  let lastErr;

  for (let attempt = 0; attempt < maxRetries; attempt++) {
    try {
      return await geminiRaw(apiKey, model, prompt, maxTokens);
    } catch (e) {
      lastErr = e;
      if (e.isRateLimit || e.isRetryable) {
        // Exponential backoff with up to 1.5s of jitter, capped at 60 seconds.
        const waitMs = Math.min(baseDelay * Math.pow(2, attempt) + Math.floor(Math.random() * 1500), 60_000);
        const waitSecs = Math.ceil(waitMs / 1000);
        // Count down one second at a time purely for user feedback.
        for (let s = waitSecs; s > 0; s--) {
          report(`Rate-limited on ${model}. Retrying in ${s}s… (attempt ${attempt + 1}/${maxRetries})`);
          await sleep(1000);
        }
        if (!onProgress) process.stdout.write("\n");
      } else {
        // Non-retryable failure: hand off to the next model in the cascade.
        if (modelIndex < MODELS.length - 1) {
          report(`Model ${model} failed. Cascading to next model…`);
          await sleep(1500);
          return geminiCall(apiKey, prompt, { ...opts, modelIndex: modelIndex + 1, maxRetries: 3 });
        }
        // NOTE(review): on the LAST model, non-retryable errors are still
        // re-attempted up to maxRetries with only a 2s pause — confirm this
        // is intentional rather than a leftover from the retry path.
        if (attempt > 0) await sleep(2000);
      }
    }
  }
  const err = new Error(`AI generation failed: ${lastErr?.message}`);
  err.code = lastErr?.isRateLimit ? "RATE_LIMIT_EXCEEDED" : "AI_ERROR";
  throw err;
}
|
|
78
|
+
|
|
79
|
+
/**
 * Best-effort JSON extraction from a model response.
 * Tries, in order: the raw text, the text with markdown code fences stripped,
 * and the first bracketed/braced span found in the stripped text.
 * Returns the parsed value, or null when nothing parses.
 */
function extractJson(raw) {
  const tryParse = (text) => {
    try { return JSON.parse(text); } catch { return undefined; }
  };

  let value = tryParse(raw);
  if (value !== undefined) return value;

  const stripped = raw.replace(/```(?:json)?\n?/g, "").trim();
  value = tryParse(stripped);
  if (value !== undefined) return value;

  const span = stripped.match(/(\[[\s\S]*\]|\{[\s\S]*\})/);
  if (span) {
    value = tryParse(span[1]);
    if (value !== undefined) return value;
  }
  return null;
}
|
|
87
|
+
|
|
88
|
+
/**
 * Pass 1: produce a short technical summary for every file, in batches of 8.
 * When the model does not return a JSON array, each file in the batch gets a
 * metadata-derived stub summary instead.
 *
 * @param {string} apiKey Gemini API key.
 * @param {Array}  metas  Per-file metadata records ({file, ext, functions, complexity, …}).
 * @param {object} opts   Forwarded to geminiCall; may include onProgress.
 * @returns {Promise<Array<{file, role, summary}>>}
 */
async function pass1FileSummaries(apiKey, metas, opts = {}) {
  const BATCH = 8;
  const summaries = [];
  for (let i = 0; i < metas.length; i += BATCH) {
    const batch = metas.slice(i, i + BATCH);
    opts.onProgress && opts.onProgress(`Pass 1 — Analyzing files ${i + 1}–${Math.min(i + BATCH, metas.length)} of ${metas.length}…`);
    const prompt = `You are a senior software architect conducting a rigorous code review.
Analyze the file metadata and provide a precise technical summary for each file.

REQUIREMENTS:
- Each summary: 1-2 sentences, highly specific (mention what the module DOES, not that it "exists")
- Use engineering terminology (e.g., "implements OAuth 2.0 PKCE flow", "exposes REST endpoints")
- Identify architectural role: controller, service, utility, middleware, model, gateway, config, etc.
- Do NOT use generic phrases like "handles logic" or "manages stuff"

Return ONLY a valid JSON array, no markdown, no preamble:
[{ "file": "path", "role": "architectural role", "summary": "precise description" }]

FILE METADATA:
${JSON.stringify(batch, null, 2)}`;
    // Fix: removed the original no-op `try { … } catch (e) { throw e; }` wrapper —
    // it only obscured the control flow; errors propagate identically without it.
    const raw = await geminiCall(apiKey, prompt, { maxTokens: 2048, ...opts });
    const parsed = extractJson(raw);
    if (Array.isArray(parsed)) summaries.push(...parsed);
    else for (const m of batch) summaries.push({ file: m.file, role: "module", summary: `${m.ext} module — ${m.functions} functions, complexity score ${m.complexity}.` });
    opts.onProgress && opts.onProgress(`Pass 1 — Completed ${Math.min(i + BATCH, metas.length)}/${metas.length}`);
  }
  return summaries;
}
|
|
118
|
+
|
|
119
|
+
/**
 * Pass 2: synthesize a single system-architecture overview from the per-file
 * summaries produced by pass 1.
 *
 * @param {string} apiKey    Gemini API key.
 * @param {Array}  metas     Raw per-file metadata records.
 * @param {Array}  summaries Output of pass1FileSummaries.
 * @param {{stack: string, reasons: string[]}} stackInfo Detected tech stack + evidence.
 * @param {object} opts      Forwarded to geminiCall (onProgress, retries, …).
 * @returns {Promise<string>} Markdown overview text with fixed subheadings.
 */
async function pass2SystemOverview(apiKey, metas, summaries, stackInfo, opts = {}) {
  opts.onProgress && opts.onProgress("Pass 2 — Synthesizing system architecture overview…");
  // The prompt embeds both the condensed summaries and the full metadata so
  // the model can cite real file paths in the overview.
  const prompt = `You are a Principal Engineer authoring formal internal architecture documentation for a ${stackInfo.stack} project.

Using the file metadata and module summaries below, produce a comprehensive SYSTEM OVERVIEW of 600–900 words.

Use these exact subheadings:
## Executive Summary
## Architectural Pattern
## Core Subsystems
## Data Flow & Integration Points
## Technology Stack
## Dependency Topology
## Quality & Risk Profile

STYLE: Precise corporate engineering language. Reference real file paths. No hedging. Declarative prose only.

STACK CONTEXT: ${stackInfo.stack} | ${stackInfo.reasons.join("; ")}
TOTAL FILES: ${metas.length}

FILE SUMMARIES:
${JSON.stringify(summaries, null, 2)}

FULL METADATA:
${JSON.stringify(metas, null, 2)}`;
  return geminiCall(apiKey, prompt, { maxTokens: 4096, ...opts });
}
|
|
146
|
+
|
|
147
|
+
/**
 * Pass 3: generate the full documentation suite (README, Architecture,
 * Onboarding, Usage, Security, API Reference, Contributing, Changelog)
 * in a single model call, driven by the pass-2 overview plus all metadata.
 *
 * @param {string} apiKey    Gemini API key.
 * @param {string} overview  System overview text from pass2SystemOverview.
 * @param {Array}  summaries Per-file summaries from pass1FileSummaries.
 * @param {Array}  metas     Raw per-file metadata records.
 * @param {{stack: string}} stackInfo Detected tech stack.
 * @param {object} opts      Forwarded to geminiCall (onProgress, retries, …).
 * @returns {Promise<string>} Markdown with the exact `# Section` heading
 *   markers listed in the prompt, for downstream splitting.
 */
async function pass3FullDocs(apiKey, overview, summaries, metas, stackInfo, opts = {}) {
  opts.onProgress && opts.onProgress("Pass 3 — Generating comprehensive documentation suite…");
  const prompt = `You are a Staff Technical Writer at a Tier-1 software engineering organization producing the definitive documentation suite for this codebase.

Generate ALL sections with MAXIMUM DETAIL and CORPORATE-GRADE QUALITY.

Use THESE EXACT HEADING MARKERS:
# README.md
# Architecture
# Onboarding
# Usage
# Security
# API Reference
# Contributing
# Changelog

SECTION SPECIFICATIONS:

# README.md (400-600 words)
- Project name, tagline, concise description
- Badges (build, version, license — placeholder URLs)
- Table of Contents (linked)
- Features section (8-12 specific concrete features)
- Prerequisites (exact versions where inferable)
- Quick Start (numbered commands)
- Project Structure (tree view with descriptions)
- Configuration reference table (key | type | default | description)
- Links to other doc sections

# Architecture (500-700 words)
- Overview paragraph
- ASCII diagram showing module relationships
- Subsystem breakdown with responsibilities
- Design decisions and trade-offs
- Primary user flow as ASCII sequence diagram
- Technology decisions rationale

# Onboarding
- Environment setup (step-by-step, exact commands)
- Environment variables table (variable | required | default | description)
- Development workflow
- Running tests
- Common pitfalls and solutions (at least 5 items)

# Usage
- CLI reference table (command | flags | description | example)
- Detailed examples for all major use cases with realistic output
- Advanced configuration scenarios

# Security
- Authentication & authorization model
- Secrets management policy (exact file locations, permissions)
- Data classification table
- Threat model (4+ specific threats with mitigations)
- Vulnerability disclosure process

# API Reference
- Infer from file structure: endpoints, request/response schemas, error codes

# Contributing
- Contribution workflow (fork, branch naming, PR template)
- Coding standards and linting
- Review checklist (10+ items)

# Changelog
- [Unreleased] section with inferred features
- v1.0.0 initial release stub

CRITICAL RULES:
1. Reference REAL file paths from metadata — never invent filenames
2. Every section MUST be substantive — no one-liners or thin bullets
3. Tables must be proper markdown with | alignment
4. ASCII diagrams must be architecturally accurate
5. Do NOT use placeholder text like "[Add here]"
6. Omit rather than fabricate if something cannot be inferred

CONTEXT: Stack: ${stackInfo.stack} | Total files: ${metas.length}

SYSTEM OVERVIEW:
${overview}

FILE SUMMARIES:
${JSON.stringify(summaries, null, 2)}

FILE METADATA:
${JSON.stringify(metas, null, 2)}`;
  return geminiCall(apiKey, prompt, { maxTokens: 8192, ...opts });
}
|
|
235
|
+
|
|
236
|
+
/**
 * Pass 4: apply a free-form user instruction to an existing documentation set
 * and return the complete updated text (heading markers preserved).
 *
 * @param {string} apiKey      Gemini API key.
 * @param {string} existingDoc Current full documentation text.
 * @param {string} instruction User's improvement request.
 * @param {object} opts        Forwarded to geminiCall.
 * @returns {Promise<string>} Updated documentation.
 */
async function pass4Improve(apiKey, existingDoc, instruction, opts = {}) {
  opts.onProgress?.(`Applying: "${instruction.slice(0, 60)}…"`);
  const prompt = `You are a senior technical writer improving an existing documentation set.

INSTRUCTION: "${instruction}"

Apply this instruction to the documentation below. Return the COMPLETE updated documentation — do not truncate or omit existing sections unless the instruction explicitly removes them. Maintain all heading markers.

EXISTING DOCUMENTATION:
${existingDoc}`;
  return geminiCall(apiKey, prompt, { maxTokens: 8192, ...opts });
}
|
|
248
|
+
|
|
249
|
+
// Public API: the four-pass documentation pipeline plus low-level helpers.
module.exports = { geminiCall, pass1FileSummaries, pass2SystemOverview, pass3FullDocs, pass4Improve, extractJson, MODELS };
|
package/lib/auth.js
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
"use strict";

/**
 * CDX Firebase Authentication
 * Handles sign-up, sign-in, token refresh, and secure local JWT persistence.
 * Uses the Firebase Auth REST API (no native SDK required).
 */

const fetch = require("./fetch"); // native fetch on Node 18+, node-fetch fallback
const store = require("./store"); // local credential/session persistence

// Firebase REST endpoints: Identity Toolkit for account actions,
// Secure Token service for refresh-token exchange.
const FB_BASE = "https://identitytoolkit.googleapis.com/v1/accounts";
const REFRESH_URL = "https://securetoken.googleapis.com/v1/token";
|
|
14
|
+
|
|
15
|
+
/** Build an Identity Toolkit URL for `action`, keyed by the Firebase API key. */
function fbUrl(action, apiKey) {
  // Fix: encode the key so unusual characters cannot corrupt the query string
  // (normal Firebase keys are URL-safe, so this is behavior-preserving for them).
  return `${FB_BASE}:${action}?key=${encodeURIComponent(apiKey)}`;
}
|
|
16
|
+
|
|
17
|
+
/**
 * POST a JSON body to a Firebase endpoint and return the parsed response.
 * On a non-OK status, throws an Error with a humanized message and the raw
 * Firebase error code on `.code`.
 *
 * @throws Error — humanized Firebase error, or the timeout/abort error.
 */
async function fbPost(url, body) {
  const res = await fetch(url, {
    method : "POST",
    headers: { "Content-Type": "application/json" },
    body   : JSON.stringify(body),
    signal : AbortSignal.timeout(15_000),
  });
  // Fix: error responses are not guaranteed to be JSON (e.g. HTML from a proxy
  // or gateway). Parse defensively so callers see "HTTP <status>" instead of a
  // bare SyntaxError from res.json().
  const data = await res.json().catch(() => null);
  if (!res.ok) {
    const msg = data?.error?.message || `HTTP ${res.status}`;
    const e = new Error(humanizeFirebaseError(msg));
    e.code = msg; // raw Firebase code for programmatic handling
    throw e;
  }
  return data;
}
|
|
33
|
+
|
|
34
|
+
/**
 * Translate a raw Firebase error code into a user-friendly sentence.
 * Matching is by substring, so codes with suffixes (e.g. "WEAK_PASSWORD :
 * Password should be…") still resolve. Unknown codes get a generic prefix.
 */
function humanizeFirebaseError(code) {
  const MESSAGES = {
    "EMAIL_EXISTS"               : "An account with this email address already exists.",
    "INVALID_EMAIL"              : "The email address is not valid.",
    "WEAK_PASSWORD"              : "Password must be at least 6 characters.",
    "EMAIL_NOT_FOUND"            : "No account found with this email address.",
    "INVALID_PASSWORD"           : "Incorrect password. Please try again.",
    "USER_DISABLED"              : "This account has been disabled.",
    "TOO_MANY_ATTEMPTS_TRY_LATER": "Too many failed attempts. Please try again later.",
    "INVALID_LOGIN_CREDENTIALS"  : "Invalid email or password.",
    "TOKEN_EXPIRED"              : "Your session has expired. Please sign in again.",
    "USER_NOT_FOUND"             : "No user found with these credentials.",
    "OPERATION_NOT_ALLOWED"      : "Email/password sign-in is not enabled for this project.",
    "MISSING_PASSWORD"           : "Password is required.",
  };
  const hit = Object.entries(MESSAGES).find(([key]) => code.includes(key));
  return hit ? hit[1] : `Authentication error: ${code}`;
}
|
|
52
|
+
|
|
53
|
+
/**
 * Create a new Firebase account with email/password.
 * Optionally sets the display name in a follow-up request; that update is
 * best-effort since the account itself already exists at that point.
 *
 * @returns {Promise<{uid, email, displayName, idToken, refreshToken, expiresIn}>}
 */
async function signUp(apiKey, email, password, displayName = "") {
  const data = await fbPost(fbUrl("signUp", apiKey), { email, password, returnSecureToken: true });
  if (displayName && data.idToken) {
    // Best-effort: a failed profile update must not fail the sign-up.
    await fbPost(fbUrl("update", apiKey), { idToken: data.idToken, displayName, returnSecureToken: false }).catch(() => {});
  }
  const fallbackName = data.email.split("@")[0];
  return {
    uid         : data.localId,
    email       : data.email,
    displayName : displayName || fallbackName,
    idToken     : data.idToken,
    refreshToken: data.refreshToken,
    expiresIn   : data.expiresIn,
  };
}
|
|
60
|
+
|
|
61
|
+
/**
 * Sign in with email/password via the Identity Toolkit REST API.
 * Falls back to the local part of the email for the display name.
 *
 * @returns {Promise<{uid, email, displayName, idToken, refreshToken, expiresIn}>}
 */
async function signIn(apiKey, email, password) {
  const data = await fbPost(fbUrl("signInWithPassword", apiKey), { email, password, returnSecureToken: true });
  return {
    uid         : data.localId,
    email       : data.email,
    displayName : data.displayName || data.email.split("@")[0],
    idToken     : data.idToken,
    refreshToken: data.refreshToken,
    expiresIn   : data.expiresIn,
  };
}
|
|
65
|
+
|
|
66
|
+
/**
 * Exchange a refresh token for a fresh ID token via the Secure Token service.
 * Note the response uses snake_case keys, unlike the Identity Toolkit API.
 *
 * @returns {Promise<{idToken, refreshToken, expiresIn}>}
 * @throws Error with a humanized message on failure.
 */
async function doRefreshToken(apiKey, rToken) {
  const res = await fetch(`${REFRESH_URL}?key=${apiKey}`, {
    method : "POST",
    headers: { "Content-Type": "application/x-www-form-urlencoded" },
    body   : `grant_type=refresh_token&refresh_token=${encodeURIComponent(rToken)}`,
    signal : AbortSignal.timeout(15_000),
  });
  // Fix: parse defensively — error responses (or a misbehaving proxy) may not
  // be JSON, and res.json() would otherwise throw an opaque SyntaxError.
  const data = await res.json().catch(() => null);
  if (!res.ok || !data) {
    throw new Error(humanizeFirebaseError(data?.error?.message || `HTTP ${res.status}`));
  }
  return { idToken: data.id_token, refreshToken: data.refresh_token, expiresIn: data.expires_in };
}
|
|
77
|
+
|
|
78
|
+
/**
 * Request a password-reset email for `email`.
 * The response body carries nothing the caller needs, so it is discarded;
 * failures propagate as humanized errors from fbPost.
 */
async function sendPasswordReset(apiKey, email) {
  const payload = { requestType: "PASSWORD_RESET", email };
  await fbPost(fbUrl("sendOobCode", apiKey), payload);
}
|
|
81
|
+
|
|
82
|
+
/**
 * Persist an authenticated session to local storage.
 * The ID token goes through the dedicated session slot; the remaining fields
 * are stored as plain key/value config entries.
 */
function persistSession(session) {
  store.saveSession(session.idToken);
  const fields = {
    refreshToken: session.refreshToken,
    userEmail   : session.email,
    userName    : session.displayName || "",
    userUid     : session.uid || "",
  };
  for (const [key, value] of Object.entries(fields)) {
    store.set(key, value);
  }
}
|
|
89
|
+
|
|
90
|
+
/**
 * Attempt to restore a previously persisted session.
 *
 * Order of attempts:
 *   1. The stored ID token, if store.sessionIsAlive says it is still valid.
 *   2. Minting a fresh ID token from the stored refresh token.
 *   3. Give up and return null — the caller must re-authenticate.
 *
 * @returns {Promise<{uid, email, displayName, idToken} | null>}
 */
async function restoreSession() {
  const apiKey = store.get("firebaseApiKey");
  const idToken = store.loadSession();
  const refreshTok = store.get("refreshToken");
  const email = store.get("userEmail");
  // Without an API key and a stored email there is nothing to restore.
  if (!apiKey || !email) return null;
  if (idToken && store.sessionIsAlive(idToken)) {
    const d = store.decodeJwt(idToken);
    // Prefer claims from the JWT, falling back to the stored values.
    return { uid: d?.user_id || d?.sub || "", email: d?.email || email, displayName: store.get("userName") || email.split("@")[0], idToken };
  }
  if (refreshTok) {
    try {
      const r = await doRefreshToken(apiKey, refreshTok);
      // Persist the rotated token pair before returning the refreshed session.
      store.saveSession(r.idToken);
      store.set("refreshToken", r.refreshToken);
      const d = store.decodeJwt(r.idToken);
      return { uid: d?.user_id || d?.sub || "", email: d?.email || email, displayName: store.get("userName") || email.split("@")[0], idToken: r.idToken };
    } catch {} // refresh failed (revoked/expired) — fall through to null
  }
  return null;
}
|
|
111
|
+
|
|
112
|
+
/**
 * Clear every locally persisted credential: the session token slot plus all
 * user-related config keys written by persistSession.
 */
function signOut() {
  store.clearSession();
  for (const key of ["refreshToken", "userEmail", "userName", "userUid"]) {
    store.del(key);
  }
}

module.exports = { signUp, signIn, doRefreshToken, sendPasswordReset, persistSession, restoreSession, signOut };
|
package/lib/fetch.js
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"use strict";

/**
 * Universal fetch helper.
 * Node 18+ ships native `fetch` globally — use it.
 * Older Node: dynamically import node-fetch v2 (CJS-compatible).
 * This avoids the ERR_REQUIRE_ESM crash from node-fetch v3.
 */

// Memoized fetch implementation; resolved once by getFetch() and reused.
let _fetchFn = null;
|
|
11
|
+
|
|
12
|
+
/**
 * Resolve and memoize a fetch implementation.
 * Resolution order: native global fetch (Node 18+), then node-fetch v2 via
 * require, then dynamic import (covers ESM-only node-fetch v3).
 *
 * @throws Error when no implementation can be found.
 */
async function getFetch() {
  if (!_fetchFn) {
    if (typeof globalThis.fetch === "function") {
      // Native implementation — bind so it keeps its expected receiver.
      _fetchFn = globalThis.fetch.bind(globalThis);
    } else {
      try {
        // node-fetch v2 is CommonJS and can be required directly.
        _fetchFn = require("node-fetch");
      } catch {
        // node-fetch v3 is ESM-only; dynamic import handles v2 and v3 alike.
        try {
          const mod = await import("node-fetch");
          _fetchFn = mod.default || mod;
        } catch (e) {
          throw new Error("No fetch implementation available. Run: npm install node-fetch@2");
        }
      }
    }
  }
  return _fetchFn;
}
|
|
36
|
+
|
|
37
|
+
/**
 * Drop-in async fetch wrapper.
 * Usage: const res = await fetch(url, options);
 */
async function fetch(url, options) {
  const impl = await getFetch();
  return impl(url, options);
}
|
|
45
|
+
|
|
46
|
+
// Export the wrapper directly: callers do `const fetch = require("./fetch")`.
module.exports = fetch;
|