@sanjay5114/cdx 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cdx.js +27 -0
- package/commands/auth.js +197 -0
- package/commands/config.js +165 -0
- package/commands/create.js +474 -0
- package/index.js +530 -0
- package/lib/ai.js +249 -0
- package/lib/auth.js +120 -0
- package/lib/fetch.js +46 -0
- package/lib/scanner.js +351 -0
- package/lib/store.js +83 -0
- package/lib/ui.js +477 -0
- package/package.json +35 -0
package/lib/scanner.js
ADDED
|
@@ -0,0 +1,351 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* CDX File Scanner & Metadata Pipeline
|
|
5
|
+
*
|
|
6
|
+
* Stages: SCAN → FILTER → PARSE_METADATA → DETECT_STACK
|
|
7
|
+
* → PRIORITIZE → SCORE → SELECT_TOP_N → CHUNK → COMPRESS → TOKEN_CHECK
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
const fs = require("fs").promises;
|
|
11
|
+
const path = require("path");
|
|
12
|
+
|
|
13
|
+
// ─── Constants ────────────────────────────────────────────────────────────────
|
|
14
|
+
// File extensions — plus a few lowercased extension-less basenames — that the
// scanner treats as source worth analyzing.
const CODE_EXTS = new Set([
  ".js",".ts",".jsx",".tsx",".mjs",".cjs",
  ".py",".go",".rs",".java",".c",".cpp",".h",".hpp",
  ".rb",".php",".swift",".kt",".cs",".dart",
  ".json",".yaml",".yml",".toml",
  ".md",".mdx",".txt",
  ".html",".htm",".css",".scss",".sass",".less",".vue",".svelte",".astro",".svg",
  ".sh",".bash",".zsh",
  // NOTE(review): path.extname(".env.example") is ".example", so this entry
  // only matches via the basename check — and hidden-file filtering skips
  // dot-files unless includeHidden is set. Confirm this is intended.
  ".env.example",
  "dockerfile","makefile","procfile", // extension-less files, matched by lowercased basename
]);

// Directory names skipped entirely during traversal (compared lowercased).
const IGNORE_DIRS = new Set([
  "node_modules",".git","build","dist","out","assets",
  ".next",".nuxt","coverage","__pycache__",".venv","venv",
  ".cache",".turbo",".parcel-cache","storybook-static",
  "public","static","vendor",".gradle","target",
]);

// Exact file names skipped (case-sensitive match against the entry name).
const IGNORE_FILES = new Set([
  ".DS_Store","Thumbs.db","package-lock.json","yarn.lock","pnpm-lock.yaml",
  ".npmrc",".nvmrc",".node-version",".editorconfig",
]);
|
|
37
|
+
|
|
38
|
+
// Filename patterns that indicate secret material; tested against the
// lowercased basename only.
const SENSITIVE_RX = [
  /\.env$/i, /secret/i, /credential/i, /password/i,
  /private[_-]?key/i, /\.pem$/i, /\.key$/i, /\.pfx$/i,
  /\.p12$/i, /id_rsa/i, /id_ed25519/i, /\.cert$/i,
];

const MAX_FILE_BYTES  = 500_000; // files larger than this are skipped / kept metadata-only
const TOP_N           = 25;      // how many scored files survive selection
const CHUNK_LINES     = 300;     // files longer than this are flagged as chunked
const TOKEN_BUDGET    = 12_000;  // rough token ceiling for the metadata payload
const TOKENS_PER_META = 60;      // estimated tokens consumed per metadata record

// ─── Sensitivity ──────────────────────────────────────────────────────────────

/**
 * Decide whether a path looks like it holds secret material.
 * Only the basename is inspected, lowercased before matching.
 * @param {string} filePath
 * @returns {boolean}
 */
function isSensitive(filePath) {
  const name = path.basename(filePath).toLowerCase();
  for (const rx of SENSITIVE_RX) {
    if (rx.test(name)) return true;
  }
  return false;
}
|
|
55
|
+
|
|
56
|
+
// ─── Local file scan ──────────────────────────────────────────────────────────
|
|
57
|
+
/**
 * Recursively walk `dir`, accumulating candidate source files into `files`.
 * Dot-prefixed entries are skipped unless `includeHidden` is set; ignored
 * directories/files and oversized files are filtered out. Unreadable
 * directories and failed stats are silently skipped (best-effort scan).
 * @param {string} dir - directory to walk
 * @param {boolean} [includeHidden] - include dot-files/dot-dirs
 * @param {Array<{path:string,size:number}>} [files] - shared accumulator
 * @returns {Promise<Array<{path:string,size:number}>>}
 */
async function scanLocal(dir, includeHidden = false, files = []) {
  let entries;
  try {
    entries = await fs.readdir(dir, { withFileTypes: true });
  } catch {
    return files; // unreadable directory — keep whatever we have so far
  }

  for (const ent of entries) {
    if (!includeHidden && ent.name.startsWith(".")) continue;
    if (IGNORE_DIRS.has(ent.name.toLowerCase()) || IGNORE_FILES.has(ent.name)) continue;

    const fullPath = path.join(dir, ent.name);
    if (ent.isDirectory()) {
      await scanLocal(fullPath, includeHidden, files);
      continue;
    }

    const ext = path.extname(ent.name).toLowerCase();
    const base = ent.name.toLowerCase();
    if (!CODE_EXTS.has(ext) && !CODE_EXTS.has(base)) continue;

    try {
      const { size } = await fs.stat(fullPath);
      if (size <= MAX_FILE_BYTES) files.push({ path: fullPath, size });
    } catch {
      // stat failed (race or permissions) — drop this file
    }
  }
  return files;
}
|
|
81
|
+
|
|
82
|
+
// ─── Parse metadata from local file ──────────────────────────────────────────
|
|
83
|
+
/**
 * Build a metadata record for one on-disk file. Sensitive files are never
 * read (empty content); read failures also degrade to empty content.
 * The stored path is made relative to the current working directory.
 */
async function parseLocal(filePath) {
  const sensitive = isSensitive(filePath);
  let content = "";
  if (!sensitive) {
    content = await fs.readFile(filePath, "utf8").catch(() => "");
  }
  const rel = path.relative(process.cwd(), filePath);
  return buildMeta(rel, path.extname(filePath), content, sensitive);
}
|
|
88
|
+
|
|
89
|
+
// ─── Parse metadata from raw content string ──────────────────────────────────
|
|
90
|
+
/**
 * Derive lightweight code metrics from a file's content.
 * All counts are regex heuristics over raw text, not parses.
 * @param {string} file - file path (relative or repo-relative)
 * @param {string} ext - extension; stored lowercased
 * @param {string} [content] - file text; empty for sensitive/unread files
 * @param {boolean} [sensitive] - true when content was withheld
 * @returns {object} metadata record consumed by the scoring pipeline
 */
function buildMeta(file, ext, content = "", sensitive = false) {
  const lines = content ? content.split("\n") : [];
  const fnMatches = content.match(/function\s+\w+|\b\w+\s*[:=]\s*(?:async\s*)?\(|(?:def|fn|func)\s+\w+|\w+\s*\(.*\)\s*[{:]/g) || [];
  const importCnt = (content.match(/\b(?:require|import)\b/g) || []).length;
  const exportCnt = (content.match(/\b(?:module\.exports|export\s+(?:default|const|function|class))\b/g) || []).length;
  // Bugfix: `&&`, `||` and `?` are not word characters, so the old pattern's
  // surrounding \b could never match them and operators went uncounted.
  // Keywords keep their \b guards; operators are matched bare.
  const complexity = (content.match(/\b(?:if|else|for|while|switch|catch)\b|&&|\|\||\?/g) || []).length;
  const classCnt = (content.match(/\bclass\s+\w+/g) || []).length;
  const asyncCnt = (content.match(/\basync\b/g) || []).length;
  const testFile = /\.(test|spec)\.[jt]sx?$/.test(file) || /__(tests?|mocks?)__/.test(file);

  return {
    file, ext: ext.toLowerCase(),
    size : lines.length,                    // line count, not bytes
    functions: Math.min(fnMatches.length, 999), // capped to bound payload size
    imports : importCnt,
    exports : exportCnt,
    complexity,
    classes : classCnt,
    asyncOps : asyncCnt,
    testFile, sensitive,
    chunked : false,                        // set later by chunkLarge()
  };
}
|
|
113
|
+
|
|
114
|
+
// ─── Stack Detection ──────────────────────────────────────────────────────────
|
|
115
|
+
/**
 * Heuristically identify the dominant tech stack from file metadata.
 * Each candidate earns a score from marker files and extension counts;
 * the highest-scoring candidate wins when its score exceeds 2, otherwise
 * the result is "generic". Ties resolve to the earlier candidate in the
 * checks list.
 * @param {Array<object>} metas - records from buildMeta
 * @param {object} [opts] - currently unused
 * @returns {Promise<{stack:string,reasons:string[]}>}
 */
async function detectStack(metas, opts = {}) {
  const names = metas.map(m => m.file.toLowerCase());
  const hasFile = rx => names.some(n => rx.test(n));
  const extCount = ext => metas.filter(m => m.ext === ext).length;

  const checks = [
    { stack: "next", score: (hasFile(/next\.config/) ? 10 : 0) + (hasFile(/\/pages\/|\/app\//) ? 5 : 0) },
    { stack: "react", score: (extCount(".jsx") + extCount(".tsx")) * 2 },
    { stack: "vue", score: extCount(".vue") * 3 },
    { stack: "svelte", score: extCount(".svelte") * 3 },
    { stack: "angular", score: hasFile(/\.component\.ts$/) ? 10 : 0 },
    { stack: "flutter", score: extCount(".dart") * 3 + (hasFile(/pubspec\.yaml/) ? 5 : 0) },
    { stack: "python", score: extCount(".py") * 2 + (hasFile(/requirements\.txt|setup\.py|pyproject\.toml/) ? 4 : 0) },
    { stack: "go", score: extCount(".go") * 2 + (hasFile(/go\.mod|go\.sum/) ? 4 : 0) },
    { stack: "rust", score: extCount(".rs") * 2 + (hasFile(/cargo\.toml/) ? 4 : 0) },
    { stack: "java", score: extCount(".java") * 2 + (hasFile(/pom\.xml|build\.gradle/) ? 4 : 0) },
    { stack: "node", score: hasFile(/package\.json/) ? 4 : 0 },
  ];

  // Single pass for the maximum; strict ">" keeps the first max on ties,
  // matching the previous stable descending sort.
  let top = checks[0];
  for (const candidate of checks) {
    if (candidate.score > top.score) top = candidate;
  }
  const stack = top.score > 2 ? top.stack : "generic";

  const reasons = [];
  if (hasFile(/next\.config/)) reasons.push("next.config found");
  if (hasFile(/package\.json/)) reasons.push("Node.js project");
  if (extCount(".py") > 0) reasons.push(`${extCount(".py")} Python files`);
  if (extCount(".go") > 0) reasons.push(`${extCount(".go")} Go files`);
  if (extCount(".dart") > 0) reasons.push(`${extCount(".dart")} Dart files`);
  if (hasFile(/pubspec\.yaml/)) reasons.push("Flutter pubspec.yaml");
  const reactCnt = extCount(".jsx") + extCount(".tsx");
  if (reactCnt > 0) reasons.push(`${reactCnt} React components`);

  return { stack, reasons: reasons.length ? reasons : ["generic codebase"] };
}
|
|
149
|
+
|
|
150
|
+
// ─── Stack-aware priority boosting ───────────────────────────────────────────
|
|
151
|
+
// Path patterns (tested against the lowercased path) that mark a file as
// high-priority for a given stack.
const STACK_PATTERNS = {
  next : [/^pages\/|^app\/|next\.config|middleware\./],
  react : [/^src\/|components\/|hooks\/|context\//],
  vue : [/\.vue$|store\//],
  flutter: [/^lib\/|\.dart$/],
  python : [/\.py$/],
  go : [/\.go$|main\.go|cmd\//],
  node : [/^src\/|server\.|app\.|index\.js|routes?\//],
  generic: [],
};

/**
 * Move stack-relevant files to the front, preserving relative order within
 * both groups. Returns the input array unchanged when the stack has no
 * patterns. Partitions in a single O(n) pass — the previous implementation
 * re-scanned the priority list with `includes` for every element (O(n²)).
 * @param {Array<object>} metas - records from buildMeta
 * @param {string} stack - key into STACK_PATTERNS
 * @returns {Array<object>}
 */
function prioritize(metas, stack) {
  const patterns = STACK_PATTERNS[stack] || [];
  if (!patterns.length) return metas;
  const priority = [];
  const rest = [];
  for (const m of metas) {
    const hit = patterns.some(rx => rx.test(m.file.toLowerCase()));
    (hit ? priority : rest).push(m);
  }
  return [...priority, ...rest];
}
|
|
169
|
+
|
|
170
|
+
// ─── Scoring ──────────────────────────────────────────────────────────────────
|
|
171
|
+
/**
 * Attach a relevance score to each metadata record. Weights favor rich code
 * (functions, classes, exports), penalize tests and sensitive files, and add
 * small bonuses for entry points and configuration files.
 * Returns new objects; the input records are not mutated.
 */
function score(metas) {
  return metas.map(m => {
    let s = 0;
    s += m.functions * 3.0;
    s += m.classes * 4.0;
    s += m.exports * 2.5;
    s += m.imports * 1.5;
    s += m.complexity * 1.5;
    s += m.asyncOps * 1.0;
    s += Math.min(m.size / 60, 8);                                // length bonus, capped
    if (m.testFile) s -= 6;                                       // tests matter less
    if (m.sensitive) s -= 20;                                     // never surface secrets
    if (/index\.|main\.|app\.|server\./.test(m.file)) s += 5;     // likely entry point
    if (/config|setting|env/.test(m.file.toLowerCase())) s += 3;  // configuration
    return { ...m, score: s };
  });
}
|
|
189
|
+
|
|
190
|
+
/**
 * Return the `n` highest-scoring records (descending) without mutating
 * the input array.
 */
function selectTopN(scored, n = TOP_N) {
  const ranked = scored.slice();
  ranked.sort((a, b) => b.score - a.score);
  return ranked.slice(0, n);
}
|
|
193
|
+
|
|
194
|
+
/**
 * Flag records whose line count exceeds CHUNK_LINES as chunked.
 * Flagged entries become fresh objects; everything else passes through
 * by reference.
 */
function chunkLarge(metas) {
  return metas.map(m => {
    if (m.size <= CHUNK_LINES) return m;
    return { ...m, chunked: true };
  });
}
|
|
197
|
+
|
|
198
|
+
/**
 * Reduce each record to the whitelisted metadata fields, coercing the
 * flag fields to real booleans so the payload serializes predictably.
 * Any extra properties (e.g. `score`) are dropped.
 */
function compress(metas) {
  return metas.map(m => ({
    file: m.file,
    ext: m.ext,
    size: m.size,
    functions: m.functions,
    imports: m.imports,
    exports: m.exports,
    complexity: m.complexity,
    classes: m.classes,
    asyncOps: m.asyncOps,
    testFile: Boolean(m.testFile),
    sensitive: Boolean(m.sensitive),
    chunked: Boolean(m.chunked),
  }));
}
|
|
202
|
+
|
|
203
|
+
/**
 * Trim the list so the estimated token cost stays within TOKEN_BUDGET
 * (budget divided by the per-record estimate, rounded down).
 */
function tokenCheck(metas) {
  const maxRecords = Math.floor(TOKEN_BUDGET / TOKENS_PER_META);
  return metas.slice(0, maxRecords);
}
|
|
206
|
+
|
|
207
|
+
// ─── Full local pipeline ──────────────────────────────────────────────────────
|
|
208
|
+
/**
 * Run the full local analysis pipeline over `cwd`:
 * scan → parse metadata → detect stack → prioritize → score →
 * select top-N → chunk → compress → token budget.
 * @param {string} cwd - directory to analyze
 * @param {object} [opts] - { includeHidden?: boolean }
 * @returns {Promise<{raw:Array, selected:Array, stackInfo:object}>}
 * @throws {Error} when no source files are found
 */
async function runLocalPipeline(cwd, opts = {}) {
  const found = await scanLocal(cwd, opts.includeHidden || false);
  if (!found.length) throw new Error("No source files found in this directory.");

  const raw = [];
  for (const f of found) {
    try {
      raw.push(await parseLocal(f.path));
    } catch {
      // unreadable file — drop it from the pipeline
    }
  }

  const stackInfo = await detectStack(raw);
  const selected = tokenCheck(
    compress(chunkLarge(selectTopN(score(prioritize(raw, stackInfo.stack)), TOP_N)))
  );

  return { raw, selected, stackInfo };
}
|
|
228
|
+
|
|
229
|
+
// ─── GitHub remote pipeline ───────────────────────────────────────────────────
|
|
230
|
+
const fetch = require("./fetch");
|
|
231
|
+
|
|
232
|
+
/**
 * Issue one authenticated request to the GitHub REST API.
 * - 404 responses are NOT treated as errors (callers probe for existence).
 * - Other non-2xx statuses throw with the first 300 chars of the body.
 * - 204 (No Content) resolves to null; unparsable JSON also yields null.
 * - Requests abort after 20 seconds via AbortSignal.timeout.
 * @param {string} token - GitHub bearer token
 * @param {string} method - HTTP method
 * @param {string} endpoint - path appended to https://api.github.com
 * @param {object} [body] - JSON-serialized when provided
 * @returns {Promise<object|null>}
 */
async function githubRequest(token, method, endpoint, body) {
  const res = await fetch(`https://api.github.com${endpoint}`, {
    method,
    headers: {
      Authorization : `Bearer ${token}`,
      Accept : "application/vnd.github+json",
      "Content-Type" : "application/json",
      "User-Agent" : "CDX-CLI/3.0",
      "X-GitHub-Api-Version" : "2022-11-28",
    },
    body : body ? JSON.stringify(body) : undefined,
    signal: AbortSignal.timeout(20_000),
  });
  if (!res.ok && res.status !== 404) {
    const txt = await res.text().catch(() => "");
    throw new Error(`GitHub API ${res.status}: ${txt.slice(0, 300)}`);
  }
  // 204 carries no body; a 404 falls through here and yields its JSON payload
  // (or null when the body isn't JSON).
  return res.status === 204 ? null : res.json().catch(() => null);
}
|
|
251
|
+
|
|
252
|
+
/**
 * Run the analysis pipeline against a remote GitHub repository.
 * Downloads the full git tree of the default branch, fetches blob content
 * for each non-ignored, non-sensitive, size-limited source file (one blob
 * request at a time), builds metadata, then applies the same
 * detect → prioritize → score → select → chunk → compress → budget stages
 * as the local pipeline.
 * @param {string} owner - repository owner login
 * @param {string} repo - repository name
 * @param {string} token - GitHub bearer token
 * @returns {Promise<{raw:Array, selected:Array, stackInfo:object}>}
 * @throws {Error} when repo info or tree is unavailable, or no files match
 */
async function runRemotePipeline(owner, repo, token) {
  const repoData = await githubRequest(token, "GET", `/repos/${owner}/${repo}`);
  if (!repoData) throw new Error("Could not fetch remote repository info.");
  const branch = repoData.default_branch || "main";

  const treeData = await githubRequest(token, "GET", `/repos/${owner}/${repo}/git/trees/${branch}?recursive=1`);
  if (!treeData?.tree) throw new Error("Could not fetch repository tree.");

  const blobs = treeData.tree.filter(f => f.type === "blob");
  const metas = [];

  for (const f of blobs) {
    // Skip anything under an ignored directory or matching an ignored name.
    const parts = f.path.split("/");
    if (parts.some(p => IGNORE_DIRS.has(p.toLowerCase()))) continue;
    if (IGNORE_FILES.has(path.basename(f.path))) continue;

    const ext = path.extname(f.path).toLowerCase();
    const base = path.basename(f.path).toLowerCase();
    if (!CODE_EXTS.has(ext) && !CODE_EXTS.has(base)) continue;

    const isSens = isSensitive(f.path);
    let content = "";

    // Sensitive files keep empty content; oversized files become
    // metadata-only records; blob fetch failures degrade to empty content.
    if (!isSens && (f.size || 0) <= MAX_FILE_BYTES) {
      const blob = await githubRequest(token, "GET", `/repos/${owner}/${repo}/git/blobs/${f.sha}`).catch(() => null);
      if (blob?.content) {
        content = Buffer.from(blob.content, blob.encoding || "base64").toString("utf8");
      }
    }
    metas.push(buildMeta(f.path, ext, content, isSens));
  }

  if (!metas.length) throw new Error("No source files found in remote repository.");

  const stackInfo = await detectStack(metas);
  const prioritized = prioritize(metas, stackInfo.stack);
  const scored = score(prioritized);
  const top = selectTopN(scored, TOP_N);
  const chunked = chunkLarge(top);
  const compressed = compress(chunked);
  const selected = tokenCheck(compressed);

  return { raw: metas, selected, stackInfo };
}
|
|
296
|
+
|
|
297
|
+
// ─── GitHub push ──────────────────────────────────────────────────────────────
|
|
298
|
+
/**
 * Create or update a single file via the GitHub Contents API.
 * The existing blob's sha is fetched first — updates require it; when no
 * sha is found the PUT creates the file.
 */
async function upsertFile(token, owner, repo, filePath, content, msg) {
  const endpoint = `/repos/${owner}/${repo}/contents/${filePath}`;
  const existing = await githubRequest(token, "GET", endpoint).catch(() => null);
  const payload = {
    message: msg,
    content: Buffer.from(content, "utf8").toString("base64"),
  };
  if (existing?.sha) payload.sha = existing.sha;
  await githubRequest(token, "PUT", endpoint, payload);
}
|
|
308
|
+
|
|
309
|
+
/**
 * Push a map of {path: content} files to GitHub one at a time.
 * Failures are collected instead of aborting the batch; the optional
 * `onProgress` callback receives one status line per file.
 * @returns {Promise<{pushed:string[], failed:Array<{path:string,error:string}>}>}
 */
async function pushToGitHub(token, owner, repo, files, onProgress) {
  const pushed = [];
  const failed = [];
  for (const [filePath, content] of Object.entries(files)) {
    try {
      await upsertFile(token, owner, repo, filePath, content, `docs: update ${filePath} via CDX [skip ci]`);
      pushed.push(filePath);
      if (onProgress) onProgress(`Pushed: ${filePath}`);
    } catch (err) {
      failed.push({ path: filePath, error: err.message });
      if (onProgress) onProgress(`Failed: ${filePath} — ${err.message.split("\n")[0]}`);
    }
  }
  return { pushed, failed };
}
|
|
323
|
+
|
|
324
|
+
/**
 * List the authenticated user's repositories, most recently updated first.
 * Fetches at most 3 pages of 100; stops early on an empty or short page.
 */
async function fetchGitHubRepos(token) {
  const repos = [];
  let page = 1;
  while (page <= 3) {
    const batch = await githubRequest(token, "GET", `/user/repos?per_page=100&page=${page}&sort=updated`);
    if (!batch?.length) break;
    repos.push(...batch);
    if (batch.length < 100) break; // short page — no more results
    page += 1;
  }
  return repos;
}
|
|
334
|
+
|
|
335
|
+
// ─── Markdown section extractor ───────────────────────────────────────────────
|
|
336
|
+
/**
 * Extract the body of the first Markdown section whose heading matches
 * `heading` (case-insensitive), up to the next heading or end of document.
 * Bugfix: `heading` is regex-escaped before interpolation, so titles
 * containing regex metacharacters (e.g. "C++ Notes") no longer throw a
 * SyntaxError or mis-match.
 * @param {string} doc - Markdown document
 * @param {string} heading - heading text to look for
 * @returns {string|null} trimmed section body, or null when not found
 */
function extractSection(doc, heading) {
  const escaped = heading.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const re = new RegExp(`#\\s*${escaped}([\\s\\S]*?)(?=\\n#\\s|$)`, "i");
  const m = doc.match(re);
  return m ? m[1].trim() : null;
}
|
|
341
|
+
|
|
342
|
+
// Public API: analysis pipelines, GitHub helpers, and a few primitives
// reused elsewhere in the CLI.
module.exports = {
  runLocalPipeline,
  runRemotePipeline,
  pushToGitHub,
  fetchGitHubRepos,
  detectStack,
  extractSection,
  isSensitive,
  IGNORE_DIRS, CODE_EXTS,
};
|
package/lib/store.js
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
const fs = require("fs");
|
|
4
|
+
const path = require("path");
|
|
5
|
+
const os = require("os");
|
|
6
|
+
|
|
7
|
+
const CDX_DIR = path.join(os.homedir(), ".cdx");     // per-user state directory
const CFG_PATH = path.join(CDX_DIR, "config.json");  // key/value config store
const TOKEN_PATH = path.join(CDX_DIR, ".session");   // raw JWT session token

// ── Hardcoded Firebase config from google-services.json ───────────────────────
// Project: mkchit | App: com.sas.mk
// NOTE(review): bundling an API key in a published package exposes it to every
// installer — confirm this key is intended to be a public/client-side key.
const BUNDLED_FIREBASE_KEY = "xxxxxxxxxxxxxxxxxxxxxxxxxx";
|
|
14
|
+
|
|
15
|
+
/**
 * Ensure the ~/.cdx state directory exists with owner-only permissions.
 * `recursive: true` makes mkdirSync idempotent (no error when the directory
 * already exists), so the previous existsSync pre-check — a TOCTOU race —
 * is unnecessary and has been removed.
 */
function ensureDir() {
  fs.mkdirSync(CDX_DIR, { recursive: true, mode: 0o700 });
}
|
|
18
|
+
|
|
19
|
+
/**
 * Read and parse the JSON config file.
 * Any failure (missing file, bad JSON) yields an empty object.
 */
function load() {
  ensureDir();
  try {
    return JSON.parse(fs.readFileSync(CFG_PATH, "utf8"));
  } catch {
    return {};
  }
}
|
|
26
|
+
|
|
27
|
+
/**
 * Atomically persist the config: write a pid-suffixed temp file, then
 * rename it over the real path. Mode 0600 keeps it owner-only; the extra
 * chmod covers the case where the destination pre-existed.
 */
function save(cfg) {
  ensureDir();
  const tmpPath = `${CFG_PATH}.tmp.${process.pid}`;
  fs.writeFileSync(tmpPath, JSON.stringify(cfg, null, 2), { mode: 0o600 });
  fs.renameSync(tmpPath, CFG_PATH);
  try { fs.chmodSync(CFG_PATH, 0o600); } catch {}
}
|
|
34
|
+
|
|
35
|
+
/**
 * Read one config value. "firebaseApiKey" falls back to the bundled key
 * whenever the stored value is missing or falsy (e.g. empty string).
 */
function get(key) {
  const value = load()[key];
  if (key === "firebaseApiKey") {
    return value || BUNDLED_FIREBASE_KEY;
  }
  return value;
}
|
|
43
|
+
|
|
44
|
+
/** Write one key into the config file. */
function set(key, val) {
  const cfg = load();
  cfg[key] = val;
  save(cfg);
}

/** Remove one key from the config file. */
function del(key) {
  const cfg = load();
  delete cfg[key];
  save(cfg);
}

/** Wipe all config and drop the stored session token. */
function clear() {
  save({});
  clearSession();
}
|
|
47
|
+
|
|
48
|
+
// ── JWT session ───────────────────────────────────────────────────────────────

/** Persist the session JWT with owner-only permissions. */
function saveSession(jwt) {
  ensureDir();
  fs.writeFileSync(TOKEN_PATH, jwt, { mode: 0o600 });
  try { fs.chmodSync(TOKEN_PATH, 0o600); } catch {}
}

/** Return the stored JWT (trimmed), or null when absent/unreadable. */
function loadSession() {
  try {
    return fs.readFileSync(TOKEN_PATH, "utf8").trim();
  } catch {
    return null;
  }
}

/** Best-effort removal of the stored session token. */
function clearSession() {
  try {
    fs.unlinkSync(TOKEN_PATH);
  } catch {
    // already gone — nothing to do
  }
}
|
|
62
|
+
|
|
63
|
+
/**
 * Decode a JWT's payload (middle segment) WITHOUT verifying the signature.
 * @param {string} token - raw JWT
 * @returns {object|null} parsed claims, or null for malformed input
 */
function decodeJwt(token) {
  try {
    const [, payload] = token.split(".");
    const pad = "=".repeat((4 - payload.length % 4) % 4);
    return JSON.parse(Buffer.from(payload + pad, "base64url").toString("utf8"));
  } catch {
    return null;
  }
}
|
|
70
|
+
|
|
71
|
+
/**
 * True when `token` decodes to a JWT whose `exp` claim lies more than
 * 60 seconds in the future (margin against clock skew / in-flight expiry).
 */
function sessionIsAlive(token) {
  if (!token) return false;
  const claims = decodeJwt(token);
  if (!claims?.exp) return false;
  return claims.exp * 1000 > Date.now() + 60_000;
}
|
|
77
|
+
|
|
78
|
+
// Public API: config CRUD, session persistence, and JWT helpers.
module.exports = {
  load, save, get, set, del, clear,
  saveSession, loadSession, clearSession,
  decodeJwt, sessionIsAlive,
  BUNDLED_FIREBASE_KEY,
};
|