opensecurity 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/login.js ADDED
@@ -0,0 +1,583 @@
1
import { exec, spawn } from "node:child_process";
import crypto from "node:crypto";
import fs from "node:fs";
import http from "node:http";
import readline from "node:readline";
import tty from "node:tty";
import { loadGlobalConfig, saveGlobalConfig } from "./config.js";
import { saveOAuthProfile } from "./oauthStore.js";
7
/**
 * Prompts the user on stdin and resolves with the trimmed answer.
 * @param {string} question - Text shown before the cursor.
 * @returns {Promise<string>} The user's reply, whitespace-trimmed.
 */
export function askQuestion(question) {
  return new Promise((resolve) => {
    const prompt = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
    });
    prompt.question(question, (reply) => {
      prompt.close();
      resolve(reply.trim());
    });
  });
}
16
// For unit tests
/**
 * Persists `token` as the global `apiKey` and returns the config that
 * was written.
 * @param {string} token - Value stored under `apiKey`.
 * @param {NodeJS.ProcessEnv} [env] - Environment used to locate the config.
 * @returns {Promise<object>} The merged config object.
 */
export async function saveMockToken(token, env = process.env) {
  const merged = { ...(await loadGlobalConfig(env)), apiKey: token };
  await saveGlobalConfig(merged, env);
  return merged;
}
23
/**
 * Entry point for authentication. Dispatches on `mode`:
 * "api_key" → manual key entry, "oauth" → browser/CLI OAuth (OpenAI only),
 * otherwise the user is asked interactively (OAuth is the default choice).
 * @returns {Promise<object>} The updated global config.
 * @throws {Error} When OAuth is requested for a non-OpenAI provider.
 */
export async function login(env = process.env, mode, model, provider) {
  console.log("\n\x1b[32m\u25C7\x1b[0m \x1b[1mOpenSecurity Authentication\x1b[0m");
  switch (mode) {
    case "api_key":
      return loginWithApiKey(env, model, provider);
    case "oauth":
      if (provider && provider !== "openai") {
        throw new Error("OAuth is only supported for OpenAI.");
      }
      return loginWithOAuth(env, model);
    default: {
      const choice = await promptLoginMode();
      // Option 1: Codex OAuth (Default)
      return choice === "api_key"
        ? loginWithApiKey(env, model, provider)
        : loginWithOAuth(env, model);
    }
  }
}
41
/**
 * Interactive API-key login: picks a provider, reads a key, lets the user
 * choose a default model, and persists everything to the global config.
 * Exits the process when an OpenAI key does not start with "sk-".
 * @returns {Promise<object>} The saved config.
 */
async function loginWithApiKey(env = process.env, model, provider) {
  const current = await loadGlobalConfig(env);
  const chosenProvider = provider ?? (await chooseProvider(current.provider ?? "openai"));
  const key = await askQuestion(`Enter your ${providerLabel(chosenProvider)} API Key: `);
  const isOpenAi = chosenProvider === "openai";
  if (isOpenAi && !key.startsWith("sk-")) {
    console.error("\x1b[31mError: Invalid OpenAI API key format.\x1b[0m");
    process.exit(1);
  }
  const chosenModel = await chooseModel({
    current: model ?? current.model,
    provider: chosenProvider,
    source: isOpenAi ? "openai" : undefined,
    apiKey: isOpenAi ? key : undefined,
  });
  // OpenAI keys live in `apiKey`; any other provider's key in `providerApiKey`.
  const updated = {
    ...current,
    provider: chosenProvider,
    authMode: "api_key",
    model: chosenModel ?? current.model,
    apiKey: isOpenAi ? key : current.apiKey,
    providerApiKey: isOpenAi ? current.providerApiKey : key,
  };
  await saveGlobalConfig(updated, env);
  console.log(`\n✅ Successfully saved ${providerLabel(chosenProvider)} API Key.`);
  return updated;
}
67
/**
 * Dispatches OAuth login based on OPENSECURITY_OAUTH_PROVIDER:
 * "codex-cli" (the default) shells out to the codex CLI; any other value
 * runs the in-process browser OAuth flow.
 */
async function loginWithOAuth(env = process.env, model) {
  const oauthProvider = env.OPENSECURITY_OAUTH_PROVIDER ?? "codex-cli";
  return oauthProvider === "codex-cli"
    ? codexCliOAuthLogin(env, model)
    : codexOAuthLogin(env, model);
}
74
/**
 * Authenticates by delegating to the external `codex login` command, then
 * records the oauth auth mode and a chosen default model in the config.
 * @returns {Promise<object>} The saved config.
 */
async function codexCliOAuthLogin(env = process.env, model) {
  await runCodexLogin();
  const current = await loadGlobalConfig(env);
  const chosen = await chooseModel({
    current: model ?? current.model,
    source: "codex",
    provider: "openai",
  });
  const updated = {
    ...current,
    authMode: "oauth",
    oauthProvider: "codex-cli",
    authProfileId: "codex-cli",
    model: chosen ?? current.model,
  };
  await saveGlobalConfig(updated, env);
  console.log("\n✅ Successfully authenticated with OpenAI/Codex via codex CLI.");
  return updated;
}
93
/**
 * In-process OpenAI Codex OAuth flow (authorization code + PKCE).
 * Starts a localhost HTTP server for the redirect callback, opens the
 * browser to the authorization URL, exchanges the returned code for
 * tokens, stores an OAuth profile, and points the global config at the
 * local proxy base URL.
 *
 * NOTE(review): the returned promise only ever resolves — on any server
 * error (including a failed token exchange) the request gets a 500 and the
 * server keeps listening, so the caller can hang forever; there is also no
 * overall timeout. Confirm this is acceptable for the CLI.
 *
 * @param {NodeJS.ProcessEnv} [env]
 * @param {string} [model] - Preferred model id to preselect.
 * @param {number} [port] - Callback port; the registered redirect URI uses 1455.
 * @returns {Promise<object>} The saved config, once the callback completes.
 */
async function codexOAuthLogin(env = process.env, model, port = 1455) {
  const current = await loadGlobalConfig(env);
  console.log("\n\x1b[32m\u25C7\x1b[0m \x1b[1mOpenAI Codex OAuth\x1b[0m");
  console.log(" Browser will open for OpenAI authentication.");
  console.log(" OpenAI OAuth uses localhost:1455 for the callback.\n");
  // CSRF state plus PKCE verifier/challenge (S256) for the auth request.
  const state = crypto.randomBytes(16).toString("hex");
  const codeVerifier = crypto.randomBytes(32).toString("base64url");
  const codeChallenge = crypto.createHash("sha256").update(codeVerifier).digest("base64url");
  const clientId = "app_EMoamEEZ73f0CkXaXp7hrann";
  const redirectUri = encodeURIComponent(`http://localhost:${port}/auth/callback`);
  const proxyBaseUrl = env.OPENSECURITY_PROXY_URL ?? "http://localhost:8787/v1/responses";
  const authUrl = `https://auth.openai.com/oauth/authorize?response_type=code` +
    `&client_id=${clientId}` +
    `&redirect_uri=${redirectUri}` +
    `&scope=openid+profile+email+offline_access` +
    `&code_challenge=${codeChallenge}` +
    `&code_challenge_method=S256` +
    `&state=${state}` +
    `&id_token_add_organizations=true` +
    `&codex_cli_simplified_flow=true` +
    `&originator=pi`;
  // Printed so the user can open the URL manually if the browser launch fails.
  console.log(`Open: ${authUrl}\n`);
  return new Promise((resolve) => {
    const server = http.createServer(async (req, res) => {
      try {
        const url = new URL(req.url || "", `http://${req.headers.host}`);
        if (url.pathname === "/auth/callback") {
          const code = url.searchParams.get("code");
          const returnedState = url.searchParams.get("state");
          // Reject callbacks whose state does not match what we issued (CSRF guard).
          if (returnedState !== state) {
            res.writeHead(400);
            res.end("State mismatch. Security error.");
            return;
          }
          if (code) {
            // Respond to the browser first; the token exchange happens after.
            res.writeHead(200, { "Content-Type": "text/html" });
            res.end(`
              <html>
                <head><title>Success</title><style>body { font-family: -apple-system, sans-serif; text-align: center; margin-top: 50px; }</style></head>
                <body>
                  <h1>✅ Authentication Successful!</h1>
                  <p>OpenSecurity has successfully authenticated via OpenAI/Codex.</p>
                  <p>You can close this window and return to your terminal.</p>
                  <script>window.close();</script>
                </body>
              </html>
            `);
            const tokens = await exchangeCodeForTokens({
              code,
              codeVerifier,
              clientId,
              redirectUri: `http://localhost:${port}/auth/callback`
            });
            if (!tokens.access_token) {
              throw new Error("OAuth token exchange did not return an access_token.");
            }
            // expires_in is seconds from now; convert to an absolute timestamp.
            const expiresAt = tokens.expires_in
              ? Date.now() + tokens.expires_in * 1000
              : undefined;
            await saveOAuthProfile({
              provider: "codex",
              accessToken: tokens.access_token,
              refreshToken: tokens.refresh_token,
              tokenType: tokens.token_type,
              scope: tokens.scope,
              expiresAt,
              obtainedAt: Date.now()
            }, env);
            const selectedModel = await chooseModel({
              current: model ?? current.model,
              source: "codex",
              provider: "openai"
            });
            // Route requests through the local proxy using the Responses API.
            const updated = {
              ...current,
              baseUrl: proxyBaseUrl,
              apiType: "responses",
              authMode: "oauth",
              authProfileId: "codex",
              oauthProvider: "proxy",
              model: selectedModel ?? current.model
            };
            await saveGlobalConfig(updated, env);
            console.log(`\n✅ Successfully authenticated with OpenAI/Codex.`);
            console.log(` Proxy base URL set to ${proxyBaseUrl}`);
            server.close();
            resolve(updated);
          }
          else {
            res.writeHead(400);
            res.end("No authorization code received.");
          }
        }
        else {
          res.writeHead(404);
          res.end("Not Found");
        }
      }
      catch (err) {
        // NOTE(review): `err` is swallowed here — a failed token exchange is
        // reported to the browser as a bare 500 and never surfaces in the
        // terminal. Consider logging it.
        res.writeHead(500);
        res.end("Internal error");
      }
    });
    server.listen(port, () => {
      // NOTE(review): `open` is macOS-only; Linux (xdg-open) and Windows
      // (start) users must use the printed URL. Confirm intended platforms.
      exec(`open "${authUrl}"`);
    });
  });
}
201
/**
 * Runs `codex login` with inherited stdio.
 * Resolves on exit code 0; rejects when the binary cannot be spawned or
 * exits non-zero.
 * @returns {Promise<void>}
 */
function runCodexLogin() {
  return new Promise((resolve, reject) => {
    const child = spawn("codex", ["login"], { stdio: "inherit" });
    child.on("error", reject);
    child.on("exit", (code) => {
      if (code !== 0) {
        reject(new Error(`codex login failed with exit code ${code ?? "unknown"}`));
        return;
      }
      resolve();
    });
  });
}
213
/**
 * Asks the user for an auth mode. Tries the arrow-key selector first; when
 * it fails (no usable TTY, or cancelled), degrades to a line prompt where
 * anything other than "api_key" means "oauth".
 * @returns {Promise<"oauth"|"api_key">}
 */
async function promptLoginMode() {
  try {
    return await interactiveSelectLoginMode();
  }
  catch {
    // fall through to text prompt
  }
  const typed = await askQuestion("Select auth mode (oauth/api_key): ");
  if (typed.trim() === "api_key") {
    return "api_key";
  }
  return "oauth";
}
223
/**
 * Full-screen arrow-key selector between OAuth and API-key login.
 * Resolves with "oauth" or "api_key"; rejects on Ctrl-C or when no TTY is
 * available.
 *
 * Fix: the previous version did `const readline = require("node:readline")`,
 * but `require` is undefined inside an ES module (this file uses `import`),
 * so the selector always threw at runtime. The module-level `readline`
 * import is used instead.
 */
async function interactiveSelectLoginMode() {
  return new Promise((resolve, reject) => {
    const { input, output, cleanup: baseCleanup } = getInteractiveStreams();
    readline.emitKeypressEvents(input);
    input.setRawMode(true);
    const options = [
      { label: "OpenAI Codex OAuth (browser)", value: "oauth" },
      { label: "OpenAI API Key (manual)", value: "api_key" }
    ];
    let index = 0;
    const render = () => {
      output.write("\x1b[2J\x1b[H"); // clear screen, cursor to home
      output.write("Select authentication method\n");
      for (let i = 0; i < options.length; i += 1) {
        const prefix = i === index ? "◉" : "○";
        output.write(`${prefix} ${options[i].label}\n`);
      }
      output.write("\nUse ↑/↓ to move, Enter to select.\n");
    };
    const cleanup = () => {
      input.off("keypress", onKeypress);
      baseCleanup();
    };
    const onKeypress = (_, key) => {
      if (key.ctrl && key.name === "c") {
        cleanup();
        reject(new Error("Selection cancelled."));
        return;
      }
      if (key.name === "down") {
        index = (index + 1) % options.length;
        render();
        return;
      }
      if (key.name === "up") {
        index = (index - 1 + options.length) % options.length;
        render();
        return;
      }
      if (key.name === "return") {
        const value = options[index].value;
        cleanup();
        resolve(value);
      }
    };
    input.on("keypress", onKeypress);
    render();
  });
}
273
/** True when OPENSECURITY_FORCE_TTY=1 requests the interactive selector. */
function shouldForceInteractive() {
  const flag = process.env.OPENSECURITY_FORCE_TTY;
  return flag === "1";
}
276
/**
 * Returns { input, output, cleanup } streams for full-screen interaction.
 * Prefers the process's own TTY; when stdio is piped it opens /dev/tty
 * directly (POSIX only). Throws when no TTY can be obtained.
 *
 * Fix: the previous version used `require("node:tty")` / `require("node:fs")`,
 * but `require` is undefined inside an ES module, so the /dev/tty fallback
 * always threw "No TTY available". Both modules are now imported at the top
 * of the file.
 */
function getInteractiveStreams() {
  if (process.stdin.isTTY && process.stdout.isTTY) {
    const wasRaw = process.stdin.isRaw;
    const cleanup = () => {
      // Only leave raw mode if it was off when we started.
      if (!wasRaw)
        process.stdin.setRawMode(false);
      process.stdout.write("\x1b[2J\x1b[H");
    };
    return { input: process.stdin, output: process.stdout, cleanup };
  }
  try {
    const fd = fs.openSync("/dev/tty", "r+");
    const input = new tty.ReadStream(fd);
    const output = new tty.WriteStream(fd);
    const cleanup = () => {
      input.setRawMode(false);
      input.pause();
      output.write("\x1b[2J\x1b[H");
      fs.closeSync(fd);
    };
    return { input, output, cleanup };
  }
  catch {
    throw new Error("No TTY available for interactive selection.");
  }
}
304
/**
 * Resolves the candidate model list for the given provider/source and
 * prompts the user to pick one.
 * @param {{current?: string, source?: string, apiKey?: string, provider?: string}} params
 * @returns {Promise<string|undefined>} Chosen model id, or undefined to keep current.
 */
async function chooseModel(params) {
  const { current, source, apiKey, provider } = params;
  let models;
  if (provider === "openai" && source === "openai" && apiKey) {
    models = await fetchOpenAiModels(apiKey);
  }
  else if (provider === "openai" && source === "codex") {
    models = getCodexModelChoices();
  }
  else if (apiKey) {
    models = await fetchProviderModels(provider, apiKey);
  }
  else {
    models = getProviderModelChoices(provider);
  }
  return promptForModel({ current, models });
}
318
/** Static model ids available through the Codex OAuth proxy. */
function getCodexModelChoices() {
  const codexModels = [
    "openai-codex/gpt-5.1",
    "openai-codex/gpt-5.1-codex-max",
    "openai-codex/gpt-5.1-codex-mini",
    "openai-codex/gpt-5.2",
    "openai-codex/gpt-5.2-codex",
    "openai-codex/gpt-5.3-codex",
    "openai-codex/gpt-5.3-codex-spark",
  ];
  return codexModels;
}
329
/**
 * Lists model ids from the OpenAI models API, de-duplicated and sorted.
 * On HTTP failure it now warns and falls back to the static OpenAI list —
 * previously it silently returned [], inconsistent with every other
 * provider fetcher in this file, leaving the user with no choices.
 * @param {string} apiKey - Bearer token for api.openai.com.
 * @returns {Promise<string[]>}
 */
async function fetchOpenAiModels(apiKey) {
  const res = await fetch("https://api.openai.com/v1/models", {
    headers: {
      Authorization: `Bearer ${apiKey}`
    }
  });
  if (!res.ok) {
    console.error(`\x1b[33mWarning:\x1b[0m OpenAI models API failed (${res.status}). Using fallback list.`);
    return getProviderModelChoices("openai");
  }
  const data = await res.json();
  const ids = data.data?.map((m) => m.id) ?? [];
  // De-duplicate inline; sorted for stable display.
  return [...new Set(ids)].sort();
}
342
/** De-duplicates, preserving first-seen order. */
function unique(items) {
  const seen = new Set();
  const result = [];
  for (const item of items) {
    if (!seen.has(item)) {
      seen.add(item);
      result.push(item);
    }
  }
  return result;
}
345
/**
 * Prompts for a model: keep-current, one of `models`, or a free-form id.
 * @returns {Promise<string|undefined>} undefined means "keep the current model".
 */
async function promptForModel(params) {
  const { current, models } = params;
  const keepLabel = current ? `Keep current (${current})` : "Keep current";
  const choices = [
    { name: keepLabel, value: undefined },
    ...models.map((id) => ({ name: id, value: id })),
    { name: "Custom model id…", value: "__custom__" }
  ];
  const picked = await selectFromList("Default model", choices);
  if (picked !== "__custom__") {
    return picked;
  }
  const typed = await askQuestion("Enter custom model id: ");
  return typed.trim() || undefined;
}
359
/** Single entry point for list prompts; delegates to interactiveSelect. */
async function selectFromList(message, choices) {
  const picked = await interactiveSelect(message, choices);
  return picked;
}
362
/**
 * Generic full-screen arrow-key selector over `choices` ({ name, value }).
 * Resolves with the chosen value; rejects on Ctrl-C or when no TTY is
 * available.
 *
 * Fix: the previous version did `const readline = require("node:readline")`,
 * but `require` is undefined inside an ES module, so every selection threw
 * at runtime. The module-level `readline` import is used instead.
 */
async function interactiveSelect(message, choices) {
  return new Promise((resolve, reject) => {
    const { input, output, cleanup: baseCleanup } = getInteractiveStreams();
    readline.emitKeypressEvents(input);
    input.setRawMode(true);
    let index = 0;
    const render = () => {
      output.write("\x1b[2J\x1b[H"); // clear screen, cursor to home
      output.write(`${message}\n`);
      for (let i = 0; i < choices.length; i += 1) {
        const prefix = i === index ? "●" : "○";
        output.write(`${prefix} ${choices[i].name}\n`);
      }
      output.write("\nUse ↑/↓ to move, Enter to select.\n");
    };
    const onKeypress = (_, key) => {
      if (key.ctrl && key.name === "c") {
        cleanup();
        reject(new Error("Selection cancelled."));
        return;
      }
      if (key.name === "down") {
        index = (index + 1) % choices.length;
        render();
        return;
      }
      if (key.name === "up") {
        index = (index - 1 + choices.length) % choices.length;
        render();
        return;
      }
      if (key.name === "return") {
        const value = choices[index].value;
        cleanup();
        resolve(value);
      }
    };
    const cleanup = () => {
      input.off("keypress", onKeypress);
      baseCleanup();
    };
    input.on("keypress", onKeypress);
    render();
  });
}
408
/**
 * Exchanges an OAuth authorization code (plus PKCE verifier) for tokens at
 * the OpenAI token endpoint.
 * @param {{code: string, codeVerifier: string, clientId: string, redirectUri: string}} params
 * @returns {Promise<object>} Parsed token response (access_token, etc.).
 * @throws {Error} When the endpoint responds with a non-2xx status.
 */
async function exchangeCodeForTokens(params) {
  const form = new URLSearchParams();
  form.set("grant_type", "authorization_code");
  form.set("client_id", params.clientId);
  form.set("code_verifier", params.codeVerifier);
  form.set("code", params.code);
  form.set("redirect_uri", params.redirectUri);
  const res = await fetch("https://auth.openai.com/oauth/token", {
    method: "POST",
    headers: { "Content-Type": "application/x-www-form-urlencoded" },
    body: form
  });
  if (res.ok) {
    return res.json();
  }
  const text = await res.text();
  throw new Error(`OAuth token exchange failed: ${res.status} ${text}`);
}
427
/**
 * Prompts for an API provider; an undefined selection keeps `current`.
 * @param {string} current - Currently configured provider id.
 * @returns {Promise<string>}
 */
async function chooseProvider(current) {
  const providers = [
    ["OpenAI", "openai"],
    ["Anthropic", "anthropic"],
    ["Google Gemini", "google"],
    ["Mistral", "mistral"],
    ["xAI", "xai"],
    ["Cohere", "cohere"],
  ];
  const choices = providers.map(([name, value]) => ({ name, value }));
  const picked = await selectFromList(`Provider (current: ${current})`, choices);
  return picked ?? current;
}
439
/** Human-readable display name for a provider id ("Provider" if unknown). */
function providerLabel(provider) {
  const labels = {
    openai: "OpenAI",
    anthropic: "Anthropic",
    google: "Google Gemini",
    mistral: "Mistral",
    xai: "xAI",
    cohere: "Cohere",
  };
  return Object.hasOwn(labels, provider) ? labels[provider] : "Provider";
}
457
/** Static fallback model lists per provider (the OpenAI list is the default). */
function getProviderModelChoices(provider) {
  const fallbacks = {
    anthropic: [
      "claude-opus",
      "claude-sonnet",
    ],
    google: [
      "gemini-2.5-pro",
      "gemini-2.5-flash",
    ],
    mistral: [
      "codestral-latest",
      "mistral-medium-latest",
    ],
    xai: [
      "grok-4-1-fast-reasoning",
      "grok-4-1-fast-non-reasoning",
      "grok-code-fast-1",
    ],
    cohere: [
      "command-a-03-2025",
      "command-a-reasoning-08-2025",
    ],
    openai: [
      "gpt-5.2",
      "gpt-5.1",
      "gpt-4.1",
      "gpt-4o-mini",
    ],
  };
  return Object.hasOwn(fallbacks, provider) ? fallbacks[provider] : fallbacks.openai;
}
495
/**
 * Fetches live model lists for providers that expose a models API; xAI and
 * any unknown provider fall back to the static list.
 */
async function fetchProviderModels(provider, apiKey) {
  const fetchers = {
    anthropic: fetchAnthropicModels,
    google: fetchGeminiModels,
    mistral: fetchMistralModels,
    cohere: fetchCohereModels,
    openai: fetchOpenAiModels,
  };
  if (Object.hasOwn(fetchers, provider)) {
    return fetchers[provider](apiKey);
  }
  return getProviderModelChoices(provider);
}
512
/**
 * Lists Anthropic model ids via the models API; warns and returns the
 * static fallback list on HTTP failure or an empty response.
 */
async function fetchAnthropicModels(apiKey) {
  const res = await fetch("https://api.anthropic.com/v1/models", {
    headers: {
      "x-api-key": apiKey,
      "anthropic-version": "2023-06-01"
    }
  });
  if (!res.ok) {
    console.error(`\x1b[33mWarning:\x1b[0m Anthropic models API failed (${res.status}). Using fallback list.`);
    return getProviderModelChoices("anthropic");
  }
  const payload = await res.json();
  const ids = payload.data?.map((m) => m.id).filter(Boolean);
  if (ids?.length) {
    return ids;
  }
  console.error("\x1b[33mWarning:\x1b[0m Anthropic models API returned no models. Using fallback list.");
  return getProviderModelChoices("anthropic");
}
530
/**
 * Lists Gemini models that support generateContent, with the "models/"
 * prefix stripped; warns and returns the static fallback list on HTTP
 * failure or an empty result.
 */
async function fetchGeminiModels(apiKey) {
  const res = await fetch(`https://generativelanguage.googleapis.com/v1beta/models?key=${apiKey}`);
  if (!res.ok) {
    console.error(`\x1b[33mWarning:\x1b[0m Gemini models API failed (${res.status}). Using fallback list.`);
    return getProviderModelChoices("google");
  }
  const payload = await res.json();
  const stripPrefix = (name) => (name.startsWith("models/") ? name.slice("models/".length) : name);
  const models = (payload.models ?? [])
    .filter((m) => m.supportedGenerationMethods?.includes("generateContent"))
    .map((m) => m.name)
    .filter(Boolean)
    .map(stripPrefix);
  if (models.length) {
    return models;
  }
  console.error("\x1b[33mWarning:\x1b[0m Gemini models API returned no models. Using fallback list.");
  return getProviderModelChoices("google");
}
547
/**
 * Lists Mistral model ids. Accepts either response shape: OpenAI-style
 * { data: [{ id }] } first, then { models: [{ id | name }] }; warns and
 * returns the static fallback list on HTTP failure or an empty result.
 */
async function fetchMistralModels(apiKey) {
  const res = await fetch("https://api.mistral.ai/v1/models", {
    headers: {
      Authorization: `Bearer ${apiKey}`
    }
  });
  if (!res.ok) {
    console.error(`\x1b[33mWarning:\x1b[0m Mistral models API failed (${res.status}). Using fallback list.`);
    return getProviderModelChoices("mistral");
  }
  const payload = await res.json();
  const ids = payload.data?.map((m) => m.id).filter(Boolean);
  if (ids?.length) {
    return ids;
  }
  // Alternate response shape observed by the original implementation.
  const names = payload.models?.map((m) => m.id ?? m.name).filter(Boolean);
  if (names?.length) {
    return names;
  }
  console.error("\x1b[33mWarning:\x1b[0m Mistral models API returned no models. Using fallback list.");
  return getProviderModelChoices("mistral");
}
567
/**
 * Lists Cohere model names; warns and returns the static fallback list on
 * HTTP failure or an empty response.
 */
async function fetchCohereModels(apiKey) {
  const res = await fetch("https://api.cohere.com/v1/models", {
    headers: {
      Authorization: `Bearer ${apiKey}`
    }
  });
  if (!res.ok) {
    console.error(`\x1b[33mWarning:\x1b[0m Cohere models API failed (${res.status}). Using fallback list.`);
    return getProviderModelChoices("cohere");
  }
  const payload = await res.json();
  const names = payload.models?.map((m) => m.name ?? m.id).filter(Boolean);
  if (names?.length) {
    return names;
  }
  console.error("\x1b[33mWarning:\x1b[0m Cohere models API returned no models. Using fallback list.");
  return getProviderModelChoices("cohere");
}
@@ -0,0 +1,48 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+ import { getConfigDir } from "./config.js";
4
+ const DEFAULT_PROFILE_ID = "codex";
5
/** Absolute path of the auth-profiles store inside the config directory. */
export function getAuthProfilesPath(env = process.env) {
  const dir = getConfigDir(env);
  return path.join(dir, "auth-profiles.json");
}
8
/**
 * Reads and parses a JSON file. A missing file yields null; any other
 * failure (bad JSON, permissions) propagates to the caller.
 */
async function readJsonFile(filePath) {
  let raw;
  try {
    raw = await fs.readFile(filePath, "utf8");
  }
  catch (err) {
    if (err?.code === "ENOENT")
      return null;
    throw err;
  }
  return JSON.parse(raw);
}
19
/** Writes `data` as pretty-printed JSON, creating parent dirs as needed. */
async function writeJsonFile(filePath, data) {
  const dir = path.dirname(filePath);
  await fs.mkdir(dir, { recursive: true });
  const serialized = JSON.stringify(data, null, 2);
  await fs.writeFile(filePath, serialized, "utf8");
}
23
/** Loads the auth store, defaulting to an empty profile list when absent. */
export async function loadAuthStore(env = process.env) {
  const stored = await readJsonFile(getAuthProfilesPath(env));
  return stored ?? { profiles: [] };
}
28
/** Persists the whole auth store to disk. */
export async function saveAuthStore(store, env = process.env) {
  const filePath = getAuthProfilesPath(env);
  await writeJsonFile(filePath, store);
}
31
/**
 * Upserts an OAuth profile, keyed by `id` (defaulting to "codex"), and
 * returns the normalized record that was stored.
 */
export async function saveOAuthProfile(profile, env = process.env) {
  const store = await loadAuthStore(env);
  const normalized = { ...profile, id: profile.id ?? DEFAULT_PROFILE_ID };
  const profiles = store.profiles.filter((p) => p.id !== normalized.id);
  profiles.push(normalized);
  await saveAuthStore({ profiles }, env);
  return normalized;
}
40
/** Looks up a stored OAuth profile by id; null when absent. */
export async function getOAuthProfile(id, env = process.env) {
  const { profiles } = await loadAuthStore(env);
  const match = profiles.find((p) => p.id === id);
  return match ?? null;
}
44
/**
 * True when the profile's token is expired or will expire within `skewMs`
 * milliseconds; profiles without an expiry never count as expired.
 */
export function isTokenExpired(profile, skewMs = 60_000) {
  const { expiresAt } = profile;
  if (!expiresAt)
    return false;
  return expiresAt <= Date.now() + skewMs;
}