@upend/cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +231 -0
- package/bin/cli.ts +48 -0
- package/package.json +26 -0
- package/src/commands/deploy.ts +67 -0
- package/src/commands/dev.ts +96 -0
- package/src/commands/infra.ts +227 -0
- package/src/commands/init.ts +323 -0
- package/src/commands/migrate.ts +64 -0
- package/src/config.ts +18 -0
- package/src/index.ts +2 -0
- package/src/lib/auth.ts +89 -0
- package/src/lib/db.ts +14 -0
- package/src/lib/exec.ts +38 -0
- package/src/lib/log.ts +16 -0
- package/src/lib/middleware.ts +51 -0
- package/src/services/claude/index.ts +507 -0
- package/src/services/claude/snapshots.ts +142 -0
- package/src/services/claude/worktree.ts +151 -0
- package/src/services/dashboard/public/index.html +888 -0
- package/src/services/gateway/auth-routes.ts +203 -0
- package/src/services/gateway/index.ts +64 -0
|
@@ -0,0 +1,323 @@
|
|
|
1
|
+
import { log } from "../lib/log";
|
|
2
|
+
import { exec, execOrDie, hasCommand } from "../lib/exec";
|
|
3
|
+
import { existsSync, mkdirSync, writeFileSync, readFileSync } from "fs";
|
|
4
|
+
import { join, resolve } from "path";
|
|
5
|
+
|
|
6
|
+
/**
 * `upend init <name>` — scaffold a new upend project in ./<name>.
 *
 * Steps, in order:
 *   1. generate an RSA key pair for JWT signing (written to .keys/)
 *   2. provision a Neon Postgres database + Data API via neonctl (best-effort)
 *   3. write project files (config, package.json, .env, migrations, CLAUDE.md)
 *   4. encrypt .env with dotenvx (only when a database was provisioned)
 *   5. git init + initial commit
 *   6. bun install
 *
 * Exits the process with code 1 on usage errors, or whenever a required
 * external command fails (via execOrDie).
 */
export default async function init(args: string[]) {
  const name = args[0];
  if (!name) {
    log.error("usage: upend init <name>");
    log.dim(" e.g. upend init beta → deploys to beta.upend.site");
    process.exit(1);
  }

  const projectDir = resolve(name);
  // every project is served under the fixed upend.site parent domain
  const domain = `${name}.upend.site`;

  // refuse to clobber an existing directory
  if (existsSync(projectDir)) {
    log.error(`directory '${name}' already exists`);
    process.exit(1);
  }

  log.header(`creating ${name}`);
  log.dim(`→ ${domain}`);
  log.blank();

  mkdirSync(projectDir, { recursive: true });

  // ── 1. generate JWT keys first (needed for JWKS setup) ──

  log.info("generating JWT signing keys...");
  mkdirSync(join(projectDir, ".keys"), { recursive: true });
  // RSASSA-PKCS1-v1_5 / SHA-256 == RS256, the alg used by src/lib/auth.ts
  const { privateKey, publicKey } = await crypto.subtle.generateKey(
    { name: "RSASSA-PKCS1-v1_5", modulusLength: 2048, publicExponent: new Uint8Array([1, 0, 1]), hash: "SHA-256" },
    true,
    ["sign", "verify"]
  );
  const privPem = await exportKeyToPem(privateKey, "PRIVATE");
  const pubPem = await exportKeyToPem(publicKey, "PUBLIC");
  writeFileSync(join(projectDir, ".keys/private.pem"), privPem);
  writeFileSync(join(projectDir, ".keys/public.pem"), pubPem);
  log.success("keys generated");

  // ── 2. neon database + data API ──

  // stay empty when neonctl is unavailable; scaffolding continues regardless
  let databaseUrl = "";
  let neonDataApi = "";
  let neonProjectId = "";

  if (await hasCommand("neonctl")) {
    // check auth
    const { exitCode } = await exec(["neonctl", "me"], { silent: true });
    if (exitCode !== 0) {
      log.info("authenticating with neon...");
      await execOrDie(["neonctl", "auth"]);
    }

    // create neon project
    log.info("creating neon database...");
    const { stdout: projectJson } = await execOrDie(["neonctl", "projects", "create", "--name", name, "--output", "json"]);
    const project = JSON.parse(projectJson);
    // neonctl output shape varies between versions — tolerate both nestings
    neonProjectId = project.project?.id || project.id;
    log.success(`neon project: ${neonProjectId}`);

    // get connection string (direct, not pooler)
    const { stdout: connStr } = await execOrDie(["neonctl", "connection-string", "--project-id", neonProjectId]);
    databaseUrl = connStr.trim();
    log.success("connection string ready");

    // get branch ID for data API setup
    const { stdout: branchJson } = await execOrDie(["neonctl", "branches", "list", "--project-id", neonProjectId, "--output", "json"]);
    const branches = JSON.parse(branchJson);
    // tolerate both a bare array and a { branches: [...] } wrapper
    const branchId = branches[0]?.id || branches.branches?.[0]?.id;

    if (branchId) {
      // get neon API token from neonctl credentials
      const neonToken = getNeonToken();

      if (neonToken) {
        // wait for endpoint to be ready, then enable Data API
        log.info("enabling data API (waiting for endpoint)...");
        // up to 10 tries, 3 s apart, while the endpoint is still initializing
        for (let attempt = 0; attempt < 10; attempt++) {
          const dataApiRes = await fetch(
            `https://console.neon.tech/api/v2/projects/${neonProjectId}/branches/${branchId}/data-api/neondb`,
            {
              method: "POST",
              headers: {
                "Authorization": `Bearer ${neonToken}`,
                "Content-Type": "application/json",
              },
              body: JSON.stringify({
                auth_provider: "external",
                // the project serves this JWKS once deployed (see auth.ts getJWKS)
                jwks_url: `https://${domain}/.well-known/jwks.json`,
                provider_name: "upend",
                add_default_grants: true,
                skip_auth_schema: true,
                settings: {
                  db_schemas: ["public"],
                  jwt_role_claim_key: ".role",
                },
              }),
            }
          );

          if (dataApiRes.ok) {
            const dataApi = await dataApiRes.json() as any;
            neonDataApi = dataApi.url || "";
            log.success(`data API: ${neonDataApi}`);
            break;
          }

          const err = await dataApiRes.text();
          // NOTE(review): matches a substring of Neon's error body — confirm
          // the API still reports "initializing" in this form
          if (err.includes("initializing") && attempt < 9) {
            await new Promise(r => setTimeout(r, 3000));
            continue;
          }
          // non-retryable failure: warn and move on, init is best-effort here
          log.warn(`data API setup failed: ${err}`);
          log.dim("you can enable it manually in the Neon console");
          break;
        }

        // JWKS registration requires the URL to be reachable — defer to post-deploy
        log.dim(`JWKS will be registered after deploy (${domain} must be live)`);
        log.dim("run: upend setup:jwks (after first deploy)");
      } else {
        log.warn("couldn't read neon API token — data API needs manual setup");
      }
    }
  } else {
    log.warn("neonctl not found — install with: npm i -g neonctl");
    log.dim("then re-run: upend init " + name);
    log.dim("or set DATABASE_URL manually in .env");
  }

  // ── 3. scaffold project files ──

  log.info("scaffolding project...");

  // resolve @upend/cli dependency
  // NOTE(review): URL.pathname is POSIX-style — verify behavior on Windows
  const cliPkgPath = new URL("../../package.json", import.meta.url).pathname;
  const cliPkg = JSON.parse(readFileSync(cliPkgPath, "utf-8"));
  const cliRoot = join(cliPkgPath, "..");
  // presumably version 0.1.0 marks a local dev build, hence the file: link
  const cliDep = cliPkg.version === "0.1.0" ? `file:${cliRoot}` : `^${cliPkg.version}`;

  writeFile(projectDir, "upend.config.ts", `import { defineConfig } from "@upend/cli";

export default defineConfig({
  name: "${name}",
  database: process.env.DATABASE_URL,
  dataApi: process.env.NEON_DATA_API,
  deploy: {
    host: process.env.DEPLOY_HOST,
    dir: "/opt/upend",
  },
});
`);

  writeFile(projectDir, "package.json", JSON.stringify({
    name,
    private: true,
    type: "module",
    scripts: {
      dev: "upend dev",
      deploy: "upend deploy",
      migrate: "upend migrate",
    },
    dependencies: {
      "@upend/cli": cliDep,
    },
  }, null, 2) + "\n");

  // .env holds real secrets (gitignored, dotenvx-encrypted below)
  writeFile(projectDir, ".env", `DATABASE_URL="${databaseUrl}"
NEON_DATA_API="${neonDataApi}"
NEON_PROJECT_ID="${neonProjectId}"
ANTHROPIC_API_KEY=
DEPLOY_HOST=
API_PORT=3001
CLAUDE_PORT=3002
`);

  writeFile(projectDir, ".env.example", `DATABASE_URL=postgresql://user:pass@host/db?sslmode=require
NEON_DATA_API=https://xxx.data-api.neon.tech
ANTHROPIC_API_KEY=sk-ant-...
DEPLOY_HOST=ec2-user@x.x.x.x
`);

  writeFile(projectDir, ".gitignore", `node_modules/
.env
.env.keys
.keys/
.snapshots/
sessions/
*.log
.DS_Store
`);

  // migrations
  mkdirSync(join(projectDir, "migrations"), { recursive: true });
  writeFile(projectDir, "migrations/001_init.sql", `-- your first migration
CREATE TABLE IF NOT EXISTS example (
  id BIGSERIAL PRIMARY KEY,
  name TEXT NOT NULL,
  data JSONB DEFAULT '{}',
  created_at TIMESTAMPTZ DEFAULT now(),
  updated_at TIMESTAMPTZ DEFAULT now()
);
`);

  // apps + services
  mkdirSync(join(projectDir, "apps"), { recursive: true });
  writeFile(projectDir, "apps/.gitkeep", "");
  mkdirSync(join(projectDir, "services"), { recursive: true });
  writeFile(projectDir, "services/.gitkeep", "");

  // CLAUDE.md
  writeFile(projectDir, "CLAUDE.md", `# ${name}

You have FULL control of this codebase. Edit anything. Run anything. Create migrations. You are the developer.

Changes take effect immediately (Bun --watch). A snapshot was taken before you started — if something breaks, the user can rollback.

## Stack
- **Runtime**: Bun
- **Framework**: Hono
- **Database**: Neon Postgres (connection in node_modules/@upend/cli)
- **Auth**: Custom JWT (RS256)
- **Domain**: ${domain}

## What you can do
- Edit any file in the project
- Create new files, migrations, apps
- Run \`upend migrate\` to apply database migrations
- Create apps in \`apps/\` that are instantly served at \`/apps/<name>/\`

## Data API
Apps talk to Neon Data API at \`/api/data/<table>\`:
- GET \`/api/data/example?order=created_at.desc\` — list rows
- POST \`/api/data/example\` — create (JSON body, \`Prefer: return=representation\`)
- PATCH \`/api/data/example?id=eq.5\` — update
- DELETE \`/api/data/example?id=eq.5\` — delete
All requests need \`Authorization: Bearer <jwt>\` header.

## Conventions
- Migrations: plain SQL in \`migrations/\`, numbered \`001_name.sql\`
- Apps: static HTML/JS/CSS in \`apps/<name>/\`
- Custom services: \`services/<name>/index.ts\`
`);

  log.success("project scaffolded");

  // ── 4. encrypt .env ──

  if (databaseUrl) {
    log.info("encrypting .env...");
    await exec(["bunx", "@dotenvx/dotenvx", "encrypt"], { cwd: projectDir });
    log.success(".env encrypted");
  }

  // ── 5. git init ──

  log.info("initializing git...");
  await execOrDie(["git", "init"], { cwd: projectDir });
  await execOrDie(["git", "add", "-A"], { cwd: projectDir });
  await execOrDie(["git", "commit", "-m", "initial commit"], { cwd: projectDir });
  log.success("git initialized");

  // ── 6. install deps ──

  log.info("installing dependencies...");
  await execOrDie(["bun", "install"], { cwd: projectDir });
  log.success("dependencies installed");

  // ── done ──

  log.blank();
  log.header(`${name} is ready`);
  log.blank();
  log.info(`cd ${name}`);
  if (!databaseUrl) {
    log.info("# add your DATABASE_URL to .env");
  }
  if (!process.env.ANTHROPIC_API_KEY) {
    log.info("# add your ANTHROPIC_API_KEY to .env");
  }
  log.info("upend dev");
  log.blank();
  if (databaseUrl) {
    log.dim(`database: ${neonProjectId}`);
    if (neonDataApi) log.dim(`data API: ${neonDataApi}`);
    log.dim(`JWKS: https://${domain}/.well-known/jwks.json`);
    log.dim(`deploy: upend infra:aws && upend deploy`);
  }
  log.blank();
}
|
|
294
|
+
|
|
295
|
+
// ── helpers ──
|
|
296
|
+
|
|
297
|
+
function getNeonToken(): string | null {
|
|
298
|
+
const paths = [
|
|
299
|
+
join(process.env.HOME || "", ".config/neonctl/credentials.json"),
|
|
300
|
+
join(process.env.HOME || "", "Library/Application Support/neonctl/credentials.json"),
|
|
301
|
+
];
|
|
302
|
+
for (const p of paths) {
|
|
303
|
+
try {
|
|
304
|
+
const creds = JSON.parse(readFileSync(p, "utf-8"));
|
|
305
|
+
return creds.access_token || null;
|
|
306
|
+
} catch {}
|
|
307
|
+
}
|
|
308
|
+
return null;
|
|
309
|
+
}
|
|
310
|
+
|
|
311
|
+
function writeFile(dir: string, path: string, content: string) {
|
|
312
|
+
const fullPath = join(dir, path);
|
|
313
|
+
mkdirSync(join(fullPath, ".."), { recursive: true });
|
|
314
|
+
writeFileSync(fullPath, content);
|
|
315
|
+
}
|
|
316
|
+
|
|
317
|
+
async function exportKeyToPem(key: CryptoKey, type: "PRIVATE" | "PUBLIC") {
|
|
318
|
+
const format = type === "PRIVATE" ? "pkcs8" : "spki";
|
|
319
|
+
const exported = await crypto.subtle.exportKey(format, key);
|
|
320
|
+
const b64 = Buffer.from(exported).toString("base64");
|
|
321
|
+
const lines = b64.match(/.{1,64}/g)!.join("\n");
|
|
322
|
+
return `-----BEGIN ${type} KEY-----\n${lines}\n-----END ${type} KEY-----\n`;
|
|
323
|
+
}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import { log } from "../lib/log";
|
|
2
|
+
import { readdirSync, readFileSync } from "fs";
|
|
3
|
+
import { join, resolve } from "path";
|
|
4
|
+
|
|
5
|
+
export default async function migrate(args: string[]) {
|
|
6
|
+
const projectDir = resolve(".");
|
|
7
|
+
const migrationsDir = join(projectDir, "migrations");
|
|
8
|
+
|
|
9
|
+
log.header("running migrations");
|
|
10
|
+
|
|
11
|
+
// dynamic import postgres from the project's node_modules
|
|
12
|
+
const postgres = (await import("postgres")).default;
|
|
13
|
+
const sql = postgres(process.env.DATABASE_URL!, {
|
|
14
|
+
max: 1,
|
|
15
|
+
onnotice: () => {},
|
|
16
|
+
});
|
|
17
|
+
|
|
18
|
+
try {
|
|
19
|
+
// ensure migrations table
|
|
20
|
+
await sql`
|
|
21
|
+
CREATE TABLE IF NOT EXISTS _migrations (
|
|
22
|
+
name TEXT PRIMARY KEY,
|
|
23
|
+
ran_at TIMESTAMPTZ DEFAULT now()
|
|
24
|
+
)
|
|
25
|
+
`;
|
|
26
|
+
|
|
27
|
+
const ran = new Set(
|
|
28
|
+
(await sql`SELECT name FROM _migrations`).map((r: any) => r.name)
|
|
29
|
+
);
|
|
30
|
+
|
|
31
|
+
let files: string[];
|
|
32
|
+
try {
|
|
33
|
+
files = readdirSync(migrationsDir)
|
|
34
|
+
.filter((f) => f.endsWith(".sql"))
|
|
35
|
+
.sort();
|
|
36
|
+
} catch {
|
|
37
|
+
log.warn("no migrations directory found");
|
|
38
|
+
process.exit(0);
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
let count = 0;
|
|
42
|
+
for (const file of files) {
|
|
43
|
+
if (ran.has(file)) continue;
|
|
44
|
+
log.info(`running: ${file}`);
|
|
45
|
+
const content = readFileSync(join(migrationsDir, file), "utf-8");
|
|
46
|
+
await sql.unsafe(content);
|
|
47
|
+
await sql`INSERT INTO _migrations (name) VALUES (${file})`;
|
|
48
|
+
log.success(file);
|
|
49
|
+
count++;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
if (count === 0) {
|
|
53
|
+
log.info("no new migrations");
|
|
54
|
+
} else {
|
|
55
|
+
log.success(`${count} migration(s) applied`);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
await sql.end();
|
|
59
|
+
} catch (err: any) {
|
|
60
|
+
log.error(`migration failed: ${err.message}`);
|
|
61
|
+
await sql.end();
|
|
62
|
+
process.exit(1);
|
|
63
|
+
}
|
|
64
|
+
}
|
package/src/config.ts
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
export interface UpendConfig {
|
|
2
|
+
name: string;
|
|
3
|
+
database?: string;
|
|
4
|
+
dataApi?: string;
|
|
5
|
+
auth?: {
|
|
6
|
+
audience?: string;
|
|
7
|
+
tokenExpiry?: string;
|
|
8
|
+
};
|
|
9
|
+
services?: Record<string, { entry: string; port: number }>;
|
|
10
|
+
deploy?: {
|
|
11
|
+
host?: string;
|
|
12
|
+
dir?: string;
|
|
13
|
+
};
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
export function defineConfig(config: UpendConfig): UpendConfig {
|
|
17
|
+
return config;
|
|
18
|
+
}
|
package/src/index.ts
ADDED
package/src/lib/auth.ts
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import { importPKCS8, importSPKI, exportJWK, SignJWT, jwtVerify } from "jose";
|
|
2
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
|
|
3
|
+
import { join } from "path";
|
|
4
|
+
|
|
5
|
+
// KEYS_DIR resolves from the user's project, not the package
|
|
6
|
+
// KEYS_DIR resolves from the user's project, not the package
const PROJECT_ROOT = process.env.UPEND_PROJECT || process.cwd();
const KEYS_DIR = join(PROJECT_ROOT, ".keys");
// PEM files are created on demand by ensureKeys() below
const PRIVATE_KEY_PATH = join(KEYS_DIR, "private.pem");
const PUBLIC_KEY_PATH = join(KEYS_DIR, "public.pem");
// RS256 so the public half can be published as a JWKS for external verifiers
const ALG = "RS256";
const ISSUER = "upend";
|
|
12
|
+
|
|
13
|
+
async function ensureKeys() {
|
|
14
|
+
if (existsSync(PRIVATE_KEY_PATH) && existsSync(PUBLIC_KEY_PATH)) return;
|
|
15
|
+
|
|
16
|
+
mkdirSync(KEYS_DIR, { recursive: true });
|
|
17
|
+
console.log("[auth] generating RSA key pair...");
|
|
18
|
+
|
|
19
|
+
const { privateKey, publicKey } = await crypto.subtle.generateKey(
|
|
20
|
+
{ name: "RSASSA-PKCS1-v1_5", modulusLength: 2048, publicExponent: new Uint8Array([1, 0, 1]), hash: "SHA-256" },
|
|
21
|
+
true,
|
|
22
|
+
["sign", "verify"]
|
|
23
|
+
);
|
|
24
|
+
|
|
25
|
+
const privPem = await exportKeyToPem(privateKey, "PRIVATE");
|
|
26
|
+
const pubPem = await exportKeyToPem(publicKey, "PUBLIC");
|
|
27
|
+
|
|
28
|
+
writeFileSync(PRIVATE_KEY_PATH, privPem);
|
|
29
|
+
writeFileSync(PUBLIC_KEY_PATH, pubPem);
|
|
30
|
+
console.log("[auth] keys written to .keys/");
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
async function exportKeyToPem(key: CryptoKey, type: "PRIVATE" | "PUBLIC") {
|
|
34
|
+
const format = type === "PRIVATE" ? "pkcs8" : "spki";
|
|
35
|
+
const exported = await crypto.subtle.exportKey(format, key);
|
|
36
|
+
const b64 = Buffer.from(exported).toString("base64");
|
|
37
|
+
const lines = b64.match(/.{1,64}/g)!.join("\n");
|
|
38
|
+
return `-----BEGIN ${type} KEY-----\n${lines}\n-----END ${type} KEY-----\n`;
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
// imported key objects, cached for the lifetime of the process so the PEM
// files are only read and parsed once
let _privateKey: Awaited<ReturnType<typeof importPKCS8>> | null = null;
let _publicKey: Awaited<ReturnType<typeof importSPKI>> | null = null;
|
|
43
|
+
|
|
44
|
+
async function getPrivateKey() {
|
|
45
|
+
if (!_privateKey) {
|
|
46
|
+
await ensureKeys();
|
|
47
|
+
_privateKey = await importPKCS8(readFileSync(PRIVATE_KEY_PATH, "utf-8"), ALG);
|
|
48
|
+
}
|
|
49
|
+
return _privateKey;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
async function getPublicKey() {
|
|
53
|
+
if (!_publicKey) {
|
|
54
|
+
await ensureKeys();
|
|
55
|
+
_publicKey = await importSPKI(readFileSync(PUBLIC_KEY_PATH, "utf-8"), ALG);
|
|
56
|
+
}
|
|
57
|
+
return _publicKey;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
export async function signToken(userId: string, email: string, appRole: string = "user") {
|
|
61
|
+
const key = await getPrivateKey();
|
|
62
|
+
return new SignJWT({ email, role: "authenticated", app_role: appRole })
|
|
63
|
+
.setProtectedHeader({ alg: ALG, kid: "upend-1" })
|
|
64
|
+
.setSubject(userId)
|
|
65
|
+
.setIssuer(ISSUER)
|
|
66
|
+
.setAudience("upend")
|
|
67
|
+
.setIssuedAt()
|
|
68
|
+
.setExpirationTime("24h")
|
|
69
|
+
.sign(key);
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
export async function verifyToken(token: string) {
|
|
73
|
+
const key = await getPublicKey();
|
|
74
|
+
const { payload } = await jwtVerify(token, key, {
|
|
75
|
+
issuer: ISSUER,
|
|
76
|
+
audience: "upend",
|
|
77
|
+
});
|
|
78
|
+
return payload;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
export async function getJWKS() {
|
|
82
|
+
const key = await getPublicKey();
|
|
83
|
+
const jwk = await exportJWK(key);
|
|
84
|
+
return {
|
|
85
|
+
keys: [
|
|
86
|
+
{ ...jwk, kid: "upend-1", alg: ALG, use: "sig" },
|
|
87
|
+
],
|
|
88
|
+
};
|
|
89
|
+
}
|
package/src/lib/db.ts
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import postgres from "postgres";

// Shared postgres connection pool used by all services.
// NOTE(review): DATABASE_URL is read (and asserted non-null) at import time —
// env vars must be loaded before this module is imported.
export const sql = postgres(process.env.DATABASE_URL!, {
  max: 20,
  idle_timeout: 20,
  connect_timeout: 10,
  transform: postgres.camel, // snake_case columns ↔ camelCase properties
  onnotice: (notice) => console.log("pg:", notice.message),
});

// subscribe to pg NOTIFY for realtime log streaming
export async function listen(channel: string, fn: (payload: string) => void) {
  await sql.listen(channel, fn);
}
|
package/src/lib/exec.ts
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { log } from "./log";
|
|
2
|
+
|
|
3
|
+
export async function exec(
|
|
4
|
+
cmd: string[],
|
|
5
|
+
opts: { cwd?: string; env?: Record<string, string>; silent?: boolean } = {}
|
|
6
|
+
): Promise<{ stdout: string; stderr: string; exitCode: number }> {
|
|
7
|
+
const proc = Bun.spawn(cmd, {
|
|
8
|
+
cwd: opts.cwd,
|
|
9
|
+
env: { ...process.env, ...opts.env },
|
|
10
|
+
stdout: "pipe",
|
|
11
|
+
stderr: "pipe",
|
|
12
|
+
});
|
|
13
|
+
|
|
14
|
+
const stdout = await new Response(proc.stdout).text();
|
|
15
|
+
const stderr = await new Response(proc.stderr).text();
|
|
16
|
+
const exitCode = await proc.exited;
|
|
17
|
+
|
|
18
|
+
if (exitCode !== 0 && !opts.silent) {
|
|
19
|
+
log.error(`command failed: ${cmd.join(" ")}`);
|
|
20
|
+
if (stderr.trim()) log.dim(stderr.trim());
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
return { stdout: stdout.trim(), stderr: stderr.trim(), exitCode };
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
export async function execOrDie(cmd: string[], opts: { cwd?: string } = {}): Promise<{ stdout: string; stderr: string }> {
|
|
27
|
+
const { stdout, exitCode, stderr } = await exec(cmd, opts);
|
|
28
|
+
if (exitCode !== 0) {
|
|
29
|
+
log.error(stderr || `${cmd.join(" ")} failed`);
|
|
30
|
+
process.exit(1);
|
|
31
|
+
}
|
|
32
|
+
return { stdout, stderr };
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
export async function hasCommand(name: string): Promise<boolean> {
|
|
36
|
+
const { exitCode } = await exec(["which", name], { silent: true });
|
|
37
|
+
return exitCode === 0;
|
|
38
|
+
}
|
package/src/lib/log.ts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
const orange = "\x1b[38;5;208m";
|
|
2
|
+
const green = "\x1b[32m";
|
|
3
|
+
const red = "\x1b[31m";
|
|
4
|
+
const dim = "\x1b[2m";
|
|
5
|
+
const reset = "\x1b[0m";
|
|
6
|
+
const bold = "\x1b[1m";
|
|
7
|
+
|
|
8
|
+
export const log = {
|
|
9
|
+
info: (msg: string) => console.log(`${dim}→${reset} ${msg}`),
|
|
10
|
+
success: (msg: string) => console.log(`${green}✓${reset} ${msg}`),
|
|
11
|
+
error: (msg: string) => console.error(`${red}✗${reset} ${msg}`),
|
|
12
|
+
warn: (msg: string) => console.log(`${orange}!${reset} ${msg}`),
|
|
13
|
+
header: (msg: string) => console.log(`\n${bold}${orange}${msg}${reset}\n`),
|
|
14
|
+
dim: (msg: string) => console.log(` ${dim}${msg}${reset}`),
|
|
15
|
+
blank: () => console.log(),
|
|
16
|
+
};
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { createMiddleware } from "hono/factory";
|
|
2
|
+
import { verifyToken } from "./auth";
|
|
3
|
+
|
|
4
|
+
// Claims extracted from a verified JWT and stored on the Hono context
// under the "user" variable.
type AuthPayload = {
  sub: string; // user id (JWT subject)
  email: string;
  role: string; // role used by requireRole checks
};
|
|
9
|
+
|
|
10
|
+
// middleware that verifies JWT from Authorization header or ?token= query param
|
|
11
|
+
export const requireAuth = createMiddleware<{
|
|
12
|
+
Variables: { user: AuthPayload };
|
|
13
|
+
}>(async (c, next) => {
|
|
14
|
+
const header = c.req.header("Authorization");
|
|
15
|
+
const queryToken = c.req.query("token");
|
|
16
|
+
const token = header?.startsWith("Bearer ") ? header.slice(7) : queryToken;
|
|
17
|
+
const method = c.req.method;
|
|
18
|
+
const path = c.req.path;
|
|
19
|
+
|
|
20
|
+
if (!token) {
|
|
21
|
+
console.log(`[auth] 401 no token: ${method} ${path}`);
|
|
22
|
+
return c.json({ error: "missing or invalid Authorization" }, 401);
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
try {
|
|
26
|
+
const payload = await verifyToken(token);
|
|
27
|
+
const user = {
|
|
28
|
+
sub: payload.sub as string,
|
|
29
|
+
email: payload.email as string,
|
|
30
|
+
role: (payload.role as string) || "user",
|
|
31
|
+
};
|
|
32
|
+
console.log(`[auth] ${user.email} → ${method} ${path}`);
|
|
33
|
+
c.set("user", user);
|
|
34
|
+
await next();
|
|
35
|
+
} catch (err: any) {
|
|
36
|
+
console.log(`[auth] 401 invalid token: ${method} ${path} — ${err.message}`);
|
|
37
|
+
return c.json({ error: "invalid token", detail: err.message }, 401);
|
|
38
|
+
}
|
|
39
|
+
});
|
|
40
|
+
|
|
41
|
+
// middleware that requires a specific role
|
|
42
|
+
export const requireRole = (...roles: string[]) =>
|
|
43
|
+
createMiddleware(async (c, next) => {
|
|
44
|
+
const user = c.get("user") as AuthPayload | undefined;
|
|
45
|
+
if (!user) return c.json({ error: "not authenticated" }, 401);
|
|
46
|
+
if (!roles.includes(user.role)) {
|
|
47
|
+
console.log(`[auth] 403 forbidden: ${user.email} needs ${roles.join("|")}, has ${user.role}`);
|
|
48
|
+
return c.json({ error: "forbidden" }, 403);
|
|
49
|
+
}
|
|
50
|
+
await next();
|
|
51
|
+
});
|