@digiko-npm/cms 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/auth/index.d.ts +20 -0
- package/dist/auth/index.js +34 -0
- package/dist/auth-C8Nq_GmD.d.ts +31 -0
- package/dist/config-qNdTlg1g.d.ts +64 -0
- package/dist/http/index.d.ts +15 -0
- package/dist/http/index.js +16 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.js +233 -0
- package/dist/media-ExBfXePZ.d.ts +19 -0
- package/dist/next/index.d.ts +19 -0
- package/dist/next/index.js +54 -0
- package/dist/r2/index.d.ts +34 -0
- package/dist/r2/index.js +82 -0
- package/dist/session/index.d.ts +27 -0
- package/dist/session/index.js +83 -0
- package/dist/supabase/index.d.ts +21 -0
- package/dist/supabase/index.js +22 -0
- package/dist/types/index.d.ts +3 -0
- package/dist/types/index.js +0 -0
- package/package.json +101 -0
- package/src/auth/index.ts +2 -0
- package/src/auth/password.ts +48 -0
- package/src/auth/token.ts +11 -0
- package/src/http/index.ts +1 -0
- package/src/http/status.ts +14 -0
- package/src/index.ts +42 -0
- package/src/next/index.ts +1 -0
- package/src/next/verify-request.ts +54 -0
- package/src/r2/client.ts +28 -0
- package/src/r2/index.ts +2 -0
- package/src/r2/upload.ts +99 -0
- package/src/session/index.ts +2 -0
- package/src/session/rate-limit.ts +66 -0
- package/src/session/store.ts +56 -0
- package/src/supabase/client.ts +10 -0
- package/src/supabase/index.ts +2 -0
- package/src/supabase/server.ts +21 -0
- package/src/types/auth.ts +27 -0
- package/src/types/config.ts +65 -0
- package/src/types/index.ts +22 -0
- package/src/types/media.ts +19 -0
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
// Generated bundle (tsup) for the "./session" entry point.
// NOTE(review): build artifact — make changes in src/session/* and rebuild; do not edit by hand.

// src/session/store.ts
import { Redis } from "@upstash/redis";
var DEFAULT_SESSION_DURATION = 24 * 60 * 60 * 1e3;
function createSessionStore(config) {
  const redis = new Redis({
    url: config.redisUrl,
    token: config.redisToken
  });
  const sessionKey = (token) => `${config.keyPrefix}session:${token}`;
  return {
    async addSession(token, session) {
      const ttlMs = session.expiresAt - Date.now();
      // Clamp to >= 1s: Redis EX rejects non-positive TTLs.
      const ttlSeconds = Math.max(Math.ceil(ttlMs / 1e3), 1);
      await redis.set(sessionKey(token), JSON.stringify(session), { ex: ttlSeconds });
    },
    async getSession(token) {
      const data = await redis.get(sessionKey(token));
      if (!data) return void 0;
      // Upstash may auto-deserialize JSON values, so handle both shapes.
      const session = typeof data === "string" ? JSON.parse(data) : data;
      // Defensive eviction in case the Redis TTL has not reaped the key yet.
      if (Date.now() > session.expiresAt) {
        await redis.del(sessionKey(token));
        return void 0;
      }
      return session;
    },
    async removeSession(token) {
      const result = await redis.del(sessionKey(token));
      return result > 0;
    }
  };
}
function getDefaultSessionDuration() {
  return DEFAULT_SESSION_DURATION;
}

// src/session/rate-limit.ts
import { Redis as Redis2 } from "@upstash/redis";
var DEFAULTS = {
  maxAttempts: 10,
  windowMs: 15 * 60 * 1e3
  // 15 minutes
};
function createRateLimiter(config) {
  const redis = new Redis2({
    url: config.redisUrl,
    token: config.redisToken
  });
  const maxAttempts = config.maxAttempts ?? DEFAULTS.maxAttempts;
  const windowMs = config.windowMs ?? DEFAULTS.windowMs;
  const rateLimitKey = (key) => `${config.keyPrefix}ratelimit:${key}`;
  return {
    // Fixed-window counter; see src/session/rate-limit.ts for the annotated source.
    async check(key) {
      const now = Date.now();
      const redisKey = rateLimitKey(key);
      const data = await redis.get(redisKey);
      let entry = null;
      if (data) {
        entry = typeof data === "string" ? JSON.parse(data) : data;
        if (entry && now > entry.resetAt) entry = null;
      }
      if (!entry) {
        const newEntry = { count: 1, resetAt: now + windowMs };
        await redis.set(redisKey, JSON.stringify(newEntry), { ex: Math.ceil(windowMs / 1e3) });
        return { allowed: true, remaining: maxAttempts - 1 };
      }
      if (entry.count >= maxAttempts) {
        return { allowed: false, remaining: 0, retryAfterMs: entry.resetAt - now };
      }
      entry.count++;
      const ttlSeconds = Math.max(Math.ceil((entry.resetAt - now) / 1e3), 1);
      await redis.set(redisKey, JSON.stringify(entry), { ex: ttlSeconds });
      return { allowed: true, remaining: maxAttempts - entry.count };
    },
    async reset(key) {
      await redis.del(rateLimitKey(key));
    }
  };
}
export {
  createRateLimiter,
  createSessionStore,
  getDefaultSessionDuration
};
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
// Generated declarations (tsup) for the "./supabase" entry point.
// NOTE(review): build artifact — edit src/supabase/* and rebuild.
import { SupabaseClient } from '@supabase/supabase-js';
import { c as SupabaseConfig } from '../config-qNdTlg1g.js';

/**
 * Browser-side Supabase client with anon key.
 * Respects RLS — only reads published content.
 */
declare function createBrowserClient(config: SupabaseConfig): SupabaseClient;

/**
 * Server-side Supabase client with service role key.
 * Bypasses RLS — full read/write access.
 * Throws if config.serviceRoleKey is missing (see src/supabase/server.ts).
 */
declare function createAdminClient(config: SupabaseConfig): SupabaseClient;
/**
 * Server-side Supabase client with anon key.
 * Respects RLS — safe for public data fetching.
 */
declare function createPublicClient(config: SupabaseConfig): SupabaseClient;

export { createAdminClient, createBrowserClient, createPublicClient };
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
// Generated bundle (tsup) for the "./supabase" entry point.
// NOTE(review): build artifact — edit src/supabase/* and rebuild.

// src/supabase/client.ts
import { createClient } from "@supabase/supabase-js";
function createBrowserClient(config) {
  return createClient(config.url, config.anonKey);
}

// src/supabase/server.ts
import { createClient as createClient2 } from "@supabase/supabase-js";
function createAdminClient(config) {
  // Fail fast rather than creating a client that silently lacks privileges.
  if (!config.serviceRoleKey) {
    throw new Error("@digiko-npm/cms: serviceRoleKey is required for createAdminClient");
  }
  return createClient2(config.url, config.serviceRoleKey);
}
function createPublicClient(config) {
  return createClient2(config.url, config.anonKey);
}
export {
  createAdminClient,
  createBrowserClient,
  createPublicClient
};
|
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
// Generated declarations (tsup) for the "./types" entry point.
// The single-letter names are tsup's minified re-export aliases — edit src/types/* instead.
export { A as AuthConfig, R as R2Config, a as RateLimiterConfig, b as RequestVerifierConfig, S as SessionStoreConfig, c as SupabaseConfig, U as UploadConfig } from '../config-qNdTlg1g.js';
export { M as MediaInsert, a as MediaRecord, b as MediaUpdate } from '../media-ExBfXePZ.js';
export { A as AuthResult, R as RateLimitResult, S as Session, V as VerifyResult } from '../auth-C8Nq_GmD.js';
|
|
File without changes
|
package/package.json
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@digiko-npm/cms",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Reusable CMS utilities — Supabase, Cloudflare R2, auth, sessions.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"module": "./dist/index.js",
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"exports": {
  ".": {
    "types": "./dist/index.d.ts",
    "import": "./dist/index.js"
  },
  "./supabase": {
    "types": "./dist/supabase/index.d.ts",
    "import": "./dist/supabase/index.js"
  },
  "./r2": {
    "types": "./dist/r2/index.d.ts",
    "import": "./dist/r2/index.js"
  },
  "./auth": {
    "types": "./dist/auth/index.d.ts",
    "import": "./dist/auth/index.js"
  },
  "./session": {
    "types": "./dist/session/index.d.ts",
    "import": "./dist/session/index.js"
  },
  "./next": {
    "types": "./dist/next/index.d.ts",
    "import": "./dist/next/index.js"
  },
  "./http": {
    "types": "./dist/http/index.d.ts",
    "import": "./dist/http/index.js"
  },
  "./types": {
    "types": "./dist/types/index.d.ts",
    "import": "./dist/types/index.js"
  }
},
|
|
43
|
+
"files": [
|
|
44
|
+
"dist/",
|
|
45
|
+
"src/"
|
|
46
|
+
],
|
|
47
|
+
"scripts": {
|
|
48
|
+
"build": "tsup",
|
|
49
|
+
"dev": "tsup --watch",
|
|
50
|
+
"type-check": "tsc --noEmit",
|
|
51
|
+
"lint": "eslint src/",
|
|
52
|
+
"clean": "rm -rf dist",
|
|
53
|
+
"prepublishOnly": "npm run build"
|
|
54
|
+
},
|
|
55
|
+
"publishConfig": {
|
|
56
|
+
"access": "public"
|
|
57
|
+
},
|
|
58
|
+
"peerDependencies": {
|
|
59
|
+
"@supabase/supabase-js": "^2.0.0",
|
|
60
|
+
"@aws-sdk/client-s3": "^3.0.0",
|
|
61
|
+
"@aws-sdk/s3-request-presigner": "^3.0.0",
|
|
62
|
+
"@upstash/redis": "^1.0.0",
|
|
63
|
+
"next": ">=14.0.0"
|
|
64
|
+
},
|
|
65
|
+
"peerDependenciesMeta": {
|
|
66
|
+
"@aws-sdk/client-s3": {
|
|
67
|
+
"optional": true
|
|
68
|
+
},
|
|
69
|
+
"@aws-sdk/s3-request-presigner": {
|
|
70
|
+
"optional": true
|
|
71
|
+
},
|
|
72
|
+
"@upstash/redis": {
|
|
73
|
+
"optional": true
|
|
74
|
+
},
|
|
75
|
+
"next": {
|
|
76
|
+
"optional": true
|
|
77
|
+
}
|
|
78
|
+
},
|
|
79
|
+
"devDependencies": {
|
|
80
|
+
"@supabase/supabase-js": "^2.95.0",
|
|
81
|
+
"@aws-sdk/client-s3": "^3.986.0",
|
|
82
|
+
"@aws-sdk/s3-request-presigner": "^3.986.0",
|
|
83
|
+
"@upstash/redis": "^1.36.0",
|
|
84
|
+
"@types/node": "^20",
|
|
85
|
+
"next": "16.1.6",
|
|
86
|
+
"tsup": "^8.0.0",
|
|
87
|
+
"typescript": "^5"
|
|
88
|
+
},
|
|
89
|
+
"repository": {
|
|
90
|
+
"type": "git",
|
|
91
|
+
"url": "git+https://github.com/digiko-dev/cms.git"
|
|
92
|
+
},
|
|
93
|
+
"license": "MIT",
|
|
94
|
+
"keywords": [
|
|
95
|
+
"cms",
|
|
96
|
+
"supabase",
|
|
97
|
+
"cloudflare-r2",
|
|
98
|
+
"admin",
|
|
99
|
+
"auth"
|
|
100
|
+
]
|
|
101
|
+
}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import crypto from 'crypto'
|
|
2
|
+
import type { AuthConfig } from '../types/config'
|
|
3
|
+
|
|
4
|
+
const DEFAULTS = {
|
|
5
|
+
iterations: 100_000,
|
|
6
|
+
keyLength: 64,
|
|
7
|
+
digest: 'sha512',
|
|
8
|
+
} as const
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Hash a password using PBKDF2.
|
|
12
|
+
* Returns a hex-encoded hash string.
|
|
13
|
+
*/
|
|
14
|
+
export function hashPassword(
|
|
15
|
+
password: string,
|
|
16
|
+
salt: string,
|
|
17
|
+
config?: AuthConfig['pbkdf2']
|
|
18
|
+
): string {
|
|
19
|
+
const iterations = config?.iterations ?? DEFAULTS.iterations
|
|
20
|
+
const keyLength = config?.keyLength ?? DEFAULTS.keyLength
|
|
21
|
+
const digest = config?.digest ?? DEFAULTS.digest
|
|
22
|
+
|
|
23
|
+
return crypto
|
|
24
|
+
.pbkdf2Sync(password, salt, iterations, keyLength, digest)
|
|
25
|
+
.toString('hex')
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Verify a password against a stored hash using timing-safe comparison.
|
|
30
|
+
* Returns true if the password matches.
|
|
31
|
+
*/
|
|
32
|
+
export function verifyPassword(
|
|
33
|
+
password: string,
|
|
34
|
+
salt: string,
|
|
35
|
+
storedHash: string,
|
|
36
|
+
config?: AuthConfig['pbkdf2']
|
|
37
|
+
): boolean {
|
|
38
|
+
const inputHash = hashPassword(password, salt, config)
|
|
39
|
+
|
|
40
|
+
const storedBuffer = Buffer.from(storedHash, 'hex')
|
|
41
|
+
const inputBuffer = Buffer.from(inputHash, 'hex')
|
|
42
|
+
|
|
43
|
+
if (storedBuffer.length !== inputBuffer.length) {
|
|
44
|
+
return false
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
return crypto.timingSafeEqual(storedBuffer, inputBuffer)
|
|
48
|
+
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import crypto from 'crypto'
|
|
2
|
+
|
|
3
|
+
const DEFAULT_TOKEN_BYTES = 32
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* Generate a cryptographically secure session token.
|
|
7
|
+
* Returns a hex string (default: 64 characters from 32 bytes).
|
|
8
|
+
*/
|
|
9
|
+
export function generateSessionToken(bytes?: number): string {
|
|
10
|
+
return crypto.randomBytes(bytes ?? DEFAULT_TOKEN_BYTES).toString('hex')
|
|
11
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { HTTP_STATUS, type HttpStatus } from './status'
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/**
 * Canonical HTTP status codes used across the CMS API surface.
 * Declared `as const` so each value stays a literal type (see HttpStatus).
 */
export const HTTP_STATUS = {
  // 2xx — success
  OK: 200,
  CREATED: 201,
  NO_CONTENT: 204,
  // 4xx — client errors
  BAD_REQUEST: 400,
  UNAUTHORIZED: 401,
  FORBIDDEN: 403,
  NOT_FOUND: 404,
  CONFLICT: 409,
  TOO_MANY_REQUESTS: 429,
  // 5xx — server errors
  INTERNAL_ERROR: 500,
} as const

/** Union of all status-code values above (200 | 201 | … | 500). */
export type HttpStatus = (typeof HTTP_STATUS)[keyof typeof HTTP_STATUS]
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
// Root entry point for @digiko-npm/cms: re-exports every submodule's public API.
// Each section below is also reachable via the subpath exports declared in
// package.json ("./supabase", "./r2", "./auth", "./session", "./next", "./http", "./types").

// Supabase
export { createBrowserClient } from './supabase/client'
export { createAdminClient, createPublicClient } from './supabase/server'

// Cloudflare R2
export { createR2Client, getR2Bucket, getR2PublicUrl } from './r2/client'
export { uploadFile, type UploadOptions, type UploadResult } from './r2/upload'

// Auth
export { hashPassword, verifyPassword } from './auth/password'
export { generateSessionToken } from './auth/token'

// Sessions
export { createSessionStore, getDefaultSessionDuration, type SessionStore } from './session/store'
export { createRateLimiter, type RateLimiter } from './session/rate-limit'

// HTTP
export { HTTP_STATUS, type HttpStatus } from './http/status'

// Types (type-only re-exports; erased at build time)
export type {
  SupabaseConfig,
  R2Config,
  SessionStoreConfig,
  RateLimiterConfig,
  AuthConfig,
  UploadConfig,
  RequestVerifierConfig,
} from './types/config'

export type {
  MediaRecord,
  MediaInsert,
  MediaUpdate,
} from './types/media'

export type {
  Session,
  AuthResult,
  VerifyResult,
  RateLimitResult,
} from './types/auth'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { createRequestVerifier } from './verify-request'
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { NextRequest, NextResponse } from 'next/server'
|
|
2
|
+
import type { RequestVerifierConfig } from '../types/config'
|
|
3
|
+
import { HTTP_STATUS } from '../http/status'
|
|
4
|
+
|
|
5
|
+
const DEFAULTS = {
|
|
6
|
+
cookieName: 'admin_session',
|
|
7
|
+
unauthorizedMessage: 'Unauthorized',
|
|
8
|
+
sessionExpiredMessage: 'Session expired',
|
|
9
|
+
} as const
|
|
10
|
+
|
|
11
|
+
type AuthSuccess = { authorized: true; token: string }
|
|
12
|
+
type AuthFailure = { authorized: false; response: NextResponse }
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* Create a request verifier for Next.js API routes.
|
|
16
|
+
* Extracts session token from cookie or Authorization header,
|
|
17
|
+
* then validates against the provided session store.
|
|
18
|
+
*/
|
|
19
|
+
export function createRequestVerifier(config: RequestVerifierConfig) {
|
|
20
|
+
const cookieName = config.cookieName ?? DEFAULTS.cookieName
|
|
21
|
+
const unauthorizedMsg = config.unauthorizedMessage ?? DEFAULTS.unauthorizedMessage
|
|
22
|
+
const sessionExpiredMsg = config.sessionExpiredMessage ?? DEFAULTS.sessionExpiredMessage
|
|
23
|
+
|
|
24
|
+
return async function verifyAdminRequest(
|
|
25
|
+
request: NextRequest
|
|
26
|
+
): Promise<AuthSuccess | AuthFailure> {
|
|
27
|
+
const token =
|
|
28
|
+
request.cookies.get(cookieName)?.value ||
|
|
29
|
+
request.headers.get('Authorization')?.replace('Bearer ', '')
|
|
30
|
+
|
|
31
|
+
if (!token) {
|
|
32
|
+
return {
|
|
33
|
+
authorized: false,
|
|
34
|
+
response: NextResponse.json(
|
|
35
|
+
{ error: unauthorizedMsg },
|
|
36
|
+
{ status: HTTP_STATUS.UNAUTHORIZED }
|
|
37
|
+
),
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
const session = await config.getSession(token)
|
|
42
|
+
if (!session) {
|
|
43
|
+
return {
|
|
44
|
+
authorized: false,
|
|
45
|
+
response: NextResponse.json(
|
|
46
|
+
{ error: sessionExpiredMsg },
|
|
47
|
+
{ status: HTTP_STATUS.UNAUTHORIZED }
|
|
48
|
+
),
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
return { authorized: true, token }
|
|
53
|
+
}
|
|
54
|
+
}
|
package/src/r2/client.ts
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import { S3Client } from '@aws-sdk/client-s3'
|
|
2
|
+
import type { R2Config } from '../types/config'
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Create a Cloudflare R2 client (S3-compatible).
|
|
6
|
+
*/
|
|
7
|
+
export function createR2Client(config: R2Config): S3Client {
|
|
8
|
+
return new S3Client({
|
|
9
|
+
region: 'auto',
|
|
10
|
+
endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
|
|
11
|
+
credentials: {
|
|
12
|
+
accessKeyId: config.accessKeyId,
|
|
13
|
+
secretAccessKey: config.secretAccessKey,
|
|
14
|
+
},
|
|
15
|
+
requestChecksumCalculation: 'WHEN_REQUIRED',
|
|
16
|
+
responseChecksumValidation: 'WHEN_REQUIRED',
|
|
17
|
+
})
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/** Get the R2 bucket name from config */
|
|
21
|
+
export function getR2Bucket(config: R2Config): string {
|
|
22
|
+
return config.bucketName
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
/** Get the R2 public URL from config */
|
|
26
|
+
export function getR2PublicUrl(config: R2Config): string {
|
|
27
|
+
return config.publicUrl
|
|
28
|
+
}
|
package/src/r2/index.ts
ADDED
package/src/r2/upload.ts
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import type { UploadConfig } from '../types/config'
|
|
2
|
+
import type { MediaRecord } from '../types/media'
|
|
3
|
+
|
|
4
|
+
export interface UploadOptions {
  file: File
  folder?: string
  /** Progress callback, invoked with an integer percentage (0–100). */
  onProgress?: (percent: number) => void
}

export interface UploadResult {
  /** Public URL of the uploaded object. */
  url: string
  /** Object key inside the R2 bucket. */
  key: string
  /** Row returned by the media endpoint for this upload. */
  media: MediaRecord
}

/**
 * Upload a file to R2 via presigned URL, then register in the DB.
 *
 * Flow:
 * 1. POST to uploadEndpoint → get presigned PUT URL
 * 2. PUT file directly to R2 (with progress tracking via XHR)
 * 3. POST metadata to mediaEndpoint → register the upload
 *
 * Throws if either endpoint responds non-2xx or the PUT to R2 fails.
 * NOTE(review): a failure in step 3 leaves an orphaned object in R2 — no
 * cleanup happens here; confirm whether the API reaps unregistered objects.
 */
export async function uploadFile(
  config: UploadConfig,
  { file, folder = 'media', onProgress }: UploadOptions
): Promise<UploadResult> {
  // 1. Get presigned URL
  const presignRes = await fetch(config.uploadEndpoint, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      filename: file.name,
      contentType: file.type,
      folder,
    }),
  })

  if (!presignRes.ok) {
    throw new Error(`Failed to get upload URL: ${presignRes.status}`)
  }

  // Presumably { uploadUrl, publicUrl, key } is the endpoint's contract —
  // there is no runtime validation of the response shape here.
  const { uploadUrl, publicUrl, key } = await presignRes.json()

  // 2. Upload directly to R2 with progress
  await uploadToR2(uploadUrl, file, file.type, onProgress)

  // 3. Register in the database
  const confirmRes = await fetch(config.mediaEndpoint, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      filename: key.split('/').pop(),
      original_name: file.name,
      mime_type: file.type,
      size_bytes: file.size,
      url: publicUrl,
    }),
  })

  if (!confirmRes.ok) {
    throw new Error(`Failed to register upload: ${confirmRes.status}`)
  }

  const media: MediaRecord = await confirmRes.json()
  return { url: publicUrl, key, media }
}
|
|
68
|
+
|
|
69
|
+
function uploadToR2(
|
|
70
|
+
url: string,
|
|
71
|
+
file: File,
|
|
72
|
+
contentType: string,
|
|
73
|
+
onProgress?: (percent: number) => void
|
|
74
|
+
): Promise<void> {
|
|
75
|
+
return new Promise((resolve, reject) => {
|
|
76
|
+
const xhr = new XMLHttpRequest()
|
|
77
|
+
|
|
78
|
+
xhr.upload.addEventListener('progress', (e) => {
|
|
79
|
+
if (e.lengthComputable && onProgress) {
|
|
80
|
+
onProgress(Math.round((e.loaded / e.total) * 100))
|
|
81
|
+
}
|
|
82
|
+
})
|
|
83
|
+
|
|
84
|
+
xhr.addEventListener('load', () => {
|
|
85
|
+
if (xhr.status >= 200 && xhr.status < 300) {
|
|
86
|
+
resolve()
|
|
87
|
+
} else {
|
|
88
|
+
reject(new Error(`Upload failed with status ${xhr.status}`))
|
|
89
|
+
}
|
|
90
|
+
})
|
|
91
|
+
|
|
92
|
+
xhr.addEventListener('error', () => reject(new Error('Upload failed')))
|
|
93
|
+
xhr.addEventListener('abort', () => reject(new Error('Upload aborted')))
|
|
94
|
+
|
|
95
|
+
xhr.open('PUT', url)
|
|
96
|
+
xhr.setRequestHeader('Content-Type', contentType)
|
|
97
|
+
xhr.send(file)
|
|
98
|
+
})
|
|
99
|
+
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { Redis } from '@upstash/redis'
|
|
2
|
+
import type { RateLimiterConfig } from '../types/config'
|
|
3
|
+
import type { RateLimitResult } from '../types/auth'
|
|
4
|
+
|
|
5
|
+
/** Fallback limits applied when the config omits them. */
const DEFAULTS = {
  maxAttempts: 10,
  windowMs: 15 * 60 * 1000, // 15 minutes
} as const

// Counter state persisted in Redis for one rate-limit window.
interface RateLimitEntry {
  count: number
  resetAt: number
}

export interface RateLimiter {
  check: (key: string) => Promise<RateLimitResult>
  reset: (key: string) => Promise<void>
}

/**
 * Create a Redis-backed fixed-window rate limiter: the first attempt for a
 * key opens a window of `windowMs`, and every attempt until `resetAt`
 * counts against the same budget of `maxAttempts`.
 * Keys are namespaced with the configured keyPrefix.
 *
 * NOTE(review): check() performs a non-atomic GET/SET round trip, so
 * concurrent requests for the same key can undercount attempts; consider
 * Redis INCR + EXPIRE if strict enforcement is required.
 */
export function createRateLimiter(config: RateLimiterConfig): RateLimiter {
  const redis = new Redis({
    url: config.redisUrl,
    token: config.redisToken,
  })

  const maxAttempts = config.maxAttempts ?? DEFAULTS.maxAttempts
  const windowMs = config.windowMs ?? DEFAULTS.windowMs
  const rateLimitKey = (key: string) => `${config.keyPrefix}ratelimit:${key}`

  return {
    async check(key: string): Promise<RateLimitResult> {
      const now = Date.now()
      const redisKey = rateLimitKey(key)
      const data = await redis.get<string>(redisKey)
      let entry: RateLimitEntry | null = null

      if (data) {
        // Upstash may return the parsed object instead of the raw JSON string.
        entry = typeof data === 'string' ? JSON.parse(data) : data
        // Treat an expired window as absent so a fresh one is opened below.
        if (entry && now > entry.resetAt) entry = null
      }

      if (!entry) {
        const newEntry: RateLimitEntry = { count: 1, resetAt: now + windowMs }
        await redis.set(redisKey, JSON.stringify(newEntry), { ex: Math.ceil(windowMs / 1000) })
        return { allowed: true, remaining: maxAttempts - 1 }
      }

      if (entry.count >= maxAttempts) {
        return { allowed: false, remaining: 0, retryAfterMs: entry.resetAt - now }
      }

      entry.count++
      // Keep the Redis TTL aligned with the remaining window (min 1s — EX rejects 0).
      const ttlSeconds = Math.max(Math.ceil((entry.resetAt - now) / 1000), 1)
      await redis.set(redisKey, JSON.stringify(entry), { ex: ttlSeconds })
      return { allowed: true, remaining: maxAttempts - entry.count }
    },

    async reset(key: string): Promise<void> {
      await redis.del(rateLimitKey(key))
    },
  }
}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import { Redis } from '@upstash/redis'
|
|
2
|
+
import type { SessionStoreConfig } from '../types/config'
|
|
3
|
+
import type { Session } from '../types/auth'
|
|
4
|
+
|
|
5
|
+
const DEFAULT_SESSION_DURATION = 24 * 60 * 60 * 1000 // 24 hours
|
|
6
|
+
|
|
7
|
+
export interface SessionStore {
|
|
8
|
+
addSession: (token: string, session: Session) => Promise<void>
|
|
9
|
+
getSession: (token: string) => Promise<Session | undefined>
|
|
10
|
+
removeSession: (token: string) => Promise<boolean>
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* Create a Redis-backed session store.
|
|
15
|
+
* All keys are namespaced with the configured keyPrefix.
|
|
16
|
+
*/
|
|
17
|
+
export function createSessionStore(config: SessionStoreConfig): SessionStore {
|
|
18
|
+
const redis = new Redis({
|
|
19
|
+
url: config.redisUrl,
|
|
20
|
+
token: config.redisToken,
|
|
21
|
+
})
|
|
22
|
+
|
|
23
|
+
const sessionKey = (token: string) => `${config.keyPrefix}session:${token}`
|
|
24
|
+
|
|
25
|
+
return {
|
|
26
|
+
async addSession(token: string, session: Session): Promise<void> {
|
|
27
|
+
const ttlMs = session.expiresAt - Date.now()
|
|
28
|
+
const ttlSeconds = Math.max(Math.ceil(ttlMs / 1000), 1)
|
|
29
|
+
await redis.set(sessionKey(token), JSON.stringify(session), { ex: ttlSeconds })
|
|
30
|
+
},
|
|
31
|
+
|
|
32
|
+
async getSession(token: string): Promise<Session | undefined> {
|
|
33
|
+
const data = await redis.get<string>(sessionKey(token))
|
|
34
|
+
if (!data) return undefined
|
|
35
|
+
|
|
36
|
+
const session: Session = typeof data === 'string' ? JSON.parse(data) : data
|
|
37
|
+
|
|
38
|
+
if (Date.now() > session.expiresAt) {
|
|
39
|
+
await redis.del(sessionKey(token))
|
|
40
|
+
return undefined
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
return session
|
|
44
|
+
},
|
|
45
|
+
|
|
46
|
+
async removeSession(token: string): Promise<boolean> {
|
|
47
|
+
const result = await redis.del(sessionKey(token))
|
|
48
|
+
return result > 0
|
|
49
|
+
},
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
/** Get the default session duration in milliseconds */
|
|
54
|
+
export function getDefaultSessionDuration(): number {
|
|
55
|
+
return DEFAULT_SESSION_DURATION
|
|
56
|
+
}
|