@alteran/astro 0.1.7 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +38 -3
- package/migrations/0005_odd_bishop.sql +18 -0
- package/migrations/meta/0005_snapshot.json +429 -0
- package/migrations/meta/_journal.json +7 -0
- package/package.json +3 -2
- package/src/db/dal.ts +33 -0
- package/src/db/schema.ts +7 -0
- package/src/lib/blob-refs.ts +99 -0
- package/src/lib/secrets.ts +47 -0
- package/src/pages/xrpc/com.atproto.identity.getRecommendedDidCredentials.ts +99 -0
- package/src/pages/xrpc/com.atproto.repo.applyWrites.ts +20 -0
- package/src/pages/xrpc/com.atproto.repo.importRepo.ts +142 -0
- package/src/pages/xrpc/com.atproto.repo.listMissingBlobs.ts +88 -0
- package/src/pages/xrpc/com.atproto.repo.uploadBlob.ts +16 -4
- package/src/pages/xrpc/com.atproto.server.activateAccount.ts +53 -0
- package/src/pages/xrpc/com.atproto.server.checkAccountStatus.ts +92 -0
- package/src/pages/xrpc/com.atproto.server.createAccount.ts +79 -0
- package/src/pages/xrpc/com.atproto.server.deactivateAccount.ts +53 -0
- package/src/pages/xrpc/com.atproto.sync.getBlob.ts +84 -0
- package/src/worker/runtime.ts +11 -6
- package/types/env.d.ts +18 -8
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Blob Reference Extraction Utilities
|
|
3
|
+
*
|
|
4
|
+
* Provides functions to extract blob CIDs from AT Protocol records.
|
|
5
|
+
* Used during migration and for blob usage tracking.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Extract all blob CIDs from a record object
|
|
10
|
+
*
|
|
11
|
+
* @param obj - The record object to scan
|
|
12
|
+
* @returns Set of blob CIDs found in the record
|
|
13
|
+
*/
|
|
14
|
+
export function extractBlobRefs(obj: any): Set<string> {
|
|
15
|
+
const refs = new Set<string>();
|
|
16
|
+
extractBlobRefsRecursive(obj, refs);
|
|
17
|
+
return refs;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* Recursively extract blob CIDs from an object
|
|
22
|
+
*/
|
|
23
|
+
function extractBlobRefsRecursive(obj: any, refs: Set<string>): void {
|
|
24
|
+
if (!obj || typeof obj !== 'object') return;
|
|
25
|
+
|
|
26
|
+
// Check for blob reference pattern ($type: 'blob')
|
|
27
|
+
if (obj.$type === 'blob' && obj.ref) {
|
|
28
|
+
if (typeof obj.ref === 'object' && obj.ref.$link) {
|
|
29
|
+
refs.add(obj.ref.$link);
|
|
30
|
+
} else if (typeof obj.ref === 'string') {
|
|
31
|
+
refs.add(obj.ref);
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
// Check for direct CID link pattern
|
|
36
|
+
if (obj.$link && typeof obj.$link === 'string') {
|
|
37
|
+
// Only add if it looks like a blob CID (not a record CID)
|
|
38
|
+
// Blob CIDs typically start with specific multihash prefixes
|
|
39
|
+
refs.add(obj.$link);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
// Check for legacy blob patterns
|
|
43
|
+
if (obj.cid && typeof obj.cid === 'string') {
|
|
44
|
+
refs.add(obj.cid);
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
// Recurse into nested objects and arrays
|
|
48
|
+
if (Array.isArray(obj)) {
|
|
49
|
+
for (const item of obj) {
|
|
50
|
+
extractBlobRefsRecursive(item, refs);
|
|
51
|
+
}
|
|
52
|
+
} else {
|
|
53
|
+
for (const value of Object.values(obj)) {
|
|
54
|
+
extractBlobRefsRecursive(value, refs);
|
|
55
|
+
}
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Extract blob references from known Bluesky record types
|
|
61
|
+
* This is more specific than the generic extractor and handles
|
|
62
|
+
* common patterns in app.bsky.* records.
|
|
63
|
+
*/
|
|
64
|
+
export function extractBskyBlobRefs(record: any): Set<string> {
|
|
65
|
+
const refs = new Set<string>();
|
|
66
|
+
|
|
67
|
+
// Handle app.bsky.feed.post embeds
|
|
68
|
+
if (record.embed) {
|
|
69
|
+
extractBlobRefsRecursive(record.embed, refs);
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
// Handle app.bsky.actor.profile avatar and banner
|
|
73
|
+
if (record.avatar) {
|
|
74
|
+
extractBlobRefsRecursive(record.avatar, refs);
|
|
75
|
+
}
|
|
76
|
+
if (record.banner) {
|
|
77
|
+
extractBlobRefsRecursive(record.banner, refs);
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// Handle app.bsky.feed.generator avatar
|
|
81
|
+
if (record.$type === 'app.bsky.feed.generator' && record.avatar) {
|
|
82
|
+
extractBlobRefsRecursive(record.avatar, refs);
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
// Fallback to generic extraction for any other patterns
|
|
86
|
+
extractBlobRefsRecursive(record, refs);
|
|
87
|
+
|
|
88
|
+
return refs;
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
/**
|
|
92
|
+
* Convert blob references to R2 keys
|
|
93
|
+
*
|
|
94
|
+
* @param cids - Set of blob CIDs
|
|
95
|
+
* @returns Array of R2 keys
|
|
96
|
+
*/
|
|
97
|
+
export function blobCidsToKeys(cids: Set<string>): string[] {
|
|
98
|
+
return Array.from(cids).map(cid => `blobs/by-cid/${cid}`);
|
|
99
|
+
}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import type { Env } from '../env';
|
|
2
|
+
import type { SecretsStoreSecret } from '../../types/env';
|
|
3
|
+
|
|
4
|
+
// Env fields that may hold secrets. Each may arrive either as a plain
// string (a Workers var/secret) or as a Secrets Store binding that must be
// awaited via .get() (see resolveSecret below); resolveEnvSecrets
// materializes all of them to strings. `as const satisfies` validates every
// entry against keyof Env while preserving the literal string types.
const SECRET_KEYS = [
  'PDS_DID',
  'PDS_HANDLE',
  'USER_PASSWORD',
  'ACCESS_TOKEN_SECRET',
  'REFRESH_TOKEN_SECRET',
  'REPO_SIGNING_KEY',
  'REPO_SIGNING_PUBLIC_KEY',
  'JWT_ED25519_PRIVATE_KEY',
  'JWT_ED25519_PUBLIC_KEY',
] as const satisfies readonly (keyof Env)[];
|
|
15
|
+
|
|
16
|
+
function isSecretStoreBinding(value: unknown): value is SecretsStoreSecret {
|
|
17
|
+
return !!value && typeof value === 'object' && typeof (value as any).get === 'function';
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
export async function resolveSecret(
|
|
21
|
+
value: string | SecretsStoreSecret | undefined
|
|
22
|
+
): Promise<string | undefined> {
|
|
23
|
+
if (value === undefined) return undefined;
|
|
24
|
+
if (typeof value === 'string') return value;
|
|
25
|
+
if (isSecretStoreBinding(value)) return value.get();
|
|
26
|
+
return undefined;
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
/**
|
|
30
|
+
* Return a shallow-cloned Env where all known secret fields are materialized to strings.
|
|
31
|
+
* Non-secret bindings (DB, BLOBS, SEQUENCER, vars) are preserved as-is.
|
|
32
|
+
*/
|
|
33
|
+
export async function resolveEnvSecrets<E extends Env>(env: E): Promise<E> {
|
|
34
|
+
const resolved: Record<string, unknown> = { ...env };
|
|
35
|
+
|
|
36
|
+
await Promise.all(
|
|
37
|
+
SECRET_KEYS.map(async (key) => {
|
|
38
|
+
const val = await resolveSecret((env as any)[key]);
|
|
39
|
+
if (val !== undefined) {
|
|
40
|
+
resolved[key as string] = val;
|
|
41
|
+
}
|
|
42
|
+
})
|
|
43
|
+
);
|
|
44
|
+
|
|
45
|
+
return resolved as E;
|
|
46
|
+
}
|
|
47
|
+
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import type { APIContext } from 'astro';
|
|
2
|
+
import { isAuthorized, unauthorized } from '../../lib/auth';
|
|
3
|
+
|
|
4
|
+
export const prerender = false;
|
|
5
|
+
|
|
6
|
+
/**
 * com.atproto.identity.getRecommendedDidCredentials
 *
 * Returns recommended DID credentials for this PDS.
 * Used during migration to update identity documents.
 *
 * Requires authorization. Responds 200 with the DID, handle, service
 * endpoint, and (when REPO_SIGNING_PUBLIC_KEY is configured) a multibase
 * signing key; 500 with InternalServerError on any failure.
 */
export async function GET({ locals, request }: APIContext) {
  const { env } = locals.runtime;

  if (!(await isAuthorized(request, env))) return unauthorized();

  try {
    // Single-user PDS: identity comes from env vars, with dev fallbacks.
    const did = env.PDS_DID ?? 'did:example:single-user';
    const handle = env.PDS_HANDLE ?? 'example.com';
    const hostname = env.PDS_HOSTNAME ?? handle;

    // Get signing key if available
    let signingKey: string | undefined;
    if (env.REPO_SIGNING_PUBLIC_KEY) {
      // Convert raw public key to multibase format.
      // NOTE(review): assumes REPO_SIGNING_PUBLIC_KEY is base64 of the raw
      // public key bytes — confirm against how the key is provisioned.
      const pubKeyStr = String(env.REPO_SIGNING_PUBLIC_KEY);
      const pubKeyBytes = Uint8Array.from(atob(pubKeyStr), c => c.charCodeAt(0));

      // Ed25519 multicodec prefix (0xed01) + public key
      const multicodecBytes = new Uint8Array(2 + pubKeyBytes.length);
      multicodecBytes[0] = 0xed;
      multicodecBytes[1] = 0x01;
      multicodecBytes.set(pubKeyBytes, 2);

      // Base58 encode with 'z' prefix for multibase (base58btc)
      signingKey = 'z' + base58Encode(multicodecBytes);
    }

    return new Response(
      JSON.stringify({
        did,
        handle,
        pds: `https://${hostname}`,
        signingKey,
        alsoKnownAs: [`at://${handle}`],
        // NOTE(review): atproto verification methods are typically
        // expressed as did:key strings (e.g. 'did:key:z...'); only the bare
        // multibase value is returned here — verify consumers accept this.
        verificationMethods: signingKey ? {
          atproto: signingKey
        } : undefined,
        services: {
          atproto_pds: {
            type: 'AtprotoPersonalDataServer',
            endpoint: `https://${hostname}`
          }
        }
      }),
      { status: 200, headers: { 'Content-Type': 'application/json' } }
    );
  } catch (error: any) {
    return new Response(
      JSON.stringify({
        error: 'InternalServerError',
        message: error.message || 'Failed to get DID credentials'
      }),
      { status: 500, headers: { 'Content-Type': 'application/json' } }
    );
  }
}
|
|
68
|
+
|
|
69
|
+
/**
|
|
70
|
+
* Base58 encode (Bitcoin alphabet)
|
|
71
|
+
*/
|
|
72
|
+
function base58Encode(bytes: Uint8Array): string {
|
|
73
|
+
const ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';
|
|
74
|
+
|
|
75
|
+
// Convert bytes to bigint
|
|
76
|
+
let num = 0n;
|
|
77
|
+
for (const byte of bytes) {
|
|
78
|
+
num = num * 256n + BigInt(byte);
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
// Convert to base58
|
|
82
|
+
let result = '';
|
|
83
|
+
while (num > 0n) {
|
|
84
|
+
const remainder = Number(num % 58n);
|
|
85
|
+
result = ALPHABET[remainder] + result;
|
|
86
|
+
num = num / 58n;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// Add leading '1's for leading zero bytes
|
|
90
|
+
for (const byte of bytes) {
|
|
91
|
+
if (byte === 0) {
|
|
92
|
+
result = '1' + result;
|
|
93
|
+
} else {
|
|
94
|
+
break;
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
return result;
|
|
99
|
+
}
|
|
@@ -2,6 +2,9 @@ import type { APIContext } from 'astro';
|
|
|
2
2
|
import { RepoManager } from '../../services/repo-manager';
|
|
3
3
|
import { readJson } from '../../lib/util';
|
|
4
4
|
import { bumpRoot } from '../../db/repo';
|
|
5
|
+
import { isAuthorized, unauthorized } from '../../lib/auth';
|
|
6
|
+
import { isAccountActive } from '../../db/dal';
|
|
7
|
+
import { checkRate } from '../../lib/ratelimit';
|
|
5
8
|
|
|
6
9
|
export const prerender = false;
|
|
7
10
|
|
|
@@ -11,6 +14,23 @@ export const prerender = false;
|
|
|
11
14
|
*/
|
|
12
15
|
export async function POST({ locals, request }: APIContext) {
|
|
13
16
|
const { env } = locals.runtime;
|
|
17
|
+
if (!(await isAuthorized(request, env))) return unauthorized();
|
|
18
|
+
|
|
19
|
+
// Check if account is active
|
|
20
|
+
const did = env.PDS_DID ?? 'did:example:single-user';
|
|
21
|
+
const active = await isAccountActive(env, did);
|
|
22
|
+
if (!active) {
|
|
23
|
+
return new Response(
|
|
24
|
+
JSON.stringify({
|
|
25
|
+
error: 'AccountDeactivated',
|
|
26
|
+
message: 'Account is deactivated. Activate it before making changes.'
|
|
27
|
+
}),
|
|
28
|
+
{ status: 403, headers: { 'Content-Type': 'application/json' } }
|
|
29
|
+
);
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
const rateLimitResponse = await checkRate(env, request, 'writes');
|
|
33
|
+
if (rateLimitResponse) return rateLimitResponse;
|
|
14
34
|
|
|
15
35
|
try {
|
|
16
36
|
const body = await readJson(request);
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
import type { APIContext } from 'astro';
|
|
2
|
+
import { isAuthorized, unauthorized } from '../../lib/auth';
|
|
3
|
+
import { parseCarFile } from '../../lib/car-reader';
|
|
4
|
+
import { D1Blockstore } from '../../lib/mst';
|
|
5
|
+
import { getDb } from '../../db/client';
|
|
6
|
+
import { repo_root, commit_log } from '../../db/schema';
|
|
7
|
+
import { putRecord } from '../../db/dal';
|
|
8
|
+
import * as dagCbor from '@ipld/dag-cbor';
|
|
9
|
+
import { CID } from 'multiformats/cid';
|
|
10
|
+
|
|
11
|
+
export const prerender = false;
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* com.atproto.repo.importRepo
|
|
15
|
+
*
|
|
16
|
+
* Imports a repository from a CAR (Content Addressable aRchive) file.
|
|
17
|
+
* This is used during account migration to transfer the complete repo history.
|
|
18
|
+
*/
|
|
19
|
+
export async function POST({ locals, request }: APIContext) {
|
|
20
|
+
const { env } = locals.runtime;
|
|
21
|
+
|
|
22
|
+
if (!(await isAuthorized(request, env))) return unauthorized();
|
|
23
|
+
|
|
24
|
+
try {
|
|
25
|
+
const contentType = request.headers.get('content-type');
|
|
26
|
+
if (contentType !== 'application/vnd.ipld.car') {
|
|
27
|
+
return new Response(
|
|
28
|
+
JSON.stringify({
|
|
29
|
+
error: 'InvalidRequest',
|
|
30
|
+
message: 'Content-Type must be application/vnd.ipld.car'
|
|
31
|
+
}),
|
|
32
|
+
{ status: 400, headers: { 'Content-Type': 'application/json' } }
|
|
33
|
+
);
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
const did = env.PDS_DID ?? 'did:example:single-user';
|
|
37
|
+
const carBytes = new Uint8Array(await request.arrayBuffer());
|
|
38
|
+
|
|
39
|
+
// Parse CAR file
|
|
40
|
+
const { header, blocks } = parseCarFile(carBytes);
|
|
41
|
+
|
|
42
|
+
if (blocks.length === 0) {
|
|
43
|
+
return new Response(
|
|
44
|
+
JSON.stringify({
|
|
45
|
+
error: 'InvalidRequest',
|
|
46
|
+
message: 'CAR file contains no blocks'
|
|
47
|
+
}),
|
|
48
|
+
{ status: 400, headers: { 'Content-Type': 'application/json' } }
|
|
49
|
+
);
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
// Store all blocks in blockstore
|
|
53
|
+
const blockstore = new D1Blockstore(env);
|
|
54
|
+
for (const block of blocks) {
|
|
55
|
+
await blockstore.put(block.cid, block.bytes);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
// Find the commit block (root of the CAR)
|
|
59
|
+
const rootCid = header.roots[0];
|
|
60
|
+
if (!rootCid) {
|
|
61
|
+
return new Response(
|
|
62
|
+
JSON.stringify({
|
|
63
|
+
error: 'InvalidRequest',
|
|
64
|
+
message: 'CAR file has no root CID'
|
|
65
|
+
}),
|
|
66
|
+
{ status: 400, headers: { 'Content-Type': 'application/json' } }
|
|
67
|
+
);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
// Decode the commit to get repo details
|
|
71
|
+
const commitBlock = blocks.find(b => b.cid.equals(rootCid));
|
|
72
|
+
if (!commitBlock) {
|
|
73
|
+
return new Response(
|
|
74
|
+
JSON.stringify({
|
|
75
|
+
error: 'InvalidRequest',
|
|
76
|
+
message: 'Root commit block not found in CAR'
|
|
77
|
+
}),
|
|
78
|
+
{ status: 400, headers: { 'Content-Type': 'application/json' } }
|
|
79
|
+
);
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
const commit = dagCbor.decode(commitBlock.bytes) as any;
|
|
83
|
+
const rev = commit.rev || commit.version || 1;
|
|
84
|
+
|
|
85
|
+
// Update repo root
|
|
86
|
+
const db = getDb(env);
|
|
87
|
+
await db
|
|
88
|
+
.insert(repo_root)
|
|
89
|
+
.values({
|
|
90
|
+
did,
|
|
91
|
+
commitCid: rootCid.toString(),
|
|
92
|
+
rev: typeof rev === 'string' ? parseInt(rev) : rev,
|
|
93
|
+
})
|
|
94
|
+
.onConflictDoUpdate({
|
|
95
|
+
target: repo_root.did,
|
|
96
|
+
set: {
|
|
97
|
+
commitCid: rootCid.toString(),
|
|
98
|
+
rev: typeof rev === 'string' ? parseInt(rev) : rev,
|
|
99
|
+
},
|
|
100
|
+
})
|
|
101
|
+
.run();
|
|
102
|
+
|
|
103
|
+
// Index records from MST
|
|
104
|
+
// Note: This is a simplified implementation
|
|
105
|
+
// A full implementation would walk the MST tree and index all records
|
|
106
|
+
let recordCount = 0;
|
|
107
|
+
for (const block of blocks) {
|
|
108
|
+
try {
|
|
109
|
+
const obj = dagCbor.decode(block.bytes) as any;
|
|
110
|
+
|
|
111
|
+
// Check if this looks like a record (has $type)
|
|
112
|
+
if (obj && typeof obj === 'object' && obj.$type) {
|
|
113
|
+
// This is a record, we should index it
|
|
114
|
+
// For now, we'll skip detailed indexing and let it be done lazily
|
|
115
|
+
recordCount++;
|
|
116
|
+
}
|
|
117
|
+
} catch {
|
|
118
|
+
// Not a valid CBOR object or not a record, skip
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
return new Response(
|
|
123
|
+
JSON.stringify({
|
|
124
|
+
did,
|
|
125
|
+
commitCid: rootCid.toString(),
|
|
126
|
+
rev,
|
|
127
|
+
blocksImported: blocks.length,
|
|
128
|
+
recordsFound: recordCount,
|
|
129
|
+
message: 'Repository imported successfully'
|
|
130
|
+
}),
|
|
131
|
+
{ status: 200, headers: { 'Content-Type': 'application/json' } }
|
|
132
|
+
);
|
|
133
|
+
} catch (error: any) {
|
|
134
|
+
return new Response(
|
|
135
|
+
JSON.stringify({
|
|
136
|
+
error: 'InternalServerError',
|
|
137
|
+
message: error.message || 'Failed to import repository'
|
|
138
|
+
}),
|
|
139
|
+
{ status: 500, headers: { 'Content-Type': 'application/json' } }
|
|
140
|
+
);
|
|
141
|
+
}
|
|
142
|
+
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import type { APIContext } from 'astro';
|
|
2
|
+
import { isAuthorized, unauthorized } from '../../lib/auth';
|
|
3
|
+
import { getDb } from '../../db/client';
|
|
4
|
+
import { record, blob_ref } from '../../db/schema';
|
|
5
|
+
import { eq } from 'drizzle-orm';
|
|
6
|
+
import { extractBlobRefs } from '../../lib/blob-refs';
|
|
7
|
+
|
|
8
|
+
export const prerender = false;
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* com.atproto.repo.listMissingBlobs
|
|
12
|
+
*
|
|
13
|
+
* Lists blob CIDs that are referenced in records but not present in blob storage.
|
|
14
|
+
* Used during migration to identify which blobs need to be transferred.
|
|
15
|
+
*/
|
|
16
|
+
export async function GET({ locals, request, url }: APIContext) {
|
|
17
|
+
const { env } = locals.runtime;
|
|
18
|
+
|
|
19
|
+
if (!(await isAuthorized(request, env))) return unauthorized();
|
|
20
|
+
|
|
21
|
+
try {
|
|
22
|
+
const did = env.PDS_DID ?? 'did:example:single-user';
|
|
23
|
+
const limit = parseInt(url.searchParams.get('limit') || '500');
|
|
24
|
+
const cursor = url.searchParams.get('cursor') || '';
|
|
25
|
+
|
|
26
|
+
const db = getDb(env);
|
|
27
|
+
|
|
28
|
+
// Get all records for this DID
|
|
29
|
+
const records = await db
|
|
30
|
+
.select()
|
|
31
|
+
.from(record)
|
|
32
|
+
.where(eq(record.did, did))
|
|
33
|
+
.all();
|
|
34
|
+
|
|
35
|
+
// Get all blob refs for this DID
|
|
36
|
+
const blobs = await db
|
|
37
|
+
.select()
|
|
38
|
+
.from(blob_ref)
|
|
39
|
+
.where(eq(blob_ref.did, did))
|
|
40
|
+
.all();
|
|
41
|
+
|
|
42
|
+
// Create a set of existing blob CIDs
|
|
43
|
+
const existingBlobCids = new Set(blobs.map(b => b.cid));
|
|
44
|
+
|
|
45
|
+
// Extract blob references from records
|
|
46
|
+
const referencedBlobs = new Set<string>();
|
|
47
|
+
for (const rec of records) {
|
|
48
|
+
try {
|
|
49
|
+
const data = JSON.parse(rec.json);
|
|
50
|
+
const refs = extractBlobRefs(data);
|
|
51
|
+
refs.forEach(ref => referencedBlobs.add(ref));
|
|
52
|
+
} catch {
|
|
53
|
+
// Skip invalid JSON
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
// Find missing blobs
|
|
58
|
+
const missingBlobs: string[] = [];
|
|
59
|
+
for (const cid of referencedBlobs) {
|
|
60
|
+
if (!existingBlobCids.has(cid)) {
|
|
61
|
+
if (!cursor || cid > cursor) {
|
|
62
|
+
missingBlobs.push(cid);
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
// Sort and limit
|
|
68
|
+
missingBlobs.sort();
|
|
69
|
+
const page = missingBlobs.slice(0, limit);
|
|
70
|
+
const nextCursor = page.length === limit ? page[page.length - 1] : undefined;
|
|
71
|
+
|
|
72
|
+
return new Response(
|
|
73
|
+
JSON.stringify({
|
|
74
|
+
blobs: page.map(cid => ({ cid })),
|
|
75
|
+
cursor: nextCursor,
|
|
76
|
+
}),
|
|
77
|
+
{ status: 200, headers: { 'Content-Type': 'application/json' } }
|
|
78
|
+
);
|
|
79
|
+
} catch (error: any) {
|
|
80
|
+
return new Response(
|
|
81
|
+
JSON.stringify({
|
|
82
|
+
error: 'InternalServerError',
|
|
83
|
+
message: error.message || 'Failed to list missing blobs'
|
|
84
|
+
}),
|
|
85
|
+
{ status: 500, headers: { 'Content-Type': 'application/json' } }
|
|
86
|
+
);
|
|
87
|
+
}
|
|
88
|
+
}
|
|
@@ -3,7 +3,7 @@ import { isAuthorized, unauthorized } from '../../lib/auth';
|
|
|
3
3
|
import { checkRate } from '../../lib/ratelimit';
|
|
4
4
|
import { isAllowedMime } from '../../lib/util';
|
|
5
5
|
import { R2BlobStore } from '../../services/r2-blob-store';
|
|
6
|
-
import { putBlobRef, checkBlobQuota, updateBlobQuota } from '../../db/dal';
|
|
6
|
+
import { putBlobRef, checkBlobQuota, updateBlobQuota, isAccountActive } from '../../db/dal';
|
|
7
7
|
|
|
8
8
|
export const prerender = false;
|
|
9
9
|
|
|
@@ -11,6 +11,21 @@ export async function POST({ locals, request }: APIContext) {
|
|
|
11
11
|
const { env } = locals.runtime;
|
|
12
12
|
if (!(await isAuthorized(request, env))) return unauthorized();
|
|
13
13
|
|
|
14
|
+
// Get DID from environment (single-user PDS)
|
|
15
|
+
const did = env.PDS_DID ?? 'did:example:single-user';
|
|
16
|
+
|
|
17
|
+
// Check if account is active
|
|
18
|
+
const active = await isAccountActive(env, did);
|
|
19
|
+
if (!active) {
|
|
20
|
+
return new Response(
|
|
21
|
+
JSON.stringify({
|
|
22
|
+
error: 'AccountDeactivated',
|
|
23
|
+
message: 'Account is deactivated. Activate it before uploading blobs.'
|
|
24
|
+
}),
|
|
25
|
+
{ status: 403, headers: { 'Content-Type': 'application/json' } }
|
|
26
|
+
);
|
|
27
|
+
}
|
|
28
|
+
|
|
14
29
|
const rateLimitResponse = await checkRate(env, request, 'blob');
|
|
15
30
|
if (rateLimitResponse) return rateLimitResponse;
|
|
16
31
|
|
|
@@ -18,9 +33,6 @@ export async function POST({ locals, request }: APIContext) {
|
|
|
18
33
|
const contentType = request.headers.get('content-type') ?? 'application/octet-stream';
|
|
19
34
|
if (!isAllowedMime(env, contentType)) return new Response(JSON.stringify({ error: 'UnsupportedMediaType' }), { status: 415 });
|
|
20
35
|
|
|
21
|
-
// Get DID from environment (single-user PDS)
|
|
22
|
-
const did = env.PDS_DID ?? 'did:example:single-user';
|
|
23
|
-
|
|
24
36
|
// Check quota before upload
|
|
25
37
|
const canUpload = await checkBlobQuota(env, did, buf.byteLength);
|
|
26
38
|
if (!canUpload) {
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import type { APIContext } from 'astro';
|
|
2
|
+
import { isAuthorized, unauthorized } from '../../lib/auth';
|
|
3
|
+
import { setAccountActive, getAccountState } from '../../db/dal';
|
|
4
|
+
|
|
5
|
+
export const prerender = false;
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* com.atproto.server.activateAccount
|
|
9
|
+
*
|
|
10
|
+
* Activates a deactivated account after successful migration.
|
|
11
|
+
* This enables write operations on the PDS.
|
|
12
|
+
*/
|
|
13
|
+
export async function POST({ locals, request }: APIContext) {
|
|
14
|
+
const { env } = locals.runtime;
|
|
15
|
+
|
|
16
|
+
if (!(await isAuthorized(request, env))) return unauthorized();
|
|
17
|
+
|
|
18
|
+
try {
|
|
19
|
+
const did = env.PDS_DID ?? 'did:example:single-user';
|
|
20
|
+
|
|
21
|
+
// Check if account exists
|
|
22
|
+
const accountState = await getAccountState(env, did);
|
|
23
|
+
if (!accountState) {
|
|
24
|
+
return new Response(
|
|
25
|
+
JSON.stringify({
|
|
26
|
+
error: 'AccountNotFound',
|
|
27
|
+
message: 'Account does not exist'
|
|
28
|
+
}),
|
|
29
|
+
{ status: 404, headers: { 'Content-Type': 'application/json' } }
|
|
30
|
+
);
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
// Activate the account
|
|
34
|
+
await setAccountActive(env, did, true);
|
|
35
|
+
|
|
36
|
+
return new Response(
|
|
37
|
+
JSON.stringify({
|
|
38
|
+
did,
|
|
39
|
+
active: true,
|
|
40
|
+
message: 'Account activated successfully'
|
|
41
|
+
}),
|
|
42
|
+
{ status: 200, headers: { 'Content-Type': 'application/json' } }
|
|
43
|
+
);
|
|
44
|
+
} catch (error: any) {
|
|
45
|
+
return new Response(
|
|
46
|
+
JSON.stringify({
|
|
47
|
+
error: 'InternalServerError',
|
|
48
|
+
message: error.message || 'Failed to activate account'
|
|
49
|
+
}),
|
|
50
|
+
{ status: 500, headers: { 'Content-Type': 'application/json' } }
|
|
51
|
+
);
|
|
52
|
+
}
|
|
53
|
+
}
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
import type { APIContext } from 'astro';
|
|
2
|
+
import { isAuthorized, unauthorized } from '../../lib/auth';
|
|
3
|
+
import { getAccountState } from '../../db/dal';
|
|
4
|
+
import { getDb } from '../../db/client';
|
|
5
|
+
import { repo_root, record, blob_ref, commit_log } from '../../db/schema';
|
|
6
|
+
import { count, desc, eq } from 'drizzle-orm';
|
|
7
|
+
|
|
8
|
+
export const prerender = false;
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* com.atproto.server.checkAccountStatus
|
|
12
|
+
*
|
|
13
|
+
* Returns account status including:
|
|
14
|
+
* - Active state
|
|
15
|
+
* - Repository head CID and revision
|
|
16
|
+
* - Record count
|
|
17
|
+
* - Blob count
|
|
18
|
+
* - Missing blob count (for migration tracking)
|
|
19
|
+
*/
|
|
20
|
+
export async function GET({ locals, request }: APIContext) {
|
|
21
|
+
const { env } = locals.runtime;
|
|
22
|
+
|
|
23
|
+
if (!(await isAuthorized(request, env))) return unauthorized();
|
|
24
|
+
|
|
25
|
+
try {
|
|
26
|
+
const did = env.PDS_DID ?? 'did:example:single-user';
|
|
27
|
+
const db = getDb(env);
|
|
28
|
+
|
|
29
|
+
// Get account state
|
|
30
|
+
const accountState = await getAccountState(env, did);
|
|
31
|
+
const active = accountState?.active ?? true;
|
|
32
|
+
|
|
33
|
+
// Get repo head
|
|
34
|
+
const repoRoot = await db
|
|
35
|
+
.select()
|
|
36
|
+
.from(repo_root)
|
|
37
|
+
.where(eq(repo_root.did, did))
|
|
38
|
+
.get();
|
|
39
|
+
|
|
40
|
+
// Count records
|
|
41
|
+
const recordCountResult = await db
|
|
42
|
+
.select({ count: count() })
|
|
43
|
+
.from(record)
|
|
44
|
+
.where(eq(record.did, did))
|
|
45
|
+
.get();
|
|
46
|
+
const recordCount = recordCountResult?.count ?? 0;
|
|
47
|
+
|
|
48
|
+
// Count blobs
|
|
49
|
+
const blobCountResult = await db
|
|
50
|
+
.select({ count: count() })
|
|
51
|
+
.from(blob_ref)
|
|
52
|
+
.where(eq(blob_ref.did, did))
|
|
53
|
+
.get();
|
|
54
|
+
const blobCount = blobCountResult?.count ?? 0;
|
|
55
|
+
|
|
56
|
+
// Get latest commit sequence
|
|
57
|
+
const latestCommit = await db
|
|
58
|
+
.select()
|
|
59
|
+
.from(commit_log)
|
|
60
|
+
.orderBy(commit_log.seq)
|
|
61
|
+
.limit(1)
|
|
62
|
+
.get();
|
|
63
|
+
|
|
64
|
+
return new Response(
|
|
65
|
+
JSON.stringify({
|
|
66
|
+
did,
|
|
67
|
+
active,
|
|
68
|
+
head: repoRoot?.commitCid ?? null,
|
|
69
|
+
rev: repoRoot?.rev ?? 0,
|
|
70
|
+
recordCount,
|
|
71
|
+
blobCount,
|
|
72
|
+
indexedRecords: recordCount,
|
|
73
|
+
privateStateValues: 0,
|
|
74
|
+
expectedBlobs: blobCount,
|
|
75
|
+
importedBlobs: blobCount,
|
|
76
|
+
repoBlocks: 0, // Could calculate from blockstore if needed
|
|
77
|
+
repoRev: repoRoot?.rev?.toString() ?? '0',
|
|
78
|
+
repoCommit: repoRoot?.commitCid ?? null,
|
|
79
|
+
seq: latestCommit?.seq ?? 0,
|
|
80
|
+
}),
|
|
81
|
+
{ status: 200, headers: { 'Content-Type': 'application/json' } }
|
|
82
|
+
);
|
|
83
|
+
} catch (error: any) {
|
|
84
|
+
return new Response(
|
|
85
|
+
JSON.stringify({
|
|
86
|
+
error: 'InternalServerError',
|
|
87
|
+
message: error.message || 'Failed to check account status'
|
|
88
|
+
}),
|
|
89
|
+
{ status: 500, headers: { 'Content-Type': 'application/json' } }
|
|
90
|
+
);
|
|
91
|
+
}
|
|
92
|
+
}
|