@getcirrus/pds 0.2.2
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/README.md +442 -0
- package/dist/cli.js +1609 -0
- package/dist/index.d.ts +531 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +6227 -0
- package/dist/index.js.map +1 -0
- package/package.json +76 -0
package/dist/cli.js
ADDED
@@ -0,0 +1,1609 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { defineCommand, runMain } from "citty";
|
|
3
|
+
import * as p from "@clack/prompts";
|
|
4
|
+
import { randomBytes } from "node:crypto";
|
|
5
|
+
import { Secp256k1Keypair } from "@atproto/crypto";
|
|
6
|
+
import bcrypt from "bcryptjs";
|
|
7
|
+
import { spawn } from "node:child_process";
|
|
8
|
+
import { experimental_patchConfig, experimental_readRawConfig } from "wrangler";
|
|
9
|
+
import { existsSync, readFileSync, writeFileSync } from "node:fs";
|
|
10
|
+
import { resolve } from "node:path";
|
|
11
|
+
import { AtprotoDohHandleResolver } from "@atproto-labs/handle-resolver";
|
|
12
|
+
import { check, didDocument, getPdsEndpoint } from "@atproto/common-web";
|
|
13
|
+
import pc from "picocolors";
|
|
14
|
+
|
|
15
|
+
//#region src/cli/utils/wrangler.ts
|
|
16
|
+
/**
|
|
17
|
+
* Wrangler integration utilities for setting vars and secrets
|
|
18
|
+
*/
|
|
19
|
+
/**
|
|
20
|
+
* Set a var in wrangler.jsonc using experimental_patchConfig
|
|
21
|
+
*/
|
|
22
|
+
function setVar(name, value) {
|
|
23
|
+
const { configPath } = experimental_readRawConfig({});
|
|
24
|
+
if (!configPath) throw new Error("No wrangler config found");
|
|
25
|
+
experimental_patchConfig(configPath, { vars: { [name]: value } });
|
|
26
|
+
}
|
|
27
|
+
/**
|
|
28
|
+
* Set multiple vars in wrangler.jsonc
|
|
29
|
+
*/
|
|
30
|
+
function setVars(vars) {
|
|
31
|
+
const { configPath } = experimental_readRawConfig({});
|
|
32
|
+
if (!configPath) throw new Error("No wrangler config found");
|
|
33
|
+
experimental_patchConfig(configPath, { vars });
|
|
34
|
+
}
|
|
35
|
+
/**
|
|
36
|
+
* Get current vars from wrangler config
|
|
37
|
+
*/
|
|
38
|
+
function getVars() {
|
|
39
|
+
const { rawConfig } = experimental_readRawConfig({});
|
|
40
|
+
return rawConfig.vars || {};
|
|
41
|
+
}
|
|
42
|
+
/**
|
|
43
|
+
* Get current worker name from wrangler config
|
|
44
|
+
*/
|
|
45
|
+
function getWorkerName() {
|
|
46
|
+
const { rawConfig } = experimental_readRawConfig({});
|
|
47
|
+
return rawConfig.name;
|
|
48
|
+
}
|
|
49
|
+
/**
|
|
50
|
+
* Set worker name in wrangler config
|
|
51
|
+
*/
|
|
52
|
+
function setWorkerName(name) {
|
|
53
|
+
const { configPath } = experimental_readRawConfig({});
|
|
54
|
+
if (!configPath) throw new Error("No wrangler config found");
|
|
55
|
+
experimental_patchConfig(configPath, { name });
|
|
56
|
+
}
|
|
57
|
+
/**
|
|
58
|
+
* Set a secret using wrangler secret put
|
|
59
|
+
*/
|
|
60
|
+
async function setSecret(name, value) {
|
|
61
|
+
return new Promise((resolve$1, reject) => {
|
|
62
|
+
const child = spawn("wrangler", [
|
|
63
|
+
"secret",
|
|
64
|
+
"put",
|
|
65
|
+
name
|
|
66
|
+
], { stdio: [
|
|
67
|
+
"pipe",
|
|
68
|
+
"inherit",
|
|
69
|
+
"inherit"
|
|
70
|
+
] });
|
|
71
|
+
child.stdin.write(value);
|
|
72
|
+
child.stdin.end();
|
|
73
|
+
child.on("close", (code) => {
|
|
74
|
+
if (code === 0) resolve$1();
|
|
75
|
+
else reject(/* @__PURE__ */ new Error(`wrangler secret put ${name} failed with code ${code}`));
|
|
76
|
+
});
|
|
77
|
+
child.on("error", reject);
|
|
78
|
+
});
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
//#endregion
|
|
82
|
+
//#region src/cli/utils/dotenv.ts
|
|
83
|
+
/**
|
|
84
|
+
* .dev.vars file utilities for local development
|
|
85
|
+
*/
|
|
86
|
+
const DEV_VARS_FILE = ".dev.vars";
|
|
87
|
+
/**
|
|
88
|
+
* Parse a .dev.vars file into a record
|
|
89
|
+
*/
|
|
90
|
+
function readDevVars(dir = process.cwd()) {
|
|
91
|
+
const filePath = resolve(dir, DEV_VARS_FILE);
|
|
92
|
+
if (!existsSync(filePath)) return {};
|
|
93
|
+
const content = readFileSync(filePath, "utf-8");
|
|
94
|
+
const vars = {};
|
|
95
|
+
for (const line of content.split("\n")) {
|
|
96
|
+
const trimmed = line.trim();
|
|
97
|
+
if (!trimmed || trimmed.startsWith("#")) continue;
|
|
98
|
+
const eqIndex = trimmed.indexOf("=");
|
|
99
|
+
if (eqIndex === -1) continue;
|
|
100
|
+
const key = trimmed.slice(0, eqIndex).trim();
|
|
101
|
+
let value = trimmed.slice(eqIndex + 1).trim();
|
|
102
|
+
if (value.startsWith("\"") && value.endsWith("\"") || value.startsWith("'") && value.endsWith("'")) value = value.slice(1, -1);
|
|
103
|
+
vars[key] = value;
|
|
104
|
+
}
|
|
105
|
+
return vars;
|
|
106
|
+
}
|
|
107
|
+
/**
|
|
108
|
+
* Quote a value if it contains special characters
|
|
109
|
+
*/
|
|
110
|
+
function quoteValue(value) {
|
|
111
|
+
if (value.includes(" ") || value.includes("\"") || value.includes("'")) return "\"" + value.replace(/"/g, "\\\"") + "\"";
|
|
112
|
+
return value;
|
|
113
|
+
}
|
|
114
|
+
/**
|
|
115
|
+
* Write vars to .dev.vars file, preserving comments and order
|
|
116
|
+
*/
|
|
117
|
+
function writeDevVars(vars, dir = process.cwd()) {
|
|
118
|
+
const filePath = resolve(dir, DEV_VARS_FILE);
|
|
119
|
+
let existingLines = [];
|
|
120
|
+
if (existsSync(filePath)) existingLines = readFileSync(filePath, "utf-8").split("\n");
|
|
121
|
+
const outputLines = [];
|
|
122
|
+
const updatedKeys = /* @__PURE__ */ new Set();
|
|
123
|
+
for (const line of existingLines) {
|
|
124
|
+
const trimmed = line.trim();
|
|
125
|
+
if (!trimmed || trimmed.startsWith("#")) {
|
|
126
|
+
outputLines.push(line);
|
|
127
|
+
continue;
|
|
128
|
+
}
|
|
129
|
+
const eqIndex = trimmed.indexOf("=");
|
|
130
|
+
if (eqIndex === -1) {
|
|
131
|
+
outputLines.push(line);
|
|
132
|
+
continue;
|
|
133
|
+
}
|
|
134
|
+
const key = trimmed.slice(0, eqIndex).trim();
|
|
135
|
+
if (key in vars) {
|
|
136
|
+
outputLines.push(key + "=" + quoteValue(vars[key]));
|
|
137
|
+
updatedKeys.add(key);
|
|
138
|
+
} else outputLines.push(line);
|
|
139
|
+
}
|
|
140
|
+
for (const [key, value] of Object.entries(vars)) if (!updatedKeys.has(key)) outputLines.push(key + "=" + quoteValue(value));
|
|
141
|
+
writeFileSync(filePath, outputLines.join("\n").trimEnd() + "\n");
|
|
142
|
+
}
|
|
143
|
+
/**
|
|
144
|
+
* Set a single var in .dev.vars
|
|
145
|
+
*/
|
|
146
|
+
function setDevVar(key, value, dir = process.cwd()) {
|
|
147
|
+
const vars = readDevVars(dir);
|
|
148
|
+
vars[key] = value;
|
|
149
|
+
writeDevVars(vars, dir);
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
//#endregion
|
|
153
|
+
//#region src/cli/utils/secrets.ts
|
|
154
|
+
/**
|
|
155
|
+
* Secret generation and management utilities for PDS CLI
|
|
156
|
+
*/
|
|
157
|
+
/**
|
|
158
|
+
* Generate a new secp256k1 signing keypair
|
|
159
|
+
*/
|
|
160
|
+
async function generateSigningKeypair() {
|
|
161
|
+
const keypair = await Secp256k1Keypair.create({ exportable: true });
|
|
162
|
+
return {
|
|
163
|
+
privateKey: Buffer.from(await keypair.export()).toString("hex"),
|
|
164
|
+
publicKey: keypair.did().replace("did:key:", "")
|
|
165
|
+
};
|
|
166
|
+
}
|
|
167
|
+
/**
|
|
168
|
+
* Derive public key from an existing private key
|
|
169
|
+
*/
|
|
170
|
+
async function derivePublicKey(privateKeyHex) {
|
|
171
|
+
return (await Secp256k1Keypair.import(privateKeyHex)).did().replace("did:key:", "");
|
|
172
|
+
}
|
|
173
|
+
/**
|
|
174
|
+
* Generate a random auth token (base64url, 32 bytes)
|
|
175
|
+
*/
|
|
176
|
+
function generateAuthToken() {
|
|
177
|
+
return randomBytes(32).toString("base64url");
|
|
178
|
+
}
|
|
179
|
+
/**
|
|
180
|
+
* Generate a random JWT secret (base64, 32 bytes)
|
|
181
|
+
*/
|
|
182
|
+
function generateJwtSecret() {
|
|
183
|
+
return randomBytes(32).toString("base64");
|
|
184
|
+
}
|
|
185
|
+
/**
|
|
186
|
+
* Hash a password using bcrypt
|
|
187
|
+
*/
|
|
188
|
+
async function hashPassword(password) {
|
|
189
|
+
return bcrypt.hash(password, 10);
|
|
190
|
+
}
|
|
191
|
+
/**
|
|
192
|
+
* Prompt for password with confirmation (max 3 attempts)
|
|
193
|
+
*/
|
|
194
|
+
async function promptPassword(handle) {
|
|
195
|
+
const message = handle ? `Choose a password for @${handle}:` : "Enter password:";
|
|
196
|
+
const MAX_ATTEMPTS = 3;
|
|
197
|
+
let attempts = 0;
|
|
198
|
+
while (attempts < MAX_ATTEMPTS) {
|
|
199
|
+
attempts++;
|
|
200
|
+
const password = await p.password({ message });
|
|
201
|
+
if (p.isCancel(password)) {
|
|
202
|
+
p.cancel("Cancelled");
|
|
203
|
+
process.exit(0);
|
|
204
|
+
}
|
|
205
|
+
const confirm = await p.password({ message: "Confirm password:" });
|
|
206
|
+
if (p.isCancel(confirm)) {
|
|
207
|
+
p.cancel("Cancelled");
|
|
208
|
+
process.exit(0);
|
|
209
|
+
}
|
|
210
|
+
if (password === confirm) return password;
|
|
211
|
+
p.log.error("Passwords do not match. Try again.");
|
|
212
|
+
}
|
|
213
|
+
p.log.error("Too many failed attempts.");
|
|
214
|
+
p.cancel("Password setup cancelled");
|
|
215
|
+
process.exit(1);
|
|
216
|
+
}
|
|
217
|
+
/**
|
|
218
|
+
* Set a secret value, either locally (.dev.vars) or via wrangler
|
|
219
|
+
*/
|
|
220
|
+
async function setSecretValue(name, value, local) {
|
|
221
|
+
if (local) setDevVar(name, value);
|
|
222
|
+
else await setSecret(name, value);
|
|
223
|
+
}
|
|
224
|
+
/**
|
|
225
|
+
* Set a public var in wrangler.jsonc
|
|
226
|
+
*/
|
|
227
|
+
function setPublicVar(name, value, local) {
|
|
228
|
+
if (local) setDevVar(name, value);
|
|
229
|
+
else setVar(name, value);
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
//#endregion
|
|
233
|
+
//#region src/cli/commands/secret/jwt.ts
|
|
234
|
+
/**
|
|
235
|
+
* JWT secret generation command
|
|
236
|
+
*/
|
|
237
|
+
const jwtCommand = defineCommand({
|
|
238
|
+
meta: {
|
|
239
|
+
name: "jwt",
|
|
240
|
+
description: "Generate and set JWT signing secret"
|
|
241
|
+
},
|
|
242
|
+
args: { local: {
|
|
243
|
+
type: "boolean",
|
|
244
|
+
description: "Write to .dev.vars instead of wrangler secrets",
|
|
245
|
+
default: false
|
|
246
|
+
} },
|
|
247
|
+
async run({ args }) {
|
|
248
|
+
p.intro("Generate JWT Secret");
|
|
249
|
+
const secret = generateJwtSecret();
|
|
250
|
+
try {
|
|
251
|
+
await setSecretValue("JWT_SECRET", secret, args.local);
|
|
252
|
+
p.outro(args.local ? "JWT_SECRET written to .dev.vars" : "Done!");
|
|
253
|
+
} catch (error) {
|
|
254
|
+
p.log.error(String(error));
|
|
255
|
+
process.exit(1);
|
|
256
|
+
}
|
|
257
|
+
}
|
|
258
|
+
});
|
|
259
|
+
|
|
260
|
+
//#endregion
|
|
261
|
+
//#region src/cli/commands/secret/password.ts
|
|
262
|
+
/**
|
|
263
|
+
* Password hash generation command
|
|
264
|
+
*/
|
|
265
|
+
const passwordCommand = defineCommand({
|
|
266
|
+
meta: {
|
|
267
|
+
name: "password",
|
|
268
|
+
description: "Set account password (stored as bcrypt hash)"
|
|
269
|
+
},
|
|
270
|
+
args: { local: {
|
|
271
|
+
type: "boolean",
|
|
272
|
+
description: "Write to .dev.vars instead of wrangler secrets",
|
|
273
|
+
default: false
|
|
274
|
+
} },
|
|
275
|
+
async run({ args }) {
|
|
276
|
+
p.intro("Set Account Password");
|
|
277
|
+
const password = await promptPassword();
|
|
278
|
+
const spinner = p.spinner();
|
|
279
|
+
spinner.start("Hashing password...");
|
|
280
|
+
const passwordHash = await hashPassword(password);
|
|
281
|
+
spinner.stop("Password hashed");
|
|
282
|
+
try {
|
|
283
|
+
await setSecretValue("PASSWORD_HASH", passwordHash, args.local);
|
|
284
|
+
p.outro(args.local ? "PASSWORD_HASH written to .dev.vars" : "Done!");
|
|
285
|
+
} catch (error) {
|
|
286
|
+
p.log.error(String(error));
|
|
287
|
+
process.exit(1);
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
});
|
|
291
|
+
|
|
292
|
+
//#endregion
|
|
293
|
+
//#region src/cli/commands/secret/key.ts
|
|
294
|
+
/**
|
|
295
|
+
* Signing key generation command
|
|
296
|
+
*/
|
|
297
|
+
const keyCommand = defineCommand({
|
|
298
|
+
meta: {
|
|
299
|
+
name: "key",
|
|
300
|
+
description: "Generate and set signing keypair"
|
|
301
|
+
},
|
|
302
|
+
args: { local: {
|
|
303
|
+
type: "boolean",
|
|
304
|
+
description: "Write to .dev.vars instead of wrangler secrets/config",
|
|
305
|
+
default: false
|
|
306
|
+
} },
|
|
307
|
+
async run({ args }) {
|
|
308
|
+
p.intro("Generate Signing Keypair");
|
|
309
|
+
const spinner = p.spinner();
|
|
310
|
+
spinner.start("Generating secp256k1 keypair...");
|
|
311
|
+
const { privateKey, publicKey } = await generateSigningKeypair();
|
|
312
|
+
spinner.stop("Keypair generated");
|
|
313
|
+
try {
|
|
314
|
+
await setSecretValue("SIGNING_KEY", privateKey, args.local);
|
|
315
|
+
setPublicVar("SIGNING_KEY_PUBLIC", publicKey, args.local);
|
|
316
|
+
p.log.info("Public key (for DID document): " + publicKey);
|
|
317
|
+
p.outro(args.local ? "SIGNING_KEY and SIGNING_KEY_PUBLIC written to .dev.vars" : "Done!");
|
|
318
|
+
} catch (error) {
|
|
319
|
+
p.log.error(String(error));
|
|
320
|
+
process.exit(1);
|
|
321
|
+
}
|
|
322
|
+
}
|
|
323
|
+
});
|
|
324
|
+
|
|
325
|
+
//#endregion
|
|
326
|
+
//#region src/cli/commands/secret/index.ts
|
|
327
|
+
/**
|
|
328
|
+
* Secret management commands
|
|
329
|
+
*/
|
|
330
|
+
const secretCommand = defineCommand({
|
|
331
|
+
meta: {
|
|
332
|
+
name: "secret",
|
|
333
|
+
description: "Manage PDS secrets"
|
|
334
|
+
},
|
|
335
|
+
subCommands: {
|
|
336
|
+
jwt: jwtCommand,
|
|
337
|
+
password: passwordCommand,
|
|
338
|
+
key: keyCommand
|
|
339
|
+
}
|
|
340
|
+
});
|
|
341
|
+
|
|
342
|
+
//#endregion
|
|
343
|
+
//#region src/cli/utils/cli-helpers.ts
|
|
344
|
+
/**
|
|
345
|
+
* Shared CLI utilities for PDS commands
|
|
346
|
+
*/
|
|
347
|
+
/**
|
|
348
|
+
* Prompt for text input, exiting on cancel
|
|
349
|
+
*/
|
|
350
|
+
async function promptText(options) {
|
|
351
|
+
const result = await p.text(options);
|
|
352
|
+
if (p.isCancel(result)) {
|
|
353
|
+
p.cancel("Cancelled");
|
|
354
|
+
process.exit(0);
|
|
355
|
+
}
|
|
356
|
+
return result;
|
|
357
|
+
}
|
|
358
|
+
/**
|
|
359
|
+
* Prompt for confirmation, exiting on cancel
|
|
360
|
+
*/
|
|
361
|
+
async function promptConfirm(options) {
|
|
362
|
+
const result = await p.confirm(options);
|
|
363
|
+
if (p.isCancel(result)) {
|
|
364
|
+
p.cancel("Cancelled");
|
|
365
|
+
process.exit(0);
|
|
366
|
+
}
|
|
367
|
+
return result;
|
|
368
|
+
}
|
|
369
|
+
/**
|
|
370
|
+
* Prompt for selection, exiting on cancel
|
|
371
|
+
*/
|
|
372
|
+
async function promptSelect(options) {
|
|
373
|
+
const result = await p.select(options);
|
|
374
|
+
if (p.isCancel(result)) {
|
|
375
|
+
p.cancel("Cancelled");
|
|
376
|
+
process.exit(0);
|
|
377
|
+
}
|
|
378
|
+
return result;
|
|
379
|
+
}
|
|
380
|
+
/**
|
|
381
|
+
* Get target PDS URL based on mode
|
|
382
|
+
*/
|
|
383
|
+
function getTargetUrl(isDev, pdsHostname) {
|
|
384
|
+
if (isDev) return `http://localhost:${process.env.PORT ? parseInt(process.env.PORT) ?? "5173" : "5173"}`;
|
|
385
|
+
if (!pdsHostname) throw new Error("PDS_HOSTNAME not configured in wrangler.jsonc");
|
|
386
|
+
return `https://${pdsHostname}`;
|
|
387
|
+
}
|
|
388
|
+
/**
|
|
389
|
+
* Extract domain from URL
|
|
390
|
+
*/
|
|
391
|
+
function getDomain(url) {
|
|
392
|
+
try {
|
|
393
|
+
return new URL(url).hostname;
|
|
394
|
+
} catch {
|
|
395
|
+
return url;
|
|
396
|
+
}
|
|
397
|
+
}
|
|
398
|
+
|
|
399
|
+
//#endregion
|
|
400
|
+
//#region src/cli/utils/handle-resolver.ts
|
|
401
|
+
/**
|
|
402
|
+
* Utilities for resolving AT Protocol handles to DIDs
|
|
403
|
+
*/
|
|
404
|
+
const resolver = new AtprotoDohHandleResolver({ dohEndpoint: "https://cloudflare-dns.com/dns-query" });
|
|
405
|
+
/**
|
|
406
|
+
* Resolve a handle to a DID using the AT Protocol handle resolution methods.
|
|
407
|
+
* Uses DNS-over-HTTPS via Cloudflare for DNS resolution.
|
|
408
|
+
*/
|
|
409
|
+
async function resolveHandleToDid(handle) {
|
|
410
|
+
try {
|
|
411
|
+
return await resolver.resolve(handle, { signal: AbortSignal.timeout(1e4) });
|
|
412
|
+
} catch (err) {
|
|
413
|
+
return null;
|
|
414
|
+
}
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
//#endregion
|
|
418
|
+
//#region src/did-resolver.ts
|
|
419
|
+
/**
|
|
420
|
+
* DID resolution for Cloudflare Workers
|
|
421
|
+
*
|
|
422
|
+
* We can't use @atproto/identity directly because it uses `redirect: "error"`
|
|
423
|
+
* which Cloudflare Workers doesn't support. This is a simple implementation
|
|
424
|
+
* that's compatible with Workers.
|
|
425
|
+
*/
|
|
426
|
+
const PLC_DIRECTORY = "https://plc.directory";
|
|
427
|
+
const TIMEOUT_MS = 3e3;
|
|
428
|
+
var DidResolver = class {
|
|
429
|
+
plcUrl;
|
|
430
|
+
timeout;
|
|
431
|
+
cache;
|
|
432
|
+
constructor(opts = {}) {
|
|
433
|
+
this.plcUrl = opts.plcUrl ?? PLC_DIRECTORY;
|
|
434
|
+
this.timeout = opts.timeout ?? TIMEOUT_MS;
|
|
435
|
+
this.cache = opts.didCache;
|
|
436
|
+
}
|
|
437
|
+
async resolve(did) {
|
|
438
|
+
if (this.cache) {
|
|
439
|
+
const cached = await this.cache.checkCache(did);
|
|
440
|
+
if (cached && !cached.expired) {
|
|
441
|
+
if (cached.stale) this.cache.refreshCache(did, () => this.resolveNoCache(did), cached);
|
|
442
|
+
return cached.doc;
|
|
443
|
+
}
|
|
444
|
+
}
|
|
445
|
+
const doc = await this.resolveNoCache(did);
|
|
446
|
+
if (doc && this.cache) await this.cache.cacheDid(did, doc);
|
|
447
|
+
else if (!doc && this.cache) await this.cache.clearEntry(did);
|
|
448
|
+
return doc;
|
|
449
|
+
}
|
|
450
|
+
async resolveNoCache(did) {
|
|
451
|
+
if (did.startsWith("did:web:")) return this.resolveDidWeb(did);
|
|
452
|
+
if (did.startsWith("did:plc:")) return this.resolveDidPlc(did);
|
|
453
|
+
throw new Error(`Unsupported DID method: ${did}`);
|
|
454
|
+
}
|
|
455
|
+
async resolveDidWeb(did) {
|
|
456
|
+
const parts = did.split(":").slice(2);
|
|
457
|
+
if (parts.length === 0) throw new Error(`Invalid did:web format: ${did}`);
|
|
458
|
+
if (parts.length > 1) throw new Error(`Unsupported did:web with path: ${did}`);
|
|
459
|
+
const domain = decodeURIComponent(parts[0]);
|
|
460
|
+
const url = new URL(`https://${domain}/.well-known/did.json`);
|
|
461
|
+
if (url.hostname === "localhost") url.protocol = "http:";
|
|
462
|
+
const controller = new AbortController();
|
|
463
|
+
const timeoutId = setTimeout(() => controller.abort(), this.timeout);
|
|
464
|
+
try {
|
|
465
|
+
const res = await fetch(url.toString(), {
|
|
466
|
+
signal: controller.signal,
|
|
467
|
+
redirect: "manual",
|
|
468
|
+
headers: { accept: "application/did+ld+json,application/json" }
|
|
469
|
+
});
|
|
470
|
+
if (res.status >= 300 && res.status < 400) return null;
|
|
471
|
+
if (!res.ok) return null;
|
|
472
|
+
const doc = await res.json();
|
|
473
|
+
return this.validateDidDoc(did, doc);
|
|
474
|
+
} finally {
|
|
475
|
+
clearTimeout(timeoutId);
|
|
476
|
+
}
|
|
477
|
+
}
|
|
478
|
+
async resolveDidPlc(did) {
|
|
479
|
+
const url = new URL(`/${encodeURIComponent(did)}`, this.plcUrl);
|
|
480
|
+
const controller = new AbortController();
|
|
481
|
+
const timeoutId = setTimeout(() => controller.abort(), this.timeout);
|
|
482
|
+
try {
|
|
483
|
+
const res = await fetch(url.toString(), {
|
|
484
|
+
signal: controller.signal,
|
|
485
|
+
redirect: "manual",
|
|
486
|
+
headers: { accept: "application/did+ld+json,application/json" }
|
|
487
|
+
});
|
|
488
|
+
if (res.status >= 300 && res.status < 400) return null;
|
|
489
|
+
if (res.status === 404) return null;
|
|
490
|
+
if (!res.ok) throw new Error(`PLC directory error: ${res.status} ${res.statusText}`);
|
|
491
|
+
const doc = await res.json();
|
|
492
|
+
return this.validateDidDoc(did, doc);
|
|
493
|
+
} finally {
|
|
494
|
+
clearTimeout(timeoutId);
|
|
495
|
+
}
|
|
496
|
+
}
|
|
497
|
+
validateDidDoc(did, doc) {
|
|
498
|
+
if (!check.is(doc, didDocument)) return null;
|
|
499
|
+
if (doc.id !== did) return null;
|
|
500
|
+
return doc;
|
|
501
|
+
}
|
|
502
|
+
};
|
|
503
|
+
|
|
504
|
+
//#endregion
|
|
505
|
+
//#region src/cli/commands/init.ts
|
|
506
|
+
/**
|
|
507
|
+
* Interactive PDS setup wizard
|
|
508
|
+
*/
|
|
509
|
+
/**
|
|
510
|
+
* Slugify a handle to create a worker name
|
|
511
|
+
* e.g., "example.com" -> "example-com-pds"
|
|
512
|
+
*/
|
|
513
|
+
function slugifyHandle(handle) {
|
|
514
|
+
return handle.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, "") + "-pds";
|
|
515
|
+
}
|
|
516
|
+
const defaultWorkerName = "my-pds";
|
|
517
|
+
/**
|
|
518
|
+
* Prompt for worker name with validation
|
|
519
|
+
*/
|
|
520
|
+
async function promptWorkerName(handle, currentWorkerName) {
|
|
521
|
+
const placeholder = currentWorkerName && currentWorkerName !== defaultWorkerName ? currentWorkerName : slugifyHandle(handle);
|
|
522
|
+
return promptText({
|
|
523
|
+
message: "Cloudflare Worker name:",
|
|
524
|
+
placeholder,
|
|
525
|
+
initialValue: placeholder,
|
|
526
|
+
validate: (v) => {
|
|
527
|
+
if (!v) return "Worker name is required";
|
|
528
|
+
if (!/^[a-z0-9-]+$/.test(v)) return "Worker name can only contain lowercase letters, numbers, and hyphens";
|
|
529
|
+
}
|
|
530
|
+
});
|
|
531
|
+
}
|
|
532
|
+
/**
|
|
533
|
+
* Run wrangler types to regenerate TypeScript types
|
|
534
|
+
*/
|
|
535
|
+
function runWranglerTypes() {
|
|
536
|
+
return new Promise((resolve$1, reject) => {
|
|
537
|
+
const child = spawn("wrangler", ["types"], { stdio: "pipe" });
|
|
538
|
+
let output = "";
|
|
539
|
+
child.stdout?.on("data", (data) => {
|
|
540
|
+
output += data.toString();
|
|
541
|
+
});
|
|
542
|
+
child.stderr?.on("data", (data) => {
|
|
543
|
+
output += data.toString();
|
|
544
|
+
});
|
|
545
|
+
child.on("close", (code) => {
|
|
546
|
+
if (code === 0) resolve$1();
|
|
547
|
+
else {
|
|
548
|
+
if (output) console.error(output);
|
|
549
|
+
reject(/* @__PURE__ */ new Error(`wrangler types failed with code ${code}`));
|
|
550
|
+
}
|
|
551
|
+
});
|
|
552
|
+
child.on("error", reject);
|
|
553
|
+
});
|
|
554
|
+
}
|
|
555
|
+
const initCommand = defineCommand({
|
|
556
|
+
meta: {
|
|
557
|
+
name: "init",
|
|
558
|
+
description: "Interactive PDS setup wizard"
|
|
559
|
+
},
|
|
560
|
+
args: { production: {
|
|
561
|
+
type: "boolean",
|
|
562
|
+
description: "Deploy secrets to Cloudflare?",
|
|
563
|
+
default: false
|
|
564
|
+
} },
|
|
565
|
+
async run({ args }) {
|
|
566
|
+
p.intro("🦋 PDS Setup");
|
|
567
|
+
const isProduction = args.production;
|
|
568
|
+
if (isProduction) p.log.info("Production mode: secrets will be deployed to Cloudflare");
|
|
569
|
+
p.log.info("Let's set up your new home in the Atmosphere!");
|
|
570
|
+
const wranglerVars = getVars();
|
|
571
|
+
const devVars = readDevVars();
|
|
572
|
+
const currentVars = {
|
|
573
|
+
...devVars,
|
|
574
|
+
...wranglerVars
|
|
575
|
+
};
|
|
576
|
+
const isMigrating = await promptConfirm({
|
|
577
|
+
message: "Are you migrating an existing Bluesky/ATProto account?",
|
|
578
|
+
initialValue: false
|
|
579
|
+
});
|
|
580
|
+
let did;
|
|
581
|
+
let handle;
|
|
582
|
+
let hostname;
|
|
583
|
+
let workerName;
|
|
584
|
+
let initialActive;
|
|
585
|
+
const currentWorkerName = getWorkerName();
|
|
586
|
+
if (isMigrating) {
|
|
587
|
+
p.log.info("Time to pack your bags! 🧳");
|
|
588
|
+
p.log.info("Your new account will be inactive until you're ready to go live.");
|
|
589
|
+
let hostedDomains = [
|
|
590
|
+
".bsky.social",
|
|
591
|
+
".bsky.network",
|
|
592
|
+
".bsky.team"
|
|
593
|
+
];
|
|
594
|
+
const isHostedHandle = (h) => hostedDomains.some((domain) => h?.endsWith(domain));
|
|
595
|
+
let resolvedDid = null;
|
|
596
|
+
let existingHandle = null;
|
|
597
|
+
let attempts = 0;
|
|
598
|
+
const MAX_ATTEMPTS = 3;
|
|
599
|
+
while (!resolvedDid && attempts < MAX_ATTEMPTS) {
|
|
600
|
+
attempts++;
|
|
601
|
+
const currentHandle = await promptText({
|
|
602
|
+
message: "Your current Bluesky/ATProto handle:",
|
|
603
|
+
placeholder: "example.bsky.social",
|
|
604
|
+
validate: (v) => !v ? "Handle is required" : void 0
|
|
605
|
+
});
|
|
606
|
+
existingHandle = currentHandle;
|
|
607
|
+
const spinner$1 = p.spinner();
|
|
608
|
+
spinner$1.start("Finding you in the Atmosphere...");
|
|
609
|
+
resolvedDid = await resolveHandleToDid(currentHandle);
|
|
610
|
+
if (!resolvedDid) {
|
|
611
|
+
spinner$1.stop("Not found");
|
|
612
|
+
p.log.error(`Failed to resolve handle "${currentHandle}"`);
|
|
613
|
+
if (await promptSelect({
|
|
614
|
+
message: "What would you like to do?",
|
|
615
|
+
options: [{
|
|
616
|
+
value: "retry",
|
|
617
|
+
label: "Try a different handle"
|
|
618
|
+
}, {
|
|
619
|
+
value: "manual",
|
|
620
|
+
label: "Enter DID manually"
|
|
621
|
+
}]
|
|
622
|
+
}) === "manual") resolvedDid = await promptText({
|
|
623
|
+
message: "Enter your DID:",
|
|
624
|
+
placeholder: "did:plc:...",
|
|
625
|
+
validate: (v) => {
|
|
626
|
+
if (!v) return "DID is required";
|
|
627
|
+
if (!v.startsWith("did:")) return "DID must start with did:";
|
|
628
|
+
}
|
|
629
|
+
});
|
|
630
|
+
} else {
|
|
631
|
+
try {
|
|
632
|
+
const pdsService = (await new DidResolver().resolve(resolvedDid))?.service?.find((s) => s.type === "AtprotoPersonalDataServer" || s.id === "#atproto_pds");
|
|
633
|
+
if (pdsService?.serviceEndpoint) {
|
|
634
|
+
const describeRes = await fetch(`${pdsService.serviceEndpoint}/xrpc/com.atproto.server.describeServer`);
|
|
635
|
+
if (describeRes.ok) {
|
|
636
|
+
const desc = await describeRes.json();
|
|
637
|
+
if (desc.availableUserDomains?.length) hostedDomains = desc.availableUserDomains.map((d) => d.startsWith(".") ? d : `.${d}`);
|
|
638
|
+
}
|
|
639
|
+
}
|
|
640
|
+
} catch {}
|
|
641
|
+
spinner$1.stop(`Found you! ${resolvedDid}`);
|
|
642
|
+
if (isHostedHandle(existingHandle)) {
|
|
643
|
+
const theirDomain = hostedDomains.find((d) => existingHandle?.endsWith(d));
|
|
644
|
+
const domainExample = theirDomain ? `*${theirDomain}` : "*.bsky.social";
|
|
645
|
+
p.log.warn(`You'll need a custom domain for your new handle (not ${domainExample}). You can set this up after transferring your data.`);
|
|
646
|
+
}
|
|
647
|
+
if (attempts >= MAX_ATTEMPTS) {
|
|
648
|
+
p.log.error("Unable to resolve handle after 3 attempts.");
|
|
649
|
+
p.log.info("");
|
|
650
|
+
p.log.info("You can:");
|
|
651
|
+
p.log.info(" 1. Double-check your handle spelling");
|
|
652
|
+
p.log.info(" 2. Provide your DID directly if you know it");
|
|
653
|
+
p.log.info(" 3. Run 'pds init' again when ready");
|
|
654
|
+
p.outro("Initialization cancelled.");
|
|
655
|
+
process.exit(1);
|
|
656
|
+
}
|
|
657
|
+
}
|
|
658
|
+
}
|
|
659
|
+
did = resolvedDid;
|
|
660
|
+
handle = await promptText({
|
|
661
|
+
message: "New account handle (must be a domain you control):",
|
|
662
|
+
placeholder: "example.com",
|
|
663
|
+
initialValue: existingHandle && !isHostedHandle(existingHandle) ? existingHandle : currentVars.HANDLE || "",
|
|
664
|
+
validate: (v) => {
|
|
665
|
+
if (!v) return "Handle is required";
|
|
666
|
+
if (isHostedHandle(v)) return "You need a custom domain - hosted handles like *.bsky.social won't work";
|
|
667
|
+
}
|
|
668
|
+
});
|
|
669
|
+
hostname = await promptText({
|
|
670
|
+
message: "Domain where you'll deploy your PDS:",
|
|
671
|
+
placeholder: handle,
|
|
672
|
+
initialValue: currentVars.PDS_HOSTNAME || handle,
|
|
673
|
+
validate: (v) => !v ? "Hostname is required" : void 0
|
|
674
|
+
});
|
|
675
|
+
workerName = await promptWorkerName(handle, currentWorkerName);
|
|
676
|
+
initialActive = "false";
|
|
677
|
+
} else {
|
|
678
|
+
p.log.info("A fresh start in the Atmosphere! ✨");
|
|
679
|
+
hostname = await promptText({
|
|
680
|
+
message: "Domain where you'll deploy your PDS:",
|
|
681
|
+
placeholder: "pds.example.com",
|
|
682
|
+
initialValue: currentVars.PDS_HOSTNAME || "",
|
|
683
|
+
validate: (v) => !v ? "Hostname is required" : void 0
|
|
684
|
+
});
|
|
685
|
+
handle = await promptText({
|
|
686
|
+
message: "Account handle:",
|
|
687
|
+
placeholder: hostname,
|
|
688
|
+
initialValue: currentVars.HANDLE || hostname,
|
|
689
|
+
validate: (v) => !v ? "Handle is required" : void 0
|
|
690
|
+
});
|
|
691
|
+
const didDefault = "did:web:" + hostname;
|
|
692
|
+
did = await promptText({
|
|
693
|
+
message: "Account DID:",
|
|
694
|
+
placeholder: didDefault,
|
|
695
|
+
initialValue: currentVars.DID || didDefault,
|
|
696
|
+
validate: (v) => {
|
|
697
|
+
if (!v) return "DID is required";
|
|
698
|
+
if (!v.startsWith("did:")) return "DID must start with 'did:'";
|
|
699
|
+
}
|
|
700
|
+
});
|
|
701
|
+
workerName = await promptWorkerName(handle, currentWorkerName);
|
|
702
|
+
initialActive = "true";
|
|
703
|
+
if (handle === hostname) p.note([
|
|
704
|
+
"Your handle matches your PDS hostname, so your PDS will",
|
|
705
|
+
"automatically handle domain verification for you!",
|
|
706
|
+
"",
|
|
707
|
+
"For did:web, your PDS serves the DID document at:",
|
|
708
|
+
` https://${hostname}/.well-known/did.json`,
|
|
709
|
+
"",
|
|
710
|
+
"For handle verification, it serves:",
|
|
711
|
+
` https://${hostname}/.well-known/atproto-did`,
|
|
712
|
+
"",
|
|
713
|
+
"No additional DNS or hosting setup needed. Easy! 🎉"
|
|
714
|
+
].join("\n"), "Identity Setup 🪪");
|
|
715
|
+
else p.note([
|
|
716
|
+
"For did:web, your PDS will serve the DID document at:",
|
|
717
|
+
` https://${hostname}/.well-known/did.json`,
|
|
718
|
+
"",
|
|
719
|
+
"To verify your handle, create a DNS TXT record:",
|
|
720
|
+
` _atproto.${handle} TXT "did=${did}"`,
|
|
721
|
+
"",
|
|
722
|
+
"Or serve a file at:",
|
|
723
|
+
` https://${handle}/.well-known/atproto-did`,
|
|
724
|
+
` containing: ${did}`
|
|
725
|
+
].join("\n"), "Identity Setup 🪪");
|
|
726
|
+
}
|
|
727
|
+
const spinner = p.spinner();
|
|
728
|
+
const authToken = await getOrGenerateSecret("AUTH_TOKEN", devVars, async () => {
|
|
729
|
+
spinner.start("Generating auth token...");
|
|
730
|
+
const token = generateAuthToken();
|
|
731
|
+
spinner.stop("Auth token generated");
|
|
732
|
+
return token;
|
|
733
|
+
});
|
|
734
|
+
const signingKey = await getOrGenerateSecret("SIGNING_KEY", devVars, async () => {
|
|
735
|
+
spinner.start("Generating signing keypair...");
|
|
736
|
+
const { privateKey } = await generateSigningKeypair();
|
|
737
|
+
spinner.stop("Signing keypair generated");
|
|
738
|
+
return privateKey;
|
|
739
|
+
});
|
|
740
|
+
const signingKeyPublic = await derivePublicKey(signingKey);
|
|
741
|
+
const jwtSecret = await getOrGenerateSecret("JWT_SECRET", devVars, async () => {
|
|
742
|
+
spinner.start("Generating JWT secret...");
|
|
743
|
+
const secret = generateJwtSecret();
|
|
744
|
+
spinner.stop("JWT secret generated");
|
|
745
|
+
return secret;
|
|
746
|
+
});
|
|
747
|
+
const passwordHash = await getOrGenerateSecret("PASSWORD_HASH", devVars, async () => {
|
|
748
|
+
const password = await promptPassword(handle);
|
|
749
|
+
spinner.start("Hashing password...");
|
|
750
|
+
const hash = await hashPassword(password);
|
|
751
|
+
spinner.stop("Password hashed");
|
|
752
|
+
return hash;
|
|
753
|
+
});
|
|
754
|
+
spinner.start("Updating wrangler.jsonc...");
|
|
755
|
+
setWorkerName(workerName);
|
|
756
|
+
setVars({
|
|
757
|
+
PDS_HOSTNAME: hostname,
|
|
758
|
+
DID: did,
|
|
759
|
+
HANDLE: handle,
|
|
760
|
+
SIGNING_KEY_PUBLIC: signingKeyPublic,
|
|
761
|
+
INITIAL_ACTIVE: initialActive
|
|
762
|
+
});
|
|
763
|
+
spinner.stop("wrangler.jsonc updated");
|
|
764
|
+
const local = !isProduction;
|
|
765
|
+
if (isProduction) spinner.start("Deploying secrets to Cloudflare...");
|
|
766
|
+
else spinner.start("Writing secrets to .dev.vars...");
|
|
767
|
+
await setSecretValue("AUTH_TOKEN", authToken, local);
|
|
768
|
+
await setSecretValue("SIGNING_KEY", signingKey, local);
|
|
769
|
+
await setSecretValue("JWT_SECRET", jwtSecret, local);
|
|
770
|
+
await setSecretValue("PASSWORD_HASH", passwordHash, local);
|
|
771
|
+
spinner.stop(isProduction ? "Secrets deployed" : "Secrets written to .dev.vars");
|
|
772
|
+
spinner.start("Generating TypeScript types...");
|
|
773
|
+
try {
|
|
774
|
+
await runWranglerTypes();
|
|
775
|
+
spinner.stop("TypeScript types generated");
|
|
776
|
+
} catch {
|
|
777
|
+
spinner.stop("Failed to generate types (wrangler types)");
|
|
778
|
+
}
|
|
779
|
+
p.note([
|
|
780
|
+
" Worker name: " + workerName,
|
|
781
|
+
" PDS hostname: " + hostname,
|
|
782
|
+
" DID: " + did,
|
|
783
|
+
" Handle: " + handle,
|
|
784
|
+
" Public signing key: " + signingKeyPublic.slice(0, 20) + "...",
|
|
785
|
+
"",
|
|
786
|
+
isProduction ? "Secrets deployed to Cloudflare ☁️" : "Secrets saved to .dev.vars",
|
|
787
|
+
"",
|
|
788
|
+
"Auth token (save this!):",
|
|
789
|
+
" " + authToken
|
|
790
|
+
].join("\n"), "Your New Home 🏠");
|
|
791
|
+
let deployedSecrets = isProduction;
|
|
792
|
+
if (!isProduction) {
|
|
793
|
+
const deployNow = await p.confirm({
|
|
794
|
+
message: "Push secrets to Cloudflare now?",
|
|
795
|
+
initialValue: false
|
|
796
|
+
});
|
|
797
|
+
if (!p.isCancel(deployNow) && deployNow) {
|
|
798
|
+
spinner.start("Deploying secrets to Cloudflare...");
|
|
799
|
+
await setSecretValue("AUTH_TOKEN", authToken, false);
|
|
800
|
+
await setSecretValue("SIGNING_KEY", signingKey, false);
|
|
801
|
+
await setSecretValue("JWT_SECRET", jwtSecret, false);
|
|
802
|
+
await setSecretValue("PASSWORD_HASH", passwordHash, false);
|
|
803
|
+
spinner.stop("Secrets deployed to Cloudflare");
|
|
804
|
+
deployedSecrets = true;
|
|
805
|
+
}
|
|
806
|
+
}
|
|
807
|
+
if (isMigrating) p.note([
|
|
808
|
+
deployedSecrets ? "Deploy your worker and run the migration:" : "Push secrets, deploy, and run the migration:",
|
|
809
|
+
"",
|
|
810
|
+
...deployedSecrets ? [] : [" pnpm pds init --production", ""],
|
|
811
|
+
" wrangler deploy",
|
|
812
|
+
" pnpm pds migrate",
|
|
813
|
+
"",
|
|
814
|
+
"To test locally first:",
|
|
815
|
+
" pnpm dev # in one terminal",
|
|
816
|
+
" pnpm pds migrate --dev # in another",
|
|
817
|
+
"",
|
|
818
|
+
"Then update your identity and flip the switch! 🦋",
|
|
819
|
+
" https://atproto.com/guides/account-migration"
|
|
820
|
+
].join("\n"), "Next Steps 🧳");
|
|
821
|
+
if (deployedSecrets) p.outro("Run 'wrangler deploy' to launch your PDS! 🚀");
|
|
822
|
+
else p.outro("Run 'pnpm dev' to start your PDS locally! 🦋");
|
|
823
|
+
}
|
|
824
|
+
});
|
|
825
|
+
/**
|
|
826
|
+
* Helper to get a secret from .dev.vars or generate a new one
|
|
827
|
+
*/
|
|
828
|
+
async function getOrGenerateSecret(name, devVars, generate) {
|
|
829
|
+
if (devVars[name]) {
|
|
830
|
+
if (await p.confirm({
|
|
831
|
+
message: `Use ${name} from .dev.vars?`,
|
|
832
|
+
initialValue: true
|
|
833
|
+
}) === true) return devVars[name];
|
|
834
|
+
}
|
|
835
|
+
return generate();
|
|
836
|
+
}
|
|
837
|
+
|
|
838
|
+
//#endregion
|
|
839
|
+
//#region src/cli/utils/pds-client.ts
|
|
840
|
+
var PDSClientError = class extends Error {
|
|
841
|
+
constructor(status, error, message) {
|
|
842
|
+
super(message);
|
|
843
|
+
this.status = status;
|
|
844
|
+
this.error = error;
|
|
845
|
+
this.name = "PDSClientError";
|
|
846
|
+
}
|
|
847
|
+
};
|
|
848
|
+
var PDSClient = class {
|
|
849
|
+
authToken;
|
|
850
|
+
constructor(baseUrl, authToken) {
|
|
851
|
+
this.baseUrl = baseUrl;
|
|
852
|
+
this.authToken = authToken;
|
|
853
|
+
}
|
|
854
|
+
/**
|
|
855
|
+
* Set the auth token for subsequent requests
|
|
856
|
+
*/
|
|
857
|
+
setAuthToken(token) {
|
|
858
|
+
this.authToken = token;
|
|
859
|
+
}
|
|
860
|
+
/**
|
|
861
|
+
* Make an XRPC request
|
|
862
|
+
*/
|
|
863
|
+
async xrpc(method, endpoint, options = {}) {
|
|
864
|
+
const url = new URL(`/xrpc/${endpoint}`, this.baseUrl);
|
|
865
|
+
if (options.params) for (const [key, value] of Object.entries(options.params)) url.searchParams.set(key, value);
|
|
866
|
+
const headers = {};
|
|
867
|
+
if (options.auth && this.authToken) headers["Authorization"] = `Bearer ${this.authToken}`;
|
|
868
|
+
if (options.contentType) headers["Content-Type"] = options.contentType;
|
|
869
|
+
else if (options.body && !(options.body instanceof Uint8Array)) headers["Content-Type"] = "application/json";
|
|
870
|
+
const res = await fetch(url.toString(), {
|
|
871
|
+
method,
|
|
872
|
+
headers,
|
|
873
|
+
body: options.body ? options.body instanceof Uint8Array ? options.body : JSON.stringify(options.body) : void 0
|
|
874
|
+
});
|
|
875
|
+
if (!res.ok) {
|
|
876
|
+
const errorBody = await res.json().catch(() => ({}));
|
|
877
|
+
throw new PDSClientError(res.status, errorBody.error ?? "Unknown", errorBody.message ?? `Request failed: ${res.status}`);
|
|
878
|
+
}
|
|
879
|
+
if ((res.headers.get("content-type") ?? "").includes("application/json")) return res.json();
|
|
880
|
+
return {};
|
|
881
|
+
}
|
|
882
|
+
/**
|
|
883
|
+
* Make a raw request that returns bytes
|
|
884
|
+
*/
|
|
885
|
+
async xrpcBytes(method, endpoint, options = {}) {
|
|
886
|
+
const url = new URL(`/xrpc/${endpoint}`, this.baseUrl);
|
|
887
|
+
if (options.params) for (const [key, value] of Object.entries(options.params)) url.searchParams.set(key, value);
|
|
888
|
+
const headers = {};
|
|
889
|
+
if (options.auth && this.authToken) headers["Authorization"] = `Bearer ${this.authToken}`;
|
|
890
|
+
if (options.contentType) headers["Content-Type"] = options.contentType;
|
|
891
|
+
const res = await fetch(url.toString(), {
|
|
892
|
+
method,
|
|
893
|
+
headers,
|
|
894
|
+
body: options.body
|
|
895
|
+
});
|
|
896
|
+
if (!res.ok) {
|
|
897
|
+
const errorBody = await res.json().catch(() => ({}));
|
|
898
|
+
throw new PDSClientError(res.status, errorBody.error ?? "Unknown", errorBody.message ?? `Request failed: ${res.status}`);
|
|
899
|
+
}
|
|
900
|
+
return {
|
|
901
|
+
bytes: new Uint8Array(await res.arrayBuffer()),
|
|
902
|
+
mimeType: res.headers.get("content-type") ?? "application/octet-stream"
|
|
903
|
+
};
|
|
904
|
+
}
|
|
905
|
+
/**
|
|
906
|
+
* Create a session with identifier and password
|
|
907
|
+
*/
|
|
908
|
+
async createSession(identifier, password) {
|
|
909
|
+
return this.xrpc("POST", "com.atproto.server.createSession", { body: {
|
|
910
|
+
identifier,
|
|
911
|
+
password
|
|
912
|
+
} });
|
|
913
|
+
}
|
|
914
|
+
/**
|
|
915
|
+
* Get repository description including collections
|
|
916
|
+
*/
|
|
917
|
+
async describeRepo(did) {
|
|
918
|
+
return this.xrpc("GET", "com.atproto.repo.describeRepo", { params: { repo: did } });
|
|
919
|
+
}
|
|
920
|
+
/**
|
|
921
|
+
* Get profile stats from AppView (posts, follows, followers counts)
|
|
922
|
+
*/
|
|
923
|
+
async getProfileStats(did) {
|
|
924
|
+
try {
|
|
925
|
+
const res = await fetch(`https://public.api.bsky.app/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(did)}`);
|
|
926
|
+
if (!res.ok) return null;
|
|
927
|
+
const profile = await res.json();
|
|
928
|
+
return {
|
|
929
|
+
postsCount: profile.postsCount ?? 0,
|
|
930
|
+
followsCount: profile.followsCount ?? 0,
|
|
931
|
+
followersCount: profile.followersCount ?? 0
|
|
932
|
+
};
|
|
933
|
+
} catch {
|
|
934
|
+
return null;
|
|
935
|
+
}
|
|
936
|
+
}
|
|
937
|
+
/**
|
|
938
|
+
* Export repository as CAR file
|
|
939
|
+
*/
|
|
940
|
+
async getRepo(did) {
|
|
941
|
+
const { bytes } = await this.xrpcBytes("GET", "com.atproto.sync.getRepo", { params: { did } });
|
|
942
|
+
return bytes;
|
|
943
|
+
}
|
|
944
|
+
/**
|
|
945
|
+
* Get a blob by CID
|
|
946
|
+
*/
|
|
947
|
+
async getBlob(did, cid) {
|
|
948
|
+
return this.xrpcBytes("GET", "com.atproto.sync.getBlob", { params: {
|
|
949
|
+
did,
|
|
950
|
+
cid
|
|
951
|
+
} });
|
|
952
|
+
}
|
|
953
|
+
/**
|
|
954
|
+
* List blobs in repository
|
|
955
|
+
*/
|
|
956
|
+
async listBlobs(did, cursor) {
|
|
957
|
+
const params = { did };
|
|
958
|
+
if (cursor) params.cursor = cursor;
|
|
959
|
+
return this.xrpc("GET", "com.atproto.sync.listBlobs", { params });
|
|
960
|
+
}
|
|
961
|
+
/**
|
|
962
|
+
* Get user preferences
|
|
963
|
+
*/
|
|
964
|
+
async getPreferences() {
|
|
965
|
+
return (await this.xrpc("GET", "app.bsky.actor.getPreferences", { auth: true })).preferences;
|
|
966
|
+
}
|
|
967
|
+
/**
|
|
968
|
+
* Update user preferences
|
|
969
|
+
*/
|
|
970
|
+
async putPreferences(preferences) {
|
|
971
|
+
await this.xrpc("POST", "app.bsky.actor.putPreferences", {
|
|
972
|
+
body: { preferences },
|
|
973
|
+
auth: true
|
|
974
|
+
});
|
|
975
|
+
}
|
|
976
|
+
/**
|
|
977
|
+
* Get account status including migration progress
|
|
978
|
+
*/
|
|
979
|
+
async getAccountStatus() {
|
|
980
|
+
return this.xrpc("GET", "com.atproto.server.getAccountStatus", { auth: true });
|
|
981
|
+
}
|
|
982
|
+
/**
|
|
983
|
+
* Import repository from CAR file
|
|
984
|
+
*/
|
|
985
|
+
async importRepo(carBytes) {
|
|
986
|
+
return this.xrpc("POST", "com.atproto.repo.importRepo", {
|
|
987
|
+
body: carBytes,
|
|
988
|
+
contentType: "application/vnd.ipld.car",
|
|
989
|
+
auth: true
|
|
990
|
+
});
|
|
991
|
+
}
|
|
992
|
+
/**
|
|
993
|
+
* List blobs that are missing (referenced but not imported)
|
|
994
|
+
*/
|
|
995
|
+
async listMissingBlobs(limit, cursor) {
|
|
996
|
+
const params = {};
|
|
997
|
+
if (limit) params.limit = String(limit);
|
|
998
|
+
if (cursor) params.cursor = cursor;
|
|
999
|
+
return this.xrpc("GET", "com.atproto.repo.listMissingBlobs", {
|
|
1000
|
+
params,
|
|
1001
|
+
auth: true
|
|
1002
|
+
});
|
|
1003
|
+
}
|
|
1004
|
+
/**
|
|
1005
|
+
* Upload a blob
|
|
1006
|
+
*/
|
|
1007
|
+
async uploadBlob(bytes, mimeType) {
|
|
1008
|
+
return (await this.xrpc("POST", "com.atproto.repo.uploadBlob", {
|
|
1009
|
+
body: bytes,
|
|
1010
|
+
contentType: mimeType,
|
|
1011
|
+
auth: true
|
|
1012
|
+
})).blob;
|
|
1013
|
+
}
|
|
1014
|
+
/**
|
|
1015
|
+
* Reset migration state (only works on deactivated accounts)
|
|
1016
|
+
*/
|
|
1017
|
+
async resetMigration() {
|
|
1018
|
+
return this.xrpc("POST", "gg.mk.experimental.resetMigration", { auth: true });
|
|
1019
|
+
}
|
|
1020
|
+
/**
|
|
1021
|
+
* Activate account to enable writes
|
|
1022
|
+
*/
|
|
1023
|
+
async activateAccount() {
|
|
1024
|
+
await this.xrpc("POST", "com.atproto.server.activateAccount", { auth: true });
|
|
1025
|
+
}
|
|
1026
|
+
/**
|
|
1027
|
+
* Deactivate account to disable writes
|
|
1028
|
+
*/
|
|
1029
|
+
async deactivateAccount() {
|
|
1030
|
+
await this.xrpc("POST", "com.atproto.server.deactivateAccount", { auth: true });
|
|
1031
|
+
}
|
|
1032
|
+
/**
|
|
1033
|
+
* Check if the PDS is reachable
|
|
1034
|
+
*/
|
|
1035
|
+
async healthCheck() {
|
|
1036
|
+
try {
|
|
1037
|
+
return (await fetch(new URL("/xrpc/_health", this.baseUrl).toString())).ok;
|
|
1038
|
+
} catch {
|
|
1039
|
+
return false;
|
|
1040
|
+
}
|
|
1041
|
+
}
|
|
1042
|
+
};
|
|
1043
|
+
|
|
1044
|
+
//#endregion
|
|
1045
|
+
//#region src/cli/commands/migrate.ts
|
|
1046
|
+
function detectPackageManager() {
|
|
1047
|
+
const userAgent = process.env.npm_config_user_agent || "";
|
|
1048
|
+
if (userAgent.startsWith("yarn")) return "yarn";
|
|
1049
|
+
if (userAgent.startsWith("pnpm")) return "pnpm";
|
|
1050
|
+
if (userAgent.startsWith("bun")) return "bun";
|
|
1051
|
+
return "npm";
|
|
1052
|
+
}
|
|
1053
|
+
const brightNote$1 = (lines) => lines.map((l) => `\x1b[0m${l}`).join("\n");
|
|
1054
|
+
/**
|
|
1055
|
+
* Format number with commas
|
|
1056
|
+
*/
|
|
1057
|
+
function num(n) {
|
|
1058
|
+
return n.toLocaleString();
|
|
1059
|
+
}
|
|
1060
|
+
/**
|
|
1061
|
+
* Format bytes to human-readable size
|
|
1062
|
+
*/
|
|
1063
|
+
function formatBytes(bytes) {
|
|
1064
|
+
if (bytes < 1024) return `${bytes} B`;
|
|
1065
|
+
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
|
|
1066
|
+
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
|
1067
|
+
}
|
|
1068
|
+
const migrateCommand = defineCommand({
|
|
1069
|
+
meta: {
|
|
1070
|
+
name: "migrate",
|
|
1071
|
+
description: "Migrate account from source PDS to your new PDS"
|
|
1072
|
+
},
|
|
1073
|
+
args: {
|
|
1074
|
+
clean: {
|
|
1075
|
+
type: "boolean",
|
|
1076
|
+
description: "Reset migration and start fresh",
|
|
1077
|
+
default: false
|
|
1078
|
+
},
|
|
1079
|
+
dev: {
|
|
1080
|
+
type: "boolean",
|
|
1081
|
+
description: "Target local development server instead of production",
|
|
1082
|
+
default: false
|
|
1083
|
+
}
|
|
1084
|
+
},
|
|
1085
|
+
async run({ args }) {
|
|
1086
|
+
const packageManager = detectPackageManager();
|
|
1087
|
+
const pm = packageManager === "npm" ? "npm run" : packageManager;
|
|
1088
|
+
const isDev = args.dev;
|
|
1089
|
+
const vars = getVars();
|
|
1090
|
+
let targetUrl;
|
|
1091
|
+
try {
|
|
1092
|
+
targetUrl = getTargetUrl(isDev, vars.PDS_HOSTNAME);
|
|
1093
|
+
} catch (err) {
|
|
1094
|
+
p.log.error(err instanceof Error ? err.message : "Configuration error");
|
|
1095
|
+
p.log.info("Run 'pds init' first to configure your PDS.");
|
|
1096
|
+
process.exit(1);
|
|
1097
|
+
}
|
|
1098
|
+
const targetDomain = getDomain(targetUrl);
|
|
1099
|
+
p.intro("🦋 PDS Migration");
|
|
1100
|
+
const spinner = p.spinner();
|
|
1101
|
+
spinner.start(`Checking PDS at ${targetDomain}...`);
|
|
1102
|
+
const targetClient = new PDSClient(targetUrl);
|
|
1103
|
+
if (!await targetClient.healthCheck()) {
|
|
1104
|
+
spinner.stop(`PDS not responding at ${targetDomain}`);
|
|
1105
|
+
if (isDev) {
|
|
1106
|
+
p.log.error(`Your local PDS isn't running at ${targetUrl}`);
|
|
1107
|
+
p.log.info(`Start it with: ${pm} dev`);
|
|
1108
|
+
} else {
|
|
1109
|
+
p.log.error(`Your PDS isn't responding at ${targetUrl}`);
|
|
1110
|
+
p.log.info("Make sure your worker is deployed: wrangler deploy");
|
|
1111
|
+
p.log.info(`Or test locally first: ${pm} pds migrate --dev`);
|
|
1112
|
+
}
|
|
1113
|
+
p.outro("Migration cancelled.");
|
|
1114
|
+
process.exit(1);
|
|
1115
|
+
}
|
|
1116
|
+
spinner.stop(`Connected to ${targetDomain}`);
|
|
1117
|
+
const wranglerVars = getVars();
|
|
1118
|
+
const config = {
|
|
1119
|
+
...readDevVars(),
|
|
1120
|
+
...wranglerVars
|
|
1121
|
+
};
|
|
1122
|
+
const did = config.DID;
|
|
1123
|
+
const handle = config.HANDLE;
|
|
1124
|
+
const authToken = config.AUTH_TOKEN;
|
|
1125
|
+
if (!did) {
|
|
1126
|
+
p.log.error("No DID configured. Run 'pds init' first.");
|
|
1127
|
+
p.outro("Migration cancelled.");
|
|
1128
|
+
process.exit(1);
|
|
1129
|
+
}
|
|
1130
|
+
if (!authToken) {
|
|
1131
|
+
p.log.error("No AUTH_TOKEN found. Run 'pds init' first.");
|
|
1132
|
+
p.outro("Migration cancelled.");
|
|
1133
|
+
process.exit(1);
|
|
1134
|
+
}
|
|
1135
|
+
targetClient.setAuthToken(authToken);
|
|
1136
|
+
spinner.start(`Looking up @${handle}...`);
|
|
1137
|
+
const didDoc = await new DidResolver().resolve(did);
|
|
1138
|
+
if (!didDoc) {
|
|
1139
|
+
spinner.stop("Failed to resolve DID");
|
|
1140
|
+
p.log.error(`Could not resolve DID: ${did}`);
|
|
1141
|
+
p.outro("Migration cancelled.");
|
|
1142
|
+
process.exit(1);
|
|
1143
|
+
}
|
|
1144
|
+
const sourcePdsUrl = getPdsEndpoint(didDoc);
|
|
1145
|
+
if (!sourcePdsUrl) {
|
|
1146
|
+
spinner.stop("No PDS found in DID document");
|
|
1147
|
+
p.log.error("Could not find PDS endpoint in DID document");
|
|
1148
|
+
p.outro("Migration cancelled.");
|
|
1149
|
+
process.exit(1);
|
|
1150
|
+
}
|
|
1151
|
+
const sourceDomain = getDomain(sourcePdsUrl);
|
|
1152
|
+
spinner.stop(`Found your account at ${sourceDomain}`);
|
|
1153
|
+
spinner.start("Checking account status...");
|
|
1154
|
+
const pdsDisplayName = sourceDomain.endsWith(".bsky.network") ? "bsky.social" : sourceDomain;
|
|
1155
|
+
let status;
|
|
1156
|
+
try {
|
|
1157
|
+
status = await targetClient.getAccountStatus();
|
|
1158
|
+
} catch (err) {
|
|
1159
|
+
spinner.stop("Failed to get account status");
|
|
1160
|
+
p.log.error(err instanceof Error ? err.message : "Could not get account status");
|
|
1161
|
+
p.outro("Migration cancelled.");
|
|
1162
|
+
process.exit(1);
|
|
1163
|
+
}
|
|
1164
|
+
spinner.stop("Account status retrieved");
|
|
1165
|
+
if (args.clean) {
|
|
1166
|
+
if (status.active) {
|
|
1167
|
+
p.log.error("Cannot reset: account is active");
|
|
1168
|
+
p.log.info("The --clean flag only works on deactivated accounts.");
|
|
1169
|
+
p.log.info("Your account is already live");
|
|
1170
|
+
p.log.info("");
|
|
1171
|
+
p.log.info("If you need to re-import, first deactivate:");
|
|
1172
|
+
p.log.info(" pnpm pds deactivate");
|
|
1173
|
+
p.outro("Migration cancelled.");
|
|
1174
|
+
process.exit(1);
|
|
1175
|
+
}
|
|
1176
|
+
p.note(brightNote$1([
|
|
1177
|
+
pc.bold("This will permanently delete from your new PDS:"),
|
|
1178
|
+
"",
|
|
1179
|
+
` • ${num(status.repoBlocks)} repository blocks`,
|
|
1180
|
+
` • ${num(status.importedBlobs)} imported images`,
|
|
1181
|
+
" • All blob tracking data",
|
|
1182
|
+
"",
|
|
1183
|
+
pc.bold(`Your data on ${pdsDisplayName} is NOT affected.`),
|
|
1184
|
+
"You'll need to re-import everything."
|
|
1185
|
+
]), "⚠️ Reset Migration Data");
|
|
1186
|
+
const confirmReset = await p.confirm({
|
|
1187
|
+
message: "Are you sure you want to delete this data?",
|
|
1188
|
+
initialValue: false
|
|
1189
|
+
});
|
|
1190
|
+
if (p.isCancel(confirmReset) || !confirmReset) {
|
|
1191
|
+
p.cancel("Keeping your data.");
|
|
1192
|
+
process.exit(0);
|
|
1193
|
+
}
|
|
1194
|
+
spinner.start("Resetting migration state...");
|
|
1195
|
+
try {
|
|
1196
|
+
const result = await targetClient.resetMigration();
|
|
1197
|
+
spinner.stop(`Deleted ${num(result.blocksDeleted)} blocks, ${num(result.blobsCleared)} blobs`);
|
|
1198
|
+
} catch (err) {
|
|
1199
|
+
spinner.stop("Reset failed");
|
|
1200
|
+
p.log.error(err instanceof Error ? err.message : "Could not reset migration");
|
|
1201
|
+
p.outro("Migration cancelled.");
|
|
1202
|
+
process.exit(1);
|
|
1203
|
+
}
|
|
1204
|
+
p.log.success("Clean slate! Starting fresh migration...");
|
|
1205
|
+
status = await targetClient.getAccountStatus();
|
|
1206
|
+
}
|
|
1207
|
+
if (status.active) {
|
|
1208
|
+
p.log.warn(`Your account is already active at ${targetDomain}!`);
|
|
1209
|
+
p.log.info("No migration needed - your PDS is live.");
|
|
1210
|
+
p.outro("All good!");
|
|
1211
|
+
return;
|
|
1212
|
+
}
|
|
1213
|
+
spinner.start(`Fetching your account details from ${pdsDisplayName}...`);
|
|
1214
|
+
const sourceClient = new PDSClient(sourcePdsUrl);
|
|
1215
|
+
try {
|
|
1216
|
+
await sourceClient.describeRepo(did);
|
|
1217
|
+
} catch (err) {
|
|
1218
|
+
spinner.stop("Failed to fetch account details");
|
|
1219
|
+
p.log.error(err instanceof Error ? err.message : "Could not fetch account details from source PDS");
|
|
1220
|
+
p.outro("Migration cancelled.");
|
|
1221
|
+
process.exit(1);
|
|
1222
|
+
}
|
|
1223
|
+
const profileStats = await sourceClient.getProfileStats(did);
|
|
1224
|
+
spinner.stop("Account details fetched");
|
|
1225
|
+
const needsRepoImport = status.repoBlocks === 0 || status.indexedRecords === 0 && status.expectedBlobs === 0;
|
|
1226
|
+
const needsBlobSync = status.expectedBlobs - status.importedBlobs > 0 || needsRepoImport;
|
|
1227
|
+
if (!needsRepoImport && needsBlobSync) {
|
|
1228
|
+
p.log.info("Welcome back!");
|
|
1229
|
+
p.log.info("Looks like you started packing earlier. Let's pick up where we left off.");
|
|
1230
|
+
p.note([
|
|
1231
|
+
`@${handle} (${did.slice(0, 20)}...)`,
|
|
1232
|
+
"",
|
|
1233
|
+
"✓ Repository imported",
|
|
1234
|
+
`◐ Media: ${num(status.importedBlobs)}/${num(status.expectedBlobs)} images and videos transferred`
|
|
1235
|
+
].join("\n"), "Migration Progress");
|
|
1236
|
+
const continueTransfer = await p.confirm({
|
|
1237
|
+
message: "Continue transferring images and video?",
|
|
1238
|
+
initialValue: true
|
|
1239
|
+
});
|
|
1240
|
+
if (p.isCancel(continueTransfer) || !continueTransfer) {
|
|
1241
|
+
p.cancel("Migration paused.");
|
|
1242
|
+
process.exit(0);
|
|
1243
|
+
}
|
|
1244
|
+
} else if (needsRepoImport) {
|
|
1245
|
+
p.log.info("Time to pack your bags!");
|
|
1246
|
+
p.log.info("Let's clone your account to its new home in the Atmosphere.");
|
|
1247
|
+
const statsLines = profileStats ? [
|
|
1248
|
+
` 📝 ${num(profileStats.postsCount)} posts`,
|
|
1249
|
+
` 👥 ${num(profileStats.followsCount)} follows`,
|
|
1250
|
+
` ...plus all your images, likes and preferences`
|
|
1251
|
+
] : [` 📝 Posts, follows, images, likes and preferences`];
|
|
1252
|
+
p.note(brightNote$1([
|
|
1253
|
+
pc.bold(`@${handle}`) + ` (${did.slice(0, 20)}...)`,
|
|
1254
|
+
"",
|
|
1255
|
+
`Currently at: ${sourceDomain}`,
|
|
1256
|
+
`Moving to: ${targetDomain}`,
|
|
1257
|
+
"",
|
|
1258
|
+
"What you're bringing:",
|
|
1259
|
+
...statsLines
|
|
1260
|
+
]), "Your Bluesky Account 🦋");
|
|
1261
|
+
p.log.info(`This will copy your data - nothing is changed or deleted on your current PDS.`);
|
|
1262
|
+
const proceed = await p.confirm({
|
|
1263
|
+
message: "Ready to start moving?",
|
|
1264
|
+
initialValue: true
|
|
1265
|
+
});
|
|
1266
|
+
if (p.isCancel(proceed) || !proceed) {
|
|
1267
|
+
p.cancel("Migration cancelled.");
|
|
1268
|
+
process.exit(0);
|
|
1269
|
+
}
|
|
1270
|
+
} else {
|
|
1271
|
+
p.log.success("Everything looks good!");
|
|
1272
|
+
showNextSteps(pm, pdsDisplayName);
|
|
1273
|
+
p.outro("Welcome to your new home in the Atmosphere! 🦋");
|
|
1274
|
+
return;
|
|
1275
|
+
}
|
|
1276
|
+
const password = await p.password({ message: `Your password for ${pdsDisplayName}:` });
|
|
1277
|
+
if (p.isCancel(password)) {
|
|
1278
|
+
p.cancel("Migration cancelled.");
|
|
1279
|
+
process.exit(0);
|
|
1280
|
+
}
|
|
1281
|
+
spinner.start(`Logging in to ${pdsDisplayName}...`);
|
|
1282
|
+
try {
|
|
1283
|
+
const session = await sourceClient.createSession(did, password);
|
|
1284
|
+
sourceClient.setAuthToken(session.accessJwt);
|
|
1285
|
+
spinner.stop("Authenticated successfully");
|
|
1286
|
+
} catch (err) {
|
|
1287
|
+
spinner.stop("Login failed");
|
|
1288
|
+
if (err instanceof PDSClientError) p.log.error(`Authentication failed: ${err.message}`);
|
|
1289
|
+
else p.log.error(err instanceof Error ? err.message : "Authentication failed");
|
|
1290
|
+
p.outro("Migration cancelled.");
|
|
1291
|
+
process.exit(1);
|
|
1292
|
+
}
|
|
1293
|
+
if (needsRepoImport) {
|
|
1294
|
+
spinner.start(`Exporting your repository from ${pdsDisplayName}...`);
|
|
1295
|
+
let carBytes;
|
|
1296
|
+
try {
|
|
1297
|
+
carBytes = await sourceClient.getRepo(did);
|
|
1298
|
+
spinner.stop(`Downloaded ${formatBytes(carBytes.length)} from ${sourceDomain}`);
|
|
1299
|
+
} catch (err) {
|
|
1300
|
+
spinner.stop("Export failed");
|
|
1301
|
+
p.log.error(err instanceof Error ? err.message : "Could not export repository");
|
|
1302
|
+
p.outro("Migration cancelled.");
|
|
1303
|
+
process.exit(1);
|
|
1304
|
+
}
|
|
1305
|
+
spinner.start(`Importing to ${targetDomain}...`);
|
|
1306
|
+
try {
|
|
1307
|
+
await targetClient.importRepo(carBytes);
|
|
1308
|
+
spinner.stop("Repository imported");
|
|
1309
|
+
} catch (err) {
|
|
1310
|
+
spinner.stop("Import failed");
|
|
1311
|
+
p.log.error(err instanceof Error ? err.message : "Could not import repository");
|
|
1312
|
+
p.outro("Migration cancelled.");
|
|
1313
|
+
process.exit(1);
|
|
1314
|
+
}
|
|
1315
|
+
status = await targetClient.getAccountStatus();
|
|
1316
|
+
}
|
|
1317
|
+
spinner.start("Migrating your preferences...");
|
|
1318
|
+
try {
|
|
1319
|
+
const preferences = await sourceClient.getPreferences();
|
|
1320
|
+
if (preferences.length > 0) {
|
|
1321
|
+
await targetClient.putPreferences(preferences);
|
|
1322
|
+
spinner.stop(`Migrated ${preferences.length} preference${preferences.length === 1 ? "" : "s"}`);
|
|
1323
|
+
} else spinner.stop("No preferences to migrate");
|
|
1324
|
+
} catch (err) {
|
|
1325
|
+
spinner.stop("Skipped preferences (not available)");
|
|
1326
|
+
}
if (status.expectedBlobs - status.importedBlobs > 0) {
let synced = 0;
let totalBlobs = 0;
let cursor;
let failedBlobs = [];
const progressBar = (current, total) => {
const width = 20;
const ratio = total > 0 ? Math.min(1, current / total) : 0;
const filled = Math.round(ratio * width);
const empty = width - filled;
return `${"█".repeat(filled)}${"░".repeat(empty)} ${current}/${total}`;
};
spinner.start("Counting images to transfer...");
let countCursor;
do {
const page = await targetClient.listMissingBlobs(500, countCursor);
totalBlobs += page.blobs.length;
countCursor = page.cursor;
} while (countCursor);
spinner.message(`Transferring media ${progressBar(0, totalBlobs)}`);
do {
const page = await targetClient.listMissingBlobs(100, cursor);
cursor = page.cursor;
for (const blob of page.blobs) try {
const { bytes, mimeType } = await sourceClient.getBlob(did, blob.cid);
await targetClient.uploadBlob(bytes, mimeType);
synced++;
spinner.message(`Transferring media ${progressBar(synced, totalBlobs)}`);
} catch (err) {
synced++;
failedBlobs.push(blob.cid);
spinner.message(`Transferring media ${progressBar(synced, totalBlobs)}`);
}
} while (cursor);
if (failedBlobs.length > 0) {
spinner.stop(`Transferred ${num(synced - failedBlobs.length)} images and videos (${failedBlobs.length} failed)`);
p.log.warn(`Run 'pds migrate' again to retry failed transfers.`);
} else spinner.stop(`Transferred ${num(synced)} images and videos`);
}
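/*
 * Blob-transfer notes: the first loop only counts missing blobs (pages of 500)
 * so the progress bar has a stable total; the second loop re-pages (100 at a
 * time) and copies each blob from the source to the target. `synced` counts
 * processed blobs including failures, so the success count reported above is
 * synced - failedBlobs.length. Failed CIDs are not retried in-process; running
 * 'pds migrate' again picks them up because they still appear in
 * listMissingBlobs on the target.
 */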
spinner.start("Verifying migration...");
const finalStatus = await targetClient.getAccountStatus();
spinner.stop("Verification complete");
if (finalStatus.importedBlobs >= finalStatus.expectedBlobs) p.log.success("All packed and moved!");
else {
p.log.warn(`Migration partially complete. ${finalStatus.expectedBlobs - finalStatus.importedBlobs} images remaining.`);
p.log.info("Run 'pds migrate' again to continue.");
}
showNextSteps(pm, pdsDisplayName);
p.outro("Welcome to your new home in the Atmosphere! 🦋");
}
});
function showNextSteps(pm, sourceDomain) {
p.note(brightNote$1([
pc.bold("Your data is safe in your new PDS."),
"Two more steps to go live:",
"",
pc.bold("1. Update your identity"),
" Tell the network where you live now.",
` (Requires email verification from ${sourceDomain})`,
"",
pc.bold("2. Flip the switch"),
` ${pm} pds activate`,
"",
"Docs: https://atproto.com/guides/account-migration"
]), "Almost there!");
}

//#endregion
//#region src/cli/commands/activate.ts
/**
* Activate account command - enables writes after migration
*/
const activateCommand = defineCommand({
meta: {
name: "activate",
description: "Activate your account to enable writes and go live"
},
args: { dev: {
type: "boolean",
description: "Target local development server instead of production",
default: false
} },
async run({ args }) {
const isDev = args.dev;
p.intro("🦋 Activate Account");
const vars = getVars();
let targetUrl;
try {
targetUrl = getTargetUrl(isDev, vars.PDS_HOSTNAME);
} catch (err) {
p.log.error(err instanceof Error ? err.message : "Configuration error");
p.log.info("Run 'pds init' first to configure your PDS.");
process.exit(1);
}
const targetDomain = getDomain(targetUrl);
const wranglerVars = getVars();
const config = {
...readDevVars(),
...wranglerVars
};
const authToken = config.AUTH_TOKEN;
const handle = config.HANDLE;
if (!authToken) {
p.log.error("No AUTH_TOKEN found. Run 'pds init' first.");
p.outro("Activation cancelled.");
process.exit(1);
}
const client = new PDSClient(targetUrl, authToken);
const spinner = p.spinner();
spinner.start(`Checking PDS at ${targetDomain}...`);
if (!await client.healthCheck()) {
spinner.stop(`PDS not responding at ${targetDomain}`);
p.log.error(`Your PDS isn't responding at ${targetUrl}`);
if (!isDev) p.log.info("Make sure your worker is deployed: wrangler deploy");
p.outro("Activation cancelled.");
process.exit(1);
}
spinner.stop(`Connected to ${targetDomain}`);
spinner.start("Checking account status...");
const status = await client.getAccountStatus();
spinner.stop("Account status retrieved");
if (status.active) {
p.log.warn("Your account is already active!");
p.log.info("No action needed - you're live in the Atmosphere. 🦋");
p.outro("All good!");
return;
}
p.note([
`@${handle || "your-handle"}`,
"",
"This will enable writes and make your account live.",
"Make sure you've:",
" ✓ Updated your DID document to point here",
" ✓ Completed email verification (if required)"
].join("\n"), "Ready to go live?");
const confirm = await p.confirm({
message: "Activate account?",
initialValue: true
});
if (p.isCancel(confirm) || !confirm) {
p.cancel("Activation cancelled.");
process.exit(0);
}
spinner.start("Activating account...");
try {
await client.activateAccount();
spinner.stop("Account activated!");
} catch (err) {
spinner.stop("Activation failed");
p.log.error(err instanceof Error ? err.message : "Could not activate account");
p.outro("Activation failed.");
process.exit(1);
}
p.log.success("Welcome to the Atmosphere! 🦋");
p.log.info("Your account is now live and accepting writes.");
p.outro("All set!");
}
});
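/*
 * Typical usage once the DID document points at the new PDS (sketch; assumes
 * the package scripts expose this CLI as `pds`, adjust for your package
 * manager):
 *
 *   pnpm pds activate          # enable writes on the deployed worker
 *   pnpm pds activate --dev    # target a local development server instead
 */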

//#endregion
//#region src/cli/commands/deactivate.ts
/**
* Deactivate account command - disables writes for re-import
*/
const brightNote = (lines) => lines.map((l) => `\x1b[0m${l}`).join("\n");
const bold = (text) => pc.bold(text);
const deactivateCommand = defineCommand({
meta: {
name: "deactivate",
description: "Deactivate your account to enable re-import"
},
args: { dev: {
type: "boolean",
description: "Target local development server instead of production",
default: false
} },
async run({ args }) {
const isDev = args.dev;
p.intro("🦋 Deactivate Account");
const vars = getVars();
let targetUrl;
try {
targetUrl = getTargetUrl(isDev, vars.PDS_HOSTNAME);
} catch (err) {
p.log.error(err instanceof Error ? err.message : "Configuration error");
p.log.info("Run 'pds init' first to configure your PDS.");
process.exit(1);
}
const targetDomain = getDomain(targetUrl);
const wranglerVars = getVars();
const config = {
...readDevVars(),
...wranglerVars
};
const authToken = config.AUTH_TOKEN;
const handle = config.HANDLE;
if (!authToken) {
p.log.error("No AUTH_TOKEN found. Run 'pds init' first.");
p.outro("Deactivation cancelled.");
process.exit(1);
}
const client = new PDSClient(targetUrl, authToken);
const spinner = p.spinner();
spinner.start(`Checking PDS at ${targetDomain}...`);
if (!await client.healthCheck()) {
spinner.stop(`PDS not responding at ${targetDomain}`);
p.log.error(`Your PDS isn't responding at ${targetUrl}`);
if (!isDev) p.log.info("Make sure your worker is deployed: wrangler deploy");
p.outro("Deactivation cancelled.");
process.exit(1);
}
spinner.stop(`Connected to ${targetDomain}`);
spinner.start("Checking account status...");
const status = await client.getAccountStatus();
spinner.stop("Account status retrieved");
if (!status.active) {
p.log.warn("Your account is already deactivated.");
p.log.info("Writes are disabled. Use 'pds activate' to re-enable.");
p.outro("Already deactivated.");
return;
}
p.note(brightNote([
bold(`⚠️ WARNING: This will disable writes for @${handle || "your-handle"}`),
"",
"Your account will:",
" • Stop accepting new posts, follows, and other writes",
" • Remain readable in the Atmosphere",
" • Allow you to use 'pds migrate --clean' to re-import",
"",
bold("Only deactivate if you need to re-import your data.")
]), "Deactivate Account");
const confirm = await p.confirm({
message: "Are you sure you want to deactivate?",
initialValue: false
});
if (p.isCancel(confirm) || !confirm) {
p.cancel("Deactivation cancelled.");
process.exit(0);
}
spinner.start("Deactivating account...");
try {
await client.deactivateAccount();
spinner.stop("Account deactivated");
} catch (err) {
spinner.stop("Deactivation failed");
p.log.error(err instanceof Error ? err.message : "Could not deactivate account");
p.outro("Deactivation failed.");
process.exit(1);
}
p.log.success("Account deactivated");
p.log.info("Writes are now disabled.");
p.log.info("");
p.log.info("To re-import your data:");
p.log.info(" pnpm pds migrate --clean");
p.log.info("");
p.log.info("To re-enable writes:");
p.log.info(" pnpm pds activate");
p.outro("Deactivated.");
}
});
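/*
 * Deactivation is only needed when you intend to wipe and re-import data into
 * this PDS ('pds migrate --clean'); it disables writes but the account remains
 * readable in the Atmosphere. Re-enable writes afterwards with 'pds activate'.
 */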

//#endregion
//#region src/cli/index.ts
/**
* PDS CLI - Setup and management for AT Protocol PDS on Cloudflare Workers
*/
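/*
 * Example invocations (assuming the package bin is exposed as `pds`, e.g. via
 * `npx pds` or a package.json script):
 *
 *   pds init         # first-time setup; per the error hints above it
 *                    # configures PDS_HOSTNAME, HANDLE, and AUTH_TOKEN
 *   pds migrate      # move an existing account onto this PDS
 *   pds activate     # enable writes after migration
 *   pds deactivate   # disable writes to allow re-import
 */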
runMain(defineCommand({
meta: {
name: "pds",
version: "0.0.0",
description: "AT Protocol PDS setup and management CLI"
},
subCommands: {
init: initCommand,
secret: secretCommand,
migrate: migrateCommand,
activate: activateCommand,
deactivate: deactivateCommand
}
}));

//#endregion
export { };