@edge-base/cli 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +182 -0
- package/dist/commands/admin.d.ts +10 -0
- package/dist/commands/admin.d.ts.map +1 -0
- package/dist/commands/admin.js +307 -0
- package/dist/commands/admin.js.map +1 -0
- package/dist/commands/backup.d.ts +148 -0
- package/dist/commands/backup.d.ts.map +1 -0
- package/dist/commands/backup.js +1247 -0
- package/dist/commands/backup.js.map +1 -0
- package/dist/commands/completion.d.ts +3 -0
- package/dist/commands/completion.d.ts.map +1 -0
- package/dist/commands/completion.js +168 -0
- package/dist/commands/completion.js.map +1 -0
- package/dist/commands/create-plugin.d.ts +3 -0
- package/dist/commands/create-plugin.d.ts.map +1 -0
- package/dist/commands/create-plugin.js +208 -0
- package/dist/commands/create-plugin.js.map +1 -0
- package/dist/commands/deploy.d.ts +146 -0
- package/dist/commands/deploy.d.ts.map +1 -0
- package/dist/commands/deploy.js +1823 -0
- package/dist/commands/deploy.js.map +1 -0
- package/dist/commands/describe.d.ts +45 -0
- package/dist/commands/describe.d.ts.map +1 -0
- package/dist/commands/describe.js +114 -0
- package/dist/commands/describe.js.map +1 -0
- package/dist/commands/destroy.d.ts +13 -0
- package/dist/commands/destroy.d.ts.map +1 -0
- package/dist/commands/destroy.js +642 -0
- package/dist/commands/destroy.js.map +1 -0
- package/dist/commands/dev.d.ts +80 -0
- package/dist/commands/dev.d.ts.map +1 -0
- package/dist/commands/dev.js +1131 -0
- package/dist/commands/dev.js.map +1 -0
- package/dist/commands/docker.d.ts +22 -0
- package/dist/commands/docker.d.ts.map +1 -0
- package/dist/commands/docker.js +373 -0
- package/dist/commands/docker.js.map +1 -0
- package/dist/commands/export.d.ts +15 -0
- package/dist/commands/export.d.ts.map +1 -0
- package/dist/commands/export.js +142 -0
- package/dist/commands/export.js.map +1 -0
- package/dist/commands/init.d.ts +7 -0
- package/dist/commands/init.d.ts.map +1 -0
- package/dist/commands/init.js +506 -0
- package/dist/commands/init.js.map +1 -0
- package/dist/commands/keys.d.ts +23 -0
- package/dist/commands/keys.d.ts.map +1 -0
- package/dist/commands/keys.js +347 -0
- package/dist/commands/keys.js.map +1 -0
- package/dist/commands/logs.d.ts +17 -0
- package/dist/commands/logs.d.ts.map +1 -0
- package/dist/commands/logs.js +104 -0
- package/dist/commands/logs.js.map +1 -0
- package/dist/commands/migrate.d.ts +29 -0
- package/dist/commands/migrate.d.ts.map +1 -0
- package/dist/commands/migrate.js +302 -0
- package/dist/commands/migrate.js.map +1 -0
- package/dist/commands/migration.d.ts +18 -0
- package/dist/commands/migration.d.ts.map +1 -0
- package/dist/commands/migration.js +114 -0
- package/dist/commands/migration.js.map +1 -0
- package/dist/commands/neon.d.ts +66 -0
- package/dist/commands/neon.d.ts.map +1 -0
- package/dist/commands/neon.js +600 -0
- package/dist/commands/neon.js.map +1 -0
- package/dist/commands/plugins.d.ts +9 -0
- package/dist/commands/plugins.d.ts.map +1 -0
- package/dist/commands/plugins.js +295 -0
- package/dist/commands/plugins.js.map +1 -0
- package/dist/commands/realtime.d.ts +3 -0
- package/dist/commands/realtime.d.ts.map +1 -0
- package/dist/commands/realtime.js +71 -0
- package/dist/commands/realtime.js.map +1 -0
- package/dist/commands/secret.d.ts +7 -0
- package/dist/commands/secret.d.ts.map +1 -0
- package/dist/commands/secret.js +180 -0
- package/dist/commands/secret.js.map +1 -0
- package/dist/commands/seed.d.ts +21 -0
- package/dist/commands/seed.d.ts.map +1 -0
- package/dist/commands/seed.js +325 -0
- package/dist/commands/seed.js.map +1 -0
- package/dist/commands/telemetry.d.ts +12 -0
- package/dist/commands/telemetry.d.ts.map +1 -0
- package/dist/commands/telemetry.js +57 -0
- package/dist/commands/telemetry.js.map +1 -0
- package/dist/commands/typegen.d.ts +26 -0
- package/dist/commands/typegen.d.ts.map +1 -0
- package/dist/commands/typegen.js +212 -0
- package/dist/commands/typegen.js.map +1 -0
- package/dist/commands/upgrade.d.ts +29 -0
- package/dist/commands/upgrade.d.ts.map +1 -0
- package/dist/commands/upgrade.js +265 -0
- package/dist/commands/upgrade.js.map +1 -0
- package/dist/commands/webhook-test.d.ts +3 -0
- package/dist/commands/webhook-test.d.ts.map +1 -0
- package/dist/commands/webhook-test.js +133 -0
- package/dist/commands/webhook-test.js.map +1 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +183 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/agent-contract.d.ts +36 -0
- package/dist/lib/agent-contract.d.ts.map +1 -0
- package/dist/lib/agent-contract.js +78 -0
- package/dist/lib/agent-contract.js.map +1 -0
- package/dist/lib/cf-auth.d.ts +76 -0
- package/dist/lib/cf-auth.d.ts.map +1 -0
- package/dist/lib/cf-auth.js +321 -0
- package/dist/lib/cf-auth.js.map +1 -0
- package/dist/lib/cli-context.d.ts +23 -0
- package/dist/lib/cli-context.d.ts.map +1 -0
- package/dist/lib/cli-context.js +40 -0
- package/dist/lib/cli-context.js.map +1 -0
- package/dist/lib/cloudflare-deploy-manifest.d.ts +26 -0
- package/dist/lib/cloudflare-deploy-manifest.d.ts.map +1 -0
- package/dist/lib/cloudflare-deploy-manifest.js +107 -0
- package/dist/lib/cloudflare-deploy-manifest.js.map +1 -0
- package/dist/lib/cloudflare-wrangler-resources.d.ts +32 -0
- package/dist/lib/cloudflare-wrangler-resources.d.ts.map +1 -0
- package/dist/lib/cloudflare-wrangler-resources.js +59 -0
- package/dist/lib/cloudflare-wrangler-resources.js.map +1 -0
- package/dist/lib/config-editor.d.ts +139 -0
- package/dist/lib/config-editor.d.ts.map +1 -0
- package/dist/lib/config-editor.js +1188 -0
- package/dist/lib/config-editor.js.map +1 -0
- package/dist/lib/deploy-shared.d.ts +55 -0
- package/dist/lib/deploy-shared.d.ts.map +1 -0
- package/dist/lib/deploy-shared.js +183 -0
- package/dist/lib/deploy-shared.js.map +1 -0
- package/dist/lib/dev-sidecar.d.ts +31 -0
- package/dist/lib/dev-sidecar.d.ts.map +1 -0
- package/dist/lib/dev-sidecar.js +1058 -0
- package/dist/lib/dev-sidecar.js.map +1 -0
- package/dist/lib/fetch-with-timeout.d.ts +14 -0
- package/dist/lib/fetch-with-timeout.d.ts.map +1 -0
- package/dist/lib/fetch-with-timeout.js +29 -0
- package/dist/lib/fetch-with-timeout.js.map +1 -0
- package/dist/lib/function-registry.d.ts +56 -0
- package/dist/lib/function-registry.d.ts.map +1 -0
- package/dist/lib/function-registry.js +210 -0
- package/dist/lib/function-registry.js.map +1 -0
- package/dist/lib/load-config.d.ts +24 -0
- package/dist/lib/load-config.d.ts.map +1 -0
- package/dist/lib/load-config.js +263 -0
- package/dist/lib/load-config.js.map +1 -0
- package/dist/lib/local-secrets.d.ts +2 -0
- package/dist/lib/local-secrets.d.ts.map +1 -0
- package/dist/lib/local-secrets.js +60 -0
- package/dist/lib/local-secrets.js.map +1 -0
- package/dist/lib/managed-resource-names.d.ts +4 -0
- package/dist/lib/managed-resource-names.d.ts.map +1 -0
- package/dist/lib/managed-resource-names.js +19 -0
- package/dist/lib/managed-resource-names.js.map +1 -0
- package/dist/lib/migrator.d.ts +57 -0
- package/dist/lib/migrator.d.ts.map +1 -0
- package/dist/lib/migrator.js +321 -0
- package/dist/lib/migrator.js.map +1 -0
- package/dist/lib/neon.d.ts +41 -0
- package/dist/lib/neon.d.ts.map +1 -0
- package/dist/lib/neon.js +325 -0
- package/dist/lib/neon.js.map +1 -0
- package/dist/lib/node-tools.d.ts +10 -0
- package/dist/lib/node-tools.d.ts.map +1 -0
- package/dist/lib/node-tools.js +32 -0
- package/dist/lib/node-tools.js.map +1 -0
- package/dist/lib/npm.d.ts +8 -0
- package/dist/lib/npm.d.ts.map +1 -0
- package/dist/lib/npm.js +10 -0
- package/dist/lib/npm.js.map +1 -0
- package/dist/lib/npx.d.ts +9 -0
- package/dist/lib/npx.d.ts.map +1 -0
- package/dist/lib/npx.js +11 -0
- package/dist/lib/npx.js.map +1 -0
- package/dist/lib/project-runtime.d.ts +38 -0
- package/dist/lib/project-runtime.d.ts.map +1 -0
- package/dist/lib/project-runtime.js +122 -0
- package/dist/lib/project-runtime.js.map +1 -0
- package/dist/lib/prompts.d.ts +28 -0
- package/dist/lib/prompts.d.ts.map +1 -0
- package/dist/lib/prompts.js +85 -0
- package/dist/lib/prompts.js.map +1 -0
- package/dist/lib/rate-limit-bindings.d.ts +11 -0
- package/dist/lib/rate-limit-bindings.d.ts.map +1 -0
- package/dist/lib/rate-limit-bindings.js +52 -0
- package/dist/lib/rate-limit-bindings.js.map +1 -0
- package/dist/lib/realtime-provision.d.ts +22 -0
- package/dist/lib/realtime-provision.d.ts.map +1 -0
- package/dist/lib/realtime-provision.js +246 -0
- package/dist/lib/realtime-provision.js.map +1 -0
- package/dist/lib/resolve-options.d.ts +42 -0
- package/dist/lib/resolve-options.d.ts.map +1 -0
- package/dist/lib/resolve-options.js +98 -0
- package/dist/lib/resolve-options.js.map +1 -0
- package/dist/lib/runtime-scaffold.d.ts +17 -0
- package/dist/lib/runtime-scaffold.d.ts.map +1 -0
- package/dist/lib/runtime-scaffold.js +366 -0
- package/dist/lib/runtime-scaffold.js.map +1 -0
- package/dist/lib/schema-check.d.ts +79 -0
- package/dist/lib/schema-check.d.ts.map +1 -0
- package/dist/lib/schema-check.js +347 -0
- package/dist/lib/schema-check.js.map +1 -0
- package/dist/lib/spinner.d.ts +20 -0
- package/dist/lib/spinner.d.ts.map +1 -0
- package/dist/lib/spinner.js +42 -0
- package/dist/lib/spinner.js.map +1 -0
- package/dist/lib/telemetry.d.ts +37 -0
- package/dist/lib/telemetry.d.ts.map +1 -0
- package/dist/lib/telemetry.js +98 -0
- package/dist/lib/telemetry.js.map +1 -0
- package/dist/lib/turnstile-provision.d.ts +27 -0
- package/dist/lib/turnstile-provision.d.ts.map +1 -0
- package/dist/lib/turnstile-provision.js +144 -0
- package/dist/lib/turnstile-provision.js.map +1 -0
- package/dist/lib/update-check.d.ts +13 -0
- package/dist/lib/update-check.d.ts.map +1 -0
- package/dist/lib/update-check.js +110 -0
- package/dist/lib/update-check.js.map +1 -0
- package/dist/lib/wrangler-secrets.d.ts +3 -0
- package/dist/lib/wrangler-secrets.d.ts.map +1 -0
- package/dist/lib/wrangler-secrets.js +32 -0
- package/dist/lib/wrangler-secrets.js.map +1 -0
- package/dist/lib/wrangler.d.ts +9 -0
- package/dist/lib/wrangler.d.ts.map +1 -0
- package/dist/lib/wrangler.js +84 -0
- package/dist/lib/wrangler.js.map +1 -0
- package/dist/templates/plugin/README.md.tmpl +91 -0
- package/dist/templates/plugin/client/js/package.json.tmpl +23 -0
- package/dist/templates/plugin/client/js/src/index.ts.tmpl +68 -0
- package/dist/templates/plugin/client/js/tsconfig.json.tmpl +14 -0
- package/dist/templates/plugin/server/package.json.tmpl +19 -0
- package/dist/templates/plugin/server/src/index.ts.tmpl +59 -0
- package/dist/templates/plugin/server/tsconfig.json.tmpl +14 -0
- package/llms.txt +94 -0
- package/package.json +60 -0
|
@@ -0,0 +1,1247 @@
|
|
|
1
|
+
import { Command } from 'commander';
|
|
2
|
+
import { writeFileSync, readFileSync, existsSync, mkdirSync, chmodSync, readdirSync, statSync, rmSync, } from 'node:fs';
|
|
3
|
+
import { execFileSync } from 'node:child_process';
|
|
4
|
+
import { resolve, join, dirname } from 'node:path';
|
|
5
|
+
import { createInterface } from 'node:readline';
|
|
6
|
+
import chalk from 'chalk';
|
|
7
|
+
import { isCliStructuredError, raiseCliError, raiseNeedsInput } from '../lib/agent-contract.js';
|
|
8
|
+
import { spin } from '../lib/spinner.js';
|
|
9
|
+
import { isJson, isNonInteractive } from '../lib/cli-context.js';
|
|
10
|
+
import { fetchWithTimeout } from '../lib/fetch-with-timeout.js';
|
|
11
|
+
import { npxCommand } from '../lib/npx.js';
|
|
12
|
+
import { resolveServiceKey, resolveServerUrl } from '../lib/resolve-options.js';
|
|
13
|
+
/**
 * JSON helper for the worker's backup admin API.
 * Sends `method` to `/admin/api/backup<path>` with the service-key header;
 * a JSON body is attached for POST requests only.
 * @returns {Promise<unknown>} the parsed JSON response body.
 * @throws {Error} on any non-2xx response, including status and body text.
 */
async function apiCall(opts, path, method = 'POST', body) {
    const base = opts.url.replace(/\/$/, '');
    const headers = {
        'Content-Type': 'application/json',
        'X-EdgeBase-Service-Key': opts.serviceKey,
    };
    const init = { method, headers };
    // Only POST requests carry a payload.
    if (body && method === 'POST') {
        init.body = JSON.stringify(body);
    }
    const resp = await fetchWithTimeout(`${base}/admin/api/backup${path}`, init);
    if (!resp.ok) {
        const text = await resp.text();
        throw new Error(`API error (${resp.status}): ${text}`);
    }
    return resp.json();
}
|
|
32
|
+
/**
 * POST to the backup admin API and return the raw response bytes.
 * @returns {Promise<{buffer: ArrayBuffer, contentType: string}>} response
 *   body plus its MIME type (defaulting to application/octet-stream).
 * @throws {Error} on any non-2xx response, including status and body text.
 */
async function apiBinary(opts, path) {
    const base = opts.url.replace(/\/$/, '');
    const init = {
        method: 'POST',
        headers: { 'X-EdgeBase-Service-Key': opts.serviceKey },
    };
    const resp = await fetchWithTimeout(`${base}/admin/api/backup${path}`, init);
    if (!resp.ok) {
        throw new Error(`API error (${resp.status}): ${await resp.text()}`);
    }
    const buffer = await resp.arrayBuffer();
    const contentType = resp.headers.get('content-type') || 'application/octet-stream';
    return { buffer, contentType };
}
|
|
46
|
+
/**
 * POST an arbitrary payload (e.g. a binary file body) to the backup admin API.
 * Resolves with no value on success.
 * @throws {Error} on any non-2xx response, including status and body text.
 */
async function apiUpload(opts, path, body, contentType) {
    const base = opts.url.replace(/\/$/, '');
    const init = {
        method: 'POST',
        headers: {
            'X-EdgeBase-Service-Key': opts.serviceKey,
            'Content-Type': contentType,
        },
        body,
    };
    const resp = await fetchWithTimeout(`${base}/admin/api/backup${path}`, init);
    if (!resp.ok) {
        throw new Error(`API error (${resp.status}): ${await resp.text()}`);
    }
}
|
|
60
|
+
// ─── Helpers ───
|
|
61
|
+
/** True for a non-null, non-array object (a plain "record"). */
function isObjectRecord(value) {
    if (value === null || value === undefined) {
        return false;
    }
    return typeof value === 'object' && !Array.isArray(value);
}
|
|
64
|
+
/** True for a plain object whose values are all strings. */
function isStringRecord(value) {
    if (!isObjectRecord(value)) {
        return false;
    }
    for (const entry of Object.values(value)) {
        if (typeof entry !== 'string') {
            return false;
        }
    }
    return true;
}
|
|
67
|
+
/** True for a plain object whose values are all arrays (element type unknown). */
function isUnknownArrayRecord(value) {
    if (!isObjectRecord(value)) {
        return false;
    }
    for (const entry of Object.values(value)) {
        if (!Array.isArray(entry)) {
            return false;
        }
    }
    return true;
}
|
|
70
|
+
/**
 * Structural check for one serialized Durable Object dump:
 * requires doName, a doType of 'database'|'auth', a timestamp, a
 * tables record of arrays, and an optional string-record schema.
 */
function isDODump(value) {
    return (
        isObjectRecord(value) &&
        typeof value.doName === 'string' &&
        (value.doType === 'database' || value.doType === 'auth') &&
        typeof value.timestamp === 'string' &&
        isUnknownArrayRecord(value.tables) &&
        (value.schema === undefined || isStringRecord(value.schema))
    );
}
|
|
85
|
+
/** True when every value of the record is a valid DO dump. */
function isDODumpRecord(value) {
    if (!isObjectRecord(value)) {
        return false;
    }
    return Object.values(value).every((entry) => isDODump(entry));
}
|
|
88
|
+
/** True for an array whose elements are all strings (including the empty array). */
function isStringArray(value) {
    if (!Array.isArray(value)) {
        return false;
    }
    for (const entry of value) {
        if (typeof entry !== 'string') {
            return false;
        }
    }
    return true;
}
|
|
91
|
+
/**
 * Structural check for a serialized data-namespace dump: type 'data',
 * namespace + timestamp strings, a tables record of arrays, and an
 * optional tableOrder string array.
 */
function isDataNamespaceDump(value) {
    return (
        isObjectRecord(value) &&
        value.type === 'data' &&
        typeof value.namespace === 'string' &&
        typeof value.timestamp === 'string' &&
        isUnknownArrayRecord(value.tables) &&
        (value.tableOrder === undefined || isStringArray(value.tableOrder))
    );
}
|
|
106
|
+
/** True when every value of the record is a valid data-namespace dump. */
function isDataNamespaceDumpRecord(value) {
    if (!isObjectRecord(value)) {
        return false;
    }
    return Object.values(value).every((entry) => isDataNamespaceDump(entry));
}
|
|
109
|
+
/** Structural check for one storage object entry: key/etag/contentType strings and a finite numeric size. */
function isStorageObject(value) {
    if (!isObjectRecord(value)) {
        return false;
    }
    if (typeof value.key !== 'string') {
        return false;
    }
    if (typeof value.size !== 'number' || !Number.isFinite(value.size)) {
        return false;
    }
    if (typeof value.etag !== 'string') {
        return false;
    }
    return typeof value.contentType === 'string';
}
|
|
117
|
+
/** Structural check for the storage section: a record with an `objects` array of storage objects. */
function isStorageDump(value) {
    if (!isObjectRecord(value)) {
        return false;
    }
    const { objects } = value;
    return Array.isArray(objects) && objects.every(isStorageObject);
}
|
|
120
|
+
/**
 * Validate that `raw` is a well-formed v1.1 backup file.
 * Checks run in a fixed order and the first failing one throws, so
 * callers always see the most fundamental problem first.
 * @throws {Error} describing the first missing or invalid section.
 */
function assertBackupFileV1_1(raw) {
    if (!isObjectRecord(raw)) {
        throw new Error('Backup file must be a JSON object.');
    }
    if (raw.version !== '1.1') {
        throw new Error(`Unsupported backup version: ${String(raw.version ?? 'unknown')}`);
    }
    // Each rule is [predicate, message]; predicates are thunks evaluated in
    // order, so later ones may safely dereference fields earlier ones vetted
    // (e.g. raw.control.d1 only runs after raw.control passed).
    const rules = [
        [() => typeof raw.timestamp === 'string' && raw.timestamp.length > 0,
            'Backup file is missing timestamp.'],
        [() => typeof raw.source === 'string' && raw.source.length > 0,
            'Backup file is missing source.'],
        [() => isObjectRecord(raw.control),
            'Backup file is missing control metadata.'],
        [() => isUnknownArrayRecord(raw.control.d1),
            'Backup file is missing control.d1.'],
        [() => isObjectRecord(raw.auth),
            'Backup file is missing auth metadata.'],
        [() => isUnknownArrayRecord(raw.auth.d1),
            'Backup file is missing auth.d1.'],
        [() => isDODumpRecord(raw.auth.shards),
            'Backup file is missing auth.shards.'],
        [() => isDODumpRecord(raw.databases),
            'Backup file is missing databases.'],
        [() => raw.dataNamespaces === undefined || isDataNamespaceDumpRecord(raw.dataNamespaces),
            'Backup file has invalid data namespace dumps.'],
        [() => raw.config === undefined || isObjectRecord(raw.config),
            'Backup file has invalid config metadata.'],
        [() => raw.secrets === undefined || isStringRecord(raw.secrets),
            'Backup file has invalid secrets metadata.'],
        [() => raw.storage === undefined || isStorageDump(raw.storage),
            'Backup file has invalid storage metadata.'],
    ];
    for (const [passes, message] of rules) {
        if (!passes()) {
            throw new Error(message);
        }
    }
}
|
|
164
|
+
/**
 * Resolve API connection settings from CLI flags / environment.
 * Resolution order matters: the server URL is resolved before the
 * service key (each resolver may raise its own CLI error).
 * @returns {{url: string, serviceKey: string}}
 */
function resolveOptions(options) {
    const url = resolveServerUrl(options);
    const serviceKey = resolveServiceKey(options);
    return { url, serviceKey };
}
|
|
169
|
+
/**
 * Run async tasks with a concurrency limit.
 * Returns results in the same order as the input tasks.
 * A rejection from any task rejects the whole call (fail-fast).
 * @param {Array<() => Promise<unknown>>} tasks - zero-arg async thunks.
 * @param {number} limit - maximum number of tasks in flight; values < 1
 *   are clamped to 1 (previously a non-positive limit spawned zero
 *   workers and silently resolved with an array of `undefined`).
 * @returns {Promise<unknown[]>} results aligned with `tasks`.
 */
async function throttle(tasks, limit) {
    const results = new Array(tasks.length);
    // Clamp: never fewer than one worker, never more than there are tasks.
    const workerCount = Math.max(1, Math.min(limit, tasks.length));
    let nextIdx = 0;
    async function worker() {
        // Single-threaded event loop: read-then-increment is atomic here.
        while (nextIdx < tasks.length) {
            const idx = nextIdx++;
            results[idx] = await tasks[idx]();
        }
    }
    const workers = Array.from({ length: workerCount }, () => worker());
    await Promise.all(workers);
    return results;
}
|
|
186
|
+
/**
 * Run async tasks with a concurrency limit and collect both fulfilled and
 * rejected outcomes (like Promise.allSettled). Results stay aligned with
 * the input task order; this function itself never rejects for task errors.
 * @param {Array<() => Promise<unknown>>} tasks - zero-arg async thunks.
 * @param {number} limit - maximum number of tasks in flight; values < 1
 *   are clamped to 1 (previously a non-positive limit spawned zero
 *   workers and silently resolved with an array of `undefined`).
 * @returns {Promise<Array<{status: 'fulfilled', value: unknown} | {status: 'rejected', reason: unknown}>>}
 */
async function throttleSettled(tasks, limit) {
    const results = new Array(tasks.length);
    // Clamp: never fewer than one worker, never more than there are tasks.
    const workerCount = Math.max(1, Math.min(limit, tasks.length));
    let nextIdx = 0;
    async function worker() {
        while (nextIdx < tasks.length) {
            const idx = nextIdx++;
            try {
                results[idx] = {
                    status: 'fulfilled',
                    value: await tasks[idx](),
                };
            }
            catch (reason) {
                results[idx] = {
                    status: 'rejected',
                    reason,
                };
            }
        }
    }
    const workers = Array.from({ length: workerCount }, () => worker());
    await Promise.all(workers);
    return results;
}
|
|
214
|
+
/** Best-effort human-readable message from an unknown thrown value. */
function errorMessage(err) {
    if (err instanceof Error) {
        return err.message;
    }
    return String(err);
}
|
|
217
|
+
/** Emit a machine-readable single-line JSON payload on stdout. */
function outputJson(payload) {
    const serialized = JSON.stringify(payload);
    console.log(serialized);
}
|
|
220
|
+
/** console.log that is suppressed in --json output mode. */
function logHuman(...args) {
    if (isJson()) {
        return;
    }
    console.log(...args);
}
|
|
224
|
+
/** console.error that is suppressed in --json output mode. */
function logHumanError(...args) {
    if (isJson()) {
        return;
    }
    console.error(...args);
}
|
|
228
|
+
/** Raw stdout write (no newline) that is suppressed in --json output mode. */
function writeHuman(text) {
    if (isJson()) {
        return;
    }
    process.stdout.write(text);
}
|
|
232
|
+
/**
 * Extract human-readable failure strings from settled results, labelling
 * each with its task label (or a positional fallback like "task-3").
 * Results and labels are matched by index.
 */
function collectSettledFailures(results, labels) {
    const failures = [];
    results.forEach((result, i) => {
        if (result?.status !== 'rejected') {
            return;
        }
        const label = labels[i] ?? `task-${i + 1}`;
        failures.push(`${label}: ${errorMessage(result.reason)}`);
    });
    return failures;
}
|
|
242
|
+
/**
 * Build a one-line summary for a failed stage, previewing at most `max`
 * individual failures and noting how many more were omitted.
 */
function summarizeFailures(stage, failures, max = 3) {
    if (failures.length === 0) {
        return `${stage} failed.`;
    }
    const preview = failures.slice(0, max).join('; ');
    const hidden = failures.length - Math.min(failures.length, max);
    if (hidden > 0) {
        return `${stage} failed (${failures.length} total): ${preview}; +${hidden} more`;
    }
    return `${stage} failed: ${preview}`;
}
|
|
251
|
+
/**
 * Validate and narrow a parsed JSON value to a v1.1 backup file.
 * Throws (via assertBackupFileV1_1) when the shape is invalid;
 * otherwise returns the same object, now known to be well-formed.
 */
function parseBackupFile(raw) {
    assertBackupFileV1_1(raw);
    return raw;
}
|
|
255
|
+
/**
 * Pick the data namespaces from the config that are backed by a single
 * direct database (d1/neon/postgres, or unmarked blocks with tables that
 * are not per-instance / access-controlled DO namespaces).
 * @returns {string[]} namespace names, sorted lexicographically.
 */
function resolveDirectDataNamespaceNames(config) {
    if (!config || !isObjectRecord(config.databases)) {
        return [];
    }
    // Classify one namespace block: true = direct single-database namespace.
    const isDirect = (block) => {
        if (!isObjectRecord(block)) {
            return false;
        }
        // Namespaces without declared tables have nothing to back up.
        if (!isObjectRecord(block.tables) || Object.keys(block.tables).length === 0) {
            return false;
        }
        const provider = typeof block.provider === 'string' ? block.provider : undefined;
        if (provider === 'd1' || provider === 'neon' || provider === 'postgres') {
            return true;
        }
        if (provider === 'do') {
            return false;
        }
        // Per-instance namespaces are not direct.
        if (block.instance !== undefined) {
            return false;
        }
        // Access-controlled namespaces are not direct either.
        const access = isObjectRecord(block.access) ? block.access : null;
        if (access && (access.canCreate !== undefined || access.access !== undefined)) {
            return false;
        }
        return true;
    };
    return Object.entries(config.databases)
        .filter(([, block]) => isDirect(block))
        .map(([namespace]) => namespace)
        .sort();
}
|
|
284
|
+
/**
 * Compute the on-disk layout for a backup download session.
 * When resuming, the session directory is the parent of the previous
 * storage directory; otherwise a fresh timestamped directory is used.
 * @returns {{sessionDir: string, storageDir: string, manifestPath: string}}
 */
function resolveDownloadSessionPaths(edgebaseTmpDir, timestamp, resumeStorageDir) {
    let sessionDir;
    if (resumeStorageDir) {
        sessionDir = dirname(resumeStorageDir);
    } else {
        sessionDir = join(edgebaseTmpDir, `backup-${timestamp}`);
    }
    const storageDir = join(sessionDir, 'storage');
    const manifestPath = join(sessionDir, 'manifest.json');
    return { sessionDir, storageDir, manifestPath };
}
|
|
294
|
+
/**
 * Parse a .dev.vars file into a key/value map.
 * Blank lines and '#' comments are skipped; values keep everything after
 * the first '=', trimmed. Lines without '=' (or starting with it) are ignored.
 */
function parseDevVars(filePath) {
    const vars = {};
    const lines = readFileSync(filePath, 'utf-8').split('\n');
    for (const rawLine of lines) {
        const line = rawLine.trim();
        if (line.length === 0 || line.startsWith('#')) {
            continue;
        }
        const sep = line.indexOf('=');
        if (sep <= 0) {
            continue;
        }
        const key = line.slice(0, sep).trim();
        vars[key] = line.slice(sep + 1).trim();
    }
    return vars;
}
|
|
309
|
+
/**
 * Read EdgeBase secrets from the local environment.
 * Priority: `.dev.vars` (Docker/Direct) first — returning only the known
 * secret keys — then `.edgebase/secrets.json` (Edge).
 * @returns {Record<string, string> | null} secrets, or null when none found.
 */
function readSecrets(projectDir) {
    // Docker/Direct: .dev.vars
    const devVarsPath = join(projectDir, '.dev.vars');
    if (existsSync(devVarsPath)) {
        const vars = parseDevVars(devVarsPath);
        const secrets = {};
        for (const key of ['JWT_USER_SECRET', 'JWT_ADMIN_SECRET', 'SERVICE_KEY']) {
            if (vars[key]) {
                secrets[key] = vars[key];
            }
        }
        if (Object.keys(secrets).length > 0) {
            return secrets;
        }
    }
    // Edge: .edgebase/secrets.json
    const secretsJsonPath = join(projectDir, '.edgebase', 'secrets.json');
    if (existsSync(secretsJsonPath)) {
        try {
            return JSON.parse(readFileSync(secretsJsonPath, 'utf-8'));
        }
        catch {
            // Invalid JSON — fall through and report nothing found.
        }
    }
    return null;
}
|
|
336
|
+
/**
 * Persist secrets to the local environment: merge them into `.dev.vars`
 * (Docker/Direct) and mirror them into `.edgebase/secrets.json` (Edge).
 * The secrets.json update is best-effort — failures are swallowed.
 * (Fix: the two `.dev.vars` branches duplicated the serialization logic;
 * they are now unified. chmod 0o600 is still applied only to files this
 * function creates, preserving user-set permissions on existing files.)
 */
function writeSecrets(projectDir, secrets) {
    const serialize = (vars) => Object.entries(vars)
        .map(([k, v]) => `${k}=${v}`)
        .join('\n');
    const devVarsPath = join(projectDir, '.dev.vars');
    const devVarsExisted = existsSync(devVarsPath);
    // Merge with any existing vars so unrelated entries survive the rewrite.
    const merged = devVarsExisted
        ? { ...parseDevVars(devVarsPath), ...secrets }
        : secrets;
    writeFileSync(devVarsPath, `# EdgeBase secrets (auto-restored)\n${serialize(merged)}\n`);
    if (!devVarsExisted) {
        // Restrict permissions only on files we created ourselves.
        chmodSync(devVarsPath, 0o600);
    }
    // Also update .edgebase/secrets.json for Edge environments
    const edgebaseDir = join(projectDir, '.edgebase');
    const secretsJsonPath = join(edgebaseDir, 'secrets.json');
    try {
        if (!existsSync(edgebaseDir)) {
            mkdirSync(edgebaseDir, { recursive: true });
        }
        // Merge with existing secrets.json if present
        let existing = {};
        if (existsSync(secretsJsonPath)) {
            try {
                existing = JSON.parse(readFileSync(secretsJsonPath, 'utf-8'));
            }
            catch {
                /* ignore */
            }
        }
        writeFileSync(secretsJsonPath, JSON.stringify({ ...existing, ...secrets }, null, 2));
        chmodSync(secretsJsonPath, 0o600);
    }
    catch {
        // Non-fatal: .edgebase/secrets.json update failed
    }
}
|
|
381
|
+
/**
 * Interactive confirmation gate for a destructive restore.
 * Non-interactive/JSON contexts cannot confirm, so a structured
 * "needs input" error is raised instructing the caller to rerun with --yes.
 * @returns {Promise<boolean>} true only when the user types "restore".
 */
async function confirmRestore(message) {
    const canPrompt = process.stdin.isTTY && !isNonInteractive() && !isJson();
    if (!canPrompt) {
        raiseNeedsInput({
            code: 'backup_restore_confirmation_required',
            field: 'yes',
            message: 'Backup restore requires explicit confirmation before wiping and replacing target data.',
            hint: 'Review the backup summary, then rerun with --yes.',
            choices: [{
                    label: 'Approve restore',
                    value: 'yes',
                    args: ['--yes'],
                }],
        });
    }
    const rl = createInterface({ input: process.stdin, output: process.stdout });
    const answer = await new Promise((resolveAnswer) => {
        rl.question(message, (reply) => {
            rl.close();
            resolveAnswer(reply);
        });
    });
    return answer.trim().toLowerCase() === 'restore';
}
|
|
404
|
+
/**
 * Simple yes/no prompt. In non-interactive or JSON contexts no question is
 * asked and `defaultNonInteractive` is returned instead.
 * @returns {Promise<boolean>} true for "y"/"yes" (case-insensitive).
 */
async function confirmYN(message, defaultNonInteractive = false) {
    const canPrompt = process.stdin.isTTY && !isNonInteractive() && !isJson();
    if (!canPrompt) {
        return defaultNonInteractive;
    }
    const rl = createInterface({ input: process.stdin, output: process.stdout });
    const answer = await new Promise((resolveAnswer) => {
        rl.question(message, (reply) => {
            rl.close();
            resolveAnswer(reply);
        });
    });
    return ['y', 'yes'].includes(answer.trim().toLowerCase());
}
|
|
417
|
+
/**
 * Recursively collect all files under `dir`.
 * @param {string} dir - directory to walk; missing directories yield [].
 * @param {string} [base] - root against which `rel` paths are computed.
 * @returns {Array<{path: string, rel: string}>} absolute path plus path
 *   relative to `base` for every regular entry found.
 */
function collectFiles(dir, base = dir) {
    if (!existsSync(dir)) {
        return [];
    }
    const files = [];
    for (const entry of readdirSync(dir, { withFileTypes: true })) {
        const fullPath = join(dir, entry.name);
        if (entry.isDirectory()) {
            for (const nested of collectFiles(fullPath, base)) {
                files.push(nested);
            }
        }
        else {
            files.push({ path: fullPath, rel: fullPath.slice(base.length + 1) });
        }
    }
    return files;
}
|
|
433
|
+
/**
 * Enumerate all DO instances via Cloudflare REST API.
 * Returns hex IDs of DOs that have stored data.
 * Follows cursor pagination (1000 objects per page).
 * @throws {Error} on any non-2xx Cloudflare API response.
 */
export async function enumerateDOsViaCFAPI(cf, namespaceId) {
    const hexIds = [];
    let cursor = null;
    while (true) {
        const params = new URLSearchParams();
        if (cursor) {
            params.set('cursor', cursor);
        }
        params.set('limit', '1000');
        const url = `https://api.cloudflare.com/client/v4/accounts/${cf.accountId}/workers/durable_objects/namespaces/${namespaceId}/objects?${params}`;
        const resp = await fetchWithTimeout(url, {
            headers: { Authorization: `Bearer ${cf.apiToken}` },
        });
        if (!resp.ok) {
            throw new Error(`CF API error (${resp.status}): ${await resp.text()}`);
        }
        const body = (await resp.json());
        for (const obj of body.result) {
            if (obj.hasStoredData) {
                hexIds.push(obj.id);
            }
        }
        cursor = body.result_info?.cursor ?? null;
        if (!cursor) {
            break;
        }
    }
    return hexIds;
}
|
|
463
|
+
/**
 * Get all DO namespace IDs for this worker via Cloudflare REST API.
 * Returns array of { id, name, class } for each namespace.
 * @throws {Error} on any non-2xx Cloudflare API response.
 */
export async function getCFNamespaces(cf) {
    const url = `https://api.cloudflare.com/client/v4/accounts/${cf.accountId}/workers/durable_objects/namespaces`;
    const init = {
        headers: { Authorization: `Bearer ${cf.apiToken}` },
    };
    const resp = await fetchWithTimeout(url, init);
    if (!resp.ok) {
        throw new Error(`CF API error (${resp.status}): ${await resp.text()}`);
    }
    const body = (await resp.json());
    return body.result;
}
|
|
478
|
+
// ─── Commands ───
|
|
479
|
+
/**
 * Root `backup` command group (alias `bk`); subcommands such as
 * `backup create` are registered on this instance below.
 */
export const backupCommand = new Command('backup')
    .alias('bk')
    .description('Backup & restore database');
|
|
482
|
+
// ── backup create ──
// Registers `edgebase backup create`: snapshots every Durable Object, the
// control-plane and auth D1 databases, and optionally local secrets and R2
// storage. Output is a single JSON file, or a .tar.gz archive when
// --include-storage is set and files were actually downloaded.
backupCommand
    .command('create')
    .description('Create a full backup of all DO, D1, and optionally R2/secrets')
    .option('--url <url>', 'Worker URL (or EDGEBASE_URL env)')
    .option('--service-key <key>', 'Service Key (or EDGEBASE_SERVICE_KEY env)')
    .option('--output <path>', 'Output directory', './backup')
    .option('--include-secrets', 'Include JWT keys and Service Key in backup')
    .option('--include-storage', 'Include R2 storage files (creates .tar.gz archive)')
    .option('--account-id <id>', 'Cloudflare Account ID for Edge DO enumeration (or CLOUDFLARE_ACCOUNT_ID env)')
    .option('--api-token <token>', 'Cloudflare API Token for Edge DO enumeration (or CLOUDFLARE_API_TOKEN env)')
    .action(async (options) => {
    // Graceful shutdown on SIGINT
    // NOTE(review): exits immediately with 130; an in-progress R2 download's
    // staging dir and manifest are left on disk and picked up by the resume
    // check further below.
    const sigintHandler = () => {
        process.exit(130);
    };
    process.on('SIGINT', sigintHandler);
    const outputDir = resolve(options.output || './backup');
    const projectDir = resolve('.');
    // Filesystem-safe ISO timestamp (colons/dots replaced, seconds precision).
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
    const isArchive = !!options.includeStorage;
    const backupJsonPath = join(outputDir, `edgebase-backup-${timestamp}.json`);
    const archivePath = join(outputDir, `edgebase-backup-${timestamp}.tar.gz`);
    // Staging state tracked at function scope so the catch block can clean up.
    let storageStageDir = null;
    let storageCleanupDir = null;
    let tarStagingDir = null;
    let totalRows = 0;
    let config;
    let dataNamespaces;
    try {
        const api = resolveOptions(options);
        if (!existsSync(outputDir)) {
            mkdirSync(outputDir, { recursive: true });
        }
        logHuman(chalk.blue('💾 Creating backup...'));
        if (options.includeSecrets)
            logHuman(chalk.yellow(' ⚡ Including secrets'));
        if (options.includeStorage)
            logHuman(chalk.yellow(' ⚡ Including R2 storage'));
        logHuman();
        // ── 1. List all DOs ──
        const enumSpinner = spin('Enumerating DOs...');
        const accountId = options.accountId || process.env.CLOUDFLARE_ACCOUNT_ID;
        const apiToken = options.apiToken || process.env.CLOUDFLARE_API_TOKEN;
        // Edge mode requires both CF credentials; otherwise the Worker itself
        // enumerates DOs from its config.
        const isEdge = !!(accountId && apiToken);
        let listResult;
        try {
            if (isEdge) {
                // Edge mode: enumerate via Cloudflare REST API
                enumSpinner.text = 'Enumerating DOs via Cloudflare API (Edge mode)...';
                const cf = { accountId: accountId, apiToken: apiToken };
                const namespaces = await getCFNamespaces(cf);
                const allHexIds = [];
                for (const ns of namespaces) {
                    const ids = await enumerateDOsViaCFAPI(cf, ns.id);
                    allHexIds.push(...ids);
                }
                enumSpinner.text = `Found ${allHexIds.length} DO instances via CF API, resolving names...`;
                // Pass hex IDs to Worker for name resolution
                listResult = await apiCall(api, '/list-dos', 'POST', {
                    hexIds: allHexIds,
                });
            }
            else {
                // Config-scan mode: Worker enumerates via config + membership
                listResult = await apiCall(api, '/list-dos', 'POST', {});
            }
            enumSpinner.succeed(`Found ${listResult.total} DO instances`);
        }
        catch (err) {
            enumSpinner.fail(`DO enumeration failed: ${errorMessage(err)}`);
            throw err;
        }
        // ── 2. Dump each DO (10-concurrent throttle) ──
        const control = { d1: {} };
        const auth = { d1: {}, shards: {} };
        const databases = {};
        let dumped = 0;
        const dumpSpinner = spin('Dumping DOs...');
        // Each task increments the shared counter when it starts, so the
        // spinner shows started-task progress (tasks run 10 at a time).
        const dumpTasks = listResult.dos.map((doInfo) => async () => {
            const idx = ++dumped;
            const progress = `[${idx}/${listResult.dos.length}]`;
            dumpSpinner.text = `${progress} Dumping ${doInfo.doName}...`;
            const dump = await apiCall(api, '/dump-do', 'POST', {
                doName: doInfo.doName,
                type: doInfo.type,
            });
            // Auth shards and database DOs are kept in separate maps in the
            // backup document.
            if (doInfo.type === 'auth') {
                auth.shards[doInfo.doName] = dump;
            }
            else {
                databases[doInfo.doName] = dump;
            }
            return dump;
        });
        const dumpResults = await throttleSettled(dumpTasks, 10);
        const dumpFailures = collectSettledFailures(dumpResults, listResult.dos.map((doInfo) => doInfo.doName));
        if (dumpFailures.length > 0) {
            dumpSpinner.fail(`DO dump failed for ${dumpFailures.length} instance(s)`);
            throw new Error(summarizeFailures('DO dump', dumpFailures));
        }
        // Row total counts database DOs only; auth shard rows are excluded.
        totalRows = Object.values(databases).reduce((sum, dump) => sum + Object.values(dump.tables).reduce((tableSum, rows) => tableSum + rows.length, 0), 0);
        dumpSpinner.succeed(`Dumped ${listResult.dos.length} DOs (${totalRows} total rows)`);
        // ── 3. Dump D1 ──
        const controlSpinner = spin('Dumping internal control plane...');
        try {
            const controlDump = await apiCall(api, '/dump-control-d1', 'POST');
            control.d1 = controlDump.tables;
            const rowCount = Object.values(controlDump.tables).reduce((sum, rows) => sum + rows.length, 0);
            controlSpinner.succeed(`Control D1: ${Object.keys(controlDump.tables).length} tables, ${rowCount} rows`);
        }
        catch (err) {
            controlSpinner.fail(`Control D1 dump failed: ${errorMessage(err)}`);
            throw err;
        }
        const d1Spinner = spin('Dumping auth database...');
        try {
            const d1Dump = await apiCall(api, '/dump-d1', 'POST');
            auth.d1 = d1Dump.tables;
            const rowCount = Object.values(d1Dump.tables).reduce((sum, rows) => sum + rows.length, 0);
            d1Spinner.succeed(`D1: ${Object.keys(d1Dump.tables).length} tables, ${rowCount} rows`);
        }
        catch (err) {
            d1Spinner.fail(`D1 dump failed: ${errorMessage(err)}`);
            throw err;
        }
        // ── 4. Secrets (optional) ──
        // Secrets are read from the local project, not from the Worker.
        let secrets;
        if (options.includeSecrets) {
            const secretSpinner = spin('Collecting secrets...');
            secrets = readSecrets(projectDir) ?? undefined;
            if (secrets) {
                secretSpinner.succeed(`Collected ${Object.keys(secrets).length} secret keys`);
            }
            else {
                secretSpinner.warn('No secrets found (.dev.vars / .edgebase/secrets.json)');
            }
        }
        // ── 5. R2 Storage (optional) ──
        let storage;
        // Check for existing incomplete download (resume support)
        const edgebaseTmpDir = join(projectDir, '.edgebase', 'tmp');
        if (options.includeStorage) {
            // Check for resumable incomplete download
            let resumeManifest = null;
            let resumeDir = null;
            if (existsSync(edgebaseTmpDir)) {
                // Newest session first (directory names sort by timestamp).
                const tmpDirs = readdirSync(edgebaseTmpDir)
                    .filter((d) => d.startsWith('backup-'))
                    .sort()
                    .reverse();
                for (const dir of tmpDirs) {
                    const manifestPath = join(edgebaseTmpDir, dir, 'manifest.json');
                    if (existsSync(manifestPath)) {
                        try {
                            const manifest = JSON.parse(readFileSync(manifestPath, 'utf-8'));
                            const pending = manifest.objects.filter((o) => o.status === 'pending');
                            if (pending.length > 0) {
                                logHuman(chalk.yellow(` ⚠ Found incomplete R2 download from ${manifest.startedAt}`));
                                logHuman(chalk.yellow(` ${pending.length} files remaining out of ${manifest.objects.length}`));
                                const resume = await confirmYN(' Resume download? (y/N): ', false);
                                if (resume) {
                                    resumeManifest = manifest;
                                    resumeDir = join(edgebaseTmpDir, dir, 'storage');
                                    break;
                                }
                            }
                        }
                        catch {
                            /* invalid manifest, skip */
                        }
                    }
                }
            }
            let objectList;
            let totalBytes;
            let totalMB;
            const sessionPaths = resolveDownloadSessionPaths(edgebaseTmpDir, timestamp, resumeDir);
            if (resumeManifest && resumeDir) {
                // Resume mode
                // Rebuild the object list from the saved manifest instead of
                // re-listing R2 (keeps the snapshot consistent with the
                // partially downloaded session).
                objectList = resumeManifest.objects.map((o) => ({
                    key: o.key,
                    size: o.size,
                    etag: o.etag,
                    contentType: o.contentType,
                }));
                totalBytes = objectList.reduce((sum, obj) => sum + obj.size, 0);
                totalMB = (totalBytes / (1024 * 1024)).toFixed(1);
            }
            else {
                // Fresh download
                const r2ListSpinner = spin('Listing R2 objects...');
                try {
                    const storageList = await apiCall(api, '/dump-storage?action=list');
                    objectList = storageList.objects;
                    totalBytes = objectList.reduce((sum, obj) => sum + obj.size, 0);
                    totalMB = (totalBytes / (1024 * 1024)).toFixed(1);
                    r2ListSpinner.succeed(`${storageList.total} R2 objects (${totalMB} MB)`);
                }
                catch (err) {
                    r2ListSpinner.fail(`R2 listing failed: ${errorMessage(err)}`);
                    throw err;
                }
            }
            storage = { objects: objectList };
            if (objectList.length > 0) {
                // R2 confirmation prompt (Gap 9)
                logHuman(chalk.yellow(` ⚠ ${objectList.length} files, ${totalMB} MB to download.`));
                const proceed = await confirmYN(` ${chalk.cyan('Proceed with R2 download?')} (y/N): `, true);
                if (!proceed) {
                    // Declining leaves storageStageDir null, so the final
                    // output falls back to a JSON-only backup.
                    logHuman(chalk.yellow(' Skipping R2 storage download.'));
                    storage = undefined;
                }
                else {
                    storageStageDir = sessionPaths.storageDir;
                    storageCleanupDir = sessionPaths.sessionDir;
                    mkdirSync(sessionPaths.sessionDir, { recursive: true });
                    mkdirSync(sessionPaths.storageDir, { recursive: true });
                    // Create/update manifest for resume support (Gap 8)
                    let manifest;
                    if (resumeManifest) {
                        manifest = resumeManifest;
                    }
                    else {
                        manifest = {
                            objects: objectList.map((obj) => ({ ...obj, status: 'pending' })),
                            startedAt: new Date().toISOString(),
                            completedCount: 0,
                        };
                        writeFileSync(sessionPaths.manifestPath, JSON.stringify(manifest, null, 2));
                    }
                    const pendingObjects = manifest.objects.filter((obj) => obj.status === 'pending');
                    // Resumed sessions start the counter at the number already
                    // completed so the [i/total] display stays accurate.
                    let downloadedCount = manifest.completedCount;
                    const totalToDownload = manifest.objects.length;
                    const r2DownloadSpinner = spin('Downloading R2 files...');
                    const downloadTasks = pendingObjects.map((manifestObj) => async () => {
                        const idx = ++downloadedCount;
                        r2DownloadSpinner.text = `[${idx}/${totalToDownload}] Downloading ${manifestObj.key}...`;
                        const { buffer } = await apiBinary(api, `/dump-storage?action=get&key=${encodeURIComponent(manifestObj.key)}`);
                        const filePath = join(sessionPaths.storageDir, manifestObj.key);
                        mkdirSync(dirname(filePath), { recursive: true });
                        writeFileSync(filePath, Buffer.from(buffer));
                        // Mark as done in manifest
                        // Persisted after every file so an interrupt loses at
                        // most the in-flight downloads.
                        manifestObj.status = 'done';
                        manifest.completedCount = downloadedCount;
                        writeFileSync(sessionPaths.manifestPath, JSON.stringify(manifest, null, 2));
                    });
                    const downloadResults = await throttleSettled(downloadTasks, 5);
                    const downloadFailures = collectSettledFailures(downloadResults, pendingObjects.map((obj) => obj.key));
                    if (downloadFailures.length > 0) {
                        r2DownloadSpinner.fail(`R2 download failed for ${downloadFailures.length} object(s)`);
                        throw new Error(summarizeFailures('R2 download', downloadFailures));
                    }
                    r2DownloadSpinner.succeed(`Downloaded ${totalToDownload} R2 files (${totalMB} MB)`);
                }
            }
        }
        // ── 6. Fetch config snapshot ──
        try {
            config = await apiCall(api, '/config', 'GET');
        }
        catch {
            // Config endpoint may not exist on older servers
        }
        // ── 7. Dump D1/Postgres data namespaces ──
        const directNamespaceNames = resolveDirectDataNamespaceNames(config);
        if (directNamespaceNames.length > 0) {
            const dataSpinner = spin('Dumping data namespaces...');
            dataNamespaces = {};
            try {
                for (const namespace of directNamespaceNames) {
                    const dump = await apiCall(api, '/dump-data', 'POST', { namespace });
                    dataNamespaces[namespace] = dump;
                    totalRows += Object.values(dump.tables).reduce((sum, rows) => sum + rows.length, 0);
                }
                dataSpinner.succeed(`Dumped ${directNamespaceNames.length} data namespace(s)`);
            }
            catch (err) {
                dataSpinner.fail(`Data namespace dump failed: ${errorMessage(err)}`);
                throw err;
            }
        }
        // ── 8. Build backup object ──
        const backup = {
            version: '1.1',
            timestamp: new Date().toISOString(),
            // Source label: explicit CF credentials or a workers.dev URL mean
            // edge; localhost means local dev; anything else is assumed docker.
            source: isEdge
                ? 'cloudflare-edge'
                : api.url.includes('workers.dev')
                    ? 'cloudflare-edge'
                    : api.url.includes('localhost')
                        ? 'local'
                        : 'docker',
            control,
            auth,
            databases,
        };
        if (config)
            backup.config = config;
        if (dataNamespaces && Object.keys(dataNamespaces).length > 0) {
            backup.dataNamespaces = dataNamespaces;
        }
        if (secrets)
            backup.secrets = secrets;
        if (storage)
            backup.storage = storage;
        // ── 9. Write output ──
        let outputPath = backupJsonPath;
        if (isArchive && storageStageDir && existsSync(storageStageDir)) {
            // Create tar.gz with backup.json + storage/ files
            tarStagingDir = join(outputDir, `.tmp-tar-${timestamp}`);
            mkdirSync(tarStagingDir, { recursive: true });
            // Copy backup.json into tar staging dir
            writeFileSync(join(tarStagingDir, 'backup.json'), JSON.stringify(backup, null, 2));
            // Copy storage files into tar staging dir
            const storageDestDir = join(tarStagingDir, 'storage');
            mkdirSync(storageDestDir, { recursive: true });
            const storageFiles = collectFiles(storageStageDir);
            for (const file of storageFiles) {
                const dest = join(storageDestDir, file.rel);
                mkdirSync(dirname(dest), { recursive: true });
                writeFileSync(dest, readFileSync(file.path));
            }
            // Create tar.gz using shell tar (same pattern as deploy.ts)
            execFileSync('tar', ['-czf', archivePath, '-C', tarStagingDir, 'backup.json', 'storage'], {
                stdio: 'pipe',
            });
            // Cleanup tmp dirs
            if (storageCleanupDir)
                rmSync(storageCleanupDir, { recursive: true, force: true });
            rmSync(tarStagingDir, { recursive: true, force: true });
            tarStagingDir = null;
            // Owner-only permissions: archives may hold sensitive data.
            chmodSync(archivePath, 0o600);
            outputPath = archivePath;
            logHuman();
            logHuman(chalk.green('✅ Backup complete!'));
            logHuman(chalk.dim(` File: ${archivePath}`));
            logHuman(chalk.dim(` Size: ${(statSync(archivePath).size / 1024).toFixed(1)} KB`));
        }
        else {
            // JSON-only backup
            // (also the fallback when --include-storage was set but the R2
            // download was declined or there were no staged files)
            writeFileSync(backupJsonPath, JSON.stringify(backup, null, 2), 'utf-8');
            chmodSync(backupJsonPath, 0o600);
            logHuman();
            logHuman(chalk.green('✅ Backup complete!'));
            logHuman(chalk.dim(` File: ${backupJsonPath}`));
            logHuman(chalk.dim(` Size: ${(Buffer.byteLength(JSON.stringify(backup)) / 1024).toFixed(1)} KB`));
        }
        // Machine-readable mode short-circuits the human summary below.
        if (isJson()) {
            outputJson({
                status: 'success',
                file: outputPath,
                dos: Object.keys(databases).length,
                dataNamespaces: Object.keys(dataNamespaces ?? {}).length,
                rows: totalRows,
                timestamp,
            });
            return;
        }
        logHuman(chalk.dim(` Auth shards: ${Object.keys(auth.shards).length}`));
        logHuman(chalk.dim(` Database DOs: ${Object.keys(databases).length}`));
        if (dataNamespaces) {
            logHuman(chalk.dim(` Data namespaces: ${Object.keys(dataNamespaces).length}`));
        }
        if (storage)
            logHuman(chalk.dim(` R2 files: ${storage.objects.length}`));
        if (secrets)
            logHuman(chalk.dim(` Secrets: ${Object.keys(secrets).length} keys`));
        logHuman();
        if (secrets) {
            logHuman(chalk.red.bold('⚠ This backup contains SECRETS. Store securely.'));
            logHuman(chalk.red(' File permissions set to 600.'));
        }
        else {
            logHuman(chalk.yellow('⚠'), 'This backup may contain sensitive data (passwords, tokens).');
            logHuman(chalk.yellow(' '), 'File permissions set to 600. Store securely.');
        }
    }
    catch (err) {
        // Structured CLI errors are already user-facing; rethrow untouched.
        if (isCliStructuredError(err))
            throw err;
        // Remove partial outputs so a failed run never leaves a half-written
        // backup behind. (The R2 session dir is intentionally kept for resume.)
        if (existsSync(backupJsonPath))
            rmSync(backupJsonPath, { force: true });
        if (existsSync(archivePath))
            rmSync(archivePath, { force: true });
        if (tarStagingDir && existsSync(tarStagingDir)) {
            rmSync(tarStagingDir, { recursive: true, force: true });
        }
        logHuman();
        logHumanError(chalk.red(`✗ Backup failed: ${errorMessage(err)}`));
        raiseCliError({
            code: 'backup_create_failed',
            message: errorMessage(err),
        });
    }
    finally {
        process.off('SIGINT', sigintHandler);
    }
});
|
|
881
|
+
// ── backup restore ──
|
|
882
|
+
backupCommand
|
|
883
|
+
.command('restore')
|
|
884
|
+
.description('Restore from a backup file (JSON or tar.gz)')
|
|
885
|
+
.requiredOption('--from <path>', 'Path to backup file (JSON or tar.gz)')
|
|
886
|
+
.option('--url <url>', 'Worker URL (or EDGEBASE_URL env)')
|
|
887
|
+
.option('--service-key <key>', 'Service Key (or EDGEBASE_SERVICE_KEY env)')
|
|
888
|
+
.option('--yes', 'Skip confirmation prompt')
|
|
889
|
+
.option('--skip-secrets', 'Skip restoring secrets even if present in backup')
|
|
890
|
+
.option('--skip-storage', 'Skip restoring R2 storage even if present in backup')
|
|
891
|
+
.option('--account-id <id>', 'Cloudflare Account ID for Edge DO enumeration (or CLOUDFLARE_ACCOUNT_ID env)')
|
|
892
|
+
.option('--api-token <token>', 'Cloudflare API Token for Edge DO enumeration (or CLOUDFLARE_API_TOKEN env)')
|
|
893
|
+
.action(async (options) => {
|
|
894
|
+
// Graceful shutdown on SIGINT
|
|
895
|
+
const sigintHandler = () => {
|
|
896
|
+
process.exit(130);
|
|
897
|
+
};
|
|
898
|
+
process.on('SIGINT', sigintHandler);
|
|
899
|
+
const backupPath = resolve(options.from);
|
|
900
|
+
const projectDir = resolve('.');
|
|
901
|
+
let storageDirPath = null;
|
|
902
|
+
let extractDirPath = null;
|
|
903
|
+
try {
|
|
904
|
+
if (!existsSync(backupPath)) {
|
|
905
|
+
const message = `Backup file not found: ${backupPath}`;
|
|
906
|
+
raiseCliError({
|
|
907
|
+
code: 'backup_file_not_found',
|
|
908
|
+
message,
|
|
909
|
+
hint: 'Check the path passed to --from and retry.',
|
|
910
|
+
});
|
|
911
|
+
}
|
|
912
|
+
const api = resolveOptions(options);
|
|
913
|
+
logHuman(chalk.blue(`🔄 Restoring from: ${backupPath}`));
|
|
914
|
+
logHuman();
|
|
915
|
+
let backup;
|
|
916
|
+
if (backupPath.endsWith('.tar.gz') || backupPath.endsWith('.tgz')) {
|
|
917
|
+
extractDirPath = join(dirname(backupPath), `.tmp-restore-${Date.now()}`);
|
|
918
|
+
mkdirSync(extractDirPath, { recursive: true });
|
|
919
|
+
execFileSync('tar', ['-xzf', backupPath, '-C', extractDirPath], { stdio: 'pipe' });
|
|
920
|
+
const jsonPath = join(extractDirPath, 'backup.json');
|
|
921
|
+
if (!existsSync(jsonPath)) {
|
|
922
|
+
throw new Error('backup.json not found in archive.');
|
|
923
|
+
}
|
|
924
|
+
backup = parseBackupFile(JSON.parse(readFileSync(jsonPath, 'utf-8')));
|
|
925
|
+
const storageDir = join(extractDirPath, 'storage');
|
|
926
|
+
if (existsSync(storageDir)) {
|
|
927
|
+
storageDirPath = storageDir;
|
|
928
|
+
}
|
|
929
|
+
}
|
|
930
|
+
else {
|
|
931
|
+
backup = parseBackupFile(JSON.parse(readFileSync(backupPath, 'utf-8')));
|
|
932
|
+
}
|
|
933
|
+
const controlTableCount = Object.keys(backup.control.d1).length;
|
|
934
|
+
const authShardCount = Object.keys(backup.auth.shards).length;
|
|
935
|
+
const dbCount = Object.keys(backup.databases).length;
|
|
936
|
+
const dataNamespaceCount = Object.keys(backup.dataNamespaces ?? {}).length;
|
|
937
|
+
const d1TableCount = Object.keys(backup.auth.d1).length;
|
|
938
|
+
const hasSecrets = !!backup.secrets && !options.skipSecrets;
|
|
939
|
+
const hasStorage = (!!backup.storage || !!storageDirPath) && !options.skipStorage;
|
|
940
|
+
if (hasStorage && !storageDirPath) {
|
|
941
|
+
throw new Error('Backup includes storage metadata but no extracted storage files were found. Restore from the .tar.gz archive or use --skip-storage.');
|
|
942
|
+
}
|
|
943
|
+
logHuman(chalk.dim(` Backup date: ${backup.timestamp}`));
|
|
944
|
+
logHuman(chalk.dim(` Source: ${backup.source}`));
|
|
945
|
+
logHuman(chalk.dim(` Control D1 tables: ${controlTableCount}`));
|
|
946
|
+
logHuman(chalk.dim(` Auth shards: ${authShardCount}`));
|
|
947
|
+
logHuman(chalk.dim(` Database DOs: ${dbCount}`));
|
|
948
|
+
logHuman(chalk.dim(` Data namespaces: ${dataNamespaceCount}`));
|
|
949
|
+
logHuman(chalk.dim(` D1 tables: ${d1TableCount}`));
|
|
950
|
+
if (hasSecrets) {
|
|
951
|
+
logHuman(chalk.dim(` Secrets: ${Object.keys(backup.secrets).length} keys`));
|
|
952
|
+
}
|
|
953
|
+
if (hasStorage) {
|
|
954
|
+
const fileCount = backup.storage?.objects.length ?? collectFiles(storageDirPath).length;
|
|
955
|
+
logHuman(chalk.dim(` R2 files: ${fileCount}`));
|
|
956
|
+
}
|
|
957
|
+
logHuman();
|
|
958
|
+
if (hasSecrets && backup.secrets) {
|
|
959
|
+
const backupDate = new Date(backup.timestamp);
|
|
960
|
+
const ageMs = Date.now() - backupDate.getTime();
|
|
961
|
+
const ageDays = Math.floor(ageMs / (1000 * 60 * 60 * 24));
|
|
962
|
+
const ageHours = Math.floor(ageMs / (1000 * 60 * 60));
|
|
963
|
+
const ageStr = ageDays > 0 ? `${ageDays} days ago` : `${ageHours} hours ago`;
|
|
964
|
+
logHuman(chalk.yellow.bold('⚠ Secret Warning:'));
|
|
965
|
+
logHuman(chalk.yellow(` Backup secrets were created at ${backup.timestamp} (${ageStr}).`));
|
|
966
|
+
logHuman(chalk.yellow(' Restoring these secrets will invalidate any JWTs issued after the backup.'));
|
|
967
|
+
if (ageDays > 7) {
|
|
968
|
+
logHuman(chalk.red(' ⚡ This backup is over 7 days old. Use caution when restoring secrets.'));
|
|
969
|
+
}
|
|
970
|
+
logHuman();
|
|
971
|
+
}
|
|
972
|
+
if (!options.yes) {
|
|
973
|
+
logHuman(chalk.red.bold('⚠ WARNING: This will WIPE and REPLACE all data at the target.'));
|
|
974
|
+
logHuman(chalk.red(` Target: ${api.url}`));
|
|
975
|
+
logHuman();
|
|
976
|
+
const confirmed = await confirmRestore(` Type ${chalk.cyan('"restore"')} to confirm: `);
|
|
977
|
+
if (!confirmed) {
|
|
978
|
+
logHuman(chalk.yellow(' Aborted.'));
|
|
979
|
+
return;
|
|
980
|
+
}
|
|
981
|
+
logHuman();
|
|
982
|
+
}
|
|
983
|
+
const restoreAccountId = options.accountId || process.env.CLOUDFLARE_ACCOUNT_ID;
|
|
984
|
+
const restoreApiToken = options.apiToken || process.env.CLOUDFLARE_API_TOKEN;
|
|
985
|
+
const isRestoreEdge = !!(restoreAccountId && restoreApiToken);
|
|
986
|
+
let restoredSecrets = false;
|
|
987
|
+
writeHuman(chalk.dim(' Restoring internal control plane...'));
|
|
988
|
+
try {
|
|
989
|
+
await apiCall(api, '/restore-control-d1', 'POST', { tables: backup.control.d1 });
|
|
990
|
+
logHuman(chalk.green(' ✓'));
|
|
991
|
+
}
|
|
992
|
+
catch (err) {
|
|
993
|
+
logHumanError(chalk.red(` ✗ ${errorMessage(err)}`));
|
|
994
|
+
throw new Error(`CONTROL_DB restore failed: ${errorMessage(err)}`);
|
|
995
|
+
}
|
|
996
|
+
writeHuman(chalk.dim(' Restoring auth database...'));
|
|
997
|
+
try {
|
|
998
|
+
await apiCall(api, '/restore-d1', 'POST', { tables: backup.auth.d1 });
|
|
999
|
+
logHuman(chalk.green(' ✓'));
|
|
1000
|
+
}
|
|
1001
|
+
catch (err) {
|
|
1002
|
+
logHumanError(chalk.red(` ✗ ${errorMessage(err)}`));
|
|
1003
|
+
throw new Error(`AUTH_DB restore failed: ${errorMessage(err)}`);
|
|
1004
|
+
}
|
|
1005
|
+
writeHuman(chalk.dim(' Checking for orphan DOs...'));
|
|
1006
|
+
let currentDOs;
|
|
1007
|
+
try {
|
|
1008
|
+
if (isRestoreEdge) {
|
|
1009
|
+
const cf = {
|
|
1010
|
+
accountId: restoreAccountId,
|
|
1011
|
+
apiToken: restoreApiToken,
|
|
1012
|
+
};
|
|
1013
|
+
const namespaces = await getCFNamespaces(cf);
|
|
1014
|
+
const allHexIds = [];
|
|
1015
|
+
for (const ns of namespaces) {
|
|
1016
|
+
const ids = await enumerateDOsViaCFAPI(cf, ns.id);
|
|
1017
|
+
allHexIds.push(...ids);
|
|
1018
|
+
}
|
|
1019
|
+
currentDOs = await apiCall(api, '/list-dos', 'POST', {
|
|
1020
|
+
hexIds: allHexIds,
|
|
1021
|
+
});
|
|
1022
|
+
}
|
|
1023
|
+
else {
|
|
1024
|
+
currentDOs = await apiCall(api, '/list-dos', 'POST', {});
|
|
1025
|
+
}
|
|
1026
|
+
}
|
|
1027
|
+
catch (err) {
|
|
1028
|
+
logHumanError(chalk.red(` ✗ ${errorMessage(err)}`));
|
|
1029
|
+
throw new Error(`Orphan DO enumeration failed: ${errorMessage(err)}`);
|
|
1030
|
+
}
|
|
1031
|
+
const backupDoNames = new Set([
|
|
1032
|
+
...Object.keys(backup.auth.shards),
|
|
1033
|
+
...Object.keys(backup.databases),
|
|
1034
|
+
]);
|
|
1035
|
+
const orphans = currentDOs.dos.filter((item) => !backupDoNames.has(item.doName));
|
|
1036
|
+
if (orphans.length > 0) {
|
|
1037
|
+
const orphanFailures = [];
|
|
1038
|
+
let wiped = 0;
|
|
1039
|
+
for (const orphan of orphans) {
|
|
1040
|
+
try {
|
|
1041
|
+
await apiCall(api, '/wipe-do', 'POST', {
|
|
1042
|
+
doName: orphan.doName,
|
|
1043
|
+
type: orphan.type,
|
|
1044
|
+
});
|
|
1045
|
+
wiped++;
|
|
1046
|
+
}
|
|
1047
|
+
catch (err) {
|
|
1048
|
+
orphanFailures.push(`${orphan.doName}: ${errorMessage(err)}`);
|
|
1049
|
+
}
|
|
1050
|
+
}
|
|
1051
|
+
if (orphanFailures.length > 0) {
|
|
1052
|
+
logHumanError(chalk.red(` ✗ ${summarizeFailures('Orphan wipe', orphanFailures)}`));
|
|
1053
|
+
throw new Error(summarizeFailures('Orphan wipe', orphanFailures));
|
|
1054
|
+
}
|
|
1055
|
+
logHuman(chalk.green(` ✓ ${wiped} orphan DOs wiped`));
|
|
1056
|
+
}
|
|
1057
|
+
else {
|
|
1058
|
+
logHuman(chalk.green(' ✓ No orphans'));
|
|
1059
|
+
}
|
|
1060
|
+
const shardEntries = Object.entries(backup.auth.shards);
|
|
1061
|
+
if (shardEntries.length > 0) {
|
|
1062
|
+
let shardCount = 0;
|
|
1063
|
+
const shardTasks = shardEntries.map(([, doDump]) => async () => {
|
|
1064
|
+
shardCount++;
|
|
1065
|
+
writeHuman(`\r${chalk.dim(` [${shardCount}/${shardEntries.length}] Restoring ${doDump.doName}...`)}`.padEnd(80));
|
|
1066
|
+
await apiCall(api, '/restore-do', 'POST', {
|
|
1067
|
+
doName: doDump.doName,
|
|
1068
|
+
type: 'auth',
|
|
1069
|
+
tables: doDump.tables,
|
|
1070
|
+
});
|
|
1071
|
+
});
|
|
1072
|
+
const shardResults = await throttleSettled(shardTasks, 10);
|
|
1073
|
+
const shardFailures = collectSettledFailures(shardResults, shardEntries.map(([, doDump]) => doDump.doName));
|
|
1074
|
+
if (shardFailures.length > 0) {
|
|
1075
|
+
logHumanError(`\r${chalk.red('✗')} ${summarizeFailures('Auth shard restore', shardFailures)}`.padEnd(80));
|
|
1076
|
+
throw new Error(summarizeFailures('Auth shard restore', shardFailures));
|
|
1077
|
+
}
|
|
1078
|
+
logHuman(`\r${chalk.green('✓')} Restored ${shardEntries.length} Auth shards`.padEnd(80));
|
|
1079
|
+
}
|
|
1080
|
+
const dbEntries = Object.entries(backup.databases);
|
|
1081
|
+
if (dbEntries.length > 0) {
|
|
1082
|
+
let dbCount = 0;
|
|
1083
|
+
const dbTasks = dbEntries.map(([, doDump]) => async () => {
|
|
1084
|
+
dbCount++;
|
|
1085
|
+
writeHuman(`\r${chalk.dim(` [${dbCount}/${dbEntries.length}] Restoring ${doDump.doName}...`)}`.padEnd(80));
|
|
1086
|
+
await apiCall(api, '/restore-do', 'POST', {
|
|
1087
|
+
doName: doDump.doName,
|
|
1088
|
+
type: 'database',
|
|
1089
|
+
tables: doDump.tables,
|
|
1090
|
+
});
|
|
1091
|
+
});
|
|
1092
|
+
const dbResults = await throttleSettled(dbTasks, 10);
|
|
1093
|
+
const dbFailures = collectSettledFailures(dbResults, dbEntries.map(([, doDump]) => doDump.doName));
|
|
1094
|
+
if (dbFailures.length > 0) {
|
|
1095
|
+
logHumanError(`\r${chalk.red('✗')} ${summarizeFailures('Database DO restore', dbFailures)}`.padEnd(80));
|
|
1096
|
+
throw new Error(summarizeFailures('Database DO restore', dbFailures));
|
|
1097
|
+
}
|
|
1098
|
+
logHuman(`\r${chalk.green('✓')} Restored ${dbEntries.length} Database DOs`.padEnd(80));
|
|
1099
|
+
}
|
|
1100
|
+
const dataNamespaceEntries = Object.entries(backup.dataNamespaces ?? {});
|
|
1101
|
+
if (dataNamespaceEntries.length > 0) {
|
|
1102
|
+
let namespaceCount = 0;
|
|
1103
|
+
const namespaceTasks = dataNamespaceEntries.map(([, dump]) => async () => {
|
|
1104
|
+
namespaceCount++;
|
|
1105
|
+
writeHuman(`\r${chalk.dim(` [${namespaceCount}/${dataNamespaceEntries.length}] Restoring data namespace ${dump.namespace}...`)}`.padEnd(80));
|
|
1106
|
+
await apiCall(api, '/restore-data', 'POST', {
|
|
1107
|
+
namespace: dump.namespace,
|
|
1108
|
+
tables: dump.tables,
|
|
1109
|
+
});
|
|
1110
|
+
});
|
|
1111
|
+
const namespaceResults = await throttleSettled(namespaceTasks, 4);
|
|
1112
|
+
const namespaceFailures = collectSettledFailures(namespaceResults, dataNamespaceEntries.map(([, dump]) => dump.namespace));
|
|
1113
|
+
if (namespaceFailures.length > 0) {
|
|
1114
|
+
logHumanError(`\r${chalk.red('✗')} ${summarizeFailures('Data namespace restore', namespaceFailures)}`.padEnd(80));
|
|
1115
|
+
throw new Error(summarizeFailures('Data namespace restore', namespaceFailures));
|
|
1116
|
+
}
|
|
1117
|
+
logHuman(`\r${chalk.green('✓')} Restored ${dataNamespaceEntries.length} data namespace(s)`.padEnd(80));
|
|
1118
|
+
}
|
|
1119
|
+
if (hasStorage && storageDirPath) {
|
|
1120
|
+
writeHuman(chalk.dim(' Wiping existing R2 storage...'));
|
|
1121
|
+
try {
|
|
1122
|
+
const wipeResult = await apiCall(api, '/restore-storage?action=wipe');
|
|
1123
|
+
logHuman(chalk.green(` ✓ ${wipeResult.deleted} deleted`));
|
|
1124
|
+
}
|
|
1125
|
+
catch (err) {
|
|
1126
|
+
logHumanError(chalk.red(` ✗ ${errorMessage(err)}`));
|
|
1127
|
+
throw new Error(`R2 wipe failed: ${errorMessage(err)}`);
|
|
1128
|
+
}
|
|
1129
|
+
const filesToUpload = collectFiles(storageDirPath);
|
|
1130
|
+
if (filesToUpload.length > 0) {
|
|
1131
|
+
let uploadCount = 0;
|
|
1132
|
+
const uploadTasks = filesToUpload.map((file) => async () => {
|
|
1133
|
+
uploadCount++;
|
|
1134
|
+
writeHuman(`\r [${uploadCount}/${filesToUpload.length}] Uploading ${file.rel}...`.padEnd(80));
|
|
1135
|
+
const fileData = readFileSync(file.path);
|
|
1136
|
+
const meta = backup.storage?.objects.find((obj) => obj.key === file.rel);
|
|
1137
|
+
const contentType = meta?.contentType || 'application/octet-stream';
|
|
1138
|
+
await apiUpload(api, `/restore-storage?action=put&key=${encodeURIComponent(file.rel)}`, fileData, contentType);
|
|
1139
|
+
});
|
|
1140
|
+
const uploadResults = await throttleSettled(uploadTasks, 5);
|
|
1141
|
+
const uploadFailures = collectSettledFailures(uploadResults, filesToUpload.map((file) => file.rel));
|
|
1142
|
+
if (uploadFailures.length > 0) {
|
|
1143
|
+
logHumanError(`\r${chalk.red('✗')} ${summarizeFailures('R2 upload', uploadFailures)}`.padEnd(80));
|
|
1144
|
+
throw new Error(summarizeFailures('R2 upload', uploadFailures));
|
|
1145
|
+
}
|
|
1146
|
+
logHuman(`\r${chalk.green('✓')} Uploaded ${filesToUpload.length} R2 files`.padEnd(80));
|
|
1147
|
+
}
|
|
1148
|
+
}
|
|
1149
|
+
writeHuman(chalk.dim(' Resyncing _users_public...'));
|
|
1150
|
+
try {
|
|
1151
|
+
const resyncResult = await apiCall(api, '/resync-users-public', 'POST');
|
|
1152
|
+
logHuman(chalk.green(` ✓ ${resyncResult.totalSynced} users synced from ${resyncResult.shards.length} shards`));
|
|
1153
|
+
}
|
|
1154
|
+
catch (err) {
|
|
1155
|
+
logHuman(chalk.yellow(` ⚠ ${errorMessage(err)}`));
|
|
1156
|
+
}
|
|
1157
|
+
if (hasSecrets && backup.secrets) {
|
|
1158
|
+
writeHuman(chalk.dim(' Restoring secrets...'));
|
|
1159
|
+
try {
|
|
1160
|
+
writeSecrets(projectDir, backup.secrets);
|
|
1161
|
+
const targets = ['.dev.vars', '.edgebase/secrets.json'];
|
|
1162
|
+
const secretFailures = [];
|
|
1163
|
+
if (isRestoreEdge) {
|
|
1164
|
+
for (const [key, value] of Object.entries(backup.secrets)) {
|
|
1165
|
+
try {
|
|
1166
|
+
execFileSync(npxCommand(), ['wrangler', 'secret', 'put', key], {
|
|
1167
|
+
cwd: projectDir,
|
|
1168
|
+
stdio: ['pipe', 'pipe', 'pipe'],
|
|
1169
|
+
input: value,
|
|
1170
|
+
});
|
|
1171
|
+
}
|
|
1172
|
+
catch (err) {
|
|
1173
|
+
secretFailures.push(`${key}: ${errorMessage(err)}`);
|
|
1174
|
+
}
|
|
1175
|
+
}
|
|
1176
|
+
targets.push('Workers Secrets');
|
|
1177
|
+
}
|
|
1178
|
+
if (secretFailures.length > 0) {
|
|
1179
|
+
logHuman(chalk.yellow(` ⚠ Secrets were only partially restored: ${summarizeFailures('Workers Secrets sync', secretFailures)}`));
|
|
1180
|
+
}
|
|
1181
|
+
else {
|
|
1182
|
+
restoredSecrets = true;
|
|
1183
|
+
logHuman(chalk.green(` ✓ ${Object.keys(backup.secrets).length} keys → ${targets.join(', ')}`));
|
|
1184
|
+
}
|
|
1185
|
+
}
|
|
1186
|
+
catch (err) {
|
|
1187
|
+
logHuman(chalk.yellow(` ⚠ ${errorMessage(err)}`));
|
|
1188
|
+
}
|
|
1189
|
+
}
|
|
1190
|
+
logHuman();
|
|
1191
|
+
logHuman(chalk.green('✅ Restore complete!'));
|
|
1192
|
+
if (hasSecrets) {
|
|
1193
|
+
if (restoredSecrets) {
|
|
1194
|
+
logHuman(chalk.yellow('⚠'), 'Secrets restored. Existing JWT tokens remain valid.');
|
|
1195
|
+
}
|
|
1196
|
+
else {
|
|
1197
|
+
logHuman(chalk.yellow('⚠'), 'Secrets were not fully restored. Re-login may be required.');
|
|
1198
|
+
}
|
|
1199
|
+
}
|
|
1200
|
+
else {
|
|
1201
|
+
logHuman(chalk.yellow('⚠'), 'If JWT secrets differ, existing tokens will be invalid.');
|
|
1202
|
+
}
|
|
1203
|
+
}
|
|
1204
|
+
catch (err) {
|
|
1205
|
+
if (isCliStructuredError(err))
|
|
1206
|
+
throw err;
|
|
1207
|
+
logHuman();
|
|
1208
|
+
logHumanError(chalk.red(`✗ Restore failed: ${errorMessage(err)}`));
|
|
1209
|
+
logHuman(chalk.yellow('⚠'), 'Restore stopped before completion. Re-run from a clean target if needed.');
|
|
1210
|
+
raiseCliError({
|
|
1211
|
+
code: 'backup_restore_failed',
|
|
1212
|
+
message: errorMessage(err),
|
|
1213
|
+
hint: 'Restore stopped before completion. Re-run from a clean target if needed.',
|
|
1214
|
+
});
|
|
1215
|
+
}
|
|
1216
|
+
finally {
|
|
1217
|
+
process.off('SIGINT', sigintHandler);
|
|
1218
|
+
if (extractDirPath && extractDirPath.includes('.tmp-restore-')) {
|
|
1219
|
+
rmSync(extractDirPath, { recursive: true, force: true });
|
|
1220
|
+
}
|
|
1221
|
+
}
|
|
1222
|
+
});
|
|
1223
|
+
/**
 * Exported for testing only — bundles the module's private helpers under one
 * namespace so the test suite can reach them without widening the public CLI
 * surface. Key insertion order is preserved from the original export.
 */
export const _internals = {
    // Worker admin API transport
    apiCall, apiBinary, apiUpload,
    // secrets file handling
    parseDevVars, readSecrets, writeSecrets,
    // filesystem walking and option resolution
    collectFiles, resolveOptions,
    // bounded-concurrency task runners
    throttle, throttleSettled,
    // error formatting and human/JSON output channels
    errorMessage, outputJson, logHuman, logHumanError, writeHuman,
    // settled-result failure aggregation
    collectSettledFailures, summarizeFailures,
    // backup archive and download-session plumbing
    parseBackupFile, resolveDownloadSessionPaths,
    // Cloudflare API enumeration helpers
    enumerateDOsViaCFAPI, getCFNamespaces,
};
|
|
1247
|
+
//# sourceMappingURL=backup.js.map
|