@edge-base/cli 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +182 -0
- package/dist/commands/admin.d.ts +10 -0
- package/dist/commands/admin.d.ts.map +1 -0
- package/dist/commands/admin.js +307 -0
- package/dist/commands/admin.js.map +1 -0
- package/dist/commands/backup.d.ts +148 -0
- package/dist/commands/backup.d.ts.map +1 -0
- package/dist/commands/backup.js +1247 -0
- package/dist/commands/backup.js.map +1 -0
- package/dist/commands/completion.d.ts +3 -0
- package/dist/commands/completion.d.ts.map +1 -0
- package/dist/commands/completion.js +168 -0
- package/dist/commands/completion.js.map +1 -0
- package/dist/commands/create-plugin.d.ts +3 -0
- package/dist/commands/create-plugin.d.ts.map +1 -0
- package/dist/commands/create-plugin.js +208 -0
- package/dist/commands/create-plugin.js.map +1 -0
- package/dist/commands/deploy.d.ts +146 -0
- package/dist/commands/deploy.d.ts.map +1 -0
- package/dist/commands/deploy.js +1823 -0
- package/dist/commands/deploy.js.map +1 -0
- package/dist/commands/describe.d.ts +45 -0
- package/dist/commands/describe.d.ts.map +1 -0
- package/dist/commands/describe.js +114 -0
- package/dist/commands/describe.js.map +1 -0
- package/dist/commands/destroy.d.ts +13 -0
- package/dist/commands/destroy.d.ts.map +1 -0
- package/dist/commands/destroy.js +642 -0
- package/dist/commands/destroy.js.map +1 -0
- package/dist/commands/dev.d.ts +80 -0
- package/dist/commands/dev.d.ts.map +1 -0
- package/dist/commands/dev.js +1131 -0
- package/dist/commands/dev.js.map +1 -0
- package/dist/commands/docker.d.ts +22 -0
- package/dist/commands/docker.d.ts.map +1 -0
- package/dist/commands/docker.js +373 -0
- package/dist/commands/docker.js.map +1 -0
- package/dist/commands/export.d.ts +15 -0
- package/dist/commands/export.d.ts.map +1 -0
- package/dist/commands/export.js +142 -0
- package/dist/commands/export.js.map +1 -0
- package/dist/commands/init.d.ts +7 -0
- package/dist/commands/init.d.ts.map +1 -0
- package/dist/commands/init.js +506 -0
- package/dist/commands/init.js.map +1 -0
- package/dist/commands/keys.d.ts +23 -0
- package/dist/commands/keys.d.ts.map +1 -0
- package/dist/commands/keys.js +347 -0
- package/dist/commands/keys.js.map +1 -0
- package/dist/commands/logs.d.ts +17 -0
- package/dist/commands/logs.d.ts.map +1 -0
- package/dist/commands/logs.js +104 -0
- package/dist/commands/logs.js.map +1 -0
- package/dist/commands/migrate.d.ts +29 -0
- package/dist/commands/migrate.d.ts.map +1 -0
- package/dist/commands/migrate.js +302 -0
- package/dist/commands/migrate.js.map +1 -0
- package/dist/commands/migration.d.ts +18 -0
- package/dist/commands/migration.d.ts.map +1 -0
- package/dist/commands/migration.js +114 -0
- package/dist/commands/migration.js.map +1 -0
- package/dist/commands/neon.d.ts +66 -0
- package/dist/commands/neon.d.ts.map +1 -0
- package/dist/commands/neon.js +600 -0
- package/dist/commands/neon.js.map +1 -0
- package/dist/commands/plugins.d.ts +9 -0
- package/dist/commands/plugins.d.ts.map +1 -0
- package/dist/commands/plugins.js +295 -0
- package/dist/commands/plugins.js.map +1 -0
- package/dist/commands/realtime.d.ts +3 -0
- package/dist/commands/realtime.d.ts.map +1 -0
- package/dist/commands/realtime.js +71 -0
- package/dist/commands/realtime.js.map +1 -0
- package/dist/commands/secret.d.ts +7 -0
- package/dist/commands/secret.d.ts.map +1 -0
- package/dist/commands/secret.js +180 -0
- package/dist/commands/secret.js.map +1 -0
- package/dist/commands/seed.d.ts +21 -0
- package/dist/commands/seed.d.ts.map +1 -0
- package/dist/commands/seed.js +325 -0
- package/dist/commands/seed.js.map +1 -0
- package/dist/commands/telemetry.d.ts +12 -0
- package/dist/commands/telemetry.d.ts.map +1 -0
- package/dist/commands/telemetry.js +57 -0
- package/dist/commands/telemetry.js.map +1 -0
- package/dist/commands/typegen.d.ts +26 -0
- package/dist/commands/typegen.d.ts.map +1 -0
- package/dist/commands/typegen.js +212 -0
- package/dist/commands/typegen.js.map +1 -0
- package/dist/commands/upgrade.d.ts +29 -0
- package/dist/commands/upgrade.d.ts.map +1 -0
- package/dist/commands/upgrade.js +265 -0
- package/dist/commands/upgrade.js.map +1 -0
- package/dist/commands/webhook-test.d.ts +3 -0
- package/dist/commands/webhook-test.d.ts.map +1 -0
- package/dist/commands/webhook-test.js +133 -0
- package/dist/commands/webhook-test.js.map +1 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +183 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/agent-contract.d.ts +36 -0
- package/dist/lib/agent-contract.d.ts.map +1 -0
- package/dist/lib/agent-contract.js +78 -0
- package/dist/lib/agent-contract.js.map +1 -0
- package/dist/lib/cf-auth.d.ts +76 -0
- package/dist/lib/cf-auth.d.ts.map +1 -0
- package/dist/lib/cf-auth.js +321 -0
- package/dist/lib/cf-auth.js.map +1 -0
- package/dist/lib/cli-context.d.ts +23 -0
- package/dist/lib/cli-context.d.ts.map +1 -0
- package/dist/lib/cli-context.js +40 -0
- package/dist/lib/cli-context.js.map +1 -0
- package/dist/lib/cloudflare-deploy-manifest.d.ts +26 -0
- package/dist/lib/cloudflare-deploy-manifest.d.ts.map +1 -0
- package/dist/lib/cloudflare-deploy-manifest.js +107 -0
- package/dist/lib/cloudflare-deploy-manifest.js.map +1 -0
- package/dist/lib/cloudflare-wrangler-resources.d.ts +32 -0
- package/dist/lib/cloudflare-wrangler-resources.d.ts.map +1 -0
- package/dist/lib/cloudflare-wrangler-resources.js +59 -0
- package/dist/lib/cloudflare-wrangler-resources.js.map +1 -0
- package/dist/lib/config-editor.d.ts +139 -0
- package/dist/lib/config-editor.d.ts.map +1 -0
- package/dist/lib/config-editor.js +1188 -0
- package/dist/lib/config-editor.js.map +1 -0
- package/dist/lib/deploy-shared.d.ts +55 -0
- package/dist/lib/deploy-shared.d.ts.map +1 -0
- package/dist/lib/deploy-shared.js +183 -0
- package/dist/lib/deploy-shared.js.map +1 -0
- package/dist/lib/dev-sidecar.d.ts +31 -0
- package/dist/lib/dev-sidecar.d.ts.map +1 -0
- package/dist/lib/dev-sidecar.js +1058 -0
- package/dist/lib/dev-sidecar.js.map +1 -0
- package/dist/lib/fetch-with-timeout.d.ts +14 -0
- package/dist/lib/fetch-with-timeout.d.ts.map +1 -0
- package/dist/lib/fetch-with-timeout.js +29 -0
- package/dist/lib/fetch-with-timeout.js.map +1 -0
- package/dist/lib/function-registry.d.ts +56 -0
- package/dist/lib/function-registry.d.ts.map +1 -0
- package/dist/lib/function-registry.js +210 -0
- package/dist/lib/function-registry.js.map +1 -0
- package/dist/lib/load-config.d.ts +24 -0
- package/dist/lib/load-config.d.ts.map +1 -0
- package/dist/lib/load-config.js +263 -0
- package/dist/lib/load-config.js.map +1 -0
- package/dist/lib/local-secrets.d.ts +2 -0
- package/dist/lib/local-secrets.d.ts.map +1 -0
- package/dist/lib/local-secrets.js +60 -0
- package/dist/lib/local-secrets.js.map +1 -0
- package/dist/lib/managed-resource-names.d.ts +4 -0
- package/dist/lib/managed-resource-names.d.ts.map +1 -0
- package/dist/lib/managed-resource-names.js +19 -0
- package/dist/lib/managed-resource-names.js.map +1 -0
- package/dist/lib/migrator.d.ts +57 -0
- package/dist/lib/migrator.d.ts.map +1 -0
- package/dist/lib/migrator.js +321 -0
- package/dist/lib/migrator.js.map +1 -0
- package/dist/lib/neon.d.ts +41 -0
- package/dist/lib/neon.d.ts.map +1 -0
- package/dist/lib/neon.js +325 -0
- package/dist/lib/neon.js.map +1 -0
- package/dist/lib/node-tools.d.ts +10 -0
- package/dist/lib/node-tools.d.ts.map +1 -0
- package/dist/lib/node-tools.js +32 -0
- package/dist/lib/node-tools.js.map +1 -0
- package/dist/lib/npm.d.ts +8 -0
- package/dist/lib/npm.d.ts.map +1 -0
- package/dist/lib/npm.js +10 -0
- package/dist/lib/npm.js.map +1 -0
- package/dist/lib/npx.d.ts +9 -0
- package/dist/lib/npx.d.ts.map +1 -0
- package/dist/lib/npx.js +11 -0
- package/dist/lib/npx.js.map +1 -0
- package/dist/lib/project-runtime.d.ts +38 -0
- package/dist/lib/project-runtime.d.ts.map +1 -0
- package/dist/lib/project-runtime.js +122 -0
- package/dist/lib/project-runtime.js.map +1 -0
- package/dist/lib/prompts.d.ts +28 -0
- package/dist/lib/prompts.d.ts.map +1 -0
- package/dist/lib/prompts.js +85 -0
- package/dist/lib/prompts.js.map +1 -0
- package/dist/lib/rate-limit-bindings.d.ts +11 -0
- package/dist/lib/rate-limit-bindings.d.ts.map +1 -0
- package/dist/lib/rate-limit-bindings.js +52 -0
- package/dist/lib/rate-limit-bindings.js.map +1 -0
- package/dist/lib/realtime-provision.d.ts +22 -0
- package/dist/lib/realtime-provision.d.ts.map +1 -0
- package/dist/lib/realtime-provision.js +246 -0
- package/dist/lib/realtime-provision.js.map +1 -0
- package/dist/lib/resolve-options.d.ts +42 -0
- package/dist/lib/resolve-options.d.ts.map +1 -0
- package/dist/lib/resolve-options.js +98 -0
- package/dist/lib/resolve-options.js.map +1 -0
- package/dist/lib/runtime-scaffold.d.ts +17 -0
- package/dist/lib/runtime-scaffold.d.ts.map +1 -0
- package/dist/lib/runtime-scaffold.js +366 -0
- package/dist/lib/runtime-scaffold.js.map +1 -0
- package/dist/lib/schema-check.d.ts +79 -0
- package/dist/lib/schema-check.d.ts.map +1 -0
- package/dist/lib/schema-check.js +347 -0
- package/dist/lib/schema-check.js.map +1 -0
- package/dist/lib/spinner.d.ts +20 -0
- package/dist/lib/spinner.d.ts.map +1 -0
- package/dist/lib/spinner.js +42 -0
- package/dist/lib/spinner.js.map +1 -0
- package/dist/lib/telemetry.d.ts +37 -0
- package/dist/lib/telemetry.d.ts.map +1 -0
- package/dist/lib/telemetry.js +98 -0
- package/dist/lib/telemetry.js.map +1 -0
- package/dist/lib/turnstile-provision.d.ts +27 -0
- package/dist/lib/turnstile-provision.d.ts.map +1 -0
- package/dist/lib/turnstile-provision.js +144 -0
- package/dist/lib/turnstile-provision.js.map +1 -0
- package/dist/lib/update-check.d.ts +13 -0
- package/dist/lib/update-check.d.ts.map +1 -0
- package/dist/lib/update-check.js +110 -0
- package/dist/lib/update-check.js.map +1 -0
- package/dist/lib/wrangler-secrets.d.ts +3 -0
- package/dist/lib/wrangler-secrets.d.ts.map +1 -0
- package/dist/lib/wrangler-secrets.js +32 -0
- package/dist/lib/wrangler-secrets.js.map +1 -0
- package/dist/lib/wrangler.d.ts +9 -0
- package/dist/lib/wrangler.d.ts.map +1 -0
- package/dist/lib/wrangler.js +84 -0
- package/dist/lib/wrangler.js.map +1 -0
- package/dist/templates/plugin/README.md.tmpl +91 -0
- package/dist/templates/plugin/client/js/package.json.tmpl +23 -0
- package/dist/templates/plugin/client/js/src/index.ts.tmpl +68 -0
- package/dist/templates/plugin/client/js/tsconfig.json.tmpl +14 -0
- package/dist/templates/plugin/server/package.json.tmpl +19 -0
- package/dist/templates/plugin/server/src/index.ts.tmpl +59 -0
- package/dist/templates/plugin/server/tsconfig.json.tmpl +14 -0
- package/llms.txt +94 -0
- package/package.json +60 -0
|
@@ -0,0 +1,1823 @@
|
|
|
1
|
+
import { Command } from 'commander';
|
|
2
|
+
import { spawn, execFileSync } from 'node:child_process';
|
|
3
|
+
import { wranglerArgs, wranglerCommand, wranglerHint } from '../lib/wrangler.js';
|
|
4
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync, chmodSync, unlinkSync, } from 'node:fs';
|
|
5
|
+
import { dirname, relative, resolve, join, basename } from 'node:path';
|
|
6
|
+
import { randomBytes } from 'node:crypto';
|
|
7
|
+
import chalk from 'chalk';
|
|
8
|
+
import { loadConfigSafe } from '../lib/load-config.js';
|
|
9
|
+
import { resolveRateLimitBindings } from '../lib/rate-limit-bindings.js';
|
|
10
|
+
import { buildRouteName, detectExports, generateFunctionRegistry, scanFunctions, validateRouteNames, } from '../lib/function-registry.js';
|
|
11
|
+
import { extractDatabases, generateTempWranglerToml, mergePluginTables, } from '../lib/deploy-shared.js';
|
|
12
|
+
import { buildSnapshot, loadSnapshot, saveSnapshot, detectDestructiveChanges, filterAutoPassChanges, handleDestructiveChanges, resetLocalDoState, detectProviderChanges, detectAuthProviderChange, } from '../lib/schema-check.js';
|
|
13
|
+
import { dumpCurrentData, restoreToNewProvider, promptMigration, } from '../lib/migrator.js';
|
|
14
|
+
import { isCliStructuredError, raiseCliError } from '../lib/agent-contract.js';
|
|
15
|
+
import { resolveServiceKey as resolveServiceKeyFromOptions } from '../lib/resolve-options.js';
|
|
16
|
+
import { parseDevVars, parseEnvFile } from '../lib/dev-sidecar.js';
|
|
17
|
+
import { ensureCloudflareAuth, ensureWranglerToml, resolveApiToken } from '../lib/cf-auth.js';
|
|
18
|
+
import { spin } from '../lib/spinner.js';
|
|
19
|
+
import { isJson, isNonInteractive, isQuiet } from '../lib/cli-context.js';
|
|
20
|
+
import { promptConfirm } from '../lib/prompts.js';
|
|
21
|
+
import { injectCaptchaSiteKey, provisionTurnstile, storeSecretIfMissing, } from '../lib/turnstile-provision.js';
|
|
22
|
+
import { listWranglerSecretNames } from '../lib/wrangler-secrets.js';
|
|
23
|
+
import { findCloudflareResourceRecord, readCloudflareDeployManifest, writeCloudflareDeployManifest, } from '../lib/cloudflare-deploy-manifest.js';
|
|
24
|
+
import { parseWranglerResourceConfig } from '../lib/cloudflare-wrangler-resources.js';
|
|
25
|
+
import { buildLegacyManagedD1DatabaseName, buildManagedD1DatabaseName, } from '../lib/managed-resource-names.js';
|
|
26
|
+
import { upsertEnvValue } from '../lib/neon.js';
|
|
27
|
+
import { resolveProjectWorkerName, resolveProjectWorkerUrl, } from '../lib/project-runtime.js';
|
|
28
|
+
import { ensureRuntimeScaffold, getRuntimeServerSrcDir, INTERNAL_D1_BINDINGS, writeRuntimeConfigShim, } from '../lib/runtime-scaffold.js';
|
|
29
|
+
// Force full evaluation of edgebase.config.ts (no regex-based fallback parsing).
// NOTE(review): presumably passed to loadConfigSafe — confirm against callers later in this file.
const FULL_CONFIG_EVAL = { allowRegexFallback: false };
// Header line written at the top of the generated production secrets env file.
const RELEASE_ENV_HEADER = '# EdgeBase production secrets';
|
|
31
|
+
/**
 * Pull the deployed *.workers.dev URL out of `wrangler deploy` output.
 * Wrangler may print several URLs (preview, alias, final); the last one
 * printed wins. Returns '' when no workers.dev URL appears at all.
 */
export function extractWorkerUrlFromWranglerDeployOutput(output) {
    const WORKERS_DEV_URL = /https:\/\/[A-Za-z0-9.-]+\.workers\.dev/g;
    let lastUrl = '';
    for (const match of output.matchAll(WORKERS_DEV_URL)) {
        lastUrl = match[0];
    }
    return lastUrl;
}
|
|
35
|
+
/** Resolve the worker's public URL from the project's on-disk runtime state. */
function resolveWorkerUrlFromProject(projectDir) {
    const workerUrl = resolveProjectWorkerUrl(projectDir);
    return workerUrl;
}
|
|
38
|
+
/** Resolve the worker's name from the project's on-disk runtime state. */
function resolveWorkerNameFromProject(projectDir) {
    const workerName = resolveProjectWorkerName(projectDir);
    return workerName;
}
|
|
41
|
+
/**
 * Best-guess URL of the worker that was just deployed.
 * Precedence: URL printed by `wrangler deploy` → EDGEBASE_URL env override →
 * URL derived from the project directory.
 */
function resolveDeployedWorkerUrl(projectDir, deployOutput) {
    const fromDeployOutput = extractWorkerUrlFromWranglerDeployOutput(deployOutput);
    if (fromDeployOutput) {
        return fromDeployOutput;
    }
    if (process.env.EDGEBASE_URL) {
        return process.env.EDGEBASE_URL;
    }
    return resolveWorkerUrlFromProject(projectDir);
}
|
|
46
|
+
/**
 * Ask the deployed worker where its admin UI lives.
 * Probes the worker root with a 5s timeout and manual redirect handling:
 * a 3xx Location or a JSON body with a non-empty string `admin` field is
 * resolved against the worker URL. Any failure yields null (best-effort).
 */
export async function resolveAdminUrlFromRuntime(workerUrl) {
    if (!workerUrl) {
        return null;
    }
    try {
        const res = await fetch(workerUrl, {
            redirect: 'manual',
            signal: AbortSignal.timeout(5_000),
            headers: { accept: 'application/json' },
        });
        const isRedirect = res.status >= 300 && res.status < 400;
        if (isRedirect) {
            const location = res.headers.get('location');
            if (!location) {
                return null;
            }
            return new URL(location, workerUrl).toString();
        }
        const contentType = res.headers.get('content-type') ?? '';
        if (!contentType.includes('application/json')) {
            return null;
        }
        const payload = await res.json();
        const admin = payload?.admin;
        if (typeof admin !== 'string' || admin.length === 0) {
            return null;
        }
        return new URL(admin, workerUrl).toString();
    }
    catch {
        // Network error, timeout, or invalid URL — no admin URL available.
        return null;
    }
}
|
|
74
|
+
/**
|
|
75
|
+
* `npx edgebase deploy` — Config bundling + functions bundling + wrangler deploy.
|
|
76
|
+
* 1. Reads edgebase.config.ts
|
|
77
|
+
* 2. Validates config
|
|
78
|
+
* 3. Scans functions/ directory and generates Lazy Import registry
|
|
79
|
+
* 4. Bundles config as JSON into Worker
|
|
80
|
+
* 5. Runs wrangler deploy
|
|
81
|
+
* 6. Sends warming request to db:_system DO
|
|
82
|
+
*/
|
|
83
|
+
/**
 * Validate a loaded EdgeBase config for known issues.
 * Appends human-readable messages to `warnings` (deploy continues) and
 * `errors` (deploy should abort); both arrays are mutated in place.
 */
export function validateConfig(config, warnings, errors) {
    // Structured keys are formatted 'jb_{kid}_{secret}', so kids must not contain underscores.
    const SERVICE_KEY_KID_PATTERN = /^[A-Za-z0-9-]+$/;
    // ─── Check 0: Release mode warning ───
    if (!config.release) {
        warnings.push('release is false — all resources are accessible without access rules. '
            + 'Set release: true in edgebase.config.ts before production deployment.');
    }
    // ─── Check 1: Service Key validation ───
    // Production deploys should use secretSource: 'dashboard' (Workers Secrets);
    // inline secrets risk leaking via git commits.
    const serviceKeys = config.serviceKeys;
    if (serviceKeys?.keys) {
        const knownKids = new Set();
        for (const [i, key] of serviceKeys.keys.entries()) {
            if (!key.kid || typeof key.kid !== 'string') {
                errors.push(`serviceKeys.keys[${i}].kid is required and must be a string.`);
                continue;
            }
            if (!SERVICE_KEY_KID_PATTERN.test(key.kid)) {
                errors.push(`serviceKeys.keys[${i}].kid '${key.kid}' is invalid. `
                    + `Use letters, numbers, and hyphens only. `
                    + `Underscore is reserved by the structured key format 'jb_{kid}_{secret}'.`);
            }
            if (knownKids.has(key.kid)) {
                errors.push(`Duplicate Service Key kid '${key.kid}'. Each serviceKeys.keys entry must be unique.`);
            }
            else {
                knownKids.add(key.kid);
            }
            // Each secret source requires its matching reference field.
            if (key.secretSource === 'dashboard' && (!key.secretRef || typeof key.secretRef !== 'string')) {
                errors.push(`serviceKeys.keys[${i}] (${key.kid}): secretSource 'dashboard' requires a non-empty secretRef.`);
            }
            if (key.secretSource === 'inline' && (!key.inlineSecret || typeof key.inlineSecret !== 'string')) {
                errors.push(`serviceKeys.keys[${i}] (${key.kid}): secretSource 'inline' requires a non-empty inlineSecret.`);
            }
        }
        const inlineKeys = serviceKeys.keys.filter((key) => key.secretSource === 'inline');
        if (inlineKeys.length > 0) {
            const kids = inlineKeys.map((key) => key.kid ?? 'unknown').join(', ');
            warnings.push(`Service Key(s) [${kids}] use secretSource: 'inline' — `
                + `inline secrets are stored in edgebase.config.ts and risk leaking via git. `
                + `Use secretSource: 'dashboard' with Workers Secrets for production.`);
        }
        // Internal self-calls need at least one unconstrained root-tier key.
        const rootKeys = serviceKeys.keys.filter((key) => key.tier === 'root');
        const allRootKeysConstrained = rootKeys.every((key) => !!key.constraints?.tenant || !!key.constraints?.ipCidr?.length);
        if (rootKeys.length > 0 && allRootKeysConstrained) {
            warnings.push('All root-tier Service Keys are request-scoped via tenant/ipCidr constraints. '
                + 'Internal EdgeBase self-calls for auth hooks, storage hooks, plugin migrations, and function admin helpers '
                + 'need at least one root-tier key without tenant/ipCidr constraints. Prefer a dedicated root key with secretRef: \'SERVICE_KEY\'.');
        }
    }
    // ─── Check 2: Table name uniqueness across DB blocks (§18) ───
    // Different DB blocks must not share table names — this would cause DO routing collisions.
    const RESERVED_TOP_KEYS = new Set([
        'release',
        'storage',
        'rooms',
        'auth',
        'serviceKeys',
        'captcha',
        'email',
        'push',
        'plugins',
        'rateLimits',
        'functions',
        'databases',
    ]);
    const tableOwners = new Map(); // tableName → dbKey that first declared it
    for (const [dbKey, dbBlock] of Object.entries(config.databases ?? {})) {
        if (RESERVED_TOP_KEYS.has(dbKey)) {
            continue;
        }
        const tables = dbBlock?.tables;
        if (!tables || typeof tables !== 'object') {
            continue;
        }
        for (const tableName of Object.keys(tables)) {
            const owner = tableOwners.get(tableName);
            if (owner !== undefined) {
                errors.push(`Table name '${tableName}' is duplicated in DB block '${owner}' and '${dbKey}'. `
                    + `Table names must be unique across all DB blocks.`);
            }
            else {
                tableOwners.set(tableName, dbKey);
            }
        }
    }
}
|
|
171
|
+
/**
 * Collect every cron expression the deployed worker must register:
 * schedule-triggered functions, user-declared cloudflare.extraCrons,
 * and the built-in daily maintenance cron ('0 3 * * *'). Order is
 * first-seen and duplicates are dropped.
 */
export function collectManagedCronSchedules(config) {
    const schedules = [];
    const pushUnique = (cron) => {
        if (!schedules.includes(cron)) {
            schedules.push(cron);
        }
    };
    const fns = config?.functions;
    if (fns && typeof fns === 'object') {
        for (const fn of Object.values(fns)) {
            if (fn?.trigger?.type === 'schedule' && fn.trigger.cron) {
                pushUnique(fn.trigger.cron);
            }
        }
    }
    const extraCrons = config?.cloudflare?.extraCrons;
    if (Array.isArray(extraCrons)) {
        for (const cron of extraCrons) {
            if (typeof cron === 'string') {
                pushUnique(cron);
            }
        }
    }
    // The maintenance cron is always present, exactly once.
    pushUnique('0 3 * * *');
    return schedules;
}
|
|
195
|
+
/** Exported for testing */
// Aggregates module-private helpers (plus a few shared imports) under one
// object so unit tests can reach them without widening the public API.
// NOTE(review): several members (provisionSingleInstanceD1Databases,
// buildMergedD1Config, provisionVectorizeIndexes, isPostgresProvider,
// resolveReleaseSecretVars, inspectAuthEnv, collectAuthEnvWarnings,
// copyDevelopmentAuthProviderToRelease) are defined later in this file.
export const _internals = {
    buildRouteName,
    detectExports,
    scanFunctions,
    generateFunctionRegistry,
    validateRouteNames,
    mergePluginTables,
    provisionKvNamespaces,
    provisionD1Databases,
    provisionSingleInstanceD1Databases,
    buildMergedKvConfig,
    dedupeBindingConfigs,
    buildMergedD1Config,
    parseWranglerJsonOutput,
    parseHyperdriveListOutput,
    dedupeManifestResources,
    provisionVectorizeIndexes,
    generateTempWranglerToml,
    provisionTurnstile,
    storeSecretIfMissing,
    injectCaptchaSiteKey,
    extractDatabases,
    collectManagedCronSchedules,
    isPostgresProvider,
    isHyperdriveAlreadyExistsError,
    resolveAdminUrlFromRuntime,
    resolveReleaseSecretVars,
    inspectAuthEnv,
    collectAuthEnvWarnings,
    copyDevelopmentAuthProviderToRelease,
    resolveExistingR2BucketRecord,
};
|
|
228
|
+
// ─── KV/D1/Vectorize Auto-Provisioning ───
|
|
229
|
+
/**
 * Drop config entries that reuse a binding name claimed by an earlier entry
 * (first occurrence wins). Entries without a `binding` are dropped entirely.
 */
function dedupeBindingConfigs(config) {
    const result = {};
    const claimed = new Set();
    for (const [name, entry] of Object.entries(config)) {
        const binding = entry?.binding;
        if (!binding || claimed.has(binding)) {
            continue;
        }
        claimed.add(binding);
        result[name] = entry;
    }
    return result;
}
|
|
240
|
+
/** KV namespace config EdgeBase always provisions, regardless of user config. */
function buildInternalKvConfig() {
    const internal = { binding: 'KV' };
    return { internal };
}
|
|
245
|
+
/**
 * Merge the always-present internal KV config with user-declared namespaces.
 * Internal entries are added first and take precedence; any later entry whose
 * binding name is already claimed is silently dropped.
 */
function buildMergedKvConfig(explicitKvConfig) {
    const merged = {};
    const mergeEntries = (entries) => {
        const claimed = new Set(Object.values(merged).map((entry) => entry.binding));
        for (const [name, entry] of Object.entries(dedupeBindingConfigs(entries))) {
            if (claimed.has(entry.binding)) {
                continue;
            }
            claimed.add(entry.binding);
            merged[name] = entry;
        }
    };
    mergeEntries(buildInternalKvConfig());
    if (explicitKvConfig) {
        mergeEntries(explicitKvConfig);
    }
    return merged;
}
|
|
262
|
+
/**
 * Parse JSON from wrangler output that may be preceded by log noise.
 * Tries the raw trimmed output first, then the substrings starting at the
 * first '[' and first '{'. Throws when no candidate parses as JSON.
 */
function parseWranglerJsonOutput(output) {
    const trimmed = output.trim();
    const candidates = [];
    for (const candidate of [trimmed, trimmed.slice(trimmed.indexOf('[')), trimmed.slice(trimmed.indexOf('{'))]) {
        if (candidate && !candidates.includes(candidate)) {
            candidates.push(candidate);
        }
    }
    for (const candidate of candidates) {
        try {
            return JSON.parse(candidate);
        }
        catch {
            // Not valid JSON — try the next candidate.
        }
    }
    throw new Error('Unexpected Wrangler JSON output.');
}
|
|
276
|
+
/**
 * Parse `wrangler hyperdrive list` output into [{ id, name }] rows.
 * Newer wrangler prints JSON (delegated to parseWranglerJsonOutput); older
 * versions print a box-drawing table, parsed row by row. Header rows and
 * rows whose first cell is not a 32-char hex ID are skipped.
 */
function parseHyperdriveListOutput(output) {
    const trimmed = output.trim();
    if (!trimmed) {
        return [];
    }
    const looksLikeJson = trimmed.startsWith('[') || trimmed.startsWith('{');
    if (looksLikeJson) {
        return parseWranglerJsonOutput(output);
    }
    const HEX_ID = /^[a-f0-9]{32}$/i;
    const rows = [];
    for (const line of output.split(/\r?\n/)) {
        if (!line.trim().startsWith('│')) {
            continue;
        }
        const cells = line
            .split('│')
            .slice(1, -1)
            .map((cell) => cell.trim());
        if (cells.length < 2) {
            continue;
        }
        const [id, name] = cells;
        if (id === 'id' || name === 'name' || !HEX_ID.test(id)) {
            continue;
        }
        rows.push({ id, name });
    }
    return rows;
}
|
|
304
|
+
/**
 * List existing Hyperdrive configs via wrangler.
 * Returns [] on any failure (missing auth, old wrangler, offline) so that
 * provisioning can proceed best-effort.
 */
function listHyperdriveConfigs(projectDir) {
    try {
        const args = wranglerArgs(['wrangler', 'hyperdrive', 'list']);
        const output = execFileSync(wranglerCommand(), args, {
            cwd: projectDir,
            encoding: 'utf-8',
            stdio: ['ignore', 'pipe', 'ignore'],
        });
        return parseHyperdriveListOutput(output);
    }
    catch {
        // Listing is advisory only — treat every failure as "none found".
        return [];
    }
}
|
|
317
|
+
/** True when wrangler reports the Hyperdrive config already exists (API code 2017). */
function isHyperdriveAlreadyExistsError(message) {
    const ALREADY_EXISTS = /already exists\s*\[code:\s*2017\]/i;
    return ALREADY_EXISTS.test(message);
}
|
|
320
|
+
/** True when wrangler's error indicates the R2 bucket already exists (any phrasing). */
function isR2BucketAlreadyExistsError(message) {
    const patterns = [
        /bucket.+already exists/i,
        /already own bucket/i,
        /bucket named.+already exists/i,
    ];
    return patterns.some((pattern) => pattern.test(message));
}
|
|
323
|
+
/**
 * Normalize a provisioned binding into a deploy-manifest resource record.
 * Defaults: managed=true, source='existing' when the binding omits them.
 */
function toManifestResourceRecord(binding) {
    const { type, name, binding: bindingName, id } = binding;
    return {
        type,
        name,
        binding: bindingName,
        id,
        managed: binding.managed ?? true,
        source: binding.source ?? 'existing',
    };
}
|
|
333
|
+
/**
 * Collapse duplicate manifest resources keyed by (type, binding-or-name).
 * The last occurrence wins; output order follows first appearance of each key.
 */
function dedupeManifestResources(resources) {
    const byKey = new Map();
    for (const resource of resources) {
        const logicalName = resource.binding ?? resource.name;
        byKey.set(`${resource.type}:${logicalName}`, resource);
    }
    return [...byKey.values()];
}
|
|
342
|
+
/**
 * Decide manifest ownership for an R2 bucket that already exists remotely.
 * A bucket recorded as 'created' by a previous deploy stays managed;
 * everything else is treated as pre-existing and left unmanaged.
 */
function resolveExistingR2BucketRecord(existingRecord) {
    const wasCreatedByUs = existingRecord?.source === 'created';
    if (wasCreatedByUs) {
        const managed = existingRecord.managed ?? true;
        return { managed, source: 'created' };
    }
    const source = existingRecord?.source ?? 'existing';
    return { managed: false, source };
}
|
|
354
|
+
/**
 * Create the R2 buckets declared in wrangler.toml, best-effort.
 * Returns manifest resource records for every bucket that was created or
 * confirmed to exist, so the deploy manifest can track ownership
 * (managed vs pre-existing). Individual failures are logged, never thrown.
 */
function provisionR2Buckets(projectDir, previousManifest) {
    const wranglerPath = join(projectDir, 'wrangler.toml');
    // No wrangler.toml means nothing declares R2 buckets — nothing to do.
    if (!existsSync(wranglerPath))
        return [];
    const wranglerContent = readFileSync(wranglerPath, 'utf-8');
    const { r2Buckets } = parseWranglerResourceConfig(wranglerContent);
    const resources = [];
    for (const bucket of r2Buckets) {
        // Prior manifest record (if any) tells us whether a previous deploy created this bucket.
        const existingRecord = findCloudflareResourceRecord(previousManifest, {
            type: 'r2_bucket',
            name: bucket.bucketName,
            binding: bucket.binding,
            id: bucket.bucketName,
        });
        const args = ['wrangler', 'r2', 'bucket', 'create', bucket.bucketName];
        if (bucket.jurisdiction) {
            args.push(`--jurisdiction=${bucket.jurisdiction}`);
        }
        try {
            execFileSync(wranglerCommand(), wranglerArgs(args), {
                cwd: projectDir,
                encoding: 'utf-8',
                stdio: ['pipe', 'pipe', 'pipe'],
            });
            console.log(chalk.green('✓'), `R2 '${bucket.binding}': created → ${bucket.bucketName}`);
            resources.push({
                type: 'r2_bucket',
                name: bucket.bucketName,
                binding: bucket.binding,
                id: bucket.bucketName,
                managed: true,
                source: 'created',
                metadata: bucket.jurisdiction ? { jurisdiction: bucket.jurisdiction } : undefined,
            });
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            if (isR2BucketAlreadyExistsError(msg)) {
                // Bucket already exists: keep it managed only if we created it earlier,
                // so destroy never deletes a bucket the user made themselves.
                const ownership = resolveExistingR2BucketRecord(existingRecord);
                console.log(chalk.dim(` R2 '${bucket.binding}': already exists → ${bucket.bucketName}`));
                resources.push({
                    type: 'r2_bucket',
                    name: bucket.bucketName,
                    binding: bucket.binding,
                    id: bucket.bucketName,
                    managed: ownership.managed,
                    source: ownership.source,
                    metadata: bucket.jurisdiction ? { jurisdiction: bucket.jurisdiction } : undefined,
                });
                continue;
            }
            // Any other error: log and move on to the next bucket (best-effort).
            console.log(chalk.red('✗'), `R2 '${bucket.binding}': provisioning failed — ${msg}`);
        }
    }
    return resources;
}
|
|
410
|
+
/**
 * Provision KV namespaces declared in config.kv.
 * For each namespace: check if it exists via `wrangler kv namespace list`,
 * create if missing via `wrangler kv namespace create`.
 *
 * Returns manifest binding records for every namespace with a known ID;
 * a namespace created without a parseable ID is skipped with a warning.
 */
function provisionKvNamespaces(kvConfig, projectDir) {
    const bindings = [];
    // Collapse duplicate binding names up front so we never create one twice.
    const dedupedKvConfig = dedupeBindingConfigs(kvConfig);
    // Get existing KV namespaces
    let existingNamespaces = [];
    try {
        const output = execFileSync(wranglerCommand(), wranglerArgs(['wrangler', 'kv', 'namespace', 'list']), {
            cwd: projectDir,
            encoding: 'utf-8',
            stdio: ['ignore', 'pipe', 'ignore'],
        });
        existingNamespaces = parseWranglerJsonOutput(output);
    }
    catch {
        // If listing fails, we'll try to create each one
    }
    for (const [name, config] of Object.entries(dedupedKvConfig)) {
        const bindingName = config.binding;
        // Convention: Wrangler uses Worker name prefix in title
        const existing = existingNamespaces.find((ns) => ns.title.endsWith(`-${bindingName}`) || ns.title === bindingName);
        if (existing) {
            console.log(chalk.dim(` KV '${name}' (${bindingName}): already exists → ${existing.id.slice(0, 8)}…`));
            bindings.push({
                type: 'kv_namespace',
                name,
                binding: bindingName,
                id: existing.id,
                managed: true,
                source: 'existing',
            });
        }
        else {
            // Create new KV namespace
            try {
                const output = execFileSync(wranglerCommand(), wranglerArgs(['wrangler', 'kv', 'namespace', 'create', bindingName]), {
                    cwd: projectDir,
                    encoding: 'utf-8',
                    stdio: ['pipe', 'pipe', 'pipe'],
                });
                // Extract ID from output: "Add the following to your configuration file..."
                // "kv_namespaces = [{ binding = "...", id = "..." }]"
                const idMatch = output.match(/id\s*=\s*"([^"]+)"/);
                if (idMatch) {
                    console.log(chalk.green('✓'), `KV '${name}' (${bindingName}): created → ${idMatch[1].slice(0, 8)}…`);
                    bindings.push({
                        type: 'kv_namespace',
                        name,
                        binding: bindingName,
                        id: idMatch[1],
                        managed: true,
                        source: 'created',
                    });
                }
                else {
                    // Namespace exists remotely now, but without an ID we cannot
                    // record it as managed in the deploy manifest.
                    console.log(chalk.yellow('⚠'), `KV '${name}': created but could not parse ID from wrangler output. Skipping managed binding registration.`);
                }
            }
            catch (err) {
                const msg = err instanceof Error ? err.message : String(err);
                console.log(chalk.red('✗'), `KV '${name}': provisioning failed — ${msg}`);
            }
        }
    }
    return bindings;
}
|
|
480
|
+
/**
 * Provision D1 databases declared in config.d1.
 * For each database: check via `wrangler d1 list`, create if missing.
 *
 * Managed database names follow buildManagedD1DatabaseName(workerName, name);
 * a legacy-named database is only adopted when the previous deploy manifest
 * proves this tool created it (avoids hijacking unrelated databases).
 *
 * Returns managed-binding records of shape
 * { type, name, binding, id, managed, source }.
 */
function provisionD1Databases(d1Config, projectDir, options) {
    const bindings = [];
    // Collapse config entries that resolve to the same binding name.
    const dedupedD1Config = dedupeBindingConfigs(d1Config);
    // The Worker name prefixes managed DB names; fall back to the framework default.
    const workerName = resolveProjectWorkerName(projectDir) || 'edgebase';
    // Get existing D1 databases
    let existingDatabases = [];
    try {
        const output = execFileSync(wranglerCommand(), wranglerArgs(['wrangler', 'd1', 'list', '--json']), {
            cwd: projectDir,
            encoding: 'utf-8',
            // stderr is discarded: listing is best-effort (see catch below).
            stdio: ['ignore', 'pipe', 'ignore'],
        });
        existingDatabases = parseWranglerJsonOutput(output);
    }
    catch {
        // If listing fails, we'll try to create each one
    }
    for (const [name, config] of Object.entries(dedupedD1Config)) {
        const bindingName = config.binding;
        const dbName = buildManagedD1DatabaseName(workerName, name);
        const legacyDbName = buildLegacyManagedD1DatabaseName(name);
        // Legacy reuse requires a matching record in the previous deploy manifest.
        const allowLegacyReuse = !!findCloudflareResourceRecord(options?.previousManifest ?? null, {
            type: 'd1_database',
            name,
            binding: bindingName,
        });
        const existing = existingDatabases.find((db) => db.name === dbName || (allowLegacyReuse && db.name === legacyDbName));
        if (existing) {
            console.log(chalk.dim(`  D1 '${name}' (${bindingName}): already exists → ${existing.uuid.slice(0, 8)}…`));
            bindings.push({
                type: 'd1_database',
                name,
                binding: bindingName,
                id: existing.uuid,
                managed: true,
                source: 'existing',
            });
        }
        else {
            try {
                const output = execFileSync(wranglerCommand(), wranglerArgs(['wrangler', 'd1', 'create', dbName]), {
                    cwd: projectDir,
                    encoding: 'utf-8',
                    stdio: ['pipe', 'pipe', 'pipe'],
                });
                // Wrangler prints a TOML snippet containing `database_id = "..."`.
                const idMatch = output.match(/database_id\s*=\s*"([^"]+)"/);
                if (idMatch) {
                    console.log(chalk.green('✓'), `D1 '${name}' (${bindingName}): created → ${idMatch[1].slice(0, 8)}…`);
                    bindings.push({
                        type: 'd1_database',
                        name,
                        binding: bindingName,
                        id: idMatch[1],
                        managed: true,
                        source: 'created',
                    });
                }
                else {
                    // Database likely exists now but cannot be bound by id;
                    // intentionally left out of the managed-bindings result.
                    console.log(chalk.yellow('⚠'), `D1 '${name}': created but could not parse ID. Skipping managed binding registration.`);
                }
            }
            catch (err) {
                // Non-fatal: the deploy continues without this binding.
                const msg = err instanceof Error ? err.message : String(err);
                console.log(chalk.red('✗'), `D1 '${name}': provisioning failed — ${msg}`);
            }
        }
    }
    return bindings;
}
|
|
553
|
+
/**
 * Provision the framework-owned (internal) D1 databases.
 * Thin wrapper: expands the internal binding list and reuses the
 * standard D1 provisioning path.
 */
function provisionInternalD1Databases(projectDir, options) {
    const internalConfig = buildInternalD1Config();
    return provisionD1Databases(internalConfig, projectDir, options);
}
|
|
557
|
+
/**
 * Expand the static INTERNAL_D1_BINDINGS list into a
 * `{ [name]: { binding } }` config map consumed by provisionD1Databases.
 */
function buildInternalD1Config() {
    const config = {};
    for (const { name, binding } of INTERNAL_D1_BINDINGS) {
        config[name] = { binding };
    }
    return config;
}
|
|
560
|
+
/**
 * Decide whether a database block is "dynamic" (per-instance) rather than a
 * single static database.
 */
function isDynamicDbBlock(dbBlock) {
    // Declaring per-instance behavior makes the namespace dynamic.
    const hasInstance = Boolean(dbBlock.instance);
    // loadConfigSafe() strips function values, so DB-level access often arrives
    // as an empty object. Presence still means "this namespace is dynamic".
    const hasAccessObject = Boolean(dbBlock.access) && typeof dbBlock.access === 'object';
    return hasInstance || hasAccessObject;
}
|
|
569
|
+
/**
 * True when the provider is PostgreSQL-backed (Neon or vanilla Postgres),
 * i.e. eligible for a Hyperdrive config.
 */
function isPostgresProvider(provider) {
    return ['neon', 'postgres'].includes(provider);
}
|
|
572
|
+
/**
 * Derive a managed D1 config map from the user's `databases` blocks.
 * Only static, D1-backed namespaces qualify: externally hosted providers
 * (neon/postgres/do) and dynamic (per-instance) blocks are skipped.
 * Entries are keyed `db-{namespace}` with binding `DB_D1_{NAMESPACE}`.
 */
function buildSingleInstanceD1Config(databasesConfig) {
    const d1Map = {};
    for (const [namespace, dbBlock] of Object.entries(databasesConfig)) {
        if (!dbBlock) {
            continue;
        }
        const provider = dbBlock.provider;
        // Externally hosted databases never get a managed D1 instance.
        const isExternal = provider === 'neon' || provider === 'postgres' || provider === 'do';
        if (isExternal) {
            continue;
        }
        // A block explicitly pinned to 'd1' bypasses the dynamic check;
        // otherwise dynamic namespaces are provisioned per-instance elsewhere.
        if (provider !== 'd1' && isDynamicDbBlock(dbBlock)) {
            continue;
        }
        // NOTE(review): hyphens in `namespace` survive into the binding name here
        // (e.g. "my-db" → "DB_D1_MY-DB"), unlike the `.replace(/-/g, '_')`
        // normalization used for Hyperdrive bindings — confirm this is intended.
        d1Map[`db-${namespace}`] = { binding: `DB_D1_${namespace.toUpperCase()}` };
    }
    return d1Map;
}
|
|
586
|
+
/**
 * Merge internal, single-instance-derived, and explicit D1 configs into one
 * map, applied in that priority order. Within each source, entries are
 * deduped and any entry whose binding name is already present in the merged
 * result is dropped.
 */
function buildMergedD1Config(explicitD1Config, databasesConfig) {
    const merged = {};
    const absorb = (entries) => {
        // Recompute claimed bindings from the current merged state so a
        // later same-name overwrite releases the binding it displaced.
        const claimed = new Set(Object.values(merged).map((entry) => entry.binding));
        for (const [name, value] of Object.entries(dedupeBindingConfigs(entries))) {
            if (claimed.has(value.binding)) {
                continue;
            }
            claimed.add(value.binding);
            merged[name] = value;
        }
    };
    absorb(buildInternalD1Config());
    if (databasesConfig) {
        absorb(buildSingleInstanceD1Config(databasesConfig));
    }
    if (explicitD1Config) {
        absorb(explicitD1Config);
    }
    return merged;
}
|
|
605
|
+
/**
 * Provision managed D1 databases for the user's single-instance DB blocks.
 * Returns [] immediately when every namespace is dynamic or externally hosted.
 */
function provisionSingleInstanceD1Databases(databasesConfig, projectDir, options) {
    const d1Map = buildSingleInstanceD1Config(databasesConfig);
    const hasEntries = Object.keys(d1Map).length > 0;
    return hasEntries ? provisionD1Databases(d1Map, projectDir, options) : [];
}
|
|
611
|
+
/**
 * Provision Vectorize indexes declared in config.vectorize.
 * For each index: check via `wrangler vectorize list`, create if missing.
 *
 * Index names follow `edgebase-{name}`; the binding defaults to
 * `VECTORIZE_{NAME}` when config.binding is absent.
 * Returns managed-binding records of shape
 * { type, name, binding, id, managed, source } (id is the index name).
 */
function provisionVectorizeIndexes(vectorizeConfig, projectDir) {
    const bindings = [];
    // Get existing Vectorize indexes
    let existingIndexes = [];
    try {
        const output = execFileSync(wranglerCommand(), wranglerArgs(['wrangler', 'vectorize', 'list', '--json']), {
            cwd: projectDir,
            encoding: 'utf-8',
            // stderr is discarded: listing is best-effort (see catch below).
            stdio: ['ignore', 'pipe', 'ignore'],
        });
        existingIndexes = parseWranglerJsonOutput(output);
    }
    catch {
        // Vectorize may not be available (free plan)
    }
    for (const [name, config] of Object.entries(vectorizeConfig)) {
        // NOTE(review): hyphenated names survive into the default binding here
        // (e.g. "my-idx" → "VECTORIZE_MY-IDX") — confirm bindings are
        // normalized upstream or that hyphenated names are rejected.
        const bindingName = config.binding ?? `VECTORIZE_${name.toUpperCase()}`;
        const indexName = `edgebase-${name}`;
        const existing = existingIndexes.find((idx) => idx.name === indexName);
        if (existing) {
            console.log(chalk.dim(`  Vectorize '${name}' (${bindingName}): already exists`));
            bindings.push({
                type: 'vectorize',
                name,
                binding: bindingName,
                id: indexName,
                managed: true,
                source: 'existing',
            });
        }
        else {
            // Defaults: 1536 dimensions / cosine — typical for OpenAI-style embeddings.
            const dimensions = config.dimensions ?? 1536;
            const metric = config.metric ?? 'cosine';
            try {
                execFileSync(wranglerCommand(), wranglerArgs([
                    'wrangler',
                    'vectorize',
                    'create',
                    indexName,
                    `--dimensions=${dimensions}`,
                    `--metric=${metric}`,
                ]), {
                    cwd: projectDir,
                    encoding: 'utf-8',
                    stdio: ['pipe', 'pipe', 'pipe'],
                });
                console.log(chalk.green('✓'), `Vectorize '${name}' (${bindingName}): created (${dimensions}d, ${metric})`);
                bindings.push({
                    type: 'vectorize',
                    name,
                    binding: bindingName,
                    id: indexName,
                    managed: true,
                    source: 'created',
                });
            }
            catch (err) {
                // Non-fatal: most commonly a plan limitation rather than a config error.
                const msg = err instanceof Error ? err.message : String(err);
                console.log(chalk.yellow('⚠'), `Vectorize '${name}': provisioning failed — ${msg}`);
                console.log(chalk.dim('  Vectorize requires a paid Workers plan.'));
            }
        }
    }
    return bindings;
}
|
|
680
|
+
/**
 * Read a single env value from a .env file by key.
 *
 * @param {string} envPath - Path to the .env file.
 * @param {string} key - Exact variable name, matched literally at line start.
 * @returns {string|undefined} Trimmed value, or undefined when the file,
 *   the key, or a non-empty value is missing.
 */
function readEnvValue(envPath, key) {
    if (!existsSync(envPath))
        return undefined;
    const content = readFileSync(envPath, 'utf-8');
    // Escape regex metacharacters so keys like "MY.KEY" are matched literally
    // (previously the raw key was interpolated into the pattern, letting `.`
    // match any character and special chars break or mis-match the regex).
    const escapedKey = key.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const match = content.match(new RegExp(`^${escapedKey}=(.+)$`, 'm'));
    return match?.[1]?.trim();
}
|
|
690
|
+
/**
 * Run the project's optional post-scaffold hook
 * (scripts/edgebase-post-scaffold.mjs) if it exists; a missing hook is a no-op.
 * The hook runs synchronously with the current Node binary and inherits stdio,
 * so its output streams directly to the user.
 */
function runProjectPostScaffoldHook(projectDir) {
    const hookPath = join(projectDir, 'scripts', 'edgebase-post-scaffold.mjs');
    if (!existsSync(hookPath)) {
        return;
    }
    console.log(chalk.dim(`  Running project post-scaffold hook: ${basename(hookPath)}`));
    const hookArgs = [hookPath, '--project-dir', projectDir];
    execFileSync(process.execPath, hookArgs, { cwd: projectDir, stdio: 'inherit' });
}
|
|
700
|
+
/**
 * Provision Hyperdrive configs for database blocks with provider='neon'|'postgres'.
 * For each DB block with non-DO provider: check if Hyperdrive config exists,
 * create if missing via `wrangler hyperdrive create`.
 * Connection string is read from .env.release (DB_POSTGRES_{NAMESPACE}_URL by default,
 * or the db block's custom connectionString env key when provided).
 *
 * Binding convention: DB_POSTGRES_{NAMESPACE_UPPER}
 * Hyperdrive name: edgebase-db-{namespace}
 *
 * Returns managed-binding records of shape
 * { type, name, binding, id, managed, source }. A missing connection string
 * is a soft failure: the namespace is skipped with a warning.
 */
function provisionProviderHyperdrives(databases, projectDir) {
    const bindings = [];
    // Filter to PostgreSQL-backed DB blocks
    const pgBlocks = Object.entries(databases).filter(([, block]) => isPostgresProvider(block.provider));
    if (pgBlocks.length === 0)
        return bindings;
    // Get existing Hyperdrive configs
    let existingConfigs = listHyperdriveConfigs(projectDir);
    for (const [namespace, block] of pgBlocks) {
        const hdName = `edgebase-db-${namespace}`;
        // Hyphens become underscores so the binding is a valid identifier.
        const normalized = namespace.toUpperCase().replace(/-/g, '_');
        const bindingName = `DB_POSTGRES_${normalized}`;
        const existing = existingConfigs.find((c) => c.name === hdName);
        if (existing) {
            console.log(chalk.dim(`  Hyperdrive '${namespace}' (provider): already exists → ${existing.id.slice(0, 8)}…`));
            bindings.push({
                type: 'hyperdrive',
                name: namespace,
                binding: bindingName,
                id: existing.id,
                managed: true,
                source: 'existing',
            });
            continue;
        }
        // Read connection string from .env.release
        const envReleasePath = join(projectDir, '.env.release');
        const secretKey = block.connectionString ?? `${bindingName}_URL`;
        const connectionString = readEnvValue(envReleasePath, secretKey);
        if (!connectionString) {
            // Neon users get an extra hint about the automated setup command.
            const setupHint = block.provider === 'neon'
                ? `\n    Or run npx edgebase neon setup --namespace ${namespace}`
                : '';
            console.warn(chalk.yellow(`  ⚠ Hyperdrive '${namespace}' (provider): connection string not found.\n` +
                `    Add ${secretKey}=postgres://... to .env.release${setupHint}`));
            continue;
        }
        // Create Hyperdrive config
        // NOTE(review): the connection string is passed as a CLI argument and
        // may be visible in the local process list while wrangler runs.
        try {
            const output = execFileSync(wranglerCommand(), wranglerArgs(['wrangler', 'hyperdrive', 'create', hdName, `--connection-string=${connectionString}`]), { cwd: projectDir, encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] });
            // Parse ID from output
            const idMatch = output.match(/id\s*=\s*"?([a-f0-9-]+)"?/i);
            if (idMatch) {
                bindings.push({
                    type: 'hyperdrive',
                    name: namespace,
                    binding: bindingName,
                    id: idMatch[1],
                    managed: true,
                    source: 'created',
                });
                console.log(chalk.green('✓'), `Hyperdrive '${namespace}' (provider): created → ${idMatch[1].slice(0, 8)}…`);
            }
            else {
                console.log(chalk.yellow('⚠'), `Hyperdrive '${namespace}' (provider): created but could not parse ID. Skipping managed binding registration.`);
            }
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            // "Already exists" from wrangler (e.g. created since our initial
            // listing): re-list and adopt the existing config instead of failing.
            if (isHyperdriveAlreadyExistsError(msg)) {
                const existingConfig = listHyperdriveConfigs(projectDir).find((config) => config.name === hdName);
                if (existingConfig) {
                    console.log(chalk.dim(`  Hyperdrive '${namespace}' (provider): already exists → ${existingConfig.id.slice(0, 8)}…`));
                    bindings.push({
                        type: 'hyperdrive',
                        name: namespace,
                        binding: bindingName,
                        id: existingConfig.id,
                        managed: true,
                        source: 'existing',
                    });
                    // Keep the local cache in sync for later iterations.
                    existingConfigs = [...existingConfigs, existingConfig];
                    continue;
                }
            }
            console.log(chalk.yellow('⚠'), `Hyperdrive '${namespace}' (provider): provisioning failed — ${msg}`);
        }
    }
    return bindings;
}
|
|
790
|
+
/**
 * Provision Hyperdrive config for auth PostgreSQL when config.auth.provider is 'neon'|'postgres'.
 * Follows the same pattern as provisionProviderHyperdrives but for a single global auth binding.
 *
 * Binding name: AUTH_POSTGRES (matches getAuthPostgresBindingName() in server)
 * Hyperdrive name: edgebase-auth
 * Connection string: read from .env.release AUTH_POSTGRES_URL (or config.auth.connectionString)
 *
 * Returns at most one managed-binding record; empty for non-Postgres auth
 * providers or when the connection string is missing.
 */
function provisionAuthPostgresHyperdrive(authConfig, projectDir) {
    const bindings = [];
    const provider = authConfig.provider;
    // Only PostgreSQL-backed auth providers need a Hyperdrive config.
    if (provider !== 'neon' && provider !== 'postgres')
        return bindings;
    const hdName = 'edgebase-auth';
    const bindingName = 'AUTH_POSTGRES';
    // Check existing Hyperdrive configs
    const existingConfigs = listHyperdriveConfigs(projectDir);
    const existing = existingConfigs.find((c) => c.name === hdName);
    if (existing) {
        console.log(chalk.dim(`  Hyperdrive 'auth' (${provider}): already exists → ${existing.id.slice(0, 8)}…`));
        bindings.push({
            type: 'hyperdrive',
            name: 'auth',
            binding: bindingName,
            id: existing.id,
            managed: true,
            source: 'existing',
        });
        return bindings;
    }
    // Read connection string from .env.release
    const envReleasePath = join(projectDir, '.env.release');
    const secretKey = authConfig.connectionString ?? 'AUTH_POSTGRES_URL';
    const connectionString = readEnvValue(envReleasePath, secretKey);
    if (!connectionString) {
        // Neon users get an extra hint about the automated setup command.
        const setupHint = provider === 'neon'
            ? '\n    Or run npx edgebase neon setup --auth'
            : '';
        console.warn(chalk.yellow(`  ⚠ Hyperdrive 'auth' (${provider}): connection string not found.\n` +
            `    Add ${secretKey}=postgres://... to .env.release${setupHint}`));
        return bindings;
    }
    // Create Hyperdrive config
    // NOTE(review): the connection string is passed as a CLI argument and
    // may be visible in the local process list while wrangler runs.
    try {
        const output = execFileSync(wranglerCommand(), wranglerArgs(['wrangler', 'hyperdrive', 'create', hdName, `--connection-string=${connectionString}`]), { cwd: projectDir, encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] });
        // Parse ID from output
        const idMatch = output.match(/id\s*=\s*"?([a-f0-9-]+)"?/i);
        if (idMatch) {
            bindings.push({
                type: 'hyperdrive',
                name: 'auth',
                binding: bindingName,
                id: idMatch[1],
                managed: true,
                source: 'created',
            });
            console.log(chalk.green('✓'), `Hyperdrive 'auth' (${provider}): created → ${idMatch[1].slice(0, 8)}…`);
        }
        else {
            console.log(chalk.yellow('⚠'), `Hyperdrive 'auth' (${provider}): created but could not parse ID. Skipping managed binding registration.`);
        }
    }
    catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        // "Already exists" from wrangler (e.g. created since the initial
        // listing): re-list and adopt the existing config instead of failing.
        if (isHyperdriveAlreadyExistsError(msg)) {
            const existingConfig = listHyperdriveConfigs(projectDir).find((config) => config.name === hdName);
            if (existingConfig) {
                console.log(chalk.dim(`  Hyperdrive 'auth' (${provider}): already exists → ${existingConfig.id.slice(0, 8)}…`));
                bindings.push({
                    type: 'hyperdrive',
                    name: 'auth',
                    binding: bindingName,
                    id: existingConfig.id,
                    managed: true,
                    source: 'existing',
                });
                return bindings;
            }
        }
        console.log(chalk.yellow('⚠'), `Hyperdrive 'auth' (${provider}): provisioning failed — ${msg}`);
    }
    return bindings;
}
|
|
873
|
+
export const deployCommand = new Command('deploy')
|
|
874
|
+
.alias('dp')
|
|
875
|
+
.description('Deploy to Cloudflare')
|
|
876
|
+
.option('--dry-run', 'Validate config without deploying')
|
|
877
|
+
.option('--if-destructive <action>', 'Action on destructive schema changes in CI/CD: reject (default) or reset', 'reject')
|
|
878
|
+
.action(async (options) => {
|
|
879
|
+
const projectDir = resolve('.');
|
|
880
|
+
const configPath = join(projectDir, 'edgebase.config.ts');
|
|
881
|
+
const isDryRun = !!options.dryRun;
|
|
882
|
+
const isTTY = !!process.stdin.isTTY;
|
|
883
|
+
if (!existsSync(configPath)) {
|
|
884
|
+
raiseCliError({
|
|
885
|
+
code: 'deploy_config_not_found',
|
|
886
|
+
message: 'edgebase.config.ts not found.',
|
|
887
|
+
hint: 'Run `npm create edge-base@latest my-app` first.',
|
|
888
|
+
});
|
|
889
|
+
}
|
|
890
|
+
if (!isQuiet()) {
|
|
891
|
+
console.log(chalk.blue(isDryRun ? '⚡ Validating EdgeBase deploy...' : '⚡ Deploying EdgeBase...'));
|
|
892
|
+
console.log();
|
|
893
|
+
}
|
|
894
|
+
// ─── Functions Bundling ───
|
|
895
|
+
// Plugin functions are registered at runtime from config.plugins[] (Explicit Import Pattern).
|
|
896
|
+
// No auto-discovery needed — esbuild bundles plugin handlers via import graph.
|
|
897
|
+
// Track function count for dry-run summary
|
|
898
|
+
let functionsCount = 0;
|
|
899
|
+
let functions = [];
|
|
900
|
+
const functionsDir = join(projectDir, 'functions');
|
|
901
|
+
if (existsSync(functionsDir)) {
|
|
902
|
+
functions = scanFunctions(functionsDir);
|
|
903
|
+
validateRouteNames(functions);
|
|
904
|
+
functionsCount = functions.length;
|
|
905
|
+
if (functions.length === 0) {
|
|
906
|
+
console.log(chalk.yellow('⚠'), 'functions/ directory exists but no .ts files found.');
|
|
907
|
+
}
|
|
908
|
+
}
|
|
909
|
+
if (!isDryRun && isTTY && !isJson() && !isNonInteractive()) {
|
|
910
|
+
await promptToSyncAuthReleaseEnv(projectDir);
|
|
911
|
+
}
|
|
912
|
+
const envReleasePath = join(projectDir, '.env.release');
|
|
913
|
+
const releaseVars = existsSync(envReleasePath) ? parseEnvFile(envReleasePath) : {};
|
|
914
|
+
for (const [key, value] of Object.entries(releaseVars)) {
|
|
915
|
+
if (!(key in process.env)) {
|
|
916
|
+
process.env[key] = value;
|
|
917
|
+
}
|
|
918
|
+
}
|
|
919
|
+
// ─── Config Validation ───
|
|
920
|
+
const warnings = [];
|
|
921
|
+
const errors = [];
|
|
922
|
+
let configJson = null;
|
|
923
|
+
if (existsSync(configPath)) {
|
|
924
|
+
try {
|
|
925
|
+
configJson = loadConfigSafe(configPath, projectDir, FULL_CONFIG_EVAL);
|
|
926
|
+
if (configJson) {
|
|
927
|
+
validateConfig(configJson, warnings, errors);
|
|
928
|
+
}
|
|
929
|
+
}
|
|
930
|
+
catch (err) {
|
|
931
|
+
raiseCliError({
|
|
932
|
+
code: 'deploy_config_evaluation_failed',
|
|
933
|
+
message: `Failed to fully evaluate edgebase.config.ts: ${err.message}`,
|
|
934
|
+
hint: 'Install missing config dependencies or fix runtime errors before deploy.',
|
|
935
|
+
});
|
|
936
|
+
}
|
|
937
|
+
}
|
|
938
|
+
warnings.push(...collectAuthEnvWarnings(projectDir));
|
|
939
|
+
for (const w of warnings) {
|
|
940
|
+
console.log(chalk.yellow('⚠'), w);
|
|
941
|
+
}
|
|
942
|
+
for (const e of errors) {
|
|
943
|
+
console.log(chalk.red('✗'), e);
|
|
944
|
+
}
|
|
945
|
+
if (errors.length > 0) {
|
|
946
|
+
raiseCliError({
|
|
947
|
+
code: 'deploy_config_invalid',
|
|
948
|
+
message: `${errors.length} config error(s) found. Fix them before deploying.`,
|
|
949
|
+
hint: 'Docs: https://edgebase.fun/docs/getting-started/configuration',
|
|
950
|
+
details: {
|
|
951
|
+
errors,
|
|
952
|
+
},
|
|
953
|
+
});
|
|
954
|
+
}
|
|
955
|
+
// ─── Schema Destructive Change Detection ───
|
|
956
|
+
let currentSnapshot = null;
|
|
957
|
+
let hasSchemaSnapshot = false;
|
|
958
|
+
if (configJson) {
|
|
959
|
+
try {
|
|
960
|
+
const databases = extractDatabases(configJson);
|
|
961
|
+
if (databases && Object.keys(databases).length > 0) {
|
|
962
|
+
const authProvider = configJson.auth?.provider;
|
|
963
|
+
currentSnapshot = buildSnapshot(databases, authProvider);
|
|
964
|
+
const savedSnapshot = loadSnapshot(projectDir);
|
|
965
|
+
if (savedSnapshot) {
|
|
966
|
+
hasSchemaSnapshot = true;
|
|
967
|
+
let changes = detectDestructiveChanges(savedSnapshot, currentSnapshot);
|
|
968
|
+
changes = filterAutoPassChanges(changes, savedSnapshot, currentSnapshot);
|
|
969
|
+
if (changes.length > 0 && !isDryRun) {
|
|
970
|
+
const isRelease = !!configJson.release;
|
|
971
|
+
const result = await handleDestructiveChanges(changes, isRelease, isTTY, options.ifDestructive);
|
|
972
|
+
if (result.action === 'reset') {
|
|
973
|
+
resetLocalDoState(projectDir);
|
|
974
|
+
saveSnapshot(projectDir, currentSnapshot);
|
|
975
|
+
console.log(chalk.green('✓'), 'Schema snapshot updated after DB reset');
|
|
976
|
+
}
|
|
977
|
+
else if (result.action === 'migration_guide') {
|
|
978
|
+
raiseCliError({
|
|
979
|
+
code: 'deploy_cancelled_for_migration_guide',
|
|
980
|
+
message: 'Deploy cancelled after showing the migration guide.',
|
|
981
|
+
hint: 'Add a migration or rerun after choosing an explicit destructive-change strategy.',
|
|
982
|
+
});
|
|
983
|
+
}
|
|
984
|
+
}
|
|
985
|
+
}
|
|
986
|
+
else if (!isDryRun) {
|
|
987
|
+
// First deploy — create initial snapshot
|
|
988
|
+
saveSnapshot(projectDir, currentSnapshot);
|
|
989
|
+
console.log(chalk.green('✓'), 'Initial schema snapshot created (edgebase-schema.lock.json)');
|
|
990
|
+
}
|
|
991
|
+
}
|
|
992
|
+
}
|
|
993
|
+
catch (err) {
|
|
994
|
+
if (isCliStructuredError(err))
|
|
995
|
+
throw err;
|
|
996
|
+
raiseCliError({
|
|
997
|
+
code: 'deploy_schema_detection_failed',
|
|
998
|
+
message: `Schema change detection failed: ${err instanceof Error ? err.message : String(err)}`,
|
|
999
|
+
hint: 'Delete edgebase-schema.lock.json to reset detection if needed. Docs: https://edgebase.fun/docs/cli/reference#deploy',
|
|
1000
|
+
});
|
|
1001
|
+
}
|
|
1002
|
+
}
|
|
1003
|
+
// ─── Provider Change Detection + Migration ───
|
|
1004
|
+
let pendingRestore = null;
|
|
1005
|
+
if (configJson && currentSnapshot) {
|
|
1006
|
+
const savedSnapshot = loadSnapshot(projectDir);
|
|
1007
|
+
if (savedSnapshot) {
|
|
1008
|
+
const providerChanges = detectProviderChanges(savedSnapshot, currentSnapshot);
|
|
1009
|
+
const authChange = detectAuthProviderChange(savedSnapshot, currentSnapshot);
|
|
1010
|
+
const allChanges = [...providerChanges];
|
|
1011
|
+
if (authChange)
|
|
1012
|
+
allChanges.push(authChange);
|
|
1013
|
+
if (allChanges.length > 0 && isDryRun) {
|
|
1014
|
+
if (!isJson()) {
|
|
1015
|
+
console.log();
|
|
1016
|
+
console.log(chalk.yellow('⚠ Database provider changes detected:'));
|
|
1017
|
+
for (const pc of allChanges) {
|
|
1018
|
+
console.log(chalk.yellow(` • ${pc.namespace}: ${pc.oldProvider} → ${pc.newProvider}`));
|
|
1019
|
+
}
|
|
1020
|
+
console.log(chalk.yellow(' Dry-run skips dump/restore. Run `npx edgebase migrate` or deploy without --dry-run.'));
|
|
1021
|
+
console.log();
|
|
1022
|
+
}
|
|
1023
|
+
}
|
|
1024
|
+
else if (allChanges.length > 0 && (isTTY || isNonInteractive())) {
|
|
1025
|
+
const answer = await promptMigration(allChanges);
|
|
1026
|
+
if (answer === 'migrate') {
|
|
1027
|
+
// Resolve Worker URL from wrangler.toml (currently deployed Worker)
|
|
1028
|
+
let workerUrl = process.env.EDGEBASE_URL ?? '';
|
|
1029
|
+
if (!workerUrl)
|
|
1030
|
+
workerUrl = resolveProjectWorkerUrl(projectDir);
|
|
1031
|
+
if (!workerUrl) {
|
|
1032
|
+
raiseCliError({
|
|
1033
|
+
code: 'deploy_migration_url_required',
|
|
1034
|
+
message: 'Cannot determine Worker URL for migration.',
|
|
1035
|
+
hint: 'Set EDGEBASE_URL or ensure wrangler.toml has a name.',
|
|
1036
|
+
});
|
|
1037
|
+
}
|
|
1038
|
+
// Resolve service key (exits with guidance if not found)
|
|
1039
|
+
const serviceKey = resolveServiceKeyFromOptions({});
|
|
1040
|
+
// Determine scope and namespaces
|
|
1041
|
+
const dataNamespaces = providerChanges.map((pc) => pc.namespace);
|
|
1042
|
+
const scope = authChange
|
|
1043
|
+
? dataNamespaces.length > 0
|
|
1044
|
+
? 'all'
|
|
1045
|
+
: 'auth'
|
|
1046
|
+
: 'data';
|
|
1047
|
+
console.log();
|
|
1048
|
+
console.log(chalk.blue('📦 Pre-deploy: Dumping data from current provider...'));
|
|
1049
|
+
try {
|
|
1050
|
+
const dumped = await dumpCurrentData({
|
|
1051
|
+
scope,
|
|
1052
|
+
namespaces: dataNamespaces.length > 0 ? dataNamespaces : undefined,
|
|
1053
|
+
serverUrl: workerUrl,
|
|
1054
|
+
serviceKey,
|
|
1055
|
+
dryRun: false,
|
|
1056
|
+
});
|
|
1057
|
+
pendingRestore = { dumped, serverUrl: workerUrl, serviceKey };
|
|
1058
|
+
console.log(chalk.green('✓'), 'Data dumped successfully. Proceeding with deploy...');
|
|
1059
|
+
}
|
|
1060
|
+
catch (err) {
|
|
1061
|
+
console.error(chalk.red('✗ Pre-deploy dump failed:'), err.message);
|
|
1062
|
+
console.error(chalk.dim(' Deploy will continue without migration.'));
|
|
1063
|
+
console.error(chalk.dim(' You can migrate manually later with `npx edgebase migrate`.'));
|
|
1064
|
+
console.log();
|
|
1065
|
+
}
|
|
1066
|
+
}
|
|
1067
|
+
}
|
|
1068
|
+
else if (allChanges.length > 0) {
|
|
1069
|
+
// Non-TTY: show warning only
|
|
1070
|
+
console.log();
|
|
1071
|
+
console.log(chalk.yellow('⚠ Database provider changes detected:'));
|
|
1072
|
+
for (const pc of allChanges) {
|
|
1073
|
+
console.log(chalk.yellow(` • ${pc.namespace}: ${pc.oldProvider} → ${pc.newProvider}`));
|
|
1074
|
+
}
|
|
1075
|
+
console.log();
|
|
1076
|
+
console.log(chalk.yellow(' Run `npx edgebase migrate` to migrate data interactively.'));
|
|
1077
|
+
console.log();
|
|
1078
|
+
}
|
|
1079
|
+
}
|
|
1080
|
+
}
|
|
1081
|
+
// TODO(future): Additional validations
|
|
1082
|
+
// - references validation against defined tables
|
|
1083
|
+
// - origin: '*' + credentials: true conflict (M10)
|
|
1084
|
+
if (options.dryRun) {
|
|
1085
|
+
if (isJson()) {
|
|
1086
|
+
const result = {
|
|
1087
|
+
status: 'dry-run',
|
|
1088
|
+
config: basename(configPath),
|
|
1089
|
+
functions: functionsCount,
|
|
1090
|
+
warnings: warnings.length,
|
|
1091
|
+
errors: 0,
|
|
1092
|
+
};
|
|
1093
|
+
if (hasSchemaSnapshot || currentSnapshot)
|
|
1094
|
+
result.schemaSnapshot = true;
|
|
1095
|
+
console.log(JSON.stringify(result));
|
|
1096
|
+
return;
|
|
1097
|
+
}
|
|
1098
|
+
console.log();
|
|
1099
|
+
console.log(chalk.blue('─── Dry Run: Deploy Preview ───'));
|
|
1100
|
+
console.log();
|
|
1101
|
+
console.log(chalk.green('✓'), `Config: ${basename(configPath)}`);
|
|
1102
|
+
if (functionsCount > 0) {
|
|
1103
|
+
console.log(chalk.green('✓'), `Functions validated: ${functionsCount} file(s)`);
|
|
1104
|
+
}
|
|
1105
|
+
if (warnings.length > 0) {
|
|
1106
|
+
console.log(chalk.yellow('⚠'), `Warnings: ${warnings.length}`);
|
|
1107
|
+
}
|
|
1108
|
+
if (hasSchemaSnapshot) {
|
|
1109
|
+
console.log(chalk.green('✓'), 'Schema snapshot: checked');
|
|
1110
|
+
}
|
|
1111
|
+
else if (currentSnapshot) {
|
|
1112
|
+
console.log(chalk.green('✓'), 'Schema snapshot: would be created on first deploy');
|
|
1113
|
+
}
|
|
1114
|
+
// Check for .env.release secrets
|
|
1115
|
+
const envReleasePath = join(projectDir, '.env.release');
|
|
1116
|
+
if (existsSync(envReleasePath)) {
|
|
1117
|
+
const envContent = readFileSync(envReleasePath, 'utf-8');
|
|
1118
|
+
const secretCount = envContent
|
|
1119
|
+
.split('\n')
|
|
1120
|
+
.filter((l) => l.trim() && !l.startsWith('#') && l.includes('=')).length;
|
|
1121
|
+
console.log(chalk.green('✓'), `Secrets: ${secretCount} from .env.release`);
|
|
1122
|
+
}
|
|
1123
|
+
console.log();
|
|
1124
|
+
console.log(chalk.dim(' Run without --dry-run to deploy.'));
|
|
1125
|
+
return;
|
|
1126
|
+
}
|
|
1127
|
+
// ─── Cloudflare Authentication Gate ───
|
|
1128
|
+
const cfAuth = await ensureCloudflareAuth(projectDir, isTTY);
|
|
1129
|
+
ensureWranglerToml(projectDir, cfAuth.accountId);
|
|
1130
|
+
ensureRuntimeScaffold(projectDir);
|
|
1131
|
+
writeRuntimeConfigShim(projectDir, releaseVars);
|
|
1132
|
+
runProjectPostScaffoldHook(projectDir);
|
|
1133
|
+
const previousManifest = readCloudflareDeployManifest(projectDir);
|
|
1134
|
+
console.log();
|
|
1135
|
+
const serverSrcDir = getRuntimeServerSrcDir(projectDir);
|
|
1136
|
+
const registryPath = join(serverSrcDir, '_functions-registry.ts');
|
|
1137
|
+
generateFunctionRegistry(functions, registryPath, {
|
|
1138
|
+
configImportPath: './generated-config.js',
|
|
1139
|
+
functionsImportBasePath: relative(dirname(registryPath), join(projectDir, 'functions')).replace(/\\/g, '/'),
|
|
1140
|
+
});
|
|
1141
|
+
if (functions.length > 0) {
|
|
1142
|
+
console.log(chalk.green('✓'), `Bundled ${functions.length} function(s):`, functions.map((f) => chalk.cyan(f.name)).join(', '));
|
|
1143
|
+
}
|
|
1144
|
+
else {
|
|
1145
|
+
console.log(chalk.green('✓'), 'Bundled 0 user function(s) — plugin functions remain available');
|
|
1146
|
+
}
|
|
1147
|
+
// ─── Cron Schedule Extraction ───
|
|
1148
|
+
const cronSchedules = collectManagedCronSchedules(configJson);
|
|
1149
|
+
// ─── Cloudflare Resource Provisioning ───
|
|
1150
|
+
const provisionedBindings = [];
|
|
1151
|
+
const manifestResources = [];
|
|
1152
|
+
const rateLimitBindings = resolveRateLimitBindings(configJson ?? undefined);
|
|
1153
|
+
let tempWranglerPath = null;
|
|
1154
|
+
const provisionSpinner = spin('Provisioning Cloudflare resources...');
|
|
1155
|
+
manifestResources.push(...provisionR2Buckets(projectDir, previousManifest));
|
|
1156
|
+
if (configJson) {
|
|
1157
|
+
const kvCfg = configJson.kv;
|
|
1158
|
+
const d1Cfg = configJson.d1;
|
|
1159
|
+
const vecCfg = configJson.vectorize;
|
|
1160
|
+
const dbsCfg = configJson.databases;
|
|
1161
|
+
// Check for PostgreSQL-backed database blocks (Hyperdrive)
|
|
1162
|
+
const hasProviderDbs = dbsCfg && Object.values(dbsCfg).some((db) => isPostgresProvider(db.provider));
|
|
1163
|
+
// Check for auth PostgreSQL provider (Hyperdrive)
|
|
1164
|
+
const authCfg = configJson.auth;
|
|
1165
|
+
const hasAuthPostgres = authCfg?.provider === 'neon' || authCfg?.provider === 'postgres';
|
|
1166
|
+
const mergedKvConfig = buildMergedKvConfig(kvCfg);
|
|
1167
|
+
if (Object.keys(mergedKvConfig).length > 0) {
|
|
1168
|
+
provisionedBindings.push(...provisionKvNamespaces(mergedKvConfig, projectDir));
|
|
1169
|
+
}
|
|
1170
|
+
const mergedD1Config = buildMergedD1Config(d1Cfg, dbsCfg);
|
|
1171
|
+
if (Object.keys(mergedD1Config).length > 0) {
|
|
1172
|
+
provisionedBindings.push(...provisionD1Databases(mergedD1Config, projectDir, { previousManifest }));
|
|
1173
|
+
}
|
|
1174
|
+
if (vecCfg && Object.keys(vecCfg).length > 0) {
|
|
1175
|
+
provisionedBindings.push(...provisionVectorizeIndexes(vecCfg, projectDir));
|
|
1176
|
+
}
|
|
1177
|
+
if (dbsCfg && hasProviderDbs) {
|
|
1178
|
+
provisionedBindings.push(...provisionProviderHyperdrives(dbsCfg, projectDir));
|
|
1179
|
+
}
|
|
1180
|
+
if (authCfg && hasAuthPostgres) {
|
|
1181
|
+
provisionedBindings.push(...provisionAuthPostgresHyperdrive(authCfg, projectDir));
|
|
1182
|
+
}
|
|
1183
|
+
}
|
|
1184
|
+
else {
|
|
1185
|
+
provisionedBindings.push(...provisionInternalD1Databases(projectDir, { previousManifest }));
|
|
1186
|
+
}
|
|
1187
|
+
manifestResources.push(...provisionedBindings.map(toManifestResourceRecord));
|
|
1188
|
+
// Generate temp wrangler.toml with bindings + cron triggers
|
|
1189
|
+
const wranglerPath = join(projectDir, 'wrangler.toml');
|
|
1190
|
+
if (existsSync(wranglerPath) &&
|
|
1191
|
+
(provisionedBindings.length > 0 || cronSchedules.length > 0 || rateLimitBindings.length > 0)) {
|
|
1192
|
+
tempWranglerPath = generateTempWranglerToml(wranglerPath, {
|
|
1193
|
+
bindings: provisionedBindings,
|
|
1194
|
+
triggerMode: 'replace',
|
|
1195
|
+
managedCrons: cronSchedules,
|
|
1196
|
+
rateLimitBindings,
|
|
1197
|
+
});
|
|
1198
|
+
if (tempWranglerPath) {
|
|
1199
|
+
console.log(chalk.green('✓'), `Generated temp wrangler.toml with ${provisionedBindings.length} resource binding(s)`);
|
|
1200
|
+
}
|
|
1201
|
+
}
|
|
1202
|
+
provisionSpinner.succeed('Cloudflare resources provisioned');
|
|
1203
|
+
// Generate temp wrangler.toml for cron triggers even if no resource bindings
|
|
1204
|
+
if (!tempWranglerPath && (cronSchedules.length > 0 || rateLimitBindings.length > 0)) {
|
|
1205
|
+
const cronOnlyWranglerPath = join(projectDir, 'wrangler.toml');
|
|
1206
|
+
if (existsSync(cronOnlyWranglerPath)) {
|
|
1207
|
+
tempWranglerPath = generateTempWranglerToml(cronOnlyWranglerPath, {
|
|
1208
|
+
bindings: [],
|
|
1209
|
+
triggerMode: 'replace',
|
|
1210
|
+
managedCrons: cronSchedules,
|
|
1211
|
+
rateLimitBindings,
|
|
1212
|
+
});
|
|
1213
|
+
if (tempWranglerPath) {
|
|
1214
|
+
if (rateLimitBindings.length > 0 && cronSchedules.length > 0) {
|
|
1215
|
+
console.log(chalk.green('✓'), 'Generated temp wrangler.toml with rate-limit bindings and cron trigger(s)');
|
|
1216
|
+
}
|
|
1217
|
+
else if (rateLimitBindings.length > 0) {
|
|
1218
|
+
console.log(chalk.green('✓'), 'Generated temp wrangler.toml with rate-limit bindings');
|
|
1219
|
+
}
|
|
1220
|
+
else {
|
|
1221
|
+
console.log(chalk.green('✓'), 'Generated temp wrangler.toml with cron trigger(s)');
|
|
1222
|
+
}
|
|
1223
|
+
}
|
|
1224
|
+
}
|
|
1225
|
+
}
|
|
1226
|
+
// Ensure the admin dashboard assets ship with deploys even for custom wrangler.toml
|
|
1227
|
+
// files that omitted the EdgeBase-managed [assets] block.
|
|
1228
|
+
if (!tempWranglerPath && existsSync(wranglerPath)) {
|
|
1229
|
+
tempWranglerPath = generateTempWranglerToml(wranglerPath, {
|
|
1230
|
+
bindings: [],
|
|
1231
|
+
triggerMode: 'preserve',
|
|
1232
|
+
});
|
|
1233
|
+
if (tempWranglerPath) {
|
|
1234
|
+
console.log(chalk.green('✓'), 'Generated temp wrangler.toml with admin assets binding');
|
|
1235
|
+
}
|
|
1236
|
+
}
|
|
1237
|
+
// ─── Turnstile Auto-Provisioning ───
|
|
1238
|
+
if (configJson) {
|
|
1239
|
+
const captchaCfg = configJson.captcha;
|
|
1240
|
+
if (captchaCfg) {
|
|
1241
|
+
const turnstileResult = await provisionTurnstile(captchaCfg, projectDir, configJson, cfAuth.accountId);
|
|
1242
|
+
if (turnstileResult) {
|
|
1243
|
+
// §28: Inject siteKey as CAPTCHA_SITE_KEY wrangler var (independent from bundled app config)
|
|
1244
|
+
const targetToml = tempWranglerPath ?? join(projectDir, 'wrangler.toml');
|
|
1245
|
+
injectCaptchaSiteKey(targetToml, turnstileResult.siteKey);
|
|
1246
|
+
if (turnstileResult.managed && turnstileResult.widgetName) {
|
|
1247
|
+
const previousTurnstile = findCloudflareResourceRecord(previousManifest, {
|
|
1248
|
+
type: 'turnstile_widget',
|
|
1249
|
+
name: turnstileResult.widgetName,
|
|
1250
|
+
id: turnstileResult.widgetName,
|
|
1251
|
+
});
|
|
1252
|
+
manifestResources.push({
|
|
1253
|
+
type: 'turnstile_widget',
|
|
1254
|
+
name: turnstileResult.widgetName,
|
|
1255
|
+
id: turnstileResult.widgetName,
|
|
1256
|
+
managed: previousTurnstile?.managed ?? true,
|
|
1257
|
+
source: turnstileResult.source,
|
|
1258
|
+
metadata: { siteKey: turnstileResult.siteKey },
|
|
1259
|
+
});
|
|
1260
|
+
}
|
|
1261
|
+
}
|
|
1262
|
+
console.log();
|
|
1263
|
+
}
|
|
1264
|
+
}
|
|
1265
|
+
// ─── Deploy ───
|
|
1266
|
+
const deployArgs = ['wrangler', 'deploy'];
|
|
1267
|
+
if (tempWranglerPath) {
|
|
1268
|
+
deployArgs.push('--config', tempWranglerPath);
|
|
1269
|
+
console.log(chalk.dim(` Using generated config: ${tempWranglerPath}`));
|
|
1270
|
+
}
|
|
1271
|
+
if (!isQuiet())
|
|
1272
|
+
console.log(chalk.dim(' Running wrangler deploy...'));
|
|
1273
|
+
// Wrap deploy in a promise so we can await post-deploy migration restore
|
|
1274
|
+
let deployExitCode;
|
|
1275
|
+
let deployOutput;
|
|
1276
|
+
try {
|
|
1277
|
+
({ code: deployExitCode, output: deployOutput } = await new Promise((resolveDeploy, rejectDeploy) => {
|
|
1278
|
+
const wrangler = spawn(wranglerCommand(), wranglerArgs(deployArgs), {
|
|
1279
|
+
cwd: projectDir,
|
|
1280
|
+
stdio: ['inherit', 'pipe', 'pipe'],
|
|
1281
|
+
});
|
|
1282
|
+
let capturedDeployOutput = '';
|
|
1283
|
+
wrangler.stdout?.on('data', (chunk) => {
|
|
1284
|
+
const text = chunk.toString();
|
|
1285
|
+
capturedDeployOutput += text;
|
|
1286
|
+
process.stdout.write(text);
|
|
1287
|
+
});
|
|
1288
|
+
wrangler.stderr?.on('data', (chunk) => {
|
|
1289
|
+
const text = chunk.toString();
|
|
1290
|
+
capturedDeployOutput += text;
|
|
1291
|
+
process.stderr.write(text);
|
|
1292
|
+
});
|
|
1293
|
+
wrangler.on('error', (err) => {
|
|
1294
|
+
if (tempWranglerPath)
|
|
1295
|
+
try {
|
|
1296
|
+
unlinkSync(tempWranglerPath);
|
|
1297
|
+
}
|
|
1298
|
+
catch {
|
|
1299
|
+
/* ignore */
|
|
1300
|
+
}
|
|
1301
|
+
rejectDeploy(err);
|
|
1302
|
+
});
|
|
1303
|
+
wrangler.on('exit', (code) => {
|
|
1304
|
+
if (tempWranglerPath) {
|
|
1305
|
+
try {
|
|
1306
|
+
unlinkSync(tempWranglerPath);
|
|
1307
|
+
}
|
|
1308
|
+
catch {
|
|
1309
|
+
/* ignore */
|
|
1310
|
+
}
|
|
1311
|
+
console.log(chalk.dim(' Cleaned up temp wrangler.toml'));
|
|
1312
|
+
}
|
|
1313
|
+
resolveDeploy({ code: code ?? 1, output: capturedDeployOutput });
|
|
1314
|
+
});
|
|
1315
|
+
}));
|
|
1316
|
+
}
|
|
1317
|
+
catch (err) {
|
|
1318
|
+
raiseCliError({
|
|
1319
|
+
code: 'deploy_spawn_failed',
|
|
1320
|
+
message: `Deploy failed to start: ${err.message}`,
|
|
1321
|
+
hint: 'Check your Wrangler installation and Cloudflare authentication, then retry.',
|
|
1322
|
+
});
|
|
1323
|
+
}
|
|
1324
|
+
if (deployExitCode !== 0) {
|
|
1325
|
+
raiseCliError({
|
|
1326
|
+
code: 'deploy_failed',
|
|
1327
|
+
message: `Deploy failed with exit code: ${deployExitCode}`,
|
|
1328
|
+
hint: `Check Cloudflare auth (${wranglerHint(['wrangler', 'whoami'])}), inspect verbose deploy output (${wranglerHint(['wrangler', 'deploy', '--verbose'])}), or re-login (${wranglerHint(['wrangler', 'login'])}).`,
|
|
1329
|
+
details: {
|
|
1330
|
+
exitCode: deployExitCode,
|
|
1331
|
+
},
|
|
1332
|
+
}, deployExitCode);
|
|
1333
|
+
}
|
|
1334
|
+
const deployedWorkerUrl = resolveDeployedWorkerUrl(projectDir, deployOutput);
|
|
1335
|
+
const persistedManifestResources = dedupeManifestResources([
|
|
1336
|
+
...(previousManifest?.resources ?? []),
|
|
1337
|
+
...manifestResources,
|
|
1338
|
+
]);
|
|
1339
|
+
const deployManifestPath = writeCloudflareDeployManifest(projectDir, {
|
|
1340
|
+
version: 2,
|
|
1341
|
+
deployedAt: new Date().toISOString(),
|
|
1342
|
+
accountId: cfAuth.accountId,
|
|
1343
|
+
worker: {
|
|
1344
|
+
name: resolveWorkerNameFromProject(projectDir),
|
|
1345
|
+
url: deployedWorkerUrl,
|
|
1346
|
+
},
|
|
1347
|
+
resources: persistedManifestResources,
|
|
1348
|
+
});
|
|
1349
|
+
if (!isJson()) {
|
|
1350
|
+
console.log(chalk.dim(` Saved deploy manifest: ${deployManifestPath}`));
|
|
1351
|
+
}
|
|
1352
|
+
try {
|
|
1353
|
+
syncEnvSecrets(projectDir, { failOnError: true });
|
|
1354
|
+
ensureManagedWorkerSecrets(projectDir, cfAuth.accountId, { failOnError: true });
|
|
1355
|
+
}
|
|
1356
|
+
catch (err) {
|
|
1357
|
+
raiseCliError({
|
|
1358
|
+
code: 'deploy_secret_sync_failed',
|
|
1359
|
+
message: `Deploy completed but secret synchronization failed: ${err.message}`,
|
|
1360
|
+
hint: 'The Worker was deployed, but required runtime secrets were not fully applied.',
|
|
1361
|
+
});
|
|
1362
|
+
}
|
|
1363
|
+
// Store deploy manifest in KV for runtime self-destruct
|
|
1364
|
+
storeManifestInKv(projectDir, persistedManifestResources, cfAuth.accountId, {
|
|
1365
|
+
workerName: resolveWorkerNameFromProject(projectDir),
|
|
1366
|
+
workerUrl: deployedWorkerUrl,
|
|
1367
|
+
});
|
|
1368
|
+
const deployedAdminUrl = deployedWorkerUrl
|
|
1369
|
+
? await resolveAdminUrlFromRuntime(deployedWorkerUrl)
|
|
1370
|
+
: null;
|
|
1371
|
+
// ─── Post-deploy: Success ───
|
|
1372
|
+
if (isJson()) {
|
|
1373
|
+
// Note: JSON output after migration below
|
|
1374
|
+
if (!pendingRestore) {
|
|
1375
|
+
console.log(JSON.stringify({
|
|
1376
|
+
status: 'success',
|
|
1377
|
+
url: deployedWorkerUrl,
|
|
1378
|
+
adminUrl: deployedAdminUrl,
|
|
1379
|
+
}));
|
|
1380
|
+
}
|
|
1381
|
+
}
|
|
1382
|
+
else {
|
|
1383
|
+
console.log();
|
|
1384
|
+
console.log(chalk.green('✅ Deployed successfully!'));
|
|
1385
|
+
// Show deployed URL summary
|
|
1386
|
+
if (deployedWorkerUrl) {
|
|
1387
|
+
console.log();
|
|
1388
|
+
console.log(chalk.dim(` API: ${deployedWorkerUrl}/api/...`));
|
|
1389
|
+
if (deployedAdminUrl) {
|
|
1390
|
+
console.log(chalk.dim(` Admin: ${deployedAdminUrl}`));
|
|
1391
|
+
}
|
|
1392
|
+
else {
|
|
1393
|
+
console.log(chalk.dim(' Admin: not deployed'));
|
|
1394
|
+
}
|
|
1395
|
+
}
|
|
1396
|
+
}
|
|
1397
|
+
// ─── Post-deploy: Migration Restore ───
|
|
1398
|
+
if (pendingRestore) {
|
|
1399
|
+
console.log();
|
|
1400
|
+
console.log(chalk.blue('📥 Post-deploy: Restoring data to new provider...'));
|
|
1401
|
+
// Determine scope from dumped data
|
|
1402
|
+
const hasAuth = !!pendingRestore.dumped.auth;
|
|
1403
|
+
const hasData = !!pendingRestore.dumped.data && Object.keys(pendingRestore.dumped.data).length > 0;
|
|
1404
|
+
const scope = hasAuth ? (hasData ? 'all' : 'auth') : 'data';
|
|
1405
|
+
try {
|
|
1406
|
+
await restoreToNewProvider({
|
|
1407
|
+
scope,
|
|
1408
|
+
namespaces: hasData ? Object.keys(pendingRestore.dumped.data) : undefined,
|
|
1409
|
+
serverUrl: pendingRestore.serverUrl,
|
|
1410
|
+
serviceKey: pendingRestore.serviceKey,
|
|
1411
|
+
dryRun: false,
|
|
1412
|
+
}, pendingRestore.dumped);
|
|
1413
|
+
console.log();
|
|
1414
|
+
console.log(chalk.green('✓ Data migration complete!'));
|
|
1415
|
+
}
|
|
1416
|
+
catch (err) {
|
|
1417
|
+
console.error();
|
|
1418
|
+
console.error(chalk.red('✗ Post-deploy restore failed:'), err.message);
|
|
1419
|
+
console.error(chalk.dim(' The deploy succeeded but data was not migrated.'));
|
|
1420
|
+
console.error(chalk.dim(' You can retry with: npx edgebase migrate'));
|
|
1421
|
+
}
|
|
1422
|
+
if (isJson()) {
|
|
1423
|
+
console.log(JSON.stringify({ status: 'success', url: deployedWorkerUrl, migrated: true }));
|
|
1424
|
+
}
|
|
1425
|
+
}
|
|
1426
|
+
// Save schema snapshot on successful deploy
|
|
1427
|
+
if (currentSnapshot) {
|
|
1428
|
+
try {
|
|
1429
|
+
saveSnapshot(projectDir, currentSnapshot);
|
|
1430
|
+
console.log(chalk.dim(' Schema snapshot updated (edgebase-schema.lock.json)'));
|
|
1431
|
+
}
|
|
1432
|
+
catch (err) {
|
|
1433
|
+
console.warn(chalk.yellow('⚠ Failed to save schema snapshot:'), err instanceof Error ? err.message : err);
|
|
1434
|
+
console.warn(chalk.yellow(' Next deploy may not detect destructive changes correctly.'));
|
|
1435
|
+
}
|
|
1436
|
+
}
|
|
1437
|
+
});
|
|
1438
|
+
// ─── Sync .env.release → Cloudflare Secrets ───
|
|
1439
|
+
/**
 * Sync user-defined secrets from `.env.release` into Cloudflare Workers
 * Secrets via `wrangler secret bulk`.
 *
 * Runs before SERVICE_KEY auto-generation so user-defined secrets land
 * first. SERVICE_KEY itself is always excluded here — it is auto-managed
 * by the deploy pipeline even if the user put one in the file.
 *
 * @param {string} projectDir - project root containing `.env.release`
 * @param {{failOnError?: boolean}} [options] - rethrow upload failures when set
 */
function syncEnvSecrets(projectDir, options) {
    const releaseFile = join(projectDir, '.env.release');
    if (!existsSync(releaseFile))
        return;
    const secretVars = resolveReleaseSecretVars(projectDir);
    // SERVICE_KEY is deploy-managed; never let the user's file override it.
    delete secretVars['SERVICE_KEY'];
    const secretNames = Object.keys(secretVars);
    if (secretNames.length === 0)
        return;
    const spinner = spin('Syncing .env.release → Cloudflare Secrets...');
    try {
        // `wrangler secret bulk` reads a JSON object of name→value from stdin.
        execFileSync(wranglerCommand(), wranglerArgs(['wrangler', 'secret', 'bulk']), {
            cwd: projectDir,
            input: JSON.stringify(secretVars),
            stdio: ['pipe', 'pipe', 'pipe'],
        });
        spinner.succeed(`${secretNames.length} secret(s) synced: ${secretNames.join(', ')}`);
    }
    catch (err) {
        spinner.fail('Failed to sync .env.release secrets');
        console.error(chalk.dim(' Error: ' + (err.message?.split('\n')[0] ?? '')));
        console.error(chalk.dim(` You can manually run: ${wranglerHint(['wrangler', 'secret', 'bulk'])} < .env.release`));
        if (options?.failOnError)
            throw err;
    }
}
|
|
1474
|
+
/**
 * Read `.env.release` and resolve the effective secret values: any same-named
 * non-empty process environment variable overrides the file's value (useful
 * for CI where real credentials are injected at deploy time).
 *
 * @param {string} projectDir - project root containing `.env.release`
 * @returns {Record<string, string>} resolved name→value pairs ({} when the file is absent)
 */
function resolveReleaseSecretVars(projectDir) {
    const releaseFile = join(projectDir, '.env.release');
    if (!existsSync(releaseFile))
        return {};
    const resolved = parseEnvFile(releaseFile);
    for (const name of Object.keys(resolved)) {
        const envOverride = process.env[name];
        // Empty-string env vars do NOT override — only real values win.
        if (typeof envOverride === 'string' && envOverride.length > 0) {
            resolved[name] = envOverride;
        }
    }
    return resolved;
}
|
|
1487
|
+
/**
 * Parse a comma-separated env value into trimmed, de-duplicated entries,
 * preserving first-occurrence order. Null/undefined input yields [].
 *
 * @param {string | null | undefined} value - raw CSV env value
 * @returns {string[]} unique non-empty entries
 */
function parseCsvEnv(value) {
    const unique = new Set();
    for (const rawEntry of (value ?? '').split(',')) {
        const entry = rawEntry.trim();
        if (entry) {
            unique.add(entry);
        }
    }
    return [...unique];
}
|
|
1493
|
+
/**
 * Normalize an arbitrary provider name into an env-var-safe segment:
 * runs of non-alphanumerics collapse to '_', edge underscores are stripped,
 * and the result is uppercased (e.g. " my-provider! " → "MY_PROVIDER").
 *
 * @param {string} value - raw provider name
 * @returns {string} uppercase A-Z0-9_ segment (may be empty)
 */
function normalizeAuthEnvSegment(value) {
    const collapsed = value.trim().replace(/[^A-Za-z0-9]+/g, '_');
    const trimmedUnderscores = collapsed.replace(/^_+|_+$/g, '');
    return trimmedUnderscores.toUpperCase();
}
|
|
1500
|
+
/**
 * Map an OAuth provider name to the env-var keys holding its credentials.
 * Custom OIDC providers are spelled "oidc:<name>" and get a wider key set
 * (issuer + scopes) under an EDGEBASE_OIDC_* prefix; everything else uses
 * EDGEBASE_OAUTH_*. Unnormalizable names fall back to the CUSTOM segment.
 *
 * @param {string} provider - provider id, e.g. "github" or "oidc:okta"
 * @returns {{clientId: string, clientSecret: string, issuer?: string, scopes?: string}}
 */
function getOAuthEnvKeys(provider) {
    if (provider.startsWith('oidc:')) {
        const segment = normalizeAuthEnvSegment(provider.slice(5)) || 'CUSTOM';
        const prefix = `EDGEBASE_OIDC_${segment}`;
        return {
            clientId: `${prefix}_CLIENT_ID`,
            clientSecret: `${prefix}_CLIENT_SECRET`,
            issuer: `${prefix}_ISSUER`,
            scopes: `${prefix}_SCOPES`,
        };
    }
    const segment = normalizeAuthEnvSegment(provider) || 'CUSTOM';
    const prefix = `EDGEBASE_OAUTH_${segment}`;
    return {
        clientId: `${prefix}_CLIENT_ID`,
        clientSecret: `${prefix}_CLIENT_SECRET`,
    };
}
|
|
1516
|
+
/**
 * Fields a provider must define before it can be used: every provider needs
 * client credentials; custom OIDC ("oidc:*") additionally needs an issuer URL.
 *
 * @param {string} provider - provider id
 * @returns {string[]} required field names
 */
function getRequiredAuthFields(provider) {
    const fields = ['clientId', 'clientSecret'];
    if (provider.startsWith('oidc:')) {
        fields.push('issuer');
    }
    return fields;
}
|
|
1521
|
+
/**
 * Optional credential fields for a provider. Only custom OIDC providers
 * ("oidc:*") may carry a scopes override; all others have none.
 *
 * @param {string} provider - provider id
 * @returns {string[]} optional field names
 */
function getOptionalAuthFields(provider) {
    if (provider.startsWith('oidc:')) {
        return ['scopes'];
    }
    return [];
}
|
|
1524
|
+
/**
 * Extract whichever provider credential values are present (truthy) in an
 * env-var map, keyed back to logical field names.
 *
 * @param {Record<string, string>} vars - parsed env name→value pairs
 * @param {string} provider - provider id
 * @returns {{clientId?: string, clientSecret?: string, issuer?: string, scopes?: string}}
 */
function getAuthFieldValues(vars, provider) {
    const envKeys = getOAuthEnvKeys(provider);
    const values = {};
    // issuer/scopes keys only exist for OIDC providers; the envKey guard
    // skips them for plain OAuth providers.
    for (const field of ['clientId', 'clientSecret', 'issuer', 'scopes']) {
        const envKey = envKeys[field];
        if (envKey && vars[envKey]) {
            values[field] = vars[envKey];
        }
    }
    return values;
}
|
|
1537
|
+
/**
 * Render a field-name list as a human-readable comma-separated string for
 * warning and prompt messages.
 *
 * @param {string[]} fields - field names
 * @returns {string} e.g. "clientId, clientSecret"
 */
function formatAuthFieldList(fields) {
    return Array.from(fields).join(', ');
}
|
|
1540
|
+
/**
 * Compare OAuth provider configuration between Development (.dev.vars) and
 * Release (.env.release + process-env overrides) and report providers that
 * would misbehave in production.
 *
 * A provider is reported when it is (a) enabled in Development but not in
 * Release, or (b) enabled in Release but missing required secrets. Providers
 * with no discrepancy are omitted entirely.
 *
 * @param {string} projectDir - project root
 * @returns {Array<object>} inspection records with the provider id, enable
 *   flags, a human-readable summary, whether the CLI can auto-copy values
 *   from Development into Release, and the per-environment field values.
 */
function inspectAuthEnv(projectDir) {
    const developmentVars = parseDevVars(projectDir);
    const releaseVars = resolveReleaseSecretVars(projectDir);
    const developmentProviders = parseCsvEnv(developmentVars.EDGEBASE_AUTH_ALLOWED_OAUTH_PROVIDERS);
    const releaseProviders = parseCsvEnv(releaseVars.EDGEBASE_AUTH_ALLOWED_OAUTH_PROVIDERS);
    // Union of both allowlists, de-duplicated, Development order first.
    const seenProviders = new Set();
    const providers = [...developmentProviders, ...releaseProviders].filter((provider) => {
        if (seenProviders.has(provider))
            return false;
        seenProviders.add(provider);
        return true;
    });
    const inspections = [];
    for (const provider of providers) {
        const devEnabled = developmentProviders.includes(provider);
        const releaseEnabled = releaseProviders.includes(provider);
        const requiredFields = getRequiredAuthFields(provider);
        const developmentValues = getAuthFieldValues(developmentVars, provider);
        const releaseValues = getAuthFieldValues(releaseVars, provider);
        const missingReleaseFields = requiredFields.filter((field) => !releaseValues[field]);
        const missingDevelopmentFields = requiredFields.filter((field) => !developmentValues[field]);
        // summaryParts doubles as the discrepancy detector: empty ⇒ healthy.
        const summaryParts = [];
        if (devEnabled && !releaseEnabled) {
            summaryParts.push('enabled in Development but disabled in Release');
        }
        if (releaseEnabled && missingReleaseFields.length > 0) {
            summaryParts.push(`enabled in Release but missing ${formatAuthFieldList(missingReleaseFields)}`);
        }
        if (summaryParts.length === 0)
            continue;
        // Auto-copy is possible when every required field is available from
        // either environment (dev-only provider) or Development can supply
        // each field Release is missing (partially-configured provider).
        const canCopyToRelease = devEnabled && !releaseEnabled
            ? requiredFields.every((field) => !!releaseValues[field] || !!developmentValues[field])
            : missingReleaseFields.every((field) => !!developmentValues[field]);
        if (!canCopyToRelease && missingDevelopmentFields.length > 0) {
            summaryParts.push(`Development is also missing ${formatAuthFieldList(missingDevelopmentFields)}`);
        }
        inspections.push({
            provider,
            devEnabled,
            releaseEnabled,
            summary: summaryParts.join('; '),
            canCopyToRelease,
            requiredFields,
            missingReleaseFields,
            missingDevelopmentFields,
            developmentValues,
            releaseValues,
        });
    }
    return inspections;
}
|
|
1591
|
+
/**
 * Build non-interactive warning strings from the auth env inspection results,
 * for contexts where the CLI cannot prompt (CI, --json, non-TTY).
 *
 * @param {string} projectDir - project root
 * @returns {string[]} zero, one, or two warning messages
 */
function collectAuthEnvWarnings(projectDir) {
    const inspections = inspectAuthEnv(projectDir);
    // Bucket the findings in a single pass.
    const devOnlyProviders = [];
    const releaseProvidersMissingSecrets = [];
    for (const inspection of inspections) {
        if (inspection.devEnabled && !inspection.releaseEnabled) {
            devOnlyProviders.push(inspection.provider);
        }
        if (inspection.releaseEnabled && inspection.missingReleaseFields.length > 0) {
            releaseProvidersMissingSecrets.push(`${inspection.provider} (${formatAuthFieldList(inspection.missingReleaseFields)})`);
        }
    }
    const warnings = [];
    if (devOnlyProviders.length > 0) {
        warnings.push(`OAuth provider(s) enabled in Development but not Release: ${devOnlyProviders.join(', ')}. ` +
            'Deploy reads .env.release and Cloudflare Secrets only, so these providers will stay disabled in production.');
    }
    if (releaseProvidersMissingSecrets.length > 0) {
        warnings.push(`Release OAuth provider(s) are enabled but missing required secrets in .env.release: ${releaseProvidersMissingSecrets.join('; ')}.`);
    }
    return warnings;
}
|
|
1609
|
+
/**
 * Mutate `.env.release` for a single provider: ensure it is present in the
 * Release allowlist, then copy any Development credential values that Release
 * does not already define. Existing Release values are never overwritten.
 *
 * @param {string} projectDir - project root
 * @param {object} inspection - record from inspectAuthEnv()
 * @returns {{enabledInRelease: boolean, copiedFields: string[]}} what changed
 */
function copyDevelopmentAuthProviderToRelease(projectDir, inspection) {
    const envReleasePath = join(projectDir, '.env.release');
    const releaseFileVars = existsSync(envReleasePath) ? parseEnvFile(envReleasePath) : {};
    const releaseAllowlist = parseCsvEnv(releaseFileVars.EDGEBASE_AUTH_ALLOWED_OAUTH_PROVIDERS);
    let enabledInRelease = false;
    if (!releaseAllowlist.includes(inspection.provider)) {
        releaseAllowlist.push(inspection.provider);
        upsertEnvValue(envReleasePath, 'EDGEBASE_AUTH_ALLOWED_OAUTH_PROVIDERS', releaseAllowlist.join(','), RELEASE_ENV_HEADER);
        enabledInRelease = true;
    }
    const envKeys = getOAuthEnvKeys(inspection.provider);
    const copiedFields = [];
    const candidateFields = [...inspection.requiredFields, ...getOptionalAuthFields(inspection.provider)];
    for (const field of candidateFields) {
        // Logical field names map 1:1 onto env keys; issuer/scopes keys only
        // exist for OIDC providers, so skip fields with no corresponding key.
        const envKey = envKeys[field];
        if (!envKey)
            continue;
        const developmentValue = inspection.developmentValues[field];
        // Nothing to copy, or Release already has its own value — leave it.
        if (!developmentValue || inspection.releaseValues[field])
            continue;
        upsertEnvValue(envReleasePath, envKey, developmentValue, RELEASE_ENV_HEADER);
        copiedFields.push(field);
    }
    return { enabledInRelease, copiedFields };
}
|
|
1639
|
+
/**
 * Interactive (TTY) flow for resolving Development/Release auth env drift.
 *
 * Prints every discrepancy found by inspectAuthEnv(), then — for providers
 * the CLI can fix automatically — asks the user, one provider at a time,
 * whether to enable the provider in Release and/or copy missing values
 * from Development into `.env.release`. Declining any prompt skips that
 * provider without side effects.
 *
 * @param {string} projectDir - project root
 * @returns {Promise<void>}
 */
async function promptToSyncAuthReleaseEnv(projectDir) {
    const inspections = inspectAuthEnv(projectDir);
    if (inspections.length === 0)
        return;
    console.log();
    console.log(chalk.yellow('⚠ Auth release environment differences detected:'));
    for (const inspection of inspections) {
        // Tell the user up front whether an auto-fix is even possible for
        // this provider, and if not, why (usually Development is incomplete).
        const guidance = inspection.canCopyToRelease
            ? inspection.releaseEnabled
                ? 'The CLI can fill the missing Release values from Development.'
                : 'The CLI can enable this provider in Release and fill any missing values from Development.'
            : inspection.missingDevelopmentFields.length > 0
                ? `Development is missing ${formatAuthFieldList(inspection.missingDevelopmentFields)}, so the CLI cannot auto-copy it yet.`
                : 'The CLI cannot auto-copy this provider yet.';
        console.log(chalk.yellow(` • ${inspection.provider}: ${inspection.summary}. ${guidance}`));
    }
    // Only fixable providers are offered in the per-provider review loop.
    const actionableInspections = inspections.filter((inspection) => inspection.canCopyToRelease);
    if (actionableInspections.length === 0) {
        console.log();
        return;
    }
    console.log();
    const shouldReview = await promptConfirm('Review these providers one by one and optionally copy Development values into Release now?', false);
    if (!shouldReview) {
        console.log();
        return;
    }
    console.log();
    for (const inspection of actionableInspections) {
        const question = inspection.releaseEnabled
            ? `${inspection.provider}: copy the missing ${formatAuthFieldList(inspection.missingReleaseFields)} from Development into Release?`
            : `${inspection.provider}: enable this provider in Release and copy any missing values from Development?`;
        const shouldCopy = await promptConfirm(question, false);
        if (!shouldCopy)
            continue;
        const result = copyDevelopmentAuthProviderToRelease(projectDir, inspection);
        // Summarize exactly what was written so the user can audit the change.
        const changes = [];
        if (result.enabledInRelease)
            changes.push('enabled in Release');
        if (result.copiedFields.length > 0) {
            changes.push(`copied ${formatAuthFieldList(result.copiedFields)} to .env.release`);
        }
        if (changes.length === 0) {
            changes.push('Release already had the needed values, so no file changes were required');
        }
        console.log(chalk.green('✓'), `${inspection.provider}: ${changes.join('; ')}.`);
    }
    console.log();
}
|
|
1688
|
+
/**
 * Ensure the Worker has all EdgeBase-managed secrets after a deploy.
 *
 * For each managed secret missing from the Worker (as reported by
 * `wrangler secret list`), generate or resolve a value and upload it via
 * `wrangler secret put`:
 *   - SERVICE_KEY, JWT_USER_SECRET, JWT_ADMIN_SECRET: random 32-byte hex.
 *   - CF_API_TOKEN / CF_ACCOUNT_ID: stored so the deployed Worker can delete
 *     its own resources from the dashboard ("Delete App"); skipped non-fatally
 *     when no API token can be resolved.
 * Newly generated values are also persisted locally to
 * `.edgebase/secrets.json` (chmod 0600) as a backup.
 *
 * Errors are swallowed unless options.failOnError is set — by default this
 * is best-effort post-deploy hardening.
 *
 * @param {string} projectDir - project root
 * @param {string} accountId - Cloudflare account ID
 * @param {{failOnError?: boolean}} [options]
 */
function ensureManagedWorkerSecrets(projectDir, accountId, options) {
    try {
        const secretNames = listWranglerSecretNames(projectDir);
        const edgebaseDir = join(projectDir, '.edgebase');
        const secretsJsonPath = join(edgebaseDir, 'secrets.json');
        if (!existsSync(edgebaseDir))
            mkdirSync(edgebaseDir, { recursive: true });
        // Merge into any existing local backup rather than clobbering it.
        let existingSecrets = {};
        if (existsSync(secretsJsonPath)) {
            try {
                existingSecrets = JSON.parse(readFileSync(secretsJsonPath, 'utf-8'));
            }
            catch {
                /* ignore invalid JSON */
            }
        }
        const generatedAt = new Date().toISOString();
        // Queue of {name, value, spinnerLabel} to upload; built first, uploaded after.
        const generatedSecrets = [];
        if (!secretNames.has('SERVICE_KEY')) {
            generatedSecrets.push({
                name: 'SERVICE_KEY',
                value: randomBytes(32).toString('hex'),
                spinnerLabel: 'Generating Service Key...',
            });
        }
        if (!secretNames.has('JWT_USER_SECRET')) {
            generatedSecrets.push({
                name: 'JWT_USER_SECRET',
                value: randomBytes(32).toString('hex'),
                spinnerLabel: 'Generating JWT user secret...',
            });
        }
        if (!secretNames.has('JWT_ADMIN_SECRET')) {
            generatedSecrets.push({
                name: 'JWT_ADMIN_SECRET',
                value: randomBytes(32).toString('hex'),
                spinnerLabel: 'Generating JWT admin secret...',
            });
        }
        // Store CF credentials for self-destruct capability (dashboard "Delete App")
        try {
            const { token: apiToken } = resolveApiToken();
            if (!secretNames.has('CF_API_TOKEN')) {
                generatedSecrets.push({
                    name: 'CF_API_TOKEN',
                    value: apiToken,
                    spinnerLabel: 'Storing CF API token for self-management...',
                });
            }
            if (!secretNames.has('CF_ACCOUNT_ID')) {
                generatedSecrets.push({
                    name: 'CF_ACCOUNT_ID',
                    value: accountId,
                    spinnerLabel: 'Storing CF account ID...',
                });
            }
        }
        catch {
            // Non-fatal: self-destruct won't be available from dashboard
            if (!isQuiet()) {
                console.log(chalk.dim(' ⚠ Could not resolve CF API token — dashboard "Delete App" will be unavailable'));
            }
        }
        for (const secret of generatedSecrets) {
            const spinner = spin(secret.spinnerLabel);
            // `wrangler secret put <name>` reads the secret value from stdin.
            execFileSync(wranglerCommand(), wranglerArgs(['wrangler', 'secret', 'put', secret.name]), {
                cwd: projectDir,
                input: secret.value,
                stdio: ['pipe', 'pipe', 'pipe'],
            });
            spinner.succeed(`${secret.name} stored`);
            existingSecrets[secret.name] = secret.value;
            if (secret.name === 'SERVICE_KEY') {
                existingSecrets['SERVICE_KEY_CREATED_AT'] = generatedAt;
                existingSecrets['SERVICE_KEY_UPDATED_AT'] = generatedAt;
                // Masked echo: only the final 4 chars of the key are shown.
                console.log(chalk.dim(' Key: sk_' + '*'.repeat(12) + secret.value.slice(-4)));
            }
        }
        if (generatedSecrets.length > 0) {
            console.log();
            writeFileSync(secretsJsonPath, JSON.stringify(existingSecrets, null, 2), 'utf-8');
            // Owner read/write only — the file holds live secret material.
            chmodSync(secretsJsonPath, 0o600);
            console.log(chalk.dim(' Saved to .edgebase/secrets.json (backup-ready)'));
        }
    }
    catch (err) {
        if (options?.failOnError)
            throw err;
    }
}
|
|
1778
|
+
/**
 * Persist the deploy manifest into the Worker's internal KV namespace so the
 * running Worker can read it — required for self-destruct ("Delete App" from
 * the dashboard). Best-effort: any failure degrades to a dimmed warning.
 *
 * @param {string} projectDir - project root (cwd for the wrangler call)
 * @param {Array<object>} resources - manifest resource records
 * @param {string} accountId - Cloudflare account ID
 * @param {{workerName: string, workerUrl: string|null}} worker - deployed worker identity
 */
function storeManifestInKv(projectDir, resources, accountId, worker) {
    // Find the internal KV namespace ID from manifest resources
    const kvResource = resources.find((r) => r.type === 'kv_namespace' && (r.binding === 'KV' || r.name === 'internal'));
    if (!kvResource?.id) {
        if (!isQuiet()) {
            console.log(chalk.dim(' ⚠ KV namespace ID not found — skipping manifest KV store'));
        }
        return;
    }
    const manifest = {
        version: 2,
        deployedAt: new Date().toISOString(),
        accountId,
        worker: {
            name: worker.workerName,
            url: worker.workerUrl,
        },
        resources,
    };
    try {
        const kvPutArgs = [
            'wrangler', 'kv', 'key', 'put',
            '--namespace-id', kvResource.id,
            '--remote',
            '__edgebase_deploy_manifest',
            JSON.stringify(manifest),
        ];
        execFileSync(wranglerCommand(), wranglerArgs(kvPutArgs), {
            cwd: projectDir,
            encoding: 'utf-8',
            stdio: ['pipe', 'pipe', 'pipe'],
        });
        if (!isQuiet()) {
            console.log(chalk.dim(' Deploy manifest stored in KV for runtime access'));
        }
    }
    catch {
        if (!isQuiet()) {
            console.log(chalk.dim(' ⚠ Could not store deploy manifest in KV — dashboard "Delete App" may not work'));
        }
    }
}
|
|
1823
|
+
//# sourceMappingURL=deploy.js.map
|