void 0.7.0 → 0.7.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents-DqkFfc2c.mjs +151 -0
- package/dist/{auth-cmd-Dx8oPKZC.mjs → auth-cmd-Dk0acCT5.mjs} +2 -2
- package/dist/{better-auth-shared-C9_GHSkR.d.mts → better-auth-shared-CZsIpjey.d.mts} +1 -1
- package/dist/{cache-W82I8ihI.mjs → cache-DGSZ5Bh6.mjs} +2 -2
- package/dist/{cancel-deploy-BOBTqqh0.mjs → cancel-deploy-CrY3kt93.mjs} +1 -1
- package/dist/cli/cli.mjs +37 -177
- package/dist/{client-snXOjrp1.mjs → client-DCqnMpDt.mjs} +161 -18
- package/dist/{create-project-BIA15W7z.mjs → create-project-Bg88Kq_I.mjs} +3 -3
- package/dist/{db-DsRoMcfN.mjs → db-ClNu7vYQ.mjs} +13 -13
- package/dist/{delete-DAP6yDc7.mjs → delete-DXcX1yQZ.mjs} +2 -2
- package/dist/{deploy-BPKblFx6.mjs → deploy-BkjqNk9U.mjs} +888 -185
- package/dist/{domain-BGofcQ6I.mjs → domain-CDQhvYNZ.mjs} +1 -1
- package/dist/{env-CyG3tvU0.mjs → env-CnrQY2b6.mjs} +1 -1
- package/dist/{env-helpers-Dr9Y7RnE.d.mts → env-helpers-CbeM_7-k.d.mts} +1 -1
- package/dist/{gen-U0Ktr4Zd.mjs → gen-C0EY2k27.mjs} +1 -1
- package/dist/{handler-B0ds0OHJ.d.mts → handler-dKQWyF-G.d.mts} +3 -3
- package/dist/index.d.mts +3 -3
- package/dist/index.mjs +13 -12
- package/dist/{init-C7wS5iGP.mjs → init-CPny6w9D.mjs} +63 -28
- package/dist/{link-p2R6NbgN.mjs → link-eZ0aiHFK.mjs} +2 -2
- package/dist/{list-Bfel-QLc.mjs → list-ztyEz4TW.mjs} +2 -2
- package/dist/{login-CkcXUiIu.mjs → login-B5HHT32i.mjs} +1 -1
- package/dist/{logs-DmkrRvx6.mjs → logs-J4BN0LXd.mjs} +1 -1
- package/dist/{mcp-CaQzfeUi.mjs → mcp-Bdu9bnjR.mjs} +1 -1
- package/dist/{node-DDfXj10V.mjs → node-DFqMcZR1.mjs} +3 -3
- package/dist/pages/client.d.mts +1 -1
- package/dist/pages/client.mjs +3 -0
- package/dist/pages/head-client.d.mts +1 -1
- package/dist/pages/head.d.mts +1 -1
- package/dist/pages/index.d.mts +2 -2
- package/dist/pages/index.mjs +1 -1
- package/dist/pages/islands-plugin.d.mts +1 -1
- package/dist/pages/protocol.d.mts +2 -2
- package/dist/pages/protocol.mjs +23 -18
- package/dist/{prepare-BAtWufvm.mjs → prepare-DKkx-2Kt.mjs} +1 -1
- package/dist/{project-cmd-B7lQp3F3.mjs → project-cmd-DKiQYdSd.mjs} +8 -8
- package/dist/{protocol-BWzXs2A2.d.mts → protocol-CK4OFwfR.d.mts} +2 -2
- package/dist/{rollback-gyC59l7U.mjs → rollback-ZNvT8T54.mjs} +1 -1
- package/dist/{runner-6Ep3fNQu.mjs → runner-BUPRnMFN.mjs} +1 -1
- package/dist/runtime/ai.mjs +1 -1
- package/dist/runtime/auth.d.mts +1 -1
- package/dist/runtime/better-auth-pg.d.mts +1 -1
- package/dist/runtime/better-auth-pg.mjs +2 -2
- package/dist/runtime/better-auth.d.mts +1 -1
- package/dist/runtime/better-auth.mjs +2 -2
- package/dist/runtime/client.d.mts +2 -2
- package/dist/runtime/client.mjs +1 -1
- package/dist/runtime/env-helpers.d.mts +1 -1
- package/dist/runtime/env-public-client.d.mts +1 -1
- package/dist/runtime/env-public.d.mts +2 -2
- package/dist/runtime/env-public.mjs +1 -1
- package/dist/runtime/env.mjs +1 -1
- package/dist/runtime/fetch-stream.d.mts +1 -1
- package/dist/runtime/fetch-stream.mjs +1 -1
- package/dist/runtime/fetch.d.mts +1 -1
- package/dist/runtime/fetch.mjs +1 -1
- package/dist/runtime/handler.d.mts +1 -1
- package/dist/runtime/handler.mjs +1 -1
- package/dist/runtime/isr.mjs +1 -1
- package/dist/runtime/migration-handler.mjs +2 -2
- package/dist/runtime/validator.d.mts +1 -1
- package/dist/runtime/ws-server.d.mts +2 -2
- package/dist/runtime/ws.d.mts +3 -3
- package/dist/{secret-CeRSukgM.mjs → secret-BXHx515u.mjs} +2 -2
- package/dist/{skills-ipldjlKE.mjs → skills-CbuYOthf.mjs} +1 -1
- package/package.json +20 -20
- package/skills/void/docs/guide/deployment.md +4 -6
- package/skills/void/docs/index.md +3 -3
- package/skills/void/docs/node_modules/void/AGENTS.md +1 -1
- package/skills/void/docs/node_modules/void/node_modules/@types/node/README.md +1 -1
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/@types/node/README.md +1 -1
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/AGENTS.md +15 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/README.md +208 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/config/build.md +21 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/config/fmt.md +18 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/config/index.md +31 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/config/lint.md +24 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/config/pack.md +17 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/config/run.md +249 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/config/staged.md +15 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/config/test.md +18 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/build.md +40 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/cache.md +119 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/check.md +44 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/ci.md +64 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/commit-hooks.md +51 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/create.md +88 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/dev.md +24 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/env.md +102 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/fmt.md +41 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/ide-integration.md +101 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/implode.md +23 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/index.md +128 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/install.md +147 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/lint.md +50 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/migrate.md +173 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/pack.md +61 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/run.md +324 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/test.md +35 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/troubleshooting.md +132 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/upgrade.md +49 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/vpx.md +66 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/guide/why.md +39 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/index.md +12 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/docs/team.md +35 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/templates/generator/README.md +35 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite-plus/templates/monorepo/README.md +29 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/AGENTS.md +15 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/README.md +208 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/config/build.md +21 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/config/fmt.md +18 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/config/index.md +31 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/config/lint.md +24 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/config/pack.md +17 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/config/run.md +249 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/config/staged.md +15 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/config/test.md +18 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/build.md +40 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/cache.md +119 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/check.md +44 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/ci.md +64 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/commit-hooks.md +51 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/create.md +88 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/dev.md +24 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/env.md +102 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/fmt.md +41 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/ide-integration.md +101 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/implode.md +23 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/index.md +128 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/install.md +147 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/lint.md +50 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/migrate.md +173 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/pack.md +61 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/run.md +324 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/test.md +35 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/troubleshooting.md +132 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/upgrade.md +49 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/vpx.md +66 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/guide/why.md +39 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/index.md +12 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/docs/team.md +35 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/templates/generator/README.md +35 -0
- package/skills/void/docs/node_modules/void/node_modules/vite-plus/templates/monorepo/README.md +29 -0
- package/skills/void/docs/reference/cli.md +10 -7
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/tsdown/README.md +0 -55
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite/LICENSE.md +0 -2230
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite/README.md +0 -20
- package/skills/void/docs/node_modules/void/node_modules/tsdown/README.md +0 -55
- package/skills/void/docs/node_modules/void/node_modules/vite/LICENSE.md +0 -2230
- package/skills/void/docs/node_modules/void/node_modules/vite/README.md +0 -20
- /package/dist/{auth-BdsJ0Aff.d.mts → auth-DrfOTMmr.d.mts} +0 -0
- /package/dist/{auth-migrations-BAtAck2g.mjs → auth-migrations-BwLPwRgH.mjs} +0 -0
- /package/dist/{better-auth-shared-CdYmQGry.mjs → better-auth-shared-APuDaPqW.mjs} +0 -0
- /package/dist/{defer-DcxEsVH1.mjs → defer-2ARBu8Et.mjs} +0 -0
- /package/dist/{drizzle-NnudE_UN.mjs → drizzle-C-NRqGhx.mjs} +0 -0
- /package/dist/{env-raw-BDL4TvdN.mjs → env-raw-DtfQ9E31.mjs} +0 -0
- /package/dist/{fetch-error-BQ8sZ5Nd.mjs → fetch-error-CEr0ACTl.mjs} +0 -0
- /package/dist/{fetch-error-CVZ5CGA-.d.mts → fetch-error-DflegrF3.d.mts} +0 -0
- /package/dist/{head-P-egrtFE.d.mts → head-CZGAunBV.d.mts} +0 -0
- /package/dist/{headers-DCXc7mDs.mjs → headers-YVkHjOyq.mjs} +0 -0
- /package/dist/{preset-D4I73kT4.mjs → preset-DFvePt0l.mjs} +0 -0
- /package/dist/{project-slug-CKam8lF9.mjs → project-slug-KRvHQEQI.mjs} +0 -0
- /package/dist/{resolve-project-Br5BR03U.mjs → resolve-project-DdjLQ2tB.mjs} +0 -0
- /package/dist/{runner-pg-D0wWHYnr.mjs → runner-pg-BI6f6Ncm.mjs} +0 -0
- /package/dist/{standard-schema-9CRjx-uR.d.mts → standard-schema-BfGDWXff.d.mts} +0 -0
- /package/dist/{subcommand-prompt-BKjuNAPb.mjs → subcommand-prompt-BMS1TNG5.mjs} +0 -0
- /package/dist/{types-mHOEwpW4.d.mts → types-AdKzPp2C.d.mts} +0 -0
- /package/dist/{yarn-pnp-BFqMV_bl.mjs → yarn-pnp-6LD6_3Ej.mjs} +0 -0
|
@@ -4,7 +4,7 @@ import { n as cliTitle, r as createSpinner, s as import_picocolors } from "./out
|
|
|
4
4
|
import { t as findVoidAuthConfig } from "./config-CvHtTM0q.mjs";
|
|
5
5
|
import { c as R, g as ge, u as Se, v as ue, x as q, y as ye } from "./dist-Dayj3gCK.mjs";
|
|
6
6
|
import { a as writeProjectConfig, r as readProjectConfig } from "./project-TqORyHn8.mjs";
|
|
7
|
-
import { a as
|
|
7
|
+
import { a as isExpiredTokenError, c as getToken, i as isCliOutdatedError, l as getTokenSource, n as PlatformClient, o as parsePlatformErrorBody, t as PlatformApiError, u as isStagingMode } from "./client-DCqnMpDt.mjs";
|
|
8
8
|
import { c as getDatabaseDialect, f as readConfig, l as isNodeTarget, p as resolveBindingNames } from "./config-BIa9HwVX.mjs";
|
|
9
9
|
import { i as scanJobsSync, n as scanWebSocketRoutesSync, r as scanQueuesSync, t as scanRoutes } from "./scan-C6HMEIdW.mjs";
|
|
10
10
|
import { c as validateSsrEntry, n as detectFramework, r as inferProjectBindings, t as FRAMEWORK_SCAN_DIRS } from "./plugin-inference-oZ6Ybu2_.mjs";
|
|
@@ -16,18 +16,19 @@ import { n as writeDrizzleConfig } from "./config-BzM9Dy7T.mjs";
|
|
|
16
16
|
import { t as collectMigrations } from "./collect-CjeZgz5D.mjs";
|
|
17
17
|
import { r as validateMigrations, t as assertJournalCoherence } from "./validate-CaMavMxu.mjs";
|
|
18
18
|
import { t as scanPages } from "./scan-Ba4hFwlH.mjs";
|
|
19
|
-
import { n as promptProjectSelection, r as promptProjectSetupAction, t as promptAndCreateProject } from "./create-project-
|
|
20
|
-
import { r as resolveProjectBySlug, t as getRequestedProjectSlug } from "./resolve-project-
|
|
21
|
-
import { n as lintDuplicateSources, r as mergeRoutingRules, t as lintDestinationSplats } from "./headers-
|
|
22
|
-
import { t as promptForLoginToken } from "./login-
|
|
23
|
-
import { i as resolveProjectCommand, n as detectPreset, r as formatProjectCommand, t as FRAMEWORK_PRESETS } from "./preset-
|
|
19
|
+
import { n as promptProjectSelection, r as promptProjectSetupAction, t as promptAndCreateProject } from "./create-project-Bg88Kq_I.mjs";
|
|
20
|
+
import { r as resolveProjectBySlug, t as getRequestedProjectSlug } from "./resolve-project-DdjLQ2tB.mjs";
|
|
21
|
+
import { n as lintDuplicateSources, r as mergeRoutingRules, t as lintDestinationSplats } from "./headers-YVkHjOyq.mjs";
|
|
22
|
+
import { t as promptForLoginToken } from "./login-B5HHT32i.mjs";
|
|
23
|
+
import { i as resolveProjectCommand, n as detectPreset, r as formatProjectCommand, t as FRAMEWORK_PRESETS } from "./preset-DFvePt0l.mjs";
|
|
24
24
|
import { createRequire } from "node:module";
|
|
25
|
-
import { copyFileSync, cpSync, existsSync, mkdirSync, readFileSync, readdirSync, renameSync, rmSync, symlinkSync, unlinkSync, writeFileSync } from "node:fs";
|
|
25
|
+
import { copyFileSync, cpSync, createWriteStream, existsSync, mkdirSync, readFileSync, readdirSync, renameSync, rmSync, symlinkSync, unlinkSync, writeFileSync } from "node:fs";
|
|
26
26
|
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
27
27
|
import { build, loadEnv } from "vite";
|
|
28
|
-
import { tmpdir } from "node:os";
|
|
28
|
+
import { homedir, tmpdir } from "node:os";
|
|
29
29
|
import { parse } from "jsonc-parser";
|
|
30
30
|
import { execFile, execFileSync, execSync } from "node:child_process";
|
|
31
|
+
import { createHash } from "node:crypto";
|
|
31
32
|
import { hash } from "blake3-jit";
|
|
32
33
|
import ignore from "ignore";
|
|
33
34
|
const SANDBOX_MIGRATION_TAG = "void-sandbox-v1";
|
|
@@ -808,11 +809,17 @@ function trimSnippet(body) {
|
|
|
808
809
|
function appendDeploymentLine(message, deploymentId) {
|
|
809
810
|
return deploymentId ? `${message}\nDeployment: ${deploymentId}` : message;
|
|
810
811
|
}
|
|
812
|
+
function appendLogLine(message, logPath) {
|
|
813
|
+
return logPath ? `${message}\nDetailed log: ${logPath}` : message;
|
|
814
|
+
}
|
|
815
|
+
function appendTrailingLines(message, deploymentId, logPath) {
|
|
816
|
+
return appendLogLine(appendDeploymentLine(message, deploymentId), logPath);
|
|
817
|
+
}
|
|
811
818
|
/**
|
|
812
819
|
* Format a server-emitted `event: 'error'` payload with an optional deployment id.
|
|
813
820
|
*/
|
|
814
|
-
function formatDeployFailureMessage(message, deploymentId) {
|
|
815
|
-
return
|
|
821
|
+
function formatDeployFailureMessage(message, deploymentId, logPath) {
|
|
822
|
+
return appendTrailingLines(message, deploymentId, logPath);
|
|
816
823
|
}
|
|
817
824
|
/**
|
|
818
825
|
* Format the "stream ended cleanly but no terminal event arrived" case.
|
|
@@ -821,8 +828,8 @@ function formatDeployFailureMessage(message, deploymentId) {
|
|
|
821
828
|
* deploy" message — the server closed the NDJSON stream without emitting
|
|
822
829
|
* `done` or `error`, so there is no underlying error to carry.
|
|
823
830
|
*/
|
|
824
|
-
function formatConnectionLostMessage(deploymentId) {
|
|
825
|
-
return
|
|
831
|
+
function formatConnectionLostMessage(deploymentId, logPath) {
|
|
832
|
+
return appendTrailingLines("deploy: Connection lost during deploy.", deploymentId, logPath);
|
|
826
833
|
}
|
|
827
834
|
/**
|
|
828
835
|
* Convert an exception thrown during the deploy NDJSON stream into a
|
|
@@ -837,18 +844,408 @@ function formatConnectionLostMessage(deploymentId) {
|
|
|
837
844
|
* during deploy: <original>` prefix so the caller can still tell network,
|
|
838
845
|
* parse, and transport errors apart.
|
|
839
846
|
*/
|
|
840
|
-
function formatStreamDeployError(err, deploymentId) {
|
|
847
|
+
function formatStreamDeployError(err, deploymentId, logPath) {
|
|
841
848
|
if (err instanceof PlatformApiError) {
|
|
842
849
|
const parsed = parsePlatformErrorBody(err.body);
|
|
843
850
|
const parsedError = parsed?.error ?? parsed?.message ?? null;
|
|
844
851
|
const effectiveDeploymentId = parsed?.deployment ?? null ?? deploymentId;
|
|
845
|
-
if (err
|
|
852
|
+
if (isCliOutdatedError(err) && parsed?.message) return new Error(appendTrailingLines(parsed.message, effectiveDeploymentId, logPath));
|
|
853
|
+
if (err.status === 409 && parsedError) return new Error(appendTrailingLines(`deploy: Deploy failed: ${parsedError}`, effectiveDeploymentId, logPath));
|
|
846
854
|
const detail = parsedError ?? trimSnippet(err.body);
|
|
847
855
|
const base = detail ? `deploy: Deploy failed (HTTP ${err.status}): ${detail}` : `deploy: Deploy failed (HTTP ${err.status}).`;
|
|
848
|
-
return new Error(
|
|
856
|
+
return new Error(appendTrailingLines(base, effectiveDeploymentId, logPath));
|
|
849
857
|
}
|
|
850
858
|
const original = err instanceof Error ? err.message : String(err);
|
|
851
|
-
return new Error(
|
|
859
|
+
return new Error(appendTrailingLines(`deploy: Connection lost during deploy: '${original}'.`, deploymentId, logPath));
|
|
860
|
+
}
|
|
861
|
+
/**
|
|
862
|
+
* Maximum retry attempts for a single R2 PUT before giving up. Matched to
|
|
863
|
+
* the design doc 0068 spec: 5xx triggers retry, 4xx surfaces immediately.
|
|
864
|
+
*/
|
|
865
|
+
const MAX_R2_PUT_RETRIES = 3;
|
|
866
|
+
/**
|
|
867
|
+
* Retry backoff delays in milliseconds. After attempt `i` fails (5xx) we
|
|
868
|
+
* sleep `RETRY_BACKOFF_MS[i]` before retrying. Indexed from 0; once the
|
|
869
|
+
* array is exhausted we surface the error.
|
|
870
|
+
*/
|
|
871
|
+
const RETRY_BACKOFF_MS = [
|
|
872
|
+
250,
|
|
873
|
+
500,
|
|
874
|
+
1e3
|
|
875
|
+
];
|
|
876
|
+
var DirectR2PutError = class extends Error {
|
|
877
|
+
status;
|
|
878
|
+
hash;
|
|
879
|
+
path;
|
|
880
|
+
/**
|
|
881
|
+
* `body` is the response body text (may be empty / non-text). Useful in
|
|
882
|
+
* the surface error so users can correlate against R2-side errors like
|
|
883
|
+
* `BadDigest` (Content-MD5 mismatch) or signature errors.
|
|
884
|
+
*/
|
|
885
|
+
body;
|
|
886
|
+
constructor(status, hash, path, body) {
|
|
887
|
+
super(`R2 PUT failed for ${path} (${hash}): ${status}${body ? ` ${body}` : ""}`);
|
|
888
|
+
this.name = "DirectR2PutError";
|
|
889
|
+
this.status = status;
|
|
890
|
+
this.hash = hash;
|
|
891
|
+
this.path = path;
|
|
892
|
+
this.body = body;
|
|
893
|
+
}
|
|
894
|
+
};
|
|
895
|
+
/**
|
|
896
|
+
* Issue direct-to-R2 PUTs for every item in `items`, bounded to
|
|
897
|
+
* `R2_PUT_CONCURRENCY`. Surfaces the first failure (4xx immediately, 5xx
|
|
898
|
+
* after retry exhaustion) as a `DirectR2PutError` carrying the asset path
|
|
899
|
+
* and hash so the user can correlate against the JSONL log.
|
|
900
|
+
*
|
|
901
|
+
* Logs `r2_put_start` / `r2_put_end` per asset, plus `r2_put_retry` on
|
|
902
|
+
* each retry attempt. Per-asset durations help identify slow uploads.
|
|
903
|
+
*
|
|
904
|
+
* Memory: each successful PUT clears `item.body` and invokes `onAfterPut`
|
|
905
|
+
* so the caller can drop its own ref (e.g. delete from the in-memory
|
|
906
|
+
* `assetFiles` cache). For huge deploys this keeps peak memory bounded by
|
|
907
|
+
* the upload concurrency rather than the total bundle size.
|
|
908
|
+
*/
|
|
909
|
+
async function uploadAssetsToR2(items, cliLog, fetchImpl = fetch, onAfterPut) {
|
|
910
|
+
if (items.length === 0) return;
|
|
911
|
+
let firstError = null;
|
|
912
|
+
let nextIndex = 0;
|
|
913
|
+
const workerCount = Math.min(10, items.length);
|
|
914
|
+
const workers = [];
|
|
915
|
+
for (let w = 0; w < workerCount; w++) workers.push((async () => {
|
|
916
|
+
while (firstError === null) {
|
|
917
|
+
const i = nextIndex++;
|
|
918
|
+
if (i >= items.length) return;
|
|
919
|
+
try {
|
|
920
|
+
await uploadOne(items[i], cliLog, fetchImpl);
|
|
921
|
+
} catch (err) {
|
|
922
|
+
if (firstError === null) firstError = err;
|
|
923
|
+
return;
|
|
924
|
+
}
|
|
925
|
+
items[i].body = void 0;
|
|
926
|
+
onAfterPut?.(items[i]);
|
|
927
|
+
}
|
|
928
|
+
})());
|
|
929
|
+
await Promise.all(workers);
|
|
930
|
+
if (firstError !== null) throw firstError;
|
|
931
|
+
}
|
|
932
|
+
async function uploadOne(item, cliLog, fetchImpl) {
|
|
933
|
+
if (!item.body) throw new Error(`internal: uploadOne invoked for ${item.path} with no body`);
|
|
934
|
+
const body = item.body;
|
|
935
|
+
const startedAt = Date.now();
|
|
936
|
+
cliLog?.info("r2_put_start", {
|
|
937
|
+
hash: item.hash,
|
|
938
|
+
path: item.path,
|
|
939
|
+
sizeBytes: body.length
|
|
940
|
+
});
|
|
941
|
+
let attempt = 0;
|
|
942
|
+
while (true) {
|
|
943
|
+
let res;
|
|
944
|
+
try {
|
|
945
|
+
res = await fetchImpl(item.url, {
|
|
946
|
+
method: "PUT",
|
|
947
|
+
body: new Uint8Array(body),
|
|
948
|
+
headers: item.headers
|
|
949
|
+
});
|
|
950
|
+
} catch (err) {
|
|
951
|
+
if (attempt < MAX_R2_PUT_RETRIES) {
|
|
952
|
+
cliLog?.warn("r2_put_retry", {
|
|
953
|
+
hash: item.hash,
|
|
954
|
+
path: item.path,
|
|
955
|
+
attempt: attempt + 1,
|
|
956
|
+
status: 0,
|
|
957
|
+
error: err instanceof Error ? err.message : String(err)
|
|
958
|
+
});
|
|
959
|
+
await sleep(RETRY_BACKOFF_MS[attempt]);
|
|
960
|
+
attempt++;
|
|
961
|
+
continue;
|
|
962
|
+
}
|
|
963
|
+
cliLog?.error("r2_put_end", err, {
|
|
964
|
+
hash: item.hash,
|
|
965
|
+
path: item.path,
|
|
966
|
+
durationMs: Date.now() - startedAt,
|
|
967
|
+
status: 0
|
|
968
|
+
});
|
|
969
|
+
throw new DirectR2PutError(0, item.hash, item.path, err instanceof Error ? err.message : String(err));
|
|
970
|
+
}
|
|
971
|
+
if (res.ok) {
|
|
972
|
+
cliLog?.info("r2_put_end", {
|
|
973
|
+
hash: item.hash,
|
|
974
|
+
path: item.path,
|
|
975
|
+
durationMs: Date.now() - startedAt,
|
|
976
|
+
status: res.status
|
|
977
|
+
});
|
|
978
|
+
return;
|
|
979
|
+
}
|
|
980
|
+
const errBody = await safeReadText(res);
|
|
981
|
+
if (res.status >= 500 && attempt < MAX_R2_PUT_RETRIES) {
|
|
982
|
+
cliLog?.warn("r2_put_retry", {
|
|
983
|
+
hash: item.hash,
|
|
984
|
+
path: item.path,
|
|
985
|
+
attempt: attempt + 1,
|
|
986
|
+
status: res.status,
|
|
987
|
+
body: truncate(errBody)
|
|
988
|
+
});
|
|
989
|
+
await sleep(RETRY_BACKOFF_MS[attempt]);
|
|
990
|
+
attempt++;
|
|
991
|
+
continue;
|
|
992
|
+
}
|
|
993
|
+
cliLog?.error("r2_put_end", void 0, {
|
|
994
|
+
hash: item.hash,
|
|
995
|
+
path: item.path,
|
|
996
|
+
durationMs: Date.now() - startedAt,
|
|
997
|
+
status: res.status,
|
|
998
|
+
body: truncate(errBody)
|
|
999
|
+
});
|
|
1000
|
+
throw new DirectR2PutError(res.status, item.hash, item.path, truncate(errBody));
|
|
1001
|
+
}
|
|
1002
|
+
}
|
|
1003
|
+
async function safeReadText(res) {
|
|
1004
|
+
try {
|
|
1005
|
+
return await res.text();
|
|
1006
|
+
} catch {
|
|
1007
|
+
return "";
|
|
1008
|
+
}
|
|
1009
|
+
}
|
|
1010
|
+
function truncate(s) {
|
|
1011
|
+
return s.length > 500 ? `${s.slice(0, 500)}...` : s;
|
|
1012
|
+
}
|
|
1013
|
+
function sleep(ms) {
|
|
1014
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
1015
|
+
}
|
|
1016
|
+
//#endregion
|
|
1017
|
+
//#region src/cli/git-metadata.ts
|
|
1018
|
+
function git(root, args) {
|
|
1019
|
+
try {
|
|
1020
|
+
return execFileSync("git", args, {
|
|
1021
|
+
cwd: root,
|
|
1022
|
+
encoding: "utf-8",
|
|
1023
|
+
stdio: [
|
|
1024
|
+
"ignore",
|
|
1025
|
+
"pipe",
|
|
1026
|
+
"ignore"
|
|
1027
|
+
]
|
|
1028
|
+
}).trim();
|
|
1029
|
+
} catch {
|
|
1030
|
+
return null;
|
|
1031
|
+
}
|
|
1032
|
+
}
|
|
1033
|
+
function stripGitSuffix(pathname) {
|
|
1034
|
+
return pathname.endsWith(".git") ? pathname.slice(0, -4) : pathname;
|
|
1035
|
+
}
|
|
1036
|
+
function normalizeGitRemoteUrl(remote) {
|
|
1037
|
+
if (!remote) return null;
|
|
1038
|
+
const trimmed = remote.trim();
|
|
1039
|
+
if (!trimmed) return null;
|
|
1040
|
+
const scpLike = /^(?:[^@]+@)?([^:]+):(.+)$/.exec(trimmed);
|
|
1041
|
+
if (scpLike && !trimmed.includes("://")) {
|
|
1042
|
+
const host = scpLike[1]?.toLowerCase();
|
|
1043
|
+
const path = stripGitSuffix(scpLike[2] ?? "").replace(/^\/+/, "");
|
|
1044
|
+
return host && path ? `https://${host}/${path}` : null;
|
|
1045
|
+
}
|
|
1046
|
+
try {
|
|
1047
|
+
const url = new URL(trimmed);
|
|
1048
|
+
if (url.protocol !== "https:" && url.protocol !== "http:" && url.protocol !== "ssh:") return null;
|
|
1049
|
+
const host = url.hostname.toLowerCase();
|
|
1050
|
+
const path = stripGitSuffix(url.pathname).replace(/^\/+/, "");
|
|
1051
|
+
return host && path ? `https://${host}/${path}` : null;
|
|
1052
|
+
} catch {
|
|
1053
|
+
return null;
|
|
1054
|
+
}
|
|
1055
|
+
}
|
|
1056
|
+
function buildCommitUrl(repositoryUrl, commit) {
|
|
1057
|
+
if (!repositoryUrl || !commit) return null;
|
|
1058
|
+
try {
|
|
1059
|
+
const url = new URL(repositoryUrl);
|
|
1060
|
+
if (url.hostname === "github.com") return `${repositoryUrl}/commit/${commit}`;
|
|
1061
|
+
if (url.hostname === "gitlab.com") return `${repositoryUrl}/-/commit/${commit}`;
|
|
1062
|
+
if (url.hostname === "bitbucket.org") return `${repositoryUrl}/commits/${commit}`;
|
|
1063
|
+
} catch {
|
|
1064
|
+
return null;
|
|
1065
|
+
}
|
|
1066
|
+
return null;
|
|
1067
|
+
}
|
|
1068
|
+
function getGitRefFromEnv(env) {
|
|
1069
|
+
if (env.GITHUB_HEAD_REF) return env.GITHUB_HEAD_REF;
|
|
1070
|
+
if (env.GITHUB_REF_NAME) return env.GITHUB_REF_NAME;
|
|
1071
|
+
if (env.GITHUB_REF) return env.GITHUB_REF.replace(/^refs\/(?:heads|tags|pull)\//, "");
|
|
1072
|
+
return null;
|
|
1073
|
+
}
|
|
1074
|
+
function getGitHubRepositoryUrl(env) {
|
|
1075
|
+
if (!env.GITHUB_REPOSITORY) return null;
|
|
1076
|
+
return normalizeGitRemoteUrl(`${env.GITHUB_SERVER_URL ?? "https://github.com"}/${env.GITHUB_REPOSITORY}`);
|
|
1077
|
+
}
|
|
1078
|
+
function collectGitDeployMetadata(root, commit, env = process.env) {
|
|
1079
|
+
const repositoryUrl = getGitHubRepositoryUrl(env) ?? normalizeGitRemoteUrl(git(root, [
|
|
1080
|
+
"remote",
|
|
1081
|
+
"get-url",
|
|
1082
|
+
"origin"
|
|
1083
|
+
]));
|
|
1084
|
+
const gitRef = getGitRefFromEnv(env) ?? git(root, [
|
|
1085
|
+
"rev-parse",
|
|
1086
|
+
"--abbrev-ref",
|
|
1087
|
+
"HEAD"
|
|
1088
|
+
]);
|
|
1089
|
+
return {
|
|
1090
|
+
commit,
|
|
1091
|
+
commitUrl: buildCommitUrl(repositoryUrl, commit),
|
|
1092
|
+
gitRef: gitRef && gitRef !== "HEAD" ? gitRef : null,
|
|
1093
|
+
repositoryUrl
|
|
1094
|
+
};
|
|
1095
|
+
}
|
|
1096
|
+
//#endregion
|
|
1097
|
+
//#region src/cli/log-file.ts
|
|
1098
|
+
/**
|
|
1099
|
+
* Walk `err.cause` up to 6 levels and return a plain JSON-friendly chain.
|
|
1100
|
+
* Skips stack traces deliberately — keeps output line-oriented and readable.
|
|
1101
|
+
*/
|
|
1102
|
+
function describeErrorChain(err, depth = 0) {
|
|
1103
|
+
if (depth > 5 || err == null) return String(err);
|
|
1104
|
+
if (err instanceof Error) {
|
|
1105
|
+
const node = {
|
|
1106
|
+
name: err.name,
|
|
1107
|
+
message: err.message
|
|
1108
|
+
};
|
|
1109
|
+
const code = err.code;
|
|
1110
|
+
if (code !== void 0) node.code = code;
|
|
1111
|
+
const errno = err.errno;
|
|
1112
|
+
if (errno !== void 0) node.errno = errno;
|
|
1113
|
+
const cause = err.cause;
|
|
1114
|
+
if (cause != null) {
|
|
1115
|
+
const sub = describeErrorChain(cause, depth + 1);
|
|
1116
|
+
if (typeof sub === "object") node.cause = sub;
|
|
1117
|
+
else node.cause = { message: sub };
|
|
1118
|
+
}
|
|
1119
|
+
return node;
|
|
1120
|
+
}
|
|
1121
|
+
return String(err);
|
|
1122
|
+
}
|
|
1123
|
+
/**
 * Render an error and its `cause` chain as a single line, joining levels
 * with ` -> `. Walks the same fields as `describeErrorChain` (depth cap 6,
 * `code`/`errno` tags, no stack traces) but flattens instead of nesting,
 * which keeps stderr output skimmable.
 */
function formatErrorChainInline(err, depth = 0) {
	if (depth > 5 || err == null) return String(err);
	if (!(err instanceof Error)) return String(err);
	let head = err.name ? `${err.name}: ${err.message}` : err.message;
	const tags = [];
	if (err.code !== void 0) tags.push(`code=${err.code}`);
	if (err.errno !== void 0) tags.push(`errno=${err.errno}`);
	if (tags.length > 0) head += ` (${tags.join(" ")})`;
	return err.cause != null ? `${head} -> ${formatErrorChainInline(err.cause, depth + 1)}` : head;
}
|
|
1145
|
+
/**
 * Build one plain-text stderr line: `<level> <msg> [k=v ...] [-> err=<chain>]`.
 * `undefined` field values are dropped, strings longer than 200 chars are
 * truncated with `...`, and non-primitive values are JSON-stringified
 * (falling back to String() when unserialisable).
 */
function formatStderrLine(level, msg, err, fields) {
	const render = (value) => {
		if (value == null || typeof value === "number" || typeof value === "boolean") return String(value);
		if (typeof value === "string") return value.length > 200 ? `${value.slice(0, 200)}...` : value;
		try {
			return JSON.stringify(value);
		} catch {
			return String(value);
		}
	};
	const parts = [level, msg];
	if (fields) {
		for (const [key, value] of Object.entries(fields)) {
			if (value !== void 0) parts.push(`${key}=${render(value)}`);
		}
	}
	if (err !== void 0) parts.push(`-> err=${formatErrorChainInline(err)}`);
	return parts.join(" ");
}
|
|
1162
|
+
/**
 * Open a JSONL deploy log under `~/.void/logs/` and return a logger object
 * with `info`/`warn`/`error`/`debug`/`close` plus a `path` getter.
 *
 * Logging is strictly best-effort: if the directory or stream cannot be
 * created, or the stream errors later, the logger flips a `broken` flag and
 * silently drops file writes. When `opts.mirrorToStderr` is true every line
 * is also written (as plain text) to stderr, independent of file health.
 */
function openDeployLog(opts) {
	const mirror = opts?.mirrorToStderr === true;
	const startedAt = Date.now();
	// path/stream stay null when log-file setup fails; callers see path === null.
	let path = null;
	let stream = null;
	let broken = false;
	let closed = false;
	try {
		const dir = join(homedir(), ".void", "logs");
		mkdirSync(dir, { recursive: true });
		// Timestamped file name; ':' and '.' are not filesystem-safe everywhere.
		const filePath = join(dir, `deploy-${(/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-")}.jsonl`);
		stream = createWriteStream(filePath, {
			flags: "a",
			autoClose: true
		});
		// A later stream error (disk full, etc.) must not crash the deploy —
		// mark the log broken and drop subsequent writes.
		stream.on("error", () => {
			broken = true;
		});
		path = filePath;
	} catch {
		broken = true;
		path = null;
		stream = null;
	}
	// Shared write path for all levels: optional stderr mirror, then one JSON line.
	function write(level, msg, err, fields) {
		if (mirror) try {
			process.stderr.write(`${formatStderrLine(level, msg, err, fields)}\n`);
		} catch {}
		if (broken || !stream || closed) return;
		const now = Date.now();
		const record = {
			ts: now,
			elapsedMs: now - startedAt,
			lvl: level,
			msg
		};
		if (fields) {
			// Caller fields may not shadow the reserved record keys.
			for (const [k, v] of Object.entries(fields)) if (v !== void 0 && k !== "ts" && k !== "elapsedMs" && k !== "lvl" && k !== "msg" && k !== "err") record[k] = v;
		}
		if (err !== void 0) record.err = describeErrorChain(err);
		let line;
		try {
			line = `${JSON.stringify(record)}\n`;
		} catch {
			// Fields contained something JSON cannot serialise (e.g. BigInt /
			// circular refs); emit a minimal record instead of losing the entry.
			line = `${JSON.stringify({
				ts: now,
				elapsedMs: now - startedAt,
				lvl: level,
				msg,
				err: "unserializable fields"
			})}\n`;
		}
		try {
			stream.write(line);
		} catch {
			broken = true;
		}
	}
	return {
		get path() {
			return path;
		},
		info(msg, fields) {
			write("info", msg, void 0, fields);
		},
		warn(msg, fields) {
			write("warn", msg, void 0, fields);
		},
		error(msg, err, fields) {
			write("error", msg, err, fields);
		},
		debug(msg, fields) {
			write("debug", msg, void 0, fields);
		},
		// Idempotent; resolves once the stream has flushed (or immediately when
		// there is no stream / end() itself throws).
		close() {
			if (closed) return Promise.resolve();
			closed = true;
			if (!stream) return Promise.resolve();
			return new Promise((resolveClose) => {
				try {
					stream.end(() => resolveClose());
				} catch {
					resolveClose();
				}
			});
		}
	};
}
|
|
853
1250
|
//#endregion
|
|
854
1251
|
//#region src/shared/utils.ts
|
|
@@ -950,8 +1347,88 @@ function computeAssetHash(content, filePath) {
|
|
|
950
1347
|
return Buffer.from(hash(input)).toString("hex").slice(0, 32);
|
|
951
1348
|
}
|
|
952
1349
|
/**
 * Extension → content-type table for static assets. Kept in sync with the
 * dispatch-side MIME table (per the original note, dispatch reads
 * `obj.httpMetadata?.contentType` first and falls back to path-based
 * inference), so the value declared on the R2 PUT matches what is served.
 */
const ASSET_MIME_TYPES = {
	html: "text/html; charset=utf-8",
	htm: "text/html; charset=utf-8",
	css: "text/css; charset=utf-8",
	js: "text/javascript; charset=utf-8",
	mjs: "text/javascript; charset=utf-8",
	json: "application/json; charset=utf-8",
	xml: "application/xml; charset=utf-8",
	svg: "image/svg+xml; charset=utf-8",
	txt: "text/plain; charset=utf-8",
	csv: "text/csv; charset=utf-8",
	png: "image/png",
	jpg: "image/jpeg",
	jpeg: "image/jpeg",
	gif: "image/gif",
	ico: "image/x-icon",
	webp: "image/webp",
	avif: "image/avif",
	woff: "font/woff",
	woff2: "font/woff2",
	ttf: "font/ttf",
	otf: "font/otf",
	eot: "application/vnd.ms-fontobject",
	pdf: "application/pdf",
	wasm: "application/wasm",
	webmanifest: "application/manifest+json; charset=utf-8",
	map: "application/json; charset=utf-8",
	mp4: "video/mp4",
	webm: "video/webm",
	mp3: "audio/mpeg",
	ogg: "audio/ogg"
};
/**
 * Resolve the content-type to declare on a direct-to-R2 PUT for `path`.
 * This value becomes part of the signed Content-Type header on the presigned
 * URL, so uploading with a different content-type makes R2 reject the PUT.
 * Unknown extensions fall back to `application/octet-stream`.
 */
function getAssetContentType(path) {
	const ext = path.split(".").pop()?.toLowerCase() ?? "";
	const known = ASSET_MIME_TYPES[ext];
	return known ?? "application/octet-stream";
}
|
|
1400
|
+
/**
 * MD5 of `content` as a 32-char lowercase hex string.
 *
 * This is the wire-level upload-integrity value for direct-to-R2 PUTs:
 * R2's S3 compatibility layer enforces `Content-MD5` (RFC 1864) on
 * PutObject, so an upload whose bytes differ from the declared digest is
 * rejected at the transport level. MD5's weak collision resistance is fine
 * here — per the original rationale, BLAKE3 remains the canonical content
 * key and storage is scoped per project, so a collision attack could only
 * target the attacker's own project.
 */
function computeMd5(content) {
	const md5 = createHash("md5");
	md5.update(content);
	return md5.digest("hex");
}
|
|
1414
|
+
/**
 * Derive both digests an asset needs for a direct-to-R2 deploy from one
 * in-memory buffer: BLAKE3 (the CF-compatible asset hash / storage key, via
 * `computeAssetHash`) and MD5 (the R2 upload-integrity checksum, via
 * `computeMd5`). Returned in the v2 manifest entry shape.
 *
 * Single-pass by construction: the caller reads the file once into
 * `content` and both hashes are computed from that buffer.
 */
function computeAssetHashes(content, filePath) {
	const blake3 = computeAssetHash(content, filePath);
	const md5 = computeMd5(content);
	return { blake3, md5 };
}
|
|
1428
|
+
/**
|
|
953
1429
|
* Create a filter function that excludes .assetsignore, _headers, _redirects,
|
|
954
|
-
* deploy-internal patterns, and any patterns listed in a
|
|
1430
|
+
* OS metadata files, deploy-internal patterns, and any patterns listed in a
|
|
1431
|
+
* .assetsignore file.
|
|
955
1432
|
* Matches Wrangler's createAssetsIgnoreFunction from workers-shared/utils/helpers.ts.
|
|
956
1433
|
*/
|
|
957
1434
|
function createAssetsIgnoreFunction(dir, extraPatterns = []) {
|
|
@@ -959,6 +1436,7 @@ function createAssetsIgnoreFunction(dir, extraPatterns = []) {
|
|
|
959
1436
|
"/.assetsignore",
|
|
960
1437
|
"/_redirects",
|
|
961
1438
|
"/_headers",
|
|
1439
|
+
".DS_Store",
|
|
962
1440
|
...extraPatterns
|
|
963
1441
|
];
|
|
964
1442
|
const assetsIgnorePath = join(dir, ".assetsignore");
|
|
@@ -992,6 +1470,9 @@ function checkWorkerJsAsset(relativePath, assetsIgnoreFilePresent) {
|
|
|
992
1470
|
/**
|
|
993
1471
|
* Collect all files in a directory, compute hashes, and return both the
|
|
994
1472
|
* manifest (for preflight) and the raw file map (for selective packaging).
|
|
1473
|
+
*
|
|
1474
|
+
* Each file is read from disk exactly once. Both BLAKE3 (storage key) and
|
|
1475
|
+
* MD5 (R2 upload-integrity) digests are derived from that single buffer.
|
|
995
1476
|
*/
|
|
996
1477
|
async function collectAndHashAssets(dir, onProgress, opts = {}) {
|
|
997
1478
|
const assetManifest = {};
|
|
@@ -1007,9 +1488,12 @@ async function collectAndHashAssets(dir, onProgress, opts = {}) {
|
|
|
1007
1488
|
if (isIgnored(relativePath)) continue;
|
|
1008
1489
|
checkWorkerJsAsset(relativePath, assetsIgnoreFilePresent);
|
|
1009
1490
|
const content = readFileSync(filePath);
|
|
1491
|
+
const { blake3, md5 } = computeAssetHashes(content, filePath);
|
|
1010
1492
|
assetManifest[relativePath] = {
|
|
1011
|
-
|
|
1012
|
-
|
|
1493
|
+
blake3,
|
|
1494
|
+
md5,
|
|
1495
|
+
size: content.length,
|
|
1496
|
+
hash: blake3
|
|
1013
1497
|
};
|
|
1014
1498
|
assetFiles.set(relativePath, content);
|
|
1015
1499
|
}
|
|
@@ -1044,10 +1528,32 @@ function toSandboxManifest(sandbox) {
|
|
|
1044
1528
|
...sandbox.maxInstances != null && { maxInstances: sandbox.maxInstances }
|
|
1045
1529
|
};
|
|
1046
1530
|
}
|
|
1047
|
-
|
|
1048
|
-
|
|
1531
|
+
/**
 * Detect the "fallbacks configured on a non-SPA app" mismatch. The platform
 * still applies `routing.fallbacks` rules in that case, but no
 * `/* -> /index.html` SPA shell is synthesised, so unmatched paths hit the
 * platform 404. Returns the warning text when the mismatch is present and
 * `null` otherwise. Pure function — callers decide how to surface it.
 */
function detectNonSpaFallbackWarning(assetConfig, fallbackRules) {
	const isSpa = assetConfig?.not_found_handling === "single-page-application";
	const hasFallbacks = Boolean(fallbackRules) && fallbackRules.length > 0;
	if (isSpa || !hasFallbacks) return null;
	return "routing.fallbacks is set but appType is not \"spa\"; fallbacks will be applied, but no \"/* -> /index.html\" SPA shell fallback is synthesised — unmatched paths fall through to the platform 404.";
}
|
|
1544
|
+
/**
|
|
1545
|
+
* Build a Void-app deploy manifest from the inferred bindings + per-feature
|
|
1546
|
+
* inputs. Pure shape transformation: reads no files, has no I/O.
|
|
1547
|
+
*
|
|
1548
|
+
* Note: callers are responsible for surfacing the non-SPA-fallbacks warning
|
|
1549
|
+
* (see `detectNonSpaFallbackWarning`) before invoking this builder. The
|
|
1550
|
+
* builder itself stays I/O-free.
|
|
1551
|
+
*/
|
|
1552
|
+
function buildVoidManifest(opts) {
|
|
1553
|
+
const { bindings, migrations, schedules, ssr, framework, revalidate, vars, queues, prerender, assetConfig, options, hashedAssetsPrefix, headerRules, redirectRules, fallbackRules, dialect, webSockets } = opts;
|
|
1049
1554
|
const manifest = {
|
|
1050
1555
|
version: 3,
|
|
1556
|
+
assetManifestVersion: 2,
|
|
1051
1557
|
bindings: {}
|
|
1052
1558
|
};
|
|
1053
1559
|
if (bindings.needsAuth) manifest.auth = true;
|
|
@@ -1064,14 +1570,10 @@ async function packageBuild(distDir, workerDirName, bindings, migrations, schedu
|
|
|
1064
1570
|
binding: options.bindingNames?.sandbox ?? options.sandbox.binding
|
|
1065
1571
|
});
|
|
1066
1572
|
}
|
|
1067
|
-
if (migrations && migrations.length > 0) {
|
|
1068
|
-
|
|
1069
|
-
|
|
1070
|
-
|
|
1071
|
-
};
|
|
1072
|
-
onProgress?.(`Packaging ${migrations.length} migration${migrations.length === 1 ? "" : "s"}...`);
|
|
1073
|
-
for (const m of migrations) formData.append(`migration:${m.name}`, new Blob([m.sql]), m.name);
|
|
1074
|
-
}
|
|
1573
|
+
if (migrations && migrations.length > 0) manifest.migrations = {
|
|
1574
|
+
dialect: dialect ?? "sqlite",
|
|
1575
|
+
pending: migrations.length
|
|
1576
|
+
};
|
|
1075
1577
|
if (schedules && schedules.length > 0) manifest.schedules = schedules;
|
|
1076
1578
|
if (queues && queues.length > 0) manifest.queues = queues;
|
|
1077
1579
|
if (ssr) manifest.ssr = true;
|
|
@@ -1089,10 +1591,6 @@ async function packageBuild(distDir, workerDirName, bindings, migrations, schedu
|
|
|
1089
1591
|
...chain.assetConfig
|
|
1090
1592
|
};
|
|
1091
1593
|
resolvedFallbackRules = chain.fallbackRules;
|
|
1092
|
-
} else if (fallbackRules && fallbackRules.length > 0) {
|
|
1093
|
-
const msg = "routing.fallbacks is set but appType is not \"spa\"; fallbacks will be applied, but no \"/* -> /index.html\" SPA shell fallback is synthesised — unmatched paths fall through to the platform 404.";
|
|
1094
|
-
onProgress?.(msg);
|
|
1095
|
-
console.warn(`[void] ${msg}`);
|
|
1096
1594
|
}
|
|
1097
1595
|
if (resolvedAssetConfig) manifest.assetConfig = resolvedAssetConfig;
|
|
1098
1596
|
if (vars && Object.keys(vars).length > 0) manifest.vars = vars;
|
|
@@ -1105,45 +1603,40 @@ async function packageBuild(distDir, workerDirName, bindings, migrations, schedu
|
|
|
1105
1603
|
if (resolvedFallbackRules && resolvedFallbackRules.length > 0) manifest.fallbackRules = stripDevOnlyRuleFields(resolvedFallbackRules);
|
|
1106
1604
|
const websocketManifest = toWebSocketManifest(webSockets);
|
|
1107
1605
|
if (websocketManifest) manifest.websocket = websocketManifest;
|
|
1108
|
-
|
|
1109
|
-
|
|
1110
|
-
|
|
1111
|
-
|
|
1112
|
-
|
|
1113
|
-
|
|
1114
|
-
|
|
1115
|
-
|
|
1116
|
-
|
|
1117
|
-
|
|
1118
|
-
const
|
|
1606
|
+
return manifest;
|
|
1607
|
+
}
|
|
1608
|
+
/**
 * Read every worker module under `workerDir` and return them as a
 * Map keyed by slash-normalised relative path. Files rejected by
 * `isWorkerModule` (build metadata, wrangler config, non-module extensions)
 * are skipped. A missing directory (ENOENT) yields an empty map; any other
 * listing error propagates. Each file is read from disk exactly once —
 * threading the buffers downstream is the caller's job.
 */
function collectWorkerFiles(workerDir) {
	const modules = /* @__PURE__ */ new Map();
	let paths;
	try {
		paths = collectFiles(workerDir);
	} catch (err) {
		// No worker directory is a legitimate "no modules" outcome.
		if (err instanceof Error && "code" in err && err.code === "ENOENT") return modules;
		throw err;
	}
	for (const absPath of paths) {
		const rel = toSlash(relative(workerDir, absPath));
		if (!isWorkerModule(rel)) continue;
		modules.set(rel, readFileSync(absPath));
	}
	return modules;
}
|
|
1139
1631
|
/**
|
|
1140
|
-
*
|
|
1632
|
+
* Build a static-deploy manifest. Pure shape transformation — no I/O.
|
|
1141
1633
|
*/
|
|
1142
|
-
|
|
1143
|
-
const
|
|
1634
|
+
function buildStaticManifest(opts) {
|
|
1635
|
+
const { appType, hashedAssetsPrefix, headerRules, redirectRules, fallbackRules } = opts;
|
|
1144
1636
|
const { assetConfig, fallbackRules: resolvedFallbackRules } = resolveSpaFallbackChain(appType, fallbackRules);
|
|
1145
1637
|
const manifest = {
|
|
1146
1638
|
version: 3,
|
|
1639
|
+
assetManifestVersion: 2,
|
|
1147
1640
|
bindings: {},
|
|
1148
1641
|
type: appType,
|
|
1149
1642
|
assetConfig
|
|
@@ -1152,36 +1645,16 @@ async function packageStaticBuild(outputDir, appType, neededAssets, hashedAssets
|
|
|
1152
1645
|
if (headerRules && headerRules.length > 0) manifest.headerRules = headerRules;
|
|
1153
1646
|
if (redirectRules && redirectRules.length > 0) manifest.redirectRules = stripDevOnlyRuleFields(redirectRules);
|
|
1154
1647
|
if (resolvedFallbackRules && resolvedFallbackRules.length > 0) manifest.fallbackRules = stripDevOnlyRuleFields(resolvedFallbackRules);
|
|
1155
|
-
|
|
1156
|
-
const { isIgnored, assetsIgnoreFilePresent } = createAssetsIgnoreFunction(outputDir);
|
|
1157
|
-
const files = collectFiles(outputDir);
|
|
1158
|
-
let processed = 0;
|
|
1159
|
-
for (const filePath of files) {
|
|
1160
|
-
processed++;
|
|
1161
|
-
await tickProgress(processed, files.length, onProgress, "Packaging assets");
|
|
1162
|
-
const relativePath = toSlash(relative(outputDir, filePath));
|
|
1163
|
-
if (isIgnored("/" + relativePath)) continue;
|
|
1164
|
-
checkWorkerJsAsset(relativePath, assetsIgnoreFilePresent);
|
|
1165
|
-
if (neededAssets && !neededAssets.has("/" + relativePath)) continue;
|
|
1166
|
-
const content = readFileSync(filePath);
|
|
1167
|
-
formData.append(`asset:${relativePath}`, new Blob([content]), relativePath);
|
|
1168
|
-
}
|
|
1169
|
-
if (neededAssets) formData.append("preflight", "true");
|
|
1170
|
-
return formData;
|
|
1648
|
+
return manifest;
|
|
1171
1649
|
}
|
|
1172
1650
|
/**
|
|
1173
|
-
*
|
|
1174
|
-
*
|
|
1175
|
-
* Unlike `packageBuild`, the worker and assets directories are absolute paths
|
|
1176
|
-
* (resolved from the project root + preset), and the worker directory may be
|
|
1177
|
-
* inside the assets directory (SvelteKit, Astro). Worker files are excluded
|
|
1178
|
-
* from asset collection when directories overlap.
|
|
1651
|
+
* Build a framework-deploy manifest. Pure shape transformation — no I/O.
|
|
1179
1652
|
*/
|
|
1180
|
-
|
|
1181
|
-
const formData = new FormData();
|
|
1653
|
+
function buildFrameworkManifest(opts) {
|
|
1182
1654
|
const po = opts.packageOptions;
|
|
1183
1655
|
const manifest = {
|
|
1184
1656
|
version: 3,
|
|
1657
|
+
assetManifestVersion: 2,
|
|
1185
1658
|
bindings: {}
|
|
1186
1659
|
};
|
|
1187
1660
|
const hasMigrations = opts.migrations && opts.migrations.length > 0;
|
|
@@ -1190,14 +1663,10 @@ async function packageFrameworkBuild(opts) {
|
|
|
1190
1663
|
if (opts.bindings.needsKV) manifest.bindings.kv = [po?.bindingNames?.kv ?? "KV"];
|
|
1191
1664
|
if (opts.bindings.needsR2) manifest.bindings.r2 = [po?.bindingNames?.r2 ?? "STORAGE"];
|
|
1192
1665
|
if (opts.bindings.needsAI) manifest.bindings.ai = true;
|
|
1193
|
-
if (opts.migrations && opts.migrations.length > 0) {
|
|
1194
|
-
|
|
1195
|
-
|
|
1196
|
-
|
|
1197
|
-
};
|
|
1198
|
-
opts.onProgress?.(`Packaging ${opts.migrations.length} migration${opts.migrations.length === 1 ? "" : "s"}...`);
|
|
1199
|
-
for (const m of opts.migrations) formData.append(`migration:${m.name}`, new Blob([m.sql]), m.name);
|
|
1200
|
-
}
|
|
1666
|
+
if (opts.migrations && opts.migrations.length > 0) manifest.migrations = {
|
|
1667
|
+
dialect: opts.dialect ?? "sqlite",
|
|
1668
|
+
pending: opts.migrations.length
|
|
1669
|
+
};
|
|
1201
1670
|
if (opts.schedules && opts.schedules.length > 0) manifest.schedules = opts.schedules;
|
|
1202
1671
|
if (opts.queues && opts.queues.length > 0) manifest.queues = opts.queues;
|
|
1203
1672
|
if (opts.revalidate != null && opts.revalidate !== 0) manifest.revalidate = opts.revalidate;
|
|
@@ -1217,38 +1686,7 @@ async function packageFrameworkBuild(opts) {
|
|
|
1217
1686
|
if (opts.fallbackRules && opts.fallbackRules.length > 0) manifest.fallbackRules = stripDevOnlyRuleFields(opts.fallbackRules);
|
|
1218
1687
|
const websocketManifest = toWebSocketManifest(opts.webSockets);
|
|
1219
1688
|
if (websocketManifest) manifest.websocket = websocketManifest;
|
|
1220
|
-
|
|
1221
|
-
const workerDirNorm = opts.workerDir.replace(/\\/g, "/");
|
|
1222
|
-
const assetsDirNorm = opts.assetsDir.replace(/\\/g, "/");
|
|
1223
|
-
const isOverlap = workerDirNorm.startsWith(assetsDirNorm + "/") || workerDirNorm === assetsDirNorm;
|
|
1224
|
-
opts.onProgress?.("Packaging worker files...");
|
|
1225
|
-
const workerFiles = collectFiles(opts.workerDir);
|
|
1226
|
-
for (const filePath of workerFiles) {
|
|
1227
|
-
const relativePath = toSlash(relative(opts.workerDir, filePath));
|
|
1228
|
-
if (!isWorkerModule(relativePath)) continue;
|
|
1229
|
-
const content = readFileSync(filePath);
|
|
1230
|
-
formData.append(`worker:${relativePath}`, new Blob([content]), relativePath);
|
|
1231
|
-
}
|
|
1232
|
-
try {
|
|
1233
|
-
const { isIgnored, assetsIgnoreFilePresent } = createAssetsIgnoreFunction(opts.assetsDir, opts.assetIgnorePatterns);
|
|
1234
|
-
const assetFiles = collectFiles(opts.assetsDir);
|
|
1235
|
-
let processed = 0;
|
|
1236
|
-
for (const filePath of assetFiles) {
|
|
1237
|
-
processed++;
|
|
1238
|
-
await tickProgress(processed, assetFiles.length, opts.onProgress, "Packaging assets");
|
|
1239
|
-
if (isOverlap && filePath.replace(/\\/g, "/").startsWith(workerDirNorm)) continue;
|
|
1240
|
-
const relativePath = toSlash(relative(opts.assetsDir, filePath));
|
|
1241
|
-
if (isIgnored("/" + relativePath)) continue;
|
|
1242
|
-
checkWorkerJsAsset(relativePath, assetsIgnoreFilePresent);
|
|
1243
|
-
if (opts.neededAssets && !opts.neededAssets.has("/" + relativePath)) continue;
|
|
1244
|
-
const content = readFileSync(filePath);
|
|
1245
|
-
formData.append(`asset:${relativePath}`, new Blob([content]), relativePath);
|
|
1246
|
-
}
|
|
1247
|
-
} catch (err) {
|
|
1248
|
-
if (err instanceof Error && "code" in err && err.code === "ENOENT") {} else throw err;
|
|
1249
|
-
}
|
|
1250
|
-
if (opts.neededAssets) formData.append("preflight", "true");
|
|
1251
|
-
return formData;
|
|
1689
|
+
return manifest;
|
|
1252
1690
|
}
|
|
1253
1691
|
/**
|
|
1254
1692
|
* Check if a file (by relative path) should be included as a worker module.
|
|
@@ -1528,6 +1966,7 @@ var deploy_exports = /* @__PURE__ */ __exportAll({
|
|
|
1528
1966
|
resolveStaticBuildCommand: () => resolveStaticBuildCommand,
|
|
1529
1967
|
resolveWorkerDirName: () => resolveWorkerDirName,
|
|
1530
1968
|
runDeploy: () => runDeploy,
|
|
1969
|
+
runStaticDeploy: () => runStaticDeploy,
|
|
1531
1970
|
warnRedundantForceOn3xx: () => warnRedundantForceOn3xx
|
|
1532
1971
|
});
|
|
1533
1972
|
const drizzleKitBin = join(fileURLToPath(import.meta.resolve("drizzle-kit")), "..", "bin.cjs");
|
|
@@ -1573,6 +2012,28 @@ function resolveDeployWranglerCompat(root, config, buildOutputDir) {
|
|
|
1573
2012
|
/**
 * CLI `void deploy` entry point. Opens the structured deploy log file
 * (mirrored to stderr when `--debug` is passed or VOID_DEPLOY_DEBUG=1 is
 * set), records deploy start/end events, delegates the actual work to
 * `runDeployInner`, and always closes the log — success or failure.
 */
async function runDeploy(root, options) {
	console.log();
	ge(cliTitle("deploy"));
	const cliLog = openDeployLog({ mirrorToStderr: options?.debug === true || process.env.VOID_DEPLOY_DEBUG === "1" });
	// Record invocation context up front so a failed deploy's log file is
	// self-describing (flags, node version, platform).
	cliLog.info("deploy_start", {
		cwd: root,
		projectSlug: options?.projectSlug,
		dir: options?.dir,
		spa: options?.spa === true ? true : void 0,
		skipBuild: options?.skipBuild === true ? true : void 0,
		nodeVersion: process.version,
		platform: process.platform,
		arch: process.arch
	});
	try {
		await runDeployInner(root, options, cliLog);
		cliLog.info("deploy_end", { ok: true });
	} catch (err) {
		// Log the failure with its cause chain, then rethrow unchanged so the
		// CLI's outer error handling still sees the original error.
		cliLog.error("deploy_end", err, { ok: false });
		throw err;
	} finally {
		// Best-effort close; log teardown must never mask the deploy result.
		await cliLog.close().catch(() => {});
	}
}
|
|
2036
|
+
async function runDeployInner(root, options, cliLog) {
|
|
1576
2037
|
let reauthenticated = false;
|
|
1577
2038
|
while (true) try {
|
|
1578
2039
|
const token = getToken(root);
|
|
@@ -1580,38 +2041,54 @@ async function runDeploy(root, options) {
|
|
|
1580
2041
|
R.error("No auth token found. Run `void auth login` first.");
|
|
1581
2042
|
process.exit(1);
|
|
1582
2043
|
}
|
|
1583
|
-
const client = new PlatformClient(token, {
|
|
2044
|
+
const client = new PlatformClient(token, {
|
|
2045
|
+
root,
|
|
2046
|
+
cliLog
|
|
2047
|
+
});
|
|
1584
2048
|
const requestedProjectSlug = getRequestedProjectSlug(options);
|
|
1585
2049
|
let config = null;
|
|
1586
2050
|
if (requestedProjectSlug) {
|
|
1587
2051
|
config = resolveProjectBySlug(await client.listProjects(), requestedProjectSlug);
|
|
1588
|
-
if (!config) if (!process.stdin.isTTY) config = await createProjectFromSlug(root, client, requestedProjectSlug);
|
|
2052
|
+
if (!config) if (!process.stdin.isTTY) config = await createProjectFromSlug(root, client, requestedProjectSlug, cliLog);
|
|
1589
2053
|
else {
|
|
1590
2054
|
const shouldCreate = await ue({ message: `Project "${requestedProjectSlug}" does not exist. Create it?` });
|
|
1591
2055
|
if (q(shouldCreate) || !shouldCreate) {
|
|
1592
2056
|
R.info("Deploy cancelled.");
|
|
1593
2057
|
process.exit(0);
|
|
1594
2058
|
}
|
|
1595
|
-
config = await createProjectFromSlug(root, client, requestedProjectSlug);
|
|
2059
|
+
config = await createProjectFromSlug(root, client, requestedProjectSlug, cliLog);
|
|
1596
2060
|
}
|
|
1597
2061
|
} else config = readProjectConfig(root);
|
|
1598
2062
|
if (!config && !process.stdin.isTTY) {
|
|
1599
2063
|
R.error("No project specified. Set `VOID_PROJECT`, pass `--project <slug>`, or commit `.void/project.json`.");
|
|
1600
2064
|
process.exit(1);
|
|
1601
2065
|
}
|
|
1602
|
-
if (!config) config = await promptDeployProjectConfig(root, client);
|
|
2066
|
+
if (!config) config = await promptDeployProjectConfig(root, client, cliLog);
|
|
2067
|
+
cliLog.info("project_resolved", {
|
|
2068
|
+
projectId: config.projectId,
|
|
2069
|
+
slug: config.slug
|
|
2070
|
+
});
|
|
1603
2071
|
const voidConfig = readConfig(root);
|
|
2072
|
+
cliLog.info("config_resolved", {
|
|
2073
|
+
target: voidConfig.target,
|
|
2074
|
+
appType: voidConfig.inference?.appType,
|
|
2075
|
+
output: voidConfig.output
|
|
2076
|
+
});
|
|
1604
2077
|
const detected = detectFramework(root);
|
|
1605
2078
|
if (detected) {
|
|
1606
|
-
|
|
2079
|
+
cliLog.info("framework_detected", {
|
|
2080
|
+
name: detected.name,
|
|
2081
|
+
class: detected.class
|
|
2082
|
+
});
|
|
2083
|
+
const gitMetadata = collectGitDeployMetadata(root, options?.skipBuild ? null : getGitCommit(root));
|
|
1607
2084
|
const fwPreset = FRAMEWORK_PRESETS[detected.name];
|
|
1608
2085
|
if (detected.class === "b" || detected.class === "c") {
|
|
1609
2086
|
if (!fwPreset) {
|
|
1610
2087
|
R.error(`No deploy preset found for framework "${detected.name}".`);
|
|
1611
2088
|
process.exit(1);
|
|
1612
2089
|
}
|
|
1613
|
-
await runFrameworkDeploy(root, config, client, detected, fwPreset, voidConfig, options?.skipBuild,
|
|
1614
|
-
} else await runFullDeploy(root, config, client, fwPreset, options?.skipBuild,
|
|
2090
|
+
await runFrameworkDeploy(root, config, client, detected, fwPreset, voidConfig, options?.skipBuild, gitMetadata, cliLog);
|
|
2091
|
+
} else await runFullDeploy(root, config, client, fwPreset, options?.skipBuild, gitMetadata, detected, cliLog);
|
|
1615
2092
|
return;
|
|
1616
2093
|
}
|
|
1617
2094
|
let preset = null;
|
|
@@ -1635,14 +2112,14 @@ async function runDeploy(root, options) {
|
|
|
1635
2112
|
dir: options?.dir,
|
|
1636
2113
|
spa: options?.spa
|
|
1637
2114
|
});
|
|
1638
|
-
const
|
|
2115
|
+
const gitMetadata = collectGitDeployMetadata(root, options?.skipBuild ? null : getGitCommit(root));
|
|
1639
2116
|
if (preset) {
|
|
1640
2117
|
preset = {
|
|
1641
2118
|
...preset,
|
|
1642
2119
|
buildCommand: resolveStaticBuildCommand(preset, voidConfig.inference?.build, Boolean(options?.dir))
|
|
1643
2120
|
};
|
|
1644
|
-
await runStaticDeploy(root, config, client, preset, options?.skipBuild,
|
|
1645
|
-
} else await runFullDeploy(root, config, client, void 0, options?.skipBuild,
|
|
2121
|
+
await runStaticDeploy(root, config, client, preset, options?.skipBuild, gitMetadata, void 0, void 0, void 0, void 0, voidConfig.routing, cliLog);
|
|
2122
|
+
} else await runFullDeploy(root, config, client, void 0, options?.skipBuild, gitMetadata, void 0, cliLog);
|
|
1646
2123
|
return;
|
|
1647
2124
|
} catch (error) {
|
|
1648
2125
|
if (isExpiredTokenError(error)) {
|
|
@@ -1657,7 +2134,7 @@ async function runDeploy(root, options) {
|
|
|
1657
2134
|
throw error;
|
|
1658
2135
|
}
|
|
1659
2136
|
}
|
|
1660
|
-
async function promptDeployProjectConfig(root, client) {
|
|
2137
|
+
async function promptDeployProjectConfig(root, client, cliLog) {
|
|
1661
2138
|
const projects = await client.listProjects();
|
|
1662
2139
|
if (await promptProjectSetupAction("Set up a Void project for deploy:", { includeLink: projects.length > 0 }) === "create") return promptAndCreateProject(root, client);
|
|
1663
2140
|
const result = await promptProjectSelection(projects, "Select a project to deploy to:", root, { includeCreate: false });
|
|
@@ -1668,6 +2145,10 @@ async function promptDeployProjectConfig(root, client) {
|
|
|
1668
2145
|
};
|
|
1669
2146
|
writeProjectConfig(root, config);
|
|
1670
2147
|
R.step(`Linked to ${import_picocolors.default.blue(config.slug)}`);
|
|
2148
|
+
cliLog?.info("project_linked", {
|
|
2149
|
+
projectId: config.projectId,
|
|
2150
|
+
slug: config.slug
|
|
2151
|
+
});
|
|
1671
2152
|
return config;
|
|
1672
2153
|
}
|
|
1673
2154
|
function resolveStaticBuildCommand(preset, buildOverride, hasExplicitDir = false) {
|
|
@@ -1729,49 +2210,182 @@ function applyDeployEvent(event, s) {
|
|
|
1729
2210
|
}
|
|
1730
2211
|
return { kind: "continue" };
|
|
1731
2212
|
}
|
|
1732
|
-
|
|
2213
|
+
/**
|
|
2214
|
+
* Apply a streamed `DeployEvent` iterator to the spinner / cliLog and
|
|
2215
|
+
* resolve with the terminal `done` event (or throw on terminal error /
|
|
2216
|
+
* connection loss).
|
|
2217
|
+
*/
|
|
2218
|
+
async function streamDeployEvents(projectId, events, s, cliLog) {
	const startedAt = Date.now();
	// Deployment id arrives in the stream's "start" event; null until then.
	let deploymentId = null;
	let interrupted = false;
	cliLog?.info("stream_start", { projectId });
	// Ctrl-C handling: stop the spinner, tell the user which deployment was in
	// flight (if known), flush the log best-effort, and exit with 130 (SIGINT).
	const onSigint = () => {
		interrupted = true;
		s.stop("Deploy interrupted");
		if (deploymentId) R.info(`Deployment: ${deploymentId}`);
		else R.info("Deployment id was not received before the interrupt.");
		cliLog?.warn("stream_interrupted", { deploymentId });
		cliLog?.close().catch(() => {});
		process.exit(130);
	};
	process.once("SIGINT", onSigint);
	try {
		for await (const event of events) {
			const result = applyDeployEvent(event, s);
			if (result.kind === "start") {
				deploymentId = result.deploymentId;
				cliLog?.info("stream_deployment_id", { deploymentId });
			} else if (result.kind === "done") {
				// Terminal success: log the summary and hand the event back.
				cliLog?.info("deploy_success", {
					deploymentId,
					url: result.event.url,
					assets: result.event.assets,
					workers: result.event.workers,
					durationMs: Date.now() - startedAt
				});
				return result.event;
			} else if (result.kind === "error") {
				// Terminal server-side failure: log, then surface a formatted
				// message that points at the deployment id and local log file.
				cliLog?.error("deploy_server_error", void 0, {
					deploymentId,
					message: result.message
				});
				throw new DeployEventError(formatDeployFailureMessage(result.message, deploymentId, cliLog?.path ?? null));
			}
		}
	} catch (err) {
		// Already-formatted terminal errors pass through untouched; SIGINT-era
		// errors do too (the handler has printed/exited its own messaging).
		if (err instanceof DeployEventError) throw err;
		if (interrupted) throw err;
		cliLog?.error("stream_deploy_caught", err, { deploymentId });
		s.stop("Deploy failed");
		throw formatStreamDeployError(err, deploymentId, cliLog?.path ?? null);
	} finally {
		process.removeListener("SIGINT", onSigint);
	}
	// The iterator ended without a terminal "done"/"error" event — treat as a
	// lost connection rather than a silent success.
	cliLog?.warn("stream_no_terminal_event", { deploymentId });
	s.stop("Deploy failed");
	throw new Error(formatConnectionLostMessage(deploymentId, cliLog?.path ?? null));
}
|
|
2269
|
+
/**
|
|
2270
|
+
* Direct-to-R2 deploy orchestration. Issues presigned PUT URLs for each
|
|
2271
|
+
* needed asset, uploads bytes directly to R2, then calls the manifest-only
|
|
2272
|
+
* deploy endpoint to finalize.
|
|
2273
|
+
*
|
|
2274
|
+
* Key invariants:
|
|
2275
|
+
* 1. No needed assets → skip presign + R2 PUT phase entirely (saves a
|
|
2276
|
+
* round-trip when every asset was already present from a prior deploy).
|
|
2277
|
+
* 2. Asset bytes come exclusively from the in-memory `assetFiles` cache
|
|
2278
|
+
* computed by `collectAndHashAssets` — files are NEVER re-read from
|
|
2279
|
+
* disk during this flow (Codex Finding #2 in design doc 0068 review).
|
|
2280
|
+
* 3. R2 PUT failures (4xx, 5xx, network) are surfaced with the asset
|
|
2281
|
+
* path + hash so the user can correlate against the JSONL trace.
|
|
2282
|
+
*/
|
|
2283
|
+
async function streamDirectR2Deploy(client, projectId, manifest, assetManifest, assetFiles, needed, gitMetadata, preflightUsed, workerFiles, s, cliLog) {
|
|
2284
|
+
if (needed.length > 0) {
|
|
2285
|
+
const neededPaths = new Set(needed);
|
|
2286
|
+
const preBufferCount = assetFiles.size;
|
|
2287
|
+
const allPaths = Array.from(assetFiles.keys());
|
|
2288
|
+
for (const path of allPaths) if (!neededPaths.has(path)) assetFiles.delete(path);
|
|
2289
|
+
if (assetFiles.size !== preBufferCount) cliLog?.info("asset_cache_pruned", {
|
|
2290
|
+
before: preBufferCount,
|
|
2291
|
+
after: assetFiles.size
|
|
2292
|
+
});
|
|
2293
|
+
s.message(`Requesting upload URLs for ${needed.length} asset(s)...`);
|
|
2294
|
+
const seenBlake3 = /* @__PURE__ */ new Set();
|
|
2295
|
+
const uniqueNeeded = [];
|
|
2296
|
+
for (const path of needed) {
|
|
2297
|
+
const entry = assetManifest[path];
|
|
2298
|
+
if (!entry) throw new Error(`internal: preflight returned ${path} but assetManifest has no entry`);
|
|
2299
|
+
if (seenBlake3.has(entry.blake3)) continue;
|
|
2300
|
+
seenBlake3.add(entry.blake3);
|
|
2301
|
+
uniqueNeeded.push(path);
|
|
2302
|
+
}
|
|
2303
|
+
const assetsToUpload = uniqueNeeded.map((path) => {
|
|
2304
|
+
const entry = assetManifest[path];
|
|
2305
|
+
return {
|
|
2306
|
+
blake3: entry.blake3,
|
|
2307
|
+
md5: entry.md5,
|
|
2308
|
+
size: entry.size,
|
|
2309
|
+
contentType: getAssetContentType(path)
|
|
2310
|
+
};
|
|
2311
|
+
});
|
|
2312
|
+
cliLog?.info("presign_request_start", {
|
|
2313
|
+
assetCount: assetsToUpload.length,
|
|
2314
|
+
dedupedFrom: needed.length
|
|
2315
|
+
});
|
|
2316
|
+
const presignStartedAt = Date.now();
|
|
2317
|
+
const urls = await client.requestUploadUrls(projectId, assetsToUpload);
|
|
2318
|
+
cliLog?.info("presign_request_end", {
|
|
2319
|
+
urlsReceived: Object.keys(urls).length,
|
|
2320
|
+
durationMs: Date.now() - presignStartedAt
|
|
2321
|
+
});
|
|
2322
|
+
const items = uniqueNeeded.map((path) => {
|
|
2323
|
+
const entry = assetManifest[path];
|
|
2324
|
+
const presigned = urls[entry.blake3];
|
|
2325
|
+
if (!presigned) throw new Error(`Platform did not return an upload URL for ${path} (blake3=${entry.blake3}). This indicates a server-side bug or a stale client.`);
|
|
2326
|
+
const body = assetFiles.get(path);
|
|
2327
|
+
if (!body) throw new Error(`internal: asset ${path} declared in manifest but missing from buffer cache`);
|
|
2328
|
+
return {
|
|
2329
|
+
path,
|
|
2330
|
+
hash: entry.blake3,
|
|
2331
|
+
body,
|
|
2332
|
+
url: presigned.url,
|
|
2333
|
+
headers: presigned.headers
|
|
2334
|
+
};
|
|
2335
|
+
});
|
|
2336
|
+
s.message(`Uploading ${items.length} asset(s) to R2...`);
|
|
2337
|
+
await uploadAssetsToR2(items, cliLog, void 0, (item) => {
|
|
2338
|
+
assetFiles.delete(item.path);
|
|
2339
|
+
});
|
|
2340
|
+
} else cliLog?.info("presign_skipped", { reason: "no_needed_assets" });
|
|
2341
|
+
const payload = {
|
|
2342
|
+
manifest,
|
|
2343
|
+
assetManifest,
|
|
2344
|
+
source: getTokenSource(),
|
|
2345
|
+
commit: gitMetadata?.commit ?? null,
|
|
2346
|
+
repositoryUrl: gitMetadata?.repositoryUrl ?? null,
|
|
2347
|
+
commitUrl: gitMetadata?.commitUrl ?? null,
|
|
2348
|
+
gitRef: gitMetadata?.gitRef ?? null,
|
|
2349
|
+
preflight: preflightUsed ? "true" : null
|
|
2350
|
+
};
|
|
2351
|
+
if (workerFiles && workerFiles.size > 0) {
|
|
2352
|
+
const encoded = {};
|
|
2353
|
+
for (const [relPath, buf] of workerFiles) encoded[relPath] = buf.toString("base64");
|
|
2354
|
+
payload.workerFiles = encoded;
|
|
2355
|
+
}
|
|
2356
|
+
s.message("Finalizing deploy...");
|
|
2357
|
+
cliLog?.info("finalize_start", {
|
|
2358
|
+
workerFileCount: workerFiles?.size ?? 0,
|
|
2359
|
+
assetCount: Object.keys(assetManifest).length
|
|
2360
|
+
});
|
|
2361
|
+
const finalizeStartedAt = Date.now();
|
|
2362
|
+
try {
|
|
2363
|
+
return await streamDeployEvents(projectId, client.finalizeDeploy(projectId, payload), s, cliLog);
|
|
2364
|
+
} finally {
|
|
2365
|
+
cliLog?.info("finalize_end", { durationMs: Date.now() - finalizeStartedAt });
|
|
2366
|
+
}
|
|
1760
2367
|
}
|
|
1761
2368
|
function formatKnownAssetSummary(result, skipped) {
|
|
1762
2369
|
const uploaded = result.assets - skipped;
|
|
1763
2370
|
return skipped > 0 ? `${result.assets} static asset(s) (${skipped} unchanged, ${uploaded} uploaded)` : `${result.assets} static asset(s)`;
|
|
1764
2371
|
}
|
|
1765
|
-
async function runStaticDeploy(root, config, client, preset, skipBuild,
|
|
2372
|
+
async function runStaticDeploy(root, config, client, preset, skipBuild, gitMetadata, hashedAssetsPrefix, headerRules, redirectRules, fallbackRules, routing, cliLog) {
|
|
1766
2373
|
const typeLabel = preset.appType === "spa" ? "Static SPA" : "Static Site";
|
|
1767
2374
|
R.info(`${typeLabel} deploy`);
|
|
2375
|
+
cliLog?.info("deploy_mode", {
|
|
2376
|
+
mode: "static",
|
|
2377
|
+
appType: preset.appType
|
|
2378
|
+
});
|
|
1768
2379
|
if (preset.buildCommand && !skipBuild) {
|
|
1769
2380
|
R.step("Building...");
|
|
2381
|
+
cliLog?.info("build_start", { command: preset.buildCommand });
|
|
2382
|
+
const buildStartedAt = Date.now();
|
|
1770
2383
|
execSync(preset.buildCommand, {
|
|
1771
2384
|
cwd: root,
|
|
1772
2385
|
stdio: "inherit"
|
|
1773
2386
|
});
|
|
1774
|
-
|
|
2387
|
+
cliLog?.info("build_end", { durationMs: Date.now() - buildStartedAt });
|
|
2388
|
+
} else if (skipBuild) cliLog?.info("build_skipped", { reason: "skipBuild" });
|
|
1775
2389
|
if (!existsSync(preset.outputDir)) {
|
|
1776
2390
|
R.error(`deploy: Output directory '${preset.outputDir}' not found. Run the build first.`);
|
|
1777
2391
|
process.exit(1);
|
|
@@ -1793,17 +2407,32 @@ async function runStaticDeploy(root, config, client, preset, skipBuild, commit,
|
|
|
1793
2407
|
const s = createSpinner();
|
|
1794
2408
|
const onProgress = (msg) => s.message(msg);
|
|
1795
2409
|
s.start("Checking for changes...");
|
|
1796
|
-
|
|
2410
|
+
cliLog?.info("preflight_start", { dir: preset.outputDir });
|
|
2411
|
+
const { assetManifest, assetFiles } = await collectAndHashAssets(preset.outputDir, onProgress);
|
|
1797
2412
|
const { needed, skipped } = await client.preflight(config.projectId, assetManifest);
|
|
1798
2413
|
s.stop("Checked for changes");
|
|
2414
|
+
cliLog?.info("preflight_end", {
|
|
2415
|
+
total: Object.keys(assetManifest).length,
|
|
2416
|
+
needed: needed.length,
|
|
2417
|
+
skipped
|
|
2418
|
+
});
|
|
1799
2419
|
if (skipped > 0) R.info(`Skipping ${skipped} unchanged asset(s)`);
|
|
1800
2420
|
s.start("Packaging...");
|
|
2421
|
+
cliLog?.info("package_start", { mode: "static" });
|
|
2422
|
+
const packageStartedAt = Date.now();
|
|
1801
2423
|
const neededSet = skipped > 0 ? new Set(needed) : null;
|
|
1802
|
-
const
|
|
1803
|
-
|
|
1804
|
-
|
|
1805
|
-
|
|
1806
|
-
|
|
2424
|
+
const manifest = buildStaticManifest({
|
|
2425
|
+
appType: preset.appType,
|
|
2426
|
+
hashedAssetsPrefix: hashedAssetsPrefix ?? "assets",
|
|
2427
|
+
headerRules,
|
|
2428
|
+
redirectRules,
|
|
2429
|
+
fallbackRules
|
|
2430
|
+
});
|
|
2431
|
+
cliLog?.info("package_end", {
|
|
2432
|
+
mode: "static",
|
|
2433
|
+
durationMs: Date.now() - packageStartedAt
|
|
2434
|
+
});
|
|
2435
|
+
const result = await streamDirectR2Deploy(client, config.projectId, manifest, assetManifest, assetFiles, needed, gitMetadata, neededSet !== null, void 0, s, cliLog);
|
|
1807
2436
|
Se([`${typeLabel} — ${formatKnownAssetSummary(result, skipped)}`].join("\n"), result.url);
|
|
1808
2437
|
ye("Done!");
|
|
1809
2438
|
}
|
|
@@ -1821,17 +2450,24 @@ function warnRedundantForceOn3xx(count) {
|
|
|
1821
2450
|
/**
|
|
1822
2451
|
* Class B/C framework deploy: build with framework CLI, package with preset output paths.
|
|
1823
2452
|
*/
|
|
1824
|
-
async function runFrameworkDeploy(root, config, client, detected, fwPreset, voidConfig, skipBuild,
|
|
2453
|
+
async function runFrameworkDeploy(root, config, client, detected, fwPreset, voidConfig, skipBuild, gitMetadata, cliLog) {
|
|
1825
2454
|
R.info(`${detected.name} framework deploy`);
|
|
2455
|
+
cliLog?.info("deploy_mode", {
|
|
2456
|
+
mode: "framework",
|
|
2457
|
+
framework: detected.name
|
|
2458
|
+
});
|
|
1826
2459
|
const dialect = getDatabaseDialect(voidConfig);
|
|
1827
2460
|
const buildCmd = voidConfig.inference?.build ?? formatProjectCommand(root, fwPreset.buildCommand);
|
|
1828
2461
|
if (!skipBuild) {
|
|
1829
2462
|
R.step(`Building (${buildCmd})...`);
|
|
2463
|
+
cliLog?.info("build_start", { command: buildCmd });
|
|
2464
|
+
const buildStartedAt = Date.now();
|
|
1830
2465
|
execSync(voidConfig.inference?.build ?? resolveProjectCommand(root, fwPreset.buildCommand), {
|
|
1831
2466
|
cwd: root,
|
|
1832
2467
|
stdio: "inherit"
|
|
1833
2468
|
});
|
|
1834
|
-
|
|
2469
|
+
cliLog?.info("build_end", { durationMs: Date.now() - buildStartedAt });
|
|
2470
|
+
} else cliLog?.info("build_skipped", { reason: "skipBuild" });
|
|
1835
2471
|
const workerDir = join(root, fwPreset.workerDir);
|
|
1836
2472
|
const assetsDir = join(root, fwPreset.assetsDir);
|
|
1837
2473
|
const frameworkAssetIgnorePatterns = fwPreset.workerDir === fwPreset.assetsDir && fwPreset.workerMain === "_worker.js" ? [`/${fwPreset.workerMain}`, "/_routes.json"] : [];
|
|
@@ -1931,45 +2567,54 @@ async function runFrameworkDeploy(root, config, client, detected, fwPreset, void
|
|
|
1931
2567
|
const s = createSpinner();
|
|
1932
2568
|
const onProgress = (msg) => s.message(msg);
|
|
1933
2569
|
s.start("Checking for changes...");
|
|
1934
|
-
|
|
2570
|
+
cliLog?.info("preflight_start", { dir: assetsDir });
|
|
2571
|
+
const { assetManifest, assetFiles } = await collectAndHashAssets(assetsDir, onProgress, { ignorePatterns: frameworkAssetIgnorePatterns });
|
|
1935
2572
|
const { needed, skipped } = await client.preflight(config.projectId, assetManifest, true);
|
|
1936
2573
|
s.stop("Checked for changes");
|
|
2574
|
+
cliLog?.info("preflight_end", {
|
|
2575
|
+
total: Object.keys(assetManifest).length,
|
|
2576
|
+
needed: needed.length,
|
|
2577
|
+
skipped
|
|
2578
|
+
});
|
|
1937
2579
|
if (skipped > 0) R.info(`Skipping ${skipped} unchanged asset(s)`);
|
|
1938
2580
|
s.start("Packaging...");
|
|
2581
|
+
cliLog?.info("package_start", {
|
|
2582
|
+
mode: "framework",
|
|
2583
|
+
framework: detected.name
|
|
2584
|
+
});
|
|
2585
|
+
const packageStartedAt = Date.now();
|
|
1939
2586
|
const neededSet = skipped > 0 ? new Set(needed) : null;
|
|
1940
|
-
const
|
|
2587
|
+
const packageOptions = {
|
|
2588
|
+
bindingNames: resolveBindingNames(voidConfig.inference?.bindings),
|
|
2589
|
+
workerMain: actualWorkerMain,
|
|
2590
|
+
compatibilityDate: wranglerCompat.compatibilityDate,
|
|
2591
|
+
compatibilityFlags: wranglerCompat.compatibilityFlags
|
|
2592
|
+
};
|
|
2593
|
+
const manifest = buildFrameworkManifest({
|
|
1941
2594
|
frameworkName: detected.name,
|
|
1942
|
-
workerDir: actualWorkerDir,
|
|
1943
2595
|
workerMain: actualWorkerMain,
|
|
1944
|
-
assetsDir,
|
|
1945
2596
|
bindings,
|
|
1946
2597
|
migrations: validatedMigrations,
|
|
1947
2598
|
schedules,
|
|
1948
2599
|
queues,
|
|
1949
2600
|
revalidate,
|
|
2601
|
+
revalidateQueryAllowlist: voidConfig.routing?.revalidateQueryAllowlist,
|
|
1950
2602
|
prerender,
|
|
1951
2603
|
assetConfig: { not_found_handling: "none" },
|
|
1952
2604
|
vars: envVars,
|
|
1953
|
-
packageOptions
|
|
1954
|
-
bindingNames: resolveBindingNames(voidConfig.inference?.bindings),
|
|
1955
|
-
workerMain: actualWorkerMain,
|
|
1956
|
-
compatibilityDate: wranglerCompat.compatibilityDate,
|
|
1957
|
-
compatibilityFlags: wranglerCompat.compatibilityFlags
|
|
1958
|
-
},
|
|
1959
|
-
revalidateQueryAllowlist: voidConfig.routing?.revalidateQueryAllowlist,
|
|
1960
|
-
neededAssets: neededSet,
|
|
2605
|
+
packageOptions,
|
|
1961
2606
|
hashedAssetsPrefix: fwPreset.hashedAssetsPrefix ?? "assets",
|
|
1962
2607
|
headerRules,
|
|
1963
2608
|
redirectRules,
|
|
1964
2609
|
fallbackRules,
|
|
1965
|
-
dialect
|
|
1966
|
-
assetIgnorePatterns: frameworkAssetIgnorePatterns,
|
|
1967
|
-
onProgress
|
|
2610
|
+
dialect
|
|
1968
2611
|
});
|
|
1969
|
-
|
|
1970
|
-
|
|
1971
|
-
|
|
1972
|
-
|
|
2612
|
+
const workerFiles = collectWorkerFiles(actualWorkerDir);
|
|
2613
|
+
cliLog?.info("package_end", {
|
|
2614
|
+
mode: "framework",
|
|
2615
|
+
durationMs: Date.now() - packageStartedAt
|
|
2616
|
+
});
|
|
2617
|
+
const result = await streamDirectR2Deploy(client, config.projectId, manifest, assetManifest, assetFiles, needed, gitMetadata, neededSet !== null, workerFiles, s, cliLog);
|
|
1973
2618
|
const summary = [`${result.workers} worker module(s), ${formatKnownAssetSummary(result, skipped)}`];
|
|
1974
2619
|
if (result.migrations) summary.push(`${result.migrations} migration(s) applied`);
|
|
1975
2620
|
if (schedules.length > 0) summary.push(`${schedules.length} cron job(s) scheduled`);
|
|
@@ -1982,10 +2627,16 @@ async function runFrameworkDeploy(root, config, client, detected, fwPreset, void
|
|
|
1982
2627
|
cleanupWrapper(root);
|
|
1983
2628
|
ye("Done!");
|
|
1984
2629
|
}
|
|
1985
|
-
async function runFullDeploy(root, config, client, fwPreset, skipBuild,
|
|
2630
|
+
async function runFullDeploy(root, config, client, fwPreset, skipBuild, gitMetadata, detected, cliLog) {
|
|
2631
|
+
cliLog?.info("deploy_mode", {
|
|
2632
|
+
mode: "full",
|
|
2633
|
+
framework: detected?.name
|
|
2634
|
+
});
|
|
1986
2635
|
if (!skipBuild) {
|
|
1987
2636
|
const buildCmd = formatProjectCommand(root, "vite build");
|
|
1988
2637
|
R.step(`Building (${buildCmd})...`);
|
|
2638
|
+
cliLog?.info("build_start", { command: buildCmd });
|
|
2639
|
+
const buildStartedAt = Date.now();
|
|
1989
2640
|
try {
|
|
1990
2641
|
execSync(resolveProjectCommand(root, "vite build"), {
|
|
1991
2642
|
cwd: root,
|
|
@@ -1995,10 +2646,12 @@ async function runFullDeploy(root, config, client, fwPreset, skipBuild, commit,
|
|
|
1995
2646
|
VOID_DEPLOY_PROJECT_ID: config.projectId
|
|
1996
2647
|
}
|
|
1997
2648
|
});
|
|
2649
|
+
cliLog?.info("build_end", { durationMs: Date.now() - buildStartedAt });
|
|
1998
2650
|
} catch (err) {
|
|
2651
|
+
cliLog?.error("build_failed", err, { durationMs: Date.now() - buildStartedAt });
|
|
1999
2652
|
process.exit(getExitCode(err));
|
|
2000
2653
|
}
|
|
2001
|
-
}
|
|
2654
|
+
} else cliLog?.info("build_skipped", { reason: "skipBuild" });
|
|
2002
2655
|
const frameworkBuildCmd = fwPreset ? formatProjectCommand(root, fwPreset.buildCommand) : void 0;
|
|
2003
2656
|
const distDir = resolveDistDir(root, fwPreset, frameworkBuildCmd);
|
|
2004
2657
|
let assetsPrefix = "assets";
|
|
@@ -2112,11 +2765,12 @@ async function runFullDeploy(root, config, client, fwPreset, skipBuild, commit,
|
|
|
2112
2765
|
if (deployConfig.output === "static" && pageScan && !isSsr) {
|
|
2113
2766
|
if (pageScan.pages.every((p) => p.prerender && (p.params.length === 0 && !p.catchAll || p.hasGetPrerenderPaths)) && !(hasRoutes || hasMiddleware || hasWebSockets || authEnabled) && schedules.length === 0 && queues.length === 0) {
|
|
2114
2767
|
R.info("All pages prerendered — deploying as static site");
|
|
2768
|
+
cliLog?.info("full_to_static_redirect", { reason: "all_pages_prerendered" });
|
|
2115
2769
|
return runStaticDeploy(root, config, client, {
|
|
2116
2770
|
buildCommand: null,
|
|
2117
2771
|
outputDir: clientDir,
|
|
2118
2772
|
appType: "static"
|
|
2119
|
-
}, true,
|
|
2773
|
+
}, true, gitMetadata, assetsPrefix, headerRules, redirectRules, fallbackRules, void 0, cliLog);
|
|
2120
2774
|
}
|
|
2121
2775
|
}
|
|
2122
2776
|
const uniquePrerenderPaths = deployConfig.output === "static" || isFrameworkMode ? [] : isNodeTarget(deployConfig.target) ? await collectPrerenderPathsNode({
|
|
@@ -2146,22 +2800,64 @@ async function runFullDeploy(root, config, client, fwPreset, skipBuild, commit,
|
|
|
2146
2800
|
authEnabled
|
|
2147
2801
|
});
|
|
2148
2802
|
s.start("Checking for changes...");
|
|
2149
|
-
|
|
2803
|
+
cliLog?.info("preflight_start", { dir: clientDir });
|
|
2804
|
+
const { assetManifest, assetFiles } = await collectAndHashAssets(clientDir, onProgress);
|
|
2150
2805
|
const { needed, skipped } = await client.preflight(config.projectId, assetManifest, true);
|
|
2151
2806
|
s.stop("Checked for changes");
|
|
2807
|
+
cliLog?.info("preflight_end", {
|
|
2808
|
+
total: Object.keys(assetManifest).length,
|
|
2809
|
+
needed: needed.length,
|
|
2810
|
+
skipped
|
|
2811
|
+
});
|
|
2152
2812
|
if (skipped > 0) R.info(`Skipping ${skipped} unchanged asset(s)`);
|
|
2153
2813
|
s.start("Packaging...");
|
|
2814
|
+
cliLog?.info("package_start", {
|
|
2815
|
+
mode: "full",
|
|
2816
|
+
isSsr,
|
|
2817
|
+
hasRoutes,
|
|
2818
|
+
hasMiddleware,
|
|
2819
|
+
hasWebSockets,
|
|
2820
|
+
authEnabled,
|
|
2821
|
+
framework: detected?.name
|
|
2822
|
+
});
|
|
2823
|
+
const packageStartedAt = Date.now();
|
|
2154
2824
|
const neededSet = skipped > 0 ? new Set(needed) : null;
|
|
2155
|
-
const
|
|
2825
|
+
const voidPackageOptions = {
|
|
2156
2826
|
compatibilityDate: wranglerCompat?.compatibilityDate,
|
|
2157
2827
|
compatibilityFlags: wranglerCompat?.compatibilityFlags,
|
|
2158
2828
|
...sandboxConfig && { sandbox: sandboxConfig },
|
|
2159
2829
|
revalidateQueryAllowlist: deployConfig.routing?.revalidateQueryAllowlist
|
|
2160
|
-
}
|
|
2161
|
-
|
|
2162
|
-
|
|
2163
|
-
|
|
2164
|
-
|
|
2830
|
+
};
|
|
2831
|
+
const fallbackWarning = detectNonSpaFallbackWarning(assetConfig, fallbackRules);
|
|
2832
|
+
if (fallbackWarning) {
|
|
2833
|
+
onProgress?.(fallbackWarning);
|
|
2834
|
+
console.warn(`[void] ${fallbackWarning}`);
|
|
2835
|
+
}
|
|
2836
|
+
const manifest = buildVoidManifest({
|
|
2837
|
+
bindings: effectiveBindings,
|
|
2838
|
+
migrations: validatedMigrations,
|
|
2839
|
+
schedules,
|
|
2840
|
+
ssr: isSsr || hasRoutes || hasMiddleware || hasWebSockets || authEnabled,
|
|
2841
|
+
framework: isFrameworkMode ? detected.name : void 0,
|
|
2842
|
+
revalidate,
|
|
2843
|
+
vars: envVars,
|
|
2844
|
+
queues,
|
|
2845
|
+
prerender: uniquePrerenderPaths,
|
|
2846
|
+
assetConfig,
|
|
2847
|
+
options: voidPackageOptions,
|
|
2848
|
+
hashedAssetsPrefix: assetsPrefix,
|
|
2849
|
+
headerRules,
|
|
2850
|
+
redirectRules,
|
|
2851
|
+
fallbackRules,
|
|
2852
|
+
dialect,
|
|
2853
|
+
webSockets
|
|
2854
|
+
});
|
|
2855
|
+
const workerFiles = collectWorkerFiles(join(distDir, workerDirName));
|
|
2856
|
+
cliLog?.info("package_end", {
|
|
2857
|
+
mode: "full",
|
|
2858
|
+
durationMs: Date.now() - packageStartedAt
|
|
2859
|
+
});
|
|
2860
|
+
const result = await streamDirectR2Deploy(client, config.projectId, manifest, assetManifest, assetFiles, needed, gitMetadata, neededSet !== null, workerFiles, s, cliLog);
|
|
2165
2861
|
const summary = [`${result.workers} worker module(s), ${formatKnownAssetSummary(result, skipped)}`];
|
|
2166
2862
|
if (result.migrations) summary.push(`${result.migrations} migration(s) applied`);
|
|
2167
2863
|
if (schedules.length > 0) summary.push(`${schedules.length} cron job(s) scheduled`);
|
|
@@ -2174,8 +2870,9 @@ async function runFullDeploy(root, config, client, fwPreset, skipBuild, commit,
|
|
|
2174
2870
|
Se(summary.join("\n"), result.url);
|
|
2175
2871
|
ye("Done!");
|
|
2176
2872
|
}
|
|
2177
|
-
async function createProjectFromSlug(root, client, slug) {
|
|
2873
|
+
async function createProjectFromSlug(root, client, slug, cliLog) {
|
|
2178
2874
|
R.step(`Creating project ${import_picocolors.default.blue(slug)}...`);
|
|
2875
|
+
cliLog?.info("project_create_start", { slug });
|
|
2179
2876
|
const project = await client.createProject(slug);
|
|
2180
2877
|
const config = {
|
|
2181
2878
|
projectId: project.id,
|
|
@@ -2183,10 +2880,16 @@ async function createProjectFromSlug(root, client, slug) {
|
|
|
2183
2880
|
};
|
|
2184
2881
|
writeProjectConfig(root, config);
|
|
2185
2882
|
R.success(`Project created: ${import_picocolors.default.blue(config.slug)}`);
|
|
2883
|
+
cliLog?.info("project_create_success", {
|
|
2884
|
+
projectId: config.projectId,
|
|
2885
|
+
slug: config.slug
|
|
2886
|
+
});
|
|
2186
2887
|
return config;
|
|
2187
2888
|
}
|
|
2188
2889
|
function getGitCommit(root) {
|
|
2189
2890
|
try {
|
|
2891
|
+
const githubSha = process.env.GITHUB_SHA?.trim();
|
|
2892
|
+
if (githubSha && /^[0-9a-f]{7,64}$/i.test(githubSha)) return githubSha;
|
|
2190
2893
|
execSync("git rev-parse --is-inside-work-tree", {
|
|
2191
2894
|
cwd: root,
|
|
2192
2895
|
stdio: "ignore"
|