void 0.1.6 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENT_PROMPT.md +15 -0
- package/README.md +62 -123
- package/dist/auth-BdsJ0Aff.d.mts +43 -0
- package/dist/auth-cmd-Dx8oPKZC.mjs +43 -0
- package/dist/auth-migrations-BAtAck2g.mjs +117 -0
- package/dist/better-auth-shared-C9_GHSkR.d.mts +71 -0
- package/dist/better-auth-shared-CdYmQGry.mjs +163 -0
- package/dist/cache-W82I8ihI.mjs +47 -0
- package/dist/cancel-deploy-BOBTqqh0.mjs +59 -0
- package/dist/cf-access-Dee5cXxL.mjs +22 -0
- package/dist/chunk-DJd-R1mw.mjs +34 -0
- package/dist/cli/cli.d.mts +1 -0
- package/dist/cli/cli.mjs +1807 -0
- package/dist/client-snXOjrp1.mjs +565 -0
- package/dist/collect-CjeZgz5D.mjs +55 -0
- package/dist/config-BIa9HwVX.mjs +573 -0
- package/dist/config-BzM9Dy7T.mjs +37 -0
- package/dist/config-CvHtTM0q.mjs +30 -0
- package/dist/create-project-BIA15W7z.mjs +90 -0
- package/dist/db-DsRoMcfN.mjs +895 -0
- package/dist/defer-DcxEsVH1.mjs +49 -0
- package/dist/delete-DAP6yDc7.mjs +64 -0
- package/dist/deploy-BPKblFx6.mjs +2424 -0
- package/dist/discover-B7FkXBLB.mjs +40 -0
- package/dist/dist-DUyXJLkq.mjs +2667 -0
- package/dist/dist-Dayj3gCK.mjs +1287 -0
- package/dist/domain-BGofcQ6I.mjs +79 -0
- package/dist/dotenv-DwO4ti0Z.mjs +173 -0
- package/dist/drizzle-NnudE_UN.mjs +232 -0
- package/dist/env-CyG3tvU0.mjs +301 -0
- package/dist/env-helpers-Dr9Y7RnE.d.mts +52 -0
- package/dist/env-raw-BDL4TvdN.mjs +32 -0
- package/dist/env-types-DknSA4SO.mjs +64 -0
- package/dist/env-validation-DJKjR_8q.mjs +163 -0
- package/dist/fetch-error-BQ8sZ5Nd.mjs +266 -0
- package/dist/fetch-error-CVZ5CGA-.d.mts +20 -0
- package/dist/gen-U0Ktr4Zd.mjs +761 -0
- package/dist/handler-B0ds0OHJ.d.mts +269 -0
- package/dist/head-P-egrtFE.d.mts +45 -0
- package/dist/headers-DCXc7mDs.mjs +279 -0
- package/dist/index.d.mts +32 -0
- package/dist/index.mjs +4695 -0
- package/dist/init-C7wS5iGP.mjs +2625 -0
- package/dist/link-p2R6NbgN.mjs +49 -0
- package/dist/list-Bfel-QLc.mjs +113 -0
- package/dist/log-DXdqnmhF.mjs +26 -0
- package/dist/login-CkcXUiIu.mjs +72 -0
- package/dist/logs-DmkrRvx6.mjs +98 -0
- package/dist/magic-string.es-D6g9UnIy.mjs +1011 -0
- package/dist/mcp-CaQzfeUi.mjs +373 -0
- package/dist/node-DDfXj10V.mjs +54 -0
- package/dist/output-BwlcIYSR.mjs +139 -0
- package/dist/pages/client.d.mts +198 -0
- package/dist/pages/client.mjs +980 -0
- package/dist/pages/head-client.d.mts +15 -0
- package/dist/pages/head-client.mjs +90 -0
- package/dist/pages/head.d.mts +2 -0
- package/dist/pages/head.mjs +112 -0
- package/dist/pages/index.d.mts +38 -0
- package/dist/pages/index.mjs +76 -0
- package/dist/pages/islands-plugin.d.mts +50 -0
- package/dist/pages/islands-plugin.mjs +195 -0
- package/dist/pages/prefetch.d.mts +31 -0
- package/dist/pages/prefetch.mjs +90 -0
- package/dist/pages/protocol.d.mts +3 -0
- package/dist/pages/protocol.mjs +193 -0
- package/dist/pages/serialize.d.mts +10 -0
- package/dist/pages/serialize.mjs +14 -0
- package/dist/pathe.M-eThtNZ-D-kmWkCS.mjs +150 -0
- package/dist/plugin-inference-oZ6Ybu2_.mjs +2447 -0
- package/dist/prepare-BAtWufvm.mjs +99 -0
- package/dist/preset-D4I73kT4.mjs +221 -0
- package/dist/project-TqORyHn8.mjs +72 -0
- package/dist/project-cmd-B7lQp3F3.mjs +67 -0
- package/dist/project-slug-CKam8lF9.mjs +11 -0
- package/dist/project-tsconfig-DfkESbDL.mjs +63 -0
- package/dist/protocol-BWzXs2A2.d.mts +34 -0
- package/dist/providers-B3aMxWzP.mjs +67 -0
- package/dist/resolve-project-Br5BR03U.mjs +29 -0
- package/dist/rollback-gyC59l7U.mjs +92 -0
- package/dist/route-types-DReF1gUY.mjs +255 -0
- package/dist/routes-stub.d.mts +55 -0
- package/dist/routes-stub.mjs +1 -0
- package/dist/runner-6Ep3fNQu.mjs +123 -0
- package/dist/runner-pg-D0wWHYnr.mjs +57 -0
- package/dist/runtime/ai.d.mts +127 -0
- package/dist/runtime/ai.mjs +348 -0
- package/dist/runtime/auth-client-react.d.mts +8 -0
- package/dist/runtime/auth-client-react.mjs +6 -0
- package/dist/runtime/auth-client-solid.d.mts +8 -0
- package/dist/runtime/auth-client-solid.mjs +6 -0
- package/dist/runtime/auth-client-svelte.d.mts +8 -0
- package/dist/runtime/auth-client-svelte.mjs +6 -0
- package/dist/runtime/auth-client-vue.d.mts +8 -0
- package/dist/runtime/auth-client-vue.mjs +6 -0
- package/dist/runtime/auth-client.d.mts +8 -0
- package/dist/runtime/auth-client.mjs +6 -0
- package/dist/runtime/auth.d.mts +2 -0
- package/dist/runtime/auth.mjs +22 -0
- package/dist/runtime/better-auth-pg.d.mts +11 -0
- package/dist/runtime/better-auth-pg.mjs +51 -0
- package/dist/runtime/better-auth.d.mts +11 -0
- package/dist/runtime/better-auth.mjs +33 -0
- package/dist/runtime/client.d.mts +6 -0
- package/dist/runtime/client.mjs +5 -0
- package/dist/runtime/db-pg.d.mts +2 -0
- package/dist/runtime/db-pg.mjs +1 -0
- package/dist/runtime/db.d.mts +17 -0
- package/dist/runtime/db.mjs +30 -0
- package/dist/runtime/drizzle-arktype.d.mts +1 -0
- package/dist/runtime/drizzle-arktype.mjs +2 -0
- package/dist/runtime/drizzle-valibot.d.mts +1 -0
- package/dist/runtime/drizzle-valibot.mjs +2 -0
- package/dist/runtime/drizzle-zod.d.mts +1 -0
- package/dist/runtime/drizzle-zod.mjs +2 -0
- package/dist/runtime/env-helpers.d.mts +2 -0
- package/dist/runtime/env-helpers.mjs +173 -0
- package/dist/runtime/env-public-client.d.mts +22 -0
- package/dist/runtime/env-public-client.mjs +54 -0
- package/dist/runtime/env-public.d.mts +143 -0
- package/dist/runtime/env-public.mjs +366 -0
- package/dist/runtime/env.d.mts +13 -0
- package/dist/runtime/env.mjs +51 -0
- package/dist/runtime/fetch-stream.d.mts +51 -0
- package/dist/runtime/fetch-stream.mjs +81 -0
- package/dist/runtime/fetch.d.mts +59 -0
- package/dist/runtime/fetch.mjs +18 -0
- package/dist/runtime/handler.d.mts +3 -0
- package/dist/runtime/handler.mjs +85 -0
- package/dist/runtime/isr.d.mts +26 -0
- package/dist/runtime/isr.mjs +43 -0
- package/dist/runtime/kv.d.mts +48 -0
- package/dist/runtime/kv.mjs +106 -0
- package/dist/runtime/log.d.mts +24 -0
- package/dist/runtime/log.mjs +31 -0
- package/dist/runtime/migration-handler-pg.d.mts +6 -0
- package/dist/runtime/migration-handler-pg.mjs +85 -0
- package/dist/runtime/migration-handler.d.mts +19 -0
- package/dist/runtime/migration-handler.mjs +92 -0
- package/dist/runtime/queues.d.mts +7 -0
- package/dist/runtime/queues.mjs +8 -0
- package/dist/runtime/remote/binding-handler.d.mts +15 -0
- package/dist/runtime/remote/binding-handler.mjs +208 -0
- package/dist/runtime/remote/index.d.mts +8 -0
- package/dist/runtime/remote/index.mjs +461 -0
- package/dist/runtime/response.d.mts +14 -0
- package/dist/runtime/response.mjs +30 -0
- package/dist/runtime/sandbox.d.mts +17 -0
- package/dist/runtime/sandbox.mjs +19 -0
- package/dist/runtime/schema-d1.d.mts +1 -0
- package/dist/runtime/schema-d1.mjs +2 -0
- package/dist/runtime/schema-pg.d.mts +1 -0
- package/dist/runtime/schema-pg.mjs +2 -0
- package/dist/runtime/seed.d.mts +30 -0
- package/dist/runtime/seed.mjs +6 -0
- package/dist/runtime/storage.d.mts +7 -0
- package/dist/runtime/storage.mjs +14 -0
- package/dist/runtime/validator.d.mts +2 -0
- package/dist/runtime/validator.mjs +72 -0
- package/dist/runtime/ws-server.d.mts +26 -0
- package/dist/runtime/ws-server.mjs +296 -0
- package/dist/runtime/ws.d.mts +123 -0
- package/dist/runtime/ws.mjs +103 -0
- package/dist/scan-Ba4hFwlH.mjs +324 -0
- package/dist/scan-C6HMEIdW.mjs +318 -0
- package/dist/secret-CeRSukgM.mjs +109 -0
- package/dist/skills-ipldjlKE.mjs +62 -0
- package/dist/standard-schema-9CRjx-uR.d.mts +42 -0
- package/dist/subcommand-prompt-BKjuNAPb.mjs +349 -0
- package/dist/sveltekit.d.mts +20 -0
- package/dist/sveltekit.mjs +61 -0
- package/dist/types-mHOEwpW4.d.mts +57 -0
- package/dist/validate-CaMavMxu.mjs +146 -0
- package/dist/yarn-pnp-BFqMV_bl.mjs +196 -0
- package/getting-started-prompt.txt +26 -0
- package/package.json +322 -30
- package/schema.json +364 -0
- package/skills/migrate-vite-cloudflare-to-void/SKILL.md +175 -0
- package/skills/void/SKILL.md +75 -0
- package/skills/void/command/void.md +7 -0
- package/skills/void/docs/guide/ai.md +235 -0
- package/skills/void/docs/guide/app-types.md +103 -0
- package/skills/void/docs/guide/auth.md +257 -0
- package/skills/void/docs/guide/database/d1.md +106 -0
- package/skills/void/docs/guide/database/postgresql.md +106 -0
- package/skills/void/docs/guide/database.md +418 -0
- package/skills/void/docs/guide/deployment.md +98 -0
- package/skills/void/docs/guide/edge/headers.md +79 -0
- package/skills/void/docs/guide/edge/prerendering.md +83 -0
- package/skills/void/docs/guide/edge/redirects.md +116 -0
- package/skills/void/docs/guide/edge/revalidation.md +131 -0
- package/skills/void/docs/guide/edge/rewrites.md +354 -0
- package/skills/void/docs/guide/edge/static-assets.md +72 -0
- package/skills/void/docs/guide/env-vars.md +298 -0
- package/skills/void/docs/guide/index.md +80 -0
- package/skills/void/docs/guide/jobs.md +91 -0
- package/skills/void/docs/guide/kv.md +107 -0
- package/skills/void/docs/guide/pages-routing/actions-and-forms.md +419 -0
- package/skills/void/docs/guide/pages-routing/head.md +130 -0
- package/skills/void/docs/guide/pages-routing/islands.md +405 -0
- package/skills/void/docs/guide/pages-routing/layouts.md +362 -0
- package/skills/void/docs/guide/pages-routing/loaders.md +267 -0
- package/skills/void/docs/guide/pages-routing/markdown.md +625 -0
- package/skills/void/docs/guide/pages-routing/overview.md +236 -0
- package/skills/void/docs/guide/pages-routing/view-transitions.md +140 -0
- package/skills/void/docs/guide/queues.md +140 -0
- package/skills/void/docs/guide/quickstart.md +233 -0
- package/skills/void/docs/guide/remote-dev.md +67 -0
- package/skills/void/docs/guide/sandboxes.md +82 -0
- package/skills/void/docs/guide/server-routing.md +246 -0
- package/skills/void/docs/guide/ssg.md +74 -0
- package/skills/void/docs/guide/ssr.md +105 -0
- package/skills/void/docs/guide/storage.md +67 -0
- package/skills/void/docs/guide/type-safety.md +179 -0
- package/skills/void/docs/guide/typed-fetch.md +113 -0
- package/skills/void/docs/guide/websockets.md +190 -0
- package/skills/void/docs/index.md +48 -0
- package/skills/void/docs/integrations/agents.md +84 -0
- package/skills/void/docs/integrations/cloudflare.md +284 -0
- package/skills/void/docs/integrations/frameworks/analog.md +182 -0
- package/skills/void/docs/integrations/frameworks/astro.md +197 -0
- package/skills/void/docs/integrations/frameworks/nuxt.md +164 -0
- package/skills/void/docs/integrations/frameworks/overview.md +136 -0
- package/skills/void/docs/integrations/frameworks/react-router.md +137 -0
- package/skills/void/docs/integrations/frameworks/sveltekit.md +191 -0
- package/skills/void/docs/integrations/frameworks/tanstack-start.md +140 -0
- package/skills/void/docs/integrations/hono.md +97 -0
- package/skills/void/docs/integrations/nodejs-bun-deno.md +210 -0
- package/skills/void/docs/node_modules/@iconify/vue/README.md +408 -0
- package/skills/void/docs/node_modules/@iconify/vue/offline/readme.md +5 -0
- package/skills/void/docs/node_modules/@voidzero-dev/vitepress-theme/README.md +103 -0
- package/skills/void/docs/node_modules/oxc-minify/README.md +78 -0
- package/skills/void/docs/node_modules/reka-ui/README.md +80 -0
- package/skills/void/docs/node_modules/vitepress/README.md +28 -0
- package/skills/void/docs/node_modules/vitepress/template/api-examples.md +49 -0
- package/skills/void/docs/node_modules/vitepress/template/index.md +28 -0
- package/skills/void/docs/node_modules/vitepress/template/markdown-examples.md +85 -0
- package/skills/void/docs/node_modules/vitepress-plugin-group-icons/README.md +101 -0
- package/skills/void/docs/node_modules/void/AGENTS.md +204 -0
- package/skills/void/docs/node_modules/void/AGENT_PROMPT.md +15 -0
- package/skills/void/docs/node_modules/void/README.md +89 -0
- package/skills/void/docs/node_modules/void/node_modules/@clack/prompts/CHANGELOG.md +591 -0
- package/skills/void/docs/node_modules/void/node_modules/@clack/prompts/README.md +375 -0
- package/skills/void/docs/node_modules/void/node_modules/@cloudflare/sandbox/README.md +174 -0
- package/skills/void/docs/node_modules/void/node_modules/@cloudflare/vite-plugin/README.md +37 -0
- package/skills/void/docs/node_modules/void/node_modules/@cloudflare/workers-types/README.md +135 -0
- package/skills/void/docs/node_modules/void/node_modules/@electric-sql/pglite/README.md +189 -0
- package/skills/void/docs/node_modules/void/node_modules/@hono/oauth-providers/CHANGELOG.md +143 -0
- package/skills/void/docs/node_modules/void/node_modules/@hono/oauth-providers/README.md +1272 -0
- package/skills/void/docs/node_modules/void/node_modules/@napi-rs/keyring/README.md +19 -0
- package/skills/void/docs/node_modules/void/node_modules/@types/better-sqlite3/README.md +15 -0
- package/skills/void/docs/node_modules/void/node_modules/@types/node/README.md +15 -0
- package/skills/void/docs/node_modules/void/node_modules/@types/pg/README.md +15 -0
- package/skills/void/docs/node_modules/void/node_modules/@typescript/native-preview/README.md +22 -0
- package/skills/void/docs/node_modules/void/node_modules/@typescript/native-preview/vendor/vscode-jsonrpc/README.md +69 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/README.md +152 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/@shikijs/engine-javascript/README.md +9 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/@shikijs/transformers/README.md +9 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/@types/node/README.md +15 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/gray-matter/CHANGELOG.md +24 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/gray-matter/README.md +565 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/markdown-exit/README.md +124 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/markdown-it-anchor/README.md +600 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/markdown-it-attrs/README.md +386 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/markdown-it-container/README.md +95 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/markdown-it-emoji/README.md +101 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/pathe/README.md +73 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/shiki/README.md +15 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/tinyglobby/README.md +25 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/tsdown/README.md +55 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite/LICENSE.md +2230 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vite/README.md +20 -0
- package/skills/void/docs/node_modules/void/node_modules/@void/md/node_modules/vue/README.md +58 -0
- package/skills/void/docs/node_modules/void/node_modules/arktype/README.md +165 -0
- package/skills/void/docs/node_modules/void/node_modules/better-auth/LICENSE.md +20 -0
- package/skills/void/docs/node_modules/void/node_modules/better-auth/README.md +32 -0
- package/skills/void/docs/node_modules/void/node_modules/better-sqlite3/README.md +99 -0
- package/skills/void/docs/node_modules/void/node_modules/blake3-jit/README.md +108 -0
- package/skills/void/docs/node_modules/void/node_modules/drizzle-arktype/README.md +51 -0
- package/skills/void/docs/node_modules/void/node_modules/drizzle-kit/README.md +79 -0
- package/skills/void/docs/node_modules/void/node_modules/drizzle-orm/README.md +44 -0
- package/skills/void/docs/node_modules/void/node_modules/drizzle-valibot/README.md +51 -0
- package/skills/void/docs/node_modules/void/node_modules/drizzle-zod/README.md +65 -0
- package/skills/void/docs/node_modules/void/node_modules/es-module-lexer/README.md +390 -0
- package/skills/void/docs/node_modules/void/node_modules/estree-walker/README.md +48 -0
- package/skills/void/docs/node_modules/void/node_modules/hono/README.md +85 -0
- package/skills/void/docs/node_modules/void/node_modules/ignore/README.md +452 -0
- package/skills/void/docs/node_modules/void/node_modules/jsonc-parser/CHANGELOG.md +76 -0
- package/{LICENSE → skills/void/docs/node_modules/void/node_modules/jsonc-parser/LICENSE.md} +21 -21
- package/skills/void/docs/node_modules/void/node_modules/jsonc-parser/README.md +364 -0
- package/skills/void/docs/node_modules/void/node_modules/jsonc-parser/SECURITY.md +41 -0
- package/skills/void/docs/node_modules/void/node_modules/magic-string/README.md +325 -0
- package/skills/void/docs/node_modules/void/node_modules/ofetch/README.md +398 -0
- package/skills/void/docs/node_modules/void/node_modules/pathe/README.md +73 -0
- package/skills/void/docs/node_modules/void/node_modules/pg/README.md +95 -0
- package/skills/void/docs/node_modules/void/node_modules/pglite-server/LICENSE.md +21 -0
- package/skills/void/docs/node_modules/void/node_modules/pglite-server/README.md +135 -0
- package/skills/void/docs/node_modules/void/node_modules/picocolors/README.md +21 -0
- package/skills/void/docs/node_modules/void/node_modules/tinyglobby/README.md +25 -0
- package/skills/void/docs/node_modules/void/node_modules/tsdown/README.md +55 -0
- package/skills/void/docs/node_modules/void/node_modules/valibot/LICENSE.md +9 -0
- package/skills/void/docs/node_modules/void/node_modules/valibot/README.md +94 -0
- package/skills/void/docs/node_modules/void/node_modules/vite/LICENSE.md +2230 -0
- package/skills/void/docs/node_modules/void/node_modules/vite/README.md +20 -0
- package/skills/void/docs/node_modules/void/node_modules/wrangler/README.md +63 -0
- package/skills/void/docs/node_modules/void/node_modules/zod/README.md +191 -0
- package/skills/void/docs/node_modules/void/skills/migrate-vite-cloudflare-to-void/SKILL.md +175 -0
- package/skills/void/docs/node_modules/void/skills/void/SKILL.md +75 -0
- package/skills/void/docs/node_modules/void/skills/void/command/void.md +7 -0
- package/skills/void/docs/reference/api.md +917 -0
- package/skills/void/docs/reference/cli.md +561 -0
- package/skills/void/docs/reference/config.md +408 -0
- package/skills/void/docs/reference/resource-inference.md +149 -0
- package/skills/void/docs/reference/structure.md +176 -0
- package/.npmignore +0 -29
- package/.travis.yml +0 -9
- package/favicon.ico +0 -0
- package/index.js +0 -14
- package/lib/Job.js +0 -150
- package/lib/Void.js +0 -99
- package/lib/scan.js +0 -19
- package/test/credentials.js +0 -20
- package/test/job.js +0 -64
- package/test/static/dir1/test6.html +0 -0
- package/test/static/dir2/test7.html +0 -0
- package/test/static/dir2/test8.html +0 -0
- package/test/static/dir2/test9.html +0 -0
- package/test/static/test1.html +0 -0
- package/test/static/test2.html +0 -0
- package/test/static/test3.html +0 -0
- package/test/void.js +0 -31
- /package/{test/static/dir1/test4.html → skills/void/docs/integrations/auth-providers.md} +0 -0
- /package/{test/static/dir1/test5.html → skills/void/docs/integrations/payment-processors.md} +0 -0
|
@@ -0,0 +1,2424 @@
|
|
|
1
|
+
import { n as __exportAll } from "./chunk-DJd-R1mw.mjs";
|
|
2
|
+
import { a as join, i as isAbsolute, n as dirname, o as relative, r as extname, s as resolve } from "./pathe.M-eThtNZ-D-kmWkCS.mjs";
|
|
3
|
+
import { n as cliTitle, r as createSpinner, s as import_picocolors } from "./output-BwlcIYSR.mjs";
|
|
4
|
+
import { t as findVoidAuthConfig } from "./config-CvHtTM0q.mjs";
|
|
5
|
+
import { c as R, g as ge, u as Se, v as ue, x as q, y as ye } from "./dist-Dayj3gCK.mjs";
|
|
6
|
+
import { a as writeProjectConfig, r as readProjectConfig } from "./project-TqORyHn8.mjs";
|
|
7
|
+
import { a as parsePlatformErrorBody, c as getTokenSource, i as isExpiredTokenError, l as isStagingMode, n as PlatformClient, s as getToken, t as PlatformApiError } from "./client-snXOjrp1.mjs";
|
|
8
|
+
import { c as getDatabaseDialect, f as readConfig, l as isNodeTarget, p as resolveBindingNames } from "./config-BIa9HwVX.mjs";
|
|
9
|
+
import { i as scanJobsSync, n as scanWebSocketRoutesSync, r as scanQueuesSync, t as scanRoutes } from "./scan-C6HMEIdW.mjs";
|
|
10
|
+
import { c as validateSsrEntry, n as detectFramework, r as inferProjectBindings, t as FRAMEWORK_SCAN_DIRS } from "./plugin-inference-oZ6Ybu2_.mjs";
|
|
11
|
+
import { i as voidWarn, r as voidLog } from "./log-DXdqnmhF.mjs";
|
|
12
|
+
import { t as discoverSchema } from "./discover-B7FkXBLB.mjs";
|
|
13
|
+
import { t as findEnvFile } from "./env-types-DknSA4SO.mjs";
|
|
14
|
+
import { i as validateProdEnv, n as formatEnvReport, r as getDeployEnvDefaults, t as fetchRemoteSecretNames } from "./env-validation-DJKjR_8q.mjs";
|
|
15
|
+
import { n as writeDrizzleConfig } from "./config-BzM9Dy7T.mjs";
|
|
16
|
+
import { t as collectMigrations } from "./collect-CjeZgz5D.mjs";
|
|
17
|
+
import { r as validateMigrations, t as assertJournalCoherence } from "./validate-CaMavMxu.mjs";
|
|
18
|
+
import { t as scanPages } from "./scan-Ba4hFwlH.mjs";
|
|
19
|
+
import { n as promptProjectSelection, r as promptProjectSetupAction, t as promptAndCreateProject } from "./create-project-BIA15W7z.mjs";
|
|
20
|
+
import { r as resolveProjectBySlug, t as getRequestedProjectSlug } from "./resolve-project-Br5BR03U.mjs";
|
|
21
|
+
import { n as lintDuplicateSources, r as mergeRoutingRules, t as lintDestinationSplats } from "./headers-DCXc7mDs.mjs";
|
|
22
|
+
import { t as promptForLoginToken } from "./login-CkcXUiIu.mjs";
|
|
23
|
+
import { i as resolveProjectCommand, n as detectPreset, r as formatProjectCommand, t as FRAMEWORK_PRESETS } from "./preset-D4I73kT4.mjs";
|
|
24
|
+
import { createRequire } from "node:module";
|
|
25
|
+
import { copyFileSync, cpSync, existsSync, mkdirSync, readFileSync, readdirSync, renameSync, rmSync, symlinkSync, unlinkSync, writeFileSync } from "node:fs";
|
|
26
|
+
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
27
|
+
import { build, loadEnv } from "vite";
|
|
28
|
+
import { tmpdir } from "node:os";
|
|
29
|
+
import { parse } from "jsonc-parser";
|
|
30
|
+
import { execFile, execFileSync, execSync } from "node:child_process";
|
|
31
|
+
import { hash } from "blake3-jit";
|
|
32
|
+
import ignore from "ignore";
|
|
33
|
+
// Durable Object migration tag for the sandbox container class.
// NOTE(review): not referenced in this visible chunk — presumably used by the deploy flow further down; verify.
const SANDBOX_MIGRATION_TAG = "void-sandbox-v1";
// CommonJS-style resolver so installed package roots can be located from ESM.
const require = createRequire(import.meta.url);
|
|
35
|
+
/**
 * Whether the project config enables the sandbox feature.
 * Any value other than `undefined` or `false` (including `null` or `{}`)
 * counts as enabled.
 */
function isSandboxEnabled(config) {
  const { sandbox } = config;
  return sandbox !== undefined && sandbox !== false;
}
|
|
38
|
+
/** Path to the Dockerfile bundled with the installed `@cloudflare/sandbox` package. */
function getDefaultSandboxImage() {
  return join(getSandboxPackageRoot(), "Dockerfile");
}
|
|
41
|
+
/**
 * Default platform image tag: `docker.io/cloudflare/sandbox:<version>`, where
 * the version comes from the installed `@cloudflare/sandbox` package.json
 * (falls back to "latest" when the version field is absent).
 */
function getDefaultSandboxPlatformImage() {
  return `docker.io/cloudflare/sandbox:${JSON.parse(readFileSync(join(getSandboxPackageRoot(), "package.json"), "utf-8")).version ?? "latest"}`;
}
|
|
44
|
+
/**
 * Whether `image` refers to a local Dockerfile rather than a registry image.
 * True for absolute paths, `./`/`../`-relative paths, and any string whose
 * lowercased form contains "dockerfile".
 */
function isLocalSandboxImage(image) {
  // Filesystem-path shapes are always local.
  if (isAbsolute(image) || image.startsWith("./") || image.startsWith("../")) return true;
  // Any case-insensitive "dockerfile" mention counts as local. The previous
  // `=== "dockerfile"`, `endsWith("/dockerfile")` and `endsWith("\\dockerfile")`
  // checks were redundant: each one implies `includes("dockerfile")`.
  return image.toLowerCase().includes("dockerfile");
}
|
|
48
|
+
/**
 * True when the sandbox config points at a local Dockerfile image but the
 * user has not supplied an explicit `platformImage` — meaning the default
 * platform image must be substituted at deploy time.
 */
function requiresSandboxPlatformImage(config) {
  const sandbox = config.sandbox;
  if (typeof sandbox !== "object" || sandbox === null) return false;
  const { image, platformImage } = sandbox;
  if (typeof image !== "string") return false;
  return isLocalSandboxImage(image) && platformImage === undefined;
}
|
|
52
|
+
/**
 * Resolve the user's sandbox settings into a fully-populated config object.
 * Non-object settings (e.g. `sandbox: true`) are treated as an empty options
 * bag, so every field falls back to its default. Relative image paths and
 * build contexts are resolved against `root`.
 */
function resolveSandboxConfig(config, root = process.cwd()) {
  const opts = typeof config.sandbox === "object" ? config.sandbox : {};
  const requestedImage = opts.image ?? getDefaultSandboxImage();
  // A remote (non-local) user image doubles as the platform image; otherwise
  // fall back to the bundled default platform image.
  const platformImage =
    opts.platformImage ??
    (opts.image && !isLocalSandboxImage(opts.image) ? opts.image : getDefaultSandboxPlatformImage());
  const resolved = {
    binding: opts.binding ?? "SANDBOX",
    className: opts.className ?? "Sandbox",
    containerName: opts.containerName ?? "void-sandbox",
    image: resolveSandboxImage(requestedImage, root),
    platformImage
  };
  // Optional fields are only present when the user supplied them.
  const buildContext = opts.imageBuildContext;
  if (buildContext) resolved.imageBuildContext = isAbsolute(buildContext) ? buildContext : join(root, buildContext);
  if (opts.instanceType) resolved.instanceType = opts.instanceType;
  if (opts.maxInstances != null) resolved.maxInstances = opts.maxInstances;
  return resolved;
}
|
|
68
|
+
/** Root directory of the installed `@cloudflare/sandbox` package (one level up from its resolved entry file). */
function getSandboxPackageRoot() {
  return join(dirname(require.resolve("@cloudflare/sandbox")), "..");
}
|
|
71
|
+
/**
 * Resolve a local, relative Dockerfile reference against `root`.
 * Registry images and already-absolute paths pass through unchanged.
 */
function resolveSandboxImage(image, root) {
  const needsRootPrefix = isLocalSandboxImage(image) && !isAbsolute(image);
  return needsRootPrefix ? join(root, image) : image;
}
|
|
75
|
+
//#endregion
//#region src/cli/wrangler.ts
// Strict YYYY-MM-DD shape check for Workers compatibility dates.
const COMPAT_DATE_RE = /^\d{4}-\d{2}-\d{2}$/;
// Workers compatibility flag name for Node.js AsyncLocalStorage support.
// NOTE(review): not referenced in this visible chunk — presumably used further down; verify.
const NODEJS_ALS_FLAG = "nodejs_als";
// Project-level Void configuration file name.
const VOID_CONFIG_FILE = "void.json";
/** Latest compatibility date the Void test suite is pinned against. */
const LATEST_KNOWN_COMPAT_DATE = "2026-02-24";
|
|
82
|
+
/**
|
|
83
|
+
* Read compatibility settings from `wrangler.jsonc`/`wrangler.json` and
|
|
84
|
+
* `void.json`, with priority: void.json > wrangler.jsonc > build output.
|
|
85
|
+
*
|
|
86
|
+
* If no explicit `compatibility_date` is configured, pins the latest known-good
|
|
87
|
+
* date in `void.json` so subsequent runs are explicit and stable.
|
|
88
|
+
*
|
|
89
|
+
* @param buildOutputDir - Optional path to the build output directory (e.g.
|
|
90
|
+
* `dist/ssr`) which may contain a plugin-generated `wrangler.json` with the
|
|
91
|
+
* correct compat settings for the built worker.
|
|
92
|
+
*/
|
|
93
|
+
function readWranglerCompat(root, config, buildOutputDir) {
  const voidConfig = config ?? readVoidConfigIfExists(root);
  // First readable wrangler config wins; unreadable/missing files are skipped silently.
  let raw;
  for (const name of ["wrangler.jsonc", "wrangler.json"]) try {
    raw = readFileSync(join(root, name), "utf-8");
    break;
  } catch {}
  let wranglerDate;
  let wranglerFlags;
  if (raw) {
    const parsed = parseJsonc(raw);
    if (typeof parsed.compatibility_date === "string") {
      validateCompatDate(parsed.compatibility_date, "wrangler config");
      wranglerDate = parsed.compatibility_date;
    }
    // Flags are only accepted when the whole array consists of strings.
    if (Array.isArray(parsed.compatibility_flags) && parsed.compatibility_flags.every((f) => typeof f === "string")) wranglerFlags = parsed.compatibility_flags;
  }
  // Lowest-priority source: a plugin-generated wrangler.json in the build output.
  // Read failures are deliberately ignored (the file is optional).
  let buildDate;
  let buildFlags;
  if (buildOutputDir) try {
    const buildRaw = readFileSync(join(buildOutputDir, "wrangler.json"), "utf-8");
    const buildParsed = JSON.parse(buildRaw);
    if (typeof buildParsed.compatibility_date === "string") {
      validateCompatDate(buildParsed.compatibility_date, "build output wrangler.json");
      buildDate = buildParsed.compatibility_date;
    }
    if (Array.isArray(buildParsed.compatibility_flags) && buildParsed.compatibility_flags.every((f) => typeof f === "string")) buildFlags = buildParsed.compatibility_flags;
  } catch {}
  const voidDate = voidConfig?.worker?.compatibility_date;
  const voidFlags = voidConfig?.worker?.compatibility_flags;
  if (voidDate !== void 0) validateCompatDate(voidDate, "void.json worker config");
  // Priority per the doc comment above: void.json > wrangler config > build
  // output. When nothing is configured, ensureVoidCompatibilityDate pins the
  // latest known-good date into void.json and returns it.
  const result = { compatibilityDate: voidDate ?? wranglerDate ?? buildDate ?? ensureVoidCompatibilityDate(root, voidConfig) };
  const flags = voidFlags ?? wranglerFlags ?? buildFlags;
  // Omit the flags key entirely when there are none.
  if (flags && flags.length > 0) result.compatibilityFlags = flags;
  return result;
}
|
|
129
|
+
/**
 * Assert that a compatibility date string uses the YYYY-MM-DD format.
 * Throws a config error naming the offending source otherwise.
 */
function validateCompatDate(value, source) {
  if (COMPAT_DATE_RE.test(value)) return;
  throw new Error(`config: "compatibility_date" in ${source} must use YYYY-MM-DD format, got '${value}'.`);
}
|
|
132
|
+
/**
 * Load void.json from `root` when it exists. Returns undefined when absent,
 * throws a config error on invalid JSON or a non-object top level.
 */
function readVoidConfigIfExists(root) {
  const filePath = join(root, VOID_CONFIG_FILE);
  if (!existsSync(filePath)) return;
  let parsed;
  try {
    parsed = JSON.parse(readFileSync(filePath, "utf-8"));
  } catch (error) {
    if (error instanceof SyntaxError) throw new Error(`config: Invalid JSON in ${VOID_CONFIG_FILE}.`);
    throw error;
  }
  // The top level must be a plain JSON object (not an array or primitive).
  if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) throw new Error(`config: ${VOID_CONFIG_FILE} must be a JSON object.`);
  return parsed;
}
|
|
144
|
+
/**
 * Pin `LATEST_KNOWN_COMPAT_DATE` into void.json's `worker.compatibility_date`
 * and return it. Starts from the on-disk void.json when present (validated as
 * a JSON object), otherwise from the in-memory `config` (if any), then
 * rewrites void.json with the pinned date so subsequent runs are explicit.
 */
function ensureVoidCompatibilityDate(root, config) {
  const filePath = join(root, VOID_CONFIG_FILE);
  let nextConfig = {};
  if (existsSync(filePath)) try {
    const parsed = JSON.parse(readFileSync(filePath, "utf-8"));
    // Top level must be a plain JSON object; this Error is not a SyntaxError,
    // so the catch below rethrows it unchanged.
    if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) throw new Error(`config: ${VOID_CONFIG_FILE} must be a JSON object.`);
    nextConfig = parsed;
  } catch (error) {
    if (error instanceof SyntaxError) throw new Error(`config: Invalid JSON in ${VOID_CONFIG_FILE}.`);
    throw error;
  }
  else if (config) nextConfig = { ...config };
  // Merge the pinned date over any existing worker settings.
  nextConfig.worker = {
    ...nextConfig.worker ?? {},
    compatibility_date: LATEST_KNOWN_COMPAT_DATE
  };
  writeFileSync(filePath, `${JSON.stringify(nextConfig, null, 2)}\n`);
  return LATEST_KNOWN_COMPAT_DATE;
}
|
|
163
|
+
/**
 * Sync inferred/configured bindings into the project's wrangler.jsonc for local dev.
 * Only adds bindings that are missing — existing bindings are preserved.
 * Returns true if the file was modified.
 *
 * @param {string} root - Project root directory.
 * @param {{needsD1?: boolean, needsKV?: boolean, needsR2?: boolean, needsSandbox?: boolean}} bindings
 *   Which binding kinds the project requires.
 * @param {object} [options] - Binding-name overrides, websocket routes, and sandbox config.
 * @returns {boolean} true when wrangler.jsonc/json was created or changed.
 */
function syncWranglerBindings(root, bindings, options) {
	let configPath;
	let parsed;
	let created = false;
	// Prefer wrangler.jsonc over wrangler.json; stop at the first file that exists.
	for (const name of ["wrangler.jsonc", "wrangler.json"]) {
		const p = join(root, name);
		if (existsSync(p)) {
			configPath = p;
			try {
				parsed = parseJsonc(readFileSync(p, "utf-8"));
			} catch {}
			// NOTE: a parse failure leaves `parsed` undefined, so the fallback
			// below resets configPath to wrangler.jsonc and starts from {} —
			// an unreadable config is effectively replaced on write.
			break;
		}
	}
	if (!configPath || !parsed) {
		configPath = join(root, "wrangler.jsonc");
		parsed = {};
		created = true;
	}
	let modified = false;
	// D1: add a placeholder local database unless a binding with this name exists.
	const d1Name = options?.bindingNames?.d1 ?? "DB";
	if (bindings.needsD1) {
		const existing = parsed.d1_databases ?? [];
		if (!existing.some((b) => b.binding === d1Name)) {
			parsed.d1_databases = [...existing, {
				binding: d1Name,
				database_name: "default",
				database_id: "local"
			}];
			modified = true;
		}
	}
	// KV: same add-if-missing pattern.
	const kvName = options?.bindingNames?.kv ?? "KV";
	if (bindings.needsKV) {
		const existing = parsed.kv_namespaces ?? [];
		if (!existing.some((b) => b.binding === kvName)) {
			parsed.kv_namespaces = [...existing, {
				binding: kvName,
				id: "local"
			}];
			modified = true;
		}
	}
	// R2: same add-if-missing pattern.
	const r2Name = options?.bindingNames?.r2 ?? "STORAGE";
	if (bindings.needsR2) {
		const existing = parsed.r2_buckets ?? [];
		if (!existing.some((b) => b.binding === r2Name)) {
			parsed.r2_buckets = [...existing, {
				binding: r2Name,
				bucket_name: "default"
			}];
			modified = true;
		}
	}
	// WebSocket routes become Durable Object bindings (one per route class).
	if (options?.websockets && options.websockets.length > 0) {
		const existing = parsed.durable_objects?.bindings ?? [];
		const additions = options.websockets.filter((route) => !existing.some((binding) => binding.name === route.bindingName)).map((route) => ({
			name: route.bindingName,
			class_name: route.className
		}));
		if (additions.length > 0) {
			// NOTE(review): this replaces the whole durable_objects object, so
			// any sibling keys besides `bindings` would be dropped — confirm
			// none are expected in user configs.
			parsed.durable_objects = { bindings: [...existing, ...additions] };
			modified = true;
		}
		// Seed a migrations array only when none exists at all.
		if (!Array.isArray(parsed.migrations)) {
			parsed.migrations = [{
				tag: "void-ws-v1",
				new_classes: options.websockets.map((route) => route.className)
			}];
			modified = true;
		}
	}
	// Sandbox: needs a Durable Object binding, a container entry, and a
	// SQLite-class migration.
	if (bindings.needsSandbox) {
		const sandbox = options?.sandbox ?? resolveSandboxConfig({ sandbox: true }, root);
		const bindingName = options?.bindingNames?.sandbox ?? sandbox?.binding ?? "SANDBOX";
		const className = sandbox?.className ?? "Sandbox";
		const existingDurableObjectBindings = parsed.durable_objects?.bindings ?? [];
		if (!existingDurableObjectBindings.some((binding) => binding.name === bindingName)) {
			parsed.durable_objects = { bindings: [...existingDurableObjectBindings, {
				name: bindingName,
				class_name: className
			}] };
			modified = true;
		}
		// Container entries are matched on class_name, not container name.
		const existingContainers = parsed.containers ?? [];
		if (!existingContainers.some((container) => container.class_name === className)) {
			parsed.containers = [...existingContainers, {
				name: sandbox?.containerName ?? "void-sandbox",
				class_name: className,
				image: sandbox?.image,
				...sandbox?.imageBuildContext && { image_build_context: sandbox.imageBuildContext },
				...sandbox?.instanceType && { instance_type: sandbox.instanceType },
				...sandbox?.maxInstances != null && { max_instances: sandbox.maxInstances }
			}];
			modified = true;
		}
		const migrations = Array.isArray(parsed.migrations) ? parsed.migrations : [];
		// NOTE(review): the guard checks the literal "void-sandbox-v1" while the
		// pushed entry uses SANDBOX_MIGRATION_TAG — confirm the constant equals
		// that literal, otherwise duplicate migrations can be appended.
		if (!migrations.some((migration) => migration.tag === "void-sandbox-v1" || migration.new_sqlite_classes?.includes(className))) {
			parsed.migrations = [...migrations, {
				tag: SANDBOX_MIGRATION_TAG,
				new_sqlite_classes: [className]
			}];
			modified = true;
		}
	}
	// Write only when something actually changed (or the file was just created).
	if (created || modified) writeFileSync(configPath, JSON.stringify(parsed, null, 2) + "\n");
	return created || modified;
}
|
|
276
|
+
/**
 * Ensure a wrangler config has an explicit compatibility date.
 * Used when `worker.compatibility_date` is configured in void.json but the
 * active framework reads Cloudflare settings from wrangler.jsonc/json.
 *
 * @param {string} root - Project root directory.
 * @param {string} compatibilityDate - Date to pin (validated first).
 * @returns {boolean} true when the config file was written.
 */
function ensureWranglerCompatibilityDate(root, compatibilityDate) {
	validateCompatDate(compatibilityDate, "void.json worker config");
	let targetPath;
	let config;
	// First existing candidate wins; .jsonc is preferred over .json.
	for (const candidate of ["wrangler.jsonc", "wrangler.json"]) {
		const candidatePath = join(root, candidate);
		if (!existsSync(candidatePath)) continue;
		targetPath = candidatePath;
		try {
			config = parseJsonc(readFileSync(candidatePath, "utf-8"));
		} catch {
			// Unparseable config: fall through to the defaults below.
		}
		break;
	}
	if (!targetPath || !config) {
		targetPath = join(root, "wrangler.jsonc");
		config = {};
	}
	if (config.compatibility_date === compatibilityDate) return false;
	config.compatibility_date = compatibilityDate;
	writeFileSync(targetPath, `${JSON.stringify(config, null, 2)}\n`);
	return true;
}
|
|
304
|
+
/**
 * Ensure the `nodejs_als` compatibility flag exists in wrangler config.
 * Returns true if the config file was modified.
 *
 * @param {string} root - Project root directory.
 * @returns {boolean} true when the flag was added and the file written.
 */
function ensureWranglerNodejsAlsFlag(root) {
	let targetPath;
	let config;
	// First existing candidate wins; .jsonc is preferred over .json.
	for (const candidate of ["wrangler.jsonc", "wrangler.json"]) {
		const candidatePath = join(root, candidate);
		if (!existsSync(candidatePath)) continue;
		targetPath = candidatePath;
		try {
			config = parseJsonc(readFileSync(candidatePath, "utf-8"));
		} catch {
			// Unparseable config: fall through to the defaults below.
		}
		break;
	}
	if (!targetPath || !config) {
		targetPath = join(root, "wrangler.jsonc");
		config = {};
	}
	const flags = Array.isArray(config.compatibility_flags) ? config.compatibility_flags : [];
	if (flags.includes(NODEJS_ALS_FLAG)) return false;
	config.compatibility_flags = [...flags, NODEJS_ALS_FLAG];
	writeFileSync(targetPath, `${JSON.stringify(config, null, 2)}\n`);
	return true;
}
|
|
331
|
+
/**
 * Parse JSONC text (comments + trailing commas allowed), throwing a
 * SyntaxError when the parser reports any error.
 *
 * @param {string} text - Raw JSONC source.
 * @returns {unknown} The parsed value.
 * @throws {SyntaxError} When the input is not valid JSONC.
 */
function parseJsonc(text) {
	const parseErrors = [];
	const value = parse(text, parseErrors, { allowTrailingComma: true });
	if (parseErrors.length !== 0) throw new SyntaxError("Failed to parse JSONC");
	return value;
}
|
|
337
|
+
//#endregion
|
|
338
|
+
//#region src/shared/env.ts
|
|
339
|
+
/**
 * Strip shell environment pollution from a Vite `loadEnv(mode, root, '')`
 * result. Vite returns `.env`-file entries AND every `process.env` entry
 * when called with an empty prefix; we want only the keys that came from
 * `.env` files (either new keys or keys that override a shell value).
 *
 * Used anywhere we need to populate worker vars from `.env` without
 * shipping the developer's shell environment (PATH, HOME, etc.) into
 * `wrangler.json` or the prerender Miniflare instance.
 *
 * @param {Record<string, string>} loaded - Raw loadEnv result.
 * @returns {Record<string, string>} Only the .env-file-originated entries.
 */
function filterLoadedEnv(loaded) {
	const shellKeys = new Set(Object.keys(process.env));
	const fromDotenv = {};
	for (const [key, value] of Object.entries(loaded)) {
		// A key is a plain shell echo only when it exists in the shell with
		// the exact same value; overrides and fresh keys are kept.
		const echoedFromShell = shellKeys.has(key) && process.env[key] === value;
		if (!echoedFromShell) fromDotenv[key] = value;
	}
	return fromDotenv;
}
|
|
355
|
+
/**
 * Resolve whether the plugin should run in "remote" mode (proxy real
 * D1/KV/R2 via the platform) based on the project config plus an optional
 * `VOID_REMOTE` env override.
 *
 * Tri-state override semantics:
 * - `VOID_REMOTE=1` → force remote mode on
 * - `VOID_REMOTE=0` → force remote mode off (e.g. local/CI builds of a
 *   project whose `void.json` has `remote: true`)
 * - unset / any other value → fall back to `config.remote`
 *
 * Both the dev-server plugin (index.ts) and the prerender build plugin
 * (plugin-prerender.ts) must derive `remoteMode` through this helper so
 * dev and build stay in lockstep.
 *
 * @param {boolean | undefined} configRemote - `remote` value from void.json.
 * @returns {boolean} Effective remote-mode flag.
 */
function resolveRemoteMode(configRemote) {
	switch (process.env.VOID_REMOTE) {
		case "1":
			return true;
		case "0":
			return false;
		default:
			// Anything but an exact boolean `true` in config means local mode.
			return configRemote === true;
	}
}
|
|
376
|
+
// Env keys reserved for Void's own runtime plumbing (auth token, project id,
// remote/staging switches, proxy routing). `stripInternalEnvKeys` removes
// these from user-supplied env records so user .env files cannot override them.
const INTERNAL_ENV_KEY_SET = new Set([
	"__VOID_TOKEN",
	"__VOID_PROJECT_ID",
	"__VOID_REMOTE",
	"__VOID_STAGING",
	"__VOID_PROXY_URL",
	"__VOID_PROXY_TOKEN"
]);
|
|
384
|
+
/**
 * Remove any internal Void env keys from a user-supplied env record. Returns
 * the sanitized record plus the list of keys that were stripped so callers
 * (which live in non-leaf modules) can emit a warning via `voidWarn`.
 *
 * This module is a leaf — it must not import from `../log` or any non-leaf
 * module, otherwise the prerender subprocess's worker bundle acquires
 * unwanted transitive dependencies. That's why warnings are the caller's
 * responsibility.
 *
 * @param {Record<string, string> | undefined | null} input - Env record to sanitize.
 * @returns {{sanitized: Record<string, string>, stripped: string[]}}
 */
function stripInternalEnvKeys(input) {
	const sanitized = {};
	const stripped = [];
	if (input) {
		for (const key of Object.keys(input)) {
			if (INTERNAL_ENV_KEY_SET.has(key)) stripped.push(key);
			else sanitized[key] = input[key];
		}
	}
	return { sanitized, stripped };
}
|
|
413
|
+
//#endregion
|
|
414
|
+
//#region src/shared/proxy.ts
|
|
415
|
+
// Base URLs for the Void data proxy; the staging variant is used when the
// __VOID_STAGING binding is set (see buildPrerenderBindings below).
const DEFAULT_PROXY_URL = "https://proxy.void.cloud";
const STAGING_PROXY_URL = "https://proxy.staging.void.cloud";
|
|
417
|
+
//#endregion
|
|
418
|
+
//#region src/prerender.ts
|
|
419
|
+
// Internal worker endpoint (POST) that returns `{ paths: string[] }` to prerender.
const PRERENDER_PATHS_ENDPOINT = "/__void/prerender-paths";
// Client-dir subfolder where per-page JSON payloads are written.
const STATIC_PAGE_DATA_DIR = "_void/pages";
|
|
421
|
+
/**
 * Map a URL path to the HTML file it prerenders to ("/" → index.html,
 * "/a/b" → a/b.html).
 *
 * @param {string} path - URL path starting with "/".
 * @returns {string} Relative output file name.
 */
function outputFileForPath(path) {
	if (path === "/") return "index.html";
	return `${path.slice(1)}.html`;
}
|
|
424
|
+
/**
 * Map a URL path to its static page-data JSON file under
 * `STATIC_PAGE_DATA_DIR` ("/" → .../index.json, "/a/b" → .../a/b.json).
 *
 * @param {string} path - URL path starting with "/".
 * @returns {string} Relative JSON file name.
 */
function pageDataFileForPath(path) {
	const name = path === "/" ? "index" : path.slice(1);
	return `${STATIC_PAGE_DATA_DIR}/${name}.json`;
}
|
|
427
|
+
/**
 * Validate and unwrap the `{ paths: string[] }` payload returned by the
 * worker's prerender-paths endpoint.
 *
 * @param {Response} response - Response from the internal endpoint.
 * @returns {Promise<string[]>} The list of paths to prerender.
 * @throws {Error} On a non-2xx status or a malformed payload.
 */
async function readPrerenderPathsResponse(response) {
	if (!response.ok) throw new Error(`prerender: Failed to collect prerender paths. HTTP ${response.status}.`);
	const payload = await response.json();
	// Guard against a null/non-object JSON body before touching `.paths`
	// (parity with the Node runner's validation) so callers get the readable
	// "payload is invalid" error instead of a raw TypeError.
	if (!payload || !Array.isArray(payload.paths) || !payload.paths.every((path) => typeof path === "string")) throw new Error("prerender: Failed to collect prerender paths. Response payload is invalid.");
	return payload.paths;
}
|
|
433
|
+
/**
 * Fetch each prerender path through the worker and write the resulting HTML
 * (and, when the response is page-data capable, the page-data JSON) into
 * `clientDir`. Failures are collected per-path and logged; they never abort
 * the remaining paths.
 *
 * @param {string[]} paths - URL paths to render.
 * @param {string} clientDir - Directory the HTML/JSON files are written into.
 * @param {(path: string) => Promise<Response>} fetchPath - Fetches the HTML for a path.
 * @param {((path: string) => Promise<Response>) | undefined} fetchPageData -
 *   Optional page-data fetcher; skipped when absent.
 * @returns {Promise<{rendered: string[], failed: {path: string, error: string}[]}>}
 */
async function renderPrerenderPaths(paths, clientDir, fetchPath, fetchPageData) {
	const result = {
		rendered: [],
		failed: []
	};
	for (const path of paths) try {
		const response = await fetchPath(path);
		// Read the body up front; it is only written when the status is ok.
		const html = await response.text();
		if (!response.ok) {
			result.failed.push({
				path,
				error: `HTTP ${response.status}`
			});
			voidWarn(`prerender ${path} failed: HTTP ${response.status}`);
			continue;
		}
		const outputFile = outputFileForPath(path);
		const outputPath = join(clientDir, outputFile);
		mkdirSync(dirname(outputPath), { recursive: true });
		writeFileSync(outputPath, html);
		if (fetchPageData) {
			const pageDataResponse = await fetchPageData(path);
			// Only responses that echo the X-VoidPages header carry page data.
			if (pageDataResponse.headers.get("X-VoidPages")) {
				const pageData = await pageDataResponse.json();
				const pageDataPath = join(clientDir, pageDataFileForPath(path));
				mkdirSync(dirname(pageDataPath), { recursive: true });
				writeFileSync(pageDataPath, JSON.stringify(pageData));
			}
		}
		result.rendered.push(path);
		voidLog(`prerender \x1b[36m${outputFile}\x1b[39m`);
	} catch (err) {
		// Per-path failure: record and warn, then continue with the next path.
		const error = err instanceof Error ? err.message : String(err);
		result.failed.push({
			path,
			error
		});
		voidWarn(`prerender ${path} failed: ${error}`);
	}
	return result;
}
|
|
474
|
+
/**
 * Normalize a worker module path: backslashes become forward slashes and a
 * single leading "./" is dropped.
 *
 * @param {string} path - Possibly Windows-style or "./"-prefixed path.
 * @returns {string} Normalized POSIX-style relative path.
 */
function normalizeWorkerModulePath(path) {
	const slashed = path.replace(/\\/g, "/");
	return slashed.startsWith("./") ? slashed.slice(2) : slashed;
}
|
|
477
|
+
/**
 * Classify a worker module file by extension, or return null for files that
 * must not be shipped as modules (.vite internals, wrangler config files,
 * unknown extensions).
 *
 * @param {string} relativePath - Path relative to the worker dir.
 * @returns {"ESModule" | "CommonJS" | "CompiledWasm" | "Text" | "Data" | null}
 */
function resolveWorkerModuleType(relativePath) {
	const normalized = normalizeWorkerModulePath(relativePath);
	// Vite's internal metadata directory never ships.
	if (normalized.includes("/.vite/") || normalized.startsWith(".vite/")) return null;
	// The wrangler config itself is not a module.
	if (normalized === "wrangler.json" || normalized === "wrangler.jsonc") return null;
	const byExtension = [
		[".mjs", "ESModule"],
		[".js", "ESModule"],
		[".cjs", "CommonJS"],
		[".wasm", "CompiledWasm"],
		[".txt", "Text"],
		[".bin", "Data"]
	];
	for (const [extension, moduleType] of byExtension) {
		if (normalized.endsWith(extension)) return moduleType;
	}
	return null;
}
|
|
488
|
+
/**
 * Recursively list every file (not directory) under `dir`, depth-first.
 *
 * @param {string} dir - Directory to walk.
 * @returns {string[]} Absolute/joined file paths.
 */
function collectFiles$1(dir) {
	return readdirSync(dir, { withFileTypes: true }).flatMap((entry) => {
		const fullPath = join(dir, entry.name);
		return entry.isDirectory() ? collectFiles$1(fullPath) : [fullPath];
	});
}
|
|
498
|
+
/**
 * Gather every shippable module under the built worker directory, with the
 * entry module sorted first (Miniflare treats the first module as the entry).
 * Binary module types are loaded as bytes; everything else as UTF-8 text.
 *
 * @param {string} workerDir - Directory containing the built worker output.
 * @param {string} workerMain - Entry module path from wrangler.json `main`.
 * @returns {{path: string, type: string, contents: string | Uint8Array}[]}
 * @throws {Error} When the entry module is not found under `workerDir`.
 */
function collectPrerenderWorkerModules(workerDir, workerMain) {
	const entryPath = normalizeWorkerModulePath(workerMain);
	const modules = [];
	for (const absolutePath of collectFiles$1(workerDir)) {
		const relPath = normalizeWorkerModulePath(relative(workerDir, absolutePath));
		const moduleType = resolveWorkerModuleType(relPath);
		if (!moduleType) continue;
		const raw = readFileSync(absolutePath);
		const isBinary = moduleType === "CompiledWasm" || moduleType === "Data";
		modules.push({
			path: absolutePath,
			relativePath: relPath,
			type: moduleType,
			contents: isBinary ? new Uint8Array(raw) : raw.toString("utf-8")
		});
	}
	// Entry first, everything else in stable lexicographic order.
	modules.sort((left, right) => {
		if (left.relativePath === entryPath) return -1;
		if (right.relativePath === entryPath) return 1;
		return left.relativePath.localeCompare(right.relativePath);
	});
	if (!modules.some((candidate) => candidate.relativePath === entryPath)) throw new Error(`prerender: Worker entry '${entryPath}' not found in '${workerDir}'.`);
	// relativePath was only needed for sorting/lookup; drop it before returning.
	return modules.map(({ relativePath: _ignored, ...shipped }) => shipped);
}
|
|
520
|
+
/**
 * Build the flat bindings record for the prerender Miniflare instance:
 * sanitized wrangler vars, then sanitized .env vars, then the internal
 * __VOID_* bindings (which therefore always win).
 *
 * @param {{wranglerVars?: object, env?: object, voidToken?: string,
 *   voidProjectId?: string, voidStaging?: boolean, remoteMode?: boolean}} input
 * @returns {Record<string, string>} Merged bindings.
 */
function buildPrerenderBindings(input) {
	const wrangler = stripInternalEnvKeys(input.wranglerVars);
	const dotenv = stripInternalEnvKeys(input.env);
	// Warn once about every reserved key found in either source.
	const strippedKeys = [...new Set([...wrangler.stripped, ...dotenv.stripped])];
	if (strippedKeys.length > 0) voidWarn(`Ignoring internal env keys in prerender inputs: ${strippedKeys.join(", ")}. These are reserved for Void runtime use.`);
	const voidBindings = {};
	// Internal bindings are only injected when both credentials are present.
	if (input.voidToken && input.voidProjectId) {
		voidBindings.__VOID_TOKEN = input.voidToken;
		voidBindings.__VOID_PROJECT_ID = input.voidProjectId;
		if (input.voidStaging) voidBindings.__VOID_STAGING = "1";
		voidBindings.__VOID_PROXY_URL = input.voidStaging ? STAGING_PROXY_URL : DEFAULT_PROXY_URL;
		if (input.remoteMode) voidBindings.__VOID_REMOTE = "1";
	}
	return {
		...wrangler.sanitized,
		...dotenv.sanitized,
		...voidBindings
	};
}
|
|
539
|
+
/**
 * Boot a Miniflare instance around the built worker so prerendering can
 * dispatch requests against it. Returns a small runner facade:
 * `collectPaths`, `fetchPath`, `fetchPageData`, `close`.
 *
 * @param {{root: string, workerDir: string, persistDir: string,
 *   clientDir?: string, remoteMode?: boolean, projectId?: string}} options
 * @returns {Promise<object>} Runner with collect/fetch/close methods.
 * @throws {Error} When the built worker has no wrangler.json.
 */
async function createCloudflarePrerenderRunner(options) {
	const { root, workerDir, persistDir, clientDir, remoteMode } = options;
	// The built worker must ship a wrangler.json describing its bindings.
	const wranglerPath = join(workerDir, "wrangler.json");
	if (!existsSync(wranglerPath)) throw new Error(`prerender: wrangler.json not found in '${workerDir}'.`);
	const wranglerConfig = JSON.parse(readFileSync(wranglerPath, "utf-8"));
	const wranglerCompat = readWranglerCompat(root, readConfig(root), workerDir);
	const workerMain = wranglerConfig.main ?? "index.js";
	// Only .env-file vars — the developer's shell environment is filtered out.
	const env = filterLoadedEnv(loadEnv("production", root, ""));
	const voidToken = getToken(root);
	// Project id resolution order: explicit option → VOID_DEPLOY_PROJECT_ID →
	// .void/project.json (best-effort; a missing/corrupt file is ignored).
	let voidProjectId = options.projectId;
	if (!voidProjectId) voidProjectId = process.env.VOID_DEPLOY_PROJECT_ID || void 0;
	if (!voidProjectId) {
		const projectJsonPath = join(root, ".void", "project.json");
		try {
			voidProjectId = JSON.parse(readFileSync(projectJsonPath, "utf-8")).projectId;
		} catch {}
	}
	const voidStaging = isStagingMode(root);
	const bindings = buildPrerenderBindings({
		wranglerVars: wranglerConfig.vars,
		env,
		voidToken,
		voidProjectId,
		voidStaging,
		remoteMode
	});
	// Resolve miniflare relative to the installed wrangler package so the
	// two versions always match.
	const { Miniflare } = await import(pathToFileURL(createRequire(import.meta.resolve("wrangler/package.json")).resolve("miniflare")).href);
	const mfOptions = {
		modules: collectPrerenderWorkerModules(workerDir, workerMain),
		modulesRoot: workerDir,
		compatibilityDate: wranglerCompat.compatibilityDate,
		compatibilityFlags: wranglerCompat.compatibilityFlags ?? [],
		bindings
	};
	// Serve built client assets through the ASSETS service binding by reading
	// them straight from disk; unknown paths get a plain 404.
	if (clientDir) mfOptions.serviceBindings = { [wranglerConfig.assets?.binding ?? "ASSETS"](request) {
		const filePath = join(clientDir, new URL(request.url).pathname);
		try {
			const content = readFileSync(filePath);
			return new Response(content);
		} catch {
			return new Response("Not Found", { status: 404 });
		}
	} };
	// Wire D1/KV/R2 bindings from wrangler.json into Miniflare, persisting to
	// the shared persist directory so prerender sees local dev data.
	const d1 = wranglerConfig.d1_databases;
	if (Array.isArray(d1) && d1.length > 0) {
		const databases = {};
		for (const db of d1) databases[db.binding] = db.database_id;
		mfOptions.d1Databases = databases;
		mfOptions.d1Persist = persistDir;
	}
	const kv = wranglerConfig.kv_namespaces;
	if (Array.isArray(kv) && kv.length > 0) {
		const namespaces = {};
		for (const ns of kv) namespaces[ns.binding] = ns.id;
		mfOptions.kvNamespaces = namespaces;
		mfOptions.kvPersist = persistDir;
	}
	const r2 = wranglerConfig.r2_buckets;
	if (Array.isArray(r2) && r2.length > 0) {
		const buckets = {};
		for (const bucket of r2) buckets[bucket.binding] = bucket.bucket_name;
		mfOptions.r2Buckets = buckets;
		mfOptions.r2Persist = persistDir;
	}
	const mf = new Miniflare(mfOptions);
	const proxyToken = mfOptions.bindings?.__VOID_PROXY_TOKEN;
	return {
		// POST the internal endpoint to ask the worker for its prerender paths.
		async collectPaths() {
			const headers = {};
			if (proxyToken) headers["x-void-internal"] = proxyToken;
			return readPrerenderPathsResponse(await mf.dispatchFetch(`http://localhost${PRERENDER_PATHS_ENDPOINT}`, {
				method: "POST",
				headers
			}));
		},
		async fetchPath(path) {
			return mf.dispatchFetch(`http://localhost${path}`);
		},
		// Same path fetched as a page-data (prefetch JSON) request.
		async fetchPageData(path) {
			return mf.dispatchFetch(`http://localhost${path}`, { headers: {
				Accept: "application/json",
				Purpose: "prefetch",
				"X-VoidPages": "true"
			} });
		},
		async close() {
			await mf.dispose();
		}
	};
}
|
|
629
|
+
/**
 * Collect the prerender path list from the built worker, always disposing
 * the Miniflare runner afterwards.
 *
 * @param {object} options - Same options as `createCloudflarePrerenderRunner`.
 * @returns {Promise<string[]>} Paths reported by the worker.
 */
async function collectPrerenderPaths(options) {
	const runner = await createCloudflarePrerenderRunner(options);
	let paths;
	try {
		paths = await runner.collectPaths();
	} finally {
		await runner.close();
	}
	return paths;
}
|
|
637
|
+
/**
 * Prerender pages by spinning up Miniflare with the built worker.
 * Collects paths through the built worker's internal endpoint and then
 * fetches each page, writing HTML responses to `clientDir`.
 *
 * @param {object} options - Runner options including `clientDir`.
 * @returns {Promise<{paths: string[], rendered: string[], failed: object[]}>}
 */
async function prerenderPages(options) {
	const runner = await createCloudflarePrerenderRunner(options);
	try {
		const paths = await runner.collectPaths();
		// Nothing to render: short-circuit with empty result arrays.
		if (paths.length === 0) {
			return { paths, rendered: [], failed: [] };
		}
		const outcome = await renderPrerenderPaths(
			paths,
			options.clientDir,
			(path) => runner.fetchPath(path),
			(path) => runner.fetchPageData(path)
		);
		return { paths, ...outcome };
	} finally {
		await runner.close();
	}
}
|
|
659
|
+
/**
 * Execute the Node-target prerender flow in a child process. Writes a
 * temporary runner script to `.void/prerender-runner.mjs`, runs it with
 * `node`, parses its JSON stdout, and always deletes the script afterwards.
 *
 * @param {string} root - Project root (also the child's cwd).
 * @param {{mode: "collect" | string, ssrDir: string, clientDir?: string}} config
 *   Passed to the child as a JSON argv argument.
 * @returns {Promise<object>} Parsed JSON result from the child's stdout.
 */
async function runNodePrerenderRunner(root, config) {
	// Load .env-derived vars only (shell env filtered out), then strip any
	// reserved __VOID_* keys a user may have put in their .env files.
	const { sanitized: env, stripped } = stripInternalEnvKeys(filterLoadedEnv(loadEnv("production", root, "")));
	if (stripped.length > 0) voidWarn(`Ignoring internal env keys from .env files in Node prerender runner: ${stripped.join(", ")}. These are reserved for Void runtime use.`);
	const runnerPath = join(root, ".void", "prerender-runner.mjs");
	mkdirSync(dirname(runnerPath), { recursive: true });
	// The script below runs in a child process so the built app's env
	// mutations never touch this (Vite build) process. Note that
	// PRERENDER_PATHS_ENDPOINT is interpolated into the script text here.
	writeFileSync(runnerPath, `
import { pathToFileURL } from "node:url";
import { join, dirname } from "pathe";
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";

const config = JSON.parse(process.argv[2]);
const { mode, ssrDir, clientDir } = config;

if (clientDir) {
  try {
    const manifestPath = join(clientDir, ".vite", "manifest.json");
    globalThis.__VITE_MANIFEST__ = JSON.parse(readFileSync(manifestPath, "utf-8"));
  } catch {
    // Ignore missing manifests so prerender can continue without client assets.
  }
}

const appUrl = pathToFileURL(join(ssrDir, "app.js")).href;
const app = await import(appUrl);
const fetch = app.default?.fetch;
if (!fetch) {
  console.error("[void] No default export with fetch() found in app.js");
  process.exit(1);
}

async function collectPaths() {
  const headers = {};
  if (process.env.__VOID_PROXY_TOKEN) headers["x-void-internal"] = process.env.__VOID_PROXY_TOKEN;
  const response = await fetch(
    new Request("http://localhost${PRERENDER_PATHS_ENDPOINT}", { method: "POST", headers }),
  );
  if (!response.ok) {
    throw new Error("prerender: Failed to collect prerender paths. HTTP " + response.status + ".");
  }
  const payload = await response.json();
  if (!payload || !Array.isArray(payload.paths) || !payload.paths.every((path) => typeof path === "string")) {
    throw new Error("prerender: Failed to collect prerender paths. Response payload is invalid.");
  }
  return payload.paths;
}

async function renderPaths(paths) {
  if (!clientDir) return { rendered: [], failed: [] };
  const result = { rendered: [], failed: [] };

  for (const path of paths) {
    try {
      const response = await fetch(new Request("http://localhost" + path));
      const html = await response.text();
      if (!response.ok) {
        result.failed.push({ path, error: "HTTP " + response.status });
        continue;
      }
      const outputFile = path === "/" ? "index.html" : path.slice(1) + ".html";
      const outputPath = join(clientDir, outputFile);
      mkdirSync(dirname(outputPath), { recursive: true });
      writeFileSync(outputPath, html);
      const pageDataResponse = await fetch(new Request("http://localhost" + path, {
        headers: { "Accept": "application/json", "Purpose": "prefetch", "X-VoidPages": "true" },
      }));
      if (pageDataResponse.headers.get("X-VoidPages")) {
        const pageData = await pageDataResponse.json();
        const pageDataFile = path === "/" ? "_void/pages/index.json" : "_void/pages/" + path.slice(1) + ".json";
        const pageDataPath = join(clientDir, pageDataFile);
        mkdirSync(dirname(pageDataPath), { recursive: true });
        writeFileSync(pageDataPath, JSON.stringify(pageData));
      }
      result.rendered.push(path);
    } catch (err) {
      result.failed.push({ path, error: err instanceof Error ? err.message : String(err) });
    }
  }

  return result;
}

if (mode === "collect") {
  process.stdout.write(JSON.stringify({ paths: await collectPaths() }));
} else {
  const paths = await collectPaths();
  const result = await renderPaths(paths);
  process.stdout.write(JSON.stringify({ paths, ...result }));
}
`);
	try {
		// Run the script with .env vars layered over the parent environment;
		// the JSON result arrives on stdout (capped at 10 MiB by maxBuffer).
		return await new Promise((resolve, reject) => {
			execFile(process.execPath, [runnerPath, JSON.stringify(config)], {
				env: {
					...process.env,
					...env
				},
				cwd: root,
				maxBuffer: 10 * 1024 * 1024
			}, (error, stdout, stderr) => {
				// Forward the child's stderr so build logs stay visible.
				if (stderr) process.stderr.write(stderr);
				if (error) {
					reject(/* @__PURE__ */ new Error(`Prerender child process failed: ${error.message}`));
					return;
				}
				try {
					resolve(JSON.parse(stdout));
				} catch {
					reject(/* @__PURE__ */ new Error(`Failed to parse prerender output: ${stdout}`));
				}
			});
		});
	} finally {
		// Best-effort cleanup of the temporary runner script.
		try {
			unlinkSync(runnerPath);
		} catch {}
	}
}
|
|
776
|
+
/**
 * Collect prerender paths for a Node.js target via the child-process runner.
 *
 * @param {{root: string, ssrDir: string, clientDir?: string}} options
 * @returns {Promise<string[]>} Paths reported by the built app.
 */
async function collectPrerenderPathsNode(options) {
	const { root, ssrDir, clientDir } = options;
	const outcome = await runNodePrerenderRunner(root, {
		mode: "collect",
		ssrDir,
		clientDir
	});
	return outcome.paths;
}
|
|
783
|
+
/**
 * Prerender pages for Node.js targets by spawning a child process.
 *
 * Uses a child process to avoid mutating `process.env` in the parent Vite
 * build process. The child imports the built app, collects prerender paths
 * from the internal endpoint, renders each page, and returns the result.
 *
 * @param {{root: string, ssrDir: string, clientDir?: string}} options
 * @returns {Promise<{paths: string[], rendered: string[], failed: object[]}>}
 */
async function prerenderPagesNode(options) {
	const result = await runNodePrerenderRunner(options.root, {
		mode: "collect-and-render",
		ssrDir: options.ssrDir,
		clientDir: options.clientDir
	});
	// The child cannot use the parent's loggers, so echo its outcome here.
	result.rendered.forEach((path) => voidLog(`prerender \x1b[36m${outputFileForPath(path)}\x1b[39m`));
	result.failed.forEach(({ path, error }) => voidWarn(`prerender ${path} failed: ${error}`));
	return result;
}
|
|
800
|
+
//#endregion
|
|
801
|
+
//#region src/cli/deploy-errors.ts
|
|
802
|
+
// Maximum characters of a response body to embed in a deploy error message.
const BODY_SNIPPET_MAX = 200;
|
|
803
|
+
/**
 * Collapse whitespace in a response body and cap it at BODY_SNIPPET_MAX
 * characters (appending "..." when truncated) for error messages.
 *
 * @param {string} body - Raw response body text.
 * @returns {string} Single-line snippet suitable for an error message.
 */
function trimSnippet(body) {
	const compact = body.replace(/\s+/g, " ").trim();
	return compact.length <= BODY_SNIPPET_MAX ? compact : `${compact.slice(0, BODY_SNIPPET_MAX)}...`;
}
|
|
808
|
+
/**
 * Append a "Deployment: <id>" trailer line when a deployment id is known.
 *
 * @param {string} message - Base error message.
 * @param {string | null | undefined} deploymentId - Optional deployment id.
 * @returns {string} Message, possibly with the trailer line.
 */
function appendDeploymentLine(message, deploymentId) {
	if (!deploymentId) return message;
	return `${message}\nDeployment: ${deploymentId}`;
}
|
|
811
|
+
/**
 * Format a server-emitted `event: 'error'` payload with an optional deployment id.
 *
 * @param {string} message - Error message from the server event.
 * @param {string | null | undefined} deploymentId - Deployment id to append, if known.
 * @returns {string} The message, with a "Deployment: <id>" line when an id is present.
 */
function formatDeployFailureMessage(message, deploymentId) {
	return appendDeploymentLine(message, deploymentId);
}
|
|
817
|
+
/**
 * Format the "stream ended cleanly but no terminal event arrived" case.
 *
 * This is the only call site that should emit the bare "Connection lost during
 * deploy" message — the server closed the NDJSON stream without emitting
 * `done` or `error`, so there is no underlying error to carry.
 *
 * @param {string | null | undefined} deploymentId - Deployment id to append, if known.
 * @returns {string} User-facing connection-lost message.
 */
function formatConnectionLostMessage(deploymentId) {
	return appendDeploymentLine("deploy: Connection lost during deploy.", deploymentId);
}
|
|
827
|
+
/**
 * Convert an exception thrown during the deploy NDJSON stream into a
 * user-facing `Error` with a readable message.
 *
 * Three cases:
 * 1. `PlatformApiError` with 409 + a JSON body carrying `{error, deployment}`
 *    — surface the server-supplied error and prefer the body's deployment id.
 * 2. Any other `PlatformApiError` — surface as `Deploy failed (HTTP <status>): <detail>`
 *    where detail is the parsed `error`/`message` field or a trimmed body snippet.
 * 3. Anything else — preserve the original message under a `Connection lost
 *    during deploy: <original>` prefix so the caller can still tell network,
 *    parse, and transport errors apart.
 *
 * @param {unknown} err - Exception caught while reading the deploy stream.
 * @param {string | null | undefined} deploymentId - Deployment id tracked by the caller.
 * @returns {Error} User-facing error ready to be thrown/reported.
 */
function formatStreamDeployError(err, deploymentId) {
	if (err instanceof PlatformApiError) {
		const parsed = parsePlatformErrorBody(err.body);
		const parsedError = parsed?.error ?? parsed?.message ?? null;
		// Prefer the deployment id embedded in the response body over the one
		// tracked by the caller. (Previously written as
		// `parsed?.deployment ?? null ?? deploymentId`; the middle `?? null`
		// operand was dead — behavior is unchanged.)
		const effectiveDeploymentId = parsed?.deployment ?? deploymentId;
		if (err.status === 409 && parsedError) return new Error(appendDeploymentLine(`deploy: Deploy failed: ${parsedError}`, effectiveDeploymentId));
		// Fall back to a trimmed body snippet when no structured error exists;
		// an empty snippet drops the detail clause entirely.
		const detail = parsedError ?? trimSnippet(err.body);
		const base = detail ? `deploy: Deploy failed (HTTP ${err.status}): ${detail}` : `deploy: Deploy failed (HTTP ${err.status}).`;
		return new Error(appendDeploymentLine(base, effectiveDeploymentId));
	}
	const original = err instanceof Error ? err.message : String(err);
	return new Error(appendDeploymentLine(`deploy: Connection lost during deploy: '${original}'.`, deploymentId));
}
|
|
853
|
+
//#endregion
|
|
854
|
+
//#region src/shared/utils.ts
|
|
855
|
+
// True on Windows (guards backslash→slash path normalization in toSlash).
const isWindows = typeof process !== "undefined" && process.platform === "win32";
|
|
856
|
+
/**
 * Convert every backslash in a path to a forward slash.
 *
 * @param {string} p - Path that may contain Windows separators.
 * @returns {string} POSIX-style path.
 */
function slash(p) {
	return p.split("\\").join("/");
}
|
|
859
|
+
/**
 * Normalize a path to forward slashes, but only on Windows; on POSIX the
 * input is returned untouched (backslashes are legal filename characters there).
 *
 * @param {string} p - Path to normalize.
 * @returns {string} Platform-appropriate normalized path.
 */
function toSlash(p) {
	if (!isWindows) return p;
	return slash(p);
}
|
|
862
|
+
//#endregion
|
|
863
|
+
//#region src/cli/package.ts
|
|
864
|
+
// Module-level UTF-8 TextEncoder, created once and reused (consumers are
// defined later in this module — presumably for sizing/packaging payloads).
const textEncoder = new TextEncoder();
|
|
865
|
+
/**
 * Strip dev-only / internal-only fields from each rule before shipping the
 * manifest. The resulting entries contain only `{ source, destination, status }`.
 *
 * - `origin` is used to build the `X-Void-Routing` trace header during local
 *   dev; the deployed platform doesn't consume it.
 * - `force` is used internally by the CLI/router to split rules into the
 *   `redirectRules` (pre-asset, always-fires) and `fallbackRules` (post-asset,
 *   only when the asset resolver 404s) buckets and to label the trace header
 *   on `node`/`bun`/`deno` targets. The platform dispatch worker decides phase
 *   purely from which array the rule lives in, so shipping `force` is dead weight.
 *
 * Keep in sync with `stripOriginForProd` in `router/compile.ts`, which strips
 * only `origin` because the Node/Bun/Deno-target generated code still reads
 * `force` at runtime to label the dev trace header.
 *
 * @param {object[] | undefined | null} rules - Routing rules, possibly absent.
 * @returns {object[] | undefined | null} Rules without `origin`/`force`, or the
 *   original falsy value unchanged.
 */
function stripDevOnlyRuleFields(rules) {
	if (!rules) return rules;
	return rules.map((rule) => {
		const { origin: _origin, force: _force, ...shipped } = rule;
		return shipped;
	});
}
|
|
885
|
+
/**
 * Layer user-defined `routing.fallbacks` with the SPA app type's implicit
 * `/index.html` fallback. User fallbacks win; the SPA default only applies
 * when no user rule matched.
 *
 * The platform's `not_found_handling: 'single-page-application'` is a global,
 * per-deployment switch, so user fallbacks cannot be evaluated before it.
 * When a SPA app has user fallbacks, we therefore turn the platform-level SPA
 * setting off and append a synthetic catch-all (`/*` -> `/index.html`) AFTER
 * the user's rules; the dispatch worker evaluates rules first-match-wins, so
 * the synthetic rule only catches paths no user rule claimed. If the user
 * already wrote an unconditional `/* -> /index.html` rule, no synthetic rule
 * is added. With no user fallbacks at all, the platform SPA switch is kept
 * (zero worker-side overhead in the common case).
 *
 * @param appType `"spa"`, `"static"`, or any other app type string.
 * @param fallbackRules user-configured fallback rules (may be empty/undefined).
 * @returns `{ assetConfig, fallbackRules }` to merge into the manifest.
 */
function resolveSpaFallbackChain(appType, fallbackRules) {
	const userRules = fallbackRules && fallbackRules.length > 0 ? fallbackRules : undefined;
	if (appType !== "spa") {
		// Non-SPA: static apps get the platform 404 page, everything else "none".
		const handling = appType === "static" ? "404-page" : "none";
		return {
			assetConfig: { not_found_handling: handling },
			fallbackRules: userRules
		};
	}
	if (!userRules) {
		// Common case: let the platform handle the SPA shell natively.
		return {
			assetConfig: { not_found_handling: "single-page-application" },
			fallbackRules: undefined
		};
	}
	const hasExplicitShellRule = userRules.some(
		(rule) => rule.host === undefined && rule.source === "/*" && rule.destination === "/index.html"
	);
	if (hasExplicitShellRule) {
		// User already spelled out the catch-all shell rule — nothing to add.
		return {
			assetConfig: { not_found_handling: "none" },
			fallbackRules: userRules
		};
	}
	// Append the synthetic shell rule last so user rules fire first.
	const syntheticShellRule = {
		source: "/*",
		destination: "/index.html",
		status: 200,
		force: false,
		origin: "spa-default"
	};
	return {
		assetConfig: { not_found_handling: "none" },
		fallbackRules: [...userRules, syntheticShellRule]
	};
}
|
|
929
|
+
// Emit progress / yield to the event loop at most once per this many items.
const PROGRESS_CHUNK = 50;
/**
 * Report progress and yield to the event loop every PROGRESS_CHUNK
 * iterations (and on the final item) so spinner intervals can repaint;
 * otherwise a tight synchronous loop would starve the spinner. No-op when
 * no progress callback is supplied.
 */
async function tickProgress(i, total, onProgress, label) {
	if (!onProgress) return;
	const atChunkBoundary = i % PROGRESS_CHUNK === 0;
	const atEnd = i === total;
	if (!atChunkBoundary && !atEnd) return;
	onProgress(`${label} (${i}/${total})`);
	// setImmediate lets pending timers (the spinner tick) run before resuming.
	await new Promise((resolve) => setImmediate(resolve));
}
|
|
941
|
+
/**
 * Compute a CF-compatible asset hash: BLAKE3 of base64(content) + bare file
 * extension, truncated to the first 32 hex chars. Mirrors Wrangler's
 * `hashFile` algorithm and must match the API's `computeAssetHash` exactly.
 */
function computeAssetHash(content, filePath) {
	const extension = extname(filePath).substring(1);
	const hashInput = textEncoder.encode(content.toString("base64") + extension);
	return Buffer.from(hash(hashInput)).toString("hex").substring(0, 32);
}
|
|
952
|
+
/**
 * Build a filter that excludes `.assetsignore`, `_headers`, `_redirects`,
 * any caller-supplied patterns, and every pattern listed inside a
 * `.assetsignore` file found in `dir`. Mirrors Wrangler's
 * `createAssetsIgnoreFunction` from workers-shared/utils/helpers.ts.
 *
 * @returns `{ isIgnored(relativePath), assetsIgnoreFilePresent }`
 */
function createAssetsIgnoreFunction(dir, extraPatterns = []) {
	const patterns = [
		"/.assetsignore",
		"/_redirects",
		"/_headers",
		...extraPatterns
	];
	const assetsIgnorePath = join(dir, ".assetsignore");
	const assetsIgnoreFilePresent = existsSync(assetsIgnorePath);
	if (assetsIgnoreFilePresent) {
		// One gitignore-style pattern per line; `ignore` tolerates blank lines.
		const fileContents = readFileSync(assetsIgnorePath, "utf-8");
		patterns.push(...fileContents.split("\n"));
	}
	const matcher = ignore().add(patterns);
	const isIgnored = (relativePath) => {
		// `ignore` expects paths without a leading slash.
		const candidate = relativePath.startsWith("/") ? relativePath.slice(1) : relativePath;
		return matcher.test(candidate).ignored;
	};
	return {
		isIgnored,
		assetsIgnoreFilePresent
	};
}
|
|
981
|
+
/**
 * Guard against accidentally uploading `_worker.js` (file or directory) as a
 * static asset. Mirrors Wrangler's `errorOnLegacyPagesWorkerJSAsset`: only
 * throws when no `.assetsignore` file exists — if one is present but does not
 * exclude `_worker.js`, the upload is considered intentional.
 *
 * @throws {Error} when `_worker.js` would be uploaded without a .assetsignore.
 */
function checkWorkerJsAsset(relativePath, assetsIgnoreFilePresent) {
	if (assetsIgnoreFilePresent) return;
	const name = relativePath.startsWith("/") ? relativePath.slice(1) : relativePath;
	const isWorkerJsFile = name === "_worker.js";
	if (!isWorkerJsFile && !name.startsWith("_worker.js/")) return;
	const kind = isWorkerJsFile ? "file" : "directory";
	throw new Error(`deploy: Refusing to upload _worker.js ${kind} as an asset. This could expose private server-side code to the public Internet. If this is unintended, remove it or add a .assetsignore file containing "_worker.js". If intentional, add an empty .assetsignore file to suppress this error.`);
}
|
|
992
|
+
/**
 * Walk `dir`, hash every non-ignored file, and return both the manifest
 * (path -> { hash, size }, used for the preflight request) and the raw file
 * map (path -> Buffer, used for selective packaging). A missing directory
 * yields empty results rather than an error.
 */
async function collectAndHashAssets(dir, onProgress, opts = {}) {
	const assetManifest = {};
	const assetFiles = /* @__PURE__ */ new Map();
	try {
		const { isIgnored, assetsIgnoreFilePresent } = createAssetsIgnoreFunction(dir, opts.ignorePatterns);
		const files = collectFiles(dir);
		let processed = 0;
		for (const filePath of files) {
			processed += 1;
			await tickProgress(processed, files.length, onProgress, "Hashing assets");
			// Manifest keys are rooted, forward-slash paths ("/sub/a.js").
			const relativePath = "/" + toSlash(relative(dir, filePath));
			if (isIgnored(relativePath)) continue;
			checkWorkerJsAsset(relativePath, assetsIgnoreFilePresent);
			const content = readFileSync(filePath);
			assetManifest[relativePath] = {
				hash: computeAssetHash(content, filePath),
				size: content.length
			};
			assetFiles.set(relativePath, content);
		}
	} catch (err) {
		// A nonexistent assets directory is a valid "no assets" state.
		const directoryMissing = err instanceof Error && "code" in err && err.code === "ENOENT";
		if (!directoryMissing) throw err;
	}
	return {
		assetManifest,
		assetFiles
	};
}
|
|
1024
|
+
/**
 * Project discovered WebSocket routes into the manifest shape
 * (`{ routes, classes }`), keeping only the fields the platform consumes.
 * Returns `undefined` when there are no routes.
 */
function toWebSocketManifest(routes) {
	if (!routes || routes.length === 0) return;
	const manifestRoutes = [];
	const classes = [];
	for (const { pattern, filePath, className, mode, hasAuthGate } of routes) {
		manifestRoutes.push({
			pattern,
			filePath,
			className,
			mode,
			hasAuthGate
		});
		classes.push(className);
	}
	return {
		routes: manifestRoutes,
		classes
	};
}
|
|
1037
|
+
/**
 * Project sandbox package options into the manifest shape. `instanceType`
 * is included only when truthy; `maxInstances` whenever it is non-nullish
 * (so an explicit 0 is preserved).
 */
function toSandboxManifest(sandbox) {
	const manifest = {
		bindingName: sandbox.binding,
		className: sandbox.className,
		containerName: sandbox.containerName,
		image: sandbox.platformImage
	};
	if (sandbox.instanceType) manifest.instanceType = sandbox.instanceType;
	if (sandbox.maxInstances != null) manifest.maxInstances = sandbox.maxInstances;
	return manifest;
}
|
|
1047
|
+
/**
 * Package a Void build (worker + client assets + manifest) into a FormData
 * ready for upload to the deploy API.
 *
 * Parts appended, in order: `manifest` (JSON), `migration:<name>` blobs,
 * `worker:<path>` module blobs, `asset:<path>` blobs, and optionally
 * `preflight` when `neededAssets` is provided (selective re-upload mode).
 *
 * @param distDir       build output root; worker lives in `distDir/workerDirName`,
 *                      assets in `distDir/client`.
 * @param bindings      feature flags (needsD1/needsKV/needsR2/needsAI/needsAuth/needsSandbox).
 * @param neededAssets  when set, only assets whose rooted path is in this Set are
 *                      packaged, and the form is marked as a preflight follow-up.
 * @param onProgress    optional progress-message callback.
 * @returns the populated FormData.
 */
async function packageBuild(distDir, workerDirName, bindings, migrations, schedules, ssr, framework, revalidate, vars, queues, prerender, assetConfig, options, neededAssets, hashedAssetsPrefix, headerRules, redirectRules, fallbackRules, dialect, webSockets, onProgress) {
	const formData = new FormData();
	const manifest = {
		version: 3,
		bindings: {}
	};
	if (bindings.needsAuth) manifest.auth = true;
	const hasMigrations = migrations && migrations.length > 0;
	// Any DB-touching feature needs a database binding: Hyperdrive for
	// Postgres, otherwise D1 (binding name overridable via options).
	if (bindings.needsD1 || bindings.needsAuth || hasMigrations) if (dialect === "postgresql") manifest.bindings.hyperdrive = ["HYPERDRIVE"];
	else manifest.bindings.d1 = [options?.bindingNames?.d1 ?? "DB"];
	if (bindings.needsKV) manifest.bindings.kv = [options?.bindingNames?.kv ?? "KV"];
	if (bindings.needsR2) manifest.bindings.r2 = [options?.bindingNames?.r2 ?? "STORAGE"];
	if (bindings.needsAI) manifest.bindings.ai = true;
	if (bindings.needsSandbox) {
		if (!options?.sandbox) throw new Error("deploy: Sandbox bindings require sandbox package options.");
		manifest.sandbox = toSandboxManifest({
			...options.sandbox,
			binding: options.bindingNames?.sandbox ?? options.sandbox.binding
		});
	}
	if (migrations && migrations.length > 0) {
		manifest.migrations = {
			dialect: dialect ?? "sqlite",
			pending: migrations.length
		};
		onProgress?.(`Packaging ${migrations.length} migration${migrations.length === 1 ? "" : "s"}...`);
		for (const m of migrations) formData.append(`migration:${m.name}`, new Blob([m.sql]), m.name);
	}
	if (schedules && schedules.length > 0) manifest.schedules = schedules;
	if (queues && queues.length > 0) manifest.queues = queues;
	if (ssr) manifest.ssr = true;
	if (framework) manifest.framework = framework;
	manifest.type = framework ? "framework" : "void";
	// revalidate of 0 is treated the same as unset.
	if (revalidate != null && revalidate !== 0) manifest.revalidate = revalidate;
	if (options?.revalidateQueryAllowlist && Object.keys(options.revalidateQueryAllowlist).length > 0) manifest.revalidateQueryAllowlist = options.revalidateQueryAllowlist;
	if (prerender && prerender.length > 0) manifest.prerender = prerender;
	let resolvedAssetConfig = assetConfig;
	let resolvedFallbackRules = fallbackRules;
	if (assetConfig?.not_found_handling === "single-page-application") {
		// SPA + user fallbacks: layer in the synthetic /index.html catch-all
		// (see resolveSpaFallbackChain) and override not_found_handling.
		const chain = resolveSpaFallbackChain("spa", fallbackRules);
		resolvedAssetConfig = {
			...assetConfig,
			...chain.assetConfig
		};
		resolvedFallbackRules = chain.fallbackRules;
	} else if (fallbackRules && fallbackRules.length > 0) {
		// Non-SPA with fallbacks configured: honored, but warn that no SPA
		// shell fallback is synthesized.
		const msg = "routing.fallbacks is set but appType is not \"spa\"; fallbacks will be applied, but no \"/* -> /index.html\" SPA shell fallback is synthesised — unmatched paths fall through to the platform 404.";
		onProgress?.(msg);
		console.warn(`[void] ${msg}`);
	}
	if (resolvedAssetConfig) manifest.assetConfig = resolvedAssetConfig;
	if (vars && Object.keys(vars).length > 0) manifest.vars = vars;
	if (options?.workerMain) manifest.workerMain = options.workerMain;
	if (options?.compatibilityDate) manifest.compatibilityDate = options.compatibilityDate;
	if (options?.compatibilityFlags && options.compatibilityFlags.length > 0) manifest.compatibilityFlags = options.compatibilityFlags;
	if (hashedAssetsPrefix) manifest.hashedAssetsPrefix = hashedAssetsPrefix;
	if (headerRules && headerRules.length > 0) manifest.headerRules = headerRules;
	// origin/force are dev-only; strip before shipping (see stripDevOnlyRuleFields).
	if (redirectRules && redirectRules.length > 0) manifest.redirectRules = stripDevOnlyRuleFields(redirectRules);
	if (resolvedFallbackRules && resolvedFallbackRules.length > 0) manifest.fallbackRules = stripDevOnlyRuleFields(resolvedFallbackRules);
	const websocketManifest = toWebSocketManifest(webSockets);
	if (websocketManifest) manifest.websocket = websocketManifest;
	formData.append("manifest", JSON.stringify(manifest));
	onProgress?.("Packaging worker files...");
	const workerDir = join(distDir, workerDirName);
	const workerFiles = collectFiles(workerDir);
	for (const filePath of workerFiles) {
		const relativePath = toSlash(relative(workerDir, filePath));
		// Skip build metadata / non-module extensions (see isWorkerModule).
		if (!isWorkerModule(relativePath)) continue;
		const content = readFileSync(filePath);
		formData.append(`worker:${relativePath}`, new Blob([content]), relativePath);
	}
	const clientDir = join(distDir, "client");
	try {
		const { isIgnored, assetsIgnoreFilePresent } = createAssetsIgnoreFunction(clientDir);
		const clientFiles = collectFiles(clientDir);
		let processed = 0;
		for (const filePath of clientFiles) {
			processed++;
			await tickProgress(processed, clientFiles.length, onProgress, "Packaging assets");
			const relativePath = toSlash(relative(clientDir, filePath));
			if (isIgnored("/" + relativePath)) continue;
			checkWorkerJsAsset(relativePath, assetsIgnoreFilePresent);
			// In preflight-followup mode only re-upload what the server asked for.
			if (neededAssets && !neededAssets.has("/" + relativePath)) continue;
			const content = readFileSync(filePath);
			formData.append(`asset:${relativePath}`, new Blob([content]), relativePath);
		}
	} catch (err) {
		// A missing client directory means "no assets" — not an error.
		if (err instanceof Error && "code" in err && err.code === "ENOENT") {} else throw err;
	}
	if (neededAssets) formData.append("preflight", "true");
	return formData;
}
|
|
1139
|
+
/**
 * Package a static build (no worker) into a FormData ready for upload.
 *
 * Appends the JSON `manifest` part, one `asset:<path>` blob per non-ignored
 * file under `outputDir`, and a `preflight` marker when `neededAssets` is
 * provided (selective re-upload mode).
 *
 * @param outputDir    directory containing the built static site.
 * @param appType      `"spa"`, `"static"`, etc. — drives SPA fallback layering
 *                     via resolveSpaFallbackChain.
 * @param neededAssets when set, only assets whose rooted path is in this Set
 *                     are packaged.
 * @param onProgress   optional progress-message callback.
 * @returns the populated FormData.
 */
async function packageStaticBuild(outputDir, appType, neededAssets, hashedAssetsPrefix, headerRules, redirectRules, fallbackRules, onProgress) {
	const formData = new FormData();
	// Layer user fallbacks with the SPA default (if appType is "spa").
	const { assetConfig, fallbackRules: resolvedFallbackRules } = resolveSpaFallbackChain(appType, fallbackRules);
	const manifest = {
		version: 3,
		bindings: {},
		type: appType,
		assetConfig
	};
	if (hashedAssetsPrefix) manifest.hashedAssetsPrefix = hashedAssetsPrefix;
	if (headerRules && headerRules.length > 0) manifest.headerRules = headerRules;
	// origin/force are dev-only fields; strip before shipping.
	if (redirectRules && redirectRules.length > 0) manifest.redirectRules = stripDevOnlyRuleFields(redirectRules);
	if (resolvedFallbackRules && resolvedFallbackRules.length > 0) manifest.fallbackRules = stripDevOnlyRuleFields(resolvedFallbackRules);
	formData.append("manifest", JSON.stringify(manifest));
	const { isIgnored, assetsIgnoreFilePresent } = createAssetsIgnoreFunction(outputDir);
	const files = collectFiles(outputDir);
	let processed = 0;
	for (const filePath of files) {
		processed++;
		await tickProgress(processed, files.length, onProgress, "Packaging assets");
		const relativePath = toSlash(relative(outputDir, filePath));
		if (isIgnored("/" + relativePath)) continue;
		checkWorkerJsAsset(relativePath, assetsIgnoreFilePresent);
		// In preflight-followup mode only re-upload what the server asked for.
		if (neededAssets && !neededAssets.has("/" + relativePath)) continue;
		const content = readFileSync(filePath);
		formData.append(`asset:${relativePath}`, new Blob([content]), relativePath);
	}
	if (neededAssets) formData.append("preflight", "true");
	return formData;
}
|
|
1172
|
+
/**
 * Package a framework build (Class B/C) into a FormData ready for upload.
 *
 * Unlike `packageBuild`, the worker and assets directories are absolute paths
 * (resolved from the project root + preset), and the worker directory may be
 * inside the assets directory (SvelteKit, Astro). Worker files are excluded
 * from asset collection when the directories overlap.
 *
 * @param opts single options object: bindings, migrations, schedules, queues,
 *             dialect, vars, prerender, assetConfig, frameworkName, workerMain,
 *             workerDir/assetsDir (absolute), packageOptions, neededAssets,
 *             rule arrays, webSockets, onProgress.
 * @returns the populated FormData.
 */
async function packageFrameworkBuild(opts) {
	const formData = new FormData();
	const po = opts.packageOptions;
	const manifest = {
		version: 3,
		bindings: {}
	};
	const hasMigrations = opts.migrations && opts.migrations.length > 0;
	// Any DB-touching feature needs a database binding: Hyperdrive for
	// Postgres, otherwise D1 (binding name overridable via packageOptions).
	if (opts.bindings.needsD1 || opts.bindings.needsAuth || hasMigrations) if (opts.dialect === "postgresql") manifest.bindings.hyperdrive = ["HYPERDRIVE"];
	else manifest.bindings.d1 = [po?.bindingNames?.d1 ?? "DB"];
	if (opts.bindings.needsKV) manifest.bindings.kv = [po?.bindingNames?.kv ?? "KV"];
	if (opts.bindings.needsR2) manifest.bindings.r2 = [po?.bindingNames?.r2 ?? "STORAGE"];
	if (opts.bindings.needsAI) manifest.bindings.ai = true;
	if (opts.migrations && opts.migrations.length > 0) {
		manifest.migrations = {
			dialect: opts.dialect ?? "sqlite",
			pending: opts.migrations.length
		};
		opts.onProgress?.(`Packaging ${opts.migrations.length} migration${opts.migrations.length === 1 ? "" : "s"}...`);
		for (const m of opts.migrations) formData.append(`migration:${m.name}`, new Blob([m.sql]), m.name);
	}
	if (opts.schedules && opts.schedules.length > 0) manifest.schedules = opts.schedules;
	if (opts.queues && opts.queues.length > 0) manifest.queues = opts.queues;
	// revalidate of 0 is treated the same as unset.
	if (opts.revalidate != null && opts.revalidate !== 0) manifest.revalidate = opts.revalidate;
	if (opts.revalidateQueryAllowlist && Object.keys(opts.revalidateQueryAllowlist).length > 0) manifest.revalidateQueryAllowlist = opts.revalidateQueryAllowlist;
	if (opts.prerender && opts.prerender.length > 0) manifest.prerender = opts.prerender;
	if (opts.assetConfig) manifest.assetConfig = opts.assetConfig;
	if (opts.vars && Object.keys(opts.vars).length > 0) manifest.vars = opts.vars;
	// Framework builds are always SSR deployments of type "framework".
	manifest.ssr = true;
	manifest.framework = opts.frameworkName;
	manifest.type = "framework";
	manifest.workerMain = po?.workerMain ?? opts.workerMain;
	if (po?.compatibilityDate) manifest.compatibilityDate = po.compatibilityDate;
	if (po?.compatibilityFlags && po.compatibilityFlags.length > 0) manifest.compatibilityFlags = po.compatibilityFlags;
	if (opts.hashedAssetsPrefix) manifest.hashedAssetsPrefix = opts.hashedAssetsPrefix;
	if (opts.headerRules && opts.headerRules.length > 0) manifest.headerRules = opts.headerRules;
	// origin/force are dev-only fields; strip before shipping.
	if (opts.redirectRules && opts.redirectRules.length > 0) manifest.redirectRules = stripDevOnlyRuleFields(opts.redirectRules);
	if (opts.fallbackRules && opts.fallbackRules.length > 0) manifest.fallbackRules = stripDevOnlyRuleFields(opts.fallbackRules);
	const websocketManifest = toWebSocketManifest(opts.webSockets);
	if (websocketManifest) manifest.websocket = websocketManifest;
	formData.append("manifest", JSON.stringify(manifest));
	// Detect worker-inside-assets layouts (SvelteKit/Astro) by path prefix.
	const workerDirNorm = opts.workerDir.replace(/\\/g, "/");
	const assetsDirNorm = opts.assetsDir.replace(/\\/g, "/");
	const isOverlap = workerDirNorm.startsWith(assetsDirNorm + "/") || workerDirNorm === assetsDirNorm;
	opts.onProgress?.("Packaging worker files...");
	const workerFiles = collectFiles(opts.workerDir);
	for (const filePath of workerFiles) {
		const relativePath = toSlash(relative(opts.workerDir, filePath));
		if (!isWorkerModule(relativePath)) continue;
		const content = readFileSync(filePath);
		formData.append(`worker:${relativePath}`, new Blob([content]), relativePath);
	}
	try {
		const { isIgnored, assetsIgnoreFilePresent } = createAssetsIgnoreFunction(opts.assetsDir, opts.assetIgnorePatterns);
		const assetFiles = collectFiles(opts.assetsDir);
		let processed = 0;
		for (const filePath of assetFiles) {
			processed++;
			await tickProgress(processed, assetFiles.length, opts.onProgress, "Packaging assets");
			// Don't double-package worker files that live inside the assets dir.
			if (isOverlap && filePath.replace(/\\/g, "/").startsWith(workerDirNorm)) continue;
			const relativePath = toSlash(relative(opts.assetsDir, filePath));
			if (isIgnored("/" + relativePath)) continue;
			checkWorkerJsAsset(relativePath, assetsIgnoreFilePresent);
			// In preflight-followup mode only re-upload what the server asked for.
			if (opts.neededAssets && !opts.neededAssets.has("/" + relativePath)) continue;
			const content = readFileSync(filePath);
			formData.append(`asset:${relativePath}`, new Blob([content]), relativePath);
		}
	} catch (err) {
		// A missing assets directory means "no assets" — not an error.
		if (err instanceof Error && "code" in err && err.code === "ENOENT") {} else throw err;
	}
	if (opts.neededAssets) formData.append("preflight", "true");
	return formData;
}
|
|
1253
|
+
/**
 * Decide whether a relative path should be packaged as a worker module.
 * Excludes Vite build metadata (`.vite/` anywhere in the path) and wrangler
 * config files, then accepts only extensions CF Workers can load as modules
 * (js/mjs/cjs/wasm/map/txt).
 */
function isWorkerModule(relativePath) {
	const normalized = relativePath.replace(/\\/g, "/");
	if (normalized.startsWith(".vite/") || normalized.includes("/.vite/")) return false;
	if (normalized === "wrangler.json") return false;
	if (normalized === "wrangler.jsonc") return false;
	return /\.(?:js|mjs|cjs|wasm|map|txt)$/.test(normalized);
}
|
|
1263
|
+
/**
 * Recursively collect every file path under `dir` (depth-first, preserving
 * readdir order, directories expanded in place).
 */
function collectFiles(dir) {
	return readdirSync(dir, { withFileTypes: true }).flatMap((entry) => {
		const entryPath = join(dir, entry.name);
		return entry.isDirectory() ? collectFiles(entryPath) : [entryPath];
	});
}
|
|
1276
|
+
//#endregion
|
|
1277
|
+
//#region src/cli/wrapper.ts
|
|
1278
|
+
// The bundler's entry must surface as index.js, index.mjs, or index.cjs.
const BUNDLED_ENTRY_RE = /^index\.(?:m?js|cjs)$/;
/**
 * Pick the bundled worker entry out of a bundle's output file list,
 * throwing a descriptive error when no recognized entry exists.
 */
function selectBundledEntryFilename(outFiles) {
	for (const file of outFiles) {
		if (BUNDLED_ENTRY_RE.test(file)) return file;
	}
	throw new Error(`deploy: Bundled worker entry not found. Expected index.js, index.mjs, or index.cjs in bundle output. Found: ${outFiles.join(", ") || "(none)"}`);
}
|
|
1284
|
+
/**
 * Report whether the deploy needs a generated wrapper entry: any cron,
 * queue, or pending-migration feature requires one.
 */
function needsWrapper(features) {
	if (features.crons.length > 0) return true;
	if (features.queues.length > 0) return true;
	return Boolean(features.hasMigrations);
}
|
|
1290
|
+
/**
 * Generate a wrapper entry in-place in the worker directory.
 *
 * - Renames the original entry (e.g. `index.js` -> `__original.js`)
 * - Compiles cron/queue handlers via a small Vite library build
 * - Copies the prebuilt migration handler module when migrations exist
 * - Writes a wrapper module under the original entry name that imports
 *   the renamed original + compiled handlers
 *
 * The original worker code is never re-bundled.
 *
 * @param workerDir  directory containing the built worker.
 * @param workerMain filename of the worker entry inside workerDir.
 * @param root       project root (used for `.void/` scratch space and db/migrations).
 * @param features   { crons, queues, hasMigrations, migrationDialect }.
 */
async function generateWrapper(workerDir, workerMain, root, features) {
	// Preserve the entry's extension so module resolution keeps working.
	const renamedName = `__original${extname(workerMain) || ".js"}`;
	renameSync(join(workerDir, workerMain), join(workerDir, renamedName));
	let handlerFilename = "__void_handlers.mjs";
	if (features.crons.length > 0 || features.queues.length > 0) {
		const tmpDir = join(root, ".void", "handlers");
		mkdirSync(tmpDir, { recursive: true });
		const handlersSource = generateHandlersSource(root, features);
		const handlersEntry = join(tmpDir, "handlers.ts");
		writeFileSync(handlersEntry, handlersSource);
		const outDir = join(tmpDir, "out");
		// Small library-mode Vite build; runtime-provided modules stay external.
		await build({
			root: tmpDir,
			logLevel: "silent",
			build: {
				outDir,
				emptyOutDir: true,
				lib: {
					entry: handlersEntry,
					formats: ["es"],
					fileName: "__void_handlers"
				},
				rollupOptions: { external: [/^cloudflare:/, /^node:/] },
				minify: false,
				target: "esnext"
			}
		});
		// The emitted filename may carry an extension Vite chose; fall back to
		// the default name if nothing matching is found.
		handlerFilename = readdirSync(outDir).filter((f) => f.startsWith("__void_handlers"))[0] ?? "__void_handlers.mjs";
		copyFileSync(join(outDir, handlerFilename), join(workerDir, handlerFilename));
	}
	let migrations;
	if (features.hasMigrations) {
		// Lazily load the sibling chunk's collectMigrations (bundler indirection
		// via `.then((n) => n.n)` — presumably the chunk's namespace re-export).
		const { collectMigrations } = await import("./collect-CjeZgz5D.mjs").then((n) => n.n);
		migrations = collectMigrations(join(root, "db", "migrations"));
		const handlerFilename_ = features.migrationDialect === "postgresql" ? "migration-handler-pg.mjs" : "migration-handler.mjs";
		// Probe the few locations the prebuilt handler can live in, depending on
		// whether we run from dist or from the source tree.
		let handlerPath = join(import.meta.dirname, "runtime", handlerFilename_);
		if (!existsSync(handlerPath)) handlerPath = join(import.meta.dirname, "..", "runtime", handlerFilename_);
		if (!existsSync(handlerPath)) handlerPath = join(import.meta.dirname, "..", "..", "dist", "runtime", handlerFilename_);
		if (!existsSync(handlerPath)) throw new Error(`Migration handler not found: ${handlerFilename_}. Run "pnpm -C packages/void build" first.`);
		copyFileSync(handlerPath, join(workerDir, "__void_migration_handler.mjs"));
	}
	// Finally overwrite the original entry name with the generated wrapper.
	const wrapperSource = generateWrapperSource(renamedName, handlerFilename, features, migrations);
	writeFileSync(join(workerDir, workerMain), wrapperSource);
}
|
|
1344
|
+
/**
 * Bundle a worker entry into a self-contained module under
 * `<root>/.void/bundle/out`.
 *
 * Used for frameworks whose build output has unbundled bare imports
 * (e.g. SvelteKit's `_worker.js` imports from `@sveltejs/kit`).
 *
 * A banner is prepended that installs a no-op `globalThis.caches` shim,
 * so code probing the Cache API at module scope does not crash in
 * environments where it is absent.
 *
 * @param root  project root (scratch space is created under `.void/bundle`).
 * @param entry absolute path of the worker entry to bundle.
 * @returns `{ entryFilename, outputDir }` for the bundled output.
 */
async function bundleEntry(root, entry) {
	const tmpDir = join(root, ".void", "bundle");
	mkdirSync(tmpDir, { recursive: true });
	const outDir = join(tmpDir, "out");
	// Library-mode Vite build; runtime-provided modules stay external.
	await build({
		root: tmpDir,
		logLevel: "silent",
		build: {
			outDir,
			emptyOutDir: true,
			lib: {
				entry,
				formats: ["es"],
				fileName: "index"
			},
			rollupOptions: {
				external: [/^cloudflare:/, /^node:/],
				output: { banner: [
					"const __noopCache = {",
					"  match() { return Promise.resolve(undefined); },",
					"  put() { return Promise.resolve(); },",
					"  delete() { return Promise.resolve(false); },",
					"};",
					"globalThis.caches = { default: __noopCache, open() { return Promise.resolve(__noopCache); } };"
				].join("\n") }
			},
			minify: false,
			target: "esnext"
		}
	});
	return {
		entryFilename: selectBundledEntryFilename(readdirSync(outDir)),
		outputDir: outDir
	};
}
|
|
1385
|
+
/**
 * Clean up generated wrapper/bundle files by wiping `<root>/.void`, while
 * preserving `dev-trigger-token` across the wipe so subsequent `void dev`
 * sessions keep the same UUID — invalidating it on every deploy would break
 * any saved curl scripts that exercise `/__void/{scheduled,queue}`.
 */
function cleanupWrapper(root) {
	const voidDir = join(root, ".void");
	const tokenFile = join(voidDir, "dev-trigger-token");
	let savedToken;
	try {
		savedToken = readFileSync(tokenFile, "utf-8");
	} catch {
		// Token missing or unreadable — nothing to preserve.
	}
	rmSync(voidDir, {
		recursive: true,
		force: true
	});
	if (savedToken === undefined) return;
	mkdirSync(voidDir, { recursive: true });
	writeFileSync(tokenFile, savedToken);
}
|
|
1407
|
+
/**
 * Build the source text of the generated wrapper worker module.
 *
 * The wrapper re-exports the user's worker (`renamedEntry`) and layers on
 * platform features: a `/__void/migrate` endpoint when migrations exist,
 * a `/__void/scheduled` endpoint plus a `scheduled` handler when crons
 * exist, and a `queue` handler when queues exist.
 *
 * @param {string} renamedEntry - Filename of the user's original worker entry.
 * @param {string} handlersModule - Filename of the generated handlers module.
 * @param {object} features - Detected features: `crons`, `queues`, `migrationDialect`.
 * @param {Array|undefined} migrations - Validated migrations, if any.
 * @returns {string} Complete ESM source for the wrapper module.
 */
function generateWrapperSource(renamedEntry, handlersModule, features, migrations) {
  const lines = [];
  lines.push(`import original from "./${renamedEntry}";`);
  // Only import the handlers the project actually needs.
  if (features.crons.length > 0) lines.push(`import { handleScheduled } from "./${handlersModule}";`);
  if (features.queues.length > 0) lines.push(`import { handleQueue } from "./${handlersModule}";`);
  if (migrations && migrations.length > 0) {
    lines.push(`import { createMigrationHandler } from "./__void_migration_handler.mjs";`);
    // Migration list and dialect are inlined as constants into the wrapper.
    lines.push(`const __migrations = ${JSON.stringify(migrations)};`);
    lines.push(`const __migrationDialect = ${JSON.stringify(features.migrationDialect ?? "sqlite")};`);
  }
  lines.push("");
  // A fetch wrapper is only emitted when internal POST endpoints are needed
  // (migrations and/or cron triggering); queues never intercept fetch.
  const needsFetchWrap = features.crons.length > 0 || migrations && migrations.length > 0;
  if (needsFetchWrap) {
    lines.push("async function wrappedFetch(request, env, ctx) {");
    lines.push(" const url = new URL(request.url);");
    if (migrations && migrations.length > 0) {
      // Internal migration endpoint, invoked by the platform at deploy time.
      lines.push(" if (request.method === \"POST\" && url.pathname === \"/__void/migrate\") {");
      lines.push(" const handler = createMigrationHandler(env, __migrations, __migrationDialect);");
      lines.push(" return handler(request);");
      lines.push(" }");
    }
    if (features.crons.length > 0) {
      // Internal cron-trigger endpoint, guarded by a shared-secret header
      // when a secret is configured in the environment.
      lines.push(" if (request.method === \"POST\" && url.pathname === \"/__void/scheduled\") {");
      lines.push(" const __token = request.headers.get(\"x-void-internal\");");
      lines.push(" const __expected = env?.__VOID_PROXY_TOKEN || env?.CRON_SECRET;");
      lines.push(" if (__expected && __token !== __expected) {");
      lines.push(" return new Response(JSON.stringify({ error: 'unauthorized' }), {");
      lines.push(" status: 401,");
      lines.push(" headers: { \"Content-Type\": \"application/json\" },");
      lines.push(" });");
      lines.push(" }");
      lines.push(" try {");
      // Reconstruct a ScheduledController-like object from the POST body.
      lines.push(" const { cron, scheduledTime } = await request.json();");
      lines.push(" const controller = { cron, scheduledTime, noRetry() {} };");
      lines.push(" await handleScheduled(controller, env, ctx);");
      lines.push(" return new Response(JSON.stringify({ ok: true }), {");
      lines.push(" headers: { \"Content-Type\": \"application/json\" },");
      lines.push(" });");
      lines.push(" } catch (err) {");
      lines.push(" return new Response(JSON.stringify({ error: String(err) }), {");
      lines.push(" status: 500,");
      lines.push(" headers: { \"Content-Type\": \"application/json\" },");
      lines.push(" });");
      lines.push(" }");
      lines.push(" }");
    }
    // Everything else falls through to the user's original fetch handler.
    lines.push(" return original.fetch(request, env, ctx);");
    lines.push("}");
    lines.push("");
  }
  // Export surface: fetch (wrapped or passthrough) plus optional
  // scheduled/queue handlers depending on detected features.
  lines.push("export default {");
  if (needsFetchWrap) lines.push(" fetch: wrappedFetch,");
  else lines.push(" fetch: original.fetch,");
  if (features.crons.length > 0) lines.push(" scheduled: handleScheduled,");
  if (features.queues.length > 0) lines.push(" queue: handleQueue,");
  lines.push("};");
  return lines.join("\n");
}
|
|
1465
|
+
/**
 * Build the source text of the generated handlers module, containing
 * `handleScheduled` (dispatch by cron expression) and/or `handleQueue`
 * (dispatch by queue name) for the project's detected crons and queues.
 *
 * @param {string} root - Project root; cron/queue handler files are resolved
 *   from `<root>/crons` and `<root>/queues`.
 * @param {object} features - Detected features with `crons` and `queues` arrays.
 * @returns {string} ESM source for the handlers module (may be empty).
 */
function generateHandlersSource(root, features) {
  const lines = [];
  if (features.crons.length > 0) {
    // One import per cron handler file, indexed to keep names unique.
    for (let i = 0; i < features.crons.length; i++) {
      const absPath = join(root, "crons", features.crons[i].filePath);
      lines.push(`import cronHandler${i} from "${absPath}";`);
    }
    lines.push("");
    lines.push("export async function handleScheduled(controller, env, ctx) {");
    // Dispatch on the triggering cron expression; a handler file may own
    // several expressions, so each gets its own `case` falling into the
    // same invocation block.
    lines.push(" switch (controller.cron) {");
    for (let i = 0; i < features.crons.length; i++) {
      for (const expr of features.crons[i].crons) lines.push(` case ${JSON.stringify(expr)}:`);
      // Support both a plain function export and a default-export object.
      lines.push(` if (typeof cronHandler${i} === "function") {`);
      lines.push(` await cronHandler${i}(controller, env, ctx);`);
      lines.push(` } else if (cronHandler${i} && typeof cronHandler${i}.default === "function") {`);
      lines.push(` await cronHandler${i}.default(controller, env, ctx);`);
      lines.push(` }`);
      lines.push(` return;`);
    }
    lines.push(" }");
    lines.push("}");
  }
  if (features.queues.length > 0) {
    lines.push("");
    for (let i = 0; i < features.queues.length; i++) {
      const absPath = join(root, "queues", features.queues[i].filePath);
      lines.push(`import queueHandler${i} from "${absPath}";`);
    }
    lines.push("");
    lines.push("export async function handleQueue(batch, env, ctx) {");
    lines.push(" const queueSuffix = batch.queue;");
    // Normalize messages to a plain-data payload (timestamps as ISO strings).
    lines.push(" const messages = [...batch.messages].map(m => ({");
    lines.push(" id: m.id,");
    lines.push(" timestamp: m.timestamp.toISOString(),");
    lines.push(" body: m.body,");
    lines.push(" attempts: m.attempts,");
    lines.push(" }));");
    lines.push(" const payload = { queue: queueSuffix, messages };");
    for (let i = 0; i < features.queues.length; i++) {
      const name = features.queues[i].name;
      lines.push("");
      // Match either the bare queue name or a platform-prefixed variant
      // ("<prefix>-<name>") so deploy-time renaming still routes correctly.
      lines.push(` if (queueSuffix === ${JSON.stringify(name)} || queueSuffix.endsWith("-" + ${JSON.stringify(name)})) {`);
      lines.push(` if (typeof queueHandler${i} === "function") {`);
      lines.push(` await queueHandler${i}(payload, env);`);
      lines.push(` } else if (queueHandler${i} && typeof queueHandler${i}.default === "function") {`);
      lines.push(` await queueHandler${i}.default(payload, env);`);
      lines.push(` }`);
      lines.push(` return;`);
      lines.push(` }`);
    }
    lines.push("}");
  }
  return lines.join("\n");
}
|
|
1519
|
+
//#endregion
//#region src/cli/deploy.ts
// Bundler-generated lazy export map for the deploy module.
var deploy_exports = /* @__PURE__ */ __exportAll({
  applyDeployEvent: () => applyDeployEvent,
  hasDeploySchemaDrift: () => hasDeploySchemaDrift,
  promptDeployProjectConfig: () => promptDeployProjectConfig,
  resolveAssetConfig: () => resolveAssetConfig,
  resolveDeployWranglerCompat: () => resolveDeployWranglerCompat,
  resolveDistDir: () => resolveDistDir,
  resolveStaticBuildCommand: () => resolveStaticBuildCommand,
  resolveWorkerDirName: () => resolveWorkerDirName,
  runDeploy: () => runDeploy,
  warnRedundantForceOn3xx: () => warnRedundantForceOn3xx
});
// Path to drizzle-kit's CLI entry, resolved relative to its package entry.
const drizzleKitBin = join(fileURLToPath(import.meta.resolve("drizzle-kit")), "..", "bin.cjs");
// Scratch directory used for the pre-deploy schema-drift check.
const deployDriftCheckDir = join(".void", "deploy-drift-check");
// Marker error: the deploy event stream reported a structured failure;
// callers use it to distinguish server-reported errors from transport errors.
var DeployEventError = class extends Error {};
|
|
1536
|
+
/**
 * Extract a human-readable message from any thrown value.
 *
 * @param {unknown} error - Caught value of any type.
 * @returns {string} The `Error` message, or the value stringified.
 */
function getErrorMessage(error) {
  if (error instanceof Error) return error.message;
  return String(error);
}
|
|
1539
|
+
/**
 * Resolve the Cloudflare Workers Static Assets `not_found_handling` config.
 *
 * Any worker capable of producing HTML (SSR, pages, file-based routes,
 * middleware, WebSockets, or auth) must use `'none'` so the asset binding
 * returns 404 for unmatched paths and the worker — gated by
 * `run_worker_first` — owns the response. With
 * `'single-page-application'`, Cloudflare short-circuits any
 * `Sec-Fetch-Mode: navigate` request straight to the SPA shell *before*
 * `run_worker_first` is consulted, silently bypassing auth middleware and
 * breaking OAuth callbacks (which arrive as navigations from the
 * provider's 302 redirect).
 *
 * Pure-frontend SPAs without backend code keep the SPA fallback because
 * the worker has no responsibility for HTML routing.
 *
 * @param {object} opts - Feature flags describing the deployed worker.
 * @returns {{not_found_handling: string}} Asset config fragment.
 */
function resolveAssetConfig(opts) {
  const workerOwnsHtml =
    opts.isSsr ||
    opts.hasPages ||
    opts.hasRoutes ||
    opts.hasMiddleware ||
    opts.hasWebSockets === true ||
    opts.authEnabled === true;
  if (workerOwnsHtml) return { not_found_handling: "none" };
  return { not_found_handling: "single-page-application" };
}
|
|
1557
|
+
/**
 * Derive a process exit code from a caught error.
 *
 * Child-process errors (e.g. from `execSync`) carry a numeric `status`;
 * anything else maps to a generic failure code of 1.
 *
 * @param {unknown} error - Caught value of any type.
 * @returns {number} Exit code to pass to `process.exit`.
 */
function getExitCode(error) {
  const hasStatus = typeof error === "object" && error !== null && "status" in error;
  if (hasStatus && typeof error.status === "number") {
    return error.status;
  }
  return 1;
}
|
|
1561
|
+
/**
 * Resolve wrangler compatibility settings for deploy.
 *
 * Node-target builds have no wrangler compat surface, so they resolve to
 * `undefined`; otherwise the settings are read from project configuration.
 *
 * @param {string} root - Project root directory.
 * @param {object} config - Void project configuration.
 * @param {string} buildOutputDir - Build output directory to inspect.
 * @returns {object|undefined} Wrangler compat settings, or `undefined` for Node targets.
 */
function resolveDeployWranglerCompat(root, config, buildOutputDir) {
  if (isNodeTarget(config.target)) return undefined;
  return readWranglerCompat(root, config, buildOutputDir);
}
|
|
1564
|
+
/**
 * Run the full deploy pipeline:
 * 1. Check auth
 * 2. Resolve project + deploy mode
 * 3. Build (if needed)
 * 4. Package output
 * 5. Upload to platform
 * 6. Print live URL
 *
 * The outer `while (true)` exists solely so that an expired-token error can
 * trigger a one-shot interactive re-login and retry (`continue`); every
 * successful path `return`s out of the loop.
 *
 * @param {string} root - Project root directory.
 * @param {object} [options] - CLI options: project slug, `skipBuild`, `dir`, `spa`.
 */
async function runDeploy(root, options) {
  console.log();
  ge(cliTitle("deploy"));
  let reauthenticated = false;
  while (true) try {
    const token = getToken(root);
    if (!token) {
      R.error("No auth token found. Run `void auth login` first.");
      process.exit(1);
    }
    const client = new PlatformClient(token, { root });
    const requestedProjectSlug = getRequestedProjectSlug(options);
    let config = null;
    if (requestedProjectSlug) {
      // Explicit slug: look it up; if missing, auto-create in CI
      // (non-TTY) or confirm interactively first.
      // NOTE: the `else` below binds to the inner `!process.stdin.isTTY` check.
      config = resolveProjectBySlug(await client.listProjects(), requestedProjectSlug);
      if (!config) if (!process.stdin.isTTY) config = await createProjectFromSlug(root, client, requestedProjectSlug);
      else {
        const shouldCreate = await ue({ message: `Project "${requestedProjectSlug}" does not exist. Create it?` });
        if (q(shouldCreate) || !shouldCreate) {
          R.info("Deploy cancelled.");
          process.exit(0);
        }
        config = await createProjectFromSlug(root, client, requestedProjectSlug);
      }
    } else config = readProjectConfig(root);
    if (!config && !process.stdin.isTTY) {
      R.error("No project specified. Set `VOID_PROJECT`, pass `--project <slug>`, or commit `.void/project.json`.");
      process.exit(1);
    }
    // Interactive fallback: prompt the user to link/create a project.
    if (!config) config = await promptDeployProjectConfig(root, client);
    const voidConfig = readConfig(root);
    const detected = detectFramework(root);
    if (detected) {
      // Framework project: class "b"/"c" frameworks use their own preset
      // pipeline; everything else goes through the full Void deploy.
      const gitCommit = options?.skipBuild ? null : getGitCommit(root);
      const fwPreset = FRAMEWORK_PRESETS[detected.name];
      if (detected.class === "b" || detected.class === "c") {
        if (!fwPreset) {
          R.error(`No deploy preset found for framework "${detected.name}".`);
          process.exit(1);
        }
        await runFrameworkDeploy(root, config, client, detected, fwPreset, voidConfig, options?.skipBuild, gitCommit);
      } else await runFullDeploy(root, config, client, fwPreset, options?.skipBuild, gitCommit, detected);
      return;
    }
    // No framework detected: decide between a static deploy (preset-based)
    // and the full worker deploy, honoring explicit appType overrides.
    let preset = null;
    const appType = voidConfig.inference?.appType;
    if (appType && appType !== "void" && appType !== "framework") {
      const outputDir = voidConfig.inference?.outputDir;
      preset = {
        buildCommand: null,
        outputDir: outputDir ? resolve(root, outputDir) : join(root, "dist"),
        appType
      };
      // Detection still runs to fill in the build command (and output dir
      // when the config did not pin one).
      const detectedPreset = detectPreset(root, {
        dir: options?.dir,
        spa: options?.spa
      });
      if (detectedPreset) {
        preset.buildCommand = detectedPreset.buildCommand;
        if (!outputDir) preset.outputDir = detectedPreset.outputDir;
      }
    } else if (!appType) preset = detectPreset(root, {
      dir: options?.dir,
      spa: options?.spa
    });
    const gitCommit = options?.skipBuild ? null : getGitCommit(root);
    if (preset) {
      preset = {
        ...preset,
        buildCommand: resolveStaticBuildCommand(preset, voidConfig.inference?.build, Boolean(options?.dir))
      };
      await runStaticDeploy(root, config, client, preset, options?.skipBuild, gitCommit, void 0, void 0, void 0, void 0, voidConfig.routing);
    } else await runFullDeploy(root, config, client, void 0, options?.skipBuild, gitCommit);
    return;
  } catch (error) {
    if (isExpiredTokenError(error)) {
      // CI tokens and non-interactive sessions cannot re-auth; also guard
      // against retrying more than once.
      if (getTokenSource() === "ci") throw new Error("`VOID_TOKEN` is invalid or expired. Generate a new token and try again.");
      if (!process.stdin.isTTY || reauthenticated) throw new Error("Auth token expired. Run `void auth login` and try again.");
      R.warn("Auth token expired. Re-authenticating...");
      await promptForLoginToken(root);
      R.success("Re-authenticated. Retrying deploy...");
      reauthenticated = true;
      continue;
    }
    throw error;
  }
}
|
|
1660
|
+
/**
 * Interactively link the current directory to a Void project for deploy.
 *
 * Offers "create new" vs. "link existing" (linking only when the account
 * already has projects), persists the chosen project to `.void/project.json`,
 * and returns the resulting config.
 *
 * @param {string} root - Project root directory.
 * @param {object} client - Authenticated PlatformClient.
 * @returns {Promise<{projectId: string, slug: string}>} The linked project config.
 * @throws {Error} If the selection prompt unexpectedly returns a "create" action.
 */
async function promptDeployProjectConfig(root, client) {
  const projects = await client.listProjects();
  if (await promptProjectSetupAction("Set up a Void project for deploy:", { includeLink: projects.length > 0 }) === "create") return promptAndCreateProject(root, client);
  const result = await promptProjectSelection(projects, "Select a project to deploy to:", root, { includeCreate: false });
  // `includeCreate: false` means "create" should be impossible here.
  if (result.action === "create") throw new Error("Unexpected project selection result.");
  const config = {
    projectId: result.project.id,
    slug: result.project.slug
  };
  // Persist so future deploys skip the prompt.
  writeProjectConfig(root, config);
  R.step(`Linked to ${import_picocolors.default.blue(config.slug)}`);
  return config;
}
|
|
1673
|
+
/**
 * Decide which build command a static deploy should run.
 *
 * An explicit `--dir` means the user is deploying pre-built output, so no
 * build runs at all. Otherwise a configured override wins over the
 * preset's detected command.
 *
 * @param {{buildCommand: string|null}} preset - Detected/derived static preset.
 * @param {string|undefined} buildOverride - Build command from config, if any.
 * @param {boolean} [hasExplicitDir=false] - True when `--dir` was passed.
 * @returns {string|null} Command to run, or `null` to skip building.
 */
function resolveStaticBuildCommand(preset, buildOverride, hasExplicitDir = false) {
  if (hasExplicitDir) return null;
  if (buildOverride != null) return buildOverride;
  return preset.buildCommand;
}
|
|
1677
|
+
/**
 * Apply the visible side effects of a streamed deploy event (spinner
 * message or log warning). Returns a discriminated result so the caller
 * advances its own state (deploymentId capture, final return, error
 * throw). Pure function of (event, s) so the switch stays unit-testable.
 *
 * @param {object} event - A single event from the deploy SSE stream.
 * @param {object} s - Spinner with `message(text)` and `stop(text)`.
 * @returns {{kind: "start", deploymentId: string}
 *   | {kind: "continue"}
 *   | {kind: "done", event: object}
 *   | {kind: "error", message: string}} What the caller should do next.
 */
function applyDeployEvent(event, s) {
  switch (event.event) {
    case "start":
      // First event: carries the deploymentId the caller records for
      // interrupt/error reporting.
      s.message("Deploying...");
      return {
        kind: "start",
        deploymentId: event.deploymentId
      };
    case "heartbeat": return { kind: "continue" };
    case "asset_upload":
      if (event.new > 0) s.message(`Uploading assets (${event.new} new, ${event.cached} cached)`);
      else s.message(`Uploading assets (${event.total} cached)`);
      return { kind: "continue" };
    case "provisioning":
      s.message(`Provisioning ${event.bindings.join(", ")}`);
      return { kind: "continue" };
    case "migration_start":
      s.message(`Running ${event.pending} migration${event.pending > 1 ? "s" : ""}...`);
      return { kind: "continue" };
    case "migration":
      s.message(`Applied migration: ${event.name} (${event.durationMs}ms)`);
      return { kind: "continue" };
    case "worker_upload":
      s.message("Uploading worker...");
      return { kind: "continue" };
    case "queue_setup":
      s.message(`Configuring queue consumers: ${event.queues.join(", ")}`);
      return { kind: "continue" };
    case "queue_consumer_conflict":
      // Non-fatal: the deploy proceeds but the queue stays bound to the
      // conflicting script, so surface a warning rather than spin text.
      R.warn(`Queue consumer conflict on "${event.queue}": owned by "${event.ownerScript}". Messages for this queue will not be delivered to this deployment until the owning script is removed.`);
      return { kind: "continue" };
    case "prerender":
      s.message(`Prerendering ${event.pages} pages...`);
      return { kind: "continue" };
    case "done":
      s.stop("Deployed!");
      return {
        kind: "done",
        event
      };
    case "error":
      s.stop("Deploy failed");
      return {
        kind: "error",
        message: event.message
      };
  }
  // Unknown event types are ignored for forward compatibility.
  return { kind: "continue" };
}
|
|
1732
|
+
/**
 * Drive the deploy event stream to completion.
 *
 * Installs a one-shot SIGINT handler so Ctrl-C prints the in-flight
 * deployment id (when known) before exiting with 130. Returns the final
 * "done" event; throws `DeployEventError` for server-reported failures and
 * a formatted error for transport failures. Falling off the end of the
 * stream without a "done" event is treated as a lost connection.
 *
 * @param {object} client - Authenticated PlatformClient.
 * @param {string} projectId - Target project id.
 * @param {FormData} formData - Packaged deploy payload.
 * @param {object} s - Spinner for progress display.
 * @returns {Promise<object>} The terminal "done" event.
 * @throws {DeployEventError|Error} On server-reported or transport failure.
 */
async function streamDeploy(client, projectId, formData, s) {
  let deploymentId = null;
  let interrupted = false;
  const onSigint = () => {
    interrupted = true;
    s.stop("Deploy interrupted");
    if (deploymentId) R.info(`Deployment: ${deploymentId}`);
    else R.info("Deployment id was not received before the interrupt.");
    process.exit(130);
  };
  process.once("SIGINT", onSigint);
  try {
    for await (const event of client.deploy(projectId, formData)) {
      const result = applyDeployEvent(event, s);
      if (result.kind === "start") deploymentId = result.deploymentId;
      else if (result.kind === "done") return result.event;
      else if (result.kind === "error") throw new DeployEventError(formatDeployFailureMessage(result.message, deploymentId));
    }
  } catch (err) {
    // Server-reported failures are already formatted; rethrow as-is.
    if (err instanceof DeployEventError) throw err;
    // A SIGINT can surface as a stream error; the handler already printed.
    if (interrupted) throw err;
    s.stop("Deploy failed");
    throw formatStreamDeployError(err, deploymentId);
  } finally {
    // Always detach the handler so later SIGINTs behave normally.
    process.removeListener("SIGINT", onSigint);
  }
  // Stream ended without a "done" event: connection was lost mid-deploy.
  s.stop("Deploy failed");
  throw new Error(formatConnectionLostMessage(deploymentId));
}
|
|
1761
|
+
/**
 * Format the static-asset count line for the deploy summary.
 *
 * @param {{assets: number}} result - Terminal deploy event with total asset count.
 * @param {number} skipped - Assets skipped because they were unchanged.
 * @returns {string} e.g. "10 static asset(s) (4 unchanged, 6 uploaded)".
 */
function formatKnownAssetSummary(result, skipped) {
  if (skipped <= 0) {
    return `${result.assets} static asset(s)`;
  }
  const uploaded = result.assets - skipped;
  return `${result.assets} static asset(s) (${skipped} unchanged, ${uploaded} uploaded)`;
}
|
|
1765
|
+
/**
 * Deploy a static site or SPA: optional build, routing-rule merge/lint,
 * asset preflight (skip unchanged), package, and streamed upload.
 *
 * @param {string} root - Project root directory.
 * @param {{projectId: string}} config - Linked project config.
 * @param {object} client - Authenticated PlatformClient.
 * @param {{appType: string, outputDir: string, buildCommand: string|null}} preset - Static preset.
 * @param {boolean|undefined} skipBuild - Skip the build step when true.
 * @param {string|null} commit - Git commit hash to record, if available.
 * @param {string|undefined} hashedAssetsPrefix - Immutable-asset dir prefix (defaults to "assets").
 * @param {Array|undefined} headerRules - Pre-merged header rules (overwritten when `routing` is set).
 * @param {Array|undefined} redirectRules - Pre-merged redirect rules (ditto).
 * @param {Array|undefined} fallbackRules - Pre-merged fallback rules (ditto).
 * @param {object|undefined} routing - Routing config; when present, rules are merged from the output dir.
 */
async function runStaticDeploy(root, config, client, preset, skipBuild, commit, hashedAssetsPrefix, headerRules, redirectRules, fallbackRules, routing) {
  const typeLabel = preset.appType === "spa" ? "Static SPA" : "Static Site";
  R.info(`${typeLabel} deploy`);
  if (preset.buildCommand && !skipBuild) {
    R.step("Building...");
    execSync(preset.buildCommand, {
      cwd: root,
      stdio: "inherit"
    });
  }
  if (!existsSync(preset.outputDir)) {
    R.error(`deploy: Output directory '${preset.outputDir}' not found. Run the build first.`);
    process.exit(1);
  }
  if (routing) {
    // Merge config routing with any _headers/_redirects files in the
    // output dir, lint the result, and replace the parameter values.
    const merged = mergeRoutingRules({
      dir: preset.outputDir,
      routing
    });
    warnRedundantForceOn3xx(merged.redundantForceOn3xx);
    lintDuplicateSources(merged.redirectRules, "redirects/rewrites (pre-asset)");
    lintDuplicateSources(merged.fallbackRules, "fallbacks (post-asset)");
    lintDestinationSplats(merged.redirectRules, "redirects/rewrites (pre-asset)");
    lintDestinationSplats(merged.fallbackRules, "fallbacks (post-asset)");
    headerRules = merged.headerRules;
    redirectRules = merged.redirectRules;
    fallbackRules = merged.fallbackRules;
  }
  const s = createSpinner();
  const onProgress = (msg) => s.message(msg);
  s.start("Checking for changes...");
  // Preflight with the asset hash manifest so unchanged assets are skipped.
  const { assetManifest } = await collectAndHashAssets(preset.outputDir, onProgress);
  const { needed, skipped } = await client.preflight(config.projectId, assetManifest);
  s.stop("Checked for changes");
  if (skipped > 0) R.info(`Skipping ${skipped} unchanged asset(s)`);
  s.start("Packaging...");
  // `null` means "include everything" — only filter when something was skipped.
  const neededSet = skipped > 0 ? new Set(needed) : null;
  const formData = await packageStaticBuild(preset.outputDir, preset.appType, neededSet, hashedAssetsPrefix ?? "assets", headerRules, redirectRules, fallbackRules, onProgress);
  formData.append("assetManifest", JSON.stringify(assetManifest));
  formData.append("source", getTokenSource());
  if (commit) formData.append("commit", commit);
  const result = await streamDeploy(client, config.projectId, formData, s);
  Se([`${typeLabel} — ${formatKnownAssetSummary(result, skipped)}`].join("\n"), result.url);
  ye("Done!");
}
|
|
1810
|
+
/**
 * Emit a single aggregated lint warning when a `_redirects` file contains
 * one or more `!` force suffixes on 3xx entries (e.g. `301!`, `302!`).
 * The `!` is a no-op on redirects — they are parsed as if the `!` weren't
 * there — but the user is nudged to clean up for clarity. Silent when
 * `count` is 0.
 *
 * @param {number} count - Number of redundant `!` suffixes found.
 */
function warnRedundantForceOn3xx(count) {
  if (count !== 0) {
    R.warn(`Found ${count} redundant "!" on 3xx entries in _redirects (these always force by nature; safe to remove).`);
  }
}
|
|
1821
|
+
/**
 * Class B/C framework deploy: build with framework CLI, package with preset output paths.
 *
 * Pipeline: build → resolve worker/asset dirs → merge & lint routing rules →
 * infer bindings (rejecting Void-managed auth outside Void apps) → validate
 * migrations (provisioning Hyperdrive for PostgreSQL when missing) → scan
 * crons/queues → optional bundle + wrapper generation → env validation →
 * asset preflight → package → streamed upload → summary + cleanup.
 *
 * @param {string} root - Project root directory.
 * @param {{projectId: string}} config - Linked project config.
 * @param {object} client - Authenticated PlatformClient.
 * @param {{name: string}} detected - Detected framework descriptor.
 * @param {object} fwPreset - Framework preset (build command, worker/assets dirs, etc.).
 * @param {object} voidConfig - Parsed void config.
 * @param {boolean|undefined} skipBuild - Skip the build step when true.
 * @param {string|null} commit - Git commit hash to record, if available.
 */
async function runFrameworkDeploy(root, config, client, detected, fwPreset, voidConfig, skipBuild, commit) {
  R.info(`${detected.name} framework deploy`);
  const dialect = getDatabaseDialect(voidConfig);
  // Display string for logs; the executed command is resolved separately below.
  const buildCmd = voidConfig.inference?.build ?? formatProjectCommand(root, fwPreset.buildCommand);
  if (!skipBuild) {
    R.step(`Building (${buildCmd})...`);
    execSync(voidConfig.inference?.build ?? resolveProjectCommand(root, fwPreset.buildCommand), {
      cwd: root,
      stdio: "inherit"
    });
  }
  const workerDir = join(root, fwPreset.workerDir);
  const assetsDir = join(root, fwPreset.assetsDir);
  // When the worker lives inside the assets dir (Pages-style `_worker.js`),
  // exclude it and `_routes.json` from the uploaded static assets.
  const frameworkAssetIgnorePatterns = fwPreset.workerDir === fwPreset.assetsDir && fwPreset.workerMain === "_worker.js" ? [`/${fwPreset.workerMain}`, "/_routes.json"] : [];
  const { headerRules, redirectRules, fallbackRules, redundantForceOn3xx } = mergeRoutingRules({
    dir: assetsDir,
    routing: voidConfig.routing
  });
  warnRedundantForceOn3xx(redundantForceOn3xx);
  lintDuplicateSources(redirectRules, "redirects/rewrites (pre-asset)");
  lintDuplicateSources(fallbackRules, "fallbacks (post-asset)");
  lintDestinationSplats(redirectRules, "redirects/rewrites (pre-asset)");
  lintDestinationSplats(fallbackRules, "fallbacks (post-asset)");
  if (!existsSync(workerDir)) {
    R.error(`deploy: Worker output '${fwPreset.workerDir}' not found. Run '${buildCmd}' first.`);
    process.exit(1);
  }
  const bindings = inferProjectBindings(root, voidConfig.inference?.bindings ? { bindings: voidConfig.inference.bindings } : void 0, voidConfig.inference?.scanDirs ?? FRAMEWORK_SCAN_DIRS);
  // Void-managed Better Auth is a Void-app-only feature.
  if (bindings.needsAuth || findVoidAuthConfig(root) !== null || voidConfig.auth !== void 0) {
    R.error("deploy: Void-managed Better Auth is only supported in Void apps. Use Better Auth's official integration for this framework instead.");
    process.exit(1);
  }
  const migrationsDir = join(root, "db", "migrations");
  assertDeploySchemaDriftFree(root, dialect);
  const rawMigrations = collectMigrations(migrationsDir);
  let validatedMigrations;
  try {
    assertJournalCoherence(migrationsDir, rawMigrations);
  } catch (err) {
    R.error(getErrorMessage(err));
    process.exit(1);
  }
  if (rawMigrations.length > 0) try {
    validatedMigrations = validateMigrations(rawMigrations);
    R.info(`Found ${validatedMigrations.length} migration(s)`);
  } catch (err) {
    R.error(getErrorMessage(err));
    process.exit(1);
  }
  // PostgreSQL projects need a Hyperdrive resource; prompt to provision one
  // if absent. Best-effort: any failure here is swallowed and deploy proceeds.
  if (dialect === "postgresql" && (bindings.needsD1 || rawMigrations.length > 0)) try {
    if (!(await client.getResources(config.projectId)).some((resource) => resource.type === "hyperdrive")) {
      const { text: promptText, isCancel: isCancelPrompt } = await import("./dist-Dayj3gCK.mjs").then((n) => n.p);
      const connectionString = await promptText({
        message: "Your project uses PostgreSQL. Enter your connection string:",
        placeholder: "postgresql://user:pass@host:5432/dbname",
        validate(value) {
          if (!value?.startsWith("postgres://") && !value?.startsWith("postgresql://")) return "Must start with \"postgres://\" or \"postgresql://\"";
        }
      });
      if (isCancelPrompt(connectionString)) {
        R.info("Deploy cancelled.");
        process.exit(0);
      }
      await client.provisionDatabase(config.projectId, connectionString);
      R.success("Hyperdrive provisioned.");
    }
  } catch {}
  const rawJobs = scanJobsSync(root);
  const rawQueues = scanQueuesSync(root);
  const originalEntry = join(workerDir, fwPreset.workerMain);
  const wrapperFeatures = {
    auth: null,
    crons: rawJobs,
    queues: rawQueues,
    hasMigrations: validatedMigrations != null && validatedMigrations.length > 0,
    migrationDialect: dialect
  };
  // Bundling may relocate the worker entry, so track the effective paths.
  let actualWorkerDir = workerDir;
  let actualWorkerMain = fwPreset.workerMain;
  if (fwPreset.bundle) {
    R.step("Bundling worker...");
    const bundleResult = await bundleEntry(root, originalEntry);
    actualWorkerDir = bundleResult.outputDir;
    actualWorkerMain = bundleResult.entryFilename;
    R.info("Worker bundled");
  }
  if (needsWrapper(wrapperFeatures)) {
    R.step("Generating wrapper...");
    await generateWrapper(actualWorkerDir, actualWorkerMain, root, wrapperFeatures);
    R.info("Wrapper generated");
  }
  const wranglerCompat = readWranglerCompat(root, voidConfig, workerDir);
  const revalidate = voidConfig.routing?.revalidate;
  const prerender = voidConfig.routing?.prerender;
  // Flatten each job's cron expressions into (name, cron) schedule entries.
  const schedules = rawJobs.flatMap((job) => job.crons.map((cron) => ({
    name: job.name,
    cron
  })));
  const queues = rawQueues.map((q) => ({
    name: q.name,
    bindingName: q.bindingName,
    ...q.maxBatchSize != null && { maxBatchSize: q.maxBatchSize },
    ...q.maxBatchTimeout != null && { maxBatchTimeout: q.maxBatchTimeout }
  }));
  const envVars = loadDeployEnv(root);
  await assertEnvSchemaValid(root, client, config.projectId);
  await applySchemaDefaultsToVars(root, envVars);
  const s = createSpinner();
  const onProgress = (msg) => s.message(msg);
  s.start("Checking for changes...");
  const { assetManifest } = await collectAndHashAssets(assetsDir, onProgress, { ignorePatterns: frameworkAssetIgnorePatterns });
  const { needed, skipped } = await client.preflight(config.projectId, assetManifest, true);
  s.stop("Checked for changes");
  if (skipped > 0) R.info(`Skipping ${skipped} unchanged asset(s)`);
  s.start("Packaging...");
  // `null` means "include everything" — only filter when something was skipped.
  const neededSet = skipped > 0 ? new Set(needed) : null;
  const formData = await packageFrameworkBuild({
    frameworkName: detected.name,
    workerDir: actualWorkerDir,
    workerMain: actualWorkerMain,
    assetsDir,
    bindings,
    migrations: validatedMigrations,
    schedules,
    queues,
    revalidate,
    prerender,
    // Framework workers always own HTML routing, so the asset binding
    // must 404 on misses rather than serving a SPA shell.
    assetConfig: { not_found_handling: "none" },
    vars: envVars,
    packageOptions: {
      bindingNames: resolveBindingNames(voidConfig.inference?.bindings),
      workerMain: actualWorkerMain,
      compatibilityDate: wranglerCompat.compatibilityDate,
      compatibilityFlags: wranglerCompat.compatibilityFlags
    },
    revalidateQueryAllowlist: voidConfig.routing?.revalidateQueryAllowlist,
    neededAssets: neededSet,
    hashedAssetsPrefix: fwPreset.hashedAssetsPrefix ?? "assets",
    headerRules,
    redirectRules,
    fallbackRules,
    dialect,
    assetIgnorePatterns: frameworkAssetIgnorePatterns,
    onProgress
  });
  formData.append("assetManifest", JSON.stringify(assetManifest));
  formData.append("source", getTokenSource());
  if (commit) formData.append("commit", commit);
  const result = await streamDeploy(client, config.projectId, formData, s);
  const summary = [`${result.workers} worker module(s), ${formatKnownAssetSummary(result, skipped)}`];
  if (result.migrations) summary.push(`${result.migrations} migration(s) applied`);
  if (schedules.length > 0) summary.push(`${schedules.length} cron job(s) scheduled`);
  if (queues.length > 0) summary.push(`${queues.length} queue(s) configured`);
  if (prerender && prerender.length > 0) summary.push(`${prerender.length} page(s) prerendered`);
  if (revalidate) if (typeof revalidate === "number") summary.push(`ISR revalidate: ${revalidate}s`);
  else summary.push(`ISR revalidate: ${Object.keys(revalidate).length} path rule(s)`);
  summary.push(`Framework: ${detected.name}`);
  Se(summary.join("\n"), result.url);
  cleanupWrapper(root);
  ye("Done!");
}
|
|
1985
|
+
/**
 * Run a full (non-static) deploy: build, scan the project, package the worker
 * bundle plus assets, and stream it to the deploy service.
 *
 * @param root       project root directory
 * @param config     project config with `projectId` (and slug)
 * @param client     deploy API client (preflight / provisionDatabase / streamDeploy)
 * @param fwPreset   optional framework preset (workerDir, buildCommand)
 * @param skipBuild  when true, skip the `vite build` step
 * @param commit     optional git commit hash attached to the deploy
 * @param detected   detected framework info, or null/undefined for a Void app
 *
 * NOTE(review): several branches call process.exit directly; this function is
 * assumed to be a CLI entry path, not a reusable library call — confirm.
 */
async function runFullDeploy(root, config, client, fwPreset, skipBuild, commit, detected) {
	// --- Build step (unless the caller already built) ---
	if (!skipBuild) {
		const buildCmd = formatProjectCommand(root, "vite build");
		R.step(`Building (${buildCmd})...`);
		try {
			execSync(resolveProjectCommand(root, "vite build"), {
				cwd: root,
				stdio: "inherit",
				env: {
					...process.env,
					// Exposed so the build itself can know which project it targets.
					VOID_DEPLOY_PROJECT_ID: config.projectId
				}
			});
		} catch (err) {
			// Propagate the child build's exit code.
			process.exit(getExitCode(err));
		}
	}
	// --- Resolve build output layout ---
	const frameworkBuildCmd = fwPreset ? formatProjectCommand(root, fwPreset.buildCommand) : void 0;
	const distDir = resolveDistDir(root, fwPreset, frameworkBuildCmd);
	// Detect a non-default Vite assets directory from the client manifest
	// (first manifest entry with a "/" in its file path decides the prefix).
	let assetsPrefix = "assets";
	const clientManifestPath = join(distDir, "client", ".vite", "manifest.json");
	if (existsSync(clientManifestPath)) {
		const manifest = JSON.parse(readFileSync(clientManifestPath, "utf-8"));
		for (const entry of Object.values(manifest)) if (entry.file?.includes("/")) {
			const dir = entry.file.slice(0, entry.file.indexOf("/"));
			if (dir !== "assets") assetsPrefix = dir;
			break;
		}
	}
	// --- Project scan: config, bindings, auth/sandbox gating ---
	const isFrameworkMode = detected != null;
	const deployConfig = readConfig(root);
	const dialect = getDatabaseDialect(deployConfig);
	const bindings = inferProjectBindings(root, void 0, deployConfig.inference?.scanDirs ?? (isFrameworkMode ? FRAMEWORK_SCAN_DIRS : void 0));
	if (isSandboxEnabled(deployConfig)) bindings.needsSandbox = true;
	// Auth is enabled by inference, a discovered auth config file, or explicit config.
	const authEnabled = bindings.needsAuth || findVoidAuthConfig(root) !== null || deployConfig.auth !== void 0;
	// Auth implies D1 (the auth tables live there).
	const effectiveBindings = authEnabled ? {
		...bindings,
		needsAuth: true,
		needsD1: true
	} : bindings;
	// Void-managed auth and sandboxes are only supported for Void apps.
	if (isFrameworkMode && authEnabled) {
		R.error("Void-managed Better Auth is only supported in Void apps. Use Better Auth's official integration for this framework instead.");
		process.exit(1);
	}
	if (isFrameworkMode && bindings.needsSandbox) {
		R.error("deploy: Cloudflare Sandboxes are currently supported in Void apps only.");
		process.exit(1);
	}
	const sandboxConfig = bindings.needsSandbox ? resolveSandboxConfig(deployConfig, root) : void 0;
	if (bindings.needsSandbox && requiresSandboxPlatformImage(deployConfig)) {
		R.error("deploy: `sandbox.image` points at a local Dockerfile. Add `sandbox.platformImage` with a pushed registry image for `void deploy`, or deploy this sandbox directly with Wrangler.");
		process.exit(1);
	}
	// --- Routing rules (headers / redirects / fallbacks) and lint passes ---
	const clientDir = join(distDir, "client");
	const { headerRules, redirectRules, fallbackRules, redundantForceOn3xx } = mergeRoutingRules({
		dir: clientDir,
		routing: deployConfig.routing
	});
	warnRedundantForceOn3xx(redundantForceOn3xx);
	lintDuplicateSources(redirectRules, "redirects/rewrites (pre-asset)");
	lintDuplicateSources(fallbackRules, "fallbacks (post-asset)");
	lintDestinationSplats(redirectRules, "redirects/rewrites (pre-asset)");
	lintDestinationSplats(fallbackRules, "fallbacks (post-asset)");
	// --- Database migrations: drift check, journal coherence, validation ---
	const migrationsDir = join(root, "db", "migrations");
	assertDeploySchemaDriftFree(root, dialect);
	const rawMigrations = collectMigrations(migrationsDir);
	let validatedMigrations;
	try {
		assertJournalCoherence(migrationsDir, rawMigrations);
	} catch (err) {
		R.error(getErrorMessage(err));
		process.exit(1);
	}
	if (rawMigrations.length > 0) try {
		validatedMigrations = validateMigrations(rawMigrations);
		R.info(`Found ${validatedMigrations.length} migration(s)`);
	} catch (err) {
		R.error(getErrorMessage(err));
		process.exit(1);
	}
	// --- Postgres: interactively provision Hyperdrive if none exists yet.
	// Best-effort: any failure here is swallowed and deploy continues.
	if (dialect === "postgresql" && (effectiveBindings.needsD1 || rawMigrations.length > 0)) try {
		if (!(await client.getResources(config.projectId)).some((resource) => resource.type === "hyperdrive")) {
			const { text: promptText, isCancel: isCancelPrompt } = await import("./dist-Dayj3gCK.mjs").then((n) => n.p);
			const connectionString = await promptText({
				message: "Your project uses PostgreSQL. Enter your connection string:",
				placeholder: "postgresql://user:pass@host:5432/dbname",
				validate(value) {
					if (!value?.startsWith("postgres://") && !value?.startsWith("postgresql://")) return "Must start with \"postgres://\" or \"postgresql://\"";
				}
			});
			if (isCancelPrompt(connectionString)) {
				R.info("Deploy cancelled.");
				process.exit(0);
			}
			await client.provisionDatabase(config.projectId, connectionString);
			R.success("Hyperdrive provisioned.");
		}
	} catch {}
	// --- Feature scans: cron jobs, routes, websockets, queues, pages ---
	const workerDirName = resolveWorkerDirName(distDir, fwPreset, frameworkBuildCmd);
	const schedules = scanJobsSync(root).flatMap((job) => job.crons.map((cron) => ({
		name: job.name,
		cron
	})));
	const routeScan = await scanRoutes(root, { output: deployConfig.output });
	const webSockets = routeScan.websockets ?? scanWebSocketRoutesSync(root);
	const queues = scanQueuesSync(root).map((q) => ({
		name: q.name,
		bindingName: q.bindingName,
		// Optional tuning knobs are only included when explicitly set.
		...q.maxBatchSize != null && { maxBatchSize: q.maxBatchSize },
		...q.maxBatchTimeout != null && { maxBatchTimeout: q.maxBatchTimeout }
	}));
	const hasPages = existsSync(join(root, "pages"));
	const hasRoutes = routeScan.routes.length > 0;
	const hasMiddleware = routeScan.middleware.length > 0;
	const hasWebSockets = webSockets.length > 0;
	// Framework mode is always SSR; Void apps are SSR if they have an SSR entry
	// or have pages and are not explicitly static.
	const isSsr = isFrameworkMode ? true : Boolean(validateSsrEntry(root)) || hasPages && deployConfig.output !== "static";
	// --- ISR revalidate: merge config-level rule with per-page rules ---
	const vcRevalidate = readConfig(root).routing?.revalidate;
	let revalidate;
	if (vcRevalidate != null) revalidate = vcRevalidate;
	const pageScan = existsSync(join(root, "pages")) ? routeScan.pages.pages.length > 0 ? routeScan.pages : await scanPages(root, { output: deployConfig.output }) : null;
	if (pageScan) {
		const perPageRules = {};
		for (const page of pageScan.pages) if (page.revalidate != null) perPageRules[page.pattern] = page.revalidate;
		// Per-page rules come first; a numeric config value becomes a "*" catch-all.
		if (Object.keys(perPageRules).length > 0) if (typeof revalidate === "number") revalidate = Object.fromEntries([...Object.entries(perPageRules), ["*", revalidate]]);
		else if (typeof revalidate === "object") revalidate = Object.fromEntries([...Object.entries(perPageRules), ...Object.entries(revalidate)]);
		else revalidate = perPageRules;
	}
	// --- Static short-circuit: fully prerenderable output with no dynamic
	// features downgrades to a static deploy. ---
	if (deployConfig.output === "static" && pageScan && !isSsr) {
		if (pageScan.pages.every((p) => p.prerender && (p.params.length === 0 && !p.catchAll || p.hasGetPrerenderPaths)) && !(hasRoutes || hasMiddleware || hasWebSockets || authEnabled) && schedules.length === 0 && queues.length === 0) {
			R.info("All pages prerendered — deploying as static site");
			return runStaticDeploy(root, config, client, {
				buildCommand: null,
				outputDir: clientDir,
				appType: "static"
			}, true, commit, assetsPrefix, headerRules, redirectRules, fallbackRules);
		}
	}
	// --- Prerender: collect paths via the node or worker strategy ---
	const uniquePrerenderPaths = deployConfig.output === "static" || isFrameworkMode ? [] : isNodeTarget(deployConfig.target) ? await collectPrerenderPathsNode({
		root,
		ssrDir: join(distDir, "ssr"),
		clientDir
	}) : await collectPrerenderPaths({
		root,
		workerDir: join(distDir, workerDirName),
		clientDir,
		persistDir: join(root, ".void"),
		remoteMode: resolveRemoteMode(deployConfig.remote),
		projectId: config.projectId
	});
	// --- Env vars: load, validate against schema, apply schema defaults ---
	const wranglerCompat = resolveDeployWranglerCompat(root, deployConfig, join(distDir, workerDirName));
	const envVars = loadDeployEnv(root);
	await assertEnvSchemaValid(root, client, config.projectId);
	await applySchemaDefaultsToVars(root, envVars);
	// --- Package and upload ---
	const s = createSpinner();
	const onProgress = (msg) => s.message(msg);
	const assetConfig = resolveAssetConfig({
		isSsr,
		hasPages,
		hasRoutes,
		hasMiddleware,
		hasWebSockets,
		authEnabled
	});
	s.start("Checking for changes...");
	const { assetManifest } = await collectAndHashAssets(clientDir, onProgress);
	// Preflight lets the server tell us which hashed assets it already has.
	const { needed, skipped } = await client.preflight(config.projectId, assetManifest, true);
	s.stop("Checked for changes");
	if (skipped > 0) R.info(`Skipping ${skipped} unchanged asset(s)`);
	s.start("Packaging...");
	const neededSet = skipped > 0 ? new Set(needed) : null;
	const formData = await packageBuild(distDir, workerDirName, effectiveBindings, validatedMigrations, schedules, isSsr || hasRoutes || hasMiddleware || hasWebSockets || authEnabled, isFrameworkMode ? detected.name : void 0, revalidate, envVars, queues, uniquePrerenderPaths, assetConfig, {
		compatibilityDate: wranglerCompat?.compatibilityDate,
		compatibilityFlags: wranglerCompat?.compatibilityFlags,
		...sandboxConfig && { sandbox: sandboxConfig },
		revalidateQueryAllowlist: deployConfig.routing?.revalidateQueryAllowlist
	}, neededSet, assetsPrefix, headerRules, redirectRules, fallbackRules, dialect, webSockets, onProgress);
	formData.append("assetManifest", JSON.stringify(assetManifest));
	formData.append("source", getTokenSource());
	if (commit) formData.append("commit", commit);
	const result = await streamDeploy(client, config.projectId, formData, s);
	// --- Human-readable summary ---
	const summary = [`${result.workers} worker module(s), ${formatKnownAssetSummary(result, skipped)}`];
	if (result.migrations) summary.push(`${result.migrations} migration(s) applied`);
	if (schedules.length > 0) summary.push(`${schedules.length} cron job(s) scheduled`);
	if (queues.length > 0) summary.push(`${queues.length} queue(s) configured`);
	if (uniquePrerenderPaths.length > 0) summary.push(`${uniquePrerenderPaths.length} page(s) prerendered`);
	if (isSsr) summary.push("SSR enabled");
	if (revalidate) if (typeof revalidate === "number") summary.push(`ISR revalidate: ${revalidate}s`);
	else summary.push(`ISR revalidate: ${Object.keys(revalidate).length} path rule(s)`);
	if (isFrameworkMode) summary.push("Framework mode");
	Se(summary.join("\n"), result.url);
	ye("Done!");
}
/**
 * Create a remote project for `slug`, persist the resulting project config
 * under `root`, and return it.
 */
async function createProjectFromSlug(root, client, slug) {
	R.step(`Creating project ${import_picocolors.default.blue(slug)}...`);
	const created = await client.createProject(slug);
	const projectConfig = { projectId: created.id, slug: created.slug };
	writeProjectConfig(root, projectConfig);
	R.success(`Project created: ${import_picocolors.default.blue(projectConfig.slug)}`);
	return projectConfig;
}
/**
 * Return the HEAD commit hash for `root`, or null when `root` is not inside a
 * git work tree or the work tree has uncommitted changes. Never throws.
 */
function getGitCommit(root) {
	try {
		// Throws when `root` is not inside a work tree (or git is unavailable).
		execSync("git rev-parse --is-inside-work-tree", {
			cwd: root,
			stdio: "ignore"
		});
		const status = execSync("git status --porcelain", {
			cwd: root,
			encoding: "utf-8"
		});
		// A dirty tree has no single representative commit.
		if (status.trim() !== "") return null;
		const head = execSync("git rev-parse HEAD", {
			cwd: root,
			encoding: "utf-8"
		});
		return head.trim();
	} catch {
		return null;
	}
}
/**
 * Abort the deploy (exit 1) when the Drizzle schema has drifted from the
 * committed migrations, or when the drift check itself fails. Returns
 * normally only when the project is drift-free.
 */
function assertDeploySchemaDriftFree(root, dialect) {
	let drifted;
	try {
		drifted = hasDeploySchemaDrift(root, dialect);
	} catch (error) {
		// The check could not be performed at all; surface its message.
		R.error(getErrorMessage(error));
		process.exit(1);
	}
	if (!drifted) return;
	R.error(getDeploySchemaDriftMessage());
	process.exit(1);
}
/**
 * Detect drift between the project's Drizzle schema and db/migrations/ by
 * re-running `drizzle-kit generate` against a throwaway symlinked copy of the
 * project and comparing the migrations directory before and after.
 *
 * @param root    project root
 * @param dialect "postgresql" or a SQLite-style dialect (drives the dummy DSN)
 * @returns true when generate produced new/changed migration files
 * @throws  Error with drizzle-kit output when the generate step fails
 */
function hasDeploySchemaDrift(root, dialect) {
	// No discoverable schema — nothing to drift against.
	if (!discoverSchema(root)) return false;
	const driftRoot = mkDeployDriftCheckRoot();
	const driftProjectRoot = join(driftRoot, "project");
	// Clear any stale leftovers from a previous interrupted run.
	rmSync(driftRoot, {
		recursive: true,
		force: true
	});
	mkdirSync(driftRoot, { recursive: true });
	createDeployDriftProject(root, driftProjectRoot);
	try {
		const migrationsDir = join(driftProjectRoot, "db", "migrations");
		// Snapshot migrations before generate; the DSN is a dummy — generate
		// only needs a dialect, it never connects.
		const before = snapshotDir(migrationsDir);
		const driftConfigPath = writeDrizzleConfig(driftProjectRoot, dialect === "postgresql" ? "postgresql://dummy" : ":memory:", dialect);
		execFileSync(process.execPath, [
			drizzleKitBin,
			"generate",
			"--config",
			driftConfigPath
		], {
			cwd: driftProjectRoot,
			stdio: "pipe"
		});
		// Any difference means the schema produced a migration not yet committed.
		return !migrationSnapshotsEqual(before, snapshotDir(migrationsDir));
	} catch (error) {
		throw new Error(formatDeploySchemaDriftCheckError(error));
	} finally {
		// Always remove the throwaway project, pass or fail.
		rmSync(driftRoot, {
			recursive: true,
			force: true
		});
	}
}
/** User-facing message shown when schema drift blocks a deploy. */
function getDeploySchemaDriftMessage() {
	const lines = [
		"Schema drift detected between your Drizzle schema and db/migrations/.",
		"Run `void db generate`, review the generated migration, commit it yourself, then rerun `void deploy`."
	];
	return lines.join("\n");
}
/**
 * Build the error message shown when the drift check itself failed, appending
 * any captured drizzle-kit stderr/stdout as a trailing paragraph.
 */
function formatDeploySchemaDriftCheckError(error) {
	const paragraphs = [
		"Failed to check Drizzle schema drift during deploy.",
		"Run `void db generate` locally, fix any reported issues, review the generated migration, commit it yourself, then rerun `void deploy`."
	];
	const detail = extractExecErrorOutput(error);
	if (detail) paragraphs.push(detail);
	return paragraphs.join("\n\n");
}
/**
 * Pull captured stderr/stdout text off an exec-style error object.
 * Returns "" for non-object errors or when neither stream has content;
 * stderr comes first when both are present.
 */
function extractExecErrorOutput(error) {
	if (!error || typeof error !== "object") return "";
	const streams = [];
	if ("stderr" in error) streams.push(bufferToString(error.stderr));
	if ("stdout" in error) streams.push(bufferToString(error.stdout));
	return streams.filter(Boolean).join("\n").trim();
}
/**
 * Coerce a child-process stream capture (string or Buffer) to trimmed text.
 * Anything else yields "".
 */
function bufferToString(value) {
	if (value instanceof Buffer) return value.toString("utf-8").trim();
	if (typeof value === "string") return value.trim();
	return "";
}
/**
 * Materialize a throwaway copy of the project for the drift check: db/ is
 * physically copied (drizzle-kit writes into it), everything else is
 * symlinked; build/VCS caches are skipped; the nearest node_modules (walking
 * up from root) is symlinked in so drizzle-kit can resolve dependencies.
 */
function createDeployDriftProject(root, driftProjectRoot) {
	// VCS/internal dirs plus common build-output caches are never mirrored.
	const skipped = new Set([
		".git",
		".void",
		"node_modules",
		"dist",
		"build",
		".wrangler",
		".turbo",
		".next",
		".svelte-kit",
		".output"
	]);
	mkdirSync(driftProjectRoot, { recursive: true });
	for (const entry of readdirSync(root, { withFileTypes: true })) {
		if (skipped.has(entry.name)) continue;
		const source = join(root, entry.name);
		const target = join(driftProjectRoot, entry.name);
		if (entry.name === "db") {
			// Real copy: generate mutates db/migrations in the throwaway project.
			cpSync(source, target, { recursive: true });
		} else {
			symlinkSync(source, target, entry.isDirectory() ? symlinkDirType() : "file");
		}
	}
	const nodeModules = findNearestNodeModules(root);
	if (nodeModules) symlinkSync(nodeModules, join(driftProjectRoot, "node_modules"), symlinkDirType());
}
/**
 * Walk up from `root` to the filesystem root looking for a node_modules
 * directory; return its path, or null when none exists on the ancestry chain.
 */
function findNearestNodeModules(root) {
	for (let dir = root; ; ) {
		const candidate = join(dir, "node_modules");
		if (existsSync(candidate)) return candidate;
		const parent = resolve(dir, "..");
		// Reached the filesystem root without a hit.
		if (parent === dir) return null;
		dir = parent;
	}
}
/**
 * Build a unique temp-dir path for the drift check. Path separators in the
 * configured directory name are flattened to "-" so the result is a single
 * path segment; a timestamp suffix keeps concurrent runs apart.
 */
function mkDeployDriftCheckRoot() {
	const flattened = deployDriftCheckDir.replaceAll("/", "-").replaceAll("\\", "-");
	return join(tmpdir(), `${flattened}-${Date.now()}`);
}
/**
 * Symlink type for directory links: "junction" on Windows (no admin rights
 * needed), "dir" elsewhere.
 */
function symlinkDirType() {
	if (process.platform === "win32") return "junction";
	return "dir";
}
/**
 * Snapshot a directory tree as { relativePath: fileContent }. A missing
 * directory snapshots to an empty object.
 */
function snapshotDir(root) {
	const snapshot = {};
	if (existsSync(root)) collectSnapshotFiles(root, root, snapshot);
	return snapshot;
}
/**
 * Recursively record every file under `current` into `snapshot`, keyed by its
 * path relative to `root`, valued by its UTF-8 content. Mutates `snapshot`.
 */
function collectSnapshotFiles(current, root, snapshot) {
	for (const entry of readdirSync(current, { withFileTypes: true })) {
		const entryPath = join(current, entry.name);
		if (entry.isDirectory()) {
			collectSnapshotFiles(entryPath, root, snapshot);
		} else {
			snapshot[relative(root, entryPath)] = readFileSync(entryPath, "utf-8");
		}
	}
}
/**
 * Compare two directory snapshots ({ path: content }) for equality,
 * independent of key insertion order.
 */
function migrationSnapshotsEqual(left, right) {
	const leftKeys = Object.keys(left).sort((a, b) => a.localeCompare(b));
	const rightKeys = Object.keys(right).sort((a, b) => a.localeCompare(b));
	if (leftKeys.length !== rightKeys.length) return false;
	for (let i = 0; i < leftKeys.length; i++) {
		const key = leftKeys[i];
		if (key !== rightKeys[i]) return false;
		if (left[key] !== right[key]) return false;
	}
	return true;
}
/**
 * Overlay schema `.default(value)` entries onto the deploy manifest vars so
 * the deployed worker sees the same fallback as the typed `env.X` proxy at
 * runtime. User-provided `.env*` values always win — a default only fills a
 * key that is still undefined. Mutates `envVars`; no-op when the project has
 * no `env.ts`.
 */
async function applySchemaDefaultsToVars(root, envVars) {
	const defaults = await getDeployEnvDefaults(root);
	for (const key of Object.keys(defaults)) {
		if (envVars[key] === undefined) envVars[key] = defaults[key];
	}
}
/**
 * If the project has an `env.ts`, validate its schema against the union of
 * production env files and remote secret names. Hard-errors (throws
 * DeployEventError) before upload so deploys never ship missing required env
 * vars. No-op when there is no env file or the schema is satisfied.
 */
async function assertEnvSchemaValid(root, client, projectId) {
	if (findEnvFile(root) == null) return;
	const remoteSecrets = await fetchRemoteSecretNames(client, projectId);
	const { hasSchema, report } = await validateProdEnv(root, {
		productionOnly: true,
		remoteSecrets
	});
	if (!hasSchema) return;
	if (report.valid) return;
	R.error(`Env validation failed:\n${formatEnvReport(report)}`);
	if (report.missing.length > 0) {
		R.info(`Add the missing values to .env / .env.production, or upload them with \`void secret put <NAME>\`.`);
	}
	throw new DeployEventError("deploy: Aborting because env.ts schema is not satisfied.");
}
/**
 * Load env vars for deploy — only .env and .env.production (not .local files).
 * Later files win on conflict. Lines are `KEY=value`; blank lines and `#`
 * comments are skipped; a single layer of matching surrounding quotes is
 * stripped. Internal (reserved) keys are removed with a warning.
 *
 * @param root project root directory
 * @returns sanitized { KEY: value } map for the deploy manifest
 */
function loadDeployEnv(root) {
	const files = [".env", ".env.production"];
	const env = {};
	for (const file of files) {
		const filePath = join(root, file);
		if (!existsSync(filePath)) continue;
		const content = readFileSync(filePath, "utf-8");
		for (const line of content.split("\n")) {
			const trimmed = line.trim();
			if (!trimmed || trimmed.startsWith("#")) continue;
			const eqIndex = trimmed.indexOf("=");
			if (eqIndex === -1) continue;
			const key = trimmed.slice(0, eqIndex).trim();
			let value = trimmed.slice(eqIndex + 1).trim();
			// Strip one layer of matching quotes. The length guard fixes the
			// degenerate case of a value that is a single quote character, which
			// previously satisfied both startsWith and endsWith on the same char
			// and was mangled to "" by slice(1, -1).
			if (value.length >= 2 && (value.startsWith("\"") && value.endsWith("\"") || value.startsWith("'") && value.endsWith("'"))) value = value.slice(1, -1);
			env[key] = value;
		}
	}
	// Reserved keys never ship in the manifest; warn so the user knows.
	const { sanitized, stripped } = stripInternalEnvKeys(env);
	if (stripped.length > 0) voidWarn(`Ignoring internal env keys from .env files: ${stripped.join(", ")}. These are reserved for Void runtime use and will not be shipped in the deploy manifest.`);
	return sanitized;
}
/**
 * Resolve the top-level build output directory.
 *
 * When a framework preset is available, uses its workerDir to derive the
 * parent directory (e.g. "build/server" → "build") and throws a framework-
 * specific error when that output is missing.
 *
 * For non-framework (Void app) deploys, defaults to "dist".
 */
function resolveDistDir(root, fwPreset, buildCommand) {
	if (!fwPreset) return join(root, "dist");
	const [topDir] = fwPreset.workerDir.split("/");
	const distDir = join(root, topDir);
	if (!existsSync(distDir)) {
		throw new Error(`deploy: Build output directory '${topDir}' not found. Run '${buildCommand ?? fwPreset.buildCommand}' first.`);
	}
	return distDir;
}
/**
 * Resolve the worker output subdirectory name within the build dir.
 *
 * When a framework preset is available, uses the preset's workerDir to
 * derive the subdirectory name and throws a framework-specific error when it
 * is missing.
 *
 * For non-framework (Void app) deploys, probes known subdirectories in
 * priority order: ssr, rsc, server.
 */
function resolveWorkerDirName(distDir, fwPreset, buildCommand) {
	if (fwPreset) {
		const subDir = fwPreset.workerDir.split("/").slice(1).join("/");
		if (!existsSync(join(distDir, subDir))) {
			throw new Error(`deploy: Worker output '${fwPreset.workerDir}' not found. Run '${buildCommand ?? fwPreset.buildCommand}' first.`);
		}
		return subDir;
	}
	const candidates = ["ssr", "rsc", "server"];
	const found = candidates.find((name) => existsSync(join(distDir, name)));
	if (found !== undefined) return found;
	throw new Error("deploy: Worker output not found. Expected dist/ssr/ or dist/rsc/.");
}
//#endregion
// Public surface of this bundled chunk. The single-letter aliases are
// bundler-generated; sibling chunks import them by these short names.
export { resolveSandboxConfig as _, prerenderPagesNode as a, filterLoadedEnv as c, ensureWranglerCompatibilityDate as d, ensureWranglerNodejsAlsFlag as f, isSandboxEnabled as g, SANDBOX_MIGRATION_TAG as h, prerenderPages as i, resolveRemoteMode as l, syncWranglerBindings as m, resolveDistDir as n, DEFAULT_PROXY_URL as o, readWranglerCompat as p, resolveWorkerDirName as r, STAGING_PROXY_URL as s, deploy_exports as t, stripInternalEnvKeys as u };