@npflared/cli 0.0.7 → 0.1.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/dist/index.mjs +7 -0
- package/package.json +15 -13
- package/.turbo/turbo-build.log +0 -14
- package/.turbo/turbo-lint.log +0 -29
- package/CHANGELOG.md +0 -43
- package/biome.json +0 -3
- package/dist/index.js +0 -7
- package/src/commands/install.ts +0 -314
- package/src/index.ts +0 -17
- package/src/types.ts +0 -13
- package/src/utils/cloudflare.ts +0 -169
- package/src/utils/fs.ts +0 -14
- package/tsconfig.json +0 -20
- package/tsup.config.ts +0 -10
package/dist/index.mjs
ADDED
@@ -0,0 +1,7 @@
+#!/usr/bin/env node
+import{hideBin as e}from"yargs/helpers";import t from"yargs/yargs";import{access as n,mkdir as r,mkdtemp as i,readFile as a,rename as o,rm as s,writeFile as c}from"node:fs/promises";import{homedir as l,tmpdir as u}from"node:os";import{join as d}from"node:path";import{confirm as f,intro as p,isCancel as m,log as h,outro as g,select as _,spinner as v,text as y}from"@clack/prompts";import{randomUUID as b}from"node:crypto";import{rmSync as x}from"node:fs";import S from"chalk";import C from"dedent";import w from"degit";import{encode as T}from"uuid-b32";import{$ as E,ProcessOutput as D}from"zx";import{z as O}from"zod";const k=v(),A=d(l(),`.npflared`),j=async()=>{p(`npflared`),k.start(`Deleting npflared directory (${A})...`),await s(A,{recursive:!0,force:!0}),k.stop(`Successfully deleted npflared directory`),process.exit(0)},M=O.object({d1_databases:O.array(O.object({binding:O.string(),database_name:O.string(),database_id:O.string()})).min(1)}),N=O.object({r2_buckets:O.array(O.object({binding:O.string(),bucket_name:O.string()})).min(1)}),P=async()=>{try{let[e]=(await E({quiet:!0})`npx -y wrangler whoami`).stdout.match(/([0-9a-f]{32})/)??[];return e}catch(e){throw e instanceof D?Error(e.stderr||e.stdout):e}},F=async()=>{try{let e=[],t=(await E({quiet:!0})`npx -y wrangler d1 list`).stdout.matchAll(/│(.*)│(.*)│(.*)│(.*)│(.*)│(.*)│/gm);for(let n of t){let[,t,r,i,a,o,s]=n;(t||r||i||a||o||s)&&e.push({id:t.trim(),name:r.trim(),createdAt:i.trim(),version:a.trim(),numberOfTables:Number.parseInt(o,10),size:Number.parseInt(s,10)})}return e}catch(e){throw e instanceof D?Error(e.stderr||e.stdout):e}},I=async()=>{try{let e=(await E({quiet:!0})`npx -y wrangler r2 bucket list`).stdout.matchAll(/name:(.*)\ncreation_date:(.*)/gim),t=[];for(let n of e){let[,e,r]=n;(e||r)&&t.push({name:e.trim(),createdAt:r.trim()})}return t}catch(e){throw e instanceof D?Error(e.stderr||e.stdout):e}},L=async e=>{try{let t=(await E({quiet:!0})`npx -y wrangler r2 bucket create ${e}`).stdout.match(/\{(?:[^{}]*|\{(?:[^{}]*|\{[^{}]*\})*\})*\}/gim),n=N.safeParse(JSON.parse(t?.[0]??``));if(!n.success)throw Error(`Could not properly retrieve R2 bucket binding`);return n.data}catch(e){throw e instanceof D?Error(e.stderr||e.stdout):e}},R=async e=>{try{let t=(await E({quiet:!0})`npx -y wrangler d1 create ${e}`).stdout.match(/\{(?:[^{}]*|\{(?:[^{}]*|\{[^{}]*\})*\})*\}/gim),n=M.safeParse(JSON.parse(t?.[0]??``));if(!n.success)throw Error(`Could not properly retrieve D1 database binding`);return n.data}catch(e){throw e instanceof D?Error(e.stderr||e.stdout):e}},z=async(e,t={})=>{try{await E({cwd:t.cwd})`npx -y wrangler d1 migrations apply ${e} --remote --config wrangler.json`}catch(e){throw e instanceof D?Error(e.stderr||e.stdout):e}},B=async(e={})=>{try{let t=(await E({quiet:!0,cwd:e.cwd})`npx -y wrangler deploy --config wrangler.json`).stdout.match(/([a-z0-9-]+\.[a-z0-9-]+\.workers\.dev)/i);return t?`https://${t[0]}`:`<unknown>`}catch(e){throw e instanceof D?Error(e.stderr||e.stdout):e}},V=async e=>{try{return await n(e),!0}catch(e){if(e?.code===`ENOENT`)return!1;throw e}},H=d(l(),`.npflared`),U=v(),W=async()=>{try{await r(H,{recursive:!0})}catch(e){if(e?.code!==`EEXIST`)throw e}},G=async()=>{let e=await f({message:`Use an existing D1 database?`});if(m(e)&&process.exit(1),e){U.start(`Retrieving D1 databases...`);let e=await F();U.stop();let t=await _({message:`Select a D1 database:`,options:e.map(e=>({value:{name:e.name,id:e.id},label:`${e.name.padEnd(30)} (${e.id}) - Created at: ${e.createdAt}`}))});return m(t)&&process.exit(1),{name:t.name,id:t.id}}let t=await y({initialValue:`npflared`,message:`Enter a name for your D1 database:`,validate(e){if(e.length===0)return`Please enter a name for your D1 database`}});m(t)&&process.exit(1),U.start(`Creating D1 database ${t}...`);let n=await R(t);U.stop();let[r]=n.d1_databases;return r||(console.log(S.red(`Could not create D1 database ${t}`)),process.exit(1)),{name:r.database_name,id:r.database_id}},K=async()=>{let e=await f({message:`Use an existing R2 bucket?`});if(m(e)&&process.exit(1),e){U.start(`Retrieving R2 buckets...`);let e=await I();U.stop();let t=await _({message:`Select a R2 bucket:`,options:e.map(e=>({value:{name:e.name},label:`${e.name.padEnd(30)} - Created at: ${e.createdAt}`}))});return m(t)&&process.exit(1),{name:t.name}}let t=await y({initialValue:`npflared`,message:`Enter a name for your R2 bucket:`,validate(e){if(e.length===0)return`Please enter a name for your R2 bucket`}});m(t)&&process.exit(1),U.start(`Creating R2 bucket ${t}...`);let n=await L(t);U.stop();let[r]=n.r2_buckets;return r||(console.log(S.red(`Could not create R2 bucket ${t}`)),process.exit(1)),{name:r.bucket_name}},q=async()=>{let e=await _({message:`Install dependencies with:`,options:[{value:`npm`,label:`npm`},{value:`pnpm`,label:`pnpm`},{value:`yarn`,label:`yarn`},{value:`bun`,label:`bun`}]});return m(e)&&process.exit(1),e},J=async()=>{let e=await y({initialValue:`npflared`,message:`Enter a name for your worker:`,validate(e){if(e.length===0)return`Please enter a name for your worker`}});return m(e)&&process.exit(1),e},Y=async e=>{U.start(`Generating admin token...`);let t=d(e,`migrations`,`9999_admin-token.sql`);if(!await V(t)){let e=T(b()),n=Date.now();return await c(t,`INSERT INTO token (token, name, scopes, created_at, updated_at) VALUES ('${e}', 'admin-token', '[{"type": "token:read+write", "values": ["*"]}, {"type": "user:read+write", "values": ["*"]}, {"type": "package:read+write", "values": ["*"]}]', ${n}, ${n});`),U.stop(`Admin token migration file generated at ${t}`),e}let n=await a(t,`utf-8`);return U.stop(`Admin token migration file already exists at ${t}`),n.match(/INSERT INTO token \(token, name, scopes, created_at, updated_at\) VALUES \('([^']+)'/)?.[0]??``},X=async()=>{let e=await i(d(u(),`npflared-`)),t=()=>{e&&x(e,{recursive:!0,force:!0})};process.on(`exit`,t),process.on(`SIGINT`,t),process.on(`SIGTERM`,t);try{p(`npflared`);let t=w(`Thomascogez/npflared/apps/api`);U.start(`Cloning npflared...`),await t.clone(e);let n=d(e,`package.json`),r=JSON.parse(await a(n,`utf-8`)).version;await W();let i=d(H,r);await V(i)||await o(e,d(H,r)),U.stop(`Successfully cloned npflared (v${r})`);let s=await J(),l=await q();U.start(`Installing dependencies using ${l}...`),await E({quiet:!0,cwd:i})`npx -y ${l} install`,U.stop(`Successfully installed dependencies using ${l}`),U.start(`Retrieving Cloudflare account id...`);let u=await P();U.stop(),u?h.info(S.green(`Using cloudflare account id: ${S.bold.white(u)}`)):(h.error(S.red(`Could not retrieve Cloudflare account id, please login with ${S.bold.white(`wrangler login`)}`)),process.exit(1));let f=await G(),m=await K();U.start(`Generating wrangler configuration...`);let _={name:s,main:`src/index.ts`,compatibility_date:`2024-11-24`,compatibility_flags:[`nodejs_compat`],d1_databases:[{binding:`DB`,database_name:f.name,database_id:f.id}],r2_buckets:[{binding:`BUCKET`,bucket_name:m.name}]},v=d(i,`wrangler.json`);await c(d(i,`wrangler.json`),JSON.stringify(_,null,2)),U.stop(`Wrangler configuration generated at ${v}`);let y=await Y(i);U.start(`Applying D1 migrations...`),await z(f.name,{cwd:i}),U.stop(`Successfully applied D1 migrations`),U.start(`Deploying...`);let b=await B({cwd:i});U.stop(),h.info(S.green(C`
+🔥 npflared is now ready to use!
+🔗 Deployed to: ${S.bold.white(b)}
+👮 Admin token: ${S.bold.white(y)}
+📚 Check documentation for more information: ${S.bold.white(`https://npflared.thomas-cogez.fr`)}
+`)),g(`You're all set!`)}catch(e){h.error(`${e}`),process.exit(1)}finally{t()}};t(e(process.argv)).command(`install`,`Configure and deploy your own npflared instance on your cloudflare account`,e=>e,async()=>{await X()}).command(`clean`,`Clean the local npflared folder`,e=>e,async()=>{await j()}).demandCommand(1).parse();export{};
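Beyond the bundle rename from dist/index.js to dist/index.mjs, the new bundle registers a second yargs command, `clean`, which did not exist in 0.0.7. A de-minified sketch of that command (function `j` in the bundle above; the identifier names are reconstructions, not the originals):

import { rm } from "node:fs/promises";
import { homedir } from "node:os";
import { join } from "node:path";
import { intro, spinner } from "@clack/prompts";

const npflaredDirPath = join(homedir(), ".npflared");
const cliSpinner = spinner();

// Removes the local ~/.npflared cache that `install` populates, then exits.
const clean = async () => {
  intro("npflared");
  cliSpinner.start(`Deleting npflared directory (${npflaredDirPath})...`);
  await rm(npflaredDirPath, { recursive: true, force: true });
  cliSpinner.stop("Successfully deleted npflared directory");
  process.exit(0);
};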
package/package.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "name": "@npflared/cli",
-  "version": "0.0.7",
+  "version": "0.1.0",
   "type": "module",
   "bin": {
-    "npflared": "./dist/index.js"
+    "npflared": "./dist/index.mjs"
   },
   "keywords": [
     "npm",
@@ -13,23 +13,25 @@
   "license": "ISC",
   "devDependencies": {
     "@types/degit": "^2.8.6",
-    "@types/node": "^
-    "@types/yargs": "^17.0.
-    "
+    "@types/node": "^25.0.6",
+    "@types/yargs": "^17.0.35",
+    "tsdown": "^0.19.0"
   },
   "dependencies": {
     "@clack/prompts": "^0.11.0",
-    "chalk": "^5.
-    "dedent": "^1.
+    "chalk": "^5.6.2",
+    "dedent": "^1.7.1",
     "degit": "^2.8.4",
     "uuid-b32": "^1.0.1",
-    "yargs": "^
-    "zod": "^3.
-    "zx": "^8.
+    "yargs": "^18.0.0",
+    "zod": "^4.3.5",
+    "zx": "^8.8.5"
   },
+  "files": [
+    "dist"
+  ],
   "scripts": {
-    "
-    "build": "
-    "build:watch": "tsup --watch"
+    "build": "tsdown",
+    "build:watch": "tsdown --watch"
   }
 }
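The build tool moves from tsup to tsdown, the bundle extension changes from .js to .mjs, and the new "files" allowlist means only dist ships in the tarball, which is why the sources, logs, and configs below disappear from the published package. A plausible tsdown.config.ts producing dist/index.mjs might look like this (hypothetical: the actual config is not part of the published diff, and tsdown's tsup-compatible options are assumed):

import { defineConfig } from "tsdown";

export default defineConfig({
  entry: ["src/index.ts"], // assumed entry, mirroring the deleted tsup setup
  format: ["esm"],         // ESM output matches the new dist/index.mjs bin path
  minify: true             // the published dist/index.mjs is minified
});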
package/.turbo/turbo-build.log
DELETED
@@ -1,14 +0,0 @@
-
-
-> @npflared/cli@0.0.1 build /Users/thomascogez--allix/Developer/npflared/apps/cli
-> tsup
-
-CLI Building entry: src/index.ts
-CLI Using tsconfig: tsconfig.json
-CLI tsup v8.3.6
-CLI Using tsup config: /Users/thomascogez--allix/Developer/npflared/apps/cli/tsup.config.ts
-CLI Target: esnext
-CLI Cleaning output folder
-ESM Build start
-ESM dist/index.js 7.47 KB
-ESM ⚡️ Build success in 8ms
package/.turbo/turbo-lint.log
DELETED
@@ -1,29 +0,0 @@
-
-
-> @npflared/cli@0.0.1 lint /Users/thomascogez--allix/Developer/npflared/apps/cli
-> biome check
-
-/Users/thomascogez--allix/Developer/npflared/apps/cli/package.json format ━━━━━━━━━━━━━━━━━━━━━━━━━━
-
-  ✖ Formatter would have printed the following content:
-
-    11 11 │     "npflared": "./dist/index.js"
-    12 12 │   },
-    13    │ - → "keywords": [
-    14    │ - → → "npm",
-    15    │ - → → "registry",
-    16    │ - → → "npflared"
-    17    │ - → ],
-       13 │ + → "keywords": ["npm", "registry", "npflared"],
-    18 14 │   "license": "ISC",
-    19 15 │   "devDependencies": {
-
-
-Checked 9 files in 33ms. No fixes applied.
-Found 1 error.
-check ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
-
-  ✖ Some errors were emitted while running checks.
-
-
- ELIFECYCLE  Command failed with exit code 1.
package/CHANGELOG.md
DELETED
@@ -1,43 +0,0 @@
-# @npflared/cli
-
-## 0.0.7
-
-### Patch Changes
-
-- 9093b40: Bump dependencies
-
-## 0.0.6
-
-### Patch Changes
-
-- f302768: Bump dependencies
-
-## 0.0.5
-
-### Patch Changes
-
-- 171a0f0: Bump dependencies
-
-## 0.0.4
-
-### Patch Changes
-
-- 361fca4: Bump dependencies
-
-## 0.0.3
-
-### Patch Changes
-
-- 3aea557: Only include dist folder in package tarball
-
-## 0.0.2
-
-### Patch Changes
-
-- b31f2c5: Create d1 database name id was not correctly bind in generated wrangler config
-
-## 0.0.1
-
-### Patch Changes
-
-- 316bfd3: Initial release
package/biome.json
DELETED
package/dist/index.js
DELETED
@@ -1,7 +0,0 @@
-#!/usr/bin/env node
-import{hideBin as lt}from"yargs/helpers";import mt from"yargs/yargs";import{randomUUID as q}from"node:crypto";import{rmSync as z}from"node:fs";import{mkdir as G,mkdtemp as L,readFile as C,rename as W,writeFile as T}from"node:fs/promises";import{homedir as K,tmpdir as X}from"node:os";import{join as l}from"node:path";import{confirm as A,intro as Y,isCancel as m,log as y,outro as H,select as x,spinner as Q,text as D}from"@clack/prompts";import d from"chalk";import Z from"dedent";import tt from"degit";import{encode as et}from"uuid-b32";import{$ as at}from"zx";import{z as c}from"zod";import{$ as p,ProcessOutput as u}from"zx";var J=c.object({d1_databases:c.array(c.object({binding:c.string(),database_name:c.string(),database_id:c.string()})).min(1)}),U=c.object({r2_buckets:c.array(c.object({binding:c.string(),bucket_name:c.string()})).min(1)}),$=async()=>{try{let e=(await p({quiet:!0})`npx -y wrangler whoami`).stdout.match(/([0-9a-f]{32})/),[a]=e??[];return a}catch(t){throw t instanceof u?new Error(t.stderr||t.stdout):t}},v=async()=>{try{let t=[],a=(await p({quiet:!0})`npx -y wrangler d1 list`).stdout.matchAll(/│(.*)│(.*)│(.*)│(.*)│(.*)│(.*)│/gm);for(let o of a){let[,i,s,r,g,w,f]=o;(i||s||r||g||w||f)&&t.push({id:i.trim(),name:s.trim(),createdAt:r.trim(),version:g.trim(),numberOfTables:Number.parseInt(w),size:Number.parseInt(f)})}return t}catch(t){throw t instanceof u?new Error(t.stderr||t.stdout):t}},N=async()=>{try{let e=(await p({quiet:!0})`npx -y wrangler r2 bucket list`).stdout.matchAll(/name:(.*)\ncreation_date:(.*)/gim),a=[];for(let o of e){let[,i,s]=o;(i||s)&&a.push({name:i.trim(),createdAt:s.trim()})}return a}catch(t){throw t instanceof u?new Error(t.stderr||t.stdout):t}},_=async t=>{try{let a=(await p({quiet:!0})`npx -y wrangler r2 bucket create ${t}`).stdout.match(/\{(?:[^{}]*|\{(?:[^{}]*|\{[^{}]*\})*\})*\}/gim),o=U.safeParse(JSON.parse(a?.[0]??""));if(!o.success)throw new Error("Could not properly retrieve R2 bucket binding");return o.data}catch(e){throw e instanceof u?new Error(e.stderr||e.stdout):e}},R=async t=>{try{let a=(await p({quiet:!0})`npx -y wrangler d1 create ${t}`).stdout.match(/\{(?:[^{}]*|\{(?:[^{}]*|\{[^{}]*\})*\})*\}/gim),o=J.safeParse(JSON.parse(a?.[0]??""));if(!o.success)throw new Error("Could not properly retrieve D1 database binding");return o.data}catch(e){throw e instanceof u?new Error(e.stderr||e.stdout):e}},S=async(t,e={})=>{try{await p({cwd:e.cwd})`npx -y wrangler d1 migrations apply ${t} --remote --config wrangler.json`}catch(a){throw a instanceof u?new Error(a.stderr||a.stdout):a}},B=async(t={})=>{try{let a=(await p({quiet:!0,cwd:t.cwd})`npx -y wrangler deploy --config wrangler.json`).stdout.match(/([a-z0-9-]+\.[a-z0-9-]+\.workers\.dev)/i);return a?`https://${a[0]}`:"<unknown>"}catch(e){throw e instanceof u?new Error(e.stderr||e.stdout):e}};import{access as V}from"node:fs/promises";var b=async t=>{try{return await V(t),!0}catch(e){if(e?.code==="ENOENT")return!1;throw e}};var rt=".npflared",k=l(K(),rt),n=Q(),nt=async()=>{try{await G(k,{recursive:!0})}catch(t){if(t?.code!=="EEXIST")throw t}},ot=async()=>{let t=await A({message:"Use an existing D1 database?"});if(m(t)&&process.exit(1),t){n.start("Retrieving D1 databases...");let i=await v();n.stop();let s=await x({message:"Select a D1 database:",options:i.map(r=>({value:{name:r.name,id:r.id},label:`${r.name.padEnd(30)} (${r.id}) - Created at: ${r.createdAt}`}))});return m(s)&&process.exit(1),{name:s.name,id:s.id}}let e=await D({initialValue:"npflared",message:"Enter a name for your D1 database:",validate(i){if(i.length===0)return"Please enter a name for your D1 database"}});m(e)&&process.exit(1),n.start(`Creating D1 database ${e}...`);let a=await R(e);n.stop();let[o]=a.d1_databases;return o||(console.log(d.red(`Could not create D1 database ${e}`)),process.exit(1)),{name:o.database_name,id:o.database_id}},st=async()=>{let t=await A({message:"Use an existing R2 bucket?"});if(m(t)&&process.exit(1),t){n.start("Retrieving R2 buckets...");let i=await N();n.stop();let s=await x({message:"Select a R2 bucket:",options:i.map(r=>({value:{name:r.name},label:`${r.name.padEnd(30)} - Created at: ${r.createdAt}`}))});return m(s)&&process.exit(1),{name:s.name}}let e=await D({initialValue:"npflared",message:"Enter a name for your R2 bucket:",validate(i){if(i.length===0)return"Please enter a name for your R2 bucket"}});m(e)&&process.exit(1),n.start(`Creating R2 bucket ${e}...`);let a=await _(e);n.stop();let[o]=a.r2_buckets;return o||(console.log(d.red(`Could not create R2 bucket ${e}`)),process.exit(1)),{name:o.bucket_name}},it=async()=>{let t=await x({message:"Install dependencies with:",options:[{value:"npm",label:"npm"},{value:"pnpm",label:"pnpm"},{value:"yarn",label:"yarn"},{value:"bun",label:"bun"}]});return m(t)&&process.exit(1),t},ct=async()=>{let t=await D({initialValue:"npflared",message:"Enter a name for your worker:",validate(e){if(e.length===0)return"Please enter a name for your worker"}});return m(t)&&process.exit(1),t},dt=async t=>{n.start("Generating admin token...");let a=l(t,"migrations","9999_admin-token.sql");if(!await b(a)){let r=et(q()),g=Date.now();return await T(a,`INSERT INTO token (token, name, scopes, created_at, updated_at) VALUES ('${r}', 'admin-token', '[{"type": "token:read+write", "values": ["*"]}, {"type": "user:read+write", "values": ["*"]}, {"type": "package:read+write", "values": ["*"]}]', ${g}, ${g});`),n.stop(`Admin token migration file generated at ${a}`),r}let i=await C(a,"utf-8");return n.stop(`Admin token migration file already exists at ${a}`),i.match(/INSERT INTO token \(token, name, scopes, created_at, updated_at\) VALUES \('([^']+)'/)?.[0]??""},I=async()=>{let t=await L(l(X(),"npflared-")),e=()=>{t&&z(t,{recursive:!0,force:!0})};process.on("exit",e),process.on("SIGINT",e),process.on("SIGTERM",e);try{Y("npflared");let a=tt("Thomascogez/npflared/apps/api");n.start("Cloning npflared..."),await a.clone(t);let o=l(t,"package.json"),s=JSON.parse(await C(o,"utf-8")).version;await nt();let r=l(k,s);await b(r)||await W(t,l(k,s)),n.stop(`Successfully cloned npflared (v${s})`);let w=await ct(),f=await it();n.start(`Installing dependencies using ${f}...`),await at({quiet:!0,cwd:r})`npx -y ${f} install`,n.stop(`Successfully installed dependencies using ${f}`),n.start("Retrieving Cloudflare account id...");let E=await $();n.stop(),E?y.info(d.green(`Using cloudflare account id: ${d.bold.white(E)}`)):(y.error(d.red(`Could not retrieve Cloudflare account id, please login with ${d.bold.white("wrangler login")}`)),process.exit(1));let h=await ot(),P=await st();n.start("Generating wrangler configuration...");let O={name:w,main:"src/index.ts",compatibility_date:"2024-11-24",compatibility_flags:["nodejs_compat"],d1_databases:[{binding:"DB",database_name:h.name,database_id:h.id}],r2_buckets:[{binding:"BUCKET",bucket_name:P.name}]},j=l(r,"wrangler.json");await T(l(r,"wrangler.json"),JSON.stringify(O,null,2)),n.stop(`Wrangler configuration generated at ${j}`);let M=await dt(r);n.start("Applying D1 migrations..."),await S(h.name,{cwd:r}),n.stop("Successfully applied D1 migrations"),n.start("Deploying...");let F=await B({cwd:r});n.stop(),y.info(d.green(Z`
-🔥 npflared is now ready to use!
-🔗 Deployed to: ${d.bold.white(F)}
-👮 Admin token: ${d.bold.white(M)}
-📚 Check documentation for more information: ${d.bold.white("https://npflared.thomas-cogez.fr")}
-`)),H("You're all set!")}catch(a){y.error(`${a}`),process.exit(1)}finally{e()}};mt(lt(process.argv)).command("install","Configure and deploy your own npflared instance on your cloudflare account",t=>t,async t=>{await I()}).demandCommand(1).parse();
package/src/commands/install.ts
DELETED
@@ -1,314 +0,0 @@
-import { randomUUID } from "node:crypto";
-import { rmSync } from "node:fs";
-import { mkdir, mkdtemp, readFile, rename, writeFile } from "node:fs/promises";
-import { homedir, tmpdir } from "node:os";
-import { join } from "node:path";
-
-import { confirm, intro, isCancel, log, outro, select, spinner, text } from "@clack/prompts";
-import chalk from "chalk";
-import dedent from "dedent";
-import degit from "degit";
-import { encode } from "uuid-b32";
-import { $ } from "zx";
-
-import {
-  applyD1Migrations,
-  createD1Database,
-  createR2Bucket,
-  deploy,
-  getLocalAccountId,
-  listD1Databases,
-  listR2Buckets
-} from "../utils/cloudflare";
-import { pathExists } from "../utils/fs";
-
-const npflaredDirName = ".npflared";
-const npflaredDirPath = join(homedir(), npflaredDirName);
-
-const cliSpinner = spinner();
-
-const ensureNpflaredDirExists = async () => {
-  try {
-    await mkdir(npflaredDirPath, { recursive: true });
-  } catch (error) {
-    if ((error as NodeJS.ErrnoException)?.code !== "EEXIST") {
-      throw error;
-    }
-  }
-};
-
-const promptD1Database = async (): Promise<{ name: string; id: string }> => {
-  const useExistingDatabase = await confirm({ message: "Use an existing D1 database?" });
-
-  if (isCancel(useExistingDatabase)) {
-    process.exit(1);
-  }
-
-  if (useExistingDatabase) {
-    cliSpinner.start("Retrieving D1 databases...");
-    const d1Databases = await listD1Databases();
-    cliSpinner.stop();
-
-    const d1Database = await select({
-      message: "Select a D1 database:",
-      options: d1Databases.map((database) => ({
-        value: { name: database.name, id: database.id },
-        label: `${database.name.padEnd(30)} (${database.id}) - Created at: ${database.createdAt}`
-      }))
-    });
-
-    if (isCancel(d1Database)) {
-      process.exit(1);
-    }
-
-    return { name: d1Database.name, id: d1Database.id };
-  }
-
-  const d1DatabaseName = await text({
-    initialValue: "npflared",
-    message: "Enter a name for your D1 database:",
-    validate(value) {
-      if (value.length === 0) {
-        return "Please enter a name for your D1 database";
-      }
-    }
-  });
-
-  if (isCancel(d1DatabaseName)) {
-    process.exit(1);
-  }
-
-  cliSpinner.start(`Creating D1 database ${d1DatabaseName}...`);
-  const results = await createD1Database(d1DatabaseName);
-  cliSpinner.stop();
-
-  const [d1Binding] = results.d1_databases;
-  if (!d1Binding) {
-    console.log(chalk.red(`Could not create D1 database ${d1DatabaseName}`));
-    process.exit(1);
-  }
-
-  return { name: d1Binding.database_name, id: d1Binding.database_id };
-};
-
-const promptR2Bucket = async (): Promise<{ name: string }> => {
-  const useExistingBucket = await confirm({
-    message: "Use an existing R2 bucket?"
-  });
-
-  if (isCancel(useExistingBucket)) {
-    process.exit(1);
-  }
-
-  if (useExistingBucket) {
-    cliSpinner.start("Retrieving R2 buckets...");
-    const r2Buckets = await listR2Buckets();
-    cliSpinner.stop();
-
-    const r2Bucket = await select({
-      message: "Select a R2 bucket:",
-      options: r2Buckets.map((bucket) => ({
-        value: { name: bucket.name },
-        label: `${bucket.name.padEnd(30)} - Created at: ${bucket.createdAt}`
-      }))
-    });
-
-    if (isCancel(r2Bucket)) {
-      process.exit(1);
-    }
-
-    return { name: r2Bucket.name };
-  }
-
-  const r2BucketName = await text({
-    initialValue: "npflared",
-    message: "Enter a name for your R2 bucket:",
-    validate(value) {
-      if (value.length === 0) {
-        return "Please enter a name for your R2 bucket";
-      }
-    }
-  });
-
-  if (isCancel(r2BucketName)) {
-    process.exit(1);
-  }
-
-  cliSpinner.start(`Creating R2 bucket ${r2BucketName}...`);
-  const results = await createR2Bucket(r2BucketName);
-  cliSpinner.stop();
-
-  const [r2Binding] = results.r2_buckets;
-  if (!r2Binding) {
-    console.log(chalk.red(`Could not create R2 bucket ${r2BucketName}`));
-    process.exit(1);
-  }
-
-  return { name: r2Binding.bucket_name };
-};
-
-const promptPackageManager = async (): Promise<string> => {
-  const packageManager = await select({
-    message: "Install dependencies with:",
-    options: [
-      { value: "npm", label: "npm" },
-      { value: "pnpm", label: "pnpm" },
-      { value: "yarn", label: "yarn" },
-      { value: "bun", label: "bun" }
-    ]
-  });
-
-  if (isCancel(packageManager)) {
-    process.exit(1);
-  }
-
-  return packageManager;
-};
-
-const promptWorkerName = async (): Promise<string> => {
-  const workerName = await text({
-    initialValue: "npflared",
-    message: "Enter a name for your worker:",
-    validate(value) {
-      if (value.length === 0) {
-        return "Please enter a name for your worker";
-      }
-    }
-  });
-
-  if (isCancel(workerName)) {
-    process.exit(1);
-  }
-
-  return workerName;
-};
-
-const generateAdminToken = async (basePath: string) => {
-  cliSpinner.start("Generating admin token...");
-
-  const adminTokenMigrationFileName = "9999_admin-token.sql";
-  const adminTokenMigrationFilePath = join(basePath, "migrations", adminTokenMigrationFileName);
-
-  const adminTokenMigrationFileExists = await pathExists(adminTokenMigrationFilePath);
-  if (!adminTokenMigrationFileExists) {
-    const adminToken = encode(randomUUID());
-    const now = Date.now();
-
-    await writeFile(
-      adminTokenMigrationFilePath,
-      `INSERT INTO token (token, name, scopes, created_at, updated_at) VALUES ('${adminToken}', 'admin-token', '[{"type": "token:read+write", "values": ["*"]}, {"type": "user:read+write", "values": ["*"]}, {"type": "package:read+write", "values": ["*"]}]', ${now}, ${now});`
-    );
-    cliSpinner.stop(`Admin token migration file generated at ${adminTokenMigrationFilePath}`);
-    return adminToken;
-  }
-  const migrationFileContent = await readFile(adminTokenMigrationFilePath, "utf-8");
-  cliSpinner.stop(`Admin token migration file already exists at ${adminTokenMigrationFilePath}`);
-
-  // extract token from migration file
-  const match = migrationFileContent.match(
-    /INSERT INTO token \(token, name, scopes, created_at, updated_at\) VALUES \('([^']+)'/
-  );
-
-  return match?.[0] ?? "";
-};
-
-export const install = async () => {
-  const cloneTmpDir = await mkdtemp(join(tmpdir(), "npflared-"));
-
-  const cleanup = () => {
-    if (cloneTmpDir) {
-      rmSync(cloneTmpDir, { recursive: true, force: true });
-    }
-  };
-
-  process.on("exit", cleanup);
-  process.on("SIGINT", cleanup);
-  process.on("SIGTERM", cleanup);
-
-  try {
-    intro("npflared");
-
-    const repository = degit("Thomascogez/npflared/apps/api");
-
-    cliSpinner.start("Cloning npflared...");
-    await repository.clone(cloneTmpDir);
-
-    const packageJsonPath = join(cloneTmpDir, "package.json");
-    const packageJson = JSON.parse(await readFile(packageJsonPath, "utf-8"));
-    const npflaredVersion = packageJson.version;
-
-    await ensureNpflaredDirExists();
-    const npflaredCurrentVersionDirectory = join(npflaredDirPath, npflaredVersion);
-
-    const localVersionExists = await pathExists(npflaredCurrentVersionDirectory);
-    if (!localVersionExists) {
-      await rename(cloneTmpDir, join(npflaredDirPath, npflaredVersion));
-    }
-    cliSpinner.stop(`Successfully cloned npflared (v${npflaredVersion})`);
-
-    const workerName = await promptWorkerName();
-
-    const packageManager = await promptPackageManager();
-
-    cliSpinner.start(`Installing dependencies using ${packageManager}...`);
-    await $({ quiet: true, cwd: npflaredCurrentVersionDirectory })`npx -y ${packageManager} install`;
-    cliSpinner.stop(`Successfully installed dependencies using ${packageManager}`);
-
-    cliSpinner.start("Retrieving Cloudflare account id...");
-    const cloudflareAccountId = await getLocalAccountId();
-    cliSpinner.stop();
-
-    if (!cloudflareAccountId) {
-      log.error(
-        chalk.red(`Could not retrieve Cloudflare account id, please login with ${chalk.bold.white("wrangler login")}`)
-      );
-
-      process.exit(1);
-    } else {
-      log.info(chalk.green(`Using cloudflare account id: ${chalk.bold.white(cloudflareAccountId)}`));
-    }
-
-    const d1Database = await promptD1Database();
-    const r2Bucket = await promptR2Bucket();
-
-    cliSpinner.start("Generating wrangler configuration...");
-    const wranglerConfig = {
-      name: workerName,
-      main: "src/index.ts",
-      compatibility_date: "2024-11-24",
-      compatibility_flags: ["nodejs_compat"],
-      d1_databases: [{ binding: "DB", database_name: d1Database.name, database_id: d1Database.id }],
-      r2_buckets: [{ binding: "BUCKET", bucket_name: r2Bucket.name }]
-    };
-    const wranglerConfigFilePath = join(npflaredCurrentVersionDirectory, "wrangler.json");
-
-    await writeFile(join(npflaredCurrentVersionDirectory, "wrangler.json"), JSON.stringify(wranglerConfig, null, 2));
-    cliSpinner.stop(`Wrangler configuration generated at ${wranglerConfigFilePath}`);
-
-    const adminToken = await generateAdminToken(npflaredCurrentVersionDirectory);
-
-    cliSpinner.start("Applying D1 migrations...");
-    await applyD1Migrations(d1Database.name, { cwd: npflaredCurrentVersionDirectory });
-    cliSpinner.stop("Successfully applied D1 migrations");
-
-    cliSpinner.start("Deploying...");
-    const deployedUrl = await deploy({ cwd: npflaredCurrentVersionDirectory });
-    cliSpinner.stop();
-
-    log.info(
-      chalk.green(dedent`
-        🔥 npflared is now ready to use!
-        🔗 Deployed to: ${chalk.bold.white(deployedUrl)}
-        👮 Admin token: ${chalk.bold.white(adminToken)}
-        📚 Check documentation for more information: ${chalk.bold.white("https://npflared.thomas-cogez.fr")}
-      `)
-    );
-
-    outro(`You're all set!`);
-  } catch (error) {
-    log.error(`${error}`);
-    process.exit(1);
-  } finally {
-    cleanup();
-  }
-};
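For reference, the wranglerConfig object above serializes to a wrangler.json of this shape; the worker name, database name, and database id below are hypothetical placeholders standing in for the user's prompt answers. Note also that when the migration file already exists, generateAdminToken returns match?.[0] (the full matched INSERT prefix) rather than capture group [1] (the token alone), so the reprinted "Admin token" can include the surrounding SQL.

{
  "name": "npflared",
  "main": "src/index.ts",
  "compatibility_date": "2024-11-24",
  "compatibility_flags": ["nodejs_compat"],
  "d1_databases": [
    { "binding": "DB", "database_name": "npflared", "database_id": "00000000-0000-0000-0000-000000000000" }
  ],
  "r2_buckets": [
    { "binding": "BUCKET", "bucket_name": "npflared" }
  ]
}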
package/src/index.ts
DELETED
@@ -1,17 +0,0 @@
-#!/usr/bin/env node
-
-import { hideBin } from "yargs/helpers";
-import yargs from "yargs/yargs";
-import { install } from "./commands/install";
-
-yargs(hideBin(process.argv))
-  .command(
-    "install",
-    "Configure and deploy your own npflared instance on your cloudflare account",
-    (yargs) => yargs,
-    async (argv) => {
-      await install();
-    }
-  )
-  .demandCommand(1)
-  .parse();
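This deleted entry point only wired up install; the 0.1.0 bundle registers both commands. A de-minified sketch of the new registration, reconstructed from dist/index.mjs above (names are reconstructions):

import { hideBin } from "yargs/helpers";
import yargs from "yargs/yargs";

// `install` and `clean` stand for functions X and j in the minified bundle.
declare const install: () => Promise<void>;
declare const clean: () => Promise<void>;

yargs(hideBin(process.argv))
  .command(
    "install",
    "Configure and deploy your own npflared instance on your cloudflare account",
    (yargs) => yargs,
    async () => {
      await install();
    }
  )
  .command("clean", "Clean the local npflared folder", (yargs) => yargs, async () => {
    await clean();
  })
  .demandCommand(1)
  .parse();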
package/src/types.ts
DELETED
package/src/utils/cloudflare.ts
DELETED
@@ -1,169 +0,0 @@
-import { z } from "zod";
-import { $, ProcessOutput } from "zx";
-import type { D1Database, R2Bucket } from "../types";
-
-const createD1DatabaseOutputSchema = z.object({
-  d1_databases: z
-    .array(
-      z.object({
-        binding: z.string(),
-        database_name: z.string(),
-        database_id: z.string()
-      })
-    )
-    .min(1)
-});
-
-const createR2BucketOutputSchema = z.object({
-  r2_buckets: z
-    .array(
-      z.object({
-        binding: z.string(),
-        bucket_name: z.string()
-      })
-    )
-    .min(1)
-});
-
-export const getLocalAccountId = async () => {
-  try {
-    const result = await $({ quiet: true })`npx -y wrangler whoami`;
-    const match = result.stdout.match(/([0-9a-f]{32})/);
-    const [accountId] = match ?? [];
-
-    return accountId;
-  } catch (error) {
-    if (error instanceof ProcessOutput) {
-      throw new Error(error.stderr || error.stdout);
-    }
-
-    throw error;
-  }
-};
-
-export const listD1Databases = async () => {
-  try {
-    const d1Databases: D1Database[] = [];
-
-    const result = await $({ quiet: true })`npx -y wrangler d1 list`;
-    const matches = result.stdout.matchAll(/│(.*)│(.*)│(.*)│(.*)│(.*)│(.*)│/gm);
-
-    for (const match of matches) {
-      const [, id, name, createdAt, version, numberOfTables, size] = match;
-
-      if (id || name || createdAt || version || numberOfTables || size) {
-        d1Databases.push({
-          id: id.trim(),
-          name: name.trim(),
-          createdAt: createdAt.trim(),
-          version: version.trim(),
-          numberOfTables: Number.parseInt(numberOfTables),
-          size: Number.parseInt(size)
-        });
-      }
-    }
-
-    return d1Databases;
-  } catch (error) {
-    if (error instanceof ProcessOutput) {
-      throw new Error(error.stderr || error.stdout);
-    }
-
-    throw error;
-  }
-};
-
-export const listR2Buckets = async () => {
-  try {
-    const result = await $({ quiet: true })`npx -y wrangler r2 bucket list`;
-    const matches = result.stdout.matchAll(/name:(.*)\ncreation_date:(.*)/gim);
-
-    const r2Buckets: R2Bucket[] = [];
-
-    for (const match of matches) {
-      const [, name, createdAt] = match;
-
-      if (name || createdAt) {
-        r2Buckets.push({
-          name: name.trim(),
-          createdAt: createdAt.trim()
-        });
-      }
-    }
-
-    return r2Buckets;
-  } catch (error) {
-    if (error instanceof ProcessOutput) {
-      throw new Error(error.stderr || error.stdout);
-    }
-
-    throw error;
-  }
-};
-
-export const createR2Bucket = async (name: string) => {
-  try {
-    const result = await $({ quiet: true })`npx -y wrangler r2 bucket create ${name}`;
-    const match = result.stdout.match(/\{(?:[^{}]*|\{(?:[^{}]*|\{[^{}]*\})*\})*\}/gim);
-
-    const parsedR2Binding = createR2BucketOutputSchema.safeParse(JSON.parse(match?.[0] ?? ""));
-
-    if (!parsedR2Binding.success) {
-      throw new Error("Could not properly retrieve R2 bucket binding");
-    }
-
-    return parsedR2Binding.data;
-  } catch (error) {
-    if (error instanceof ProcessOutput) {
-      throw new Error(error.stderr || error.stdout);
-    }
-
-    throw error;
-  }
-};
-
-export const createD1Database = async (name: string) => {
-  try {
-    const result = await $({ quiet: true })`npx -y wrangler d1 create ${name}`;
-    const match = result.stdout.match(/\{(?:[^{}]*|\{(?:[^{}]*|\{[^{}]*\})*\})*\}/gim);
-
-    const parsedD1Binding = createD1DatabaseOutputSchema.safeParse(JSON.parse(match?.[0] ?? ""));
-    if (!parsedD1Binding.success) {
-      throw new Error("Could not properly retrieve D1 database binding");
-    }
-
-    return parsedD1Binding.data;
-  } catch (error) {
-    if (error instanceof ProcessOutput) {
-      throw new Error(error.stderr || error.stdout);
-    }
-
-    throw error;
-  }
-};
-
-export const applyD1Migrations = async (d1DatabaseName: string, config: { cwd?: string } = {}) => {
-  try {
-    await $({ cwd: config.cwd })`npx -y wrangler d1 migrations apply ${d1DatabaseName} --remote --config wrangler.json`;
-  } catch (error) {
-    if (error instanceof ProcessOutput) {
-      throw new Error(error.stderr || error.stdout);
-    }
-
-    throw error;
-  }
-};
-
-export const deploy = async (config: { cwd?: string } = {}) => {
-  try {
-    const result = await $({ quiet: true, cwd: config.cwd })`npx -y wrangler deploy --config wrangler.json`;
-    const match = result.stdout.match(/([a-z0-9-]+\.[a-z0-9-]+\.workers\.dev)/i);
-
-    return match ? `https://${match[0]}` : "<unknown>";
-  } catch (error) {
-    if (error instanceof ProcessOutput) {
-      throw new Error(error.stderr || error.stdout);
-    }
-    throw error;
-  }
-};
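Every wrapper above repeats the same catch block to normalize zx's ProcessOutput failures into plain Errors. A possible factoring of that repeated pattern into a shared helper (not in the source; a sketch only):

import { ProcessOutput } from "zx";

// Normalizes a failed zx command into a plain Error carrying the process output.
const rethrowProcessError = (error: unknown): never => {
  if (error instanceof ProcessOutput) {
    throw new Error(error.stderr || error.stdout);
  }
  throw error;
};

// Usage inside any of the wrappers above:
// try { ... } catch (error) { rethrowProcessError(error); }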
package/src/utils/fs.ts
DELETED
@@ -1,14 +0,0 @@
-import { access } from "node:fs/promises";
-
-export const pathExists = async (path: string) => {
-  try {
-    await access(path);
-    return true;
-  } catch (error) {
-    if ((error as NodeJS.ErrnoException)?.code === "ENOENT") {
-      return false;
-    }
-
-    throw error;
-  }
-};
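pathExists is the usual access-based replacement for the deprecated fs.exists; note it only swallows ENOENT, so permission errors still propagate. A quick usage sketch (the version string here is a hypothetical example):

import { homedir } from "node:os";
import { join } from "node:path";
import { pathExists } from "./utils/fs";

// Skip re-cloning when the versioned install directory is already present,
// mirroring how install.ts guards the rename step above.
const alreadyInstalled = await pathExists(join(homedir(), ".npflared", "0.1.0"));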
package/tsconfig.json
DELETED
@@ -1,20 +0,0 @@
-{
-  "compilerOptions": {
-    "esModuleInterop": true,
-    "skipLibCheck": true,
-    "target": "es2022",
-    "allowJs": true,
-    "resolveJsonModule": true,
-    "module": "ESNext",
-    "noEmit": true,
-    "moduleResolution": "Bundler",
-    "moduleDetection": "force",
-    "isolatedModules": false,
-    "verbatimModuleSyntax": true,
-    "strict": true,
-    "noImplicitOverride": true,
-    "noFallthroughCasesInSwitch": true
-  },
-  "exclude": ["node_modules"],
-  "include": ["src"]
-}