@arcote.tech/arc-cli 0.5.2 → 0.5.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +2854 -637
- package/package.json +8 -7
- package/src/builder/module-builder.ts +35 -9
- package/src/commands/platform-deploy.ts +143 -0
- package/src/commands/platform-start.ts +4 -1
- package/src/deploy/ansible.ts +69 -0
- package/src/deploy/assets/ansible/site.yml +169 -0
- package/src/deploy/assets/terraform/main.tf +38 -0
- package/src/deploy/assets/terraform/variables.tf +35 -0
- package/src/deploy/assets.ts +282 -0
- package/src/deploy/bootstrap.ts +131 -0
- package/src/deploy/caddyfile.ts +59 -0
- package/src/deploy/compose.ts +73 -0
- package/src/deploy/config.ts +279 -0
- package/src/deploy/remote-state.ts +92 -0
- package/src/deploy/remote-sync.ts +202 -0
- package/src/deploy/ssh.ts +246 -0
- package/src/deploy/survey.ts +172 -0
- package/src/deploy/terraform.ts +109 -0
- package/src/index.ts +12 -0
- package/src/platform/deploy-api.ts +183 -0
- package/src/platform/server.ts +49 -25
- package/src/platform/shared.ts +45 -3
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
import { spawn } from "bun";
|
|
2
|
+
import type { DeployTarget } from "./config";
|
|
3
|
+
|
|
4
|
+
/** Convert a Bun subprocess stream (which may be a ReadableStream or undefined) to string. */
|
|
5
|
+
async function streamToString(
|
|
6
|
+
stream: ReadableStream<Uint8Array> | number | undefined,
|
|
7
|
+
): Promise<string> {
|
|
8
|
+
if (!stream || typeof stream === "number") return "";
|
|
9
|
+
return new Response(stream as ReadableStream<Uint8Array>).text();
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
// ---------------------------------------------------------------------------
|
|
13
|
+
// Thin wrappers over system ssh / scp / rsync via Bun.spawn.
|
|
14
|
+
// Zero JS crypto dependencies — uses ssh-agent / ~/.ssh/config on the host.
|
|
15
|
+
// ---------------------------------------------------------------------------
|
|
16
|
+
|
|
17
|
+
/** Outcome of a single remote command run via `sshExec`. */
export interface SshExecResult {
  /** Captured standard output of the remote command. */
  stdout: string;
  /** Captured standard error of the remote command. */
  stderr: string;
  /** Exit status of the local ssh process (0 on success). */
  exitCode: number;
}
|
|
22
|
+
|
|
23
|
+
function baseSshArgs(target: DeployTarget): string[] {
|
|
24
|
+
const args = [
|
|
25
|
+
"-o",
|
|
26
|
+
"BatchMode=yes",
|
|
27
|
+
"-o",
|
|
28
|
+
"StrictHostKeyChecking=accept-new",
|
|
29
|
+
"-p",
|
|
30
|
+
String(target.port),
|
|
31
|
+
];
|
|
32
|
+
if (target.sshKey) args.push("-i", target.sshKey);
|
|
33
|
+
return args;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* Run a command on the remote host. Captures stdout/stderr. Does NOT throw
|
|
38
|
+
* on non-zero exit — callers decide. Use `assertExec` for strict mode.
|
|
39
|
+
*/
|
|
40
|
+
export async function sshExec(
|
|
41
|
+
target: DeployTarget,
|
|
42
|
+
cmd: string,
|
|
43
|
+
opts: { stdin?: Uint8Array | string; quiet?: boolean } = {},
|
|
44
|
+
): Promise<SshExecResult> {
|
|
45
|
+
const args = [
|
|
46
|
+
...baseSshArgs(target),
|
|
47
|
+
`${target.user}@${target.host}`,
|
|
48
|
+
"--",
|
|
49
|
+
cmd,
|
|
50
|
+
];
|
|
51
|
+
const proc = spawn({
|
|
52
|
+
cmd: ["ssh", ...args],
|
|
53
|
+
stdin: opts.stdin ? "pipe" : "ignore",
|
|
54
|
+
stdout: "pipe",
|
|
55
|
+
stderr: "pipe",
|
|
56
|
+
});
|
|
57
|
+
if (opts.stdin && proc.stdin) {
|
|
58
|
+
const data =
|
|
59
|
+
typeof opts.stdin === "string"
|
|
60
|
+
? new TextEncoder().encode(opts.stdin)
|
|
61
|
+
: opts.stdin;
|
|
62
|
+
await (proc.stdin as any).write(data);
|
|
63
|
+
await (proc.stdin as any).end?.();
|
|
64
|
+
}
|
|
65
|
+
const [stdout, stderr, exitCode] = await Promise.all([
|
|
66
|
+
streamToString(proc.stdout),
|
|
67
|
+
streamToString(proc.stderr),
|
|
68
|
+
proc.exited,
|
|
69
|
+
]);
|
|
70
|
+
if (!opts.quiet && exitCode !== 0) {
|
|
71
|
+
process.stderr.write(stderr);
|
|
72
|
+
}
|
|
73
|
+
return { stdout, stderr, exitCode };
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
export async function assertExec(
|
|
77
|
+
target: DeployTarget,
|
|
78
|
+
cmd: string,
|
|
79
|
+
): Promise<string> {
|
|
80
|
+
const res = await sshExec(target, cmd);
|
|
81
|
+
if (res.exitCode !== 0) {
|
|
82
|
+
throw new Error(
|
|
83
|
+
`SSH command failed (exit ${res.exitCode}): ${cmd}\n${res.stderr}`,
|
|
84
|
+
);
|
|
85
|
+
}
|
|
86
|
+
return res.stdout;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
/** True if ssh can reach the host and authenticate. */
|
|
90
|
+
export async function canSsh(target: DeployTarget): Promise<boolean> {
|
|
91
|
+
const res = await sshExec(target, "true", { quiet: true });
|
|
92
|
+
return res.exitCode === 0;
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
/** Wait up to `timeoutMs` for SSH to become available. */
|
|
96
|
+
export async function waitForSsh(
|
|
97
|
+
target: DeployTarget,
|
|
98
|
+
opts: { timeoutMs?: number; intervalMs?: number } = {},
|
|
99
|
+
): Promise<void> {
|
|
100
|
+
const timeout = opts.timeoutMs ?? 300_000; // 5 min
|
|
101
|
+
const interval = opts.intervalMs ?? 10_000; // 10s
|
|
102
|
+
const start = Date.now();
|
|
103
|
+
while (Date.now() - start < timeout) {
|
|
104
|
+
if (await canSsh(target)) return;
|
|
105
|
+
await Bun.sleep(interval);
|
|
106
|
+
}
|
|
107
|
+
throw new Error(`Timed out waiting for SSH on ${target.user}@${target.host}`);
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Upload a single file to the remote host via scp.
|
|
112
|
+
*/
|
|
113
|
+
export async function scpUpload(
|
|
114
|
+
target: DeployTarget,
|
|
115
|
+
localPath: string,
|
|
116
|
+
remotePath: string,
|
|
117
|
+
): Promise<void> {
|
|
118
|
+
const args = [
|
|
119
|
+
"-o",
|
|
120
|
+
"BatchMode=yes",
|
|
121
|
+
"-o",
|
|
122
|
+
"StrictHostKeyChecking=accept-new",
|
|
123
|
+
"-P",
|
|
124
|
+
String(target.port),
|
|
125
|
+
];
|
|
126
|
+
if (target.sshKey) args.push("-i", target.sshKey);
|
|
127
|
+
args.push(localPath, `${target.user}@${target.host}:${remotePath}`);
|
|
128
|
+
|
|
129
|
+
const proc = spawn({ cmd: ["scp", ...args], stderr: "pipe" });
|
|
130
|
+
const [stderr, exitCode] = await Promise.all([
|
|
131
|
+
streamToString(proc.stderr),
|
|
132
|
+
proc.exited,
|
|
133
|
+
]);
|
|
134
|
+
if (exitCode !== 0) {
|
|
135
|
+
throw new Error(`scp failed (${exitCode}): ${stderr}`);
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
/**
|
|
140
|
+
* Rsync a directory to the remote host (preserving permissions, deleting stale files).
|
|
141
|
+
*
|
|
142
|
+
* `-L` dereferences symlinks on the source side. This is essential because Arc
|
|
143
|
+
* projects typically use `bun link` for framework packages — `node_modules/@arcote.tech/*`
|
|
144
|
+
* and workspace `@ndt/*` packages are symlinks pointing at paths that don't
|
|
145
|
+
* exist on the remote host. Without `-L`, rsync copies them as dangling
|
|
146
|
+
* symlinks and the container can't resolve `node_modules/.bin/arc`.
|
|
147
|
+
*/
|
|
148
|
+
export async function rsyncDir(
|
|
149
|
+
target: DeployTarget,
|
|
150
|
+
localDir: string,
|
|
151
|
+
remoteDir: string,
|
|
152
|
+
opts: { delete?: boolean } = {},
|
|
153
|
+
): Promise<void> {
|
|
154
|
+
const sshCmdParts = ["ssh", "-p", String(target.port)];
|
|
155
|
+
if (target.sshKey) sshCmdParts.push("-i", target.sshKey);
|
|
156
|
+
const sshCmd = sshCmdParts.join(" ");
|
|
157
|
+
|
|
158
|
+
const args: string[] = ["-azL", "-e", sshCmd];
|
|
159
|
+
if (opts.delete) args.push("--delete");
|
|
160
|
+
// Trailing slash: sync contents, not the dir itself
|
|
161
|
+
const src = localDir.endsWith("/") ? localDir : `${localDir}/`;
|
|
162
|
+
args.push(src, `${target.user}@${target.host}:${remoteDir}`);
|
|
163
|
+
|
|
164
|
+
const proc = spawn({
|
|
165
|
+
cmd: ["rsync", ...args],
|
|
166
|
+
stderr: "pipe",
|
|
167
|
+
stdout: "pipe",
|
|
168
|
+
});
|
|
169
|
+
const [stderr, exitCode] = await Promise.all([
|
|
170
|
+
streamToString(proc.stderr),
|
|
171
|
+
proc.exited,
|
|
172
|
+
]);
|
|
173
|
+
if (exitCode !== 0) {
|
|
174
|
+
throw new Error(`rsync failed (${exitCode}): ${stderr}`);
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
/**
 * Open an SSH -L tunnel. Returns a Disposable-like object with `.close()`.
 * Caller MUST call close() (or use `using` in Bun) to release the tunnel.
 */
export interface SshTunnel {
  /** Local TCP port the tunnel is listening on. */
  localPort: number;
  /** Tear down the tunnel (kills the underlying ssh process). */
  close(): void;
}
|
|
186
|
+
|
|
187
|
+
/**
 * Open an SSH local-forward tunnel (`ssh -N -L localPort:remoteHost:remotePort`)
 * to `target` and wait (up to 10s) until the local end accepts TCP connections.
 *
 * @returns an `SshTunnel`; the caller must invoke `close()` to kill the
 *   background ssh process.
 * @throws if ssh exits before the tunnel comes up, or if the local port never
 *   becomes connectable within the 10s window.
 */
export async function openTunnel(
  target: DeployTarget,
  localPort: number,
  remoteHost: string,
  remotePort: number,
): Promise<SshTunnel> {
  const args = [
    ...baseSshArgs(target),
    "-N", // no remote command — forwarding only
    "-L",
    `${localPort}:${remoteHost}:${remotePort}`,
    `${target.user}@${target.host}`,
  ];
  const proc = spawn({
    cmd: ["ssh", ...args],
    stdin: "ignore",
    stdout: "pipe",
    stderr: "pipe",
  });
  // Wait briefly for the tunnel to establish. ssh prints nothing on success
  // with -N, so we poll a TCP connect on localPort.
  // NOTE(review): the probe cannot distinguish our tunnel from another
  // process already listening on localPort — callers should pick a free port.
  const deadline = Date.now() + 10_000;
  let lastErr: unknown;
  while (Date.now() < deadline) {
    // ssh died (bad auth, port in use, …) — surface its stderr.
    if (proc.exitCode !== null) {
      const stderr = await streamToString(proc.stderr);
      throw new Error(`ssh tunnel exited early: ${stderr}`);
    }
    try {
      // Probe only; all socket events are ignored and the connection is
      // closed immediately once it succeeds.
      const probe = await Bun.connect({
        hostname: "127.0.0.1",
        port: localPort,
        socket: { data() {}, open() {}, close() {}, error() {} },
      });
      probe.end();
      return {
        localPort,
        close() {
          try {
            proc.kill();
          } catch {
            // ignore
          }
        },
      };
    } catch (e) {
      // Not listening yet — remember the error and retry shortly.
      lastErr = e;
      await Bun.sleep(200);
    }
  }
  // Timed out: reap the ssh process before reporting failure.
  try {
    proc.kill();
  } catch {
    // ignore
  }
  throw new Error(
    `Failed to establish SSH tunnel on localhost:${localPort}: ${String(lastErr)}`,
  );
}
|
|
246
|
+
|
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
import * as clack from "@clack/prompts";
|
|
2
|
+
import type {
|
|
3
|
+
DeployConfig,
|
|
4
|
+
DeployEnv,
|
|
5
|
+
DeployProvision,
|
|
6
|
+
DeployProvisionTerraform,
|
|
7
|
+
} from "./config";
|
|
8
|
+
|
|
9
|
+
// ---------------------------------------------------------------------------
|
|
10
|
+
// Interactive init for deploy.arc.json. Grouped into minimal phases:
|
|
11
|
+
// 1) Target server
|
|
12
|
+
// 2) Environments
|
|
13
|
+
// 3) Optional provisioning (Terraform)
|
|
14
|
+
// 4) Caddy email
|
|
15
|
+
// Returns a validated DeployConfig ready to save. On Ctrl-C, exits the process.
|
|
16
|
+
// ---------------------------------------------------------------------------
|
|
17
|
+
|
|
18
|
+
/**
 * Interactive first-run survey that assembles a `DeployConfig`.
 *
 * Walks the user through: connection mode (existing server vs. Terraform
 * provisioning), SSH target details, one or more environments (name + domain),
 * optional Hetzner provisioning parameters, and the Caddy/ACME email.
 *
 * Every prompt is followed by an `isCancel` check; Ctrl-C exits the process
 * via `cancel()` (so this function either returns a complete config or never
 * returns). NOTE(review): results are cast `as string` *before* the
 * `isCancel` check — safe at runtime because `isCancel` tests for the cancel
 * symbol, but the casts bypass the types clack provides.
 *
 * @returns a `DeployConfig` ready to be written to deploy.arc.json; when the
 *   Terraform path is chosen and no host was entered, `target.host` is the
 *   sentinel "PENDING_TERRAFORM".
 */
export async function runSurvey(): Promise<DeployConfig> {
  clack.intro("arc platform deploy — initial setup");

  // Phase 1: mode
  const mode = (await clack.select({
    message: "How should deploy reach the target server?",
    options: [
      {
        value: "existing",
        label: "Use an existing server (I have SSH access)",
      },
      {
        value: "provision",
        label: "Provision a new server via Terraform (Hetzner Cloud)",
      },
    ],
    initialValue: "existing",
  })) as string;
  if (clack.isCancel(mode)) cancel();

  // Phase 2: target
  // When provisioning, the host may be left blank — terraform apply fills it
  // in later, hence the relaxed validation.
  const host = (await clack.text({
    message:
      mode === "provision"
        ? "Alias for the server (leave blank — will be filled after terraform apply)"
        : "SSH host (IP or alias from ~/.ssh/config)",
    placeholder: mode === "provision" ? "auto" : "example.com or 1.2.3.4",
    validate: (v) => {
      if (mode === "provision") return undefined;
      if (!v || v.length === 0) return "Host is required";
      return undefined;
    },
  })) as string;
  if (clack.isCancel(host)) cancel();

  const user = (await clack.text({
    message: "SSH user",
    initialValue: "deploy",
    validate: (v) =>
      /^[a-z_][a-z0-9_-]*$/.test(v) ? undefined : "Invalid username",
  })) as string;
  if (clack.isCancel(user)) cancel();

  const portRaw = (await clack.text({
    message: "SSH port",
    initialValue: "22",
    validate: (v) => {
      const n = Number(v);
      return Number.isInteger(n) && n > 0 && n < 65536
        ? undefined
        : "Must be a port number";
    },
  })) as string;
  if (clack.isCancel(portRaw)) cancel();
  // Safe: validation above guarantees portRaw parses to an integer in range.
  const port = Number(portRaw);

  // Phase 3: environments
  // Loop until the user declines "add another"; at least one env is always
  // collected. First env defaults to "prod", later ones to "staging".
  const envs: Record<string, DeployEnv> = {};
  let more = true;
  while (more) {
    const name = (await clack.text({
      message: `Environment name (lowercase, e.g. "prod", "staging")`,
      initialValue: Object.keys(envs).length === 0 ? "prod" : "staging",
      validate: (v) => {
        if (!/^[a-z][a-z0-9-]*$/.test(v)) return "Must match [a-z][a-z0-9-]*";
        if (envs[v]) return "Already defined";
        return undefined;
      },
    })) as string;
    if (clack.isCancel(name)) cancel();

    const domain = (await clack.text({
      message: `Domain for "${name}" (Caddy will route by Host header)`,
      placeholder: `${name === "prod" ? "app" : name}.example.com`,
      validate: (v) =>
        /^[a-z0-9.-]+\.[a-z]{2,}$/i.test(v)
          ? undefined
          : "Expected a domain like app.example.com",
    })) as string;
    if (clack.isCancel(domain)) cancel();

    envs[name] = { domain };

    const addAnother = (await clack.confirm({
      message: "Add another environment?",
      initialValue: false,
    })) as boolean;
    if (clack.isCancel(addAnother)) cancel();
    more = addAnother;
  }

  // Phase 4: provisioning (only if user chose it)
  // Note: only the *name* of the token env var is stored, never the token.
  let provision: DeployProvision | undefined;
  if (mode === "provision") {
    const tokenEnv = (await clack.text({
      message: "Name of env var holding your Hetzner Cloud API token",
      initialValue: "HCLOUD_TOKEN",
      validate: (v) =>
        /^[A-Z_][A-Z0-9_]*$/.test(v) ? undefined : "Must be SCREAMING_SNAKE_CASE",
    })) as string;
    if (clack.isCancel(tokenEnv)) cancel();

    const serverType = (await clack.text({
      message: "Hetzner server type",
      initialValue: "cx32",
    })) as string;
    if (clack.isCancel(serverType)) cancel();

    const location = (await clack.text({
      message: "Hetzner datacenter location",
      initialValue: "nbg1",
    })) as string;
    if (clack.isCancel(location)) cancel();

    const terraform: DeployProvisionTerraform = {
      provider: "hcloud",
      serverType,
      location,
      image: "ubuntu-22.04",
      tokenEnv,
    };
    provision = { terraform };
  }

  // Phase 5: Caddy email
  // "internal" is a Caddy keyword for its self-signed internal CA.
  const email = (await clack.text({
    message: `Email for Let's Encrypt (use "internal" for self-signed certs)`,
    placeholder: "admin@example.com",
    validate: (v) => {
      if (v === "internal") return undefined;
      if (!/^\S+@\S+\.\S+$/.test(v)) return "Expected an email or 'internal'";
      return undefined;
    },
  })) as string;
  if (clack.isCancel(email)) cancel();

  clack.outro("Configuration ready — writing deploy.arc.json");

  return {
    target: {
      host: host || "PENDING_TERRAFORM",
      user,
      port,
      remoteDir: "/opt/arc",
    },
    envs,
    caddy: { email },
    provision,
  };
}
|
|
168
|
+
|
|
169
|
+
/**
 * Abort the survey: print clack's cancellation notice and exit the process.
 * Exits with code 0 — a user cancelling setup is not an error.
 */
function cancel(): never {
  clack.cancel("Cancelled.");
  process.exit(0);
}
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
import { spawn } from "bun";
|
|
2
|
+
import { existsSync, mkdirSync, writeFileSync } from "fs";
|
|
3
|
+
import { tmpdir } from "os";
|
|
4
|
+
import { join } from "path";
|
|
5
|
+
import { ASSETS, materializeAssets } from "./assets";
|
|
6
|
+
import type { DeployProvisionTerraform } from "./config";
|
|
7
|
+
|
|
8
|
+
// ---------------------------------------------------------------------------
|
|
9
|
+
// Runs Terraform from embedded assets. Inputs are passed via a generated
|
|
10
|
+
// terraform.tfvars file in the temp working dir — nothing touches the user's
|
|
11
|
+
// project directory.
|
|
12
|
+
// ---------------------------------------------------------------------------
|
|
13
|
+
|
|
14
|
+
/** Everything `runTerraform` needs to provision a server. */
export interface TerraformInputs {
  /** Terraform provisioning settings (see `DeployProvisionTerraform` in ./config). */
  tf: DeployProvisionTerraform;
  /** Resolved Hetzner token (read from env var by caller — NOT from tf). */
  token: string;
  /** Deterministic server name shown in Hetzner Console. */
  serverName: string;
}
|
|
21
|
+
|
|
22
|
+
/** Result of a successful `runTerraform` apply. */
export interface TerraformOutputs {
  /** Public IP of the provisioned server (terraform output `server_ip`). */
  serverIp: string;
  /** Echo of the requested server name. */
  serverName: string;
  /** Working dir where state + vars live — caller may keep or delete. */
  workDir: string;
}
|
|
28
|
+
|
|
29
|
+
export async function runTerraform(
|
|
30
|
+
inputs: TerraformInputs,
|
|
31
|
+
): Promise<TerraformOutputs> {
|
|
32
|
+
const workDir = join(tmpdir(), "arc-deploy", `tf-${Date.now()}`);
|
|
33
|
+
mkdirSync(workDir, { recursive: true });
|
|
34
|
+
await materializeAssets(workDir, ASSETS.terraform);
|
|
35
|
+
|
|
36
|
+
// Write tfvars — NEVER put token inline in main.tf
|
|
37
|
+
const sshPubKey =
|
|
38
|
+
inputs.tf.sshPublicKey ?? expandHome("~/.ssh/id_ed25519.pub");
|
|
39
|
+
if (!existsSync(expandHome(sshPubKey))) {
|
|
40
|
+
throw new Error(
|
|
41
|
+
`SSH public key not found at ${sshPubKey}. Set provision.terraform.sshPublicKey in deploy.arc.json.`,
|
|
42
|
+
);
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
const tfvars =
|
|
46
|
+
[
|
|
47
|
+
`hcloud_token = "${inputs.token}"`,
|
|
48
|
+
`server_name = "${inputs.serverName}"`,
|
|
49
|
+
`server_type = "${inputs.tf.serverType}"`,
|
|
50
|
+
`server_location = "${inputs.tf.location}"`,
|
|
51
|
+
`server_image = "${inputs.tf.image}"`,
|
|
52
|
+
`ssh_public_key = "${expandHome(sshPubKey)}"`,
|
|
53
|
+
].join("\n") + "\n";
|
|
54
|
+
|
|
55
|
+
writeFileSync(join(workDir, "terraform.tfvars"), tfvars);
|
|
56
|
+
|
|
57
|
+
await runTf(workDir, ["init", "-input=false", "-no-color"]);
|
|
58
|
+
await runTf(workDir, [
|
|
59
|
+
"apply",
|
|
60
|
+
"-auto-approve",
|
|
61
|
+
"-input=false",
|
|
62
|
+
"-no-color",
|
|
63
|
+
]);
|
|
64
|
+
const ip = await runTfCapture(workDir, [
|
|
65
|
+
"output",
|
|
66
|
+
"-raw",
|
|
67
|
+
"server_ip",
|
|
68
|
+
"-no-color",
|
|
69
|
+
]);
|
|
70
|
+
|
|
71
|
+
return { serverIp: ip.trim(), serverName: inputs.serverName, workDir };
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
async function runTf(workDir: string, args: string[]): Promise<void> {
|
|
75
|
+
const proc = spawn({
|
|
76
|
+
cmd: ["terraform", ...args],
|
|
77
|
+
cwd: workDir,
|
|
78
|
+
stdout: "inherit",
|
|
79
|
+
stderr: "inherit",
|
|
80
|
+
});
|
|
81
|
+
const exit = await proc.exited;
|
|
82
|
+
if (exit !== 0) {
|
|
83
|
+
throw new Error(`terraform ${args[0]} failed (exit ${exit})`);
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
async function runTfCapture(workDir: string, args: string[]): Promise<string> {
|
|
88
|
+
const proc = spawn({
|
|
89
|
+
cmd: ["terraform", ...args],
|
|
90
|
+
cwd: workDir,
|
|
91
|
+
stdout: "pipe",
|
|
92
|
+
stderr: "pipe",
|
|
93
|
+
});
|
|
94
|
+
const [stdout, exit] = await Promise.all([
|
|
95
|
+
new Response(proc.stdout).text(),
|
|
96
|
+
proc.exited,
|
|
97
|
+
]);
|
|
98
|
+
if (exit !== 0) {
|
|
99
|
+
throw new Error(`terraform ${args[0]} failed (exit ${exit})`);
|
|
100
|
+
}
|
|
101
|
+
return stdout;
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
function expandHome(p: string): string {
|
|
105
|
+
if (p.startsWith("~/")) {
|
|
106
|
+
return p.replace(/^~/, process.env.HOME ?? "~");
|
|
107
|
+
}
|
|
108
|
+
return p;
|
|
109
|
+
}
|
package/src/index.ts
CHANGED
|
@@ -4,6 +4,7 @@ import { Command } from "commander";
|
|
|
4
4
|
import { build } from "./commands/build";
|
|
5
5
|
import { dev } from "./commands/dev";
|
|
6
6
|
import { platformBuild } from "./commands/platform-build";
|
|
7
|
+
import { platformDeploy } from "./commands/platform-deploy";
|
|
7
8
|
import { platformDev } from "./commands/platform-dev";
|
|
8
9
|
import { platformStart } from "./commands/platform-start";
|
|
9
10
|
|
|
@@ -44,6 +45,17 @@ platform
|
|
|
44
45
|
.description("Start platform in production mode (requires prior build)")
|
|
45
46
|
.action(platformStart);
|
|
46
47
|
|
|
48
|
+
platform
|
|
49
|
+
.command("deploy [env]")
|
|
50
|
+
.description(
|
|
51
|
+
"Deploy platform to a remote server (reads deploy.arc.json, surveys if missing)",
|
|
52
|
+
)
|
|
53
|
+
.option("--skip-build", "Skip local build step")
|
|
54
|
+
.option("--rebuild", "Force rebuild before deploy")
|
|
55
|
+
.action((env: string | undefined, opts: { skipBuild?: boolean; rebuild?: boolean }) =>
|
|
56
|
+
platformDeploy(env, opts),
|
|
57
|
+
);
|
|
58
|
+
|
|
47
59
|
// Parse command line arguments
|
|
48
60
|
program.parse(process.argv);
|
|
49
61
|
|