@kithinji/pod 1.0.18 → 1.0.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,4 @@
1
+ export declare function deploy(targetName: string, options?: {
2
+ forceEnsure?: boolean;
3
+ }): Promise<void>;
4
+ //# sourceMappingURL=deploy.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"deploy.d.ts","sourceRoot":"","sources":["../../../src/deploy/deploy.ts"],"names":[],"mappings":"AAmUA,wBAAsB,MAAM,CAC1B,UAAU,EAAE,MAAM,EAClB,OAAO,CAAC,EAAE;IAAE,WAAW,CAAC,EAAE,OAAO,CAAA;CAAE,iBAwEpC"}
@@ -0,0 +1,2 @@
1
+ export * from "./deploy";
2
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/deploy/index.ts"],"names":[],"mappings":"AAAA,cAAc,UAAU,CAAC"}
@@ -1 +1 @@
1
- {"version":3,"file":"docker.d.ts","sourceRoot":"","sources":["../../../src/docker/docker.ts"],"names":[],"mappings":"AAeA,wBAAsB,SAAS,CAAC,GAAG,GAAE,KAAK,GAAG,MAAe,iBAwB3D"}
1
+ {"version":3,"file":"docker.d.ts","sourceRoot":"","sources":["../../../src/docker/docker.ts"],"names":[],"mappings":"AAeA,wBAAsB,SAAS,CAAC,GAAG,GAAE,KAAK,GAAG,MAAe,iBA2B3D"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@kithinji/pod",
3
- "version": "1.0.18",
3
+ "version": "1.0.20",
4
4
  "description": "",
5
5
  "bin": {
6
6
  "pod": "./dist/main.js"
@@ -25,10 +25,12 @@
25
25
  "@babel/core": "^7.28.5",
26
26
  "@babel/plugin-syntax-jsx": "^7.27.1",
27
27
  "@swc/core": "^1.15.6",
28
+ "chalk": "^5.6.2",
28
29
  "commander": "^14.0.2",
29
30
  "esbuild": "^0.27.2",
30
31
  "fs-extra": "^11.3.3",
31
32
  "js-yaml": "^4.1.1",
33
+ "node-ssh": "^13.2.1",
32
34
  "prompts": "^2.4.2",
33
35
  "typescript": "^5.9.3",
34
36
  "ws": "^8.18.3"
@@ -0,0 +1,592 @@
1
+ import fs from "fs-extra";
2
+ import yaml from "js-yaml";
3
+ import path from "path";
4
+ import os from "os";
5
+ import { NodeSSH } from "node-ssh";
6
+ import chalk from "chalk";
7
+
8
// Shape of pod.deploy.yml: project metadata plus one or more named deploy targets.
interface PodConfig {
  name: string;
  version: string;
  targets: Record<string, TargetConfig>;
}

// SSH connection details and the ordered list of operations to run on one target.
interface TargetConfig {
  host: string;
  user: string;
  keyPath: string; // local path to the SSH private key (may be relative or ~-prefixed)
  port?: number; // defaults to 22 when omitted (see deploy())
  deployPath: string; // remote directory that holds the app and pod-lock.json
  operations: Operation[];
}

// Discriminated on the `type` field; deploy() dispatches on it.
type Operation = EnsureOperation | ActionOperation | VerifyOperation;

// Idempotent provisioning step; applied state is recorded in the remote lock file.
interface EnsureOperation {
  name: string;
  type: "ensure";
  ensure: {
    swap?: { size: string }; // e.g. "2G" — the SWAP script strips a trailing G/g
    docker?: { version: string; addUserToGroup?: boolean }; // "latest" or a version substring
    directory?: { path: string; owner?: string }; // both support ${} interpolation
  };
}

// Deployment step: directory upload and/or remote command.
interface ActionOperation {
  name: string;
  type: "action";
  when?: "always" | "once" | "never"; // defaults to "always" (see handleAction())
  action: {
    rsync?: {
      source: string; // local directory to upload
      destination: string; // remote directory (supports ${} interpolation)
      exclude?: string[]; // patterns: "dir/", "*.ext", or an exact relative path
    };
    command?: string; // remote shell command (supports ${} interpolation)
  };
}

// Post-deploy check; a failure aborts the deployment.
interface VerifyOperation {
  name: string;
  type: "verify";
  verify: {
    http?: { url: string; timeout?: string }; // curl-based health check on the target
    command?: string; // arbitrary remote check command
  };
}

// Persisted on the target as pod-lock.json to make ensures and "once" actions idempotent.
interface LockFile {
  deployment_version?: string; // last deployed version; a change clears once_actions
  ensures: Record<string, { version: string; config: any }>; // per-ensure applied state
  once_actions: string[]; // ids of "once" actions already executed
}
63
+
64
+ function interpolate(
65
+ str: string | undefined,
66
+ context: Record<string, any>
67
+ ): string {
68
+ if (!str) return "";
69
+ return str.replace(/\${([^}]+)}/g, (match, key) => {
70
+ return context[key] !== undefined ? String(context[key]) : match;
71
+ });
72
+ }
73
+
74
+ function deepInterpolate(obj: any, context: Record<string, any>): any {
75
+ if (typeof obj === "string") {
76
+ return interpolate(obj, context);
77
+ }
78
+ if (Array.isArray(obj)) {
79
+ return obj.map((item) => deepInterpolate(item, context));
80
+ }
81
+ if (obj && typeof obj === "object") {
82
+ const result: any = {};
83
+ for (const [key, value] of Object.entries(obj)) {
84
+ result[key] = deepInterpolate(value, context);
85
+ }
86
+ return result;
87
+ }
88
+ return obj;
89
+ }
90
+
91
+ function expandTilde(fp: string): string {
92
+ if (!fp || typeof fp !== "string") return fp;
93
+ return fp.startsWith("~/") ? path.join(os.homedir(), fp.slice(2)) : fp;
94
+ }
95
+
96
+ function resolveLocalPaths(obj: any, cwd: string): any {
97
+ if (Array.isArray(obj)) {
98
+ return obj.map((item) => resolveLocalPaths(item, cwd));
99
+ }
100
+ if (obj && typeof obj === "object") {
101
+ const result: any = {};
102
+ for (const [key, value] of Object.entries(obj)) {
103
+ const isLocalPathKey = key === "keyPath" || key === "source";
104
+ if (isLocalPathKey && typeof value === "string") {
105
+ const expanded = expandTilde(value);
106
+ result[key] = path.isAbsolute(expanded)
107
+ ? expanded
108
+ : path.resolve(cwd, expanded);
109
+ } else {
110
+ result[key] = resolveLocalPaths(value, cwd);
111
+ }
112
+ }
113
+ return result;
114
+ }
115
+ return obj;
116
+ }
117
+
118
// Parameterized bash provisioning scripts executed on the target via
// RemoteShell.runScript(). Lines the scripts echo with a "LOG:" prefix are
// streamed back to the local console by RemoteShell.run().
// NOTE: runScript() applies a second ${} interpolation pass against the
// target context; the \${ARCH}/\${RELEASE} escapes in DOCKER survive it
// (the target context has no such keys) and are expanded by bash instead.
const SCRIPTS = {
  // Idempotently creates (or grows) /swapfile to `size` (e.g. "2G") and
  // registers it in /etc/fstab. An existing swap file of at least the
  // requested size is left alone.
  SWAP: (size: string) => `#!/bin/bash
set -euo pipefail

# Check if swap file exists
if [ -f /swapfile ]; then
CURRENT_SIZE=$(stat -c%s /swapfile 2>/dev/null || echo "0")
CURRENT_SIZE_GB=$((CURRENT_SIZE / 1024 / 1024 / 1024))
REQ_SIZE=$(echo "${size}" | tr -d 'G' | tr -d 'g')

if [ "$CURRENT_SIZE_GB" -ge "$REQ_SIZE" ]; then
echo "LOG: Swap of sufficient size exists. Skipping."
exit 0
fi

# Remove old swap if size doesn't match
sudo swapoff /swapfile || true
sudo rm /swapfile
fi

echo "LOG: Creating ${size} swap file..."
sudo fallocate -l ${size} /swapfile || \\
sudo dd if=/dev/zero of=/swapfile bs=1M count=$(($(echo ${size} | tr -d 'G' | tr -d 'g') * 1024))

sudo chmod 600 /swapfile
sudo mkswap /swapfile
sudo swapon /swapfile

# Add to fstab if not already there
grep -q "/swapfile" /etc/fstab || echo '/swapfile none swap sw 0 0' | sudo tee -a /etc/fstab

echo "LOG: Swap file configured successfully"
`,

  // Installs (or re-installs) the requested Docker version from Docker's
  // official apt repository on Ubuntu, purging a mismatched existing
  // install first (container data is preserved), then optionally adds
  // `user` to the docker group. `version` may be "latest" or a version
  // substring matched against `apt-cache madison docker-ce`.
  DOCKER: (version: string, user: string, addToGroup: boolean) => `#!/bin/bash
set -euo pipefail

echo "LOG: Target Docker version: ${version}"

# Check current Docker installation
INSTALLED_VER=$(docker --version 2>/dev/null | awk '{print $3}' | tr -d ',' || echo "none")
echo "LOG: Currently installed: $INSTALLED_VER"

# Determine if we need to install/reinstall
NEEDS_INSTALL=false

if [ "$INSTALLED_VER" = "none" ]; then
echo "LOG: Docker not installed"
NEEDS_INSTALL=true
elif [ "${version}" = "latest" ]; then
echo "LOG: Latest version requested"
NEEDS_INSTALL=true
elif [[ "$INSTALLED_VER" != *"${version}"* ]]; then
echo "LOG: Version mismatch detected (need ${version}, have $INSTALLED_VER)"
echo "LOG: Uninstalling current Docker..."

# Stop Docker services
sudo systemctl stop docker.socket 2>/dev/null || true
sudo systemctl stop docker 2>/dev/null || true
sudo systemctl stop containerd 2>/dev/null || true

# Remove Docker packages (data is preserved)
sudo apt-get purge -y \\
docker-ce \\
docker-ce-cli \\
containerd.io \\
docker-buildx-plugin \\
docker-compose-plugin \\
docker-ce-rootless-extras \\
2>/dev/null || true

sudo apt-get purge -y docker docker-engine docker.io runc 2>/dev/null || true
sudo apt-get autoremove -y

echo "LOG: Uninstall complete"
NEEDS_INSTALL=true
else
echo "LOG: Correct version already installed"
fi

if [ "$NEEDS_INSTALL" = true ]; then
echo "LOG: Installing Docker ${version}..."

# Update and install prerequisites
sudo apt-get update -y
sudo apt-get install -y ca-certificates curl gnupg lsb-release

# Add Docker GPG key
sudo install -m 0755 -d /etc/apt/keyrings
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | \\
sudo gpg --dearmor --batch --yes -o /etc/apt/keyrings/docker.gpg
sudo chmod a+r /etc/apt/keyrings/docker.gpg

# Add Docker repository
ARCH="$(dpkg --print-architecture)"
RELEASE="$(lsb_release -cs)"
echo "deb [arch=\${ARCH} signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \${RELEASE} stable" | \\
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null

sudo apt-get update -y

# Install Docker
if [ "${version}" = "latest" ]; then
echo "LOG: Installing latest Docker version"
sudo apt-get install -y \\
docker-ce \\
docker-ce-cli \\
containerd.io \\
docker-buildx-plugin \\
docker-compose-plugin
else
echo "LOG: Finding version ${version}..."
VERSION_STRING=$(apt-cache madison docker-ce | grep "${version}" | head -1 | awk '{print $3}')

if [ -z "$VERSION_STRING" ]; then
echo "LOG: ERROR - Version ${version} not found!"
echo "LOG: Available versions:"
apt-cache madison docker-ce | head -10
exit 1
fi

echo "LOG: Installing Docker CE version: $VERSION_STRING"
sudo apt-get install -y \\
docker-ce=$VERSION_STRING \\
docker-ce-cli=$VERSION_STRING \\
containerd.io \\
docker-buildx-plugin \\
docker-compose-plugin
fi

# Enable and start Docker
sudo systemctl enable docker
sudo systemctl start docker

FINAL_VER=$(docker --version | awk '{print $3}' | tr -d ',')
echo "LOG: Docker installed successfully - $FINAL_VER"
fi

# Configure docker group
if [ "${addToGroup}" = "true" ]; then
if ! getent group docker >/dev/null 2>&1; then
sudo groupadd docker
echo "LOG: Created docker group"
fi

if groups ${user} | grep -q '\\bdocker\\b'; then
echo "LOG: User ${user} already in docker group"
else
sudo usermod -aG docker ${user}
echo "LOG: Added ${user} to docker group (logout required)"
fi
fi
`,
};
272
+
273
+ class RemoteShell {
274
+ constructor(public ssh: NodeSSH) {}
275
+
276
+ async uploadContent(remotePath: string, content: string) {
277
+ const localTmp = path.join(os.tmpdir(), `pod_tmp_${Date.now()}`);
278
+ fs.writeFileSync(localTmp, content);
279
+ try {
280
+ await this.ssh.execCommand(`mkdir -p $(dirname ${remotePath})`);
281
+ await this.ssh.putFile(localTmp, remotePath);
282
+ } finally {
283
+ if (fs.existsSync(localTmp)) fs.unlinkSync(localTmp);
284
+ }
285
+ }
286
+
287
+ async runScript(name: string, content: string, context: Record<string, any>) {
288
+ const interpolated = interpolate(content, context);
289
+ const remotePath = `/tmp/pod_script_${name}_${Date.now()}.sh`;
290
+ await this.uploadContent(remotePath, interpolated);
291
+ try {
292
+ await this.ssh.execCommand(`chmod +x ${remotePath}`);
293
+ return await this.run(remotePath, context);
294
+ } finally {
295
+ await this.ssh.execCommand(`rm -f ${remotePath}`);
296
+ }
297
+ }
298
+
299
+ async run(cmd: string, context: Record<string, any>, silent = false) {
300
+ const interpolated = interpolate(cmd, context);
301
+ const result = await this.ssh.execCommand(interpolated);
302
+ if (result.code !== 0 && result.code !== null) {
303
+ throw new Error(`Execution failed: ${cmd}\nSTDERR: ${result.stderr}`);
304
+ }
305
+ if (!silent && result.stdout) {
306
+ result.stdout
307
+ .split("\n")
308
+ .filter((l) => l.startsWith("LOG:"))
309
+ .forEach((l) => console.log(chalk.gray(` ${l.replace("LOG: ", "")}`)));
310
+ }
311
+ return result;
312
+ }
313
+
314
+ async readJson<T>(remotePath: string): Promise<T | null> {
315
+ const res = await this.ssh.execCommand(`cat ${remotePath}`);
316
+ try {
317
+ return res.code === 0 ? JSON.parse(res.stdout) : null;
318
+ } catch {
319
+ return null;
320
+ }
321
+ }
322
+ }
323
+
324
/**
 * Deploys the current project to the named target defined in pod.deploy.yml
 * (read from the current working directory).
 *
 * Flow: load and interpolate the target config, resolve local paths, open
 * an SSH session, load (or initialize) the remote pod-lock.json, then run
 * the target's operations strictly in order (ensure / action / verify).
 * The lock file makes ensures and "once" actions idempotent across runs.
 *
 * @param targetName key into `targets` in pod.deploy.yml
 * @param options    forceEnsure re-runs every ensure operation even if the
 *                   lock file records it as already satisfied
 * @throws if the target is unknown, the SSH connection fails, or any
 *         operation fails (the error is wrapped with the operation name)
 */
export async function deploy(
  targetName: string,
  options?: { forceEnsure?: boolean }
) {
  const cwd = process.cwd();
  const rawConfig = yaml.load(
    fs.readFileSync(path.join(cwd, "pod.deploy.yml"), "utf8")
  ) as any;

  const rawTarget = rawConfig.targets?.[targetName];
  if (!rawTarget) throw new Error(`Target ${targetName} not found.`);

  console.log(
    chalk.blue.bold(
      `\n🚀 Pod Deploy: ${rawConfig.name} v${rawConfig.version} → ${targetName}\n`
    )
  );

  // Substitute ${} placeholders using both top-level config values and the
  // target's own fields (target fields win on key collisions).
  let target = deepInterpolate(rawTarget, {
    ...rawConfig,
    ...rawTarget,
  }) as TargetConfig;

  // keyPath/source entries become absolute local paths (with ~ expansion).
  target = resolveLocalPaths(target, cwd);

  const ssh = new NodeSSH();
  const shell = new RemoteShell(ssh);

  try {
    await ssh.connect({
      host: target.host,
      username: target.user,
      privateKeyPath: target.keyPath,
      port: target.port || 22,
    });

    // pod-lock.json lives alongside the deployed app on the target.
    // path.posix keeps the remote path /-separated regardless of local OS.
    const lockPath = path.posix.join(target.deployPath, "pod-lock.json");
    let lock = (await shell.readJson<LockFile>(lockPath)) || {
      ensures: {},
      once_actions: [],
    };

    // A version bump re-arms all "once" actions (persisted immediately).
    if (lock.deployment_version !== rawConfig.version) {
      console.log(chalk.magenta(`→ Version change: ${rawConfig.version}`));
      lock.deployment_version = rawConfig.version;
      lock.once_actions = [];
      await shell.uploadContent(lockPath, JSON.stringify(lock, null, 2));
    }

    // Operations run one at a time, in config order; the first failure
    // aborts the deployment.
    for (const op of target.operations) {
      try {
        if (op.type === "ensure") {
          await handleEnsure(op, shell, target, lock, lockPath, options);
        } else if (op.type === "action") {
          await handleAction(op, shell, target, lock, lockPath);
        } else if (op.type === "verify") {
          await handleVerify(op, shell, target);
        } else {
          throw new Error(`Unknown operation type: ${(op as any).type}`);
        }
      } catch (err: any) {
        // Re-wrap so the failure names the offending operation.
        throw new Error(`Failed at operation "${op.name}": ${err.message}`);
      }
    }

    console.log(chalk.green.bold(`\n✅ Deployment successful!\n`));
  } catch (err: any) {
    console.error(chalk.red.bold(`\n❌ Deployment Failed: ${err.message}`));
    throw err;
  } finally {
    // Close the SSH session whether or not the deployment succeeded.
    ssh.dispose();
  }
}
399
+
400
/**
 * Applies an "ensure" operation (swap file, Docker install, directory).
 *
 * Each sub-ensure is recorded in the remote lock file under its own key,
 * together with the config it was applied with; on later runs it is skipped
 * unless the config changed or options.forceEnsure is set. The lock file is
 * re-uploaded after every applied ensure so progress survives a failure
 * part-way through the operation.
 */
async function handleEnsure(
  op: EnsureOperation,
  shell: RemoteShell,
  target: TargetConfig,
  lock: LockFile,
  lockPath: string,
  options?: { forceEnsure?: boolean }
) {
  if (!op.ensure) {
    throw new Error(`Ensure operation "${op.name}" missing ensure config`);
  }

  if (op.ensure.swap) {
    const key = "swap";
    const locked = lock.ensures[key];
    const currentConfig = op.ensure.swap;
    // Cheap deep comparison via serialization; key order comes from YAML
    // parsing and is stable between runs.
    const configChanged =
      JSON.stringify(locked?.config) !== JSON.stringify(currentConfig);

    if (
      options?.forceEnsure ||
      !locked ||
      locked.version !== currentConfig.size ||
      configChanged
    ) {
      console.log(chalk.yellow(`→ Ensuring: ${op.name}`));
      const script = SCRIPTS.SWAP(currentConfig.size);
      await shell.runScript(key, script, target);
      // Record the applied size/config so an unchanged re-run is a no-op.
      lock.ensures[key] = {
        version: currentConfig.size,
        config: currentConfig,
      };
      await shell.uploadContent(lockPath, JSON.stringify(lock, null, 2));
    } else {
      console.log(chalk.gray(`✓ ${op.name} (already satisfied)`));
    }
  }

  if (op.ensure.docker) {
    const key = "docker";
    const locked = lock.ensures[key];
    const currentConfig = op.ensure.docker;
    const configChanged =
      JSON.stringify(locked?.config) !== JSON.stringify(currentConfig);

    if (
      options?.forceEnsure ||
      !locked ||
      locked.version !== currentConfig.version ||
      configChanged
    ) {
      console.log(chalk.yellow(`→ Ensuring: ${op.name}`));
      const script = SCRIPTS.DOCKER(
        currentConfig.version,
        target.user,
        // Coerce to a strict boolean; the script compares against "true".
        !!currentConfig.addUserToGroup
      );
      await shell.runScript(key, script, target);
      lock.ensures[key] = {
        version: currentConfig.version,
        config: currentConfig,
      };
      await shell.uploadContent(lockPath, JSON.stringify(lock, null, 2));
    } else {
      console.log(chalk.gray(`✓ ${op.name} (already satisfied)`));
    }
  }

  if (op.ensure.directory) {
    // Directory ensures are keyed by the raw (pre-interpolation) path so
    // several directory ensures can coexist in the lock file.
    const key = `directory_${op.ensure.directory.path}`;
    const locked = lock.ensures[key];
    const currentConfig = op.ensure.directory;
    const configChanged =
      JSON.stringify(locked?.config) !== JSON.stringify(currentConfig);

    if (options?.forceEnsure || !locked || configChanged) {
      console.log(chalk.yellow(`→ Ensuring: ${op.name}`));
      const dirPath = interpolate(currentConfig.path, target);
      // Owner falls back to the SSH user when not specified.
      const owner = currentConfig.owner
        ? interpolate(currentConfig.owner, target)
        : target.user;
      await shell.run(`mkdir -p ${dirPath}`, target, true);
      await shell.run(
        `sudo chown -R ${owner}:${owner} ${dirPath}`,
        target,
        true
      );
      lock.ensures[key] = {
        version: dirPath,
        config: currentConfig,
      };
      await shell.uploadContent(lockPath, JSON.stringify(lock, null, 2));
    } else {
      console.log(chalk.gray(`✓ ${op.name} (already satisfied)`));
    }
  }
}
497
+
498
/**
 * Runs an "action" operation: an optional directory upload (rsync-style,
 * implemented with node-ssh putDirectory) and/or a remote shell command.
 *
 * `when` semantics: "never" skips entirely; "once" runs only if not yet
 * recorded in the lock file (and is recorded after success); "always"
 * (the default) runs on every deployment.
 */
async function handleAction(
  op: ActionOperation,
  shell: RemoteShell,
  target: TargetConfig,
  lock: LockFile,
  lockPath: string
) {
  if (!op.action) {
    throw new Error(`Action operation "${op.name}" missing action config`);
  }

  const when = op.when || "always";

  if (when === "never") {
    console.log(chalk.gray(`⊘ ${op.name} (disabled)`));
    return;
  }

  const actionId = `action_${op.name}`;

  if (when === "once" && lock.once_actions.includes(actionId)) {
    console.log(chalk.gray(`✓ ${op.name} (already completed)`));
    return;
  }

  console.log(chalk.cyan(`→ Running: ${op.name}`));

  if (op.action.rsync) {
    const src = op.action.rsync.source;
    const dest = interpolate(op.action.rsync.destination || ".", target);

    const putOptions: any = { recursive: true, concurrency: 10 };

    if (op.action.rsync.exclude?.length) {
      const excludePatterns = op.action.rsync.exclude;

      // node-ssh invokes validate() per entry; returning false skips it.
      putOptions.validate = (filePath: string) => {
        const relative = path.relative(src, filePath);
        // The source root itself is always included.
        if (relative === "") return true;

        return !excludePatterns.some((pattern) => {
          if (pattern.endsWith("/")) {
            // Trailing slash = directory pattern: match the directory
            // itself, anything directly below it, and the name appearing
            // as an intermediate path segment.
            const dir = pattern.slice(0, -1);
            const segment = "/" + dir + "/";
            return (
              relative === dir ||
              relative.startsWith(dir + "/") ||
              relative.includes(segment)
            );
          }

          // "*.ext" = extension pattern, matched against the path suffix.
          if (pattern.startsWith("*.")) {
            return relative.endsWith(pattern.slice(1));
          }

          // Otherwise: exact relative-path match only (no globbing).
          return relative === pattern;
        });
      };
    }

    console.log(chalk.gray(`  Syncing ${src} → ${dest}`));
    await shell.ssh.putDirectory(src, dest, putOptions);
  }

  if (op.action.command) {
    await shell.run(op.action.command, target);
  }

  if (when === "once") {
    // Mark completion only after the upload/command above succeeded.
    lock.once_actions.push(actionId);
    await shell.uploadContent(lockPath, JSON.stringify(lock, null, 2));
  }
}
571
+
572
+ async function handleVerify(
573
+ op: VerifyOperation,
574
+ shell: RemoteShell,
575
+ target: TargetConfig
576
+ ) {
577
+ if (!op.verify) {
578
+ throw new Error(`Verify operation "${op.name}" missing verify config`);
579
+ }
580
+
581
+ console.log(chalk.cyan(`→ Verifying: ${op.name}`));
582
+
583
+ if (op.verify.http) {
584
+ const url = interpolate(op.verify.http.url, target);
585
+ const timeout = op.verify.http.timeout || "30s";
586
+ await shell.run(`curl -f --max-time ${timeout} ${url}`, target, true);
587
+ }
588
+
589
+ if (op.verify.command) {
590
+ await shell.run(op.verify.command, target, true);
591
+ }
592
+ }
@@ -0,0 +1 @@
1
+ export * from "./deploy";
package/src/dev/server.ts CHANGED
@@ -96,7 +96,7 @@ export async function startDevServer(): Promise<void> {
96
96
  format: "esm",
97
97
  sourcemap: config.build?.sourcemap ?? true,
98
98
  splitting: true,
99
- minify: config.build?.minify ?? false,
99
+ minify: config.build?.minify ?? true,
100
100
  plugins: [
101
101
  ...(config.plugins?.map((cb) => cb(store)) || []),
102
102
  ...(config.client_plugins?.map((cb) => cb(store)) || []),