kadi-deploy 0.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +6 -0
- package/.prettierrc +6 -0
- package/README.md +589 -0
- package/agent.json +23 -0
- package/index.js +11 -0
- package/package.json +42 -0
- package/quick-command.txt +92 -0
- package/scripts/preflight.js +458 -0
- package/scripts/preflight.sh +300 -0
- package/src/cli/bid-selector.ts +222 -0
- package/src/cli/colors.ts +216 -0
- package/src/cli/index.ts +11 -0
- package/src/cli/prompts.ts +190 -0
- package/src/cli/spinners.ts +165 -0
- package/src/commands/deploy-local.ts +475 -0
- package/src/commands/deploy.ts +1342 -0
- package/src/commands/down.ts +679 -0
- package/src/commands/index.ts +10 -0
- package/src/commands/lock.ts +571 -0
- package/src/config/agent-loader.ts +177 -0
- package/src/config/index.ts +9 -0
- package/src/display/deployment-info.ts +220 -0
- package/src/display/pricing.ts +137 -0
- package/src/display/resources.ts +234 -0
- package/src/enhanced-registry-manager.ts +892 -0
- package/src/index.ts +307 -0
- package/src/infrastructure/registry.ts +269 -0
- package/src/schemas/profiles.ts +529 -0
- package/src/secrets/broker-urls.ts +109 -0
- package/src/secrets/handshake.ts +407 -0
- package/src/secrets/index.ts +69 -0
- package/src/secrets/inject-env.ts +171 -0
- package/src/secrets/nonce.ts +31 -0
- package/src/secrets/normalize.ts +204 -0
- package/src/secrets/prepare.ts +152 -0
- package/src/secrets/validate.ts +243 -0
- package/src/secrets/vault.ts +80 -0
- package/src/types/akash.ts +116 -0
- package/src/types/container-registry-ability.d.ts +158 -0
- package/src/types/external.ts +49 -0
- package/src/types.ts +211 -0
- package/src/utils/akt-price.ts +74 -0
- package/tests/agent-loader.test.ts +239 -0
- package/tests/autonomous.test.ts +244 -0
- package/tests/down.test.ts +1143 -0
- package/tests/lock.test.ts +1148 -0
- package/tests/nonce.test.ts +34 -0
- package/tests/normalize.test.ts +270 -0
- package/tests/secrets-schema.test.ts +301 -0
- package/tests/types.test.ts +198 -0
- package/tsconfig.json +18 -0
- package/vitest.config.ts +9 -0
|
@@ -0,0 +1,679 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Deploy Down Command — Tear down active deployments
|
|
3
|
+
*
|
|
4
|
+
* Reads the `.kadi-deploy.lock` file written after a successful deployment
|
|
5
|
+
* and tears down the running containers:
|
|
6
|
+
*
|
|
7
|
+
* - **Local**: Runs `${engine} compose -f ${composePath} down --remove-orphans`
|
|
8
|
+
* - **Akash**: Connects wallet → calls `AkashClient.closeDeployment(dseq)`
|
|
9
|
+
*
|
|
10
|
+
* Supports both interactive (QR wallet) and autonomous (vault mnemonic) modes
|
|
11
|
+
* for Akash teardown.
|
|
12
|
+
*
|
|
13
|
+
* @module commands/down
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
import path from 'node:path';
|
|
17
|
+
import { execSync } from 'node:child_process';
|
|
18
|
+
|
|
19
|
+
import {
|
|
20
|
+
AkashClient,
|
|
21
|
+
connectWallet,
|
|
22
|
+
disconnectWallet,
|
|
23
|
+
createWalletFromMnemonic,
|
|
24
|
+
type SecretsProvider,
|
|
25
|
+
} from '@kadi.build/deploy-ability/akash';
|
|
26
|
+
|
|
27
|
+
import { readLockFile, removeDeployment, listDeployments, getDeploymentByInstance, getDeploymentsByProfile, parseDeploymentKey, type DeploymentLock, type LockFile } from './lock.js';
|
|
28
|
+
import { readSecretFromCli } from '../secrets/index.js';
|
|
29
|
+
import { startSpinner, succeedSpinner, failSpinner } from '../cli/spinners.js';
|
|
30
|
+
import { confirmPrompt, selectPrompt } from '../cli/prompts.js';
|
|
31
|
+
import {
|
|
32
|
+
error as errorColor,
|
|
33
|
+
warning,
|
|
34
|
+
success as successColor,
|
|
35
|
+
dim,
|
|
36
|
+
bold,
|
|
37
|
+
highlight,
|
|
38
|
+
formatKeyValue,
|
|
39
|
+
} from '../cli/colors.js';
|
|
40
|
+
import type { IKadiContext } from '../types.js';
|
|
41
|
+
|
|
42
|
+
// Polyfill localStorage for WalletConnect (same as deploy.ts)
// WalletConnect's storage layer expects a browser-style `localStorage`; under
// Node we back it with a disk-persisted LocalStorage rooted at ~/.kadi.
// `??=` leaves any polyfill already installed by a host process untouched.
import { LocalStorage } from 'node-localstorage';
import os from 'node:os';
const kadiHome = path.join(os.homedir(), '.kadi');
(globalThis as any).localStorage ??= new LocalStorage(kadiHome);
|
|
47
|
+
|
|
48
|
+
import QRCode from 'qrcode-terminal';
|
|
49
|
+
|
|
50
|
+
// ─────────────────────────────────────────────────────────
|
|
51
|
+
// Types
|
|
52
|
+
// ─────────────────────────────────────────────────────────
|
|
53
|
+
|
|
54
|
+
/** Parsed CLI flags accepted by `kadi deploy down`. */
export interface DownOptions {
  /** Project root containing `.kadi-deploy.lock`; defaults to `process.cwd()`. */
  project?: string;
  /** Tear down instance(s) of this profile (`--profile`); prompts if several match. */
  profile?: string;
  /** Tear down one specific instance by its instance ID (`--instance`). */
  instance?: string;
  /** Tear down every deployment in the lock file, sequentially (`--all`). */
  all?: boolean;
  /** Container engine override for local teardown; defaults to the engine recorded in the lock. */
  engine?: string;
  /** Skip confirmation prompts (`--yes`). */
  yes?: boolean;
  /** Echo commands and print full error objects. */
  verbose?: boolean;
  /** Non-interactive mode: wallet mnemonic is read from the secrets vault; prompts become errors. */
  autonomous?: boolean;
  /** Vault name holding the AKASH_WALLET secret in autonomous mode; defaults to 'global'. */
  secretsVault?: string;
  /** Akash network override (cast to 'mainnet' | 'testnet'); defaults to the network in the lock. */
  network?: string;
}
|
|
66
|
+
|
|
67
|
+
// ─────────────────────────────────────────────────────────
|
|
68
|
+
// Main entry point
|
|
69
|
+
// ─────────────────────────────────────────────────────────
|
|
70
|
+
|
|
71
|
+
/**
 * Execute the `kadi deploy down` command.
 *
 * Reads the lock file, resolves which deployment to tear down, confirms with
 * the user (unless `--yes`/`--autonomous`), then routes to the target-specific
 * teardown.
 *
 * Selection precedence:
 *   1. `--instance <id>` — exact lookup by instance ID
 *   2. `--all`           — every deployment, sequentially (confirmed once)
 *   3. `--profile <p>`   — instances of one profile (prompt if several)
 *   4. exactly one deployment in the lock — auto-selected
 *   5. otherwise — interactive prompt; an error in autonomous mode
 *
 * @param ctx     KADI runtime context (provides the logger).
 * @param options Parsed CLI flags; see {@link DownOptions}.
 */
export async function executeDown(
  ctx: IKadiContext,
  options: DownOptions
): Promise<void> {
  const { logger } = ctx;
  const projectRoot = path.resolve(options.project || process.cwd());

  // ----------------------------------------------------------------
  // 1. Read lock file and resolve deployment
  // ----------------------------------------------------------------
  let lockFile: LockFile | null;

  try {
    lockFile = await readLockFile(projectRoot);
  } catch (err) {
    logger.error(
      errorColor(`Failed to read lock file: ${(err as Error).message}`)
    );
    return;
  }

  // Missing lock file or an empty deployments map both mean "nothing to do".
  if (!lockFile || Object.keys(lockFile.deployments).length === 0) {
    logger.error(
      errorColor('No active deployment found.')
    );
    logger.log(
      dim(
        'The .kadi-deploy.lock file was not found in ' + projectRoot + '.\n' +
        'This file is created after a successful deployment with `kadi deploy`.'
      )
    );
    return;
  }

  // Resolve which deployment to tear down
  const entries = Object.entries(lockFile.deployments);
  let lock: DeploymentLock;

  if (options.instance) {
    // Explicit --instance flag: direct lookup by instanceId
    const entry = Object.values(lockFile.deployments)
      .find((d) => d.instanceId === options.instance);
    if (!entry) {
      logger.error(
        errorColor(`No active deployment found with instance ID "${options.instance}".`)
      );
      logger.log(
        dim('Active instances: ' + entries.map(([k, d]) => `${d.instanceId} (${d.profile}, ${d.target})`).join(', '))
      );
      return;
    }
    lock = entry;
  } else if (options.all) {
    // --all flag: tear down everything (handled separately below)
    if (!options.yes && !options.autonomous) {
      logger.log('');
      logger.log(bold(`About to tear down ALL ${entries.length} deployment(s):`));
      logger.log('');
      for (const [key, dep] of entries) {
        const labelInfo = dep.label ? `, ${dep.label}` : '';
        logger.log(`  ${highlight(dep.instanceId)} ${dim(`(${dep.profile}, ${dep.target}${labelInfo}, deployed ${dep.deployedAt})`)}`);
      }
      logger.log('');

      const proceed = await confirmPrompt(
        `Tear down all ${entries.length} deployment(s)?`,
        false
      );

      if (!proceed) {
        logger.log('Cancelled.');
        return;
      }
    }

    // Tear down all deployments sequentially.
    // NOTE(review): teardownAkash ends with process.exit() in its `finally`,
    // so an akash entry here terminates the process before later entries are
    // processed — confirm whether --all is expected to survive akash targets.
    // Entries with any other target value are silently skipped.
    for (const [key, dep] of entries) {
      logger.log('');
      logger.log(bold(`Tearing down: ${dep.profile}:${dep.instanceId} (${dep.target})`));
      if (dep.target === 'local') {
        await teardownLocal(ctx, projectRoot, dep, options);
      } else if (dep.target === 'akash') {
        await teardownAkash(ctx, projectRoot, dep, options);
      }
    }
    return;
  } else if (options.profile) {
    // Explicit --profile flag: find matching instances
    const profileInstances = entries.filter(([_, d]) => d.profile === options.profile);
    if (profileInstances.length === 0) {
      logger.error(
        errorColor(`No active deployment found for profile "${options.profile}".`)
      );
      logger.log(
        dim('Active deployments: ' + entries.map(([k, d]) => `${d.profile}:${d.instanceId}`).join(', '))
      );
      return;
    } else if (profileInstances.length === 1) {
      // Exactly one instance of the profile — no prompt needed.
      lock = profileInstances[0][1];
    } else if (options.autonomous) {
      // Cannot prompt without a human; demand an explicit --instance.
      logger.error(
        errorColor(
          `Multiple instances of profile "${options.profile}" found. In autonomous mode, specify which one with --instance.`
        )
      );
      logger.log(
        dim('Instances: ' + profileInstances.map(([_, d]) => d.instanceId).join(', '))
      );
      return;
    } else {
      // Multiple instances for same profile — prompt
      logger.log('');
      logger.log(bold(`Multiple instances of profile "${options.profile}" found:`));
      logger.log('');
      for (const [key, dep] of profileInstances) {
        const labelInfo = dep.label ? ` — ${dep.label}` : '';
        logger.log(`  ${highlight(dep.instanceId)} ${dim(`(${dep.target}${labelInfo}, deployed ${dep.deployedAt})`)}`);
      }
      logger.log('');

      const selected = await selectPrompt(
        'Which instance do you want to tear down?',
        profileInstances.map(([_, dep]) => {
          const labelInfo = dep.label ? ` — ${dep.label}` : '';
          return `${dep.instanceId} (${dep.target}${labelInfo})`;
        })
      );

      // Strip the trailing " (…)" suffix to recover the bare instance ID.
      // NOTE(review): assumes instance IDs never contain " (" — confirm.
      const selectedInstanceId = selected.replace(/ \(.*\)$/, '');
      lock = profileInstances.find(([_, d]) => d.instanceId === selectedInstanceId)![1];
    }
  } else if (entries.length === 1) {
    // Only one deployment — auto-select
    lock = entries[0][1];
  } else if (options.autonomous) {
    // Autonomous mode: cannot prompt — require explicit --profile or --instance
    logger.error(
      errorColor(
        'Multiple active deployments found. In autonomous mode, specify which one with --profile or --instance.'
      )
    );
    logger.log(
      dim('Active deployments: ' + entries.map(([k, d]) => `${d.profile}:${d.instanceId}`).join(', '))
    );
    return;
  } else {
    // Multiple deployments — prompt user to pick one
    logger.log('');
    logger.log(bold('Multiple active deployments found:'));
    logger.log('');
    for (const [key, dep] of entries) {
      const labelInfo = dep.label ? ` — ${dep.label}` : '';
      logger.log(`  ${highlight(dep.instanceId)} ${dim(`(${dep.profile}, ${dep.target}${labelInfo}, deployed ${dep.deployedAt})`)}`);
    }
    logger.log('');

    const selected = await selectPrompt(
      'Which deployment do you want to tear down?',
      entries.map(([key, dep]) => {
        const labelInfo = dep.label ? ` — ${dep.label}` : '';
        return `${dep.instanceId} (${dep.profile}, ${dep.target}${labelInfo})`;
      })
    );

    // Strip the trailing " (…)" suffix to recover the bare instance ID
    const selectedInstanceId = selected.replace(/ \(.*\)$/, '');
    lock = entries.find(([_, d]) => d.instanceId === selectedInstanceId)![1];
  }

  // ----------------------------------------------------------------
  // 2. Display deployment info and confirm
  // ----------------------------------------------------------------
  logger.log('');
  logger.log(bold('Active Deployment'));
  logger.log(dim('─'.repeat(50)));
  logger.log(formatKeyValue('Instance', lock.instanceId));
  logger.log(formatKeyValue('Target', lock.target));
  logger.log(formatKeyValue('Profile', lock.profile));
  if (lock.label) {
    logger.log(formatKeyValue('Label', lock.label));
  }
  logger.log(formatKeyValue('Deployed At', lock.deployedAt));

  // Target-specific details (only if the corresponding section exists in the lock).
  if (lock.target === 'local' && lock.local) {
    logger.log(formatKeyValue('Engine', lock.local.engine));
    logger.log(formatKeyValue('Services', lock.local.services.join(', ')));
    logger.log(formatKeyValue('Compose File', lock.local.composePath));
  }

  if (lock.target === 'akash' && lock.akash) {
    logger.log(formatKeyValue('DSEQ', String(lock.akash.dseq)));
    logger.log(formatKeyValue('Owner', lock.akash.owner));
    logger.log(formatKeyValue('Provider', lock.akash.provider));
    logger.log(formatKeyValue('Network', lock.akash.network));
  }

  logger.log(dim('─'.repeat(50)));
  logger.log('');

  if (!options.yes && !options.autonomous) {
    const proceed = await confirmPrompt(
      `Tear down this ${lock.target} deployment?`,
      false
    );

    if (!proceed) {
      logger.log('Cancelled.');
      return;
    }
  }

  // ----------------------------------------------------------------
  // 3. Route to target-specific teardown
  // ----------------------------------------------------------------
  if (lock.target === 'local') {
    await teardownLocal(ctx, projectRoot, lock, options);
  } else if (lock.target === 'akash') {
    await teardownAkash(ctx, projectRoot, lock, options);
  } else {
    logger.error(errorColor(`Unknown deployment target: ${lock.target}`));
  }
}
|
|
298
|
+
|
|
299
|
+
// ─────────────────────────────────────────────────────────
|
|
300
|
+
// Local teardown
|
|
301
|
+
// ─────────────────────────────────────────────────────────
|
|
302
|
+
|
|
303
|
+
/**
|
|
304
|
+
* Tear down a local Docker/Podman deployment.
|
|
305
|
+
*
|
|
306
|
+
* Runs `compose down --remove-orphans` against the compose file
|
|
307
|
+
* recorded in the lock, then deletes the lock file.
|
|
308
|
+
*/
|
|
309
|
+
async function teardownLocal(
|
|
310
|
+
ctx: IKadiContext,
|
|
311
|
+
projectRoot: string,
|
|
312
|
+
lock: DeploymentLock,
|
|
313
|
+
options: DownOptions
|
|
314
|
+
): Promise<void> {
|
|
315
|
+
const { logger } = ctx;
|
|
316
|
+
const local = lock.local!;
|
|
317
|
+
const engine = options.engine || local.engine;
|
|
318
|
+
|
|
319
|
+
const spinner = startSpinner(
|
|
320
|
+
`Stopping ${local.services.length} service(s) with ${engine}...`
|
|
321
|
+
);
|
|
322
|
+
|
|
323
|
+
try {
|
|
324
|
+
const composeFile = local.composePath;
|
|
325
|
+
const cmd = `${engine} compose -f "${composeFile}" down --remove-orphans`;
|
|
326
|
+
|
|
327
|
+
if (options.verbose) {
|
|
328
|
+
logger.log(dim(`Running: ${cmd}`));
|
|
329
|
+
}
|
|
330
|
+
|
|
331
|
+
execSync(cmd, {
|
|
332
|
+
cwd: projectRoot,
|
|
333
|
+
timeout: 60_000,
|
|
334
|
+
stdio: options.verbose ? 'inherit' : 'pipe',
|
|
335
|
+
});
|
|
336
|
+
|
|
337
|
+
succeedSpinner(spinner, 'Containers stopped');
|
|
338
|
+
|
|
339
|
+
// Remove this deployment from the lock file
|
|
340
|
+
await removeDeployment(projectRoot, lock.profile, lock.instanceId);
|
|
341
|
+
|
|
342
|
+
logger.log('');
|
|
343
|
+
logger.log(successColor('✓ Local deployment torn down successfully'));
|
|
344
|
+
logger.log('');
|
|
345
|
+
} catch (err) {
|
|
346
|
+
failSpinner(spinner, 'Failed to stop containers');
|
|
347
|
+
logger.error(errorColor((err as Error).message));
|
|
348
|
+
|
|
349
|
+
if (options.verbose) {
|
|
350
|
+
console.error(err);
|
|
351
|
+
}
|
|
352
|
+
|
|
353
|
+
// Still try to clean up lock if the failure was because containers
|
|
354
|
+
// were already stopped (compose down returns non-zero)
|
|
355
|
+
logger.log(
|
|
356
|
+
dim('\nTip: If containers are already stopped, delete the lock file manually:')
|
|
357
|
+
);
|
|
358
|
+
logger.log(dim(` rm ${path.join(projectRoot, '.kadi-deploy.lock')}`));
|
|
359
|
+
}
|
|
360
|
+
}
|
|
361
|
+
|
|
362
|
+
// ─────────────────────────────────────────────────────────
|
|
363
|
+
// Akash teardown
|
|
364
|
+
// ─────────────────────────────────────────────────────────
|
|
365
|
+
|
|
366
|
+
/**
 * Tear down an Akash Network deployment.
 *
 * Connects a wallet (interactive QR or autonomous mnemonic),
 * creates an AkashClient, and calls `closeDeployment(dseq)`.
 *
 * Control-flow notes:
 * - In autonomous mode this delegates to `teardownAkashAutonomous` and
 *   returns before any interactive work starts (`exitCode` stays null, so
 *   no `process.exit` happens here).
 * - Every interactive outcome (success or failure) ends with
 *   `process.exit(exitCode)` in the `finally` block — presumably so lingering
 *   wallet/client handles cannot keep the event loop alive; TODO confirm.
 */
async function teardownAkash(
  ctx: IKadiContext,
  projectRoot: string,
  lock: DeploymentLock,
  options: DownOptions
): Promise<void> {
  const { logger } = ctx;
  const akash = lock.akash!;
  // CLI --network flag overrides the network recorded at deploy time.
  const network = (options.network as 'mainnet' | 'testnet') || akash.network;

  let walletResult: any;
  let akashClient: AkashClient | undefined;
  // null = "do not exit the process" (only the autonomous delegation path).
  let exitCode: number | null = null;

  try {
    // ----------------------------------------------------------------
    // 1. Connect wallet
    // ----------------------------------------------------------------
    if (options.autonomous) {
      // Autonomous mode: wallet from secrets vault
      await teardownAkashAutonomous(ctx, projectRoot, lock, options);
      return;
    }

    // Interactive mode: WalletConnect QR code
    // `let`, because the QR callback below replaces the spinner instance.
    let spinner = startSpinner('Initializing WalletConnect...');

    const projectId = 'ef0fd1c783f5ef70fbb102f5e5dd2c43'; // Default KADI project ID

    try {
      walletResult = await connectWallet(
        projectId,
        network,
        {
          // Invoked once the pairing URI is ready: render the QR code and
          // switch the spinner to "waiting for approval".
          onUriGenerated: (uri: string) => {
            succeedSpinner(spinner, 'QR Code generated');
            logger.log('');
            logger.log('📱 Scan this QR code with your Keplr mobile wallet:');
            logger.log('');
            QRCode.generate(uri, { small: true });
            logger.log('');
            spinner = startSpinner('Waiting for wallet approval...');
          },
        }
      );

      // connectWallet reports failure via a result object, not a throw.
      if (!walletResult.success) {
        failSpinner(spinner, 'Wallet connection failed');
        logger.error(errorColor(walletResult.error.message));
        exitCode = 1;
        return;
      }

      succeedSpinner(spinner, `Wallet connected: ${walletResult.data.address}`);
    } catch (err) {
      failSpinner(spinner, 'Wallet connection failed');
      logger.error(errorColor((err as Error).message));
      exitCode = 1;
      return;
    }

    // ----------------------------------------------------------------
    // 2. Create Akash client
    // ----------------------------------------------------------------
    spinner = startSpinner('Creating Akash client...');

    try {
      akashClient = new AkashClient({
        network,
        signer: walletResult.data.signer,
      });

      succeedSpinner(spinner, 'Akash client ready');
    } catch (err) {
      failSpinner(spinner, 'Failed to create Akash client');
      logger.error(errorColor((err as Error).message));
      exitCode = 1;
      return;
    }

    // ----------------------------------------------------------------
    // 3. Close the deployment
    // ----------------------------------------------------------------
    spinner = startSpinner(
      `Closing deployment DSEQ ${akash.dseq} on ${network}...`
    );

    const closeResult = await akashClient.closeDeployment(akash.dseq);

    if (!closeResult.success) {
      failSpinner(spinner, 'Failed to close deployment');
      logger.error(errorColor(closeResult.error.message));

      if (options.verbose && closeResult.error.cause) {
        logger.error('\nCause:', closeResult.error.cause);
      }

      exitCode = 1;
      return;
    }

    succeedSpinner(spinner, 'Deployment closed on Akash');

    // ----------------------------------------------------------------
    // 4. Display results and clean up
    // ----------------------------------------------------------------
    // Only remove the lock entry once the on-chain close has succeeded.
    await removeDeployment(projectRoot, lock.profile, lock.instanceId);

    logger.log('');
    logger.log(successColor('✓ Akash deployment closed successfully'));
    logger.log('');
    logger.log(formatKeyValue('DSEQ', closeResult.data.dseq));
    logger.log(formatKeyValue('Transaction', closeResult.data.transactionHash));
    logger.log(formatKeyValue('Block Height', String(closeResult.data.height)));
    logger.log(formatKeyValue('Closed At', closeResult.data.closedAt.toLocaleString()));
    logger.log('');
    logger.log(dim('Remaining escrow deposit has been refunded to your wallet.'));
    logger.log('');

    exitCode = 0;
  } catch (err) {
    logger.error(errorColor(`Akash teardown failed: ${(err as Error).message}`));

    if (options.verbose) {
      console.error(err);
    }

    exitCode = 1;
  } finally {
    // Cleanup: disconnect client and wallet (best-effort; runs on all paths,
    // including the early `return`s above).
    if (akashClient) {
      try {
        await akashClient.disconnect();
      } catch {
        // Ignore cleanup errors
      }
    }

    if (walletResult?.success) {
      try {
        await disconnectWallet(walletResult.data);
      } catch {
        // Ignore cleanup errors
      }
    }

    // exitCode is null only when we delegated to the autonomous path above.
    if (exitCode !== null) {
      process.exit(exitCode);
    }
  }
}
|
|
523
|
+
|
|
524
|
+
// ─────────────────────────────────────────────────────────
|
|
525
|
+
// Akash autonomous teardown
|
|
526
|
+
// ─────────────────────────────────────────────────────────
|
|
527
|
+
|
|
528
|
+
/**
 * Tear down an Akash deployment autonomously using the wallet mnemonic
 * from the secrets vault. No human interaction required.
 *
 * Flow: read AKASH_WALLET from the vault → derive a wallet from the
 * mnemonic → create an AkashClient → `closeDeployment(dseq)` → remove the
 * lock entry. Always terminates the process via `process.exit(exitCode)`
 * in the `finally` block (every path sets exitCode to 0 or 1).
 */
async function teardownAkashAutonomous(
  ctx: IKadiContext,
  projectRoot: string,
  lock: DeploymentLock,
  options: DownOptions
): Promise<void> {
  const { logger } = ctx;
  const akash = lock.akash!;
  // CLI --network flag overrides the network recorded at deploy time.
  const network = (options.network as 'mainnet' | 'testnet') || akash.network;

  let akashClient: AkashClient | undefined;
  // Process exit code; set on every path before the `finally` runs.
  let exitCode: number | null = null;

  logger.log('');
  logger.log(bold('🤖 AUTONOMOUS TEARDOWN MODE'));
  logger.log(dim(' No human interaction required — using secrets vault'));
  logger.log('');

  try {
    // ----------------------------------------------------------------
    // 1. Get wallet mnemonic from secrets vault
    // ----------------------------------------------------------------
    // `let`, because the same spinner variable is reused for each step below.
    let spinner = startSpinner('Reading wallet from secrets vault...');

    const walletVault = options.secretsVault || 'global';

    // Synchronous CLI read; returns a falsy value when the key is absent.
    const mnemonic = readSecretFromCli({
      key: 'AKASH_WALLET',
      vault: walletVault,
      cwd: projectRoot,
    });

    if (!mnemonic) {
      failSpinner(spinner, 'Wallet mnemonic not found');
      logger.error(
        errorColor(
          `Wallet mnemonic not found in vault '${walletVault}' (key: AKASH_WALLET).\n\n` +
          `Store your mnemonic with:\n` +
          `  kadi secret set AKASH_WALLET "your 12 or 24 word mnemonic" -v ${walletVault}\n`
        )
      );
      exitCode = 1;
      return;
    }

    succeedSpinner(spinner, 'Wallet mnemonic loaded from vault');

    // ----------------------------------------------------------------
    // 2. Create wallet from mnemonic
    // ----------------------------------------------------------------
    spinner = startSpinner('Creating wallet from mnemonic...');

    const walletResult = await createWalletFromMnemonic(mnemonic, network);

    // Failure is reported via a result object, not a throw.
    if (!walletResult.success) {
      failSpinner(spinner, 'Failed to create wallet');
      logger.error(errorColor(walletResult.error.message));
      exitCode = 1;
      return;
    }

    const wallet = walletResult.data;
    succeedSpinner(spinner, `Wallet ready: ${wallet.address}`);

    // ----------------------------------------------------------------
    // 3. Create Akash client with signer
    // ----------------------------------------------------------------
    spinner = startSpinner('Connecting to Akash network...');

    akashClient = new AkashClient({
      network,
      signer: wallet.signer,
    });

    succeedSpinner(spinner, `Connected to ${network}`);

    // ----------------------------------------------------------------
    // 4. Close the deployment
    // ----------------------------------------------------------------
    spinner = startSpinner(
      `Closing deployment DSEQ ${akash.dseq} on ${network}...`
    );

    const closeResult = await akashClient.closeDeployment(akash.dseq);

    if (!closeResult.success) {
      failSpinner(spinner, 'Failed to close deployment');
      logger.error(errorColor(closeResult.error.message));

      if (options.verbose && closeResult.error.cause) {
        logger.error('\nCause:', closeResult.error.cause);
      }

      // Provide hints for common errors
      logger.log('');
      logger.log(warning('Hint: Make sure:'));
      logger.log(dim(`  1. The wallet in vault '${walletVault}' owns deployment DSEQ ${akash.dseq}`));
      logger.log(dim(`  2. The deployment is still active (not already closed)`));
      logger.log(dim(`  3. The network is correct (${network})`));

      exitCode = 1;
      return;
    }

    succeedSpinner(spinner, 'Deployment closed on Akash');

    // ----------------------------------------------------------------
    // 5. Display results and clean up
    // ----------------------------------------------------------------
    // Only remove the lock entry once the on-chain close has succeeded.
    await removeDeployment(projectRoot, lock.profile, lock.instanceId);

    logger.log('');
    logger.log(successColor('✓ Akash deployment closed successfully'));
    logger.log('');
    logger.log(formatKeyValue('DSEQ', closeResult.data.dseq));
    logger.log(formatKeyValue('Transaction', closeResult.data.transactionHash));
    logger.log(formatKeyValue('Block Height', String(closeResult.data.height)));
    logger.log(formatKeyValue('Closed At', closeResult.data.closedAt.toLocaleString()));
    logger.log('');
    logger.log(dim('Remaining escrow deposit has been refunded to your wallet.'));
    logger.log('');

    exitCode = 0;
  } catch (err) {
    logger.error(
      errorColor(`Autonomous teardown failed: ${(err as Error).message}`)
    );

    if (options.verbose) {
      console.error(err);
    }

    exitCode = 1;
  } finally {
    // Cleanup (best-effort; runs on all paths, including early `return`s).
    if (akashClient) {
      try {
        await akashClient.disconnect();
      } catch {
        // Ignore cleanup errors
      }
    }

    if (exitCode !== null) {
      process.exit(exitCode);
    }
  }
}
|