@gns-foundation/hive-worker 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/cli.ts ADDED
@@ -0,0 +1,418 @@
1
+ #!/usr/bin/env node
2
+ // ============================================================
3
+ // HIVE WORKER CLI
4
+ // Usage:
5
+ // npx @gns-foundation/hive-worker join [--handle <@name>] [--rpc-port <port>]
6
+ // npx @gns-foundation/hive-worker status
7
+ // npx @gns-foundation/hive-worker leave
8
+ // npx @gns-foundation/hive-worker whoami
9
+ // npx @gns-foundation/hive-worker models list
10
+ // npx @gns-foundation/hive-worker models fetch <model-id>
11
+ // ============================================================
12
+
13
+ import { program } from 'commander';
14
+ import { loadOrCreateIdentity, shortPk, identityPath } from './identity.js';
15
+ import { detectHardware, detectGeo } from './hardware.js';
16
+ import { registerNode, heartbeat, deregisterNode, fetchSwarmStats, fetchTokenBalance } from './registry.js';
17
+ import { findRpcBinary, startRpcServer, stopRpcServer, DEFAULT_RPC_PORT, type LlamaRpcHandle } from './llama.js';
18
+ import {
19
+ printBanner, printSuccess, printInfo, printWarn, printError,
20
+ renderDashboard, clearDashboard, hideCursor,
21
+ type DashboardState,
22
+ } from './dashboard.js';
23
+ import { startJobPoller, type PollController, type HiveJob, type JobResult } from './jobs.js';
24
+ import { executeJob, downloadModel, listCachedModels, resolveModelUrl, findLlamaCli } from './executor.js';
25
+ import { creditWorker, SPLIT } from './settlement.js';
26
+
27
// Cadences for the long-running `join` session (all in milliseconds).
const HEARTBEAT_INTERVAL_MS = 30_000;     // registry keep-alive ping
const STATS_REFRESH_INTERVAL_MS = 60_000; // swarm stats + token balance refresh
const DASHBOARD_REFRESH_MS = 3_000;       // dashboard re-render cadence
const JOB_POLL_INTERVAL_MS = 5_000; // poll for new jobs every 5s
31
+
32
+ // ─── JOIN command ─────────────────────────────────────────────
33
+
34
/**
 * `join` — the main long-running command: register this machine in the
 * GEIANT Hive swarm and stay online (heartbeating, refreshing stats,
 * polling for inference jobs, rendering the terminal dashboard) until a
 * SIGINT/SIGTERM triggers a graceful shutdown.
 *
 * @param opts.handle  GNS handle, with or without the leading '@'
 * @param opts.rpcPort port for the llama.cpp rpc-server (CLI passes a string)
 * @param opts.noRpc   true = do not start the rpc-server
 * @param opts.noJobs  true = observer mode, never execute inference jobs
 *
 * NOTE(review): commander parses negatable flags (--no-rpc / --no-jobs)
 * into `opts.rpc` / `opts.jobs` (default true), not `opts.noRpc` /
 * `opts.noJobs` — confirm the caller maps them before passing `opts` here,
 * otherwise these two flags can never be true in this function.
 */
async function cmdJoin(opts: {
  handle?: string;
  rpcPort?: string;
  noRpc?: boolean;
  noJobs?: boolean;
}): Promise<void> {
  printBanner();

  // 1. Identity — load the persisted keypair, or generate one on first run.
  const { identity, isNew } = loadOrCreateIdentity();
  if (isNew) {
    printSuccess(`New identity generated: ${shortPk(identity.pk)}`);
    printInfo(`Stored at ${identityPath()}`);
  } else {
    printSuccess(`Identity loaded: ${shortPk(identity.pk)}`);
  }

  // 2. Handle — strip a leading '@' if the user typed one.
  const handle = opts.handle?.replace(/^@/, '') ?? null;
  if (handle) printSuccess(`Handle: @${handle}`);
  else printInfo('No handle — use --handle @yourname to claim one.');

  // 3. Hardware
  printInfo('Detecting hardware...');
  const hw = detectHardware();
  printSuccess(`${hw.cpuCores}-core ${hw.arch} · ${hw.ramGb} GB RAM · ~${hw.estimatedTflops} TFLOPS`);
  if (hw.gpuModel) printSuccess(`GPU: ${hw.gpuModel}`);

  // 4. Geo
  printInfo('Detecting location...');
  const geo = await detectGeo();
  printSuccess(`Location: ${geo.city}, ${geo.country} · H3 cell ${geo.h3Cell.slice(0, 10)}…`);

  // 5. llama-cli check (for job execution)
  const llamaCliAvailable = !!findLlamaCli();
  if (!opts.noJobs) {
    if (llamaCliAvailable) {
      printSuccess('llama-cli found — job execution enabled');
    } else {
      printWarn('llama-cli not found — joining as observer (no inference jobs).');
      printWarn('Install llama.cpp to earn GNS: https://github.com/ggerganov/llama.cpp');
    }
  }

  // logs declared early so the rpc-server callback can reference it immediately
  const logs: string[] = [`Joined at ${new Date().toISOString()}`];

  // 6. RPC server (for pipeline/shard mode)
  let rpcHandle: LlamaRpcHandle | null = null;
  const rpcBinary = findRpcBinary();
  const rpcPort = opts.noRpc ? null : parseInt(opts.rpcPort ?? String(DEFAULT_RPC_PORT), 10);

  if (opts.noRpc !== true && rpcBinary) {
    printInfo(`Starting llama.cpp rpc-server on :${rpcPort}...`);
    // rpcPort is non-null here: the ternary above only yields null when
    // opts.noRpc is truthy, which this branch excludes.
    rpcHandle = startRpcServer(rpcPort!, (line) => logs.push(line));
    if (rpcHandle) printSuccess(`RPC server PID ${rpcHandle.pid} on :${rpcPort}`);
  } else if (opts.noRpc !== true) {
    printWarn('rpc-server not found — pipeline mode disabled.');
  }

  // 7. Register — failure is non-fatal; the worker keeps running and the
  // heartbeat loop keeps attempting contact with the registry.
  printInfo('Registering with GEIANT Hive swarm...');
  try {
    await registerNode(identity, hw, geo, handle, rpcHandle?.port ?? null);
    printSuccess('Registered in swarm registry');
  } catch (err) {
    printError(`Registration failed: ${err instanceof Error ? err.message : String(err)}`);
    printWarn('Continuing in offline mode — will retry on heartbeat');
  }

  // 8. Initial stats
  let swarmStats = await fetchSwarmStats();
  let tokensEarned = await fetchTokenBalance(identity.pk);
  printSuccess(`Swarm: ${swarmStats.activeNodes} active nodes · ${swarmStats.totalTflops} TFLOPS`);

  // Cached models
  const cachedModels = listCachedModels();
  if (cachedModels.length > 0) {
    printSuccess(`Models cached: ${cachedModels.map(m => m.modelId).join(', ')}`);
  } else if (!opts.noJobs && llamaCliAvailable) {
    printWarn('No models cached. Fetch one: hive-worker models fetch phi-3-mini');
  }

  console.log('');
  printInfo('Entering live mode. Press Ctrl+C to disconnect.\n');

  // ─── Live state ───────────────────────────────────────────────

  hideCursor();

  const startTime = Date.now();
  let heartbeatCount = 0;
  let lastHeartbeat: Date | null = null;
  let jobsCompleted = 0;

  // Append a timestamped line to the shared log buffer, capped at 50 entries.
  // NOTE(review): renderDashboard detects new lines by comparing
  // state.logs.length against its remembered count; once this cap holds the
  // length at 50, new lines may no longer be detected — confirm against
  // the renderer and consider a monotonically growing counter instead.
  const addLog = (msg: string) => {
    logs.push(`[${new Date().toLocaleTimeString()}] ${msg}`);
    if (logs.length > 50) logs.splice(0, logs.length - 50);
  };

  // Single mutable state object shared with the dashboard renderer; the
  // timers below update it in place.
  const state: DashboardState = {
    pk: identity.pk,
    handle,
    status: 'idle',
    hw,
    geo,
    rpcPort: rpcHandle?.port ?? null,
    rpcAvailable: !!rpcBinary,
    tokensEarned,
    swarmStats,
    uptimeSeconds: 0,
    heartbeatCount: 0,
    lastHeartbeat: null,
    logs,
  };

  // ─── Heartbeat timer ─────────────────────────────────────────

  const heartbeatTimer = setInterval(async () => {
    try {
      await heartbeat(identity.pk, state.status);
      heartbeatCount++;
      lastHeartbeat = new Date();
      state.heartbeatCount = heartbeatCount;
      state.lastHeartbeat = lastHeartbeat;
      addLog(`Heartbeat #${heartbeatCount}`);
    } catch (err) {
      addLog(`Heartbeat error: ${err instanceof Error ? err.message : String(err)}`);
    }
  }, HEARTBEAT_INTERVAL_MS);

  // ─── Stats refresh timer ─────────────────────────────────────

  const statsTimer = setInterval(async () => {
    try {
      swarmStats = await fetchSwarmStats();
      tokensEarned = await fetchTokenBalance(identity.pk);
      state.swarmStats = swarmStats;
      state.tokensEarned = tokensEarned;
    } catch { /* non-fatal */ }
  }, STATS_REFRESH_INTERVAL_MS);

  // ─── Job polling loop ─────────────────────────────────────────

  let jobPoller: PollController | null = null;

  // Jobs require either a local llama-cli or at least one cached model.
  if (!opts.noJobs && (llamaCliAvailable || cachedModels.length > 0)) {
    jobPoller = startJobPoller({
      workerPk: identity.pk,
      h3Cell: geo.h3Cell,
      intervalMs: JOB_POLL_INTERVAL_MS,

      onJobClaimed: (job: HiveJob) => {
        state.status = 'computing';
        addLog(`Job claimed: ${job.id.slice(0, 8)} · model=${job.model_id} · ${job.max_tokens} tokens`);
      },

      onJobCompleted: (job: HiveJob, result: JobResult) => {
        state.status = 'idle';
        jobsCompleted++;
        addLog(
          `Job ${job.id.slice(0, 8)} done · ${result.tokensPerSecond} tok/s · ` +
          `earned ${(job.gns_reward * SPLIT.WORKER).toFixed(4)} GNS`,
        );
        // Optimistic local credit (server-side Stellar TX is async)
        state.tokensEarned += job.gns_reward * SPLIT.WORKER;
        // Kick off background settlement
        creditWorker(identity.pk, job.id, job.gns_reward)
          .then(rec => {
            if (rec.stellarTxHash) {
              addLog(`Settled · TX ${rec.stellarTxHash.slice(0, 12)}…`);
            }
          })
          .catch(err => addLog(`Settlement error: ${err instanceof Error ? err.message : String(err)}`));
      },

      onJobFailed: (job: HiveJob, error: string) => {
        state.status = 'idle';
        addLog(`Job ${job.id.slice(0, 8)} failed: ${error.slice(0, 60)}`);
      },

      onLog: addLog,

      executor: async (job: HiveJob) => {
        return executeJob(job, {
          onToken: (_tok) => { /* streaming tokens — could update dashboard */ },
          onLog: addLog,
        });
      },
    });

    addLog('Job poller started — polling every 5s');
  } else if (opts.noJobs) {
    addLog('Job execution disabled (--no-jobs flag)');
  } else {
    addLog('Observer mode — install llama.cpp + fetch a model to execute jobs');
  }

  // ─── Dashboard refresh ────────────────────────────────────────

  const dashTimer = setInterval(() => {
    state.uptimeSeconds = Math.floor((Date.now() - startTime) / 1000);
    renderDashboard(state);
  }, DASHBOARD_REFRESH_MS);

  renderDashboard(state);

  // ─── Graceful shutdown ────────────────────────────────────────

  // Stop timers and the poller, deregister (best-effort), stop the RPC
  // server, print a session summary, then exit(0).
  const shutdown = async (signal: string) => {
    clearInterval(heartbeatTimer);
    clearInterval(statsTimer);
    clearInterval(dashTimer);
    jobPoller?.stop();
    clearDashboard();

    console.log('\n');
    printInfo(`Received ${signal}. Disconnecting gracefully...`);

    try {
      await deregisterNode(identity.pk);
      printSuccess('Marked offline in swarm registry');
    } catch { /* best-effort */ }

    if (rpcHandle) {
      stopRpcServer(rpcHandle);
      printSuccess('RPC server stopped');
    }

    // Prefer the registry's balance; fall back to the local optimistic tally.
    const earned = await fetchTokenBalance(identity.pk).catch(() => state.tokensEarned);
    printSuccess(`Session complete · ${jobsCompleted} jobs · ${earned.toFixed(4)} GNS earned`);
    console.log('');
    process.exit(0);
  };

  process.on('SIGINT', () => shutdown('SIGINT'));
  process.on('SIGTERM', () => shutdown('SIGTERM'));
}
272
+
273
+ // ─── STATUS command ───────────────────────────────────────────
274
+
275
+ async function cmdStatus(): Promise<void> {
276
+ printBanner();
277
+ const { identity } = loadOrCreateIdentity();
278
+ printInfo(`Identity: ${shortPk(identity.pk)}`);
279
+
280
+ const stats = await fetchSwarmStats();
281
+ const tokens = await fetchTokenBalance(identity.pk);
282
+ printInfo(`Swarm: ${stats.activeNodes} active nodes · ${stats.totalTflops} TFLOPS`);
283
+ printInfo(`GNS earned: ${tokens.toFixed(4)}`);
284
+
285
+ const models = listCachedModels();
286
+ if (models.length > 0) {
287
+ printInfo(`Cached models: ${models.map(m => `${m.modelId} (${m.sizeMb} MB)`).join(', ')}`);
288
+ } else {
289
+ printWarn('No models cached. Run: hive-worker models fetch phi-3-mini');
290
+ }
291
+
292
+ const llamaCli = findLlamaCli();
293
+ const rpcBin = findRpcBinary();
294
+ printInfo(`llama-cli: ${llamaCli ?? 'not found'}`);
295
+ printInfo(`rpc-server: ${rpcBin ?? 'not found'}`);
296
+ }
297
+
298
+ // ─── LEAVE command ────────────────────────────────────────────
299
+
300
+ async function cmdLeave(): Promise<void> {
301
+ printBanner();
302
+ const { identity } = loadOrCreateIdentity();
303
+ printInfo(`Deregistering ${shortPk(identity.pk)} from swarm...`);
304
+ await deregisterNode(identity.pk);
305
+ printSuccess('Marked offline. Identity preserved at ' + identityPath());
306
+ printInfo('Run `hive-worker join` to rejoin.');
307
+ }
308
+
309
+ // ─── WHOAMI command ───────────────────────────────────────────
310
+
311
+ async function cmdWhoami(): Promise<void> {
312
+ const { identity, isNew } = loadOrCreateIdentity();
313
+ if (isNew) printWarn('No identity found — a new one was just generated.');
314
+ console.log('');
315
+ console.log(` pk: ${identity.pk}`);
316
+ console.log(` short: ${shortPk(identity.pk)}`);
317
+ console.log(` created: ${identity.createdAt}`);
318
+ console.log(` file: ${identityPath()}`);
319
+ console.log('');
320
+ }
321
+
322
+ // ─── MODELS LIST command ──────────────────────────────────────
323
+
324
+ async function cmdModelsList(): Promise<void> {
325
+ const cached = listCachedModels();
326
+ console.log('');
327
+ if (cached.length === 0) {
328
+ printWarn('No models cached yet.');
329
+ printInfo('Fetch one: hive-worker models fetch phi-3-mini');
330
+ } else {
331
+ printSuccess(`${cached.length} model(s) cached:`);
332
+ for (const m of cached) {
333
+ console.log(` ${m.modelId.padEnd(20)} ${m.sizeMb} MB`);
334
+ }
335
+ }
336
+ console.log('');
337
+ printInfo('Available models: phi-3-mini, gemma-2-2b, tinyllama');
338
+ console.log('');
339
+ }
340
+
341
+ // ─── MODELS FETCH command ─────────────────────────────────────
342
+
343
+ async function cmdModelsFetch(modelId: string): Promise<void> {
344
+ printBanner();
345
+ const job = {
346
+ model_id: modelId,
347
+ model_url: null,
348
+ } as Pick<HiveJob, 'model_id' | 'model_url'>;
349
+
350
+ const url = resolveModelUrl(job as HiveJob);
351
+ if (!url) {
352
+ printError(`Unknown model "${modelId}". Available: phi-3-mini, gemma-2-2b, tinyllama`);
353
+ process.exit(1);
354
+ }
355
+
356
+ printInfo(`Downloading ${modelId}...`);
357
+ printInfo(`Source: ${url}`);
358
+ console.log('');
359
+
360
+ let lastPct = -1;
361
+ const dest = await downloadModel(modelId, url, (pct, mbDone, mbTotal) => {
362
+ if (pct !== lastPct) {
363
+ process.stdout.write(`\r Downloading... ${pct}% (${mbDone} / ${mbTotal} MB) `);
364
+ lastPct = pct;
365
+ }
366
+ });
367
+
368
+ console.log('');
369
+ printSuccess(`Saved to ${dest}`);
370
+ printInfo('You can now run: hive-worker join');
371
+ }
372
+
373
+ // ─── CLI definition ───────────────────────────────────────────
374
+
375
+ program
376
+ .name('hive-worker')
377
+ .description('GEIANT Hive compute node — earn GNS tokens for idle compute')
378
+ .version('0.1.0');
379
+
380
+ program
381
+ .command('join')
382
+ .description('Join the GEIANT Hive swarm and start earning GNS')
383
+ .option('--handle <n>', 'Your GNS handle (e.g. @alice)')
384
+ .option('--rpc-port <port>', 'Port for llama.cpp rpc-server', String(DEFAULT_RPC_PORT))
385
+ .option('--no-rpc', 'Skip starting rpc-server')
386
+ .option('--no-jobs', 'Observer only — do not execute inference jobs')
387
+ .action(async (opts) => { await cmdJoin(opts); });
388
+
389
+ program
390
+ .command('status')
391
+ .description('Show swarm status and your token balance')
392
+ .action(async () => { await cmdStatus(); });
393
+
394
+ program
395
+ .command('leave')
396
+ .description('Gracefully disconnect from the swarm')
397
+ .action(async () => { await cmdLeave(); });
398
+
399
+ program
400
+ .command('whoami')
401
+ .description('Show your Hive identity')
402
+ .action(async () => { await cmdWhoami(); });
403
+
404
+ const models = program
405
+ .command('models')
406
+ .description('Manage locally cached inference models');
407
+
408
+ models
409
+ .command('list')
410
+ .description('List cached models')
411
+ .action(async () => { await cmdModelsList(); });
412
+
413
+ models
414
+ .command('fetch <model-id>')
415
+ .description('Download a model (phi-3-mini, gemma-2-2b, tinyllama)')
416
+ .action(async (modelId: string) => { await cmdModelsFetch(modelId); });
417
+
418
+ program.parse();
@@ -0,0 +1,129 @@
1
+ // ============================================================
2
+ // HIVE WORKER — TERMINAL DASHBOARD (simple logger mode)
3
+ // No ANSI cursor movement — just clean line-by-line output.
4
+ // Eliminates the flickering caused by cursor-up + clear redraws.
5
+ // ============================================================
6
+
7
+ import type { HardwareProfile, GeoProfile } from './hardware.js';
8
+ import type { RegistryStats, WorkerStatus } from './registry.js';
9
+
10
// ANSI SGR escape sequences used for terminal colouring.
const A = {
  reset: '\x1b[0m',
  bold: '\x1b[1m',
  green: '\x1b[32m',
  cyan: '\x1b[36m',
  yellow: '\x1b[33m',
  magenta: '\x1b[35m',
  gray: '\x1b[90m',
  white: '\x1b[37m',
};
// Wrap `s` in colour/style code `c`, resetting attributes afterwards.
const col = (c: string, s: string) => `${c}${s}${A.reset}`;
21
+
22
/**
 * Snapshot of everything the dashboard renders. The CLI mutates one shared
 * instance in place; renderDashboard reads it on each tick.
 */
export interface DashboardState {
  pk: string;               // worker public key
  handle: string | null;    // GNS handle without the '@', if claimed
  status: WorkerStatus;
  hw: HardwareProfile;
  geo: GeoProfile;
  rpcPort: number | null;   // llama.cpp rpc-server port (null = not running)
  rpcAvailable: boolean;    // whether the rpc-server binary was found
  tokensEarned: number;     // GNS balance (optimistically bumped on job completion)
  swarmStats: RegistryStats;
  uptimeSeconds: number;    // seconds since `join` started
  heartbeatCount: number;
  lastHeartbeat: Date | null;
  logs: string[];           // rolling log buffer (producer in cli.ts caps it at 50)
}
37
+
38
// Change-tracking for the incremental renderer: remember what was last
// printed so renderDashboard only re-emits lines whose state changed.
let lastStatus: WorkerStatus | null = null; // last printed worker status
let lastHeartbeat = 0;                      // last printed heartbeat count
let lastTokens = -1;                        // last printed balance (-1 = never printed)
let lastSwarmNodes = -1;                    // last printed swarm node count
let lastLogCount = 0;                       // number of log lines already printed
let headerPrinted = false;                  // header block emitted once per process
45
+
46
/**
 * Incremental, append-only dashboard renderer: prints a one-time header,
 * then on each call emits only the lines whose backing state changed since
 * the previous call (status, heartbeat, swarm size, earnings, log tail).
 * Change detection uses the module-level `last*` variables above, so this
 * function is stateful across calls.
 */
export function renderDashboard(state: DashboardState): void {
  // Print header once
  if (!headerPrinted) {
    console.log('');
    console.log(col(A.cyan + A.bold, ' ◆ GEIANT HIVE WORKER v0.1.0'));
    console.log(col(A.gray, ` ${state.hw.cpuCores}c ${state.hw.arch} · ${state.hw.ramGb} GB · ~${state.hw.estimatedTflops} TFLOPS · ${state.geo.city}`));
    console.log(col(A.gray, ` Identity: ${col(A.cyan, state.pk.slice(0, 8))}${state.handle ? ' · @' + state.handle : ''}`));
    console.log(col(A.gray, ' ─────────────────────────────────────'));
    headerPrinted = true;
  }

  // Print status line only when status changes
  if (state.status !== lastStatus) {
    const icon = state.status === 'idle' ? col(A.green, '●')
      : state.status === 'computing' ? col(A.yellow, '◉')
      : col(A.gray, '○');
    console.log(` ${icon} ${col(A.white, state.status.toUpperCase())} ${col(A.gray, new Date().toLocaleTimeString())}`);
    lastStatus = state.status;
  }

  // Print heartbeat line only on new heartbeat
  if (state.heartbeatCount > lastHeartbeat) {
    console.log(col(A.gray, ` ♥ Heartbeat #${state.heartbeatCount} · uptime ${humanUptime(state.uptimeSeconds)}`));
    lastHeartbeat = state.heartbeatCount;
  }

  // Print swarm update when node count changes
  if (state.swarmStats.activeNodes !== lastSwarmNodes) {
    console.log(col(A.gray, ` ⬡ Swarm: ${col(A.white, String(state.swarmStats.activeNodes))} nodes · ${col(A.yellow, state.swarmStats.totalTflops + ' TFLOPS')}`));
    lastSwarmNodes = state.swarmStats.activeNodes;
  }

  // Print earnings update when tokens change
  if (state.tokensEarned !== lastTokens) {
    console.log(col(A.green, ` ✦ Earned: ${state.tokensEarned.toFixed(4)} GNS`));
    lastTokens = state.tokensEarned;
  }

  // Print only new log lines.
  // NOTE(review): this assumes state.logs only ever grows. The producer in
  // cli.ts caps the buffer at 50 entries via splice; once capped, the length
  // stays at 50 and `length > lastLogCount` never fires again, so new log
  // lines stop appearing — confirm and consider passing a monotonically
  // increasing total-lines counter instead of relying on array length.
  if (state.logs.length > lastLogCount) {
    const newLines = state.logs.slice(lastLogCount);
    for (const line of newLines) {
      console.log(col(A.gray, ' › ') + col(A.white, line.slice(0, 72)));
    }
    lastLogCount = state.logs.length;
  }
}
93
+
94
// Kept for API compatibility with the previous ANSI dashboard; in logger
// mode there is no screen region to restore, so this is a no-op.
export function clearDashboard(): void {
  // Nothing to clear in logger mode
}
97
+
98
// Kept for API compatibility with the previous ANSI dashboard; logger mode
// never moves the cursor, so hiding it is unnecessary. No-op.
export function hideCursor(): void {
  // No cursor hiding in logger mode — not needed
}
101
+
102
+ function humanUptime(s: number): string {
103
+ if (s < 60) return `${s}s`;
104
+ if (s < 3600) return `${Math.floor(s / 60)}m ${s % 60}s`;
105
+ return `${Math.floor(s / 3600)}h ${Math.floor((s % 3600) / 60)}m`;
106
+ }
107
+
108
+ export function printBanner(): void {
109
+ console.log('');
110
+ console.log(col(A.cyan + A.bold, ' ◆ GEIANT HIVE WORKER'));
111
+ console.log(col(A.gray, ' geiant.com/hive · Powered by GNS Protocol'));
112
+ console.log('');
113
+ }
114
+
115
+ export function printSuccess(msg: string): void {
116
+ console.log(` ${col(A.green, '✓')} ${msg}`);
117
+ }
118
+
119
+ export function printInfo(msg: string): void {
120
+ console.log(` ${col(A.cyan, '›')} ${msg}`);
121
+ }
122
+
123
+ export function printWarn(msg: string): void {
124
+ console.log(` ${col(A.yellow, '!')} ${msg}`);
125
+ }
126
+
127
+ export function printError(msg: string): void {
128
+ console.log(` ${col(A.magenta, '✗')} ${msg}`);
129
+ }