ai-exodus 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +239 -0
- package/bin/cli.js +655 -0
- package/bin/regenerate.js +95 -0
- package/package.json +43 -0
- package/portal/exodus_mcp.py +300 -0
- package/portal/schema.sql +158 -0
- package/portal/worker.js +2410 -0
- package/prompts/index.js +317 -0
- package/src/analyzer.js +676 -0
- package/src/checkpoint.js +109 -0
- package/src/claude.js +147 -0
- package/src/config.js +40 -0
- package/src/deploy.js +193 -0
- package/src/generator.js +822 -0
- package/src/import.js +185 -0
- package/src/parser.js +445 -0
- package/src/spinner.js +55 -0
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Checkpoint system — saves progress after each chunk/pass
|
|
3
|
+
* Resumes from where it left off if the process crashes
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { readFile, writeFile, unlink } from 'node:fs/promises';
|
|
7
|
+
import { existsSync } from 'node:fs';
|
|
8
|
+
import { join } from 'node:path';
|
|
9
|
+
|
|
10
|
+
const CHECKPOINT_FILE = '.exodus-checkpoint.json';

/**
 * Checkpoint — saves progress after each chunk/pass so a crashed
 * migration can resume where it left off.
 *
 * Persisted state shape:
 *   {
 *     passes: { [passNum]: { completedChunks: number[], results: Array, complete: boolean } },
 *     merged: { [key]: any },
 *     savedAt: ISO timestamp (added on every save)
 *   }
 */
export class Checkpoint {
  /**
   * @param {string} outputDir - Directory the checkpoint file lives in.
   */
  constructor(outputDir) {
    this.path = join(outputDir, CHECKPOINT_FILE);
    this.data = null;
  }

  /**
   * Load an existing checkpoint.
   * @returns {Promise<object|null>} Parsed state, or null if the file is
   *   missing or unreadable/corrupt (a bad checkpoint is treated as none).
   */
  async load() {
    if (!existsSync(this.path)) return null;
    try {
      const raw = await readFile(this.path, 'utf-8');
      this.data = JSON.parse(raw);
      return this.data;
    } catch {
      // Corrupt checkpoint — start fresh rather than crash.
      return null;
    }
  }

  /**
   * Persist the given state (plus a savedAt timestamp) to disk.
   * @param {object} state
   */
  async save(state) {
    this.data = { ...state, savedAt: new Date().toISOString() };
    await writeFile(this.path, JSON.stringify(this.data, null, 2), 'utf-8');
  }

  /**
   * Remove the checkpoint file (migration complete).
   */
  async clear() {
    if (existsSync(this.path)) {
      await unlink(this.path);
    }
  }

  /**
   * Completed chunk indices for a specific pass.
   * @param {number|string} passNum
   * @returns {number[]} Empty array when the pass has no record.
   */
  getCompletedChunks(passNum) {
    if (!this.data?.passes?.[passNum]) return [];
    return this.data.passes[passNum].completedChunks || [];
  }

  /**
   * Results array for a specific pass (sparse; indexed by chunk index).
   * @returns {Array|null}
   */
  getPassResults(passNum) {
    if (!this.data?.passes?.[passNum]) return null;
    return this.data.passes[passNum].results || null;
  }

  /**
   * Whether a pass has been marked fully complete.
   */
  isPassComplete(passNum) {
    return this.data?.passes?.[passNum]?.complete === true;
  }

  /**
   * Merged/synthesized data stored under a key, or null.
   */
  getMergedData(key) {
    return this.data?.merged?.[key] || null;
  }

  /**
   * Record one chunk's result for a pass and persist immediately.
   *
   * Fix: a chunk index is recorded at most once. Previously a retried or
   * re-run chunk was pushed again, inflating completedChunks.length and
   * letting `complete` flip to true before every distinct chunk finished.
   *
   * @param {number|string} passNum
   * @param {number} chunkIndex
   * @param {*} result - Stored at results[chunkIndex] (latest wins).
   * @param {number} totalChunks - Total chunks in this pass.
   */
  async saveChunkResult(passNum, chunkIndex, result, totalChunks) {
    if (!this.data) this.data = { passes: {}, merged: {} };
    if (!this.data.passes) this.data.passes = {};
    if (!this.data.passes[passNum]) {
      this.data.passes[passNum] = { completedChunks: [], results: [], complete: false };
    }

    const pass = this.data.passes[passNum];
    if (!pass.completedChunks.includes(chunkIndex)) {
      pass.completedChunks.push(chunkIndex);
    }
    pass.results[chunkIndex] = result;

    if (pass.completedChunks.length >= totalChunks) {
      pass.complete = true;
    }

    await this.save(this.data);
  }

  /**
   * Store merged/synthesized data under a key and persist immediately.
   */
  async saveMerged(key, data) {
    if (!this.data) this.data = { passes: {}, merged: {} };
    if (!this.data.merged) this.data.merged = {};
    this.data.merged[key] = data;
    await this.save(this.data);
  }
}
|
package/src/claude.js
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Claude CLI wrapper
|
|
3
|
+
* Uses `claude --print` — runs on Max subscription, zero extra cost
|
|
4
|
+
* System prompts via temp files (avoids Windows arg length limits)
|
|
5
|
+
* Conversation chunks via stdin
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { spawn } from 'node:child_process';
|
|
9
|
+
import { existsSync } from 'node:fs';
|
|
10
|
+
import { writeFile, unlink } from 'node:fs/promises';
|
|
11
|
+
import { join } from 'node:path';
|
|
12
|
+
import { tmpdir } from 'node:os';
|
|
13
|
+
import { randomBytes } from 'node:crypto';
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Find the claude CLI path
|
|
17
|
+
*/
|
|
18
|
+
/**
 * Locate the claude CLI binary.
 * Probes a list of well-known install locations first, then falls back to
 * the platform lookup command (`where`/`which`); if nothing is found,
 * returns the bare name 'claude' and lets spawn resolve it via PATH.
 * @returns {Promise<string>} Path (or bare name) of the claude CLI.
 */
async function findClaude() {
  const home = process.env.USERPROFILE || process.env.HOME || '';
  const candidates = [
    join(home, 'AppData', 'Local', 'Microsoft', 'WinGet', 'Links', 'claude.exe'),
    join(home, '.npm-global', 'bin', 'claude'),
    join(home, '.npm-global', 'bin', 'claude.cmd'),
    '/usr/local/bin/claude',
  ];

  const hit = candidates.find((candidate) => existsSync(candidate));
  if (hit) return hit;

  // Fallback: ask the shell where the binary lives.
  const { exec } = await import('node:child_process');
  const lookup = process.platform === 'win32' ? 'where claude.exe' : 'which claude';
  return new Promise((resolve) => {
    exec(lookup, (err, stdout) => {
      const found = !err && stdout.trim();
      resolve(found ? stdout.trim().split('\n')[0].trim() : 'claude');
    });
  });
}
|
|
40
|
+
|
|
41
|
+
// Memoized CLI path — resolved at most once per process.
let claudePath = null;

/** Return the cached claude path, resolving it on first use. */
async function getClaude() {
  claudePath ??= await findClaude();
  return claudePath;
}
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* Write string to a temp file, return path
|
|
49
|
+
*/
|
|
50
|
+
/**
 * Persist a string to a uniquely-named temp file.
 * @param {string} content - Text to write (UTF-8).
 * @returns {Promise<string>} Absolute path of the file written.
 */
async function writeTempFile(content) {
  const suffix = randomBytes(6).toString('hex');
  const filePath = join(tmpdir(), `exodus-${suffix}.txt`);
  await writeFile(filePath, content, 'utf-8');
  return filePath;
}
|
|
56
|
+
|
|
57
|
+
/**
|
|
58
|
+
* Call Claude via CLI
|
|
59
|
+
* System prompt → temp file (--system-prompt-file)
|
|
60
|
+
* Prompt → stdin pipe
|
|
61
|
+
*/
|
|
62
|
+
/**
 * Call Claude via the `claude --print` CLI.
 *
 * The system prompt is passed through a temp file (--system-prompt-file)
 * to avoid Windows command-line length limits; the user prompt is piped
 * via stdin. The temp file is always cleaned up, even on failure.
 *
 * @param {object} opts
 * @param {string} [opts.system] - Optional system prompt.
 * @param {string} opts.prompt - User prompt, piped to stdin.
 * @param {string} [opts.model] - Optional model name (--model).
 * @returns {Promise<string>} Trimmed stdout from the CLI.
 * @throws {Error} If the CLI is missing (ENOENT) or exits non-zero.
 */
export async function callClaude({ system, prompt, model }) {
  const claude = await getClaude();
  const args = ['--print'];

  let sysFile = null;

  if (system) {
    sysFile = await writeTempFile(system);
    args.push('--system-prompt-file', sysFile);
  }

  if (model) {
    args.push('--model', model);
  }

  try {
    const result = await new Promise((resolve, reject) => {
      const proc = spawn(claude, args, {
        stdio: ['pipe', 'pipe', 'pipe'],
        cwd: tmpdir(), // avoid picking up CLAUDE.md from home/project dirs
      });

      let stdout = '';
      let stderr = '';

      proc.stdout.on('data', (data) => { stdout += data.toString(); });
      proc.stderr.on('data', (data) => { stderr += data.toString(); });

      proc.on('error', (err) => {
        if (err.code === 'ENOENT') {
          reject(new Error(
            'Claude Code CLI not found. Install it first:\n' +
            '  npm install -g @anthropic-ai/claude-code\n' +
            'Then log in: claude login'
          ));
        } else {
          reject(err);
        }
      });

      proc.on('close', (code) => {
        if (code !== 0) {
          const errMsg = stderr.trim() || stdout.trim() || '(no output)';
          reject(new Error(`Claude CLI exited with code ${code}: ${errMsg}`));
        } else {
          resolve(stdout.trim());
        }
      });

      // Fix: if the CLI exits before draining stdin (bad flags, not logged
      // in), the write raises EPIPE as an 'error' event on the stdin
      // stream; left unhandled, that event crashes the whole process.
      // Swallow it — the 'close' handler reports the real failure with
      // the CLI's own stderr.
      proc.stdin.on('error', () => {});

      // Pipe prompt via stdin
      proc.stdin.write(prompt);
      proc.stdin.end();
    });

    return result;

  } finally {
    // Clean up temp file
    if (sysFile) {
      await unlink(sysFile).catch(() => {});
    }
  }
}
|
|
125
|
+
|
|
126
|
+
/**
|
|
127
|
+
* Check that Claude CLI is available and logged in
|
|
128
|
+
*/
|
|
129
|
+
/**
 * Verify the Claude CLI is installed and responding.
 * Never throws — failures are reported in the returned object.
 * @returns {Promise<{ok: boolean, version?: string, error?: string}>}
 */
export async function checkCLI() {
  try {
    const claude = await getClaude();
    return await new Promise((resolve) => {
      const proc = spawn(claude, ['--version'], { stdio: ['pipe', 'pipe', 'pipe'] });
      let output = '';
      proc.stdout.on('data', (chunk) => { output += chunk.toString(); });
      proc.on('error', () => resolve({ ok: false, error: 'Claude Code CLI not found' }));
      proc.on('close', (code) => {
        if (code === 0) {
          resolve({ ok: true, version: output.trim() });
        } else {
          resolve({ ok: false, error: 'Claude Code CLI not responding' });
        }
      });
    });
  } catch (err) {
    return { ok: false, error: err.message };
  }
}
|
package/src/config.js
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Config management for AI Exodus
|
|
3
|
+
* Stores portal URL, credentials, and settings in ~/.exodus/config.json
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { readFile, writeFile, mkdir } from 'node:fs/promises';
|
|
7
|
+
import { existsSync } from 'node:fs';
|
|
8
|
+
import { join } from 'node:path';
|
|
9
|
+
|
|
10
|
+
// Config lives in ~/.exodus/config.json (USERPROFILE on Windows).
const HOME = process.env.USERPROFILE || process.env.HOME || '';
const CONFIG_DIR = join(HOME, '.exodus');
const CONFIG_FILE = join(CONFIG_DIR, 'config.json');

/**
 * Read the config file.
 * @returns {Promise<object>} Parsed config; {} when the file is absent
 *   or unreadable/corrupt.
 */
export async function loadConfig() {
  if (!existsSync(CONFIG_FILE)) return {};
  try {
    const contents = await readFile(CONFIG_FILE, 'utf-8');
    return JSON.parse(contents);
  } catch {
    // Treat a missing or corrupt config as empty rather than crashing.
    return {};
  }
}
|
|
23
|
+
|
|
24
|
+
/**
 * Write the full config object to ~/.exodus/config.json,
 * creating the directory first if needed.
 * @param {object} config
 */
export async function saveConfig(config) {
  await mkdir(CONFIG_DIR, { recursive: true });
  const serialized = JSON.stringify(config, null, 2);
  await writeFile(CONFIG_FILE, serialized, 'utf-8');
}
|
|
28
|
+
|
|
29
|
+
/**
 * Read a single config value.
 * @param {string} key
 * @returns {Promise<*>} The stored value, or undefined if unset.
 */
export async function getConfig(key) {
  const settings = await loadConfig();
  return settings[key];
}
|
|
33
|
+
|
|
34
|
+
/**
 * Set a single config value, persisting the whole config file.
 * @param {string} key
 * @param {*} value
 */
export async function setConfig(key, value) {
  const settings = await loadConfig();
  settings[key] = value;
  await saveConfig(settings);
}
|
|
39
|
+
|
|
40
|
+
export { CONFIG_DIR, CONFIG_FILE };
|
package/src/deploy.js
ADDED
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Deploy command — creates Cloudflare Worker + D1 for the user's portal
|
|
3
|
+
* Same pattern as Hearthline CLI deploy
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { spawn } from 'node:child_process';
|
|
7
|
+
import { existsSync } from 'node:fs';
|
|
8
|
+
import { readFile, writeFile, mkdir, copyFile } from 'node:fs/promises';
|
|
9
|
+
import { join, resolve, dirname } from 'node:path';
|
|
10
|
+
import { fileURLToPath } from 'node:url';
|
|
11
|
+
import { randomBytes } from 'node:crypto';
|
|
12
|
+
import { loadConfig, saveConfig } from './config.js';
|
|
13
|
+
|
|
14
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
15
|
+
const PORTAL_DIR = resolve(__dirname, '..', 'portal');
|
|
16
|
+
|
|
17
|
+
/**
 * Deploy the user's portal to Cloudflare: creates (or reuses) a D1
 * database, applies schema.sql statement-by-statement, writes a
 * wrangler.toml into a per-deployment directory, and deploys the worker.
 * Persists deployName/mcpSecret/dbId/dbName/portalUrl to
 * ~/.exodus/config.json so a rerun redeploys the same worker in place.
 *
 * Exits the process (code 1) when wrangler is missing, the portal source
 * is absent, or the new database's ID cannot be parsed.
 *
 * @param {object} options
 * @param {boolean} options.verbose - Stream wrangler output to the terminal.
 */
export async function deploy(options) {
  const { verbose } = options;

  console.log('');
  console.log(' ╔══════════════════════════════════════╗');
  console.log(' ║ AI EXODUS — Portal Deploy ║');
  console.log(' ╚══════════════════════════════════════╝');
  console.log('');

  // Check wrangler is available
  const wranglerOk = await checkCommand('npx', ['wrangler', '--version']);
  if (!wranglerOk) {
    console.error(' Error: Wrangler not found. Install it: npm install -g wrangler');
    console.error(' Then log in: npx wrangler login');
    process.exit(1);
  }

  // Check if portal source exists
  if (!existsSync(join(PORTAL_DIR, 'worker.js'))) {
    console.error(' Error: Portal source not found at ' + PORTAL_DIR);
    console.error(' Make sure ai-exodus-portal/ is alongside ai-exodus/');
    process.exit(1);
  }

  const config = await loadConfig();
  // A saved portalUrl means a previous deploy succeeded — redeploy in place.
  const isRedeployMode = !!config.portalUrl;

  if (isRedeployMode) {
    console.log(' Existing deployment detected: ' + config.portalUrl);
    console.log(' Redeploying with latest code...');
    console.log('');
  }

  // Generate deployment name or reuse existing
  // (random hex suffixes avoid collisions with other users' workers.dev names)
  const deployName = config.deployName || 'exodus-' + randomBytes(3).toString('hex');
  const mcpSecret = config.mcpSecret || 'exodus-' + randomBytes(8).toString('hex');
  const dbName = config.dbName || deployName + '-db';

  // Step 1: Create D1 database (skip if exists)
  if (!config.dbId) {
    console.log(' [1/4] Creating database...');
    const dbOutput = await runCommand('npx', ['wrangler', 'd1', 'create', dbName], { verbose });
    // Try both TOML format (old) and JSON format (new wrangler)
    const dbIdMatch = dbOutput.match(/database_id\s*=\s*"([^"]+)"/) ||
      dbOutput.match(/"database_id"\s*:\s*"([^"]+)"/);
    if (!dbIdMatch) {
      console.error(' Error: Could not parse database ID from wrangler output');
      console.error(dbOutput);
      process.exit(1);
    }
    config.dbId = dbIdMatch[1];
    config.dbName = dbName;
    console.log(' Database created: ' + dbName + ' (' + config.dbId + ')');
  } else {
    console.log(' [1/4] Database exists: ' + config.dbName);
  }

  // Step 2: Set up deploy directory
  console.log(' [2/4] Preparing deployment...');
  const deployDir = resolve(join(PORTAL_DIR, '.deploy-' + deployName));
  await mkdir(deployDir, { recursive: true });

  // Copy worker.js and schema.sql
  await copyFile(join(PORTAL_DIR, 'worker.js'), join(deployDir, 'worker.js'));
  await copyFile(join(PORTAL_DIR, 'schema.sql'), join(deployDir, 'schema.sql'));

  // Write wrangler.toml with actual values
  const wranglerToml = `name = "${deployName}"
main = "worker.js"
compatibility_date = "2024-12-01"

[[d1_databases]]
binding = "DB"
database_name = "${dbName}"
database_id = "${config.dbId}"

[vars]
MCP_SECRET = "${mcpSecret}"
`;
  await writeFile(join(deployDir, 'wrangler.toml'), wranglerToml, 'utf-8');

  // Step 3: Initialize database schema
  console.log(' [3/4] Initializing database schema...');
  try {
    const schemaContent = await readFile(join(PORTAL_DIR, 'schema.sql'), 'utf-8');
    // Split by semicolons and run each statement (wrangler --file can be finicky)
    // NOTE(review): a naive split breaks on semicolons inside string literals
    // or triggers — assumes schema.sql contains none; confirm against schema.
    const statements = schemaContent.split(';').map(s => s.trim()).filter(s => s.length > 5);
    for (const stmt of statements) {
      try {
        await runCommand('npx', ['wrangler', 'd1', 'execute', dbName, '--remote', '--command', stmt + ';'], { verbose, cwd: deployDir });
      } catch (err) {
        const msg = err.message || '';
        // Tolerate "already exists" and argument parsing issues with CREATE INDEX
        if (msg.includes('already exists') || msg.includes('Unknown arguments') || msg.includes('must provide')) {
          if (verbose) console.log(' (skipped: ' + msg.slice(0, 80) + ')');
        } else {
          // Non-fatal: keep applying remaining statements, warn the user.
          console.error(' Schema error: ' + msg.slice(0, 200));
          console.error(' Deploy may fail — check your database manually.');
        }
      }
    }
    console.log(' Schema applied.');
  } catch (err) {
    console.error(' Schema initialization failed: ' + err.message);
    console.error(' Try applying manually: npx wrangler d1 execute ' + dbName + ' --remote --file schema.sql');
  }

  // Step 4: Deploy worker
  console.log(' [4/4] Deploying portal...');
  const deployOutput = await runCommand('npx', ['wrangler', 'deploy'], { verbose, cwd: deployDir });

  // Extract URL from deploy output
  // NOTE(review): fallback URL omits the account subdomain
  // (<name>.<account>.workers.dev) — confirm it resolves when the regex misses.
  const urlMatch = deployOutput.match(/(https:\/\/[^\s]+\.workers\.dev)/);
  const portalUrl = urlMatch ? urlMatch[1] : `https://${deployName}.workers.dev`;

  // Save config
  config.deployName = deployName;
  config.mcpSecret = mcpSecret;
  config.portalUrl = portalUrl;
  await saveConfig(config);

  // Clean up deploy dir
  // Leave it for now — useful for redeployments

  console.log('');
  console.log(' ╔══════════════════════════════════════╗');
  console.log(' ║ Portal deployed! ║');
  console.log(' ╚══════════════════════════════════════╝');
  console.log('');
  console.log(' Portal URL: ' + portalUrl);
  console.log(' MCP Secret: ' + mcpSecret);
  console.log('');
  console.log(' Next steps:');
  console.log(' 1. Open ' + portalUrl + ' and set your password');
  console.log(' 2. Import your chat history:');
  console.log(' ai-exodus import conversations.json');
  console.log(' 3. Run analysis:');
  console.log(' ai-exodus analyze --passes all');
  console.log('');
  console.log(' MCP connector URL (for Claude):');
  console.log(' ' + portalUrl + '/mcp/' + mcpSecret + '/search?q=your+query');
  console.log('');
  console.log(' Config saved to ~/.exodus/config.json');
  console.log('');
}
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
// ── Helpers ──
|
|
165
|
+
|
|
166
|
+
/**
 * Run a command and report whether it exited successfully.
 * @param {string} cmd
 * @param {string[]} args
 * @returns {Promise<boolean>} true on exit code 0; false on any non-zero
 *   exit or when the command cannot be spawned at all.
 */
function checkCommand(cmd, args) {
  return new Promise((resolve) => {
    const child = spawn(cmd, args, { stdio: 'pipe', shell: true });
    child.on('error', () => resolve(false));
    child.on('close', (exitCode) => resolve(exitCode === 0));
  });
}
|
|
173
|
+
|
|
174
|
+
/**
 * Run a command and capture its output.
 * @param {string} cmd
 * @param {string[]} args
 * @param {{verbose?: boolean, cwd?: string}} [opts] - When verbose,
 *   mirrors output to the terminal while still capturing it.
 * @returns {Promise<string>} Captured stdout on success (exit code 0).
 * @throws {Error} stderr (falling back to stdout, then a generic message)
 *   when the command exits non-zero or fails to spawn.
 */
function runCommand(cmd, args, { verbose = false, cwd = undefined } = {}) {
  return new Promise((resolve, reject) => {
    const child = spawn(cmd, args, { stdio: ['pipe', 'pipe', 'pipe'], shell: true, cwd });
    let capturedOut = '';
    let capturedErr = '';

    child.stdout.on('data', (chunk) => {
      capturedOut += chunk.toString();
      if (verbose) process.stdout.write(chunk);
    });
    child.stderr.on('data', (chunk) => {
      capturedErr += chunk.toString();
      if (verbose) process.stderr.write(chunk);
    });

    child.on('error', reject);
    child.on('close', (exitCode) => {
      if (exitCode === 0) {
        resolve(capturedOut);
        return;
      }
      reject(new Error(capturedErr.trim() || capturedOut.trim() || `Command failed with code ${exitCode}`));
    });
  });
}
|