codedash-app 1.3.1 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +19 -0
- package/package.json +1 -1
- package/src/migrate.js +185 -0
package/bin/cli.js
CHANGED
|
@@ -2,6 +2,7 @@
|
|
|
2
2
|
|
|
3
3
|
const { loadSessions } = require('../src/data');
|
|
4
4
|
const { startServer } = require('../src/server');
|
|
5
|
+
const { exportArchive, importArchive } = require('../src/migrate');
|
|
5
6
|
|
|
6
7
|
const DEFAULT_PORT = 3847;
|
|
7
8
|
const args = process.argv.slice(2);
|
|
@@ -56,6 +57,22 @@ switch (command) {
|
|
|
56
57
|
break;
|
|
57
58
|
}
|
|
58
59
|
|
|
60
|
+
case 'export': {
|
|
61
|
+
const outPath = args[1] || `codedash-export-${new Date().toISOString().slice(0,10)}.tar.gz`;
|
|
62
|
+
exportArchive(outPath);
|
|
63
|
+
break;
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
case 'import': {
|
|
67
|
+
const archivePath = args[1];
|
|
68
|
+
if (!archivePath) {
|
|
69
|
+
console.error(' Usage: codedash import <archive.tar.gz>');
|
|
70
|
+
process.exit(1);
|
|
71
|
+
}
|
|
72
|
+
importArchive(archivePath);
|
|
73
|
+
break;
|
|
74
|
+
}
|
|
75
|
+
|
|
59
76
|
case 'version':
|
|
60
77
|
case '-v':
|
|
61
78
|
case '--version': {
|
|
@@ -75,6 +92,8 @@ switch (command) {
|
|
|
75
92
|
codedash run [port] [--no-browser] Start the dashboard server
|
|
76
93
|
codedash list [limit] List sessions in terminal
|
|
77
94
|
codedash stats Show session statistics
|
|
95
|
+
codedash export [file.tar.gz] Export all sessions to archive
|
|
96
|
+
codedash import <file.tar.gz> Import sessions from archive
|
|
78
97
|
codedash help Show this help
|
|
79
98
|
codedash version Show version
|
|
80
99
|
|
package/package.json
CHANGED
package/src/migrate.js
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
'use strict';

// Core Node built-ins used for archive creation/extraction and file walking.
const os = require('os');
const path = require('path');
const fs = require('fs');
const { execSync } = require('child_process');

// Root data directories for the two supported CLI tools, resolved
// relative to the current user's home directory.
const CLAUDE_DIR = path.join(os.homedir(), '.claude');
const CODEX_DIR = path.join(os.homedir(), '.codex');

|
11
|
+
/**
 * Export all local Claude/Codex session data into a gzipped tarball.
 *
 * Collects known data files under ~/.claude and ~/.codex (history,
 * settings, project sessions, session env, CLAUDE.md, codex config),
 * prints a summary, and writes the archive to `outPath`.
 *
 * @param {string} outPath - Destination archive path (relative or absolute).
 *                           Resolved against the current working directory.
 */
function exportArchive(outPath) {
  // Local require keeps this function self-contained. execFileSync takes
  // arguments as an array and spawns tar WITHOUT a shell, so user-supplied
  // paths (quotes, $(), backticks) cannot be interpreted as shell syntax —
  // the previous execSync string interpolation was command-injectable.
  const { execFileSync } = require('child_process');
  const absOut = path.resolve(outPath);
  const home = os.homedir();

  // Build the list of home-relative paths to include. Each candidate is
  // checked for existence first: tar errors out on a missing member, and
  // the old code pushed history.jsonl/settings.json unconditionally.
  const paths = [];

  if (fs.existsSync(CLAUDE_DIR)) {
    for (const rel of [
      '.claude/history.jsonl',
      '.claude/settings.json',
      '.claude/projects',
      '.claude/session-env',
      '.claude/CLAUDE.md',
    ]) {
      if (fs.existsSync(path.join(home, rel))) {
        paths.push(rel);
      }
    }
  }

  if (fs.existsSync(CODEX_DIR)) {
    for (const rel of [
      '.codex/history.jsonl',
      '.codex/sessions',
      '.codex/config.toml',
    ]) {
      if (fs.existsSync(path.join(home, rel))) {
        paths.push(rel);
      }
    }
  }

  if (paths.length === 0) {
    console.log(' Nothing to export. No ~/.claude or ~/.codex data found.');
    return;
  }

  // Tally file count and total bytes with a plain fs walk instead of
  // shelling out to `find | wc -l` and `du`. The old `du -sb || du -sk`
  // fallback silently mixed bytes with kilobytes on BSD/macOS.
  let totalSize = 0;
  let totalFiles = 0;
  const tally = (full) => {
    const stat = fs.statSync(full);
    if (stat.isDirectory()) {
      for (const entry of fs.readdirSync(full)) {
        tally(path.join(full, entry));
      }
    } else {
      totalFiles++;
      totalSize += stat.size;
    }
  };
  for (const p of paths) {
    tally(path.join(home, p));
  }

  console.log('');
  console.log(' \x1b[36m\x1b[1mCodeDash Export\x1b[0m');
  console.log(` Files: ${totalFiles}`);
  console.log(` Size: ${(totalSize / 1048576).toFixed(1)} MB uncompressed`);
  console.log(` Paths: ${paths.length} directories/files`);
  console.log(` Includes: ${[...new Set(paths.map((p) => p.split('/')[0]))].join(', ')}`);
  console.log('');
  console.log(' Creating archive...');

  // Create the tar.gz with home as the working directory so archive
  // members stay home-relative (portable across machines/usernames).
  try {
    execFileSync('tar', ['-czf', absOut, ...paths], {
      cwd: home,
      stdio: 'pipe',
    });
    const archiveSize = fs.statSync(absOut).size;
    const sizeMB = (archiveSize / 1048576).toFixed(1);
    console.log(` \x1b[32mDone!\x1b[0m ${absOut} (${sizeMB} MB)`);
    console.log('');
    console.log(' To import on another machine:');
    console.log(` \x1b[2mnpx codedash-app import ${path.basename(absOut)}\x1b[0m`);
    console.log('');
  } catch (e) {
    console.error(' \x1b[31mFailed to create archive:\x1b[0m', e.message);
    process.exit(1);
  }
}
|
|
118
|
+
|
|
119
|
+
/**
 * Import session data from a CodeDash export archive into the home
 * directory, merging (not clobbering) ~/.claude/history.jsonl.
 *
 * @param {string} archivePath - Path to a .tar.gz previously produced by
 *                               exportArchive. Exits the process with
 *                               code 1 on any failure.
 */
function importArchive(archivePath) {
  // Local require keeps this function self-contained. execFileSync spawns
  // tar without a shell, so the archive path cannot inject shell syntax
  // (the previous execSync string interpolation was injectable, and also
  // piped through `| head -20` in a shell).
  const { execFileSync } = require('child_process');
  const absPath = path.resolve(archivePath);

  if (!fs.existsSync(absPath)) {
    console.error(` File not found: ${absPath}`);
    process.exit(1);
  }

  console.log('');
  console.log(' \x1b[36m\x1b[1mCodeDash Import\x1b[0m');
  console.log(` Archive: ${absPath}`);

  // List the archive contents up front. Wrapped in try/catch so a corrupt
  // or non-gzip file produces a clean error instead of an uncaught throw.
  let entries;
  try {
    entries = execFileSync('tar', ['-tzf', absPath], { encoding: 'utf8' })
      .trim()
      .split('\n')
      .filter(Boolean);
  } catch (e) {
    console.error(' \x1b[31mNot a readable archive:\x1b[0m', e.message);
    process.exit(1);
  }

  const dirs = [...new Set(entries.map((l) => l.split('/')[0]))];
  console.log(` Contains: ${dirs.join(', ')}`);
  console.log(` Files: ${entries.length}`);
  console.log('');

  const historyFile = path.join(CLAUDE_DIR, 'history.jsonl');
  const hasExisting =
    fs.existsSync(historyFile) ||
    fs.existsSync(path.join(CODEX_DIR, 'history.jsonl'));

  if (hasExisting) {
    console.log(' \x1b[33mWarning:\x1b[0m Existing session data found.');
    console.log(' Import will \x1b[1mmerge\x1b[0m — existing files will be overwritten.');
    console.log('');
  }

  // Snapshot any existing Claude history BEFORE extraction: tar overwrites
  // history.jsonl in place, and the old code only deduplicated within the
  // imported file — local entries were silently lost despite the "merge"
  // message above.
  const existingLines = fs.existsSync(historyFile)
    ? fs.readFileSync(historyFile, 'utf8').split('\n').filter(Boolean)
    : [];

  try {
    // Extract into the home directory (archive members are home-relative).
    execFileSync('tar', ['-xzf', absPath], { cwd: os.homedir(), stdio: 'pipe' });

    // Merge existing + imported history, deduplicating by
    // sessionId:timestamp (falling back to the raw line for entries that
    // fail to parse as JSON). Existing entries win ties by coming first.
    if (fs.existsSync(historyFile)) {
      const importedLines = fs
        .readFileSync(historyFile, 'utf8')
        .split('\n')
        .filter(Boolean);
      const seen = new Set();
      const merged = [];
      for (const line of [...existingLines, ...importedLines]) {
        let key = line;
        try {
          const d = JSON.parse(line);
          key = d.sessionId + ':' + d.timestamp;
        } catch {
          // Unparseable line: dedupe on raw content instead.
        }
        if (!seen.has(key)) {
          seen.add(key);
          merged.push(line);
        }
      }
      fs.writeFileSync(historyFile, merged.join('\n') + '\n');
    }

    console.log(' \x1b[32mImport complete!\x1b[0m');
    console.log(' Run \x1b[2mcodedash run\x1b[0m to see your sessions.');
    console.log('');
  } catch (e) {
    console.error(' \x1b[31mFailed to import:\x1b[0m', e.message);
    process.exit(1);
  }
}
|
|
184
|
+
|
|
185
|
+
// Public API consumed by bin/cli.js for the `export` / `import` commands.
module.exports = { exportArchive, importArchive };
|