codedash-app 1.3.0 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -13
- package/bin/cli.js +19 -0
- package/package.json +1 -1
- package/src/frontend/app.js +16 -12
- package/src/migrate.js +185 -0
package/README.md
CHANGED
|
@@ -2,6 +2,8 @@
|
|
|
2
2
|
|
|
3
3
|
Browser dashboard for Claude Code & Codex sessions. View, search, resume, and manage all your AI coding sessions.
|
|
4
4
|
|
|
5
|
+
[Russian / Русский](docs/README_RU.md) | [Chinese / 中文](docs/README_ZH.md)
|
|
6
|
+
|
|
5
7
|
https://github.com/user-attachments/assets/15c45659-365b-49f8-86a3-9005fa155ca6
|
|
6
8
|
|
|
7
9
|
  
|
|
@@ -45,27 +47,18 @@ npx codedash-app stats # show statistics
|
|
|
45
47
|
- Related git commits shown per session
|
|
46
48
|
- Auto-update notifications
|
|
47
49
|
|
|
48
|
-
**Themes
|
|
49
|
-
- Dark (default), Light, System
|
|
50
|
+
**Themes**: Dark (default), Light, System
|
|
50
51
|
|
|
51
|
-
**Keyboard Shortcuts
|
|
52
|
-
- `/` focus search, `j/k` navigate, `Enter` open
|
|
53
|
-
- `x` star, `d` delete, `s` select mode, `g` toggle groups
|
|
54
|
-
- `r` refresh, `Escape` close panels
|
|
52
|
+
**Keyboard Shortcuts**: `/` search, `j/k` navigate, `Enter` open, `x` star, `d` delete, `s` select, `g` group, `r` refresh, `Esc` close
|
|
55
53
|
|
|
56
54
|
## How It Works
|
|
57
55
|
|
|
58
|
-
Reads session data from `~/.claude/` and `~/.codex
|
|
59
|
-
- `history.jsonl` — session index
|
|
60
|
-
- `projects/*/<session-id>.jsonl` — conversation data
|
|
61
|
-
- `sessions/` — Codex session files
|
|
62
|
-
|
|
63
|
-
Zero dependencies. Everything runs on `localhost`.
|
|
56
|
+
Reads session data from `~/.claude/` and `~/.codex/`. Zero dependencies. Everything runs on `localhost`.
|
|
64
57
|
|
|
65
58
|
## Requirements
|
|
66
59
|
|
|
67
60
|
- Node.js >= 16
|
|
68
|
-
- Claude Code or Codex CLI
|
|
61
|
+
- Claude Code or Codex CLI
|
|
69
62
|
- macOS / Linux / Windows
|
|
70
63
|
|
|
71
64
|
## License
|
package/bin/cli.js
CHANGED
|
@@ -2,6 +2,7 @@
|
|
|
2
2
|
|
|
3
3
|
const { loadSessions } = require('../src/data');
|
|
4
4
|
const { startServer } = require('../src/server');
|
|
5
|
+
const { exportArchive, importArchive } = require('../src/migrate');
|
|
5
6
|
|
|
6
7
|
const DEFAULT_PORT = 3847;
|
|
7
8
|
const args = process.argv.slice(2);
|
|
@@ -56,6 +57,22 @@ switch (command) {
|
|
|
56
57
|
break;
|
|
57
58
|
}
|
|
58
59
|
|
|
60
|
+
case 'export': {
|
|
61
|
+
const outPath = args[1] || `codedash-export-${new Date().toISOString().slice(0,10)}.tar.gz`;
|
|
62
|
+
exportArchive(outPath);
|
|
63
|
+
break;
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
case 'import': {
|
|
67
|
+
const archivePath = args[1];
|
|
68
|
+
if (!archivePath) {
|
|
69
|
+
console.error(' Usage: codedash import <archive.tar.gz>');
|
|
70
|
+
process.exit(1);
|
|
71
|
+
}
|
|
72
|
+
importArchive(archivePath);
|
|
73
|
+
break;
|
|
74
|
+
}
|
|
75
|
+
|
|
59
76
|
case 'version':
|
|
60
77
|
case '-v':
|
|
61
78
|
case '--version': {
|
|
@@ -75,6 +92,8 @@ switch (command) {
|
|
|
75
92
|
codedash run [port] [--no-browser] Start the dashboard server
|
|
76
93
|
codedash list [limit] List sessions in terminal
|
|
77
94
|
codedash stats Show session statistics
|
|
95
|
+
codedash export [file.tar.gz] Export all sessions to archive
|
|
96
|
+
codedash import <file.tar.gz> Import sessions from archive
|
|
78
97
|
codedash help Show this help
|
|
79
98
|
codedash version Show version
|
|
80
99
|
|
package/package.json
CHANGED
package/src/frontend/app.js
CHANGED
|
@@ -602,10 +602,16 @@ function renderProjects(container, sessions) {
|
|
|
602
602
|
|
|
603
603
|
// ── Activity Heatmap ───────────────────────────────────────────
|
|
604
604
|
|
|
605
|
+
// Format a Date as a local-calendar YYYY-MM-DD string (not UTC —
// toISOString would shift the day near midnight in non-UTC timezones).
function localISO(date) {
  var month = String(date.getMonth() + 1);
  var day = String(date.getDate());
  if (month.length < 2) month = '0' + month;
  if (day.length < 2) day = '0' + day;
  return [date.getFullYear(), month, day].join('-');
}
|
|
611
|
+
|
|
605
612
|
function renderHeatmap(container) {
|
|
606
613
|
var now = new Date();
|
|
607
|
-
var oneYearAgo = new Date(now);
|
|
608
|
-
oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
|
|
614
|
+
var oneYearAgo = new Date(now.getFullYear() - 1, now.getMonth(), now.getDate());
|
|
609
615
|
|
|
610
616
|
// Count sessions per day
|
|
611
617
|
var counts = {};
|
|
@@ -615,17 +621,16 @@ function renderHeatmap(container) {
|
|
|
615
621
|
counts[d] = (counts[d] || 0) + 1;
|
|
616
622
|
});
|
|
617
623
|
|
|
618
|
-
// Build day array
|
|
624
|
+
// Build day array — start from Sunday before oneYearAgo, end on Saturday after today
|
|
619
625
|
var days = [];
|
|
620
626
|
var d = new Date(oneYearAgo);
|
|
621
|
-
|
|
622
|
-
d.setDate(d.getDate() - d.getDay());
|
|
627
|
+
d.setDate(d.getDate() - d.getDay()); // align to Sunday
|
|
623
628
|
|
|
624
629
|
var endDate = new Date(now);
|
|
625
|
-
endDate.setDate(endDate.getDate() + (6 - endDate.getDay())); //
|
|
630
|
+
endDate.setDate(endDate.getDate() + (6 - endDate.getDay())); // align to Saturday
|
|
626
631
|
|
|
627
632
|
while (d <= endDate) {
|
|
628
|
-
var iso = d
|
|
633
|
+
var iso = localISO(d);
|
|
629
634
|
var count = counts[iso] || 0;
|
|
630
635
|
var level = 0;
|
|
631
636
|
if (count >= 6) level = 4;
|
|
@@ -633,8 +638,7 @@ function renderHeatmap(container) {
|
|
|
633
638
|
else if (count >= 2) level = 2;
|
|
634
639
|
else if (count >= 1) level = 1;
|
|
635
640
|
days.push({ date: iso, count: count, level: level, day: d.getDay() });
|
|
636
|
-
d = new Date(d);
|
|
637
|
-
d.setDate(d.getDate() + 1);
|
|
641
|
+
d = new Date(d.getFullYear(), d.getMonth(), d.getDate() + 1);
|
|
638
642
|
}
|
|
639
643
|
|
|
640
644
|
// Build weeks (columns)
|
|
@@ -682,10 +686,10 @@ function renderHeatmap(container) {
|
|
|
682
686
|
var streak = 0;
|
|
683
687
|
var checkDate = new Date(now);
|
|
684
688
|
while (true) {
|
|
685
|
-
var
|
|
686
|
-
if (counts[
|
|
689
|
+
var ciso = localISO(checkDate);
|
|
690
|
+
if (counts[ciso] && counts[ciso] > 0) {
|
|
687
691
|
streak++;
|
|
688
|
-
checkDate.
|
|
692
|
+
checkDate = new Date(checkDate.getFullYear(), checkDate.getMonth(), checkDate.getDate() - 1);
|
|
689
693
|
} else {
|
|
690
694
|
break;
|
|
691
695
|
}
|
package/src/migrate.js
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const os = require('os');
|
|
6
|
+
const { execSync } = require('child_process');
|
|
7
|
+
|
|
8
|
+
const CLAUDE_DIR = path.join(os.homedir(), '.claude');
|
|
9
|
+
const CODEX_DIR = path.join(os.homedir(), '.codex');
|
|
10
|
+
|
|
11
|
+
// Count regular files under a path, recursing into directories.
// Plain fs walk — portable, unlike shelling out to `find | wc -l`.
function countFiles(full) {
  const stat = fs.statSync(full);
  if (!stat.isDirectory()) return 1;
  let total = 0;
  for (const entry of fs.readdirSync(full)) {
    total += countFiles(path.join(full, entry));
  }
  return total;
}

/**
 * Export Claude Code / Codex session data from the home directory into a
 * gzipped tar archive.
 *
 * Only paths that actually exist are included — `tar` aborts on a missing
 * entry, and the previous version unconditionally added
 * `.claude/history.jsonl` and `.claude/settings.json` whenever ~/.claude
 * existed, which broke the export if either file was absent.
 *
 * @param {string} outPath - destination archive path (relative or absolute)
 * @returns {void} prints a summary; exits with code 1 if archiving fails
 */
function exportArchive(outPath) {
  const absOut = path.resolve(outPath);

  // Home-relative paths to pack into the archive.
  const paths = [];

  // Claude data: session index, settings, global CLAUDE.md, project
  // session files, and per-session env.
  if (fs.existsSync(CLAUDE_DIR)) {
    for (const rel of ['history.jsonl', 'settings.json', 'CLAUDE.md', 'projects', 'session-env']) {
      if (fs.existsSync(path.join(CLAUDE_DIR, rel))) {
        paths.push('.claude/' + rel);
      }
    }
    // NOTE: per-project memory/ dirs live under .claude/projects and are
    // already covered by that entry — no separate scan needed (the old
    // projectMemoryDirs loop collected them but never used the result).
  }

  // Codex data: session index, session files, config.
  if (fs.existsSync(CODEX_DIR)) {
    for (const rel of ['history.jsonl', 'sessions', 'config.toml']) {
      if (fs.existsSync(path.join(CODEX_DIR, rel))) {
        paths.push('.codex/' + rel);
      }
    }
  }

  if (paths.length === 0) {
    console.log(' Nothing to export. No ~/.claude or ~/.codex data found.');
    return;
  }

  // Count files with a JS walk instead of `find | wc -l` (portable), and
  // drop the old `du -sb || du -sk` size probe entirely — its result was
  // never displayed, and the `-sk` fallback reported kilobytes while being
  // summed as bytes.
  let totalFiles = 0;
  for (const p of paths) {
    totalFiles += countFiles(path.join(os.homedir(), p));
  }

  console.log('');
  console.log(' \x1b[36m\x1b[1mCodeDash Export\x1b[0m');
  console.log(` Files: ${totalFiles}`);
  console.log(` Paths: ${paths.length} directories/files`);
  console.log(` Includes: ${paths.map(p => p.split('/')[0]).filter((v,i,a) => a.indexOf(v) === i).join(', ')}`);
  console.log('');
  console.log(' Creating archive...');

  // Create the tar.gz from the home directory so entries stay
  // home-relative. NOTE(review): absOut is interpolated into a shell
  // command — a path containing `"` or `$` would break or expand; consider
  // child_process.spawnSync with an argv array if that ever matters.
  const pathArgs = paths.map(p => `"${p}"`).join(' ');
  try {
    execSync(`cd "${os.homedir()}" && tar -czf "${absOut}" ${pathArgs}`, {
      stdio: 'pipe',
    });
    const archiveSize = fs.statSync(absOut).size;
    const sizeMB = (archiveSize / 1048576).toFixed(1);
    console.log(` \x1b[32mDone!\x1b[0m ${absOut} (${sizeMB} MB)`);
    console.log('');
    console.log(' To import on another machine:');
    console.log(` \x1b[2mnpx codedash-app import ${path.basename(absOut)}\x1b[0m`);
    console.log('');
  } catch (e) {
    console.error(' \x1b[31mFailed to create archive:\x1b[0m', e.message);
    process.exit(1);
  }
}
|
|
118
|
+
|
|
119
|
+
/**
 * Import session data from an archive produced by exportArchive into the
 * user's home directory.
 *
 * Extracts over ~/.claude / ~/.codex (existing files with matching names
 * are overwritten), then deduplicates ~/.claude/history.jsonl lines by
 * sessionId+timestamp so repeated imports do not inflate the index.
 *
 * @param {string} archivePath - path to the .tar.gz archive
 * @returns {void} exits with code 1 when the archive is missing, unreadable,
 *   or extraction fails
 */
function importArchive(archivePath) {
  const absPath = path.resolve(archivePath);

  if (!fs.existsSync(absPath)) {
    console.error(` File not found: ${absPath}`);
    process.exit(1);
  }

  console.log('');
  console.log(' \x1b[36m\x1b[1mCodeDash Import\x1b[0m');
  console.log(` Archive: ${absPath}`);

  // List the whole archive with a single tar call. The previous
  // `tar -tzf | head -20` relied on `head` (absent on Windows) and capped
  // the reported file count at 20; slicing in JS fixes both. Wrapping the
  // call in try/catch also turns a corrupt archive into a clean error
  // instead of an uncaught throw.
  let entries;
  try {
    const listing = execSync(`tar -tzf "${absPath}"`, { encoding: 'utf8' }).trim();
    entries = listing.split('\n');
  } catch (e) {
    console.error(' \x1b[31mFailed to read archive:\x1b[0m', e.message);
    process.exit(1);
  }
  const dirs = entries.map(l => l.split('/')[0]).filter((v,i,a) => a.indexOf(v) === i);

  console.log(` Contains: ${dirs.join(', ')}`);
  console.log(` Files: ${entries.length}`);
  console.log('');

  // Warn when extraction will land on top of existing session data.
  const hasExisting = fs.existsSync(path.join(CLAUDE_DIR, 'history.jsonl')) ||
                      fs.existsSync(path.join(CODEX_DIR, 'history.jsonl'));

  if (hasExisting) {
    console.log(' \x1b[33mWarning:\x1b[0m Existing session data found.');
    console.log(' Import will \x1b[1mmerge\x1b[0m — existing files will be overwritten.');
    console.log('');
  }

  // Extract relative to the home directory (archive entries are
  // home-relative, e.g. ".claude/projects/...").
  try {
    execSync(`cd "${os.homedir()}" && tar -xzf "${absPath}"`, { stdio: 'pipe' });

    // Deduplicate history.jsonl by sessionId+timestamp. Lines that fail to
    // JSON-parse are kept verbatim (best effort — don't drop user data).
    const importedHistory = path.join(CLAUDE_DIR, 'history.jsonl');
    if (fs.existsSync(importedHistory)) {
      const lines = fs.readFileSync(importedHistory, 'utf8').split('\n').filter(Boolean);
      const seen = new Set();
      const deduped = [];
      for (const line of lines) {
        try {
          const d = JSON.parse(line);
          const key = d.sessionId + ':' + d.timestamp;
          if (!seen.has(key)) {
            seen.add(key);
            deduped.push(line);
          }
        } catch {
          deduped.push(line);
        }
      }
      fs.writeFileSync(importedHistory, deduped.join('\n') + '\n');
    }

    console.log(' \x1b[32mImport complete!\x1b[0m');
    console.log(' Run \x1b[2mcodedash run\x1b[0m to see your sessions.');
    console.log('');
  } catch (e) {
    console.error(' \x1b[31mFailed to import:\x1b[0m', e.message);
    process.exit(1);
  }
}
|
|
184
|
+
|
|
185
|
+
module.exports = { exportArchive, importArchive };
|