@sooink/ai-session-tidy 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.prettierrc +7 -0
- package/LICENSE +21 -0
- package/README.ko.md +171 -0
- package/README.md +169 -0
- package/assets/demo-interactive.gif +0 -0
- package/assets/demo.gif +0 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +1917 -0
- package/dist/index.js.map +1 -0
- package/eslint.config.js +29 -0
- package/package.json +54 -0
- package/src/cli.ts +21 -0
- package/src/commands/clean.ts +335 -0
- package/src/commands/config.ts +144 -0
- package/src/commands/list.ts +86 -0
- package/src/commands/scan.ts +200 -0
- package/src/commands/watch.ts +359 -0
- package/src/core/cleaner.test.ts +125 -0
- package/src/core/cleaner.ts +181 -0
- package/src/core/service.ts +236 -0
- package/src/core/trash.test.ts +100 -0
- package/src/core/trash.ts +40 -0
- package/src/core/watcher.test.ts +210 -0
- package/src/core/watcher.ts +194 -0
- package/src/index.ts +5 -0
- package/src/scanners/claude-code.test.ts +112 -0
- package/src/scanners/claude-code.ts +452 -0
- package/src/scanners/cursor.test.ts +140 -0
- package/src/scanners/cursor.ts +133 -0
- package/src/scanners/index.ts +39 -0
- package/src/scanners/types.ts +34 -0
- package/src/utils/config.ts +132 -0
- package/src/utils/logger.ts +29 -0
- package/src/utils/paths.test.ts +95 -0
- package/src/utils/paths.ts +92 -0
- package/src/utils/size.test.ts +80 -0
- package/src/utils/size.ts +50 -0
- package/tsconfig.json +28 -0
- package/tsup.config.ts +11 -0
- package/vitest.config.ts +14 -0
package/src/core/watcher.test.ts
ADDED
@@ -0,0 +1,210 @@
+import { describe, expect, it, vi, beforeEach, afterEach } from 'vitest';
+import { mkdir, rm, writeFile } from 'fs/promises';
+import { join } from 'path';
+import { tmpdir } from 'os';
+
+import { Watcher, WatcherOptions, WatchEvent } from './watcher.js';
+
+describe('Watcher', () => {
+  const testDir = join(tmpdir(), 'watchertest' + Date.now());
+
+  beforeEach(async () => {
+    await mkdir(testDir, { recursive: true });
+  });
+
+  afterEach(async () => {
+    await rm(testDir, { recursive: true, force: true });
+  });
+
+  describe('watch', () => {
+    it('detects project folder deletion', async () => {
+      const projectDir = join(testDir, 'myproject');
+      await mkdir(projectDir);
+
+      const allEvents: WatchEvent[] = [];
+      const watcher = new Watcher({
+        watchPaths: [testDir],
+        delayMs: 50, // Short delay for testing
+        debounceMs: 50, // Short debounce for testing
+        onDelete: (events) => allEvents.push(...events),
+      });
+
+      watcher.start();
+
+      // Wait for watcher initialization
+      await new Promise((resolve) => setTimeout(resolve, 100));
+
+      // Delete project
+      await rm(projectDir, { recursive: true });
+
+      // Wait for event detection (delay + debounce)
+      await new Promise((resolve) => setTimeout(resolve, 300));
+
+      watcher.stop();
+
+      expect(allEvents.length).toBeGreaterThanOrEqual(1);
+      expect(allEvents[0].path).toContain('myproject');
+    });
+
+    it('executes cleanup after delay', async () => {
+      const projectDir = join(testDir, 'delayproject');
+      await mkdir(projectDir);
+
+      let cleanupCalled = false;
+      const watcher = new Watcher({
+        watchPaths: [testDir],
+        delayMs: 100,
+        debounceMs: 50,
+        onDelete: () => {
+          cleanupCalled = true;
+        },
+      });
+
+      watcher.start();
+
+      // Wait for watcher initialization
+      await new Promise((resolve) => setTimeout(resolve, 100));
+
+      await rm(projectDir, { recursive: true });
+
+      // Not called before delay
+      await new Promise((resolve) => setTimeout(resolve, 50));
+      expect(cleanupCalled).toBe(false);
+
+      // Called after delay + debounce
+      await new Promise((resolve) => setTimeout(resolve, 200));
+      expect(cleanupCalled).toBe(true);
+
+      watcher.stop();
+    });
+
+    it('cancels cleanup if recovered during delay', async () => {
+      const projectDir = join(testDir, 'recoveryproject');
+      await mkdir(projectDir);
+
+      let cleanupCalled = false;
+      const watcher = new Watcher({
+        watchPaths: [testDir],
+        delayMs: 200,
+        debounceMs: 50,
+        onDelete: () => {
+          cleanupCalled = true;
+        },
+      });
+
+      watcher.start();
+
+      // Wait for watcher initialization
+      await new Promise((resolve) => setTimeout(resolve, 100));
+
+      // Delete then immediately recover
+      await rm(projectDir, { recursive: true });
+      await new Promise((resolve) => setTimeout(resolve, 50));
+      await mkdir(projectDir);
+
+      // Not called even after delay + debounce
+      await new Promise((resolve) => setTimeout(resolve, 300));
+      expect(cleanupCalled).toBe(false);
+
+      watcher.stop();
+    });
+
+    it('does not detect events after stop', async () => {
+      const projectDir = join(testDir, 'stoptest');
+      await mkdir(projectDir);
+
+      const allEvents: WatchEvent[] = [];
+      const watcher = new Watcher({
+        watchPaths: [testDir],
+        delayMs: 50,
+        debounceMs: 50,
+        onDelete: (events) => allEvents.push(...events),
+      });
+
+      watcher.start();
+      // Wait for watcher initialization then stop
+      await new Promise((resolve) => setTimeout(resolve, 100));
+      watcher.stop();
+
+      await rm(projectDir, { recursive: true });
+      await new Promise((resolve) => setTimeout(resolve, 200));
+
+      expect(allEvents).toHaveLength(0);
+    });
+
+    it('debounces multiple folder deletions', async () => {
+      const dir1 = join(testDir, 'project1');
+      const dir2 = join(testDir, 'project2');
+      await mkdir(dir1);
+      await mkdir(dir2);
+
+      let callCount = 0;
+      let receivedEvents: WatchEvent[] = [];
+      const watcher = new Watcher({
+        watchPaths: [testDir],
+        delayMs: 50,
+        debounceMs: 100, // Debounce window
+        onDelete: (events) => {
+          callCount++;
+          receivedEvents = events;
+        },
+      });
+
+      watcher.start();
+      await new Promise((resolve) => setTimeout(resolve, 100));
+
+      // Delete two folders consecutively
+      await rm(dir1, { recursive: true });
+      await rm(dir2, { recursive: true });
+
+      // Wait for delay + debounce
+      await new Promise((resolve) => setTimeout(resolve, 300));
+
+      watcher.stop();
+
+      // Should be called only once
+      expect(callCount).toBe(1);
+      // Two events delivered as a batch
+      expect(receivedEvents.length).toBe(2);
+    });
+  });
+
+  describe('isWatching', () => {
+    it('returns false before start', () => {
+      const watcher = new Watcher({
+        watchPaths: [testDir],
+        delayMs: 100,
+        debounceMs: 50,
+        onDelete: () => {},
+      });
+
+      expect(watcher.isWatching()).toBe(false);
+    });
+
+    it('returns true after start', () => {
+      const watcher = new Watcher({
+        watchPaths: [testDir],
+        delayMs: 100,
+        debounceMs: 50,
+        onDelete: () => {},
+      });
+
+      watcher.start();
+      expect(watcher.isWatching()).toBe(true);
+      watcher.stop();
+    });
+
+    it('returns false after stop', () => {
+      const watcher = new Watcher({
+        watchPaths: [testDir],
+        delayMs: 100,
+        debounceMs: 50,
+        onDelete: () => {},
+      });
+
+      watcher.start();
+      watcher.stop();
+      expect(watcher.isWatching()).toBe(false);
+    });
+  });
+});
package/src/core/watcher.ts
ADDED
@@ -0,0 +1,194 @@
+import { watch, FSWatcher } from 'chokidar';
+import { access } from 'fs/promises';
+
+export interface WatchEvent {
+  path: string;
+  timestamp: Date;
+}
+
+export interface WatcherOptions {
+  watchPaths: string[];
+  /** Delay before cleanup after deletion detected (allows recovery) */
+  delayMs: number;
+  /** Debounce time to batch multiple delete events (default: 10 seconds) */
+  debounceMs?: number;
+  depth?: number;
+  /** Callback to handle batched delete events */
+  onDelete: (events: WatchEvent[]) => void;
+}
+
+/**
+ * Watcher that monitors project folder deletions and invokes cleanup callback
+ *
+ * ## Event Processing Flow
+ *
+ * When a folder is deleted, the OS generates individual events for each subfolder:
+ * ```
+ * rm -rf /project
+ *   → unlinkDir: /project/frontend
+ *   → unlinkDir: /project/backend
+ *   → unlinkDir: /project
+ * ```
+ *
+ * Running scan/cleanup for each event would be inefficient,
+ * so we use a two-stage delay mechanism:
+ *
+ * 1. **Per-path delay (delayMs)**: Provides recovery opportunity (default 5 min)
+ *    - If folder is restored during delay, cleanup is cancelled
+ *
+ * 2. **Debounce (debounceMs)**: Batches multiple events together (default 10 sec)
+ *    - After 10 seconds with no new events, batch is executed
+ *    - Scan/cleanup runs only once
+ *
+ * ## Timeline Example
+ *
+ * ```
+ * T+0s: /project/frontend deletion detected → 5min timer starts
+ * T+0.1s: /project/backend deletion detected → 5min timer starts
+ * T+0.2s: /project deletion detected → 5min timer starts
+ * T+5m: /project/frontend timer complete → add to batch, 10sec debounce starts
+ * T+5m0.1s: /project/backend timer complete → add to batch, debounce reset
+ * T+5m0.2s: /project timer complete → add to batch, debounce reset
+ * T+5m10.2s: debounce complete → onDelete([3 events]) → single scan execution
+ * ```
+ */
+export class Watcher {
+  private readonly options: WatcherOptions;
+  private readonly debounceMs: number;
+  private watcher: FSWatcher | null = null;
+  /** Per-path delay timers */
+  private pendingDeletes: Map<string, NodeJS.Timeout> = new Map();
+  /** Events waiting for debounce */
+  private batchedEvents: WatchEvent[] = [];
+  /** Debounce timer */
+  private debounceTimer: NodeJS.Timeout | null = null;
+
+  constructor(options: WatcherOptions) {
+    this.options = options;
+    this.debounceMs = options.debounceMs ?? 10000; // default 10 seconds
+  }
+
+  start(): void {
+    if (this.watcher) return;
+
+    this.watcher = watch(this.options.watchPaths, {
+      ignoreInitial: true,
+      persistent: true,
+      depth: this.options.depth ?? 3,
+    });
+
+    this.watcher.on('unlinkDir', (path) => {
+      this.handleDelete(path);
+    });
+
+    this.watcher.on('addDir', (path) => {
+      this.handleRecovery(path);
+    });
+  }
+
+  stop(): void {
+    if (!this.watcher) return;
+
+    this.watcher.close();
+    this.watcher = null;
+
+    // Cancel all pending timers
+    for (const timeout of this.pendingDeletes.values()) {
+      clearTimeout(timeout);
+    }
+    this.pendingDeletes.clear();
+
+    // Cancel debounce timer
+    if (this.debounceTimer) {
+      clearTimeout(this.debounceTimer);
+      this.debounceTimer = null;
+    }
+    this.batchedEvents = [];
+  }
+
+  isWatching(): boolean {
+    return this.watcher !== null;
+  }
+
+  /**
+   * Handle folder deletion event
+   *
+   * Stage 1: Set per-path delay timer
+   * - Don't process immediately to allow recovery
+   * - Add to batch if still deleted after delay
+   */
+  private handleDelete(path: string): void {
+    // Ignore if already pending (prevent duplicate events)
+    if (this.pendingDeletes.has(path)) return;
+
+    const timeout = setTimeout(async () => {
+      // Verify path is still deleted after delay
+      const stillDeleted = !(await this.pathExists(path));
+
+      if (stillDeleted) {
+        // Move to stage 2: add to batch
+        this.addToBatch({
+          path,
+          timestamp: new Date(),
+        });
+      }
+
+      this.pendingDeletes.delete(path);
+    }, this.options.delayMs);
+
+    this.pendingDeletes.set(path, timeout);
+  }
+
+  /**
+   * Add event to batch and reset debounce timer
+   *
+   * Stage 2: Debounce
+   * - Reset timer on each new event
+   * - Execute batch when no new events for debounce period
+   * - This combines multiple subfolder deletions into single cleanup
+   */
+  private addToBatch(event: WatchEvent): void {
+    this.batchedEvents.push(event);
+
+    // Reset debounce timer (extend wait time on new event)
+    if (this.debounceTimer) {
+      clearTimeout(this.debounceTimer);
+    }
+
+    this.debounceTimer = setTimeout(() => {
+      this.flushBatch();
+    }, this.debounceMs);
+  }
+
+  /**
+   * Deliver batched events to callback
+   * - Scan/cleanup runs only once
+   */
+  private flushBatch(): void {
+    if (this.batchedEvents.length === 0) return;
+
+    const events = [...this.batchedEvents];
+    this.batchedEvents = [];
+    this.debounceTimer = null;
+
+    this.options.onDelete(events);
+  }
+
+  private handleRecovery(path: string): void {
+    const timeout = this.pendingDeletes.get(path);
+
+    if (timeout) {
+      clearTimeout(timeout);
+      this.pendingDeletes.delete(path);
+    }
+  }
+
+  private async pathExists(path: string): Promise<boolean> {
+    try {
+      await access(path);
+      return true;
+    } catch {
+      return false;
+    }
+  }
+}
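
For orientation, here is a minimal sketch of how a caller might wire up the Watcher shown above. The watch path and the body of the onDelete callback are illustrative assumptions, not code from this package; only the Watcher/WatchEvent API comes from the file in this diff.

```ts
import { homedir } from 'os';
import { join } from 'path';

import { Watcher, WatchEvent } from './watcher.js';

// Hypothetical wiring for illustration only; the watch path and the cleanup
// action are assumptions, not code from this package.
const watcher = new Watcher({
  watchPaths: [join(homedir(), 'dev')], // assumed location of project folders
  delayMs: 5 * 60 * 1000, // stage 1: give 5 minutes to restore a deleted folder
  debounceMs: 10_000, // stage 2: batch deletions that settle within 10 seconds
  depth: 3, // watch up to three directory levels deep
  onDelete: (events: WatchEvent[]) => {
    // One call per quiet period, carrying all batched deletions together
    for (const event of events) {
      console.log(`deleted ${event.path} at ${event.timestamp.toISOString()}`);
    }
    // A real caller would trigger a session scan/cleanup here.
  },
});

watcher.start();

// Later, e.g. on SIGINT, release the chokidar handles and pending timers.
process.on('SIGINT', () => watcher.stop());
```

Note that stop() also clears every pending per-path timer and the debounce timer, so no stale onDelete call fires after shutdown.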
package/src/scanners/claude-code.test.ts
ADDED
@@ -0,0 +1,112 @@
+import { describe, expect, it, vi, beforeEach, afterEach } from 'vitest';
+import { mkdir, writeFile, rm } from 'fs/promises';
+import { join } from 'path';
+import { tmpdir, homedir } from 'os';
+
+import { ClaudeCodeScanner } from './claude-code.js';
+
+describe('ClaudeCodeScanner', () => {
+  const testBaseDir = join(tmpdir(), 'claudetest' + Date.now());
+  const mockProjectsDir = join(testBaseDir, '.claude', 'projects');
+
+  beforeEach(async () => {
+    await mkdir(mockProjectsDir, { recursive: true });
+  });
+
+  afterEach(async () => {
+    await rm(testBaseDir, { recursive: true, force: true });
+    vi.restoreAllMocks();
+  });
+
+  describe('isAvailable', () => {
+    it('returns true when projects directory exists', async () => {
+      const scanner = new ClaudeCodeScanner(mockProjectsDir);
+      expect(await scanner.isAvailable()).toBe(true);
+    });
+
+    it('returns false when directory does not exist', async () => {
+      const scanner = new ClaudeCodeScanner('/nonexistent/path/12345');
+      expect(await scanner.isAvailable()).toBe(false);
+    });
+  });
+
+  describe('scan', () => {
+    it('detects orphaned sessions (original project deleted)', async () => {
+      // Create encoded session directory: /deleted/project → -deleted-project
+      const sessionDir = join(mockProjectsDir, '-deleted-project');
+      await mkdir(sessionDir, { recursive: true });
+      await writeFile(join(sessionDir, 'session.json'), '{"test": true}');
+
+      const scanner = new ClaudeCodeScanner(mockProjectsDir);
+      const result = await scanner.scan();
+
+      expect(result.sessions).toHaveLength(1);
+      expect(result.sessions[0].projectPath).toBe('/deleted/project');
+      expect(result.sessions[0].sessionPath).toBe(sessionDir);
+      expect(result.sessions[0].toolName).toBe('claude-code');
+    });
+
+    it('excludes active projects (original exists)', async () => {
+      // Create actually existing directory (path without hyphens)
+      const realProjectDir = join(testBaseDir, 'existingproject');
+      await mkdir(realProjectDir, { recursive: true });
+
+      // Create session for that path
+      const encodedPath = realProjectDir.replace(/\//g, '-');
+      const sessionDir = join(mockProjectsDir, encodedPath);
+      await mkdir(sessionDir, { recursive: true });
+      await writeFile(join(sessionDir, 'session.json'), '{}');
+
+      const scanner = new ClaudeCodeScanner(mockProjectsDir);
+      const result = await scanner.scan();
+
+      // Sessions of existing projects are not orphaned
+      expect(result.sessions).toHaveLength(0);
+    });
+
+    it('calculates session size', async () => {
+      const sessionDir = join(mockProjectsDir, '-orphan-project');
+      await mkdir(sessionDir, { recursive: true });
+      await writeFile(join(sessionDir, 'data1.json'), 'a'.repeat(100));
+      await writeFile(join(sessionDir, 'data2.json'), 'b'.repeat(200));
+
+      const scanner = new ClaudeCodeScanner(mockProjectsDir);
+      const result = await scanner.scan();
+
+      expect(result.sessions[0].size).toBe(300);
+      expect(result.totalSize).toBe(300);
+    });
+
+    it('returns last modified time', async () => {
+      const sessionDir = join(mockProjectsDir, '-test-project');
+      await mkdir(sessionDir, { recursive: true });
+      await writeFile(join(sessionDir, 'file.json'), '{}');
+
+      const scanner = new ClaudeCodeScanner(mockProjectsDir);
+      const result = await scanner.scan();
+
+      expect(result.sessions[0].lastModified).toBeInstanceOf(Date);
+      // Should be within 1 minute since recently created
+      const timeDiff = Date.now() - result.sessions[0].lastModified.getTime();
+      expect(timeDiff).toBeLessThan(60000);
+    });
+
+    it('excludes empty directories from session list', async () => {
+      const sessionDir = join(mockProjectsDir, '-empty-project');
+      await mkdir(sessionDir, { recursive: true });
+
+      const scanner = new ClaudeCodeScanner(mockProjectsDir);
+      const result = await scanner.scan();
+
+      expect(result.sessions).toHaveLength(0);
+    });
+
+    it('records scan duration', async () => {
+      const scanner = new ClaudeCodeScanner(mockProjectsDir);
+      const result = await scanner.scan();
+
+      expect(result.scanDuration).toBeGreaterThanOrEqual(0);
+      expect(result.toolName).toBe('claude-code');
+    });
+  });
+});
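
The tests above lean on the session-directory convention Claude Code uses: a project path is flattened into a directory name under the projects directory (mocked here as .claude/projects inside a temp dir) by replacing path separators with hyphens (/deleted/project → -deleted-project), and a session is treated as orphaned when the decoded project path no longer exists. Below is a small sketch of that logic under those assumptions; encodeProjectPath, decodeSessionDirName, and isOrphaned are hypothetical helper names, not exports of this package.

```ts
import { access } from 'fs/promises';

// Hypothetical helpers illustrating the convention the tests rely on;
// the scanner's actual implementation in claude-code.ts may differ.
export function encodeProjectPath(projectPath: string): string {
  // '/deleted/project' → '-deleted-project'
  return projectPath.replace(/\//g, '-');
}

export function decodeSessionDirName(dirName: string): string {
  // '-deleted-project' → '/deleted/project'
  // Note: this naive reversal is ambiguous for project paths that contain hyphens.
  return dirName.replace(/-/g, '/');
}

// A session directory is "orphaned" when its decoded project path no longer exists.
export async function isOrphaned(sessionDirName: string): Promise<boolean> {
  try {
    await access(decodeSessionDirName(sessionDirName));
    return false; // project still exists → session stays
  } catch {
    return true; // project gone → candidate for cleanup
  }
}
```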