@myrialabs/clopen 0.2.11 → 0.2.12
- package/backend/chat/stream-manager.ts +103 -9
- package/backend/database/queries/project-queries.ts +1 -4
- package/backend/database/queries/session-queries.ts +36 -1
- package/backend/database/queries/snapshot-queries.ts +122 -0
- package/backend/database/utils/connection.ts +17 -11
- package/backend/engine/adapters/claude/stream.ts +12 -2
- package/backend/index.ts +13 -2
- package/backend/snapshot/blob-store.ts +52 -72
- package/backend/snapshot/snapshot-service.ts +24 -0
- package/backend/terminal/stream-manager.ts +41 -2
- package/backend/ws/chat/stream.ts +14 -7
- package/backend/ws/engine/claude/accounts.ts +6 -8
- package/backend/ws/projects/crud.ts +72 -7
- package/backend/ws/sessions/crud.ts +119 -2
- package/backend/ws/system/operations.ts +14 -39
- package/frontend/components/auth/SetupPage.svelte +1 -1
- package/frontend/components/chat/input/ChatInput.svelte +14 -1
- package/frontend/components/chat/message/MessageBubble.svelte +13 -0
- package/frontend/components/common/form/FolderBrowser.svelte +17 -4
- package/frontend/components/common/overlay/Dialog.svelte +17 -15
- package/frontend/components/files/FileNode.svelte +0 -15
- package/frontend/components/history/HistoryModal.svelte +94 -19
- package/frontend/components/history/HistoryView.svelte +29 -36
- package/frontend/components/settings/engines/AIEnginesSettings.svelte +1 -1
- package/frontend/components/settings/general/DataManagementSettings.svelte +1 -54
- package/frontend/components/workspace/DesktopNavigator.svelte +57 -10
- package/frontend/components/workspace/MobileNavigator.svelte +57 -10
- package/frontend/components/workspace/WorkspaceLayout.svelte +0 -8
- package/frontend/services/chat/chat.service.ts +86 -13
- package/frontend/services/notification/global-stream-monitor.ts +5 -2
- package/frontend/stores/core/app.svelte.ts +10 -2
- package/frontend/stores/core/sessions.svelte.ts +4 -1
- package/package.json +1 -1

package/backend/chat/stream-manager.ts CHANGED

@@ -103,7 +103,7 @@ class StreamManager extends EventEmitter {
   * This event fires regardless of per-connection subscribers.
   * Used by the WS layer to send cross-project notifications (presence, sound, push).
   */
- private emitStreamLifecycle(streamState: StreamState, status: 'completed' | 'error' | 'cancelled'): void {
+ private emitStreamLifecycle(streamState: StreamState, status: 'completed' | 'error' | 'cancelled', reason?: string): void {
    if (this.lifecycleEmitted.has(streamState.streamId)) return;
    this.lifecycleEmitted.add(streamState.streamId);

@@ -112,7 +112,8 @@ class StreamManager extends EventEmitter {
      streamId: streamState.streamId,
      projectId: streamState.projectId,
      chatSessionId: streamState.chatSessionId,
-     timestamp: (streamState.completedAt || new Date()).toISOString()
+     timestamp: (streamState.completedAt || new Date()).toISOString(),
+     reason
    });

    // Clean up guard after 60s (no need to keep forever)
@@ -707,10 +708,16 @@ class StreamManager extends EventEmitter {
        });
      } else if ((event as any).content_block?.type === 'text') {
        // Reset partial text for new text content block
-       // Don't emit the initial text — deltas will provide the content
-       // This prevents double-counting if content_block_start.text repeats
-       // the first content_block_delta.text
        streamState.currentPartialText = '';
+       // Emit a start event so frontend has a text stream_event
+       // before deltas arrive (matches thinking block behavior)
+       this.emitStreamEvent(streamState, 'partial', {
+         processId: streamState.processId,
+         eventType: 'start',
+         partialText: '',
+         deltaText: '',
+         timestamp: new Date().toISOString()
+       });
      }
    } else if (event.type === 'content_block_delta') {
      debug.log('chat', `[SM] content_block_delta: deltaType=${(event as any).delta?.type}, hasThinking=${'thinking' in ((event as any).delta || {})}, hasText=${'text' in ((event as any).delta || {})}`);
@@ -830,6 +837,9 @@ class StreamManager extends EventEmitter {
      savedReasoningParentId = saved?.parent_message_id || null;
    }

+   // Clear reasoning text after save to prevent stale catchup injection
+   streamState.currentReasoningText = undefined;
+
    this.emitStreamEvent(streamState, 'message', {
      processId: streamState.processId,
      message: reasoningMsg,
@@ -890,6 +900,16 @@ class StreamManager extends EventEmitter {
      savedParentId = saved?.parent_message_id || null;
    }

+   // Clear partial text after saving a complete assistant message to prevent
+   // cancelStream from saving a duplicate text-only message to DB.
+   // Also prevents catchupActiveStream from injecting a stale stream_event
+   // with text that's already part of the saved message.
+   if (message.type === 'assistant' && !message.metadata?.reasoning) {
+     streamState.currentPartialText = undefined;
+   } else if (message.type === 'assistant' && message.metadata?.reasoning) {
+     streamState.currentReasoningText = undefined;
+   }
+
    streamState.messages.push({
      processId: streamState.processId,
      message,
@@ -1083,7 +1103,7 @@ class StreamManager extends EventEmitter {
    return engine.resolveUserAnswer(toolUseId, answers);
  }

- async cancelStream(streamId: string): Promise<boolean> {
+ async cancelStream(streamId: string, reason?: string): Promise<boolean> {
    const streamState = this.activeStreams.get(streamId);
    if (!streamState || streamState.status !== 'active') {
      return false;
@@ -1093,6 +1113,37 @@ class StreamManager extends EventEmitter {
    streamState.status = 'cancelled';
    streamState.completedAt = new Date();

+   // Save partial reasoning text to DB before cancelling (persists across refresh/project switch)
+   if (streamState.currentReasoningText && streamState.chatSessionId) {
+     try {
+       const reasoningMessage = {
+         type: 'assistant' as const,
+         parent_tool_use_id: null,
+         message: {
+           role: 'assistant' as const,
+           content: [{ type: 'text' as const, text: streamState.currentReasoningText }]
+         },
+         session_id: streamState.sdkSessionId || '',
+         metadata: { reasoning: true }
+       };
+
+       const timestamp = new Date().toISOString();
+       const currentHead = sessionQueries.getHead(streamState.chatSessionId);
+
+       const savedMessage = messageQueries.create({
+         session_id: streamState.chatSessionId,
+         sdk_message: reasoningMessage as any,
+         timestamp,
+         parent_message_id: currentHead || undefined
+       });
+
+       sessionQueries.updateHead(streamState.chatSessionId, savedMessage.id);
+       debug.log('chat', 'Saved partial reasoning on cancel:', savedMessage.id);
+     } catch (error) {
+       debug.error('chat', 'Failed to save partial reasoning on cancel:', error);
+     }
+   }
+
    // Save partial text to DB before cancelling (persists across refresh/project switch)
    if (streamState.currentPartialText && streamState.chatSessionId) {
      try {
@@ -1172,7 +1223,7 @@ class StreamManager extends EventEmitter {
      timestamp: streamState.completedAt.toISOString()
    });

-   this.emitStreamLifecycle(streamState, 'cancelled');
+   this.emitStreamLifecycle(streamState, 'cancelled', reason);

    // Auto-release all MCP-controlled tabs for this chat session
    if (streamState.chatSessionId) {
@@ -1190,8 +1241,16 @@ class StreamManager extends EventEmitter {
    const streamState = this.activeStreams.get(streamId);
    if (streamState) {
      const sessionKey = this.getSessionKey(streamState.projectId, streamState.chatSessionId);
-
-
+     // Only delete session key if it still points to THIS stream.
+     // A newer stream for the same session may have overridden the key;
+     // blindly deleting it would orphan the active stream — making it
+     // unfindable by getSessionStream() and breaking cancel/reconnect.
+     if (this.sessionStreams.get(sessionKey) === streamId) {
+       this.sessionStreams.delete(sessionKey);
+     }
+     if (this.sessionStreams.get(streamState.chatSessionId) === streamId) {
+       this.sessionStreams.delete(streamState.chatSessionId);
+     }
      this.activeStreams.delete(streamId);

      // Cleanup project context service
@@ -1376,6 +1435,41 @@ class StreamManager extends EventEmitter {
    });
  }

+ /**
+  * Cancel and clean up all streams for a specific chat session.
+  * Used when a session is deleted to remove green/amber status indicators.
+  */
+ async cleanupSessionStreams(chatSessionId: string): Promise<void> {
+   const streamsToCancel: string[] = [];
+   const streamsToClean: string[] = [];
+
+   this.activeStreams.forEach((stream, streamId) => {
+     if (stream.chatSessionId === chatSessionId) {
+       if (stream.status === 'active') {
+         streamsToCancel.push(streamId);
+       } else {
+         streamsToClean.push(streamId);
+       }
+     }
+   });
+
+   // Cancel active streams and await their processStream promise so the
+   // finally block (snapshot capture) completes before the caller deletes
+   // the session — preventing FOREIGN KEY constraint failures.
+   for (const streamId of streamsToCancel) {
+     await this.cancelStream(streamId, 'session-deleted');
+     const stream = this.activeStreams.get(streamId);
+     if (stream?.streamPromise) {
+       await stream.streamPromise.catch(() => {});
+     }
+   }
+
+   // Clean up non-active streams
+   for (const streamId of streamsToClean) {
+     this.cleanupStream(streamId);
+   }
+ }
+
  /**
   * Clean up all completed streams
   */

package/backend/database/queries/project-queries.ts CHANGED

@@ -57,11 +57,8 @@ export const projectQueries = {
    `).run(now, id);
  },

-
+ deleteProject(id: string): void {
    const db = getDatabase();
-   // Delete related data first
-   db.prepare('DELETE FROM messages WHERE session_id IN (SELECT id FROM chat_sessions WHERE project_id = ?)').run(id);
-   db.prepare('DELETE FROM chat_sessions WHERE project_id = ?').run(id);
    db.prepare('DELETE FROM user_projects WHERE project_id = ?').run(id);
    db.prepare('DELETE FROM projects WHERE id = ?').run(id);
  },

package/backend/database/queries/session-queries.ts CHANGED

@@ -125,11 +125,46 @@ export const sessionQueries = {

  delete(id: string): void {
    const db = getDatabase();
-   // Delete related
+   // Delete all related data
+   db.prepare('DELETE FROM branches WHERE session_id = ?').run(id);
+   db.prepare('DELETE FROM message_snapshots WHERE session_id = ?').run(id);
+   db.prepare('DELETE FROM session_relationships WHERE parent_session_id = ? OR child_session_id = ?').run(id, id);
    db.prepare('DELETE FROM messages WHERE session_id = ?').run(id);
+   db.prepare('DELETE FROM user_unread_sessions WHERE session_id = ?').run(id);
+   // Clear current_session_id references in user_projects
+   db.prepare('UPDATE user_projects SET current_session_id = NULL WHERE current_session_id = ?').run(id);
    db.prepare('DELETE FROM chat_sessions WHERE id = ?').run(id);
  },

+ /**
+  * Delete all sessions for a project and their related data.
+  * Returns the list of deleted session IDs.
+  */
+ deleteAllByProjectId(projectId: string): string[] {
+   const db = getDatabase();
+   const sessions = db.prepare('SELECT id FROM chat_sessions WHERE project_id = ?')
+     .all(projectId) as { id: string }[];
+   const sessionIds = sessions.map(s => s.id);
+
+   if (sessionIds.length === 0) return [];
+
+   // Delete all related data for the project's sessions
+   db.prepare('DELETE FROM branches WHERE session_id IN (SELECT id FROM chat_sessions WHERE project_id = ?)').run(projectId);
+   db.prepare('DELETE FROM message_snapshots WHERE project_id = ?').run(projectId);
+   db.prepare(`
+     DELETE FROM session_relationships
+     WHERE parent_session_id IN (SELECT id FROM chat_sessions WHERE project_id = ?)
+        OR child_session_id IN (SELECT id FROM chat_sessions WHERE project_id = ?)
+   `).run(projectId, projectId);
+   db.prepare('DELETE FROM messages WHERE session_id IN (SELECT id FROM chat_sessions WHERE project_id = ?)').run(projectId);
+   db.prepare('DELETE FROM user_unread_sessions WHERE project_id = ?').run(projectId);
+   // Clear current_session_id references in user_projects for this project
+   db.prepare('UPDATE user_projects SET current_session_id = NULL WHERE project_id = ?').run(projectId);
+   db.prepare('DELETE FROM chat_sessions WHERE project_id = ?').run(projectId);
+
+   return sessionIds;
+ },
+
  /**
   * Get the active shared session for a project
   * Returns the most recent session that hasn't ended

package/backend/database/queries/snapshot-queries.ts CHANGED

@@ -325,5 +325,127 @@ export const snapshotQueries = {
    `).all(projectId) as SessionRelationship[];

    return relationships;
+ },
+
+ /**
+  * Get ALL snapshots for a session (including soft-deleted).
+  * Used for cleanup — getBySessionId filters is_deleted which misses hashes.
+  */
+ getAllBySessionId(sessionId: string): MessageSnapshot[] {
+   const db = getDatabase();
+   return db.prepare(`
+     SELECT * FROM message_snapshots WHERE session_id = ?
+     ORDER BY created_at ASC
+   `).all(sessionId) as MessageSnapshot[];
+ },
+
+ /**
+  * Get ALL snapshots for a project (including soft-deleted).
+  * Used for cleanup.
+  */
+ getAllByProjectId(projectId: string): MessageSnapshot[] {
+   const db = getDatabase();
+   return db.prepare(`
+     SELECT * FROM message_snapshots WHERE project_id = ?
+     ORDER BY created_at ASC
+   `).all(projectId) as MessageSnapshot[];
+ },
+
+ /**
+  * Delete all snapshots for a session.
+  * Returns the deleted snapshots so callers can clean up blob store.
+  */
+ deleteBySessionId(sessionId: string): MessageSnapshot[] {
+   const db = getDatabase();
+   const snapshots = db.prepare(`
+     SELECT * FROM message_snapshots WHERE session_id = ?
+   `).all(sessionId) as MessageSnapshot[];
+
+   if (snapshots.length > 0) {
+     db.prepare('DELETE FROM message_snapshots WHERE session_id = ?').run(sessionId);
+   }
+
+   return snapshots;
+ },
+
+ /**
+  * Delete all snapshots for a project.
+  * Returns the deleted snapshots so callers can clean up blob store.
+  */
+ deleteByProjectId(projectId: string): MessageSnapshot[] {
+   const db = getDatabase();
+   const snapshots = db.prepare(`
+     SELECT * FROM message_snapshots WHERE project_id = ?
+   `).all(projectId) as MessageSnapshot[];
+
+   if (snapshots.length > 0) {
+     db.prepare('DELETE FROM message_snapshots WHERE project_id = ?').run(projectId);
+   }
+
+   return snapshots;
+ },
+
+ /**
+  * Delete session relationships by session ID (as parent or child).
+  */
+ deleteRelationshipsBySessionId(sessionId: string): void {
+   const db = getDatabase();
+   db.prepare('DELETE FROM session_relationships WHERE parent_session_id = ? OR child_session_id = ?')
+     .run(sessionId, sessionId);
+ },
+
+ /**
+  * Delete all session relationships for a project.
+  */
+ deleteRelationshipsByProjectId(projectId: string): void {
+   const db = getDatabase();
+   db.prepare(`
+     DELETE FROM session_relationships
+     WHERE parent_session_id IN (SELECT id FROM chat_sessions WHERE project_id = ?)
+        OR child_session_id IN (SELECT id FROM chat_sessions WHERE project_id = ?)
+   `).run(projectId, projectId);
+ },
+
+ /**
+  * Collect all blob hashes referenced by the given snapshots.
+  * Extracts oldHash and newHash from session_changes.
+  */
+ collectBlobHashes(snapshots: MessageSnapshot[]): Set<string> {
+   const hashes = new Set<string>();
+   for (const snap of snapshots) {
+     if (!snap.session_changes) continue;
+     try {
+       const changes = JSON.parse(snap.session_changes as string) as Record<string, { oldHash: string; newHash: string }>;
+       for (const change of Object.values(changes)) {
+         if (change.oldHash) hashes.add(change.oldHash);
+         if (change.newHash) hashes.add(change.newHash);
+       }
+     } catch { /* skip malformed */ }
+   }
+   return hashes;
+ },
+
+ /**
+  * Get all blob hashes still referenced by remaining snapshots in the database.
+  * Used to determine which blobs are safe to delete (orphan detection).
+  */
+ getAllReferencedBlobHashes(): Set<string> {
+   const db = getDatabase();
+   const rows = db.prepare(`
+     SELECT session_changes FROM message_snapshots
+     WHERE session_changes IS NOT NULL
+   `).all() as { session_changes: string }[];
+
+   const hashes = new Set<string>();
+   for (const row of rows) {
+     try {
+       const changes = JSON.parse(row.session_changes) as Record<string, { oldHash: string; newHash: string }>;
+       for (const change of Object.values(changes)) {
+         if (change.oldHash) hashes.add(change.oldHash);
+         if (change.newHash) hashes.add(change.newHash);
+       }
+     } catch { /* skip malformed */ }
+   }
+   return hashes;
  }
};

package/backend/database/utils/connection.ts CHANGED

@@ -117,23 +117,29 @@ export class DatabaseManager {

  async resetDatabase(): Promise<void> {
    debug.log('database', '⚠️ Resetting database (dropping all tables)...');
-
+
    if (!this.db) {
      throw new Error('Database not connected');
    }

-   //
-
-     SELECT name FROM sqlite_master
-     WHERE type='table' AND name NOT LIKE 'sqlite_%'
-   `).all() as { name: string }[];
+   // Disable foreign key checks to allow dropping in any order
+   this.db.exec('PRAGMA foreign_keys = OFF');

-
-
-
-
+   try {
+     const tables = this.db.prepare(`
+       SELECT name FROM sqlite_master
+       WHERE type='table' AND name NOT LIKE 'sqlite_%'
+     `).all() as { name: string }[];
+
+     for (const table of tables) {
+       debug.log('database', `🗑️ Dropping table: ${table.name}`);
+       this.db.exec(`DROP TABLE IF EXISTS ${table.name}`);
+     }

-
+     debug.log('database', '✅ Database reset completed');
+   } finally {
+     this.db.exec('PRAGMA foreign_keys = ON');
+   }
  }

  async vacuum(): Promise<void> {

package/backend/engine/adapters/claude/stream.ts CHANGED

@@ -22,6 +22,7 @@ import { debug } from '$shared/utils/logger';
/** Pending AskUserQuestion resolver — stored while SDK is blocked waiting for user input */
interface PendingUserAnswer {
  resolve: (result: PermissionResult) => void;
+ removeAbortListener: () => void;
  input: Record<string, unknown>;
}

@@ -130,6 +131,9 @@ export class ClaudeCodeEngine implements AIEngine {
        options.signal.removeEventListener('abort', onAbort);
        resolve(result);
      },
+     removeAbortListener: () => {
+       options.signal.removeEventListener('abort', onAbort);
+     },
      input
    });
  });
@@ -180,9 +184,15 @@ export class ClaudeCodeEngine implements AIEngine {
   * Cancel active query
   */
  async cancel(): Promise<void> {
-   //
+   // Remove abort listeners from pending AskUserQuestion promises WITHOUT
+   // resolving them. Resolving causes the SDK to call handleControlRequest →
+   // write() to send the permission result to the subprocess. If close() has
+   // already killed the subprocess, this write throws "Operation aborted" as
+   // an unhandled error, crashing the server. By removing listeners and not
+   // resolving, the promises are safely abandoned when close() terminates the
+   // process and the async generator completes.
    for (const [, pending] of this.pendingUserAnswers) {
-     pending.
+     pending.removeAbortListener();
    }
    this.pendingUserAnswers.clear();


package/backend/index.ts CHANGED

@@ -35,6 +35,7 @@ import { handleMcpRequest, closeMcpServer } from './mcp/remote-server';

// Auth middleware
import { checkRouteAccess } from './auth/permissions';
+import { authRateLimiter } from './auth';
import { ws as wsServer } from './utils/ws';

// Register auth gate on WebSocket router — blocks unauthenticated/unauthorized access
@@ -165,22 +166,32 @@ async function gracefulShutdown() {
  if (isShuttingDown) return;
  isShuttingDown = true;

+ // Force exit after 5 seconds — prevents port from being held by slow cleanup
+ // during bun --watch restarts, which causes ECONNREFUSED on the Vite WS proxy.
+ const forceExitTimer = setTimeout(() => {
+   debug.warn('server', '⚠️ Shutdown timeout — forcing exit to release port');
+   process.exit(1);
+ }, 5_000);
+
  console.log('\n🛑 Shutting down server...');
  try {
+   // Stop accepting new connections first — release the port ASAP
+   app.stop();
+   // Dispose rate limiter timer
+   authRateLimiter.dispose();
    // Close MCP remote server (before engines, as they may still reference it)
    await closeMcpServer();
    // Cleanup browser preview sessions
    await browserPreviewServiceManager.cleanup();
    // Dispose all AI engines
    await disposeAllEngines();
-   // Stop accepting new connections
-   app.stop();
    // Close database connection
    closeDatabase();
    debug.log('server', '✅ Graceful shutdown completed');
  } catch (error) {
    debug.error('server', '❌ Error during shutdown:', error);
  }
+ clearTimeout(forceExitTimer);
  process.exit(0);
}


package/backend/snapshot/blob-store.ts CHANGED

@@ -4,7 +4,6 @@
 *
 * Structure:
 * ~/.clopen/snapshots/blobs/{hash[0:2]}/{hash}.gz - compressed file blobs
- * ~/.clopen/snapshots/trees/{snapshotId}.json - tree maps (filepath -> hash)
 *
 * Deduplication: Same file content across any snapshot is stored only once.
 * Compression: All blobs are gzip compressed to minimize disk usage.
@@ -18,7 +17,6 @@ import { getClopenDir } from '../utils/index.js';

const SNAPSHOTS_DIR = join(getClopenDir(), 'snapshots');
const BLOBS_DIR = join(SNAPSHOTS_DIR, 'blobs');
-const TREES_DIR = join(SNAPSHOTS_DIR, 'trees');

export interface TreeMap {
  [filepath: string]: string; // filepath -> blob hash
@@ -45,7 +43,6 @@ class BlobStore {
  async init(): Promise<void> {
    if (this.initialized) return;
    await fs.mkdir(BLOBS_DIR, { recursive: true });
-   await fs.mkdir(TREES_DIR, { recursive: true });
    this.initialized = true;
  }

@@ -112,69 +109,6 @@ class BlobStore {
    return gunzipSync(compressed);
  }

- /**
-  * Store a tree (snapshot state) as a JSON file.
-  * Returns the tree hash for reference.
-  */
- async storeTree(snapshotId: string, tree: TreeMap): Promise<string> {
-   await this.init();
-   const treePath = join(TREES_DIR, `${snapshotId}.json`);
-   const content = JSON.stringify(tree);
-   const treeHash = this.hashContent(Buffer.from(content, 'utf-8'));
-   await fs.writeFile(treePath, content, 'utf-8');
-   return treeHash;
- }
-
- /**
-  * Read a tree by snapshot ID
-  */
- async readTree(snapshotId: string): Promise<TreeMap> {
-   const treePath = join(TREES_DIR, `${snapshotId}.json`);
-   const content = await fs.readFile(treePath, 'utf-8');
-   return JSON.parse(content) as TreeMap;
- }
-
- /**
-  * Check if a tree exists
-  */
- async hasTree(snapshotId: string): Promise<boolean> {
-   try {
-     await fs.access(join(TREES_DIR, `${snapshotId}.json`));
-     return true;
-   } catch {
-     return false;
-   }
- }
-
- /**
-  * Resolve a tree to full file contents (as Buffers).
-  * Reads all blobs in parallel for performance.
-  * Returns { filepath: Buffer } map for binary-safe handling.
-  */
- async resolveTree(tree: TreeMap): Promise<Record<string, Buffer>> {
-   const result: Record<string, Buffer> = {};
-
-   const entries = Object.entries(tree);
-   const blobPromises = entries.map(async ([filepath, hash]) => {
-     try {
-       const content = await this.readBlob(hash);
-       return { filepath, content };
-     } catch (err) {
-       debug.warn('snapshot', `Could not read blob ${hash} for ${filepath}:`, err);
-       return null;
-     }
-   });
-
-   const results = await Promise.all(blobPromises);
-   for (const r of results) {
-     if (r) {
-       result[r.filepath] = r.content;
-     }
-   }
-
-   return result;
- }
-
  /**
   * Hash a file using mtime cache. Returns { hash, content? }.
   * If the file hasn't changed (same mtime+size), returns cached hash without reading content.
@@ -191,10 +125,15 @@ class BlobStore {
    // Check mtime cache
    const cached = this.fileHashCache.get(filepath);
    if (cached && cached.mtimeMs === stat.mtimeMs && cached.size === stat.size) {
-
+     // Verify blob still exists on disk (could have been cleaned up)
+     if (await this.hasBlob(cached.hash)) {
+       return { hash: cached.hash, content: null, cached: true };
+     }
+     // Blob was deleted — invalidate cache, fall through to re-read and re-store
+     this.fileHashCache.delete(filepath);
    }

-   // File changed - read as Buffer (binary-safe, no encoding conversion)
+   // File changed or cache miss - read as Buffer (binary-safe, no encoding conversion)
    const content = await fs.readFile(fullPath);
    const hash = this.hashContent(content);

@@ -212,14 +151,55 @@ class BlobStore {
  }

  /**
-  * Delete
+  * Delete multiple blobs by hash.
+  * Also invalidates fileHashCache entries whose hash matches a deleted blob.
   */
- async
+ async deleteBlobs(hashes: string[]): Promise<number> {
+   const hashSet = new Set(hashes);
+   let deleted = 0;
+   for (const hash of hashes) {
+     try {
+       await fs.unlink(this.getBlobPath(hash));
+       deleted++;
+     } catch {
+       // Ignore - might not exist
+     }
+   }
+
+   // Invalidate fileHashCache entries pointing to deleted blobs
+   for (const [filepath, entry] of this.fileHashCache) {
+     if (hashSet.has(entry.hash)) {
+       this.fileHashCache.delete(filepath);
+     }
+   }
+
+   return deleted;
+ }
+
+ /**
+  * Scan all blob files on disk and return their hashes.
+  * Used for full garbage collection — compare with DB references to find orphans.
+  */
+ async scanAllBlobHashes(): Promise<Set<string>> {
+   const hashes = new Set<string>();
    try {
-     await fs.
+     const prefixDirs = await fs.readdir(BLOBS_DIR);
+     for (const prefix of prefixDirs) {
+       const prefixPath = join(BLOBS_DIR, prefix);
+       const stat = await fs.stat(prefixPath);
+       if (!stat.isDirectory()) continue;
+
+       const files = await fs.readdir(prefixPath);
+       for (const file of files) {
+         if (file.endsWith('.gz')) {
+           hashes.add(file.slice(0, -3)); // Remove .gz suffix
+         }
+       }
+     }
    } catch {
-     //
+     // Directory might not exist yet
    }
+   return hashes;
  }
}
