@seflless/ghosttown 1.6.2 → 1.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,322 @@
1
/**
 * Output Recorder
 *
 * Records PTY output to disk for scrollback persistence.
 * Uses JSONL format for append-only, corruption-isolated storage.
 *
 * Each line in the JSONL file is a chunk of PTY output with a timestamp:
 * {"t": 1234567890123, "d": "raw output data"}
 *
 * The recorder batches writes to disk for performance (flushes every 100ms
 * or when the buffer exceeds 64KB).
 */
13
+
14
+ import { EventEmitter } from 'events';
15
+ import { existsSync, mkdirSync } from 'fs';
16
+ import path from 'path';
17
+ import fs from 'fs/promises';
18
+
19
+ /**
20
+ * A single output chunk stored in the scrollback file.
21
+ */
22
+ export interface OutputChunk {
23
+ /** Unix timestamp in milliseconds */
24
+ t: number;
25
+ /** Raw output data */
26
+ d: string;
27
+ }
28
+
29
+ /**
30
+ * Configuration for OutputRecorder.
31
+ */
32
+ export interface OutputRecorderConfig {
33
+ /** Path to the scrollback file */
34
+ filePath: string;
35
+ /** Maximum number of chunks to keep (default: 50000) */
36
+ maxChunks?: number;
37
+ /** Flush interval in milliseconds (default: 100) */
38
+ flushInterval?: number;
39
+ /** Maximum buffer size in bytes before flush (default: 65536 = 64KB) */
40
+ maxBufferSize?: number;
41
+ }
42
+
43
+ /**
44
+ * Events emitted by OutputRecorder.
45
+ */
46
+ export interface OutputRecorderEvents {
47
+ /** Emitted when data is flushed to disk */
48
+ flush: (chunkCount: number) => void;
49
+ /** Emitted on error */
50
+ error: (error: Error) => void;
51
+ }
52
+
53
+ const DEFAULTS = {
54
+ maxChunks: 50_000, // ~50K chunks (each chunk is a PTY output event)
55
+ flushInterval: 100, // 100ms
56
+ maxBufferSize: 65_536, // 64KB
57
+ };
58
+
59
+ /**
60
+ * Records PTY output to disk with batched writes.
61
+ *
62
+ * @example
63
+ * ```typescript
64
+ * const recorder = new OutputRecorder({
65
+ * filePath: '/path/to/scrollback.jsonl'
66
+ * });
67
+ *
68
+ * await recorder.init();
69
+ *
70
+ * // Record output as it comes from PTY
71
+ * recorder.record('Hello, world!\r\n');
72
+ * recorder.record('\x1b[32mGreen text\x1b[0m\r\n');
73
+ *
74
+ * // Flush pending writes
75
+ * await recorder.flush();
76
+ *
77
+ * // Read all chunks
78
+ * const chunks = await recorder.readAll();
79
+ *
80
+ * // Clean up
81
+ * await recorder.close();
82
+ * ```
83
+ */
84
+ export class OutputRecorder extends EventEmitter {
85
+ private config: Required<OutputRecorderConfig>;
86
+ private buffer: OutputChunk[] = [];
87
+ private bufferSize = 0;
88
+ private flushTimer: ReturnType<typeof setInterval> | null = null;
89
+ private isWriting = false;
90
+ private pendingFlush: Promise<void> | null = null;
91
+ private closed = false;
92
+ private chunkCount = 0; // Track total chunks on disk
93
+
94
+ constructor(config: OutputRecorderConfig) {
95
+ super();
96
+
97
+ this.config = {
98
+ filePath: config.filePath,
99
+ maxChunks: config.maxChunks ?? DEFAULTS.maxChunks,
100
+ flushInterval: config.flushInterval ?? DEFAULTS.flushInterval,
101
+ maxBufferSize: config.maxBufferSize ?? DEFAULTS.maxBufferSize,
102
+ };
103
+ }
104
+
105
+ /**
106
+ * Initialize the recorder.
107
+ * Creates the directory and counts existing chunks.
108
+ */
109
+ async init(): Promise<void> {
110
+ // Ensure directory exists
111
+ const dir = path.dirname(this.config.filePath);
112
+ if (!existsSync(dir)) {
113
+ mkdirSync(dir, { recursive: true });
114
+ }
115
+
116
+ // Count existing chunks
117
+ if (existsSync(this.config.filePath)) {
118
+ this.chunkCount = await this.countChunks();
119
+ }
120
+
121
+ // Start flush timer
122
+ this.flushTimer = setInterval(() => {
123
+ if (this.buffer.length > 0 && !this.isWriting) {
124
+ this.flush().catch((err) => this.emit('error', err));
125
+ }
126
+ }, this.config.flushInterval);
127
+ }
128
+
129
+ /**
130
+ * Record a chunk of output.
131
+ * The data is buffered and flushed to disk periodically.
132
+ */
133
+ record(data: string): void {
134
+ if (this.closed) {
135
+ throw new Error('OutputRecorder is closed');
136
+ }
137
+
138
+ const chunk: OutputChunk = {
139
+ t: Date.now(),
140
+ d: data,
141
+ };
142
+
143
+ this.buffer.push(chunk);
144
+ this.bufferSize += data.length;
145
+
146
+ // Flush if buffer exceeds size limit
147
+ if (this.bufferSize >= this.config.maxBufferSize) {
148
+ this.flush().catch((err) => this.emit('error', err));
149
+ }
150
+ }
151
+
152
+ /**
153
+ * Flush buffered chunks to disk.
154
+ * Returns immediately if already flushing (coalesces concurrent calls).
155
+ */
156
+ async flush(): Promise<void> {
157
+ if (this.closed || this.buffer.length === 0) {
158
+ return;
159
+ }
160
+
161
+ // Coalesce concurrent flush calls
162
+ if (this.pendingFlush) {
163
+ return this.pendingFlush;
164
+ }
165
+
166
+ this.pendingFlush = this.doFlush();
167
+
168
+ try {
169
+ await this.pendingFlush;
170
+ } finally {
171
+ this.pendingFlush = null;
172
+ }
173
+ }
174
+
175
+ /**
176
+ * Internal flush implementation.
177
+ */
178
+ private async doFlush(): Promise<void> {
179
+ if (this.buffer.length === 0) {
180
+ return;
181
+ }
182
+
183
+ this.isWriting = true;
184
+
185
+ try {
186
+ // Grab current buffer and reset
187
+ const chunks = this.buffer;
188
+ this.buffer = [];
189
+ this.bufferSize = 0;
190
+
191
+ // Convert to JSONL
192
+ const lines = chunks.map((c) => JSON.stringify(c)).join('\n') + '\n';
193
+
194
+ // Append to file
195
+ await fs.appendFile(this.config.filePath, lines, 'utf-8');
196
+
197
+ this.chunkCount += chunks.length;
198
+ this.emit('flush', chunks.length);
199
+
200
+ // Trim if over limit
201
+ if (this.chunkCount > this.config.maxChunks) {
202
+ await this.trimOldChunks();
203
+ }
204
+ } finally {
205
+ this.isWriting = false;
206
+ }
207
+ }
208
+
209
+ /**
210
+ * Read all chunks from disk.
211
+ * Returns chunks in chronological order.
212
+ */
213
+ async readAll(): Promise<OutputChunk[]> {
214
+ // Flush pending writes first
215
+ await this.flush();
216
+
217
+ if (!existsSync(this.config.filePath)) {
218
+ return [];
219
+ }
220
+
221
+ const content = await fs.readFile(this.config.filePath, 'utf-8');
222
+ const lines = content.trim().split('\n').filter(Boolean);
223
+
224
+ const chunks: OutputChunk[] = [];
225
+ for (const line of lines) {
226
+ try {
227
+ chunks.push(JSON.parse(line));
228
+ } catch {
229
+ // Skip malformed lines
230
+ }
231
+ }
232
+
233
+ return chunks;
234
+ }
235
+
236
+ /**
237
+ * Read chunks with pagination.
238
+ * @param offset Number of chunks to skip from the start
239
+ * @param limit Maximum number of chunks to return
240
+ */
241
+ async read(offset: number, limit: number): Promise<OutputChunk[]> {
242
+ const all = await this.readAll();
243
+ return all.slice(offset, offset + limit);
244
+ }
245
+
246
+ /**
247
+ * Get the total number of chunks (on disk + in buffer).
248
+ */
249
+ getChunkCount(): number {
250
+ return this.chunkCount + this.buffer.length;
251
+ }
252
+
253
+ /**
254
+ * Clear all recorded data.
255
+ */
256
+ async clear(): Promise<void> {
257
+ this.buffer = [];
258
+ this.bufferSize = 0;
259
+ this.chunkCount = 0;
260
+
261
+ if (existsSync(this.config.filePath)) {
262
+ await fs.unlink(this.config.filePath);
263
+ }
264
+ }
265
+
266
+ /**
267
+ * Close the recorder and flush remaining data.
268
+ */
269
+ async close(): Promise<void> {
270
+ if (this.closed) return;
271
+
272
+ this.closed = true;
273
+
274
+ // Stop flush timer
275
+ if (this.flushTimer) {
276
+ clearInterval(this.flushTimer);
277
+ this.flushTimer = null;
278
+ }
279
+
280
+ // Final flush
281
+ await this.flush();
282
+
283
+ this.removeAllListeners();
284
+ }
285
+
286
+ /**
287
+ * Count chunks in the file.
288
+ */
289
+ private async countChunks(): Promise<number> {
290
+ if (!existsSync(this.config.filePath)) {
291
+ return 0;
292
+ }
293
+
294
+ const content = await fs.readFile(this.config.filePath, 'utf-8');
295
+ return content.trim().split('\n').filter(Boolean).length;
296
+ }
297
+
298
+ /**
299
+ * Trim old chunks to stay under the limit.
300
+ * Removes the oldest chunks by rewriting the file.
301
+ */
302
+ private async trimOldChunks(): Promise<void> {
303
+ if (!existsSync(this.config.filePath)) {
304
+ return;
305
+ }
306
+
307
+ const content = await fs.readFile(this.config.filePath, 'utf-8');
308
+ const lines = content.trim().split('\n').filter(Boolean);
309
+
310
+ if (lines.length <= this.config.maxChunks) {
311
+ return;
312
+ }
313
+
314
+ // Keep only the most recent chunks
315
+ const keepCount = Math.floor(this.config.maxChunks * 0.9); // Keep 90% of max
316
+ const trimmedLines = lines.slice(-keepCount);
317
+
318
+ await fs.writeFile(this.config.filePath, trimmedLines.join('\n') + '\n', 'utf-8');
319
+
320
+ this.chunkCount = trimmedLines.length;
321
+ }
322
+ }
@@ -0,0 +1,147 @@
1
/**
 * Session Manager
 *
 * Core session management without tmux. Handles:
 * - Direct PTY spawning via node-pty
 * - Session persistence to disk
 * - Session restoration on server restart
 * - Multi-client connections
 */
10
+ import { EventEmitter } from 'events';
11
+ import type { IPty } from '@lydell/node-pty';
12
+ import { HistoryReplay } from './history-replay.js';
13
+ import { type OutputChunk } from './output-recorder.js';
14
+ import type { CreateSessionOptions, Session, SessionId, SessionInfo, SessionManagerConfig, SessionPaths } from './types.js';
15
/**
 * Get the default shell for the current platform.
 * NOTE(review): declaration only — the implementation lives in the
 * compiled module, so platform specifics are not visible here.
 */
declare function getDefaultShell(): string;
/**
 * Get default environment variables for terminal sessions.
 */
declare function getDefaultEnv(): Record<string, string>;
23
/**
 * SessionManager handles all session lifecycle operations.
 *
 * NOTE(review): this is a type declaration block — method bodies live in
 * the compiled implementation.
 *
 * @example
 * ```typescript
 * const manager = new SessionManager();
 * await manager.init();
 *
 * // Create a new session
 * const session = await manager.createSession({ name: 'dev' });
 *
 * // Get the PTY for I/O
 * const pty = manager.getPty(session.id);
 * pty.onData((data) => console.log(data));
 * pty.write('echo hello\n');
 *
 * // List all sessions
 * const sessions = await manager.listSessions();
 *
 * // Delete a session
 * await manager.deleteSession(session.id);
 * ```
 */
export declare class SessionManager extends EventEmitter {
    private config;
    private paths;
    private sessions;
    private ptyProcesses;
    private outputRecorders;
    private initialized;
    constructor(config?: SessionManagerConfig);
    /**
     * Initialize the session manager.
     * Loads persisted sessions from disk.
     */
    init(): Promise<void>;
    /**
     * Create a new session.
     */
    createSession(options?: CreateSessionOptions): Promise<Session>;
    /**
     * Get a session by ID, or undefined if it does not exist.
     */
    getSession(sessionId: SessionId): Session | undefined;
    /**
     * Get the PTY process for a session.
     * Returns undefined if the process is not running.
     */
    getPty(sessionId: SessionId): IPty | undefined;
    /**
     * List all sessions with summary info.
     */
    listSessions(): Promise<SessionInfo[]>;
    /**
     * Delete a session and all associated data.
     */
    deleteSession(sessionId: SessionId): Promise<void>;
    /**
     * Rename a session.
     */
    renameSession(sessionId: SessionId, newName: string): Promise<void>;
    /**
     * Resize a session's terminal to the given dimensions.
     */
    resizeSession(sessionId: SessionId, cols: number, rows: number): Promise<void>;
    /**
     * Connect to a session, respawning the process if needed.
     * Returns the PTY for I/O.
     */
    connectToSession(sessionId: SessionId): Promise<IPty>;
    /**
     * Write data to a session's PTY.
     */
    write(sessionId: SessionId, data: string): void;
    /**
     * Get the storage paths for session data.
     */
    getPaths(): SessionPaths;
    /**
     * Get the buffered output for a session (for replay on reconnect).
     * Returns the concatenated output as a single string.
     * Note: This loads all scrollback into memory. For large histories,
     * use getScrollbackChunks() instead.
     */
    getScrollback(sessionId: SessionId): Promise<string>;
    /**
     * Get scrollback chunks for a session with pagination.
     * @param sessionId Session ID
     * @param offset Number of chunks to skip from the start
     * @param limit Maximum number of chunks to return
     */
    getScrollbackChunks(sessionId: SessionId, offset?: number, limit?: number): Promise<OutputChunk[]>;
    /**
     * Get the total number of scrollback chunks for a session.
     */
    getScrollbackLength(sessionId: SessionId): number;
    /**
     * Create a history replay for streaming scrollback to a client.
     * The replay emits 'data', 'progress', and 'complete' events.
     * Returns null when no replay can be created for the session.
     */
    createHistoryReplay(sessionId: SessionId): Promise<HistoryReplay | null>;
    /**
     * Spawn a PTY process for a session.
     */
    private spawnProcess;
    /**
     * Persist a session's metadata to disk.
     */
    private persistSession;
    /**
     * Load persisted sessions from disk.
     */
    private loadPersistedSessions;
    /**
     * Generate a display name for a new session.
     * Returns the next available number (1, 2, 3, ...).
     */
    private generateDisplayName;
    /**
     * Clean up all resources.
     */
    destroy(): Promise<void>;
}
146
/**
 * Get a SessionManager instance.
 * NOTE(review): declaration only — presumably returns a shared/singleton
 * manager; confirm against the implementation before relying on that.
 */
export declare function getSessionManager(config?: SessionManagerConfig): SessionManager;
147
+ export { getDefaultShell, getDefaultEnv };