@seflless/ghosttown 1.7.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +3 -2
- package/src/cli.js +1 -1
- package/src/session/history-replay.d.ts +118 -0
- package/src/session/history-replay.js +174 -0
- package/src/session/history-replay.js.map +1 -0
- package/src/session/index.d.ts +10 -0
- package/src/session/index.js +11 -0
- package/src/session/index.js.map +1 -0
- package/src/session/output-recorder.d.ts +131 -0
- package/src/session/output-recorder.js +247 -0
- package/src/session/output-recorder.js.map +1 -0
- package/src/session/session-manager.d.ts +147 -0
- package/src/session/session-manager.js +489 -0
- package/src/session/session-manager.js.map +1 -0
- package/src/session/types.d.ts +221 -0
- package/src/session/types.js +8 -0
- package/src/session/types.js.map +1 -0
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@seflless/ghosttown",
-  "version": "1.7.0",
+  "version": "2.0.0",
   "description": "Web-based terminal emulator using Ghostty's VT100 parser via WebAssembly",
   "type": "module",
   "main": "./dist/ghostty-web.umd.cjs",
@@ -68,6 +68,7 @@
     "build": "bun run clean && bun run build:wasm && bun run build:lib && bun run build:wasm-copy",
     "build:wasm": "./scripts/build-wasm.sh",
     "build:lib": "vite build",
+    "build:cli": "tsc -p tsconfig.cli.json",
     "build:wasm-copy": "cp ghostty-vt.wasm dist/",
     "clean": "rm -rf dist",
     "preview": "vite preview",
@@ -80,7 +81,7 @@
     "test:e2e": "playwright test",
     "test:e2e:headed": "playwright test --headed",
     "test:e2e:ui": "playwright test --ui",
-    "prepublishOnly": "bun run build",
+    "prepublishOnly": "bun run build && bun run build:cli",
     "cli:publish": "node scripts/cli-publish.js",
     "kill:8080": "kill -9 $(lsof -ti :8080)"
   },
package/src/cli.js
CHANGED
@@ -53,7 +53,7 @@ import { asciiArt } from '../bin/ascii.js';
 // Session name utilities
 import { displayNameExists, findSession, validateSessionName } from './session-utils.js';
 // Session management - custom PTY session manager (replaces tmux)
-import { SessionManager } from './session/session-manager.
+import { SessionManager } from './session/session-manager.js';
 
 // Global SessionManager instance (lazy-initialized)
 let sessionManager = null;
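The only change in cli.js completes the import specifier so it resolves under Node's ESM rules, which require explicit file extensions. For context, here is a minimal sketch of the lazy-initialization pattern the surrounding lines describe (`let sessionManager = null`); the `getSessionManager` helper name and the no-argument `new SessionManager()` call are assumptions, since session-manager.d.ts falls outside this section of the diff:

```typescript
import { SessionManager } from './session/session-manager.js';

// Module-level singleton, created on first use (hypothetical sketch).
let sessionManager: SessionManager | null = null;

function getSessionManager(): SessionManager {
  if (sessionManager === null) {
    // Constructor arguments are an assumption; the real options are defined in
    // src/session/session-manager.d.ts, which is not shown in this section.
    sessionManager = new SessionManager();
  }
  return sessionManager;
}
```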
package/src/session/history-replay.d.ts
ADDED
@@ -0,0 +1,118 @@
+/**
+ * History Replay
+ *
+ * Streams scrollback history to clients in chunks.
+ * Handles large histories (10,000+ lines) without blocking.
+ *
+ * Replay is throttled to prevent overwhelming the client with data
+ * and to show a progress indicator for long replays.
+ */
+import { EventEmitter } from 'events';
+import type { OutputChunk } from './output-recorder.js';
+/**
+ * Configuration for HistoryReplay.
+ */
+export interface HistoryReplayConfig {
+    /** Chunks to send per batch (default: 100) */
+    chunkSize?: number;
+    /** Delay between batches in milliseconds (default: 10) */
+    batchDelay?: number;
+    /** Maximum replay time in milliseconds (default: 30000 = 30s) */
+    maxReplayTime?: number;
+}
+/**
+ * Progress information during replay.
+ */
+export interface ReplayProgress {
+    /** Number of chunks sent */
+    sent: number;
+    /** Total number of chunks */
+    total: number;
+    /** Percentage complete (0-100) */
+    percent: number;
+    /** Whether replay is complete */
+    complete: boolean;
+}
+/**
+ * Events emitted by HistoryReplay.
+ */
+export interface HistoryReplayEvents {
+    /** Emitted for each batch of chunks */
+    data: (data: string) => void;
+    /** Emitted with progress updates */
+    progress: (progress: ReplayProgress) => void;
+    /** Emitted when replay completes */
+    complete: () => void;
+    /** Emitted on error */
+    error: (error: Error) => void;
+}
+/**
+ * Replays scrollback history to a client.
+ *
+ * @example
+ * ```typescript
+ * const replay = new HistoryReplay({
+ *   chunkSize: 100,
+ *   batchDelay: 10
+ * });
+ *
+ * replay.on('data', (data) => {
+ *   // Send data to client via WebSocket
+ *   ws.send(JSON.stringify({ type: 'scrollback', data }));
+ * });
+ *
+ * replay.on('progress', (progress) => {
+ *   // Update progress indicator
+ *   console.log(`Replay: ${progress.percent}% complete`);
+ * });
+ *
+ * replay.on('complete', () => {
+ *   console.log('Replay finished');
+ * });
+ *
+ * // Start replay
+ * await replay.start(chunks);
+ * ```
+ */
+export declare class HistoryReplay extends EventEmitter {
+    private config;
+    private aborted;
+    private startTime;
+    constructor(config?: HistoryReplayConfig);
+    /**
+     * Start replaying chunks.
+     * Emits 'data' events with concatenated output for each batch.
+     * Emits 'progress' events periodically.
+     * Emits 'complete' when done.
+     *
+     * @param chunks Array of output chunks to replay
+     * @returns Promise that resolves when replay completes or is aborted
+     */
+    start(chunks: OutputChunk[]): Promise<void>;
+    /**
+     * Start replay from an async generator.
+     * Useful for streaming from disk without loading all chunks into memory.
+     */
+    startFromGenerator(generator: AsyncGenerator<OutputChunk[], void, unknown>, total: number): Promise<void>;
+    /**
+     * Abort the replay.
+     */
+    abort(): void;
+    /**
+     * Check if replay was aborted.
+     */
+    isAborted(): boolean;
+    /**
+     * Emit progress event.
+     */
+    private emitProgress;
+    /**
+     * Delay for a specified time.
+     */
+    private delay;
+}
+/**
+ * Create an async generator that reads chunks in batches.
+ * Useful for memory-efficient replay of large scrollback files.
+ */
+export declare function createChunkGenerator(chunks: OutputChunk[], batchSize: number): AsyncGenerator<OutputChunk[], void, unknown>;
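The declarations above describe an event-driven replay: construct with throttling options, subscribe to 'data'/'progress'/'complete'/'error', then call start(). Below is a hedged sketch of how a server might wire this to a newly connected WebSocket viewer and abort if the client disconnects mid-replay; the `ws` library usage and the message type names are illustrative assumptions, not part of the package:

```typescript
import type WebSocket from 'ws';
import { HistoryReplay } from './history-replay.js';
import type { OutputChunk } from './output-recorder.js';

// Illustrative only: the `ws` client and message shapes are assumptions.
async function replayToClient(ws: WebSocket, chunks: OutputChunk[]): Promise<void> {
  const replay = new HistoryReplay({ chunkSize: 100, batchDelay: 10 });

  // Forward each concatenated batch to the client.
  replay.on('data', (data: string) => {
    ws.send(JSON.stringify({ type: 'scrollback', data }));
  });

  // Let the client render a progress indicator for long histories.
  replay.on('progress', (p: { percent: number }) => {
    ws.send(JSON.stringify({ type: 'scrollback-progress', percent: p.percent }));
  });

  // Stop streaming if the client goes away mid-replay.
  ws.on('close', () => replay.abort());

  await replay.start(chunks);
}
```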
package/src/session/history-replay.js
ADDED
@@ -0,0 +1,174 @@
+/**
+ * History Replay
+ *
+ * Streams scrollback history to clients in chunks.
+ * Handles large histories (10,000+ lines) without blocking.
+ *
+ * Replay is throttled to prevent overwhelming the client with data
+ * and to show a progress indicator for long replays.
+ */
+import { EventEmitter } from 'events';
+const DEFAULTS = {
+    chunkSize: 100,
+    batchDelay: 10,
+    maxReplayTime: 30000,
+};
+/**
+ * Replays scrollback history to a client.
+ *
+ * @example
+ * ```typescript
+ * const replay = new HistoryReplay({
+ *   chunkSize: 100,
+ *   batchDelay: 10
+ * });
+ *
+ * replay.on('data', (data) => {
+ *   // Send data to client via WebSocket
+ *   ws.send(JSON.stringify({ type: 'scrollback', data }));
+ * });
+ *
+ * replay.on('progress', (progress) => {
+ *   // Update progress indicator
+ *   console.log(`Replay: ${progress.percent}% complete`);
+ * });
+ *
+ * replay.on('complete', () => {
+ *   console.log('Replay finished');
+ * });
+ *
+ * // Start replay
+ * await replay.start(chunks);
+ * ```
+ */
+export class HistoryReplay extends EventEmitter {
+    constructor(config = {}) {
+        super();
+        this.aborted = false;
+        this.startTime = 0;
+        this.config = {
+            chunkSize: config.chunkSize ?? DEFAULTS.chunkSize,
+            batchDelay: config.batchDelay ?? DEFAULTS.batchDelay,
+            maxReplayTime: config.maxReplayTime ?? DEFAULTS.maxReplayTime,
+        };
+    }
+    /**
+     * Start replaying chunks.
+     * Emits 'data' events with concatenated output for each batch.
+     * Emits 'progress' events periodically.
+     * Emits 'complete' when done.
+     *
+     * @param chunks Array of output chunks to replay
+     * @returns Promise that resolves when replay completes or is aborted
+     */
+    async start(chunks) {
+        this.aborted = false;
+        this.startTime = Date.now();
+        const total = chunks.length;
+        if (total === 0) {
+            this.emitProgress(0, 0);
+            this.emit('complete');
+            return;
+        }
+        let sent = 0;
+        while (sent < total && !this.aborted) {
+            // Check timeout
+            if (Date.now() - this.startTime > this.config.maxReplayTime) {
+                this.emit('error', new Error('Replay timeout exceeded'));
+                return;
+            }
+            // Get next batch
+            const batch = chunks.slice(sent, sent + this.config.chunkSize);
+            // Concatenate output data
+            const data = batch.map((c) => c.d).join('');
+            // Emit data
+            if (data) {
+                this.emit('data', data);
+            }
+            sent += batch.length;
+            // Emit progress
+            this.emitProgress(sent, total);
+            // Delay between batches (unless this is the last batch)
+            if (sent < total && this.config.batchDelay > 0) {
+                await this.delay(this.config.batchDelay);
+            }
+        }
+        if (!this.aborted) {
+            this.emit('complete');
+        }
+    }
+    /**
+     * Start replay from an async generator.
+     * Useful for streaming from disk without loading all chunks into memory.
+     */
+    async startFromGenerator(generator, total) {
+        this.aborted = false;
+        this.startTime = Date.now();
+        let sent = 0;
+        for await (const batch of generator) {
+            if (this.aborted)
+                break;
+            // Check timeout
+            if (Date.now() - this.startTime > this.config.maxReplayTime) {
+                this.emit('error', new Error('Replay timeout exceeded'));
+                return;
+            }
+            // Concatenate output data
+            const data = batch.map((c) => c.d).join('');
+            // Emit data
+            if (data) {
+                this.emit('data', data);
+            }
+            sent += batch.length;
+            // Emit progress
+            this.emitProgress(sent, total);
+            // Delay between batches
+            if (this.config.batchDelay > 0) {
+                await this.delay(this.config.batchDelay);
+            }
+        }
+        if (!this.aborted) {
+            this.emit('complete');
+        }
+    }
+    /**
+     * Abort the replay.
+     */
+    abort() {
+        this.aborted = true;
+    }
+    /**
+     * Check if replay was aborted.
+     */
+    isAborted() {
+        return this.aborted;
+    }
+    /**
+     * Emit progress event.
+     */
+    emitProgress(sent, total) {
+        const progress = {
+            sent,
+            total,
+            percent: total === 0 ? 100 : Math.round((sent / total) * 100),
+            complete: sent >= total,
+        };
+        this.emit('progress', progress);
+    }
+    /**
+     * Delay for a specified time.
+     */
+    delay(ms) {
+        return new Promise((resolve) => setTimeout(resolve, ms));
+    }
+}
+/**
+ * Create an async generator that reads chunks in batches.
+ * Useful for memory-efficient replay of large scrollback files.
+ */
+export async function* createChunkGenerator(chunks, batchSize) {
+    for (let i = 0; i < chunks.length; i += batchSize) {
+        yield chunks.slice(i, i + batchSize);
+    }
+}
+//# sourceMappingURL=history-replay.js.map
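`startFromGenerator` pairs with `createChunkGenerator` so large scrollbacks can be pushed through the same throttle in batches rather than via the array-slicing loop in `start()`. A small usage sketch with fabricated in-memory chunks; in the package, chunks would come from the OutputRecorder's JSONL scrollback file (see output-recorder.d.ts later in this diff):

```typescript
import { HistoryReplay, createChunkGenerator } from './history-replay.js';
import type { OutputChunk } from './output-recorder.js';

// Sample chunks fabricated for illustration; real ones come from the recorder.
const chunks: OutputChunk[] = [
  { t: Date.now(), d: 'line one\r\n' },
  { t: Date.now(), d: 'line two\r\n' },
];

const replay = new HistoryReplay({ chunkSize: 200, batchDelay: 5 });
replay.on('data', (data: string) => process.stdout.write(data));

// Feed the replay lazily from a generator; the second argument is the total
// chunk count so progress percentages stay accurate.
await replay.startFromGenerator(createChunkGenerator(chunks, 200), chunks.length);
```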
package/src/session/history-replay.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"history-replay.js","sourceRoot":"","sources":["history-replay.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AA2CtC,MAAM,QAAQ,GAAG;IACf,SAAS,EAAE,GAAG;IACd,UAAU,EAAE,EAAE;IACd,aAAa,EAAE,KAAM;CACtB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,MAAM,OAAO,aAAc,SAAQ,YAAY;IAK7C,YAAY,SAA8B,EAAE;QAC1C,KAAK,EAAE,CAAC;QAJF,YAAO,GAAG,KAAK,CAAC;QAChB,cAAS,GAAG,CAAC,CAAC;QAKpB,IAAI,CAAC,MAAM,GAAG;YACZ,SAAS,EAAE,MAAM,CAAC,SAAS,IAAI,QAAQ,CAAC,SAAS;YACjD,UAAU,EAAE,MAAM,CAAC,UAAU,IAAI,QAAQ,CAAC,UAAU;YACpD,aAAa,EAAE,MAAM,CAAC,aAAa,IAAI,QAAQ,CAAC,aAAa;SAC9D,CAAC;IACJ,CAAC;IAED;;;;;;;;OAQG;IACH,KAAK,CAAC,KAAK,CAAC,MAAqB;QAC/B,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;QACrB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAE5B,MAAM,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC;QAE5B,IAAI,KAAK,KAAK,CAAC,EAAE,CAAC;YAChB,IAAI,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YACxB,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YACtB,OAAO;QACT,CAAC;QAED,IAAI,IAAI,GAAG,CAAC,CAAC;QAEb,OAAO,IAAI,GAAG,KAAK,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;YACrC,gBAAgB;YAChB,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,CAAC;gBAC5D,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC,CAAC;gBACzD,OAAO;YACT,CAAC;YAED,iBAAiB;YACjB,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;YAE/D,0BAA0B;YAC1B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YAE5C,YAAY;YACZ,IAAI,IAAI,EAAE,CAAC;gBACT,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;YAC1B,CAAC;YAED,IAAI,IAAI,KAAK,CAAC,MAAM,CAAC;YAErB,gBAAgB;YAChB,IAAI,CAAC,YAAY,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;YAE/B,wDAAwD;YACxD,IAAI,IAAI,GAAG,KAAK,IAAI,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,CAAC,EAAE,CAAC;gBAC/C,MAAM,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;YAC3C,CAAC;QACH,CAAC;QAED,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;YAClB,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;QACxB,CAAC;IACH,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,kBAAkB,CACtB,SAAuD,EACvD,KAAa;QAEb,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;QACrB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAE5B,IAAI,IAAI,GAAG,CAAC,CAAC;QAEb,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,SAAS,EAAE,CAAC;YACpC,IAAI,IAAI,CAAC,OAAO;gBAAE,MAAM;YAExB,gBAAgB;YAChB,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,CAAC;gBAC5D,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC,CAAC;gBACzD,OAAO;YACT,CAAC;YAED,0BAA0B;YAC1B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YAE5C,YAAY;YACZ,IAAI,IAAI,EAAE,CAAC;gBACT,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;YAC1B,CAAC;YAED,IAAI,IAAI,KAAK,CAAC,MAAM,CAAC;YAErB,gBAAgB;YAChB,IAAI,CAAC,YAAY,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;YAE/B,wBAAwB;YACxB,IAAI,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,CAAC,EAAE,CAAC;gBAC/B,MAAM,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;YAC3C,CAAC;QACH,CAAC;QAED,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;YAClB,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;QACxB,CAAC;IACH,CAAC;IAED;;OAEG;IACH,KAAK;QACH,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;IACtB,CAAC;IAED;;OAEG;IACH,SAAS;QACP,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;OAEG;IACK,YAAY,CAAC,IAAY,EAAE,KAAa;QAC9C,MAAM,QAAQ,GAAmB;YAC/B,IAAI;YACJ,KAAK;YACL,OAAO,EAAE,KAAK,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,GAAG,KAAK,CAAC,GAAG,GAAG,CAAC;YAC7D,QAAQ,EAAE,IAAI,IAAI,KAAK;SACxB,CAAC;QAEF,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;IAClC,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,EAAU;QACtB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAA
C;IAC3D,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,SAAS,CAAC,CAAC,oBAAoB,CACzC,MAAqB,EACrB,SAAiB;IAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,IAAI,SAAS,EAAE,CAAC;QAClD,MAAM,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,CAAC;IACvC,CAAC;AACH,CAAC"}
package/src/session/index.d.ts
ADDED
@@ -0,0 +1,10 @@
+/**
+ * Session Management Module
+ *
+ * Custom PTY session management that replaces tmux.
+ * Provides session persistence, multi-client connections, and read-only sharing.
+ */
+export * from './types.js';
+export * from './session-manager.js';
+export * from './output-recorder.js';
+export * from './history-replay.js';
package/src/session/index.js
ADDED
@@ -0,0 +1,11 @@
+/**
+ * Session Management Module
+ *
+ * Custom PTY session management that replaces tmux.
+ * Provides session persistence, multi-client connections, and read-only sharing.
+ */
+export * from './types.js';
+export * from './session-manager.js';
+export * from './output-recorder.js';
+export * from './history-replay.js';
+//# sourceMappingURL=index.js.map
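Both the declaration file and the compiled barrel re-export every module under src/session, so code elsewhere in the package can pull the session primitives from a single specifier instead of deep paths, for example (a sketch, assuming an ESM import from within the package source tree):

```typescript
// One import instead of four deep paths into src/session/.
import { SessionManager, OutputRecorder, HistoryReplay } from './session/index.js';
```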
package/src/session/index.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,cAAc,YAAY,CAAC;AAC3B,cAAc,sBAAsB,CAAC;AACrC,cAAc,sBAAsB,CAAC;AACrC,cAAc,qBAAqB,CAAC"}
package/src/session/output-recorder.d.ts
ADDED
@@ -0,0 +1,131 @@
+/**
+ * Output Recorder
+ *
+ * Records PTY output to disk for scrollback persistence.
+ * Uses JSONL format for append-only, corruption-isolated storage.
+ *
+ * Each line in the JSONL file is a chunk of PTY output with timestamp:
+ * {"t": 1234567890123, "d": "raw output data"}
+ *
+ * The recorder batches writes to disk for performance (flushes every 100ms
+ * or when buffer exceeds 64KB).
+ */
+import { EventEmitter } from 'events';
+/**
+ * A single output chunk stored in the scrollback file.
+ */
+export interface OutputChunk {
+    /** Unix timestamp in milliseconds */
+    t: number;
+    /** Raw output data */
+    d: string;
+}
+/**
+ * Configuration for OutputRecorder.
+ */
+export interface OutputRecorderConfig {
+    /** Path to the scrollback file */
+    filePath: string;
+    /** Maximum number of chunks to keep (default: 50000) */
+    maxChunks?: number;
+    /** Flush interval in milliseconds (default: 100) */
+    flushInterval?: number;
+    /** Maximum buffer size in bytes before flush (default: 65536 = 64KB) */
+    maxBufferSize?: number;
+}
+/**
+ * Events emitted by OutputRecorder.
+ */
+export interface OutputRecorderEvents {
+    /** Emitted when data is flushed to disk */
+    flush: (chunkCount: number) => void;
+    /** Emitted on error */
+    error: (error: Error) => void;
+}
+/**
+ * Records PTY output to disk with batched writes.
+ *
+ * @example
+ * ```typescript
+ * const recorder = new OutputRecorder({
+ *   filePath: '/path/to/scrollback.jsonl'
+ * });
+ *
+ * await recorder.init();
+ *
+ * // Record output as it comes from PTY
+ * recorder.record('Hello, world!\r\n');
+ * recorder.record('\x1b[32mGreen text\x1b[0m\r\n');
+ *
+ * // Flush pending writes
+ * await recorder.flush();
+ *
+ * // Read all chunks
+ * const chunks = await recorder.readAll();
+ *
+ * // Clean up
+ * await recorder.close();
+ * ```
+ */
+export declare class OutputRecorder extends EventEmitter {
+    private config;
+    private buffer;
+    private bufferSize;
+    private flushTimer;
+    private isWriting;
+    private pendingFlush;
+    private closed;
+    private chunkCount;
+    constructor(config: OutputRecorderConfig);
+    /**
+     * Initialize the recorder.
+     * Creates the directory and counts existing chunks.
+     */
+    init(): Promise<void>;
+    /**
+     * Record a chunk of output.
+     * The data is buffered and flushed to disk periodically.
+     */
+    record(data: string): void;
+    /**
+     * Flush buffered chunks to disk.
+     * Returns immediately if already flushing (coalesces concurrent calls).
+     */
+    flush(): Promise<void>;
+    /**
+     * Internal flush implementation.
+     */
+    private doFlush;
+    /**
+     * Read all chunks from disk.
+     * Returns chunks in chronological order.
+     */
+    readAll(): Promise<OutputChunk[]>;
+    /**
+     * Read chunks with pagination.
+     * @param offset Number of chunks to skip from the start
+     * @param limit Maximum number of chunks to return
+     */
+    read(offset: number, limit: number): Promise<OutputChunk[]>;
+    /**
+     * Get the total number of chunks (on disk + in buffer).
+     */
+    getChunkCount(): number;
+    /**
+     * Clear all recorded data.
+     */
+    clear(): Promise<void>;
+    /**
+     * Close the recorder and flush remaining data.
+     */
+    close(): Promise<void>;
+    /**
+     * Count chunks in the file.
+     */
+    private countChunks;
+    /**
+     * Trim old chunks to stay under the limit.
+     * Removes the oldest chunks by rewriting the file.
+     */
+    private trimOldChunks;
+}