@seflless/ghosttown 1.6.2 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,253 @@
1
+ /**
2
+ * History Replay
3
+ *
4
+ * Streams scrollback history to clients in chunks.
5
+ * Handles large histories (10,000+ lines) without blocking.
6
+ *
7
+ * Replay is throttled to prevent overwhelming the client with data
8
+ * and to show a progress indicator for long replays.
9
+ */
10
+
11
+ import { EventEmitter } from 'events';
12
+ import type { OutputChunk } from './output-recorder.js';
13
+
14
/**
 * Configuration for HistoryReplay.
 *
 * All fields are optional; omitted fields fall back to the module-level
 * defaults (100 chunks per batch, 10 ms between batches, 30 s total cap).
 */
export interface HistoryReplayConfig {
  /** Number of chunks concatenated into each 'data' emission (default: 100) */
  chunkSize?: number;
  /** Delay between batches in milliseconds; 0 disables throttling (default: 10) */
  batchDelay?: number;
  /** Maximum replay time in milliseconds before an 'error' is emitted (default: 30000 = 30s) */
  maxReplayTime?: number;
}
25
+
26
/**
 * Progress snapshot delivered with each 'progress' event during replay.
 */
export interface ReplayProgress {
  /** Number of chunks sent so far */
  sent: number;
  /** Total number of chunks in the replay */
  total: number;
  /** Percentage complete, rounded to an integer (0-100); reported as 100 when total is 0 */
  percent: number;
  /** True once every chunk has been sent (sent >= total) */
  complete: boolean;
}
39
+
40
/**
 * Events emitted by HistoryReplay.
 *
 * NOTE(review): this interface documents the event contract only;
 * HistoryReplay extends the untyped Node EventEmitter, so the compiler
 * does not enforce these signatures.
 */
export interface HistoryReplayEvents {
  /** Emitted once per batch with the concatenated output of that batch */
  data: (data: string) => void;
  /** Emitted after every batch with a progress snapshot */
  progress: (progress: ReplayProgress) => void;
  /** Emitted when all chunks have been sent (not emitted after abort or timeout) */
  complete: () => void;
  /** Emitted on failure, e.g. when maxReplayTime is exceeded */
  error: (error: Error) => void;
}
53
+
54
+ const DEFAULTS = {
55
+ chunkSize: 100,
56
+ batchDelay: 10,
57
+ maxReplayTime: 30_000,
58
+ };
59
+
60
+ /**
61
+ * Replays scrollback history to a client.
62
+ *
63
+ * @example
64
+ * ```typescript
65
+ * const replay = new HistoryReplay({
66
+ * chunkSize: 100,
67
+ * batchDelay: 10
68
+ * });
69
+ *
70
+ * replay.on('data', (data) => {
71
+ * // Send data to client via WebSocket
72
+ * ws.send(JSON.stringify({ type: 'scrollback', data }));
73
+ * });
74
+ *
75
+ * replay.on('progress', (progress) => {
76
+ * // Update progress indicator
77
+ * console.log(`Replay: ${progress.percent}% complete`);
78
+ * });
79
+ *
80
+ * replay.on('complete', () => {
81
+ * console.log('Replay finished');
82
+ * });
83
+ *
84
+ * // Start replay
85
+ * await replay.start(chunks);
86
+ * ```
87
+ */
88
+ export class HistoryReplay extends EventEmitter {
89
+ private config: Required<HistoryReplayConfig>;
90
+ private aborted = false;
91
+ private startTime = 0;
92
+
93
+ constructor(config: HistoryReplayConfig = {}) {
94
+ super();
95
+
96
+ this.config = {
97
+ chunkSize: config.chunkSize ?? DEFAULTS.chunkSize,
98
+ batchDelay: config.batchDelay ?? DEFAULTS.batchDelay,
99
+ maxReplayTime: config.maxReplayTime ?? DEFAULTS.maxReplayTime,
100
+ };
101
+ }
102
+
103
+ /**
104
+ * Start replaying chunks.
105
+ * Emits 'data' events with concatenated output for each batch.
106
+ * Emits 'progress' events periodically.
107
+ * Emits 'complete' when done.
108
+ *
109
+ * @param chunks Array of output chunks to replay
110
+ * @returns Promise that resolves when replay completes or is aborted
111
+ */
112
+ async start(chunks: OutputChunk[]): Promise<void> {
113
+ this.aborted = false;
114
+ this.startTime = Date.now();
115
+
116
+ const total = chunks.length;
117
+
118
+ if (total === 0) {
119
+ this.emitProgress(0, 0);
120
+ this.emit('complete');
121
+ return;
122
+ }
123
+
124
+ let sent = 0;
125
+
126
+ while (sent < total && !this.aborted) {
127
+ // Check timeout
128
+ if (Date.now() - this.startTime > this.config.maxReplayTime) {
129
+ this.emit('error', new Error('Replay timeout exceeded'));
130
+ return;
131
+ }
132
+
133
+ // Get next batch
134
+ const batch = chunks.slice(sent, sent + this.config.chunkSize);
135
+
136
+ // Concatenate output data
137
+ const data = batch.map((c) => c.d).join('');
138
+
139
+ // Emit data
140
+ if (data) {
141
+ this.emit('data', data);
142
+ }
143
+
144
+ sent += batch.length;
145
+
146
+ // Emit progress
147
+ this.emitProgress(sent, total);
148
+
149
+ // Delay between batches (unless this is the last batch)
150
+ if (sent < total && this.config.batchDelay > 0) {
151
+ await this.delay(this.config.batchDelay);
152
+ }
153
+ }
154
+
155
+ if (!this.aborted) {
156
+ this.emit('complete');
157
+ }
158
+ }
159
+
160
+ /**
161
+ * Start replay from an async generator.
162
+ * Useful for streaming from disk without loading all chunks into memory.
163
+ */
164
+ async startFromGenerator(
165
+ generator: AsyncGenerator<OutputChunk[], void, unknown>,
166
+ total: number
167
+ ): Promise<void> {
168
+ this.aborted = false;
169
+ this.startTime = Date.now();
170
+
171
+ let sent = 0;
172
+
173
+ for await (const batch of generator) {
174
+ if (this.aborted) break;
175
+
176
+ // Check timeout
177
+ if (Date.now() - this.startTime > this.config.maxReplayTime) {
178
+ this.emit('error', new Error('Replay timeout exceeded'));
179
+ return;
180
+ }
181
+
182
+ // Concatenate output data
183
+ const data = batch.map((c) => c.d).join('');
184
+
185
+ // Emit data
186
+ if (data) {
187
+ this.emit('data', data);
188
+ }
189
+
190
+ sent += batch.length;
191
+
192
+ // Emit progress
193
+ this.emitProgress(sent, total);
194
+
195
+ // Delay between batches
196
+ if (this.config.batchDelay > 0) {
197
+ await this.delay(this.config.batchDelay);
198
+ }
199
+ }
200
+
201
+ if (!this.aborted) {
202
+ this.emit('complete');
203
+ }
204
+ }
205
+
206
+ /**
207
+ * Abort the replay.
208
+ */
209
+ abort(): void {
210
+ this.aborted = true;
211
+ }
212
+
213
+ /**
214
+ * Check if replay was aborted.
215
+ */
216
+ isAborted(): boolean {
217
+ return this.aborted;
218
+ }
219
+
220
+ /**
221
+ * Emit progress event.
222
+ */
223
+ private emitProgress(sent: number, total: number): void {
224
+ const progress: ReplayProgress = {
225
+ sent,
226
+ total,
227
+ percent: total === 0 ? 100 : Math.round((sent / total) * 100),
228
+ complete: sent >= total,
229
+ };
230
+
231
+ this.emit('progress', progress);
232
+ }
233
+
234
+ /**
235
+ * Delay for a specified time.
236
+ */
237
+ private delay(ms: number): Promise<void> {
238
+ return new Promise((resolve) => setTimeout(resolve, ms));
239
+ }
240
+ }
241
+
242
+ /**
243
+ * Create an async generator that reads chunks in batches.
244
+ * Useful for memory-efficient replay of large scrollback files.
245
+ */
246
+ export async function* createChunkGenerator(
247
+ chunks: OutputChunk[],
248
+ batchSize: number
249
+ ): AsyncGenerator<OutputChunk[], void, unknown> {
250
+ for (let i = 0; i < chunks.length; i += batchSize) {
251
+ yield chunks.slice(i, i + batchSize);
252
+ }
253
+ }
@@ -0,0 +1,10 @@
1
+ /**
2
+ * Session Management Module
3
+ *
4
+ * Custom PTY session management that replaces tmux.
5
+ * Provides session persistence, multi-client connections, and read-only sharing.
6
+ */
7
+ export * from './types.js';
8
+ export * from './session-manager.js';
9
+ export * from './output-recorder.js';
10
+ export * from './history-replay.js';
@@ -0,0 +1,11 @@
1
+ /**
2
+ * Session Management Module
3
+ *
4
+ * Custom PTY session management that replaces tmux.
5
+ * Provides session persistence, multi-client connections, and read-only sharing.
6
+ */
7
+ export * from './types.js';
8
+ export * from './session-manager.js';
9
+ export * from './output-recorder.js';
10
+ export * from './history-replay.js';
11
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,cAAc,YAAY,CAAC;AAC3B,cAAc,sBAAsB,CAAC;AACrC,cAAc,sBAAsB,CAAC;AACrC,cAAc,qBAAqB,CAAC"}
@@ -0,0 +1,11 @@
1
+ /**
2
+ * Session Management Module
3
+ *
4
+ * Custom PTY session management that replaces tmux.
5
+ * Provides session persistence, multi-client connections, and read-only sharing.
6
+ */
7
+
8
+ export * from './types.js';
9
+ export * from './session-manager.js';
10
+ export * from './output-recorder.js';
11
+ export * from './history-replay.js';
@@ -0,0 +1,131 @@
1
+ /**
2
+ * Output Recorder
3
+ *
4
+ * Records PTY output to disk for scrollback persistence.
5
+ * Uses JSONL format for append-only, corruption-isolated storage.
6
+ *
7
+ * Each line in the JSONL file is a chunk of PTY output with timestamp:
8
+ * {"t": 1234567890123, "d": "raw output data"}
9
+ *
10
+ * The recorder batches writes to disk for performance (flushes every 100ms
11
+ * or when buffer exceeds 64KB).
12
+ */
13
+ import { EventEmitter } from 'events';
14
/**
 * A single output chunk, stored as one line of the scrollback JSONL file.
 * Keys are single letters to keep the on-disk format compact.
 */
export interface OutputChunk {
  /** Unix timestamp in milliseconds at which the chunk was recorded */
  t: number;
  /** Raw output data exactly as received (may include escape sequences) */
  d: string;
}
23
/**
 * Configuration for OutputRecorder. Only `filePath` is required; the
 * remaining fields fall back to module defaults.
 */
export interface OutputRecorderConfig {
  /** Path to the scrollback JSONL file; its parent directory is created on init */
  filePath: string;
  /** Maximum number of chunks to keep before the oldest are trimmed (default: 50000) */
  maxChunks?: number;
  /** Interval between periodic buffer flushes, in milliseconds (default: 100) */
  flushInterval?: number;
  /** Buffered byte count that triggers an immediate flush (default: 65536 = 64KB) */
  maxBufferSize?: number;
}
36
/**
 * Events emitted by OutputRecorder.
 */
export interface OutputRecorderEvents {
  /** Emitted after a successful flush, with the number of chunks written */
  flush: (chunkCount: number) => void;
  /** Emitted when a background (timer- or size-triggered) flush fails */
  error: (error: Error) => void;
}
45
/**
 * Records PTY output to disk with batched writes.
 *
 * Output is buffered in memory and appended to a JSONL file either on a
 * periodic timer or when the buffer passes a size threshold, so a busy
 * PTY does not translate into one disk write per output event.
 *
 * @example
 * ```typescript
 * const recorder = new OutputRecorder({
 *   filePath: '/path/to/scrollback.jsonl'
 * });
 *
 * await recorder.init();
 *
 * // Record output as it comes from PTY
 * recorder.record('Hello, world!\r\n');
 * recorder.record('\x1b[32mGreen text\x1b[0m\r\n');
 *
 * // Flush pending writes
 * await recorder.flush();
 *
 * // Read all chunks
 * const chunks = await recorder.readAll();
 *
 * // Clean up
 * await recorder.close();
 * ```
 */
export declare class OutputRecorder extends EventEmitter {
  /** Resolved configuration with defaults applied. */
  private config;
  /** Chunks recorded but not yet written to disk. */
  private buffer;
  /** Total byte length of buffered data. */
  private bufferSize;
  /** Periodic flush timer handle (null until init / after close). */
  private flushTimer;
  /** True while a disk write is in progress. */
  private isWriting;
  /** In-flight flush promise, used to coalesce concurrent flush() calls. */
  private pendingFlush;
  /** True once close() has been called; record() then throws. */
  private closed;
  /** Count of chunks already persisted to disk. */
  private chunkCount;
  constructor(config: OutputRecorderConfig);
  /**
   * Initialize the recorder.
   * Creates the parent directory, counts chunks already in the file,
   * and starts the periodic flush timer.
   */
  init(): Promise<void>;
  /**
   * Record a chunk of output, timestamped with the current time.
   * The data is buffered and flushed to disk periodically.
   * Throws if the recorder has been closed.
   */
  record(data: string): void;
  /**
   * Flush buffered chunks to disk.
   * Returns immediately if already flushing (coalesces concurrent calls).
   */
  flush(): Promise<void>;
  /**
   * Internal flush implementation: appends the buffer as JSONL lines and
   * trims the file when it grows past maxChunks.
   */
  private doFlush;
  /**
   * Read all chunks from disk (pending writes are flushed first).
   * Returns chunks in chronological order; malformed lines are skipped.
   */
  readAll(): Promise<OutputChunk[]>;
  /**
   * Read chunks with pagination.
   * @param offset Number of chunks to skip from the start
   * @param limit Maximum number of chunks to return
   */
  read(offset: number, limit: number): Promise<OutputChunk[]>;
  /**
   * Get the total number of chunks (on disk + in buffer).
   */
  getChunkCount(): number;
  /**
   * Clear all recorded data: empties the buffer and deletes the file.
   */
  clear(): Promise<void>;
  /**
   * Close the recorder: stops the timer, flushes remaining data, and
   * removes all listeners.
   */
  close(): Promise<void>;
  /**
   * Count chunks (non-empty lines) currently in the file.
   */
  private countChunks;
  /**
   * Trim old chunks to stay under the limit by rewriting the file,
   * keeping roughly the newest 90% of maxChunks.
   */
  private trimOldChunks;
}
@@ -0,0 +1,247 @@
1
+ /**
2
+ * Output Recorder
3
+ *
4
+ * Records PTY output to disk for scrollback persistence.
5
+ * Uses JSONL format for append-only, corruption-isolated storage.
6
+ *
7
+ * Each line in the JSONL file is a chunk of PTY output with timestamp:
8
+ * {"t": 1234567890123, "d": "raw output data"}
9
+ *
10
+ * The recorder batches writes to disk for performance (flushes every 100ms
11
+ * or when buffer exceeds 64KB).
12
+ */
13
+ import { EventEmitter } from 'events';
14
+ import { existsSync, mkdirSync } from 'fs';
15
+ import path from 'path';
16
+ import fs from 'fs/promises';
17
/** Fallback values for omitted OutputRecorderConfig fields. */
const DEFAULTS = {
    maxChunks: 50000, // ~50K chunks (each chunk is a PTY output event)
    flushInterval: 100, // 100ms
    maxBufferSize: 65536, // 64KB
};
/**
 * Records PTY output to disk with batched writes.
 *
 * Output is buffered in memory and appended to a JSONL file either on a
 * periodic timer or when the buffer passes a size threshold.
 *
 * @example
 * ```typescript
 * const recorder = new OutputRecorder({
 *   filePath: '/path/to/scrollback.jsonl'
 * });
 *
 * await recorder.init();
 *
 * // Record output as it comes from PTY
 * recorder.record('Hello, world!\r\n');
 * recorder.record('\x1b[32mGreen text\x1b[0m\r\n');
 *
 * // Flush pending writes
 * await recorder.flush();
 *
 * // Read all chunks
 * const chunks = await recorder.readAll();
 *
 * // Clean up
 * await recorder.close();
 * ```
 */
export class OutputRecorder extends EventEmitter {
    constructor(config) {
        super();
        this.buffer = []; // chunks not yet written to disk
        this.bufferSize = 0; // total bytes buffered
        this.flushTimer = null;
        this.isWriting = false;
        this.pendingFlush = null; // in-flight flush promise (coalescing)
        this.closed = false;
        this.chunkCount = 0; // Track total chunks on disk
        this.config = {
            filePath: config.filePath,
            maxChunks: config.maxChunks ?? DEFAULTS.maxChunks,
            flushInterval: config.flushInterval ?? DEFAULTS.flushInterval,
            maxBufferSize: config.maxBufferSize ?? DEFAULTS.maxBufferSize,
        };
    }
    /**
     * Initialize the recorder.
     * Creates the parent directory, counts existing chunks, and starts
     * the periodic flush timer.
     */
    async init() {
        // Ensure directory exists
        const dir = path.dirname(this.config.filePath);
        if (!existsSync(dir)) {
            mkdirSync(dir, { recursive: true });
        }
        // Count existing chunks
        if (existsSync(this.config.filePath)) {
            this.chunkCount = await this.countChunks();
        }
        // Start flush timer
        this.flushTimer = setInterval(() => {
            if (this.buffer.length > 0 && !this.isWriting) {
                this.flush().catch((err) => this.emit('error', err));
            }
        }, this.config.flushInterval);
    }
    /**
     * Record a chunk of output, timestamped with the current time.
     * The data is buffered and flushed to disk periodically.
     * @throws If the recorder has been closed.
     */
    record(data) {
        if (this.closed) {
            throw new Error('OutputRecorder is closed');
        }
        const chunk = {
            t: Date.now(),
            d: data,
        };
        this.buffer.push(chunk);
        this.bufferSize += data.length;
        // Flush if buffer exceeds size limit
        if (this.bufferSize >= this.config.maxBufferSize) {
            this.flush().catch((err) => this.emit('error', err));
        }
    }
    /**
     * Flush buffered chunks to disk.
     * Returns immediately if already flushing (coalesces concurrent calls).
     */
    async flush() {
        if (this.closed || this.buffer.length === 0) {
            return;
        }
        // Coalesce concurrent flush calls
        if (this.pendingFlush) {
            return this.pendingFlush;
        }
        this.pendingFlush = this.doFlush();
        try {
            await this.pendingFlush;
        }
        finally {
            this.pendingFlush = null;
        }
    }
    /**
     * Internal flush implementation: appends buffered chunks as JSONL
     * lines and trims the file when it grows past maxChunks.
     */
    async doFlush() {
        if (this.buffer.length === 0) {
            return;
        }
        this.isWriting = true;
        try {
            // Grab current buffer and reset
            const chunks = this.buffer;
            this.buffer = [];
            this.bufferSize = 0;
            // Convert to JSONL
            const lines = chunks.map((c) => JSON.stringify(c)).join('\n') + '\n';
            // Append to file
            await fs.appendFile(this.config.filePath, lines, 'utf-8');
            this.chunkCount += chunks.length;
            this.emit('flush', chunks.length);
            // Trim if over limit
            if (this.chunkCount > this.config.maxChunks) {
                await this.trimOldChunks();
            }
        }
        finally {
            this.isWriting = false;
        }
    }
    /**
     * Read all chunks from disk (pending writes are flushed first).
     * Returns chunks in chronological order; malformed lines are skipped.
     */
    async readAll() {
        // Flush pending writes first
        await this.flush();
        if (!existsSync(this.config.filePath)) {
            return [];
        }
        const content = await fs.readFile(this.config.filePath, 'utf-8');
        const lines = content.trim().split('\n').filter(Boolean);
        const chunks = [];
        for (const line of lines) {
            try {
                chunks.push(JSON.parse(line));
            }
            catch {
                // Skip malformed lines (JSONL isolates corruption per line)
            }
        }
        return chunks;
    }
    /**
     * Read chunks with pagination.
     * @param offset Number of chunks to skip from the start
     * @param limit Maximum number of chunks to return
     */
    async read(offset, limit) {
        const all = await this.readAll();
        return all.slice(offset, offset + limit);
    }
    /**
     * Get the total number of chunks (on disk + in buffer).
     */
    getChunkCount() {
        return this.chunkCount + this.buffer.length;
    }
    /**
     * Clear all recorded data: empties the buffer and deletes the file.
     */
    async clear() {
        this.buffer = [];
        this.bufferSize = 0;
        this.chunkCount = 0;
        if (existsSync(this.config.filePath)) {
            await fs.unlink(this.config.filePath);
        }
    }
    /**
     * Close the recorder and flush remaining data.
     */
    async close() {
        if (this.closed)
            return;
        // Reject further record() calls from this point on.
        this.closed = true;
        // Stop flush timer
        if (this.flushTimer) {
            clearInterval(this.flushTimer);
            this.flushTimer = null;
        }
        // Wait for any in-flight flush, then write whatever is still buffered.
        // Call doFlush() directly: flush() short-circuits once `closed` is
        // set, which previously dropped any data buffered since the last
        // timer/size flush.
        if (this.pendingFlush) {
            await this.pendingFlush;
        }
        await this.doFlush();
        this.removeAllListeners();
    }
    /**
     * Count chunks (non-empty lines) currently in the file.
     */
    async countChunks() {
        if (!existsSync(this.config.filePath)) {
            return 0;
        }
        const content = await fs.readFile(this.config.filePath, 'utf-8');
        return content.trim().split('\n').filter(Boolean).length;
    }
    /**
     * Trim old chunks to stay under the limit.
     * Removes the oldest chunks by rewriting the file.
     */
    async trimOldChunks() {
        if (!existsSync(this.config.filePath)) {
            return;
        }
        const content = await fs.readFile(this.config.filePath, 'utf-8');
        const lines = content.trim().split('\n').filter(Boolean);
        if (lines.length <= this.config.maxChunks) {
            return;
        }
        // Keep only the most recent chunks
        const keepCount = Math.floor(this.config.maxChunks * 0.9); // Keep 90% of max
        const trimmedLines = lines.slice(-keepCount);
        await fs.writeFile(this.config.filePath, trimmedLines.join('\n') + '\n', 'utf-8');
        this.chunkCount = trimmedLines.length;
    }
}
247
+ //# sourceMappingURL=output-recorder.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"output-recorder.js","sourceRoot":"","sources":["output-recorder.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AACtC,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,IAAI,CAAC;AAC3C,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAE,MAAM,aAAa,CAAC;AAoC7B,MAAM,QAAQ,GAAG;IACf,SAAS,EAAE,KAAM,EAAE,iDAAiD;IACpE,aAAa,EAAE,GAAG,EAAE,QAAQ;IAC5B,aAAa,EAAE,KAAM,EAAE,OAAO;CAC/B,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,MAAM,OAAO,cAAe,SAAQ,YAAY;IAU9C,YAAY,MAA4B;QACtC,KAAK,EAAE,CAAC;QATF,WAAM,GAAkB,EAAE,CAAC;QAC3B,eAAU,GAAG,CAAC,CAAC;QACf,eAAU,GAA0C,IAAI,CAAC;QACzD,cAAS,GAAG,KAAK,CAAC;QAClB,iBAAY,GAAyB,IAAI,CAAC;QAC1C,WAAM,GAAG,KAAK,CAAC;QACf,eAAU,GAAG,CAAC,CAAC,CAAC,6BAA6B;QAKnD,IAAI,CAAC,MAAM,GAAG;YACZ,QAAQ,EAAE,MAAM,CAAC,QAAQ;YACzB,SAAS,EAAE,MAAM,CAAC,SAAS,IAAI,QAAQ,CAAC,SAAS;YACjD,aAAa,EAAE,MAAM,CAAC,aAAa,IAAI,QAAQ,CAAC,aAAa;YAC7D,aAAa,EAAE,MAAM,CAAC,aAAa,IAAI,QAAQ,CAAC,aAAa;SAC9D,CAAC;IACJ,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,IAAI;QACR,0BAA0B;QAC1B,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;QAC/C,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACrB,SAAS,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QACtC,CAAC;QAED,wBAAwB;QACxB,IAAI,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC;YACrC,IAAI,CAAC,UAAU,GAAG,MAAM,IAAI,CAAC,WAAW,EAAE,CAAC;QAC7C,CAAC;QAED,oBAAoB;QACpB,IAAI,CAAC,UAAU,GAAG,WAAW,CAAC,GAAG,EAAE;YACjC,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC;gBAC9C,IAAI,CAAC,KAAK,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC,CAAC;YACvD,CAAC;QACH,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;IAChC,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,IAAY;QACjB,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;QAC9C,CAAC;QAED,MAAM,KAAK,GAAgB;YACzB,CAAC,EAAE,IAAI,CAAC,GAAG,EAAE;YACb,CAAC,EAAE,IAAI;SACR,CAAC;QAEF,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACxB,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,MAAM,CAAC;QAE/B,qCAAqC;QACrC,IAAI,IAAI,CAAC,UAAU,IAAI,IAA
I,CAAC,MAAM,CAAC,aAAa,EAAE,CAAC;YACjD,IAAI,CAAC,KAAK,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC,CAAC;QACvD,CAAC;IACH,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,KAAK;QACT,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC5C,OAAO;QACT,CAAC;QAED,kCAAkC;QAClC,IAAI,IAAI,CAAC,YAAY,EAAE,CAAC;YACtB,OAAO,IAAI,CAAC,YAAY,CAAC;QAC3B,CAAC;QAED,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;QAEnC,IAAI,CAAC;YACH,MAAM,IAAI,CAAC,YAAY,CAAC;QAC1B,CAAC;gBAAS,CAAC;YACT,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QAC3B,CAAC;IACH,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,OAAO;QACnB,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC7B,OAAO;QACT,CAAC;QAED,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC;QAEtB,IAAI,CAAC;YACH,gCAAgC;YAChC,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;YAC3B,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;YACjB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;YAEpB,mBAAmB;YACnB,MAAM,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC;YAErE,iBAAiB;YACjB,MAAM,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;YAE1D,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,MAAM,CAAC;YACjC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC;YAElC,qBAAqB;YACrB,IAAI,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC;gBAC5C,MAAM,IAAI,CAAC,aAAa,EAAE,CAAC;YAC7B,CAAC;QACH,CAAC;gBAAS,CAAC;YACT,IAAI,CAAC,SAAS,GAAG,KAAK,CAAC;QACzB,CAAC;IACH,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,OAAO;QACX,6BAA6B;QAC7B,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;QAEnB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC;YACtC,OAAO,EAAE,CAAC;QACZ,CAAC;QAED,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACjE,MAAM,KAAK,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QAEzD,MAAM,MAAM,GAAkB,EAAE,CAAC;QACjC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACzB,IAAI,CAAC;gBACH,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;YAChC,CAAC;YAAC,MAAM,CAAC;gBACP,uBAAuB;YACzB,CAAC;QACH,CAAC;QAED,OAAO,MAA
M,CAAC;IAChB,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,IAAI,CAAC,MAAc,EAAE,KAAa;QACtC,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;QACjC,OAAO,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,KAAK,CAAC,CAAC;IAC3C,CAAC;IAED;;OAEG;IACH,aAAa;QACX,OAAO,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC;IAC9C,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,KAAK;QACT,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;QACjB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;QACpB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;QAEpB,IAAI,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC;YACrC,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;QACxC,CAAC;IACH,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,KAAK;QACT,IAAI,IAAI,CAAC,MAAM;YAAE,OAAO;QAExB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;QAEnB,mBAAmB;QACnB,IAAI,IAAI,CAAC,UAAU,EAAE,CAAC;YACpB,aAAa,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC/B,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;QACzB,CAAC;QAED,cAAc;QACd,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;QAEnB,IAAI,CAAC,kBAAkB,EAAE,CAAC;IAC5B,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,WAAW;QACvB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC;YACtC,OAAO,CAAC,CAAC;QACX,CAAC;QAED,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACjE,OAAO,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC;IAC3D,CAAC;IAED;;;OAGG;IACK,KAAK,CAAC,aAAa;QACzB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC;YACtC,OAAO;QACT,CAAC;QAED,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACjE,MAAM,KAAK,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QAEzD,IAAI,KAAK,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC;YAC1C,OAAO;QACT,CAAC;QAED,mCAAmC;QACnC,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,GAAG,GAAG,CAAC,CAAC,CAAC,kBAAkB;QAC7E,MAAM,YAAY,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC;QAE7C,MAAM,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,IAAI,EAAE,OAAO,CAAC,CAAC;QAElF,IAAI,CAAC,UAAU,GAAG,YAAY,CAAC,MAAM,CAAC;IACxC,CAAC;CACF"}