cursorflow-1.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cursorflow/__init__.py +78 -0
- cursorflow/auto_updater.py +244 -0
- cursorflow/cli.py +408 -0
- cursorflow/core/agent.py +272 -0
- cursorflow/core/auth_handler.py +433 -0
- cursorflow/core/browser_controller.py +534 -0
- cursorflow/core/browser_engine.py +386 -0
- cursorflow/core/css_iterator.py +397 -0
- cursorflow/core/cursor_integration.py +744 -0
- cursorflow/core/cursorflow.py +649 -0
- cursorflow/core/error_correlator.py +322 -0
- cursorflow/core/event_correlator.py +182 -0
- cursorflow/core/file_change_monitor.py +548 -0
- cursorflow/core/log_collector.py +410 -0
- cursorflow/core/log_monitor.py +179 -0
- cursorflow/core/persistent_session.py +910 -0
- cursorflow/core/report_generator.py +282 -0
- cursorflow/log_sources/local_file.py +198 -0
- cursorflow/log_sources/ssh_remote.py +210 -0
- cursorflow/updater.py +512 -0
- cursorflow-1.2.0.dist-info/METADATA +444 -0
- cursorflow-1.2.0.dist-info/RECORD +25 -0
- cursorflow-1.2.0.dist-info/WHEEL +5 -0
- cursorflow-1.2.0.dist-info/entry_points.txt +2 -0
- cursorflow-1.2.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,910 @@
"""
Persistent Browser Session Manager

Maintains browser sessions across CSS iterations to take advantage of hot reload
environments. Allows rapid CSS iteration without page reloads.
"""

import asyncio
import json
import time
import weakref
from typing import Dict, List, Optional, Any, Set
from pathlib import Path
import logging

from .browser_controller import BrowserController
from .file_change_monitor import get_file_monitor, get_hot_reload_synchronizer


class PersistentSession:
    """
    Manages persistent browser sessions for hot reload environments

    Keeps browser instances alive between CursorFlow operations to take advantage
    of hot reload, live CSS updates, and maintain application state.
    """

    def __init__(self, session_id: str, base_url: str, config: Dict):
        """
        Initialize persistent session

        Args:
            session_id: Unique identifier for this session
            base_url: Base URL for the application
            config: Browser configuration with persistent session options
        """
        self.session_id = session_id
        self.base_url = base_url
        self.config = config

        # Browser controller instance (persistent)
        self.browser: Optional[BrowserController] = None

        # Session state tracking
        self.is_active = False
        self.last_used = time.time()
        self.navigation_history: List[str] = []
        self.css_injections: List[str] = []

        # Hot reload detection
        self.hot_reload_urls: Set[str] = set()
        self.last_reload_time = 0

        # CSS iteration state
        self.baseline_captured = False
        self.iteration_count = 0
        self.applied_css_cache: List[Dict] = []

        self.logger = logging.getLogger(__name__)

        # Session artifacts directory
        self.session_dir = Path.cwd() / ".cursorflow" / "sessions" / session_id
        self.session_dir.mkdir(parents=True, exist_ok=True)

        # File monitoring integration
        self.file_monitor = get_file_monitor()
        self.hot_reload_sync = get_hot_reload_synchronizer()
        self.file_monitoring_active = False

    async def initialize(self) -> bool:
        """
        Initialize the persistent browser session

        Returns:
            True if initialization successful
        """
        try:
            if self.browser is None:
                # Create browser with persistent-friendly config
                persistent_config = self._get_persistent_browser_config()
                self.browser = BrowserController(self.base_url, persistent_config)
                await self.browser.initialize()

                # Setup hot reload detection
                await self._setup_hot_reload_detection()

                # Start file monitoring for hot reload synchronization
                await self._start_file_monitoring()

                self.is_active = True
                self.last_used = time.time()

                self.logger.info(f"Persistent session initialized: {self.session_id}")
                return True
            else:
                # Session already active
                self.last_used = time.time()
                return True

        except Exception as e:
            self.logger.error(f"Failed to initialize persistent session: {e}")
            return False

    def _get_persistent_browser_config(self) -> Dict:
        """Get browser configuration optimized for persistent sessions"""
        config = self.config.copy()

        # Optimize for persistence and hot reload
        config.update({
            "headless": config.get("headless", True),
            "debug_mode": config.get("debug_mode", False),
            "human_timeout": config.get("human_timeout", 30),

            # Hot reload optimizations
            "disable_cache": False,  # Keep cache for faster reloads
            "preserve_local_storage": True,
            "preserve_session_storage": True,

            # Performance optimizations for long sessions
            "disable_background_throttling": True,
            "keep_alive": True
        })

        return config

    async def _setup_hot_reload_detection(self):
        """Setup detection for hot reload events"""
        if not self.browser or not self.browser.page:
            return

        # Monitor for hot reload indicators
        await self.browser.page.add_init_script("""
            // Detect common hot reload frameworks
            window.__cursorflow_hot_reload_detected = false;

            // Webpack Hot Module Replacement
            if (window.module && window.module.hot) {
                window.__cursorflow_hot_reload_detected = true;
                window.__cursorflow_hot_reload_type = 'webpack-hmr';
            }

            // Vite HMR
            if (window.__vite_hot_update) {
                window.__cursorflow_hot_reload_detected = true;
                window.__cursorflow_hot_reload_type = 'vite-hmr';
            }

            // Live reload (general)
            if (window.location.protocol === 'ws:' || window.WebSocket) {
                const originalWebSocket = window.WebSocket;
                window.WebSocket = function(...args) {
                    const ws = new originalWebSocket(...args);
                    if (args[0] && args[0].includes('reload')) {
                        window.__cursorflow_hot_reload_detected = true;
                        window.__cursorflow_hot_reload_type = 'websocket-reload';
                    }
                    return ws;
                };
            }

            // CSS hot reload detection
            const originalCreateElement = document.createElement;
            document.createElement = function(tagName) {
                const el = originalCreateElement.call(this, tagName);
                if (tagName.toLowerCase() === 'style' || tagName.toLowerCase() === 'link') {
                    window.__cursorflow_css_hot_reload = true;
                }
                return el;
            };
        """)

        self.logger.debug("Hot reload detection setup complete")

    async def _start_file_monitoring(self):
        """Start file monitoring for hot reload synchronization"""
        try:
            # Start file monitoring if not already active
            if not self.file_monitor.is_monitoring:
                await self.file_monitor.start_monitoring(poll_interval=0.5)

            # Register this session with hot reload synchronizer
            self.hot_reload_sync.register_browser_session(self.session_id, self)

            # Add callback for file changes
            def on_file_change(change):
                self.logger.debug(f"File change detected: {change.file_path.name} ({change.change_type.value})")

            self.file_monitor.add_change_callback(on_file_change)
            self.file_monitoring_active = True

            self.logger.debug("File monitoring integration active")

        except Exception as e:
            self.logger.warning(f"Failed to start file monitoring: {e}")
            # Continue without file monitoring - not critical

    async def navigate_persistent(self, path: str, wait_for_load: bool = True) -> bool:
        """
        Navigate while maintaining session state

        Args:
            path: Path to navigate to
            wait_for_load: Whether to wait for page load completion

        Returns:
            True if navigation successful
        """
        try:
            if not self.browser:
                await self.initialize()

            # Detect if hot reload is available
            hot_reload_available = await self._check_hot_reload_capability()

            if hot_reload_available and path in self.navigation_history:
                # Use hot reload for faster navigation
                self.logger.info(f"Using hot reload navigation to: {path}")
                await self._hot_reload_navigate(path)
            else:
                # Standard navigation
                self.logger.info(f"Standard navigation to: {path}")
                await self.browser.navigate(path, wait_for_load)

            # Track navigation
            self.navigation_history.append(path)
            self.last_used = time.time()

            return True

        except Exception as e:
            self.logger.error(f"Persistent navigation failed: {e}")
            return False

    async def _check_hot_reload_capability(self) -> bool:
        """Check if hot reload is available and working"""
        if not self.browser or not self.browser.page:
            return False

        try:
            result = await self.browser.page.evaluate("""
                () => {
                    return {
                        detected: window.__cursorflow_hot_reload_detected || false,
                        type: window.__cursorflow_hot_reload_type || 'none',
                        css_reload: window.__cursorflow_css_hot_reload || false
                    };
                }
            """)

            if result.get("detected"):
                self.hot_reload_urls.add(self.browser.page.url)
                self.logger.debug(f"Hot reload detected: {result.get('type')}")
                return True

            return False

        except Exception as e:
            self.logger.debug(f"Hot reload check failed: {e}")
            return False

    async def _hot_reload_navigate(self, path: str):
        """Perform navigation using hot reload when possible"""
        try:
            # For hot reload environments, we can often just change the URL
            # without full page reload
            current_origin = await self.browser.page.evaluate("() => window.location.origin")
            target_url = f"{current_origin}/{path.lstrip('/')}"

            # Use history API for SPA navigation
            await self.browser.page.evaluate(f"""
                () => {{
                    if (window.history && window.history.pushState) {{
                        window.history.pushState(null, null, '{target_url}');

                        // Trigger route change for SPA frameworks
                        const event = new PopStateEvent('popstate', {{
                            state: null
                        }});
                        window.dispatchEvent(event);

                        // Trigger hashchange if needed
                        if (window.location.hash) {{
                            const hashEvent = new HashChangeEvent('hashchange');
                            window.dispatchEvent(hashEvent);
                        }}
                    }}
                }}
            """)

            # Wait for potential async route updates
            await asyncio.sleep(0.5)

        except Exception as e:
            self.logger.warning(f"Hot reload navigation failed, falling back to standard: {e}")
            await self.browser.navigate(path)

    async def apply_css_persistent(
        self,
        css: str,
        name: str = "",
        replace_previous: bool = False
    ) -> Dict[str, Any]:
        """
        Apply CSS changes while maintaining session and taking advantage of hot reload

        Args:
            css: CSS code to apply
            name: Name for this CSS change
            replace_previous: Whether to replace all previous CSS injections

        Returns:
            Result data with before/after state
        """
        try:
            if not self.browser:
                await self.initialize()

            # Clear previous CSS if requested
            if replace_previous:
                await self._clear_injected_css()
                self.applied_css_cache.clear()

            # Capture before state
            before_state = await self._capture_session_state("before_css")

            # Check if hot CSS reload is available
            hot_css_reload = await self._check_css_hot_reload()

            if hot_css_reload:
                # Use hot CSS reload mechanism
                await self._apply_css_hot_reload(css, name)
            else:
                # Standard CSS injection
                await self.browser.inject_css(css)

            # Track applied CSS
            css_entry = {
                "name": name or f"css_{self.iteration_count}",
                "css": css,
                "timestamp": time.time(),
                "method": "hot_reload" if hot_css_reload else "injection"
            }
            self.applied_css_cache.append(css_entry)
            self.iteration_count += 1

            # Capture after state
            after_state = await self._capture_session_state("after_css")

            # Update session tracking
            self.last_used = time.time()

            return {
                "success": True,
                "css_applied": css,
                "method": css_entry["method"],
                "before_state": before_state,
                "after_state": after_state,
                "iteration_count": self.iteration_count
            }

        except Exception as e:
            self.logger.error(f"Persistent CSS application failed: {e}")
            return {
                "success": False,
                "error": str(e),
                "css": css
            }

    async def _check_css_hot_reload(self) -> bool:
        """Check if CSS hot reload is available"""
        if not self.browser or not self.browser.page:
            return False

        try:
            result = await self.browser.page.evaluate("""
                () => {
                    // Check for style-loader (webpack)
                    if (window.__webpack_require__ && window.__webpack_require__.hmr) {
                        return true;
                    }

                    // Check for Vite CSS HMR
                    if (window.__vite_hot_update) {
                        return true;
                    }

                    // Check for live CSS reload WebSocket
                    const wsConnections = Array.from(document.querySelectorAll('script')).some(script =>
                        script.src && script.src.includes('livereload')
                    );

                    return wsConnections || window.__cursorflow_css_hot_reload;
                }
            """)

            return bool(result)

        except Exception:
            return False

    async def _apply_css_hot_reload(self, css: str, name: str):
        """Apply CSS using hot reload mechanisms when available"""
        try:
            # Try to inject CSS in a way that triggers hot reload
            await self.browser.page.evaluate(f"""
                (css, name) => {{
                    // Create a style element with hot reload attributes
                    const style = document.createElement('style');
                    style.id = 'cursorflow-css-' + name;
                    style.setAttribute('data-hot-reload', 'true');
                    style.textContent = css;

                    // Remove previous iteration if exists
                    const existing = document.getElementById(style.id);
                    if (existing) {{
                        existing.remove();
                    }}

                    // Inject CSS
                    document.head.appendChild(style);

                    // Trigger hot reload events if framework supports it
                    if (window.__webpack_require__ && window.__webpack_require__.hmr) {{
                        // Webpack HMR
                        const event = new CustomEvent('webpack-hot-update', {{
                            detail: {{ type: 'css', module: name }}
                        }});
                        window.dispatchEvent(event);
                    }}

                    if (window.__vite_hot_update) {{
                        // Vite HMR
                        window.__vite_hot_update({{
                            type: 'style-update',
                            path: '/' + name + '.css'
                        }});
                    }}

                    return true;
                }}
            """, css, name)

            # Give hot reload time to process
            await asyncio.sleep(0.1)

        except Exception as e:
            self.logger.warning(f"Hot CSS reload failed, using standard injection: {e}")
            await self.browser.inject_css(css)

    async def _clear_injected_css(self):
        """Clear all previously injected CSS"""
        if not self.browser or not self.browser.page:
            return

        try:
            await self.browser.page.evaluate("""
                () => {
                    // Remove all CursorFlow injected styles
                    const styles = document.querySelectorAll('style[id^="cursorflow-css-"]');
                    styles.forEach(style => style.remove());

                    // Remove any other injected styles
                    const injectedStyles = document.querySelectorAll('style[data-hot-reload="true"]');
                    injectedStyles.forEach(style => style.remove());
                }
            """)

        except Exception as e:
            self.logger.warning(f"Failed to clear injected CSS: {e}")

    async def _capture_session_state(self, stage: str) -> Dict[str, Any]:
        """Capture current session state for comparison"""
        if not self.browser or not self.browser.page:
            return {}

        try:
            timestamp = int(time.time())

            # Screenshot
            screenshot_path = str(self.session_dir / f"{stage}_{timestamp}.png")
            await self.browser.page.screenshot(path=screenshot_path, full_page=False)

            # Basic page state
            state = {
                "timestamp": time.time(),
                "stage": stage,
                "url": self.browser.page.url,
                "screenshot": screenshot_path,
                "viewport": await self.browser.page.evaluate("() => ({width: window.innerWidth, height: window.innerHeight})"),
                "scroll": await self.browser.page.evaluate("() => ({x: window.pageXOffset, y: window.pageYOffset})")
            }

            return state

        except Exception as e:
            self.logger.error(f"Failed to capture session state: {e}")
            return {"error": str(e)}

    async def get_session_info(self) -> Dict[str, Any]:
        """Get current session information"""
        hot_reload_info = await self._check_hot_reload_capability() if self.browser else False

        return {
            "session_id": self.session_id,
            "is_active": self.is_active,
            "last_used": self.last_used,
            "age_seconds": time.time() - self.last_used if self.last_used else 0,
            "navigation_history": self.navigation_history[-10:],  # Last 10 navigations
            "applied_css_count": len(self.applied_css_cache),
            "iteration_count": self.iteration_count,
            "hot_reload_available": hot_reload_info,
            "hot_reload_urls": list(self.hot_reload_urls),
            "current_url": self.browser.page.url if self.browser and self.browser.page else None
        }

    async def save_session_state(self) -> str:
        """Save current session state to disk"""
        try:
            session_state = {
                "session_id": self.session_id,
                "base_url": self.base_url,
                "config": self.config,
                "navigation_history": self.navigation_history,
                "applied_css_cache": self.applied_css_cache,
                "iteration_count": self.iteration_count,
                "hot_reload_urls": list(self.hot_reload_urls),
                "saved_at": time.time()
            }

            state_file = self.session_dir / "session_state.json"
            with open(state_file, 'w') as f:
                json.dump(session_state, f, indent=2)

            self.logger.info(f"Session state saved: {state_file}")
            return str(state_file)

        except Exception as e:
            self.logger.error(f"Failed to save session state: {e}")
            return ""

    async def restore_session_state(self, state_file: str) -> bool:
        """Restore session state from disk"""
        try:
            with open(state_file, 'r') as f:
                session_state = json.load(f)

            self.navigation_history = session_state.get("navigation_history", [])
            self.applied_css_cache = session_state.get("applied_css_cache", [])
            self.iteration_count = session_state.get("iteration_count", 0)
            self.hot_reload_urls = set(session_state.get("hot_reload_urls", []))

            self.logger.info(f"Session state restored from: {state_file}")
            return True

        except Exception as e:
            self.logger.error(f"Failed to restore session state: {e}")
            return False

    def is_session_stale(self, max_age_seconds: int = 3600) -> bool:
        """Check if session is stale and should be cleaned up"""
        if not self.last_used:
            return True
        return (time.time() - self.last_used) > max_age_seconds

    async def cleanup(self, save_state: bool = True):
        """Clean up session resources"""
        try:
            if save_state:
                await self.save_session_state()

            # Clean up file monitoring
            if self.file_monitoring_active:
                self.hot_reload_sync.unregister_browser_session(self.session_id)
                self.file_monitoring_active = False

            if self.browser:
                await self.browser.cleanup()
                self.browser = None

            self.is_active = False
            self.logger.info(f"Persistent session cleaned up: {self.session_id}")

        except Exception as e:
            self.logger.error(f"Session cleanup failed: {e}")


class SessionManager:
    """
    Manages multiple persistent browser sessions

    Allows CursorFlow to maintain multiple concurrent sessions and reuse them
    for different components or iteration cycles.
    """

    def __init__(self, max_sessions: int = 5, session_timeout: int = 3600):
        """
        Initialize session manager

        Args:
            max_sessions: Maximum number of concurrent sessions
            session_timeout: Session timeout in seconds
        """
        self.max_sessions = max_sessions
        self.session_timeout = session_timeout
        self.sessions: Dict[str, PersistentSession] = {}
        self.session_refs: Dict[str, weakref.ref] = {}

        self.logger = logging.getLogger(__name__)

        # Start cleanup task
        self._cleanup_task = None
        self._start_cleanup_task()

    def _start_cleanup_task(self):
        """Start background cleanup task"""
        try:
            loop = asyncio.get_event_loop()
            if loop.is_running():
                self._cleanup_task = loop.create_task(self._periodic_cleanup())
        except RuntimeError:
            # No event loop, cleanup will be manual
            pass

    async def _periodic_cleanup(self):
        """Periodic cleanup of stale sessions"""
        while True:
            try:
                await asyncio.sleep(300)  # Check every 5 minutes
                await self.cleanup_stale_sessions()
            except asyncio.CancelledError:
                break
            except Exception as e:
                self.logger.error(f"Cleanup task error: {e}")

    async def get_or_create_session(
        self,
        session_id: str,
        base_url: str,
        config: Dict
    ) -> PersistentSession:
        """
        Get existing session or create new one

        Args:
            session_id: Unique session identifier
            base_url: Base URL for the session
            config: Browser configuration

        Returns:
            PersistentSession instance
        """
        # Check if session already exists and is active
        if session_id in self.sessions:
            session = self.sessions[session_id]
            if session.is_active and not session.is_session_stale(self.session_timeout):
                session.last_used = time.time()
                return session
            else:
                # Clean up stale session
                await self.remove_session(session_id)

        # Create new session
        if len(self.sessions) >= self.max_sessions:
            await self._cleanup_oldest_session()

        session = PersistentSession(session_id, base_url, config)
        self.sessions[session_id] = session

        # Create weak reference for cleanup
        def cleanup_callback(ref):
            if session_id in self.sessions:
                del self.sessions[session_id]
            if session_id in self.session_refs:
                del self.session_refs[session_id]

        self.session_refs[session_id] = weakref.ref(session, cleanup_callback)

        self.logger.info(f"Created new persistent session: {session_id}")
        return session

    async def remove_session(self, session_id: str):
        """Remove and cleanup a specific session"""
        if session_id in self.sessions:
            session = self.sessions[session_id]
            await session.cleanup()
            del self.sessions[session_id]

            if session_id in self.session_refs:
                del self.session_refs[session_id]

            self.logger.info(f"Removed session: {session_id}")

    async def _cleanup_oldest_session(self):
        """Remove the oldest session to make room for new one"""
        if not self.sessions:
            return

        oldest_id = min(self.sessions.keys(),
                        key=lambda sid: self.sessions[sid].last_used or 0)
        await self.remove_session(oldest_id)

    async def cleanup_stale_sessions(self):
        """Clean up all stale sessions"""
        stale_sessions = [
            sid for sid, session in self.sessions.items()
            if session.is_session_stale(self.session_timeout)
        ]

        for session_id in stale_sessions:
            await self.remove_session(session_id)

        if stale_sessions:
            self.logger.info(f"Cleaned up {len(stale_sessions)} stale sessions")

    async def smart_cleanup(self, force_cleanup: bool = False) -> Dict[str, Any]:
        """
        Intelligent session cleanup based on usage patterns and resource optimization

        Args:
            force_cleanup: Force cleanup even for active sessions

        Returns:
            Cleanup report with statistics and actions taken
        """
        cleanup_report = {
            "sessions_before": len(self.sessions),
            "sessions_cleaned": 0,
            "sessions_optimized": 0,
            "memory_freed": 0,
            "actions_taken": []
        }

        current_time = time.time()
        sessions_to_remove = []
        sessions_to_optimize = []

        for session_id, session in self.sessions.items():
            try:
                session_info = await session.get_session_info()
                age_seconds = session_info.get("age_seconds", 0)
                iteration_count = session_info.get("iteration_count", 0)
                hot_reload_available = session_info.get("hot_reload_available", False)

                # Cleanup criteria
                should_cleanup = False
                cleanup_reason = ""

                if force_cleanup:
                    should_cleanup = True
                    cleanup_reason = "forced_cleanup"
                elif age_seconds > 7200:  # 2 hours
                    should_cleanup = True
                    cleanup_reason = "session_too_old"
                elif iteration_count > 50 and not hot_reload_available:
                    should_cleanup = True
                    cleanup_reason = "too_many_iterations_without_hot_reload"
                elif not session.is_active:
                    should_cleanup = True
                    cleanup_reason = "session_inactive"
                elif session.is_session_stale(3600):  # 1 hour threshold
                    should_cleanup = True
                    cleanup_reason = "session_stale"

                if should_cleanup:
                    sessions_to_remove.append((session_id, cleanup_reason))
                elif age_seconds > 1800 and iteration_count > 10:  # 30 minutes, many iterations
                    # Session could benefit from optimization
                    sessions_to_optimize.append((session_id, "heavy_usage_optimization"))

            except Exception as e:
                self.logger.error(f"Error analyzing session {session_id}: {e}")
                sessions_to_remove.append((session_id, "analysis_error"))

        # Perform cleanup
        for session_id, reason in sessions_to_remove:
            try:
                await self.remove_session(session_id)
                cleanup_report["sessions_cleaned"] += 1
                cleanup_report["actions_taken"].append(f"Cleaned {session_id}: {reason}")
            except Exception as e:
                self.logger.error(f"Failed to cleanup session {session_id}: {e}")

        # Perform optimization
        for session_id, reason in sessions_to_optimize:
            try:
                await self._optimize_session(session_id)
                cleanup_report["sessions_optimized"] += 1
                cleanup_report["actions_taken"].append(f"Optimized {session_id}: {reason}")
            except Exception as e:
                self.logger.error(f"Failed to optimize session {session_id}: {e}")

        cleanup_report["sessions_after"] = len(self.sessions)
        cleanup_report["memory_freed"] = cleanup_report["sessions_cleaned"] * 50  # Estimated MB per session

        if cleanup_report["sessions_cleaned"] > 0 or cleanup_report["sessions_optimized"] > 0:
            self.logger.info(f"Smart cleanup completed: {cleanup_report}")

        return cleanup_report

    async def _optimize_session(self, session_id: str):
        """Optimize a session for better performance"""
        if session_id not in self.sessions:
            return

        session = self.sessions[session_id]

        try:
            # Clear injected CSS to reduce memory usage
            if session.browser and session.browser.page:
                await session._clear_injected_css()

            # Clear old applied CSS cache
            if len(session.applied_css_cache) > 20:
                session.applied_css_cache = session.applied_css_cache[-10:]  # Keep last 10

            # Save current state
            await session.save_session_state()

            self.logger.info(f"Optimized session: {session_id}")

        except Exception as e:
            self.logger.error(f"Session optimization failed for {session_id}: {e}")

    async def get_cleanup_recommendations(self) -> Dict[str, Any]:
        """Get recommendations for session cleanup without performing cleanup"""
        recommendations = {
            "immediate_cleanup": [],
            "optimization_candidates": [],
            "healthy_sessions": [],
            "total_sessions": len(self.sessions),
            "estimated_memory_usage": len(self.sessions) * 50  # MB estimate
        }

        for session_id, session in self.sessions.items():
            try:
                session_info = await session.get_session_info()
                age_seconds = session_info.get("age_seconds", 0)
                iteration_count = session_info.get("iteration_count", 0)
                hot_reload_available = session_info.get("hot_reload_available", False)

                session_summary = {
                    "session_id": session_id,
                    "age_hours": age_seconds / 3600,
                    "iteration_count": iteration_count,
                    "hot_reload": hot_reload_available,
                    "active": session.is_active
                }

                # Categorize sessions
                if age_seconds > 7200 or iteration_count > 50 or not session.is_active:
                    recommendations["immediate_cleanup"].append({
                        **session_summary,
                        "reason": "old_session" if age_seconds > 7200 else "inactive" if not session.is_active else "overused"
                    })
                elif age_seconds > 1800 and iteration_count > 10:
                    recommendations["optimization_candidates"].append({
                        **session_summary,
                        "reason": "heavy_usage"
                    })
                else:
                    recommendations["healthy_sessions"].append(session_summary)

            except Exception as e:
                self.logger.error(f"Error analyzing session {session_id}: {e}")

        # Add summary statistics
        recommendations["cleanup_impact"] = {
            "sessions_to_cleanup": len(recommendations["immediate_cleanup"]),
            "sessions_to_optimize": len(recommendations["optimization_candidates"]),
            "memory_recoverable": len(recommendations["immediate_cleanup"]) * 50,  # MB estimate
            "performance_impact": "high" if len(recommendations["immediate_cleanup"]) > 2 else "low"
        }

        return recommendations

    async def get_session_info(self, session_id: str) -> Optional[Dict[str, Any]]:
        """Get information about a specific session"""
        if session_id in self.sessions:
            return await self.sessions[session_id].get_session_info()
        return None

    async def list_sessions(self) -> List[Dict[str, Any]]:
        """List all active sessions"""
        session_list = []
        for session_id, session in self.sessions.items():
            info = await session.get_session_info()
            session_list.append(info)
        return session_list

    async def cleanup_all_sessions(self):
        """Clean up all sessions"""
        session_ids = list(self.sessions.keys())
        for session_id in session_ids:
            await self.remove_session(session_id)

        if self._cleanup_task:
            self._cleanup_task.cancel()

        self.logger.info("All sessions cleaned up")


# Global session manager instance
_session_manager: Optional[SessionManager] = None


def get_session_manager() -> SessionManager:
    """Get or create global session manager"""
    global _session_manager
    if _session_manager is None:
        _session_manager = SessionManager()
    return _session_manager
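
For illustration only, and not part of the packaged file above: a minimal sketch of how the module's public entry points might be driven, assuming the module is importable as cursorflow.core.persistent_session, a hypothetical dev server at http://localhost:3000, and a hypothetical session name and CSS snippet.

# Illustrative usage sketch (assumptions noted above); method and return-key
# names follow the persistent_session module shown in this diff.
import asyncio

from cursorflow.core.persistent_session import get_session_manager


async def main():
    manager = get_session_manager()

    # Reuse (or lazily create) a named session pointed at the running app.
    session = await manager.get_or_create_session(
        session_id="header-tweaks",          # hypothetical session name
        base_url="http://localhost:3000",    # hypothetical dev server
        config={"headless": True},
    )

    # Navigate once, then iterate on CSS without reloading the page.
    await session.navigate_persistent("/dashboard")
    result = await session.apply_css_persistent(
        ".header { background: rebeccapurple; }",  # hypothetical CSS change
        name="header-color",
        replace_previous=True,
    )
    print(result.get("method"), result.get("iteration_count"))

    # Persist session state and release browser resources when done.
    await manager.cleanup_all_sessions()


if __name__ == "__main__":
    asyncio.run(main())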