cursorflow 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cursorflow/__init__.py +78 -0
- cursorflow/auto_updater.py +244 -0
- cursorflow/cli.py +408 -0
- cursorflow/core/agent.py +272 -0
- cursorflow/core/auth_handler.py +433 -0
- cursorflow/core/browser_controller.py +534 -0
- cursorflow/core/browser_engine.py +386 -0
- cursorflow/core/css_iterator.py +397 -0
- cursorflow/core/cursor_integration.py +744 -0
- cursorflow/core/cursorflow.py +649 -0
- cursorflow/core/error_correlator.py +322 -0
- cursorflow/core/event_correlator.py +182 -0
- cursorflow/core/file_change_monitor.py +548 -0
- cursorflow/core/log_collector.py +410 -0
- cursorflow/core/log_monitor.py +179 -0
- cursorflow/core/persistent_session.py +910 -0
- cursorflow/core/report_generator.py +282 -0
- cursorflow/log_sources/local_file.py +198 -0
- cursorflow/log_sources/ssh_remote.py +210 -0
- cursorflow/updater.py +512 -0
- cursorflow-1.2.0.dist-info/METADATA +444 -0
- cursorflow-1.2.0.dist-info/RECORD +25 -0
- cursorflow-1.2.0.dist-info/WHEEL +5 -0
- cursorflow-1.2.0.dist-info/entry_points.txt +2 -0
- cursorflow-1.2.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,548 @@
|
|
1
|
+
"""
|
2
|
+
File Change Monitor for Hot Reload Synchronization
|
3
|
+
|
4
|
+
Monitors CSS and source file changes to synchronize with browser state,
|
5
|
+
enabling real-time feedback and improved hot reload detection.
|
6
|
+
"""
|
7
|
+
|
8
|
+
import asyncio
|
9
|
+
import hashlib
|
10
|
+
import time
|
11
|
+
from typing import Dict, List, Optional, Set, Callable, Any
|
12
|
+
from pathlib import Path
|
13
|
+
import logging
|
14
|
+
from dataclasses import dataclass
|
15
|
+
from enum import Enum
|
16
|
+
|
17
|
+
|
18
|
+
class ChangeType(Enum):
    """Closed set of file-change categories, keyed by the file's role
    in a web project (values are the short names used in logs/stats)."""

    CSS_CHANGE = "css"
    JS_CHANGE = "javascript"
    HTML_CHANGE = "html"
    CONFIG_CHANGE = "config"
    ASSET_CHANGE = "asset"
|
25
|
+
|
26
|
+
|
27
|
+
@dataclass
class FileChange:
    """Represents a detected file change"""
    # Path of the file that changed (new, modified, or — transiently — deleted).
    file_path: Path
    # Category of the change, derived from the file extension
    # (see FileChangeMonitor._determine_change_type).
    change_type: ChangeType
    # Wall-clock time (time.time()) at which the scan detected the change.
    timestamp: float
    # Content fingerprint after the change; either an md5 hex digest or a
    # "large_file_<size>_<mtime>" marker for files over the hashing threshold.
    content_hash: str
    # Approximate size of the change: the file's current size in bytes
    # (0 for newly detected files or when stat() fails).
    change_size: int = 0
    # True when this kind of file is expected to trigger a browser hot reload
    # (see FileChangeMonitor._is_hot_reloadable).
    is_hot_reloadable: bool = False
|
36
|
+
|
37
|
+
|
38
|
+
class FileChangeMonitor:
    """
    Monitors file changes to synchronize with browser hot reload

    Detects changes in CSS, JS, and other files to:
    1. Predict when hot reload will trigger
    2. Synchronize browser state monitoring
    3. Optimize CSS iteration timing
    4. Provide feedback on hot reload effectiveness

    Callbacks registered via add_change_callback / add_hot_reload_callback may
    be plain callables or coroutine functions; coroutine results are awaited
    inside the monitor loop (see _process_changes).
    """

    def __init__(
        self,
        project_root: Optional[Path] = None,
        watch_patterns: Optional[List[str]] = None
    ):
        """
        Initialize file change monitor

        Args:
            project_root: Root directory to monitor (defaults to current working directory)
            watch_patterns: File patterns to monitor (defaults to common web dev files)
        """
        self.project_root = project_root or Path.cwd()
        self.watch_patterns = watch_patterns or [
            "**/*.css", "**/*.scss", "**/*.sass", "**/*.less",  # Styles
            "**/*.js", "**/*.jsx", "**/*.ts", "**/*.tsx",  # JavaScript
            "**/*.html", "**/*.htm",  # HTML
            "**/*.vue", "**/*.svelte",  # Component files
            "**/package.json", "**/webpack.config.js",  # Config files
            "**/*.json"  # Config and data
        ]

        # State tracking
        self.file_hashes: Dict[str, str] = {}  # str(path) -> last known content hash
        self.last_scan_time: float = 0  # time.time() of the most recent scan
        self.change_history: List[FileChange] = []  # bounded to the last 100 changes
        self.is_monitoring = False
        self.monitor_task: Optional[asyncio.Task] = None

        # Change callbacks (sync callables or coroutine functions)
        self.change_callbacks: List[Callable[[FileChange], None]] = []
        self.hot_reload_callbacks: List[Callable[[List[FileChange]], None]] = []

        # Hot reload detection: extension groups per change type.
        # NOTE(review): currently informational only — the hot-reload decision
        # is made by _is_hot_reloadable via _determine_change_type.
        self.hot_reload_patterns = {
            ChangeType.CSS_CHANGE: [".css", ".scss", ".sass", ".less"],
            ChangeType.JS_CHANGE: [".js", ".jsx", ".ts", ".tsx"],
            ChangeType.HTML_CHANGE: [".html", ".htm"],
        }

        self.logger = logging.getLogger(__name__)

        # Exclude patterns to avoid monitoring noise.
        # NOTE(review): Path.match() has limited "**" support before
        # Python 3.13 — verify these patterns actually filter nested paths
        # (e.g. deep files under node_modules) on the targeted Python version.
        self.exclude_patterns = [
            "**/node_modules/**",
            "**/.git/**",
            "**/.cursorflow/**",
            "**/dist/**",
            "**/build/**",
            "**/.next/**",
            "**/coverage/**"
        ]

    async def start_monitoring(self, poll_interval: float = 0.5) -> bool:
        """
        Start monitoring file changes

        Args:
            poll_interval: How often to check for changes (seconds)

        Returns:
            True if monitoring started successfully
        """
        try:
            if self.is_monitoring:
                self.logger.warning("File monitoring already active")
                return True

            # Initial scan to establish baseline
            await self._initial_scan()

            # Start monitoring task
            self.monitor_task = asyncio.create_task(
                self._monitor_loop(poll_interval)
            )

            self.is_monitoring = True
            self.logger.info(f"File change monitoring started for: {self.project_root}")
            return True

        except Exception as e:
            self.logger.error(f"Failed to start file monitoring: {e}")
            return False

    async def stop_monitoring(self):
        """Stop monitoring file changes"""
        try:
            # Flip the flag first so the loop exits even if cancel is delayed.
            self.is_monitoring = False

            if self.monitor_task:
                self.monitor_task.cancel()
                try:
                    await self.monitor_task
                except asyncio.CancelledError:
                    # Expected: the loop was cancelled mid-sleep.
                    pass
                self.monitor_task = None

            self.logger.info("File change monitoring stopped")

        except Exception as e:
            self.logger.error(f"Error stopping file monitoring: {e}")

    def add_change_callback(self, callback: Callable[[FileChange], None]):
        """Add callback for individual file changes (sync or coroutine function)"""
        self.change_callbacks.append(callback)

    def add_hot_reload_callback(self, callback: Callable[[List[FileChange]], None]):
        """Add callback for hot reload events (batch of related changes);
        may be a sync callable or a coroutine function."""
        self.hot_reload_callbacks.append(callback)

    async def _initial_scan(self):
        """Perform initial scan to establish file hash baseline"""
        self.logger.debug("Performing initial file scan...")

        files_scanned = 0
        for pattern in self.watch_patterns:
            for file_path in self.project_root.glob(pattern):
                if self._should_monitor_file(file_path):
                    try:
                        file_hash = await self._calculate_file_hash(file_path)
                        self.file_hashes[str(file_path)] = file_hash
                        files_scanned += 1
                    except Exception as e:
                        self.logger.debug(f"Error scanning {file_path}: {e}")

        self.last_scan_time = time.time()
        self.logger.info(f"Initial scan complete: {files_scanned} files monitored")

    async def _monitor_loop(self, poll_interval: float):
        """Main monitoring loop: poll for changes until stopped or cancelled."""
        while self.is_monitoring:
            try:
                changes = await self._scan_for_changes()

                if changes:
                    await self._process_changes(changes)

                await asyncio.sleep(poll_interval)

            except asyncio.CancelledError:
                break
            except Exception as e:
                # Keep the loop alive on transient errors; back off one interval.
                self.logger.error(f"Error in monitoring loop: {e}")
                await asyncio.sleep(poll_interval)

    async def _scan_for_changes(self) -> List[FileChange]:
        """Scan for file changes since last check.

        Returns:
            FileChange entries for modified and newly appearing files.
            Deleted files are dropped from tracking but not reported.
        """
        changes = []
        current_time = time.time()

        # Check existing files for changes (iterate a snapshot so deletions
        # can be removed from the dict mid-loop).
        for file_path_str, old_hash in list(self.file_hashes.items()):
            file_path = Path(file_path_str)

            if not file_path.exists():
                # File was deleted
                del self.file_hashes[file_path_str]
                continue

            try:
                new_hash = await self._calculate_file_hash(file_path)

                if new_hash != old_hash:
                    # File was modified
                    change = FileChange(
                        file_path=file_path,
                        change_type=self._determine_change_type(file_path),
                        timestamp=current_time,
                        content_hash=new_hash,
                        change_size=self._calculate_change_size(file_path),
                        is_hot_reloadable=self._is_hot_reloadable(file_path)
                    )
                    changes.append(change)
                    self.file_hashes[file_path_str] = new_hash

            except Exception as e:
                self.logger.debug(f"Error checking {file_path}: {e}")

        # Check for new files
        for pattern in self.watch_patterns:
            for file_path in self.project_root.glob(pattern):
                if (self._should_monitor_file(file_path) and
                    str(file_path) not in self.file_hashes):

                    try:
                        file_hash = await self._calculate_file_hash(file_path)
                        self.file_hashes[str(file_path)] = file_hash

                        # New file detected
                        change = FileChange(
                            file_path=file_path,
                            change_type=self._determine_change_type(file_path),
                            timestamp=current_time,
                            content_hash=file_hash,
                            change_size=0,  # New file
                            is_hot_reloadable=self._is_hot_reloadable(file_path)
                        )
                        changes.append(change)

                    except Exception as e:
                        self.logger.debug(f"Error processing new file {file_path}: {e}")

        self.last_scan_time = current_time
        return changes

    async def _process_changes(self, changes: List[FileChange]):
        """Process detected changes and trigger callbacks.

        Callbacks may be plain callables or coroutine functions; coroutine
        results are awaited here. (Previously coroutine callbacks were
        invoked without being awaited and therefore never executed.)
        """
        # Add to change history
        self.change_history.extend(changes)

        # Keep only recent history (last 100 changes)
        if len(self.change_history) > 100:
            self.change_history = self.change_history[-100:]

        # Trigger individual change callbacks
        for change in changes:
            for callback in self.change_callbacks:
                try:
                    result = callback(change)
                    if asyncio.iscoroutine(result):
                        await result
                except Exception as e:
                    self.logger.error(f"Error in change callback: {e}")

        # Check for hot reload events
        hot_reload_changes = [c for c in changes if c.is_hot_reloadable]
        if hot_reload_changes:
            for callback in self.hot_reload_callbacks:
                try:
                    result = callback(hot_reload_changes)
                    if asyncio.iscoroutine(result):
                        await result
                except Exception as e:
                    self.logger.error(f"Error in hot reload callback: {e}")

        # Log changes
        for change in changes:
            reload_indicator = "🔥" if change.is_hot_reloadable else "📝"
            self.logger.info(f"{reload_indicator} File changed: {change.file_path.name} ({change.change_type.value})")

    async def _calculate_file_hash(self, file_path: Path) -> str:
        """Calculate hash of file content.

        Returns an md5 hex digest (content fingerprint only — not used for
        security), a "large_file_<size>_<mtime>" marker for files over 1MB,
        or "error" when the file cannot be read.
        """
        try:
            # For large files, just check size and mtime for performance
            stat = file_path.stat()
            if stat.st_size > 1024 * 1024:  # 1MB threshold
                return f"large_file_{stat.st_size}_{stat.st_mtime}"

            # For smaller files, calculate content hash
            content = file_path.read_bytes()
            return hashlib.md5(content).hexdigest()

        except Exception as e:
            self.logger.debug(f"Error calculating hash for {file_path}: {e}")
            return "error"

    def _calculate_change_size(self, file_path: Path) -> int:
        """Calculate approximate size of change (current file size in bytes;
        0 when stat() fails)."""
        try:
            return file_path.stat().st_size
        except Exception:
            return 0

    def _determine_change_type(self, file_path: Path) -> ChangeType:
        """Determine the type of change based on file extension.

        JSON files only count as CONFIG_CHANGE when "config" appears in the
        file name; everything unrecognized falls through to ASSET_CHANGE.
        """
        suffix = file_path.suffix.lower()

        if suffix in [".css", ".scss", ".sass", ".less"]:
            return ChangeType.CSS_CHANGE
        elif suffix in [".js", ".jsx", ".ts", ".tsx"]:
            return ChangeType.JS_CHANGE
        elif suffix in [".html", ".htm"]:
            return ChangeType.HTML_CHANGE
        elif suffix in [".json"] and "config" in file_path.name.lower():
            return ChangeType.CONFIG_CHANGE
        else:
            return ChangeType.ASSET_CHANGE

    def _is_hot_reloadable(self, file_path: Path) -> bool:
        """Determine if file change is likely to trigger hot reload"""
        change_type = self._determine_change_type(file_path)

        # CSS files are typically hot reloadable
        if change_type == ChangeType.CSS_CHANGE:
            return True

        # JS files may be hot reloadable with HMR
        if change_type == ChangeType.JS_CHANGE:
            # Check for common HMR indicators in project
            return self._project_has_hmr()

        # HTML changes typically require full reload
        return False

    def _project_has_hmr(self) -> bool:
        """Check if project likely has HMR setup.

        Heuristic: presence of any common bundler config (or package.json)
        at the project root. NOTE(review): package.json alone is a weak
        signal — nearly every JS project has one regardless of HMR.
        """
        hmr_indicators = [
            "webpack.config.js",
            "vite.config.js",
            "next.config.js",
            "package.json"  # Check for hot reload dependencies
        ]

        for indicator in hmr_indicators:
            if (self.project_root / indicator).exists():
                return True

        return False

    def _should_monitor_file(self, file_path: Path) -> bool:
        """Check if file should be monitored (regular file, not excluded)."""
        if not file_path.is_file():
            return False

        # Check exclude patterns
        for pattern in self.exclude_patterns:
            if file_path.match(pattern):
                return False

        return True

    def get_recent_changes(self, since_seconds: float = 60) -> List[FileChange]:
        """Get changes from the last N seconds"""
        cutoff_time = time.time() - since_seconds
        return [c for c in self.change_history if c.timestamp >= cutoff_time]

    def get_hot_reload_changes(self, since_seconds: float = 10) -> List[FileChange]:
        """Get hot reloadable changes from the last N seconds"""
        recent_changes = self.get_recent_changes(since_seconds)
        return [c for c in recent_changes if c.is_hot_reloadable]

    def get_change_stats(self) -> Dict[str, Any]:
        """Get statistics about monitored changes.

        Note: "monitoring_duration" is actually the time elapsed since the
        most recent scan, not total monitoring time (kept for compatibility).
        """
        total_changes = len(self.change_history)
        hot_reload_changes = len([c for c in self.change_history if c.is_hot_reloadable])

        change_types = {}
        for change in self.change_history:
            change_type = change.change_type.value
            change_types[change_type] = change_types.get(change_type, 0) + 1

        return {
            "total_files_monitored": len(self.file_hashes),
            "total_changes_detected": total_changes,
            "hot_reload_changes": hot_reload_changes,
            "hot_reload_percentage": (hot_reload_changes / total_changes * 100) if total_changes > 0 else 0,
            "change_types": change_types,
            "monitoring_duration": time.time() - self.last_scan_time if self.last_scan_time > 0 else 0
        }
|
394
|
+
|
395
|
+
|
396
|
+
class HotReloadSynchronizer:
    """
    Synchronizes file changes with browser hot reload events

    Coordinates between file monitoring and browser state to optimize
    CSS iteration timing and provide feedback on hot reload effectiveness.
    """

    def __init__(self, file_monitor: FileChangeMonitor):
        """
        Initialize hot reload synchronizer

        Args:
            file_monitor: FileChangeMonitor instance
        """
        self.file_monitor = file_monitor
        self.browser_sessions: Dict[str, Any] = {}  # Session ID -> PersistentSession
        self.sync_callbacks: List[Callable[[str, List[FileChange]], None]] = []

        self.logger = logging.getLogger(__name__)

        # Set up file change monitoring. _on_hot_reload_changes is a coroutine
        # function, but FileChangeMonitor invokes callbacks synchronously —
        # registering the coroutine function directly produced a coroutine
        # that was never awaited (so the handler never ran). Register a sync
        # adapter that schedules the coroutine instead.
        self.file_monitor.add_hot_reload_callback(self._schedule_hot_reload_handling)

    def _schedule_hot_reload_handling(self, changes: List[FileChange]):
        """Sync adapter: run the async hot-reload handler on the event loop.

        Called synchronously by the file monitor; schedules
        _on_hot_reload_changes as a task on the running loop, or runs it to
        completion when no loop is active (e.g. a sync test harness).
        """
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            # No running event loop — execute the handler to completion.
            asyncio.run(self._on_hot_reload_changes(changes))
            return
        loop.create_task(self._on_hot_reload_changes(changes))

    def register_browser_session(self, session_id: str, persistent_session):
        """Register a browser session for hot reload synchronization"""
        self.browser_sessions[session_id] = persistent_session
        self.logger.info(f"Registered browser session for hot reload sync: {session_id}")

    def unregister_browser_session(self, session_id: str):
        """Unregister a browser session"""
        if session_id in self.browser_sessions:
            del self.browser_sessions[session_id]
            self.logger.info(f"Unregistered browser session: {session_id}")

    def add_sync_callback(self, callback: Callable[[str, List[FileChange]], None]):
        """Add callback for synchronized hot reload events"""
        self.sync_callbacks.append(callback)

    async def _on_hot_reload_changes(self, changes: List[FileChange]):
        """Handle hot reload changes and sync with browser sessions"""
        if not changes:
            return

        self.logger.info(f"Hot reload changes detected: {len(changes)} files")

        # Notify all registered browser sessions
        for session_id, session in self.browser_sessions.items():
            try:
                await self._sync_session_with_changes(session_id, session, changes)
            except Exception as e:
                self.logger.error(f"Error syncing session {session_id}: {e}")

        # Trigger sync callbacks (each callback is invoked once per session)
        for session_id in self.browser_sessions.keys():
            for callback in self.sync_callbacks:
                try:
                    callback(session_id, changes)
                except Exception as e:
                    self.logger.error(f"Error in sync callback: {e}")

    async def _sync_session_with_changes(
        self,
        session_id: str,
        session,
        changes: List[FileChange]
    ):
        """Synchronize a specific browser session with file changes.

        NOTE(review): assumes `session` is a PersistentSession exposing a
        `browser` attribute and a `_check_hot_reload_capability()` coroutine
        — confirm against cursorflow/core/persistent_session.py.
        """

        # Check if session can benefit from these changes
        css_changes = [c for c in changes if c.change_type == ChangeType.CSS_CHANGE]

        if css_changes and hasattr(session, 'browser') and session.browser:
            # Wait a moment for hot reload to process
            await asyncio.sleep(0.2)

            # Check if hot reload is working by monitoring browser state
            try:
                hot_reload_detected = await session._check_hot_reload_capability()

                if hot_reload_detected:
                    self.logger.info(f"Hot reload confirmed for session {session_id}")
                    # Update session's hot reload tracking
                    session.last_reload_time = time.time()
                else:
                    self.logger.warning(f"Hot reload not detected for session {session_id}")

            except Exception as e:
                self.logger.debug(f"Error checking hot reload for session {session_id}: {e}")

    async def wait_for_hot_reload(
        self,
        timeout: float = 5.0,
        file_patterns: Optional[List[str]] = None
    ) -> bool:
        """
        Wait for hot reload changes matching specified patterns

        Args:
            timeout: Maximum time to wait for changes
            file_patterns: File patterns to wait for (e.g., ["*.css"])

        Returns:
            True if matching changes were detected
        """
        start_time = time.time()

        while (time.time() - start_time) < timeout:
            recent_changes = self.file_monitor.get_hot_reload_changes(since_seconds=1)

            if recent_changes:
                if not file_patterns:
                    return True

                # Check if changes match patterns
                for change in recent_changes:
                    for pattern in file_patterns:
                        if change.file_path.match(pattern):
                            return True

            await asyncio.sleep(0.1)

        return False

    def get_sync_stats(self) -> Dict[str, Any]:
        """Get statistics about hot reload synchronization"""
        return {
            "registered_sessions": len(self.browser_sessions),
            "active_sessions": [sid for sid, session in self.browser_sessions.items()
                              if getattr(session, 'is_active', False)],
            "file_monitor_stats": self.file_monitor.get_change_stats()
        }
|
528
|
+
|
529
|
+
|
530
|
+
# Global instances for easy access.
# Lazily-created module singletons; prefer get_file_monitor() /
# get_hot_reload_synchronizer() over touching these directly.
_file_monitor: Optional[FileChangeMonitor] = None
_hot_reload_sync: Optional[HotReloadSynchronizer] = None
|
533
|
+
|
534
|
+
def get_file_monitor(project_root: Optional[Path] = None) -> FileChangeMonitor:
    """Return the shared FileChangeMonitor, creating it on first use.

    Note: project_root only takes effect on the very first call; subsequent
    calls return the existing singleton unchanged.
    """
    global _file_monitor
    if _file_monitor is not None:
        return _file_monitor
    _file_monitor = FileChangeMonitor(project_root)
    return _file_monitor
|
540
|
+
|
541
|
+
def get_hot_reload_synchronizer() -> HotReloadSynchronizer:
    """Return the shared HotReloadSynchronizer, creating it on first use.

    Creation also ensures the shared file monitor exists (via
    get_file_monitor, which populates the _file_monitor singleton).
    """
    global _hot_reload_sync
    if _hot_reload_sync is None:
        _hot_reload_sync = HotReloadSynchronizer(get_file_monitor())
    return _hot_reload_sync
|