cursorflow-1.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cursorflow/__init__.py +78 -0
- cursorflow/auto_updater.py +244 -0
- cursorflow/cli.py +408 -0
- cursorflow/core/agent.py +272 -0
- cursorflow/core/auth_handler.py +433 -0
- cursorflow/core/browser_controller.py +534 -0
- cursorflow/core/browser_engine.py +386 -0
- cursorflow/core/css_iterator.py +397 -0
- cursorflow/core/cursor_integration.py +744 -0
- cursorflow/core/cursorflow.py +649 -0
- cursorflow/core/error_correlator.py +322 -0
- cursorflow/core/event_correlator.py +182 -0
- cursorflow/core/file_change_monitor.py +548 -0
- cursorflow/core/log_collector.py +410 -0
- cursorflow/core/log_monitor.py +179 -0
- cursorflow/core/persistent_session.py +910 -0
- cursorflow/core/report_generator.py +282 -0
- cursorflow/log_sources/local_file.py +198 -0
- cursorflow/log_sources/ssh_remote.py +210 -0
- cursorflow/updater.py +512 -0
- cursorflow-1.2.0.dist-info/METADATA +444 -0
- cursorflow-1.2.0.dist-info/RECORD +25 -0
- cursorflow-1.2.0.dist-info/WHEEL +5 -0
- cursorflow-1.2.0.dist-info/entry_points.txt +2 -0
- cursorflow-1.2.0.dist-info/top_level.txt +1 -0
cursorflow/core/browser_controller.py
@@ -0,0 +1,534 @@
"""
Universal Browser Controller

Framework-agnostic browser automation using Playwright.
No framework adapters needed - pure universal operations.
"""

import asyncio
import time
from typing import Dict, List, Any, Optional
from playwright.async_api import async_playwright, Page, Browser, BrowserContext
import logging
from pathlib import Path


class BrowserController:
    """
    Universal browser automation - works with any web technology

    Provides simple, declarative interface without framework complexity.
    """

    def __init__(self, base_url: str, config: Dict):
        """
        Initialize browser controller

        Args:
            base_url: Base URL for testing
            config: {
                "headless": True,
                "debug_mode": False,
                "human_timeout": 30,
                "viewport": {"width": 1440, "height": 900}
            }
        """
        self.base_url = base_url
        self.config = config
        self.playwright = None
        self.browser = None
        self.context = None
        self.page = None

        # Event tracking
        self.console_logs = []
        self.network_requests = []
        self.performance_metrics = []

        self.logger = logging.getLogger(__name__)

        # Ensure artifacts directory exists
        Path("artifacts/screenshots").mkdir(parents=True, exist_ok=True)

    async def initialize(self):
        """Initialize browser with universal settings"""
        try:
            self.playwright = await async_playwright().start()

            # Browser configuration - works for any framework
            browser_config = {
                "headless": self.config.get("headless", True),
                "slow_mo": 0 if self.config.get("headless", True) else 100,
                "args": [
                    "--disable-web-security",
                    "--disable-features=VizDisplayCompositor",
                    "--disable-background-timer-throttling",
                    "--disable-backgrounding-occluded-windows",
                    "--disable-renderer-backgrounding"
                ]
            }

            self.browser = await self.playwright.chromium.launch(**browser_config)

            # Context configuration
            viewport = self.config.get("viewport", {"width": 1440, "height": 900})
            context_config = {
                "viewport": viewport,
                "ignore_https_errors": True,
                "record_video_dir": "artifacts/videos" if self.config.get("record_video") else None
            }

            self.context = await self.browser.new_context(**context_config)
            self.page = await self.context.new_page()

            # Set up universal event listeners
            await self._setup_event_listeners()

            if not self.config.get("headless", True):
                self.logger.info("🖥️ Browser launched in FOREGROUND mode - human can interact")
            else:
                self.logger.info("🤖 Browser launched in HEADLESS mode - automated only")

        except Exception as e:
            self.logger.error(f"Browser initialization failed: {e}")
            raise

    async def _setup_event_listeners(self):
        """Set up universal event listeners for any framework"""

        # Console events
        self.page.on("console", self._handle_console_message)

        # Network events
        self.page.on("request", self._handle_request)
        self.page.on("response", self._handle_response)

        # Page events
        self.page.on("pageerror", self._handle_page_error)
        self.page.on("crash", self._handle_page_crash)

    def _handle_console_message(self, msg):
        """Handle console messages from any framework"""
        log_entry = {
            "timestamp": time.time(),
            "type": msg.type,
            "text": msg.text,
            "location": {
                "url": msg.location.get("url", "") if msg.location else "",
                "line": msg.location.get("lineNumber", 0) if msg.location else 0,
                "column": msg.location.get("columnNumber", 0) if msg.location else 0
            },
            "args": [str(arg) for arg in msg.args] if msg.args else [],
            "stack_trace": getattr(msg, 'stackTrace', None)
        }
        self.console_logs.append(log_entry)

        # Enhanced logging for better correlation
        if msg.type == "error":
            self.logger.error(f"Console Error: {msg.text} at {msg.location}")
        elif msg.type == "warning":
            self.logger.warning(f"Console Warning: {msg.text}")
        elif msg.type in ["log", "info"] and any(keyword in msg.text.lower() for keyword in ["error", "failed", "exception"]):
            # Catch application logs that indicate errors
            self.logger.warning(f"App Error Log: {msg.text}")

    def _handle_request(self, request):
        """Handle network requests - framework agnostic"""
        # Capture complete request data
        request_data = {
            "timestamp": time.time(),
            "type": "request",
            "url": request.url,
            "method": request.method,
            "headers": dict(request.headers),
            "resource_type": request.resource_type,  # document, xhr, fetch, etc.
            "is_navigation_request": request.is_navigation_request()
        }

        # Capture complete payload data for all request types
        if request.post_data:
            request_data["post_data"] = request.post_data
            request_data["post_data_size"] = len(request.post_data)

            # Try to parse JSON payloads for better debugging
            content_type = request.headers.get("content-type", "")
            if "application/json" in content_type:
                try:
                    import json
                    request_data["parsed_json"] = json.loads(request.post_data)
                except:
                    pass
            elif "application/x-www-form-urlencoded" in content_type:
                try:
                    from urllib.parse import parse_qs
                    request_data["parsed_form"] = parse_qs(request.post_data)
                except:
                    pass

        # Capture query parameters
        from urllib.parse import urlparse, parse_qs
        parsed_url = urlparse(request.url)
        if parsed_url.query:
            request_data["query_params"] = parse_qs(parsed_url.query)

        # Capture file uploads
        if "multipart/form-data" in request.headers.get("content-type", ""):
            request_data["has_file_upload"] = True
            # Note: Actual file content not captured for performance/privacy

        self.network_requests.append(request_data)

        # Enhanced logging for correlation
        if request.resource_type in ["xhr", "fetch"] or "/api/" in request.url:
            payload_info = ""
            if request.post_data:
                payload_info = f" (payload: {len(request.post_data)} bytes)"
            self.logger.debug(f"API Request: {request.method} {request.url}{payload_info}")

        # Log critical data for immediate debugging
        if request.post_data and len(request.post_data) < 500:  # Only log small payloads
            self.logger.debug(f"Request payload: {request.post_data}")

    def _handle_response(self, response):
        """Handle network responses - framework agnostic"""
        response_data = {
            "timestamp": time.time(),
            "type": "response",
            "url": response.url,
            "status": response.status,
            "status_text": response.status_text,
            "headers": dict(response.headers),
            "size": len(response.body) if hasattr(response, 'body') else 0,
            "from_cache": response.from_service_worker or False
        }
        self.network_requests.append(response_data)

        # Log failed requests for correlation
        if response.status >= 400:
            self.logger.warning(f"Failed Response: {response.status} {response.url}")

        # Capture response body for important requests
        should_capture_body = (
            response.status >= 400 or  # All error responses
            any(api_path in response.url for api_path in ["/api/", "/ajax", ".json"]) or  # API calls
            "application/json" in response.headers.get("content-type", "")  # JSON responses
        )

        if should_capture_body:
            asyncio.create_task(self._capture_response_body(response))

    def _handle_page_error(self, error):
        """Handle page errors from any framework"""
        self.console_logs.append({
            "timestamp": time.time(),
            "type": "pageerror",
            "text": str(error),
            "location": None
        })
        self.logger.error(f"Page error: {error}")

    def _handle_page_crash(self, page):
        """Handle page crashes"""
        self.logger.error("Page crashed - attempting recovery")

    async def navigate(self, path: str, wait_for_load: bool = True):
        """Navigate to URL - works with any web framework"""
        try:
            # Build full URL
            if path.startswith(("http://", "https://")):
                url = path
            else:
                url = f"{self.base_url.rstrip('/')}/{path.lstrip('/')}"

            self.logger.info(f"Navigating to: {url}")

            # Navigate and wait
            if wait_for_load:
                await self.page.goto(url, wait_until="networkidle", timeout=30000)
            else:
                await self.page.goto(url, timeout=30000)

            # Universal ready state check (works for any framework)
            await self.page.wait_for_load_state("domcontentloaded")

        except Exception as e:
            self.logger.error(f"Navigation failed to {path}: {e}")
            raise

    async def click(self, selector: str, timeout: int = 10000):
        """Click element - universal across frameworks"""
        try:
            await self.page.wait_for_selector(selector, timeout=timeout)
            await self.page.click(selector)
            self.logger.debug(f"Clicked: {selector}")

        except Exception as e:
            if not self.config.get("headless", True):
                # In foreground mode, allow human intervention
                self.logger.warning(f"Click failed for {selector}: {e}")
                self.logger.info(f"Human has {self.config.get('human_timeout', 30)} seconds to manually click...")
                await asyncio.sleep(self.config.get('human_timeout', 30))
            else:
                raise

    async def fill(self, selector: str, value: str, timeout: int = 10000):
        """Fill input field - universal"""
        try:
            await self.page.wait_for_selector(selector, timeout=timeout)
            await self.page.fill(selector, value)
            self.logger.debug(f"Filled {selector}: {value}")

        except Exception as e:
            self.logger.error(f"Fill failed for {selector}: {e}")
            raise

    async def type(self, selector: str, text: str, delay: int = 50):
        """Type text slowly - useful for complex forms"""
        try:
            await self.page.wait_for_selector(selector)
            await self.page.type(selector, text, delay=delay)
            self.logger.debug(f"Typed in {selector}: {text}")

        except Exception as e:
            self.logger.error(f"Type failed for {selector}: {e}")
            raise

    async def wait_for_element(self, selector: str, timeout: int = 30000, state: str = "visible"):
        """Wait for element - universal"""
        try:
            await self.page.wait_for_selector(selector, timeout=timeout, state=state)
            self.logger.debug(f"Element ready: {selector}")

        except Exception as e:
            self.logger.error(f"Wait failed for {selector}: {e}")
            raise

    async def wait_for_condition(self, condition: str, timeout: int = 30000):
        """Wait for custom JavaScript condition - universal"""
        try:
            await self.page.wait_for_function(condition, timeout=timeout)
            self.logger.debug(f"Condition met: {condition}")

        except Exception as e:
            self.logger.error(f"Condition wait failed: {condition}, {e}")
            raise

    async def screenshot(self, name: str, full_page: bool = False) -> str:
        """Take screenshot - universal"""
        try:
            timestamp = int(time.time())
            filename = f"artifacts/screenshots/{name}_{timestamp}.png"

            await self.page.screenshot(
                path=filename,
                full_page=full_page
            )

            self.logger.debug(f"Screenshot saved: {filename}")
            return filename

        except Exception as e:
            self.logger.error(f"Screenshot failed: {e}")
            raise

    async def evaluate_javascript(self, script: str) -> Any:
        """Execute JavaScript - universal"""
        try:
            result = await self.page.evaluate(script)
            return result

        except Exception as e:
            self.logger.error(f"JavaScript evaluation failed: {e}")
            raise

    async def get_computed_styles(self, selector: str) -> Dict:
        """Get computed styles for element - universal"""
        try:
            script = f"""
            (selector) => {{
                const el = document.querySelector(selector);
                if (!el) return null;
                const styles = window.getComputedStyle(el);
                return {{
                    position: styles.position,
                    display: styles.display,
                    flexDirection: styles.flexDirection,
                    justifyContent: styles.justifyContent,
                    alignItems: styles.alignItems,
                    width: styles.width,
                    height: styles.height,
                    margin: styles.margin,
                    padding: styles.padding,
                    fontSize: styles.fontSize,
                    color: styles.color,
                    backgroundColor: styles.backgroundColor
                }};
            }}
            """

            result = await self.page.evaluate(script, selector)
            return result or {}

        except Exception as e:
            self.logger.error(f"Get computed styles failed for {selector}: {e}")
            return {}

    async def inject_css(self, css: str) -> bool:
        """Inject CSS into page - universal"""
        try:
            await self.page.add_style_tag(content=css)
            await self.page.wait_for_timeout(100)  # Let CSS apply
            return True

        except Exception as e:
            self.logger.error(f"CSS injection failed: {e}")
            return False

    async def set_viewport(self, width: int, height: int):
        """Change viewport size - universal"""
        try:
            await self.page.set_viewport_size({"width": width, "height": height})
            await self.page.wait_for_timeout(200)  # Let layout stabilize

        except Exception as e:
            self.logger.error(f"Viewport change failed: {e}")
            raise

    async def get_performance_metrics(self) -> Dict:
        """Get page performance metrics - universal"""
        try:
            metrics = await self.page.evaluate("""
                () => {
                    const perf = performance.getEntriesByType('navigation')[0];
                    return {
                        loadTime: perf ? perf.loadEventEnd - perf.loadEventStart : 0,
                        domContentLoaded: perf ? perf.domContentLoadedEventEnd - perf.domContentLoadedEventStart : 0,
                        firstPaint: performance.getEntriesByType('paint').find(p => p.name === 'first-paint')?.startTime || 0,
                        largestContentfulPaint: performance.getEntriesByType('largest-contentful-paint')[0]?.startTime || 0
                    };
                }
            """)

            return metrics

        except Exception as e:
            self.logger.error(f"Performance metrics failed: {e}")
            return {}

    async def cleanup(self):
        """Clean up browser resources"""
        try:
            if self.page:
                await self.page.close()
            if self.context:
                await self.context.close()
            if self.browser:
                await self.browser.close()
            if self.playwright:
                await self.playwright.stop()

            self.logger.info("Browser cleanup completed")

        except Exception as e:
            self.logger.error(f"Browser cleanup failed: {e}")

    async def _capture_response_body(self, response):
        """Capture response body for API calls and errors"""
        try:
            body = await response.body()
            decoded_body = body.decode('utf-8', errors='ignore')

            # Find and update the matching response entry
            for req in reversed(self.network_requests):
                if (req.get("type") == "response" and
                    req.get("url") == response.url and
                    req.get("status") == response.status):

                    # Store raw body (truncated for large responses)
                    req["body"] = decoded_body[:2000]  # Increased limit for debugging
                    req["body_size"] = len(decoded_body)
                    req["body_truncated"] = len(decoded_body) > 2000

                    # Parse JSON responses for easier debugging
                    content_type = response.headers.get("content-type", "")
                    if "application/json" in content_type:
                        try:
                            import json
                            req["parsed_json"] = json.loads(decoded_body)
                        except:
                            req["json_parse_error"] = True

                    # Log important error responses for immediate visibility
                    if response.status >= 400:
                        error_preview = decoded_body[:200].replace('\n', ' ')
                        self.logger.error(f"Error response body: {error_preview}")

                    break

        except Exception as e:
            self.logger.warning(f"Failed to capture response body: {e}")

    async def capture_network_har(self) -> Dict:
        """Capture full network activity as HAR format"""
        try:
            # Get all network requests in HAR-like format
            har_entries = []

            for req in self.network_requests:
                if req.get("type") == "request":
                    # Find matching response
                    response = None
                    for resp in self.network_requests:
                        if (resp.get("type") == "response" and
                            resp.get("url") == req.get("url") and
                            resp.get("timestamp", 0) > req.get("timestamp", 0)):
                            response = resp
                            break

                    har_entry = {
                        "request": {
                            "method": req.get("method"),
                            "url": req.get("url"),
                            "headers": req.get("headers", {}),
                            "postData": req.get("post_data"),
                            "timestamp": req.get("timestamp")
                        },
                        "response": response if response else {"status": 0},
                        "time": (response.get("timestamp", 0) - req.get("timestamp", 0)) * 1000  # ms
                    }
                    har_entries.append(har_entry)

            return {"entries": har_entries}

        except Exception as e:
            self.logger.error(f"HAR capture failed: {e}")
            return {"entries": []}

    def get_console_errors(self) -> List[Dict]:
        """Get only console errors for quick analysis"""
        return [log for log in self.console_logs if log.get("type") == "error"]

    def get_failed_requests(self) -> List[Dict]:
        """Get only failed network requests"""
        failed = []
        for req in self.network_requests:
            if (req.get("type") == "response" and
                req.get("status", 0) >= 400):
                failed.append(req)
        return failed

    def get_collected_data(self) -> Dict:
        """Get all collected browser data"""
        return {
            "console_logs": self.console_logs,
            "network_requests": self.network_requests,
            "performance_metrics": self.performance_metrics,
            "console_errors": self.get_console_errors(),
            "failed_requests": self.get_failed_requests(),
            "summary": {
                "total_console_logs": len(self.console_logs),
                "total_errors": len(self.get_console_errors()),
                "total_requests": len([r for r in self.network_requests if r.get("type") == "request"]),
                "failed_requests": len(self.get_failed_requests())
            }
        }
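For orientation, here is a minimal usage sketch of the `BrowserController` defined in `cursorflow/core/browser_controller.py` above. It only calls methods that appear in the diff; the target URL, selectors, and config values are illustrative assumptions, not anything shipped with the package.

```python
import asyncio

from cursorflow.core.browser_controller import BrowserController


async def main():
    # Hypothetical app URL and config; keys follow the __init__ docstring above
    controller = BrowserController("http://localhost:3000", {"headless": True})
    await controller.initialize()
    try:
        await controller.navigate("/login")                   # resolved against base_url
        await controller.fill("#email", "user@example.com")   # selectors are examples only
        await controller.click("button[type=submit]")
        await controller.screenshot("after_login")
        data = controller.get_collected_data()                # console, network, error summary
        print(data["summary"])
    finally:
        await controller.cleanup()


asyncio.run(main())
```

Because `click()` falls back to a human-intervention wait when `headless` is false, the sketch keeps `headless: True` so failures raise immediately.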