router_cli-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
router_cli/client.py ADDED
@@ -0,0 +1,883 @@
1
+ """HTTP client for the D-Link DSL-2750U router."""
2
+
3
+ import re
4
+ import time
5
+ import urllib.error
6
+ import urllib.parse
7
+ import urllib.request
8
+ from dataclasses import dataclass, field
9
+ from http.cookiejar import CookieJar
10
+
11
+
12
+ class RouterError(Exception):
13
+ """Base exception for router errors."""
14
+
15
+ pass
16
+
17
+
18
+ class AuthenticationError(RouterError):
19
+ """Raised when authentication fails or session expires."""
20
+
21
+ pass
22
+
23
+
24
+ class ConnectionError(RouterError):
25
+ """Raised when unable to connect to the router."""
26
+
27
+ pass
28
+
29
+
30
+ class HTTPError(RouterError):
31
+ """Raised for HTTP error responses."""
32
+
33
+ def __init__(self, message: str, status_code: int | None = None):
34
+ super().__init__(message)
35
+ self.status_code = status_code
36
+
37
+
38
+ @dataclass
39
+ class RouterStatus:
40
+ """Parsed router status information."""
41
+
42
+ # System Info
43
+ model_name: str = ""
44
+ time_date: str = ""
45
+ firmware: str = ""
46
+
47
+ # Internet Info
48
+ default_gateway: str = ""
49
+ preferred_dns: str = ""
50
+ alternate_dns: str = ""
51
+
52
+ # WAN Connections
53
+ wan_connections: list[dict] = field(default_factory=list)
54
+
55
+ # Wireless Info
56
+ ssid: str = ""
57
+ wireless_mac: str = ""
58
+ wireless_status: str = ""
59
+ security_mode: str = ""
60
+
61
+ # Local Network
62
+ local_mac: str = ""
63
+ local_ip: str = ""
64
+ subnet_mask: str = ""
65
+ dhcp_server: str = ""
66
+
67
+
68
+ @dataclass
69
+ class WirelessClient:
70
+ """A connected wireless client."""
71
+
72
+ mac: str
73
+ associated: bool
74
+ authorized: bool
75
+ ssid: str
76
+ interface: str
77
+
78
+
79
+ @dataclass
80
+ class DHCPLease:
81
+ """A DHCP lease entry."""
82
+
83
+ hostname: str
84
+ mac: str
85
+ ip: str
86
+ expires_in: str
87
+
88
+
89
+ @dataclass
90
+ class Route:
91
+ """A routing table entry."""
92
+
93
+ destination: str
94
+ gateway: str
95
+ subnet_mask: str
96
+ flag: str
97
+ metric: int
98
+ service: str
99
+
100
+
101
+ @dataclass
102
+ class InterfaceStats:
103
+ """Statistics for a network interface."""
104
+
105
+ interface: str
106
+ rx_bytes: int
107
+ rx_packets: int
108
+ rx_errors: int
109
+ rx_drops: int
110
+ tx_bytes: int
111
+ tx_packets: int
112
+ tx_errors: int
113
+ tx_drops: int
114
+
115
+
116
+ @dataclass
117
+ class ADSLStats:
118
+ """ADSL line statistics."""
119
+
120
+ mode: str = ""
121
+ traffic_type: str = ""
122
+ status: str = ""
123
+ link_power_state: str = ""
124
+ downstream_rate: int = 0
125
+ upstream_rate: int = 0
126
+ downstream_snr_margin: float = 0.0
127
+ upstream_snr_margin: float = 0.0
128
+ downstream_attenuation: float = 0.0
129
+ upstream_attenuation: float = 0.0
130
+ downstream_output_power: float = 0.0
131
+ upstream_output_power: float = 0.0
132
+ downstream_attainable_rate: int = 0
133
+ upstream_attainable_rate: int = 0
134
+
135
+
136
+ @dataclass
137
+ class Statistics:
138
+ """Network and ADSL statistics."""
139
+
140
+ lan_interfaces: list[InterfaceStats] = field(default_factory=list)
141
+ wan_interfaces: list[InterfaceStats] = field(default_factory=list)
142
+ adsl: ADSLStats = field(default_factory=ADSLStats)
143
+
144
+
145
+ @dataclass
146
+ class LogEntry:
147
+ """A system log entry."""
148
+
149
+ datetime: str
150
+ facility: str
151
+ severity: str
152
+ message: str
153
+
154
+
155
+ class RouterClient:
156
+     """Client for communicating with a D-Link DSL-2750U router."""
157
+
158
+ def __init__(self, ip: str, username: str, password: str):
159
+ self.ip = ip
160
+ self.username = username
161
+ self.password = password
162
+ self.base_url = f"http://{ip}"
163
+ self.cookie_jar = CookieJar()
164
+ self.opener = urllib.request.build_opener(
165
+ urllib.request.HTTPCookieProcessor(self.cookie_jar)
166
+ )
167
+ self._authenticated = False
168
+
169
+ # Patterns that indicate a login/session expired page
170
+ _LOGIN_PAGE_PATTERNS = [
171
+ re.compile(r"<title>\s*Login\s*</title>", re.IGNORECASE),
172
+ re.compile(r'name=["\']?password["\']?.*type=["\']?password', re.IGNORECASE),
173
+ re.compile(r"session\s*(has\s*)?expired", re.IGNORECASE),
174
+ re.compile(r"please\s*log\s*in", re.IGNORECASE),
175
+ re.compile(r"unauthorized", re.IGNORECASE),
176
+ ]
177
+
178
+ # Patterns that indicate an error page
179
+ _ERROR_PAGE_PATTERNS = [
180
+ re.compile(r"<title>\s*Error\s*</title>", re.IGNORECASE),
181
+ re.compile(r"internal\s*server\s*error", re.IGNORECASE),
182
+ re.compile(r"service\s*unavailable", re.IGNORECASE),
183
+ re.compile(r"<h1>\s*\d{3}\s*</h1>", re.IGNORECASE), # <h1>500</h1> etc.
184
+ ]
185
+
186
+ def _is_login_page(self, html: str) -> bool:
187
+ """Check if the HTML response is a login/session expired page."""
188
+ for pattern in self._LOGIN_PAGE_PATTERNS:
189
+ if pattern.search(html):
190
+ return True
191
+ return False
192
+
193
+ def _is_error_page(self, html: str) -> tuple[bool, str | None]:
194
+ """Check if the HTML response is an error page.
195
+
196
+ Returns (is_error, error_message).
197
+ """
198
+ for pattern in self._ERROR_PAGE_PATTERNS:
199
+ if pattern.search(html):
200
+ # Try to extract a meaningful error message
201
+ title_match = re.search(r"<title>([^<]+)</title>", html, re.IGNORECASE)
202
+ h1_match = re.search(r"<h1>([^<]+)</h1>", html, re.IGNORECASE)
203
+ msg = (
204
+ title_match.group(1)
205
+ if title_match
206
+ else (h1_match.group(1) if h1_match else "Unknown error")
207
+ )
208
+ return True, msg.strip()
209
+ return False, None
210
+
211
+ def authenticate(self) -> bool:
212
+ """Authenticate with the router.
213
+
214
+ POST to /main with credentials in cookies and form data.
215
+ """
216
+ url = f"{self.base_url}/main"
217
+
218
+ # URL-encode the password for form data (+ becomes %2B)
219
+ encoded_password = urllib.parse.quote(self.password, safe="")
220
+ form_data = f"username={self.username}&password={encoded_password}&loginfo=on"
221
+
222
+ # Set auth cookies
223
+ cookie_header = f"username={self.username}; password={self.password}"
224
+
225
+ request = urllib.request.Request(
226
+ url,
227
+ data=form_data.encode("utf-8"),
228
+ headers={
229
+ "Cookie": cookie_header,
230
+ "Content-Type": "application/x-www-form-urlencoded",
231
+ },
232
+ method="POST",
233
+ )
234
+
235
+ try:
236
+ with self.opener.open(request, timeout=10) as response:
237
+ html = response.read().decode("utf-8", errors="replace")
238
+                 # Check whether the response is the login page (auth failed)
239
+ if self._is_login_page(html):
240
+ self._authenticated = False
241
+ raise AuthenticationError(
242
+ "Authentication failed: invalid credentials"
243
+ )
244
+ self._authenticated = response.status == 200
245
+ return self._authenticated
246
+ except urllib.error.HTTPError as e:
247
+ raise AuthenticationError(f"Authentication failed: HTTP {e.code}")
248
+ except urllib.error.URLError as e:
249
+ raise ConnectionError(
250
+ f"Failed to connect to router at {self.ip}: {e.reason}"
251
+ )
252
+
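A minimal calling sketch for the login flow above (illustrative only, not part of the packaged module; the address and credentials are placeholders):

    from router_cli.client import RouterClient, AuthenticationError, ConnectionError

    client = RouterClient("192.168.1.1", "admin", "admin")
    try:
        client.authenticate()
    except AuthenticationError:
        print("Login rejected: check the username and password")
    except ConnectionError as exc:
        print(f"Router unreachable: {exc}")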
253
+ def fetch_page(self, path: str, max_retries: int = 3) -> str:
254
+ """Fetch a page from the router with authentication cookies.
255
+
256
+ Args:
257
+ path: The page path to fetch (e.g., "/info.html")
258
+ max_retries: Maximum number of retry attempts for transient failures
259
+
260
+ Returns:
261
+ The HTML content of the page
262
+
263
+ Raises:
264
+ AuthenticationError: If session expired and re-auth fails
265
+ ConnectionError: If unable to connect to the router
266
+ HTTPError: If the router returns an HTTP error
267
+ """
268
+ if not self._authenticated:
269
+ self.authenticate()
270
+
271
+ url = f"{self.base_url}/{path.lstrip('/')}"
272
+ cookie_header = f"username={self.username}; password={self.password}"
273
+
274
+ last_error: Exception | None = None
275
+
276
+ for attempt in range(max_retries):
277
+ request = urllib.request.Request(
278
+ url, headers={"Cookie": cookie_header}, method="GET"
279
+ )
280
+
281
+ try:
282
+ with self.opener.open(request, timeout=10) as response:
283
+ html = response.read().decode("utf-8", errors="replace")
284
+
285
+ # Check if we got a login page (session expired)
286
+ if self._is_login_page(html):
287
+ self._authenticated = False
288
+ # Try to re-authenticate once
289
+ if attempt == 0:
290
+ try:
291
+ self.authenticate()
292
+ continue # Retry the request
293
+ except AuthenticationError:
294
+ raise AuthenticationError(
295
+ "Session expired and re-authentication failed"
296
+ )
297
+ raise AuthenticationError("Session expired")
298
+
299
+ # Check if we got an error page
300
+ is_error, error_msg = self._is_error_page(html)
301
+ if is_error:
302
+                         # Some error pages are transient; retry
303
+ if attempt < max_retries - 1:
304
+ time.sleep(1 * (attempt + 1)) # Backoff
305
+ continue
306
+ raise HTTPError(f"Router returned error page: {error_msg}")
307
+
308
+ return html
309
+
310
+ except urllib.error.HTTPError as e:
311
+ last_error = e
312
+ # Read the error body for better diagnostics
313
+ try:
314
+ error_body = e.read().decode("utf-8", errors="replace")[:200]
315
+ except Exception:
316
+ error_body = ""
317
+
318
+ # Retry on 5xx errors (server-side issues)
319
+ if 500 <= e.code < 600 and attempt < max_retries - 1:
320
+                     time.sleep(1 * (attempt + 1))  # Linear backoff
321
+ continue
322
+
323
+ # Provide helpful error message based on status code
324
+ if e.code == 401:
325
+ self._authenticated = False
326
+ raise AuthenticationError("Authentication required (401)")
327
+ elif e.code == 403:
328
+ raise AuthenticationError("Access forbidden (403)")
329
+ elif e.code == 404:
330
+ raise HTTPError(f"Page not found: {path}", status_code=404)
331
+ elif e.code == 503:
332
+ raise HTTPError(
333
+ "Router is busy or unavailable (503). Try again later.",
334
+ status_code=503,
335
+ )
336
+ else:
337
+ # Include snippet of error body for debugging
338
+ snippet = error_body[:100].replace("\n", " ").strip()
339
+ raise HTTPError(
340
+ f"HTTP {e.code} fetching {path}: {snippet or e.reason}",
341
+ status_code=e.code,
342
+ )
343
+
344
+ except urllib.error.URLError as e:
345
+ last_error = e
346
+ # Retry on network errors
347
+ if attempt < max_retries - 1:
348
+ time.sleep(1 * (attempt + 1))
349
+ continue
350
+ raise ConnectionError(f"Failed to connect to {self.ip}: {e.reason}")
351
+
352
+ except TimeoutError:
353
+ last_error = TimeoutError(f"Request to {path} timed out")
354
+ if attempt < max_retries - 1:
355
+ time.sleep(1 * (attempt + 1))
356
+ continue
357
+ raise ConnectionError(
358
+ f"Request to {path} timed out after {max_retries} attempts"
359
+ )
360
+
361
+ # Should not reach here, but just in case
362
+ raise ConnectionError(
363
+ f"Failed to fetch {path} after {max_retries} attempts: {last_error}"
364
+ )
365
+
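Continuing that sketch, the retry and error contract documented in fetch_page might be exercised like this (hedged example; /info.html is a path the client itself uses, and HTTPError here is the module's own exception, not urllib's):

    from router_cli.client import HTTPError

    try:
        html = client.fetch_page("/info.html", max_retries=3)
    except HTTPError as exc:
        print(f"Router returned HTTP {exc.status_code}: {exc}")
    except AuthenticationError:
        print("Session expired and re-authentication failed")
    except ConnectionError:
        print("Could not reach the router after all retries")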
366
+ def get_session_key(self, html: str) -> str:
367
+ """Extract session key from HTML page."""
368
+ match = re.search(r"var\s+sessionKey\s*=\s*[\"']([^\"']+)[\"']", html)
369
+ if match:
370
+ return match.group(1)
371
+ raise ValueError("Could not find session key in page")
372
+
373
+ def get_status(self) -> RouterStatus:
374
+ """Fetch and parse router status."""
375
+ html = self.fetch_page("/info.html")
376
+ return self._parse_status(html)
377
+
378
+ def _parse_status(self, html: str) -> RouterStatus:
379
+ """Parse status information from HTML."""
380
+ status = RouterStatus()
381
+
382
+ # System Info - from JavaScript variables or table cells
383
+ status.model_name = self._extract_value(
384
+ html,
385
+ r"var\s+modeName\s*=\s*[\"']([^\"']+)[\"']",
386
+ r"Model Name:.*?<td[^>]*>([^<]+)</td>",
387
+ )
388
+
389
+ status.time_date = self._extract_value(
390
+ html,
391
+ # From document.writeln() in JS
392
+ r"Time and Date:.*?<td>([^<]+)</td>",
393
+ # Static HTML fallback
394
+ r"Time and Date:.*?<td[^>]*>([^<]+)</td>",
395
+ )
396
+
397
+ status.firmware = self._extract_value(
398
+ html,
399
+ r"Firmware Version:\s*([A-Z0-9_.]+)",
400
+ r"<td[^>]*>Firmware Version:</td>\s*<td[^>]*>([^<]+)</td>",
401
+ )
402
+
403
+ # Internet Info - parse from JS variables first, then static HTML
404
+ status.default_gateway = self._extract_value(
405
+ html,
406
+ r"var\s+dfltGw\s*=\s*[\"']([^\"']+)[\"']",
407
+ r"Default Gateway:.*?<td[^>]*>([^<]+)</td>",
408
+ )
409
+
410
+ status.preferred_dns = self._extract_value(
411
+ html, r"Preferred DNS Server:.*?<td[^>]*>([^<]+)</td>"
412
+ )
413
+
414
+ status.alternate_dns = self._extract_value(
415
+ html, r"Alternate DNS Server:.*?<td[^>]*>([^<]+)</td>"
416
+ )
417
+
418
+ # WAN Connections - parse table rows with class="hd"
419
+ status.wan_connections = self._parse_wan_connections(html)
420
+
421
+ # Wireless Info - find section and extract all values
422
+ wireless_section = re.search(
423
+ r"Wireless Info:.*?Local Network Info", html, re.DOTALL
424
+ )
425
+ if wireless_section:
426
+ ws = wireless_section.group(0)
427
+ status.ssid = self._extract_value(
428
+ ws,
429
+ r"<option[^>]*selected[^>]*>\s*([^<\n]+?)\s*</option>",
430
+ )
431
+ status.wireless_mac = self._extract_value(
432
+ ws, r"MAC Address:.*?<td[^>]*>([^<]+)</td>"
433
+ )
434
+ status.wireless_status = self._extract_value(
435
+ ws, r"Status:.*?<td[^>]*>([^<]+)</td>"
436
+ )
437
+ status.security_mode = self._extract_value(
438
+ ws, r"Security Mode:.*?<td[^>]*>([^<]+)</td>"
439
+ )
440
+
441
+ # Local Network Info - find section after "Local Network Info"
442
+ local_section = re.search(
443
+ r"Local Network Info.*?(?:Storage Device|$)", html, re.DOTALL
444
+ )
445
+ if local_section:
446
+ ls = local_section.group(0)
447
+ # MAC address may have malformed HTML (</SPAN> without opening tag)
448
+ status.local_mac = self._extract_value(
449
+ ls,
450
+ r"MAC Address:</TD>\s*<TD>([^<]+)",
451
+ r"MAC Address:.*?<td[^>]*>([^<]+)</td>",
452
+ )
453
+ status.local_ip = self._extract_value(
454
+ ls, r"IP Address:.*?<td[^>]*>([^<]+)</td>"
455
+ )
456
+ status.subnet_mask = self._extract_value(
457
+ ls, r"Subnet Mask:.*?<td[^>]*>([^<]+)</td>"
458
+ )
459
+ # DHCP may be in document.writeln() or static HTML
460
+ status.dhcp_server = self._extract_value(
461
+ ls,
462
+ r"DHCP Server:.*?<td>([^<]+)</td>",
463
+ r"DHCP Server:.*?<td[^>]*>([^<]+)</td>",
464
+ )
465
+
466
+ return status
467
+
468
+ def _extract_value(self, html: str, *patterns: str) -> str:
469
+         """Try multiple regex patterns and return the first non-empty match, or "N/A"."""
470
+ for pattern in patterns:
471
+ match = re.search(pattern, html, re.DOTALL | re.IGNORECASE)
472
+ if match:
473
+ value = match.group(1).strip()
474
+ # Clean up HTML entities
475
+ value = value.replace("&nbsp;", "").strip()
476
+ if value:
477
+ return value
478
+ return "N/A"
479
+
480
+ def _parse_wan_connections(self, html: str) -> list[dict]:
481
+ """Parse WAN connection table."""
482
+ connections = []
483
+
484
+ # Find the WAN connections table section
485
+ wan_section = re.search(
486
+ r"Enabled WAN Connections:.*?</table>", html, re.DOTALL | re.IGNORECASE
487
+ )
488
+ if not wan_section:
489
+ return connections
490
+
491
+ section = wan_section.group(0)
492
+
493
+ # Find data rows - handle both single and double quotes
494
+ # Pattern matches: <tr align='center'> or <tr align="center">
495
+ rows = re.findall(
496
+ r"<tr[^>]*align=[\"']center[\"'][^>]*>(.*?)</tr>",
497
+ section,
498
+ re.DOTALL | re.IGNORECASE,
499
+ )
500
+
501
+ for row in rows:
502
+ # Match cells with class='hd' or class="hd"
503
+ cells = re.findall(
504
+ r"<td[^>]*class=[\"']hd[\"'][^>]*>([^<]*)</td>",
505
+ row,
506
+ re.IGNORECASE,
507
+ )
508
+ if len(cells) >= 4:
509
+ connections.append(
510
+ {
511
+ "interface": cells[0].strip(),
512
+ "description": cells[1].strip(),
513
+ "status": cells[2].strip(),
514
+ "ipv4": cells[3].strip(),
515
+ }
516
+ )
517
+
518
+ return connections
519
+
520
+ def reboot(self) -> bool:
521
+ """Reboot the router."""
522
+ # First get session key from internet.html
523
+ html = self.fetch_page("/internet.html")
524
+ session_key = self.get_session_key(html)
525
+
526
+ # POST to rebootinfo.cgi
527
+ url = f"{self.base_url}/rebootinfo.cgi?sessionKey={session_key}"
528
+ cookie_header = f"username={self.username}; password={self.password}"
529
+
530
+ request = urllib.request.Request(
531
+ url, headers={"Cookie": cookie_header}, method="POST", data=b""
532
+ )
533
+
534
+ try:
535
+ with self.opener.open(request, timeout=10) as response:
536
+ return response.status == 200
537
+ except urllib.error.URLError:
538
+             # The router may drop the connection during reboot; this is expected
539
+ return True
540
+
541
+ def get_wireless_clients(self) -> list[WirelessClient]:
542
+ """Fetch and parse wireless clients."""
543
+ html = self.fetch_page("/wlstationlist.cmd")
544
+ return self._parse_wireless_clients(html)
545
+
546
+ def _parse_wireless_clients(self, html: str) -> list[WirelessClient]:
547
+ """Parse wireless clients from HTML."""
548
+ clients = []
549
+
550
+ # Find all data rows in the table
551
+ rows = re.findall(
552
+ r"<tr>\s*<td><p align=center>\s*([A-Fa-f0-9:]+)\s*"
553
+ r".*?<p align=center>\s*(Yes|No)\s*</p>.*?"
554
+ r"<p align=center>\s*(Yes|No)\s*</p>.*?"
555
+ r"<p align=center>\s*([^<&]+?)(?:&nbsp)?\s*</td>.*?"
556
+ r"<p align=center>\s*([^<&]+?)(?:&nbsp)?\s*</td>",
557
+ html,
558
+ re.DOTALL | re.IGNORECASE,
559
+ )
560
+
561
+ for row in rows:
562
+ mac, associated, authorized, ssid, interface = row
563
+ clients.append(
564
+ WirelessClient(
565
+ mac=mac.strip(),
566
+ associated=associated.lower() == "yes",
567
+ authorized=authorized.lower() == "yes",
568
+ ssid=ssid.strip(),
569
+ interface=interface.strip(),
570
+ )
571
+ )
572
+
573
+ return clients
574
+
575
+ def get_dhcp_leases(self) -> list[DHCPLease]:
576
+ """Fetch and parse DHCP leases."""
577
+ html = self.fetch_page("/dhcpinfo.html")
578
+ return self._parse_dhcp_leases(html)
579
+
580
+ def _parse_dhcp_leases(self, html: str) -> list[DHCPLease]:
581
+ """Parse DHCP leases from HTML."""
582
+ leases = []
583
+
584
+ # Find the DHCP table section
585
+ table_match = re.search(
586
+ r"<table class=formlisting>.*?</table>", html, re.DOTALL | re.IGNORECASE
587
+ )
588
+ if not table_match:
589
+ return leases
590
+
591
+ table = table_match.group(0)
592
+
593
+ # Find data rows (skip header row)
594
+ rows = re.findall(
595
+ r"<tr><td>([^<]*)</td><td>([^<]*)</td><td>([^<]*)</td><td>([^<]*)</td></tr>",
596
+ table,
597
+ re.IGNORECASE,
598
+ )
599
+
600
+ for row in rows:
601
+ hostname, mac, ip, expires = row
602
+ leases.append(
603
+ DHCPLease(
604
+ hostname=hostname.strip(),
605
+ mac=mac.strip(),
606
+ ip=ip.strip(),
607
+ expires_in=expires.strip(),
608
+ )
609
+ )
610
+
611
+ return leases
612
+
613
+ def get_routes(self) -> list[Route]:
614
+ """Fetch and parse routing table."""
615
+ html = self.fetch_page("/rtroutecfg.cmd?action=dlinkau")
616
+ return self._parse_routes(html)
617
+
618
+ def _parse_routes(self, html: str) -> list[Route]:
619
+ """Parse routing table from HTML."""
620
+ routes = []
621
+
622
+ # Find the routing table
623
+ table_match = re.search(
624
+ r"<table class=formlisting>.*?</table>", html, re.DOTALL | re.IGNORECASE
625
+ )
626
+ if not table_match:
627
+ return routes
628
+
629
+ table = table_match.group(0)
630
+
631
+ # Find data rows - 6 cells per row
632
+ rows = re.findall(
633
+ r"<tr>\s*"
634
+ r"<td>([^<]*)</td>\s*"
635
+ r"<td>([^<]*)</td>\s*"
636
+ r"<td>([^<]*)</td>\s*"
637
+ r"<td>([^<]*)</td>\s*"
638
+ r"<td>([^<]*)</td>\s*"
639
+ r"<td>([^<]*)</td>\s*"
640
+ r"</tr>",
641
+ table,
642
+ re.IGNORECASE,
643
+ )
644
+
645
+ for row in rows:
646
+ dest, gw, mask, flag, metric, service = row
647
+ # Skip header row
648
+ if "Destination" in dest:
649
+ continue
650
+ routes.append(
651
+ Route(
652
+ destination=dest.strip(),
653
+ gateway=gw.strip(),
654
+ subnet_mask=mask.strip(),
655
+ flag=flag.strip(),
656
+ metric=int(metric.strip()) if metric.strip().isdigit() else 0,
657
+ service=service.replace("&nbsp;", "").strip(),
658
+ )
659
+ )
660
+
661
+ return routes
662
+
663
+ def get_statistics(self) -> Statistics:
664
+ """Fetch and parse network statistics."""
665
+ html = self.fetch_page("/statsifcwanber.html")
666
+ return self._parse_statistics(html)
667
+
668
+ def _parse_statistics(self, html: str) -> Statistics:
669
+ """Parse statistics from HTML."""
670
+ stats = Statistics()
671
+
672
+ # Parse LAN interface stats - look for rows with 9 cells
673
+ lan_section = re.search(
674
+ r"Local Network.*?</table>", html, re.DOTALL | re.IGNORECASE
675
+ )
676
+ if lan_section:
677
+ stats.lan_interfaces = self._parse_interface_stats(lan_section.group(0))
678
+
679
+ # Parse WAN interface stats
680
+ wan_section = re.search(
681
+ r"<td class=topheader>\s*Internet\s*</td>.*?</table>",
682
+ html,
683
+ re.DOTALL | re.IGNORECASE,
684
+ )
685
+ if wan_section:
686
+ stats.wan_interfaces = self._parse_wan_interface_stats(wan_section.group(0))
687
+
688
+ # Parse ADSL stats
689
+ stats.adsl = self._parse_adsl_stats(html)
690
+
691
+ return stats
692
+
693
+ def _parse_interface_stats(self, html: str) -> list[InterfaceStats]:
694
+ """Parse interface statistics table."""
695
+ interfaces = []
696
+
697
+         # Find rows with 8 numeric values (rx/tx bytes, packets, errors, drops)
698
+ rows = re.findall(
699
+ r"<tr>\s*<td class='hd'>.*?</script>\s*</td>\s*"
700
+ r"<td>(\d+)</td>\s*<td>(\d+)</td>\s*<td>(\d+)</td>\s*<td>(\d+)</td>\s*"
701
+ r"<td>(\d+)</td>\s*<td>(\d+)</td>\s*<td>(\d+)</td>\s*<td>(\d+)</td>\s*</tr>",
702
+ html,
703
+ re.DOTALL | re.IGNORECASE,
704
+ )
705
+
706
+ # Extract interface names from script blocks
707
+ intf_names = re.findall(
708
+ r"brdIntf\s*=\s*['\"]([^'\"]+)['\"]", html, re.IGNORECASE
709
+ )
710
+
711
+ for i, row in enumerate(rows):
712
+ intf_name = (
713
+ intf_names[i].split("|")[-1] if i < len(intf_names) else f"eth{i}"
714
+ )
715
+ (
716
+ rx_bytes,
717
+ rx_pkts,
718
+ rx_errs,
719
+ rx_drops,
720
+ tx_bytes,
721
+ tx_pkts,
722
+ tx_errs,
723
+ tx_drops,
724
+ ) = row
725
+ interfaces.append(
726
+ InterfaceStats(
727
+ interface=intf_name,
728
+ rx_bytes=int(rx_bytes),
729
+ rx_packets=int(rx_pkts),
730
+ rx_errors=int(rx_errs),
731
+ rx_drops=int(rx_drops),
732
+ tx_bytes=int(tx_bytes),
733
+ tx_packets=int(tx_pkts),
734
+ tx_errors=int(tx_errs),
735
+ tx_drops=int(tx_drops),
736
+ )
737
+ )
738
+
739
+ return interfaces
740
+
741
+ def _parse_wan_interface_stats(self, html: str) -> list[InterfaceStats]:
742
+ """Parse WAN interface statistics table."""
743
+ interfaces = []
744
+
745
+ # Find rows with interface name, description, and 8 numeric values
746
+ rows = re.findall(
747
+ r"<tr>\s*<td class='hd'>([^<]+)</td>\s*"
748
+ r"<td>([^<]+)</td>\s*"
749
+ r"<td>(\d+)</td>\s*<td>(\d+)</td>\s*<td>(\d+)</td>\s*<td>(\d+)</td>\s*"
750
+ r"<td>(\d+)</td>\s*<td>(\d+)</td>\s*<td>(\d+)</td>\s*<td>(\d+)</td>\s*</tr>",
751
+ html,
752
+ re.DOTALL | re.IGNORECASE,
753
+ )
754
+
755
+ for row in rows:
756
+ (
757
+ intf_name,
758
+ _desc,
759
+ rx_bytes,
760
+ rx_pkts,
761
+ rx_errs,
762
+ rx_drops,
763
+ tx_bytes,
764
+ tx_pkts,
765
+ tx_errs,
766
+ tx_drops,
767
+ ) = row
768
+ interfaces.append(
769
+ InterfaceStats(
770
+ interface=intf_name.strip(),
771
+ rx_bytes=int(rx_bytes),
772
+ rx_packets=int(rx_pkts),
773
+ rx_errors=int(rx_errs),
774
+ rx_drops=int(rx_drops),
775
+ tx_bytes=int(tx_bytes),
776
+ tx_packets=int(tx_pkts),
777
+ tx_errors=int(tx_errs),
778
+ tx_drops=int(tx_drops),
779
+ )
780
+ )
781
+
782
+ return interfaces
783
+
784
+ def _parse_adsl_stats(self, html: str) -> ADSLStats:
785
+ """Parse ADSL statistics from HTML."""
786
+ adsl = ADSLStats()
787
+
788
+ adsl.mode = self._extract_value(html, r"Mode:</td><td>([^<]+)</td>")
789
+ adsl.traffic_type = self._extract_value(
790
+ html, r"Traffic Type:</td><td>([^<]+)</td>"
791
+ )
792
+ adsl.status = self._extract_value(html, r"Status:</td><td>([^<]+)</td>")
793
+ adsl.link_power_state = self._extract_value(
794
+ html, r"Link Power State:</td><td>([^<]+)</td>"
795
+ )
796
+
797
+ # Parse rate info - downstream and upstream
798
+ rate_match = re.search(
799
+ r"Rate \(Kbps\):</td><td>(\d+)</td><td>(\d+)</td>", html, re.IGNORECASE
800
+ )
801
+ if rate_match:
802
+ adsl.downstream_rate = int(rate_match.group(1))
803
+ adsl.upstream_rate = int(rate_match.group(2))
804
+
805
+ # Parse SNR margin
806
+ snr_match = re.search(
807
+ r"SNR Margin.*?<td>(\d+)</td><td>(\d+)</td>", html, re.IGNORECASE
808
+ )
809
+ if snr_match:
810
+ adsl.downstream_snr_margin = float(snr_match.group(1)) / 10
811
+ adsl.upstream_snr_margin = float(snr_match.group(2)) / 10
812
+
813
+ # Parse attenuation
814
+ atten_match = re.search(
815
+ r"Attenuation.*?<td>(\d+)</td><td>(\d+)</td>", html, re.IGNORECASE
816
+ )
817
+ if atten_match:
818
+ adsl.downstream_attenuation = float(atten_match.group(1)) / 10
819
+ adsl.upstream_attenuation = float(atten_match.group(2)) / 10
820
+
821
+ # Parse output power
822
+ power_match = re.search(
823
+ r"Output Power.*?<td>(\d+)</td><td>(\d+)</td>", html, re.IGNORECASE
824
+ )
825
+ if power_match:
826
+ adsl.downstream_output_power = float(power_match.group(1)) / 10
827
+ adsl.upstream_output_power = float(power_match.group(2)) / 10
828
+
829
+ # Parse attainable rate
830
+ attain_match = re.search(
831
+ r"Attainable Rate.*?<td>(\d+)</td><td>(\d+)</td>", html, re.IGNORECASE
832
+ )
833
+ if attain_match:
834
+ adsl.downstream_attainable_rate = int(attain_match.group(1))
835
+ adsl.upstream_attainable_rate = int(attain_match.group(2))
836
+
837
+ return adsl
838
+
839
+ def get_logs(self) -> list[LogEntry]:
840
+ """Fetch and parse system logs."""
841
+ html = self.fetch_page("/logview.cmd")
842
+ return self._parse_logs(html)
843
+
844
+ def _parse_logs(self, html: str) -> list[LogEntry]:
845
+ """Parse system logs from HTML."""
846
+ logs = []
847
+
848
+ # Find the log table
849
+ table_match = re.search(
850
+ r"<table class=formlisting>.*?</table>", html, re.DOTALL | re.IGNORECASE
851
+ )
852
+ if not table_match:
853
+ return logs
854
+
855
+ table = table_match.group(0)
856
+
857
+ # Find data rows - 4 cells per row
858
+ rows = re.findall(
859
+ r"<tr>\s*"
860
+ r"<td[^>]*>([^<]*)</td>\s*"
861
+ r"<td[^>]*>([^<]*)</td>\s*"
862
+ r"<td[^>]*>([^<]*)</td>\s*"
863
+ r"<td[^>]*>([^<]*)</td>\s*"
864
+ r"</tr>",
865
+ table,
866
+ re.IGNORECASE,
867
+ )
868
+
869
+ for row in rows:
870
+ datetime_str, facility, severity, message = row
871
+ # Skip header row
872
+ if "Date/Time" in datetime_str:
873
+ continue
874
+ logs.append(
875
+ LogEntry(
876
+ datetime=datetime_str.strip(),
877
+ facility=facility.strip(),
878
+ severity=severity.strip(),
879
+ message=message.strip(),
880
+ )
881
+ )
882
+
883
+ return logs
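
Taken together, a hedged end-to-end sketch of how the high-level getters might be used (illustrative only; the IP address and credentials are placeholders):

    from router_cli.client import RouterClient, RouterError

    client = RouterClient("192.168.1.1", "admin", "admin")
    try:
        status = client.get_status()
        print(status.model_name, status.firmware, status.local_ip)

        for lease in client.get_dhcp_leases():
            print(lease.hostname, lease.ip, lease.mac, lease.expires_in)

        for wc in client.get_wireless_clients():
            print(wc.mac, wc.ssid, "associated" if wc.associated else "not associated")

        adsl = client.get_statistics().adsl
        print(f"ADSL {adsl.status}: {adsl.downstream_rate}/{adsl.upstream_rate} kbps")
    except RouterError as exc:
        print(f"Router request failed: {exc}")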