makcu-2.1.1-py3-none-any.whl → makcu-2.1.3-py3-none-any.whl

This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registry.
makcu/__main__.py CHANGED
@@ -1,298 +1,388 @@
1
- import sys
2
- import os
3
- from pathlib import Path
4
- from typing import List, NoReturn
5
- import pytest
6
- import time
7
- from makcu import create_controller, MakcuConnectionError, MakcuController
8
- import json
9
- import re
10
-
11
- def debug_console():
12
- controller = create_controller()
13
- transport = controller.transport
14
-
15
- print("🔧 Makcu Debug Console")
16
- print("Type a raw command (e.g., km.version()) and press Enter.")
17
- print("Type 'exit' or 'quit' to leave.")
18
-
19
- command_counter = 0
20
-
21
- while True:
22
- try:
23
- cmd = input(">>> ").strip()
24
- if cmd.lower() in {"exit", "quit"}:
25
- break
26
- if not cmd:
27
- continue
28
-
29
- command_counter += 1
30
-
31
- response = transport.send_command(cmd, expect_response=True)
32
-
33
- if response and response.strip():
34
- if response.strip() == cmd:
35
- print(f"{cmd}")
36
- else:
37
- print(f"{response}")
38
- else:
39
- print("(no response)")
40
-
41
- except Exception as e:
42
- print(f"⚠️ Error: {e}")
43
-
44
- controller.disconnect()
45
- print("Disconnected.")
46
-
47
- def test_port(port: str) -> None:
48
- try:
49
- print(f"Trying to connect to {port}...")
50
- makcu = MakcuController(fallback_com_port=port, send_init=False, override_port=True)
51
- makcu.connect()
52
- if makcu.is_connected:
53
- print(f"✅ Successfully connected to {port}.")
54
- makcu.disconnect()
55
- except MakcuConnectionError as e:
56
- if "FileNotFoundError" in str(e):
57
- print(f"❌ Port {port} does not exist. Please check the port name.")
58
- else:
59
- print(f" Failed to connect to {port}: ")
60
- except Exception as e:
61
- print(f"❌ Unexpected error: {e}")
62
-
63
- def parse_html_results(html_file: Path):
64
- if not html_file.exists():
65
- raise FileNotFoundError(f"HTML report not found: {html_file}")
66
-
67
- with open(html_file, 'r', encoding='utf-8') as f:
68
- content = f.read()
69
-
70
- match = re.search(r'data-jsonblob="([^"]*)"', content)
71
- if not match:
72
- raise ValueError("Could not find JSON data in HTML report")
73
-
74
- json_str = match.group(1)
75
-     json_str = json_str.replace('&#34;', '"').replace('&#x27;', "'").replace('&amp;', '&')
76
-
77
- try:
78
- data = json.loads(json_str)
79
- except json.JSONDecodeError as e:
80
- raise ValueError(f"Failed to parse JSON data: {e}")
81
-
82
- test_results = []
83
- total_ms = 0
84
-
85
- skip_tests = {'test_connect_to_port'}
86
-
87
- for test_id, test_data_list in data.get('tests', {}).items():
88
- test_name = test_id.split('::')[-1]
89
- if test_name in skip_tests:
90
- continue
91
-
92
- for test_data in test_data_list:
93
- status = test_data.get('result', 'UNKNOWN')
94
- duration_str = test_data.get('duration', '0 ms')
95
-
96
- duration_match = re.search(r'(\d+)\s*ms', duration_str)
97
- duration_ms = int(duration_match.group(1)) if duration_match else 0
98
- total_ms += duration_ms
99
-
100
- test_results.append((test_name, status, duration_ms))
101
-
102
- return test_results, total_ms
103
-
104
- def run_tests() -> NoReturn:
105
- try:
106
- from rich.console import Console
107
- from rich.table import Table
108
- from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TimeElapsedColumn
109
- from rich.panel import Panel
110
- from rich.align import Align
111
- from rich import print as rprint
112
- from rich.text import Text
113
- import subprocess
114
-
115
- console = Console()
116
-
117
- header = Panel.fit(
118
- "[bold cyan]Makcu Test Suite v2.1.1[/bold cyan]\n[dim]High-Performance Python Library[/dim]",
119
- border_style="bright_blue"
120
- )
121
- console.print(Align.center(header))
122
- console.print()
123
-
124
- package_dir: Path = Path(__file__).resolve().parent
125
- test_file: Path = package_dir / "test_suite.py"
126
- html_file: Path = package_dir.parent / "latest_pytest.html"
127
-
128
- start_time = time.time()
129
-
130
- with Progress(
131
- SpinnerColumn(),
132
- TextColumn("[progress.description]{task.description}"),
133
- BarColumn(),
134
- TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
135
- TimeElapsedColumn(),
136
- console=console,
137
- transient=True
138
- ) as progress:
139
- task = progress.add_task("[cyan]Running tests...", total=100)
140
-
141
- result = subprocess.run(
142
- [
143
- sys.executable, "-m", "pytest",
144
- str(test_file),
145
- "--rootdir", str(package_dir),
146
- "-q",
147
- "--tb=no",
148
- "--html", str(html_file),
149
- "--self-contained-html"
150
- ],
151
- stdout=subprocess.DEVNULL,
152
- stderr=subprocess.DEVNULL,
153
- text=True
154
- )
155
-
156
- progress.update(task, completed=100)
157
-
158
- try:
159
- test_results, total_ms = parse_html_results(html_file)
160
- except (FileNotFoundError, ValueError) as e:
161
- console.print(f"[red]❌ Failed to parse test results: {e}[/red]")
162
- console.print(f"[yellow]⚠️ pytest exit code: {result.returncode}[/yellow]")
163
- sys.exit(1)
164
-
165
- elapsed_time = time.time() - start_time
166
-
167
- table = Table(title="[bold]Test Results[/bold]", show_header=True, header_style="bold magenta")
168
- table.add_column("Test", style="cyan", no_wrap=True)
169
- table.add_column("Status", justify="center")
170
- table.add_column("Time", justify="right", style="yellow")
171
- table.add_column("Performance", justify="center")
172
-
173
- passed = failed = skipped = 0
174
-
175
- for test_name, status, duration_ms in test_results:
176
- display_name = test_name.replace("test_", "").replace("_", " ").title()
177
-
178
- if status.upper() == "PASSED":
179
- status_text = "[green]✅ PASSED[/green]"
180
- passed += 1
181
- elif status.upper() == "FAILED":
182
- status_text = "[red]❌ FAILED[/red]"
183
- failed += 1
184
- elif status.upper() == "SKIPPED":
185
- status_text = "[yellow]⏭️ SKIPPED[/yellow]"
186
- skipped += 1
187
- else:
188
- status_text = status
189
-
190
- time_str = f"{duration_ms}ms" if duration_ms else "-"
191
- if duration_ms <= 3:
192
- perf = "[green]Excellent[/green]"
193
- elif duration_ms <= 5:
194
- perf = "[cyan]Great[/cyan]"
195
- elif duration_ms <= 10:
196
- perf = "[yellow]Good[/yellow]"
197
- elif duration_ms > 0:
198
- perf = "[red]🐌 Needs work[/red]"
199
- else:
200
- perf = "-"
201
-
202
- table.add_row(display_name, status_text, time_str, perf)
203
-
204
- console.print("\n")
205
- console.print(table)
206
- console.print()
207
-
208
- summary = Table.grid(padding=1)
209
- summary.add_column(style="bold cyan", justify="right")
210
- summary.add_column(justify="left")
211
- summary.add_row("Total Tests:", str(len(test_results)))
212
- summary.add_row("Passed:", f"[green]{passed}[/green]")
213
- summary.add_row("Failed:", f"[red]{failed}[/red]" if failed else str(failed))
214
- summary.add_row("Skipped:", f"[yellow]{skipped}[/yellow]" if skipped else str(skipped))
215
- summary.add_row("Total Time:", f"{elapsed_time:.2f}s")
216
- summary.add_row("Avg Time/Test:", f"{total_ms/len(test_results):.1f}ms" if test_results else "0ms")
217
-
218
- console.print(Align.center(Panel(summary, title="[bold]Summary[/bold]", border_style="blue", expand=False)))
219
- console.print()
220
-
221
- if test_results:
222
- avg_time = total_ms / len(test_results)
223
- if avg_time < 3:
224
- perf_text = Text("Performance: ELITE - Ready for 360Hz+ gaming!", style="bold bright_green")
225
- elif avg_time < 5:
226
- perf_text = Text("Performance: EXCELLENT - Ready for 240Hz+ gaming!", style="bold green")
227
- elif avg_time < 10:
228
- perf_text = Text("Performance: GREAT - Ready for 144Hz gaming!", style="bold cyan")
229
- else:
230
- perf_text = Text("Performance: GOOD - Suitable for standard gaming", style="bold yellow")
231
- else:
232
- perf_text = Text("⚠️ No test results parsed. Check your test suite.", style="bold red")
233
-
234
- console.print(Align.center(Panel(perf_text, border_style="green")))
235
- sys.exit(0 if failed == 0 else 1)
236
-
237
- except ImportError:
238
- print("📦 Rich not installed. Install it via `pip install rich` for enhanced output.")
239
- print("\nFallback to raw pytest output...\n")
240
-
241
- package_dir: Path = Path(__file__).resolve().parent
242
- test_file: Path = package_dir / "test_suite.py"
243
- html_file: Path = Path.cwd() / "latest_pytest.html"
244
-
245
- result = pytest.main([
246
- str(test_file),
247
- "--rootdir", str(package_dir),
248
- "-q",
249
- "--tb=no",
250
- "--html", str(html_file),
251
- "--self-contained-html"
252
- ])
253
-
254
- try:
255
- test_results, total_ms = parse_html_results(html_file)
256
- passed = sum(1 for _, status, _ in test_results if status.upper() == "PASSED")
257
- failed = sum(1 for _, status, _ in test_results if status.upper() == "FAILED")
258
- skipped = sum(1 for _, status, _ in test_results if status.upper() == "SKIPPED")
259
-
260
- print(f"\n📊 Results: {passed} passed, {failed} failed, {skipped} skipped")
261
- if test_results:
262
- avg_time = total_ms / len(test_results)
263
- print(f"⏱️ Average time per test: {avg_time:.1f}ms")
264
- except (FileNotFoundError, ValueError):
265
- print("\n⚠️ Could not parse HTML results for summary")
266
-
267
- if result != 0:
268
- print("\n❌ Some tests failed.")
269
- else:
270
- print("\n✅ All tests passed.")
271
-
272
- sys.exit(result)
273
-
274
- def main() -> None:
275
- args: List[str] = sys.argv[1:]
276
-
277
- if not args:
278
- print("Usage:")
279
- print(" python -m makcu --debug")
280
- print(" python -m makcu --testPort COM3")
281
- print(" python -m makcu --runtest")
282
- return
283
-
284
- if args[0] == "--debug":
285
- debug_console()
286
- elif args[0] == "--testPort" and len(args) == 2:
287
- test_port(args[1])
288
- elif args[0] == "--runtest":
289
- run_tests()
290
- else:
291
- print(f"Unknown command: {' '.join(args)}")
292
- print("Usage:")
293
- print(" python -m makcu --debug")
294
- print(" python -m makcu --testPort COM3")
295
- print(" python -m makcu --runtest")
296
-
297
- if __name__ == "__main__":
1
+ import sys
2
+ import os
3
+ from pathlib import Path
4
+ from typing import List, NoReturn, Optional, Tuple
5
+ import pytest
6
+ import time
7
+ from makcu import create_controller, MakcuConnectionError, MakcuController
8
+ import json
9
+ import re
10
+ import subprocess
11
+
12
+ makcu_version = "v2.1.3"
13
+
14
+ def debug_console():
15
+ controller = create_controller()
16
+ transport = controller.transport
17
+
18
+ print("🔧 Makcu Debug Console")
19
+ print("Type a raw command (e.g., km.version()) and press Enter.")
20
+ print("Type 'exit' or 'quit' to leave.")
21
+
22
+ command_counter = 0
23
+
24
+ while True:
25
+ try:
26
+ cmd = input(">>> ").strip()
27
+ if cmd.lower() in {"exit", "quit"}:
28
+ break
29
+ if not cmd:
30
+ continue
31
+
32
+ command_counter += 1
33
+
34
+ response = transport.send_command(cmd, expect_response=True)
35
+
36
+ if response and response.strip():
37
+ if response.strip() == cmd:
38
+ print(f"{cmd}")
39
+ else:
40
+ print(f"{response}")
41
+ else:
42
+ print("(no response)")
43
+
44
+ except Exception as e:
45
+ print(f"⚠️ Error: {e}")
46
+
47
+ controller.disconnect()
48
+ print("Disconnected.")
49
+
50
+ def test_port(port: str) -> None:
51
+ try:
52
+ print(f"Trying to connect to {port}...")
53
+ makcu = MakcuController(fallback_com_port=port, send_init=False, override_port=True)
54
+ makcu.connect()
55
+ if makcu.is_connected:
56
+ print(f" Successfully connected to {port}.")
57
+ makcu.disconnect()
58
+ except MakcuConnectionError as e:
59
+ if "FileNotFoundError" in str(e):
60
+ print(f"❌ Port {port} does not exist. Please check the port name.")
61
+ else:
62
+ print(f"❌ Failed to connect to {port}: ")
63
+ except Exception as e:
64
+ print(f"❌ Unexpected error: {e}")
65
+
66
+ def check_pytest_html_installed() -> bool:
67
+ """Check if pytest-html is installed."""
68
+ try:
69
+ import pytest_html
70
+ return True
71
+ except ImportError:
72
+ return False
73
+
74
+ def find_writable_directory() -> Path:
75
+ """Find a writable directory for the HTML report."""
76
+ # Try current working directory first
77
+ cwd = Path.cwd()
78
+ if os.access(cwd, os.W_OK):
79
+ return cwd
80
+
81
+ # Try user's home directory
82
+ home = Path.home()
83
+ if os.access(home, os.W_OK):
84
+ return home
85
+
86
+ # Try temp directory as last resort
87
+ import tempfile
88
+ return Path(tempfile.gettempdir())
89
+
90
+ def parse_html_results(html_file: Path) -> Tuple[List[Tuple[str, str, int]], int]:
91
+ if not html_file.exists():
92
+ raise FileNotFoundError(f"HTML report not found: {html_file}")
93
+
94
+ with open(html_file, 'r', encoding='utf-8') as f:
95
+ content = f.read()
96
+
97
+ match = re.search(r'data-jsonblob="([^"]*)"', content)
98
+ if not match:
99
+ raise ValueError("Could not find JSON data in HTML report")
100
+
101
+ json_str = match.group(1)
102
+ json_str = json_str.replace('&#34;', '"').replace('&amp;#x27;', "'").replace('&amp;', '&')
103
+
104
+ try:
105
+ data = json.loads(json_str)
106
+ except json.JSONDecodeError as e:
107
+ raise ValueError(f"Failed to parse JSON data: {e}")
108
+
109
+ test_results = []
110
+ total_ms = 0
111
+
112
+ skip_tests = {'test_connect_to_port'}
113
+
114
+ for test_id, test_data_list in data.get('tests', {}).items():
115
+ test_name = test_id.split('::')[-1]
116
+ if test_name in skip_tests:
117
+ continue
118
+
119
+ for test_data in test_data_list:
120
+ status = test_data.get('result', 'UNKNOWN')
121
+ duration_str = test_data.get('duration', '0 ms')
122
+
123
+ duration_match = re.search(r'(\d+)\s*ms', duration_str)
124
+ duration_ms = int(duration_match.group(1)) if duration_match else 0
125
+ total_ms += duration_ms
126
+
127
+ test_results.append((test_name, status, duration_ms))
128
+
129
+ return test_results, total_ms
130
+
131
+ def run_tests() -> NoReturn:
132
+ # Check if pytest-html is installed
133
+ if not check_pytest_html_installed():
134
+ print("❌ pytest-html is not installed. Please install it via:")
135
+ print(" pip install pytest-html")
136
+ sys.exit(1)
137
+
138
+ try:
139
+ from rich.console import Console
140
+ from rich.table import Table
141
+ from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TimeElapsedColumn
142
+ from rich.panel import Panel
143
+ from rich.align import Align
144
+ from rich import print as rprint
145
+ from rich.text import Text
146
+
147
+ console = Console()
148
+
149
+ header = Panel.fit(
150
+ f"[bold cyan]Makcu Test Suite {makcu_version}[/bold cyan]\n[dim]High-Performance Python Library[/dim]",
151
+ border_style="bright_blue"
152
+ )
153
+ console.print(Align.center(header))
154
+ console.print()
155
+
156
+ package_dir: Path = Path(__file__).resolve().parent
157
+ test_file: Path = package_dir / "test_suite.py"
158
+
159
+ # Find writable directory and create HTML path
160
+ writable_dir = find_writable_directory()
161
+ html_file: Path = writable_dir / "latest_pytest.html"
162
+
163
+ # Clean up old report if it exists
164
+ if html_file.exists():
165
+ try:
166
+ html_file.unlink()
167
+ except Exception:
168
+ pass
169
+
170
+ console.print(f"[dim]Running pytest to generate: {html_file}[/dim]")
171
+ console.print(f"[dim]Working directory: {Path.cwd()}[/dim]")
172
+
173
+ start_time = time.time()
174
+
175
+ with Progress(
176
+ SpinnerColumn(),
177
+ TextColumn("[progress.description]{task.description}"),
178
+ BarColumn(),
179
+ TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
180
+ TimeElapsedColumn(),
181
+ console=console,
182
+ transient=True
183
+ ) as progress:
184
+ task = progress.add_task("[cyan]Running tests...", total=100)
185
+
186
+ # Run pytest with explicit output capturing
187
+ result = subprocess.run(
188
+ [
189
+ sys.executable, "-m", "pytest",
190
+ str(test_file),
191
+ "--rootdir", str(package_dir),
192
+ "-q",
193
+ "--tb=no",
194
+ "--html", str(html_file),
195
+ "--self-contained-html",
196
+ "-v" # Add verbose to help debug
197
+ ],
198
+ capture_output=True,
199
+ text=True
200
+ )
201
+
202
+ progress.update(task, completed=100)
203
+
204
+ # Check if HTML file was created
205
+ if not html_file.exists():
206
+ console.print(f"[red]❌ HTML report was not created at: {html_file}[/red]")
207
+ console.print(f"[yellow]pytest exit code: {result.returncode}[/yellow]")
208
+ if result.stdout:
209
+ console.print("[yellow]stdout:[/yellow]")
210
+ console.print(result.stdout)
211
+ if result.stderr:
212
+ console.print("[red]stderr:[/red]")
213
+ console.print(result.stderr)
214
+
215
+ # Try to run tests without HTML report
216
+ console.print("\n[yellow]Running tests without HTML report...[/yellow]")
217
+ result2 = subprocess.run(
218
+ [sys.executable, "-m", "pytest", str(test_file), "-v"],
219
+ capture_output=True,
220
+ text=True
221
+ )
222
+ console.print(result2.stdout)
223
+ sys.exit(1)
224
+
225
+ try:
226
+ test_results, total_ms = parse_html_results(html_file)
227
+ except (FileNotFoundError, ValueError) as e:
228
+ console.print(f"[red]❌ Failed to parse test results: {e}[/red]")
229
+ console.print(f"[yellow]⚠️ pytest exit code: {result.returncode}[/yellow]")
230
+ sys.exit(1)
231
+
232
+ elapsed_time = time.time() - start_time
233
+
234
+ table = Table(title="[bold]Test Results[/bold]", show_header=True, header_style="bold magenta")
235
+ table.add_column("Test", style="cyan", no_wrap=True)
236
+ table.add_column("Status", justify="center")
237
+ table.add_column("Time", justify="right", style="yellow")
238
+ table.add_column("Performance", justify="center")
239
+
240
+ passed = failed = skipped = 0
241
+
242
+ for test_name, status, duration_ms in test_results:
243
+ display_name = test_name.replace("test_", "").replace("_", " ").title()
244
+
245
+ if status.upper() == "PASSED":
246
+ status_text = "[green]✅ PASSED[/green]"
247
+ passed += 1
248
+ elif status.upper() == "FAILED":
249
+ status_text = "[red]❌ FAILED[/red]"
250
+ failed += 1
251
+ elif status.upper() == "SKIPPED":
252
+ status_text = "[yellow]⏭️ SKIPPED[/yellow]"
253
+ skipped += 1
254
+ else:
255
+ status_text = status
256
+
257
+ time_str = f"{duration_ms}ms" if duration_ms else "-"
258
+ if duration_ms <= 3:
259
+ perf = "[green]Excellent[/green]"
260
+ elif duration_ms <= 5:
261
+ perf = "[cyan]Great[/cyan]"
262
+ elif duration_ms <= 10:
263
+ perf = "[yellow]Good[/yellow]"
264
+ elif duration_ms > 0:
265
+ perf = "[red]🐌 Needs work[/red]"
266
+ else:
267
+ perf = "-"
268
+
269
+ table.add_row(display_name, status_text, time_str, perf)
270
+
271
+ console.print("\n")
272
+ console.print(table)
273
+ console.print()
274
+
275
+ summary = Table.grid(padding=1)
276
+ summary.add_column(style="bold cyan", justify="right")
277
+ summary.add_column(justify="left")
278
+ summary.add_row("Total Tests:", str(len(test_results)))
279
+ summary.add_row("Passed:", f"[green]{passed}[/green]")
280
+ summary.add_row("Failed:", f"[red]{failed}[/red]" if failed else str(failed))
281
+ summary.add_row("Skipped:", f"[yellow]{skipped}[/yellow]" if skipped else str(skipped))
282
+ summary.add_row("Total Time:", f"{elapsed_time:.2f}s")
283
+ summary.add_row("Avg Time/Test:", f"{total_ms/len(test_results):.1f}ms" if test_results else "0ms")
284
+
285
+ console.print(Align.center(Panel(summary, title="[bold]Summary[/bold]", border_style="blue", expand=False)))
286
+ console.print()
287
+
288
+ if test_results:
289
+ avg_time = total_ms / len(test_results)
290
+ if avg_time < 3:
291
+ perf_text = Text("Performance: ELITE - Ready for 360Hz+ gaming!", style="bold bright_green")
292
+ elif avg_time < 5:
293
+ perf_text = Text("Performance: EXCELLENT - Ready for 240Hz+ gaming!", style="bold green")
294
+ elif avg_time < 10:
295
+ perf_text = Text("Performance: GREAT - Ready for 144Hz gaming!", style="bold cyan")
296
+ else:
297
+ perf_text = Text("Performance: GOOD - Suitable for standard gaming", style="bold yellow")
298
+ else:
299
+ perf_text = Text("⚠️ No test results parsed. Check your test suite.", style="bold red")
300
+
301
+ console.print(Align.center(Panel(perf_text, border_style="green")))
302
+
303
+ # Print the location of the HTML report
304
+ console.print(f"\n[dim]HTML report saved to: {html_file}[/dim]")
305
+
306
+ sys.exit(0 if failed == 0 else 1)
307
+
308
+ except ImportError:
309
+ print("📦 Rich not installed. Install it via `pip install rich` for enhanced output.")
310
+ print("\nFallback to raw pytest output...\n")
311
+
312
+ package_dir: Path = Path(__file__).resolve().parent
313
+ test_file: Path = package_dir / "test_suite.py"
314
+
315
+ # Find writable directory
316
+ writable_dir = find_writable_directory()
317
+ html_file: Path = writable_dir / "latest_pytest.html"
318
+
319
+ print(f"HTML report will be saved to: {html_file}")
320
+
321
+ # Use subprocess instead of pytest.main for better control
322
+ result = subprocess.run(
323
+ [
324
+ sys.executable, "-m", "pytest",
325
+ str(test_file),
326
+ "--rootdir", str(package_dir),
327
+ "-q",
328
+ "--tb=no",
329
+ "--html", str(html_file),
330
+ "--self-contained-html"
331
+ ],
332
+ capture_output=True,
333
+ text=True
334
+ )
335
+
336
+ if not html_file.exists():
337
+ print(f"\n❌ HTML report was not created. pytest exit code: {result.returncode}")
338
+ if result.stdout:
339
+ print("stdout:", result.stdout)
340
+ if result.stderr:
341
+ print("stderr:", result.stderr)
342
+ sys.exit(1)
343
+
344
+ try:
345
+ test_results, total_ms = parse_html_results(html_file)
346
+ passed = sum(1 for _, status, _ in test_results if status.upper() == "PASSED")
347
+ failed = sum(1 for _, status, _ in test_results if status.upper() == "FAILED")
348
+ skipped = sum(1 for _, status, _ in test_results if status.upper() == "SKIPPED")
349
+
350
+ print(f"\n📊 Results: {passed} passed, {failed} failed, {skipped} skipped")
351
+ if test_results:
352
+ avg_time = total_ms / len(test_results)
353
+ print(f"⏱️ Average time per test: {avg_time:.1f}ms")
354
+ except (FileNotFoundError, ValueError):
355
+ print("\n⚠️ Could not parse HTML results for summary")
356
+
357
+ if result.returncode != 0:
358
+ print("\n❌ Some tests failed.")
359
+ else:
360
+ print("\n✅ All tests passed.")
361
+
362
+ sys.exit(result.returncode)
363
+
364
+ def main() -> None:
365
+ args: List[str] = sys.argv[1:]
366
+
367
+ if not args:
368
+ print("Usage:")
369
+ print(" python -m makcu --debug")
370
+ print(" python -m makcu --testPort COM3")
371
+ print(" python -m makcu --runtest")
372
+ return
373
+
374
+ if args[0] == "--debug":
375
+ debug_console()
376
+ elif args[0] == "--testPort" and len(args) == 2:
377
+ test_port(args[1])
378
+ elif args[0] == "--runtest":
379
+ run_tests()
380
+ else:
381
+ print(f"Unknown command: {' '.join(args)}")
382
+ print("Usage:")
383
+ print(" python -m makcu --debug")
384
+ print(" python -m makcu --testPort COM3")
385
+ print(" python -m makcu --runtest")
386
+
387
+ if __name__ == "__main__":
298
388
  main()