haoline 0.3.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. haoline/.streamlit/config.toml +10 -0
  2. haoline/__init__.py +248 -0
  3. haoline/analyzer.py +935 -0
  4. haoline/cli.py +2712 -0
  5. haoline/compare.py +811 -0
  6. haoline/compare_visualizations.py +1564 -0
  7. haoline/edge_analysis.py +525 -0
  8. haoline/eval/__init__.py +131 -0
  9. haoline/eval/adapters.py +844 -0
  10. haoline/eval/cli.py +390 -0
  11. haoline/eval/comparison.py +542 -0
  12. haoline/eval/deployment.py +633 -0
  13. haoline/eval/schemas.py +833 -0
  14. haoline/examples/__init__.py +15 -0
  15. haoline/examples/basic_inspection.py +74 -0
  16. haoline/examples/compare_models.py +117 -0
  17. haoline/examples/hardware_estimation.py +78 -0
  18. haoline/format_adapters.py +1001 -0
  19. haoline/formats/__init__.py +123 -0
  20. haoline/formats/coreml.py +250 -0
  21. haoline/formats/gguf.py +483 -0
  22. haoline/formats/openvino.py +255 -0
  23. haoline/formats/safetensors.py +273 -0
  24. haoline/formats/tflite.py +369 -0
  25. haoline/hardware.py +2307 -0
  26. haoline/hierarchical_graph.py +462 -0
  27. haoline/html_export.py +1573 -0
  28. haoline/layer_summary.py +769 -0
  29. haoline/llm_summarizer.py +465 -0
  30. haoline/op_icons.py +618 -0
  31. haoline/operational_profiling.py +1492 -0
  32. haoline/patterns.py +1116 -0
  33. haoline/pdf_generator.py +265 -0
  34. haoline/privacy.py +250 -0
  35. haoline/pydantic_models.py +241 -0
  36. haoline/report.py +1923 -0
  37. haoline/report_sections.py +539 -0
  38. haoline/risks.py +521 -0
  39. haoline/schema.py +523 -0
  40. haoline/streamlit_app.py +2024 -0
  41. haoline/tests/__init__.py +4 -0
  42. haoline/tests/conftest.py +123 -0
  43. haoline/tests/test_analyzer.py +868 -0
  44. haoline/tests/test_compare_visualizations.py +293 -0
  45. haoline/tests/test_edge_analysis.py +243 -0
  46. haoline/tests/test_eval.py +604 -0
  47. haoline/tests/test_format_adapters.py +460 -0
  48. haoline/tests/test_hardware.py +237 -0
  49. haoline/tests/test_hardware_recommender.py +90 -0
  50. haoline/tests/test_hierarchical_graph.py +326 -0
  51. haoline/tests/test_html_export.py +180 -0
  52. haoline/tests/test_layer_summary.py +428 -0
  53. haoline/tests/test_llm_patterns.py +540 -0
  54. haoline/tests/test_llm_summarizer.py +339 -0
  55. haoline/tests/test_patterns.py +774 -0
  56. haoline/tests/test_pytorch.py +327 -0
  57. haoline/tests/test_report.py +383 -0
  58. haoline/tests/test_risks.py +398 -0
  59. haoline/tests/test_schema.py +417 -0
  60. haoline/tests/test_tensorflow.py +380 -0
  61. haoline/tests/test_visualizations.py +316 -0
  62. haoline/universal_ir.py +856 -0
  63. haoline/visualizations.py +1086 -0
  64. haoline/visualize_yolo.py +44 -0
  65. haoline/web.py +110 -0
  66. haoline-0.3.0.dist-info/METADATA +471 -0
  67. haoline-0.3.0.dist-info/RECORD +70 -0
  68. haoline-0.3.0.dist-info/WHEEL +4 -0
  69. haoline-0.3.0.dist-info/entry_points.txt +5 -0
  70. haoline-0.3.0.dist-info/licenses/LICENSE +22 -0
haoline/tests/test_layer_summary.py
@@ -0,0 +1,428 @@
+ # Copyright (c) 2025 HaoLine Contributors
+ # SPDX-License-Identifier: MIT
+
+ """Tests for per-layer summary table (Story 5.8)."""
+
+ from __future__ import annotations
+
+ import csv
+ import io
+ import json
+ import tempfile
+ from pathlib import Path
+
+ import numpy as np
+ import onnx
+ import pytest
+ from onnx import TensorProto, helper
+
+ from ..analyzer import MetricsEngine, ONNXGraphLoader
+ from ..layer_summary import (
+     LayerMetrics,
+     LayerSummary,
+     LayerSummaryBuilder,
+     generate_html_table,
+     generate_markdown_table,
+ )
+
+
+ def create_test_model() -> onnx.ModelProto:
+     """Create a simple model for testing."""
+     X = helper.make_tensor_value_info("X", TensorProto.FLOAT, [1, 3, 224, 224])
+     Y = helper.make_tensor_value_info("Y", TensorProto.FLOAT, [1, 1000])
+
+     weight1 = helper.make_tensor(
+         "w1",
+         TensorProto.FLOAT,
+         [64, 3, 7, 7],
+         np.random.randn(64 * 3 * 7 * 7).astype(np.float32).tolist(),
+     )
+     weight2 = helper.make_tensor(
+         "w2",
+         TensorProto.FLOAT,
+         [512, 1000],
+         np.random.randn(512 * 1000).astype(np.float32).tolist(),
+     )
+     bias = helper.make_tensor(
+         "b2",
+         TensorProto.FLOAT,
+         [1000],
+         np.zeros(1000, dtype=np.float32).tolist(),
+     )
+
+     nodes = [
+         helper.make_node(
+             "Conv",
+             ["X", "w1"],
+             ["c1"],
+             kernel_shape=[7, 7],
+             strides=[2, 2],
+             pads=[3, 3, 3, 3],
+         ),
+         helper.make_node("Relu", ["c1"], ["r1"]),
+         helper.make_node("GlobalAveragePool", ["r1"], ["pool"]),
+         helper.make_node("Flatten", ["pool"], ["flat"]),
+         helper.make_node("Gemm", ["flat", "w2", "b2"], ["Y"]),
+     ]
+
+     graph = helper.make_graph(nodes, "test_model", [X], [Y], [weight1, weight2, bias])
+     model = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 17)])
+     return model
+
+
+ class TestLayerMetrics:
+     """Tests for LayerMetrics dataclass."""
+
+     def test_basic_creation(self):
+         """Test creating layer metrics."""
+         layer = LayerMetrics(
+             name="conv1",
+             op_type="Conv",
+             params=9408,
+             flops=118013952,
+             pct_params=10.5,
+             pct_flops=25.3,
+         )
+         assert layer.name == "conv1"
+         assert layer.op_type == "Conv"
+         assert layer.params == 9408
+
+     def test_to_dict(self):
+         """Test dictionary export."""
+         layer = LayerMetrics(
+             name="fc1",
+             op_type="Gemm",
+             params=512000,
+             flops=1024000,
+         )
+         d = layer.to_dict()
+         assert d["name"] == "fc1"
+         assert d["params"] == 512000
+
+
+ class TestLayerSummary:
+     """Tests for LayerSummary."""
+
+     def test_creation(self):
+         """Test creating layer summary."""
+         layers = [
+             LayerMetrics(name="conv1", op_type="Conv", params=9408, flops=1000000, pct_flops=50.0),
+             LayerMetrics(name="relu1", op_type="Relu", params=0, flops=500000, pct_flops=25.0),
+             LayerMetrics(name="fc1", op_type="Gemm", params=512000, flops=500000, pct_flops=25.0),
+         ]
+         summary = LayerSummary(
+             layers=layers,
+             total_params=521408,
+             total_flops=2000000,
+         )
+         assert len(summary.layers) == 3
+         assert summary.total_params == 521408
+
+     def test_to_json(self):
+         """Test JSON export."""
+         layers = [
+             LayerMetrics(name="conv1", op_type="Conv", params=1000, flops=100000),
+         ]
+         summary = LayerSummary(layers=layers, total_params=1000, total_flops=100000)
+
+         json_str = summary.to_json()
+         data = json.loads(json_str)
+
+         assert "layers" in data
+         assert len(data["layers"]) == 1
+         assert data["layers"][0]["name"] == "conv1"
+
+     def test_to_csv(self):
+         """Test CSV export (Task 5.8.4)."""
+         layers = [
+             LayerMetrics(
+                 name="conv1",
+                 op_type="Conv",
+                 params=1000,
+                 flops=100000,
+                 pct_params=50.0,
+                 pct_flops=50.0,
+             ),
+             LayerMetrics(
+                 name="relu1",
+                 op_type="Relu",
+                 params=0,
+                 flops=50000,
+                 pct_params=0.0,
+                 pct_flops=25.0,
+             ),
+         ]
+         summary = LayerSummary(layers=layers, total_params=1000, total_flops=150000)
+
+         csv_str = summary.to_csv()
+         reader = csv.reader(io.StringIO(csv_str))
+         rows = list(reader)
+
+         # Header + 2 data rows
+         assert len(rows) == 3
+         assert rows[0][0] == "Layer Name"
+         assert rows[1][0] == "conv1"
+         assert rows[2][0] == "relu1"
+
+     def test_save_csv(self):
+         """Test saving CSV to file."""
+         layers = [
+             LayerMetrics(name="test", op_type="Test", params=100, flops=1000),
+         ]
+         summary = LayerSummary(layers=layers, total_params=100, total_flops=1000)
+
+         with tempfile.NamedTemporaryFile(suffix=".csv", delete=False) as f:
+             path = Path(f.name)
+
+         try:
+             summary.save_csv(path)
+             assert path.exists()
+             content = path.read_text()
+             assert "Layer Name" in content
+             assert "test" in content
+         finally:
+             path.unlink()
+
+     def test_filter_by_op_type(self):
+         """Test filtering by op type."""
+         layers = [
+             LayerMetrics(name="conv1", op_type="Conv", params=1000, flops=100000),
+             LayerMetrics(name="relu1", op_type="Relu", params=0, flops=50000),
+             LayerMetrics(name="conv2", op_type="Conv", params=2000, flops=200000),
+         ]
+         summary = LayerSummary(layers=layers, total_params=3000, total_flops=350000)
+
+         filtered = summary.filter_by_op_type(["Conv"])
+         assert len(filtered.layers) == 2
+         assert all(layer.op_type == "Conv" for layer in filtered.layers)
+
+     def test_filter_by_threshold(self):
+         """Test filtering by parameter/FLOP thresholds."""
+         layers = [
+             LayerMetrics(name="big", op_type="Conv", params=10000, flops=1000000, pct_flops=80.0),
+             LayerMetrics(name="small", op_type="Relu", params=0, flops=100000, pct_flops=8.0),
+             LayerMetrics(name="medium", op_type="Conv", params=1000, flops=150000, pct_flops=12.0),
+         ]
+         summary = LayerSummary(layers=layers, total_params=11000, total_flops=1250000)
+
+         # Filter to layers with >10% FLOPs
+         filtered = summary.filter_by_threshold(min_pct_flops=10.0)
+         assert len(filtered.layers) == 2
+         assert "big" in [layer.name for layer in filtered.layers]
+         assert "medium" in [layer.name for layer in filtered.layers]
+
+     def test_sort_by(self):
+         """Test sorting by different keys."""
+         layers = [
+             LayerMetrics(name="a", op_type="Conv", params=100, flops=300),
+             LayerMetrics(name="b", op_type="Relu", params=0, flops=100),
+             LayerMetrics(name="c", op_type="Gemm", params=500, flops=200),
+         ]
+         summary = LayerSummary(layers=layers, total_params=600, total_flops=600)
+
+         # Sort by params descending
+         sorted_summary = summary.sort_by("params", descending=True)
+         assert sorted_summary.layers[0].name == "c"
+
+         # Sort by name ascending
+         sorted_summary = summary.sort_by("name", descending=False)
+         assert sorted_summary.layers[0].name == "a"
+
+     def test_top_n(self):
+         """Test getting top N layers."""
+         layers = [
+             LayerMetrics(name="a", op_type="Conv", params=100, flops=300),
+             LayerMetrics(name="b", op_type="Relu", params=0, flops=100),
+             LayerMetrics(name="c", op_type="Gemm", params=500, flops=200),
+         ]
+         summary = LayerSummary(layers=layers, total_params=600, total_flops=600)
+
+         top2 = summary.top_n(2, key="flops")
+         assert len(top2.layers) == 2
+         assert top2.layers[0].name == "a"  # 300 FLOPs
+         assert top2.layers[1].name == "c"  # 200 FLOPs
+
+
+ class TestLayerSummaryBuilder:
+     """Tests for building layer summary from ONNX."""
+
+     def test_build_from_model(self):
+         """Test building summary from ONNX model."""
+         model = create_test_model()
+
+         with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
+             onnx.save(model, f.name)
+             model_path = Path(f.name)
+
+         try:
+             loader = ONNXGraphLoader()
+             _, graph_info = loader.load(model_path)
+
+             metrics_engine = MetricsEngine()
+             param_counts = metrics_engine.count_parameters(graph_info)
+             flop_counts = metrics_engine.estimate_flops(graph_info)
+
+             builder = LayerSummaryBuilder()
+             summary = builder.build(graph_info, param_counts, flop_counts)
+
+             assert len(summary.layers) == 5  # 5 nodes in test model
+             assert summary.total_params == param_counts.total
+             assert summary.total_flops == flop_counts.total
+
+             # Check op types
+             op_types = {layer.op_type for layer in summary.layers}
+             assert "Conv" in op_types
+             assert "Relu" in op_types
+             assert "Gemm" in op_types
+
+         finally:
+             model_path.unlink()
+
+     def test_percentages_calculated(self):
+         """Test that percentages are calculated correctly."""
+         model = create_test_model()
+
+         with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
+             onnx.save(model, f.name)
+             model_path = Path(f.name)
+
+         try:
+             loader = ONNXGraphLoader()
+             _, graph_info = loader.load(model_path)
+
+             metrics_engine = MetricsEngine()
+             param_counts = metrics_engine.count_parameters(graph_info)
+             flop_counts = metrics_engine.estimate_flops(graph_info)
+
+             builder = LayerSummaryBuilder()
+             summary = builder.build(graph_info, param_counts, flop_counts)
+
+             # Totals should match
+             assert summary.total_params == param_counts.total
+             assert summary.total_flops == flop_counts.total
+
+             # Percentages should be valid (0-100 range)
+             for layer in summary.layers:
+                 assert 0.0 <= layer.pct_params <= 100.0, f"Invalid pct_params: {layer.pct_params}"
+                 assert 0.0 <= layer.pct_flops <= 100.0, f"Invalid pct_flops: {layer.pct_flops}"
+
+             # At least some layers should have FLOPs
+             total_pct_flops = sum(layer.pct_flops for layer in summary.layers)
+             # Note: might not sum to 100 if some nodes aren't in by_node dict
+             assert total_pct_flops >= 0.0, f"FLOPs % sum: {total_pct_flops}"
+
+         finally:
+             model_path.unlink()
+
+
+ class TestHTMLTable:
+     """Tests for HTML table generation (Task 5.8.2)."""
+
+     def test_generate_html_table(self):
+         """Test generating sortable HTML table."""
+         layers = [
+             LayerMetrics(
+                 name="conv1",
+                 op_type="Conv",
+                 params=9408,
+                 flops=118013952,
+                 pct_flops=80.0,
+             ),
+             LayerMetrics(name="relu1", op_type="Relu", params=0, flops=1000000, pct_flops=0.7),
+             LayerMetrics(
+                 name="fc1",
+                 op_type="Gemm",
+                 params=512000,
+                 flops=29000000,
+                 pct_flops=19.3,
+             ),
+         ]
+         summary = LayerSummary(layers=layers, total_params=521408, total_flops=148013952)
+
+         html = generate_html_table(summary)
+
+         # Check for table structure
+         assert '<table class="layer-table"' in html
+         assert "<thead>" in html
+         assert "<tbody>" in html
+
+         # Check for search input
+         assert 'id="layerSearch"' in html
+         assert "filterLayers" in html
+
+         # Check for filter dropdown
+         assert 'id="opFilter"' in html
+
+         # Check for export button
+         assert "exportLayersCSV" in html
+
+         # Check for data rows
+         assert "conv1" in html
+         assert "relu1" in html
+         assert "fc1" in html
+
+         # Check for sorting JavaScript
+         assert "sortTable" in html
+         assert "filterLayers" in html
+
+     def test_html_without_js(self):
+         """Test generating HTML table without JavaScript."""
+         layers = [
+             LayerMetrics(name="test", op_type="Test", params=100, flops=1000),
+         ]
+         summary = LayerSummary(layers=layers, total_params=100, total_flops=1000)
+
+         html = generate_html_table(summary, include_js=False)
+
+         # Should still have table
+         assert '<table class="layer-table"' in html
+
+         # But no export function definition
+         assert "const layerCSVData" not in html
+
+
+ class TestMarkdownTable:
+     """Tests for Markdown table generation."""
+
+     def test_generate_markdown_table(self):
+         """Test generating Markdown table."""
+         layers = [
+             LayerMetrics(
+                 name="conv1",
+                 op_type="Conv",
+                 params=9408,
+                 flops=118000000,
+                 pct_flops=80.0,
+             ),
+             LayerMetrics(name="relu1", op_type="Relu", params=0, flops=1000000, pct_flops=0.7),
+         ]
+         summary = LayerSummary(layers=layers, total_params=9408, total_flops=119000000)
+
+         md = generate_markdown_table(summary)
+
+         # Check for header
+         assert "| Layer | Op Type | Params | FLOPs | % Compute |" in md
+         assert "|-------|---------|--------|-------|-----------|" in md
+
+         # Check for data (sorted by FLOPs)
+         assert "conv1" in md
+         assert "Conv" in md
+
+     def test_markdown_max_rows(self):
+         """Test Markdown table respects max_rows."""
+         layers = [
+             LayerMetrics(name=f"layer{i}", op_type="Conv", params=100, flops=1000 * i)
+             for i in range(100)
+         ]
+         summary = LayerSummary(layers=layers, total_params=10000, total_flops=5050000)
+
+         md = generate_markdown_table(summary, max_rows=10)
+
+         # Should have header + separator + 10 rows + truncation note
+         lines = [line for line in md.split("\n") if line.strip()]
+         assert len(lines) == 13  # header, separator, 10 rows, truncation note
+         assert "more layers" in md
+
+
+ if __name__ == "__main__":
+     pytest.main([__file__, "-v"])