haoline-0.3.0-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- haoline/.streamlit/config.toml +10 -0
- haoline/__init__.py +248 -0
- haoline/analyzer.py +935 -0
- haoline/cli.py +2712 -0
- haoline/compare.py +811 -0
- haoline/compare_visualizations.py +1564 -0
- haoline/edge_analysis.py +525 -0
- haoline/eval/__init__.py +131 -0
- haoline/eval/adapters.py +844 -0
- haoline/eval/cli.py +390 -0
- haoline/eval/comparison.py +542 -0
- haoline/eval/deployment.py +633 -0
- haoline/eval/schemas.py +833 -0
- haoline/examples/__init__.py +15 -0
- haoline/examples/basic_inspection.py +74 -0
- haoline/examples/compare_models.py +117 -0
- haoline/examples/hardware_estimation.py +78 -0
- haoline/format_adapters.py +1001 -0
- haoline/formats/__init__.py +123 -0
- haoline/formats/coreml.py +250 -0
- haoline/formats/gguf.py +483 -0
- haoline/formats/openvino.py +255 -0
- haoline/formats/safetensors.py +273 -0
- haoline/formats/tflite.py +369 -0
- haoline/hardware.py +2307 -0
- haoline/hierarchical_graph.py +462 -0
- haoline/html_export.py +1573 -0
- haoline/layer_summary.py +769 -0
- haoline/llm_summarizer.py +465 -0
- haoline/op_icons.py +618 -0
- haoline/operational_profiling.py +1492 -0
- haoline/patterns.py +1116 -0
- haoline/pdf_generator.py +265 -0
- haoline/privacy.py +250 -0
- haoline/pydantic_models.py +241 -0
- haoline/report.py +1923 -0
- haoline/report_sections.py +539 -0
- haoline/risks.py +521 -0
- haoline/schema.py +523 -0
- haoline/streamlit_app.py +2024 -0
- haoline/tests/__init__.py +4 -0
- haoline/tests/conftest.py +123 -0
- haoline/tests/test_analyzer.py +868 -0
- haoline/tests/test_compare_visualizations.py +293 -0
- haoline/tests/test_edge_analysis.py +243 -0
- haoline/tests/test_eval.py +604 -0
- haoline/tests/test_format_adapters.py +460 -0
- haoline/tests/test_hardware.py +237 -0
- haoline/tests/test_hardware_recommender.py +90 -0
- haoline/tests/test_hierarchical_graph.py +326 -0
- haoline/tests/test_html_export.py +180 -0
- haoline/tests/test_layer_summary.py +428 -0
- haoline/tests/test_llm_patterns.py +540 -0
- haoline/tests/test_llm_summarizer.py +339 -0
- haoline/tests/test_patterns.py +774 -0
- haoline/tests/test_pytorch.py +327 -0
- haoline/tests/test_report.py +383 -0
- haoline/tests/test_risks.py +398 -0
- haoline/tests/test_schema.py +417 -0
- haoline/tests/test_tensorflow.py +380 -0
- haoline/tests/test_visualizations.py +316 -0
- haoline/universal_ir.py +856 -0
- haoline/visualizations.py +1086 -0
- haoline/visualize_yolo.py +44 -0
- haoline/web.py +110 -0
- haoline-0.3.0.dist-info/METADATA +471 -0
- haoline-0.3.0.dist-info/RECORD +70 -0
- haoline-0.3.0.dist-info/WHEEL +4 -0
- haoline-0.3.0.dist-info/entry_points.txt +5 -0
- haoline-0.3.0.dist-info/licenses/LICENSE +22 -0
haoline/tests/test_pytorch.py
@@ -0,0 +1,327 @@
# Copyright (c) 2025 HaoLine Contributors
# SPDX-License-Identifier: MIT

"""
Unit tests for PyTorch to ONNX conversion functionality.
"""

from __future__ import annotations

import logging
import sys
from pathlib import Path
from unittest.mock import patch

import pytest

# Add parent path for test imports
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))

from ..cli import (
    _convert_pytorch_to_onnx,
    _extract_ultralytics_metadata,
)
from ..report import DatasetInfo, infer_num_classes_from_output

# Check if torch is available
try:
    import torch
    import torch.nn as nn

    _TORCH_AVAILABLE = True
except ImportError:
    _TORCH_AVAILABLE = False
    nn = None  # Placeholder for type hints


# Only define the model class if torch is available
if _TORCH_AVAILABLE:

    class SimpleTestModel(nn.Module):
        """Simple model for testing conversion."""

        def __init__(self):
            super().__init__()
            self.conv = nn.Conv2d(3, 16, 3, padding=1)
            self.relu = nn.ReLU()
            self.pool = nn.AdaptiveAvgPool2d(1)
            self.fc = nn.Linear(16, 10)

        def forward(self, x):
            x = self.relu(self.conv(x))
            x = self.pool(x)
            x = x.view(x.size(0), -1)
            return self.fc(x)


@pytest.mark.skipif(not _TORCH_AVAILABLE, reason="PyTorch not installed")
class TestPyTorchConversion:
    """Tests for PyTorch to ONNX conversion."""

    def test_torchscript_model_conversion(self, tmp_path):
        """TorchScript models should convert successfully."""
        # Create and save a TorchScript model
        model = SimpleTestModel()
        model.eval()
        dummy_input = torch.randn(1, 3, 32, 32)
        traced = torch.jit.trace(model, dummy_input)

        pt_path = tmp_path / "model.pt"
        torch.jit.save(traced, str(pt_path))

        logger = logging.getLogger("test")

        # Convert
        onnx_path, _temp_file = _convert_pytorch_to_onnx(
            pt_path,
            input_shape_str="1,3,32,32",
            output_path=tmp_path / "output.onnx",
            opset_version=17,
            logger=logger,
        )

        assert onnx_path is not None
        assert onnx_path.exists()
        assert onnx_path.suffix == ".onnx"

    def test_conversion_requires_input_shape(self, tmp_path):
        """Conversion should fail without input shape."""
        model = SimpleTestModel()
        model.eval()
        dummy_input = torch.randn(1, 3, 32, 32)
        traced = torch.jit.trace(model, dummy_input)

        pt_path = tmp_path / "model.pt"
        torch.jit.save(traced, str(pt_path))

        logger = logging.getLogger("test")

        # Convert without input shape
        onnx_path, _ = _convert_pytorch_to_onnx(
            pt_path,
            input_shape_str=None,  # No input shape
            output_path=None,
            opset_version=17,
            logger=logger,
        )

        assert onnx_path is None

    def test_conversion_invalid_input_shape(self, tmp_path):
        """Conversion should fail with invalid input shape format."""
        model = SimpleTestModel()
        model.eval()
        dummy_input = torch.randn(1, 3, 32, 32)
        traced = torch.jit.trace(model, dummy_input)

        pt_path = tmp_path / "model.pt"
        torch.jit.save(traced, str(pt_path))

        logger = logging.getLogger("test")

        # Convert with invalid input shape
        onnx_path, _ = _convert_pytorch_to_onnx(
            pt_path,
            input_shape_str="invalid,shape",
            output_path=None,
            opset_version=17,
            logger=logger,
        )

        assert onnx_path is None

    def test_conversion_nonexistent_file(self, tmp_path):
        """Conversion should fail gracefully for nonexistent file."""
        logger = logging.getLogger("test")

        onnx_path, _ = _convert_pytorch_to_onnx(
            tmp_path / "nonexistent.pt",
            input_shape_str="1,3,32,32",
            output_path=None,
            opset_version=17,
            logger=logger,
        )

        assert onnx_path is None

    def test_conversion_temp_file_cleanup(self, tmp_path):
        """Temp file should be created when no output path specified."""
        model = SimpleTestModel()
        model.eval()
        dummy_input = torch.randn(1, 3, 32, 32)
        traced = torch.jit.trace(model, dummy_input)

        pt_path = tmp_path / "model.pt"
        torch.jit.save(traced, str(pt_path))

        logger = logging.getLogger("test")

        # Convert without output path (should create temp file)
        onnx_path, temp_file = _convert_pytorch_to_onnx(
            pt_path,
            input_shape_str="1,3,32,32",
            output_path=None,  # No output path
            opset_version=17,
            logger=logger,
        )

        assert onnx_path is not None
        assert temp_file is not None
        assert onnx_path.exists()

        # Clean up
        onnx_path.unlink()

    def test_state_dict_not_supported(self, tmp_path):
        """State dict models should fail with helpful error."""
        model = SimpleTestModel()

        # Save as state_dict (not full model)
        pt_path = tmp_path / "weights.pth"
        torch.save(model.state_dict(), pt_path)

        logger = logging.getLogger("test")

        onnx_path, _ = _convert_pytorch_to_onnx(
            pt_path,
            input_shape_str="1,3,32,32",
            output_path=None,
            opset_version=17,
            logger=logger,
        )

        assert onnx_path is None
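Note: the tests above drive `_convert_pytorch_to_onnx` purely as a black box. For reference, a minimal sketch of the core step such a helper presumably performs (load the TorchScript archive, export it with `torch.onnx.export`) is shown below; the function name, the temp-file handling, and the input-shape parsing are assumptions, not the code shipped in haoline/cli.py.

```python
# Sketch only: what a TorchScript -> ONNX conversion helper typically does.
# The real _convert_pytorch_to_onnx in haoline/cli.py may differ in details.
from __future__ import annotations

import tempfile
from pathlib import Path

import torch


def convert_torchscript_to_onnx_sketch(
    pt_path: Path,
    input_shape: tuple[int, ...],      # parsed from a string like "1,3,32,32"
    output_path: Path | None = None,
    opset_version: int = 17,
) -> Path:
    # Only TorchScript archives load this way; a bare state_dict raises here,
    # which matches the "state dict not supported" behaviour the tests expect.
    module = torch.jit.load(str(pt_path))
    module.eval()

    dummy = torch.randn(*input_shape)  # tracing input built from the shape string

    if output_path is None:
        tmp = tempfile.NamedTemporaryFile(suffix=".onnx", delete=False)
        tmp.close()
        output_path = Path(tmp.name)   # caller is responsible for cleanup

    torch.onnx.export(module, dummy, str(output_path), opset_version=opset_version)
    return output_path
```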
@pytest.mark.skipif(not _TORCH_AVAILABLE, reason="PyTorch not installed")
class TestUltralyticsMetadataExtraction:
    """Tests for Ultralytics metadata extraction."""

    def test_extraction_without_ultralytics(self, tmp_path):
        """Should return None gracefully when ultralytics not available."""
        logger = logging.getLogger("test")

        # Mock ultralytics not being available
        with patch.dict("sys.modules", {"ultralytics": None}):
            result = _extract_ultralytics_metadata(tmp_path / "fake.pt", logger)
            # Should return None, not crash
            assert result is None or isinstance(result, dict)


class TestDatasetInfo:
    """Tests for DatasetInfo dataclass."""

    def test_dataset_info_creation(self):
        """DatasetInfo should be created with expected fields."""
        info = DatasetInfo(
            task="detect",
            num_classes=5,
            class_names=["cat", "dog", "bird", "fish", "car"],
            source="ultralytics",
        )

        assert info.task == "detect"
        assert info.num_classes == 5
        assert len(info.class_names) == 5
        assert info.source == "ultralytics"

    def test_dataset_info_defaults(self):
        """DatasetInfo should have sensible defaults."""
        info = DatasetInfo()

        assert info.task is None
        assert info.num_classes is None
        assert info.class_names == []
        assert info.source is None
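The assertions above pin down the shape of `DatasetInfo` fairly precisely. A stand-in consistent with these tests would look like the following; the field names and defaults come from the assertions, while everything else (and any extra fields on the real class in haoline/report.py) is assumed.

```python
# Stand-in consistent with the tests above, not the shipped dataclass.
from __future__ import annotations

from dataclasses import dataclass, field


@dataclass
class DatasetInfoSketch:
    task: str | None = None               # e.g. "detect", "classify", "segment"
    num_classes: int | None = None
    class_names: list[str] = field(default_factory=list)
    source: str | None = None             # e.g. "ultralytics", "output_shape"
```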
class TestInferNumClassesFromOutput:
    """Tests for infer_num_classes_from_output function (Task 4B.2.2)."""

    def test_classification_2d_output(self):
        """Should detect classification from [batch, num_classes] shape."""
        output_shapes = {"output": [1, 1000]}  # ImageNet-style
        result = infer_num_classes_from_output(output_shapes)

        assert result is not None
        assert result.task == "classify"
        assert result.num_classes == 1000
        assert result.source == "output_shape"

    def test_classification_3d_output(self):
        """Should detect classification from [batch, 1, num_classes] shape."""
        output_shapes = {"logits": [1, 1, 100]}  # CIFAR-100 style
        result = infer_num_classes_from_output(output_shapes)

        assert result is not None
        assert result.task == "classify"
        assert result.num_classes == 100
        assert result.source == "output_shape"

    def test_detection_yolo_output(self):
        """Should detect detection from YOLO-style [batch, boxes, 4+nc] shape."""
        # YOLOv8 output: [1, 8400, 84] for 80 COCO classes + 4 box coords
        output_shapes = {"output0": [1, 8400, 84]}
        result = infer_num_classes_from_output(output_shapes)

        assert result is not None
        assert result.task == "detect"
        assert result.num_classes == 80  # 84 - 4 = 80
        assert result.source == "output_shape"

    def test_segmentation_output(self):
        """Should detect segmentation from [batch, num_classes, h, w] shape."""
        output_shapes = {"output": [1, 21, 512, 512]}  # Pascal VOC style
        result = infer_num_classes_from_output(output_shapes)

        assert result is not None
        assert result.task == "segment"
        assert result.num_classes == 21
        assert result.source == "output_shape"

    def test_empty_output_shapes(self):
        """Should return None for empty output shapes."""
        result = infer_num_classes_from_output({})
        assert result is None

    def test_single_output_dimension(self):
        """Should return None for single-dimension outputs."""
        output_shapes = {"output": [10]}
        result = infer_num_classes_from_output(output_shapes)
        assert result is None

    def test_symbolic_dimensions(self):
        """Should handle symbolic dimensions gracefully."""
        output_shapes = {"output": ["batch", 1000]}
        result = infer_num_classes_from_output(output_shapes)

        assert result is not None
        assert result.task == "classify"
        assert result.num_classes == 1000

    def test_priority_output_names(self):
        """Should prioritize outputs with known names like 'logits'."""
        output_shapes = {
            "some_random_output": [1, 5],  # Would infer 5 classes
            "logits": [1, 100],  # Should prefer this
        }
        result = infer_num_classes_from_output(output_shapes)

        assert result is not None
        assert result.num_classes == 100

    def test_num_classes_too_small(self):
        """Should not infer if num_classes is too small (< 2)."""
        output_shapes = {"output": [1, 1]}  # Only 1 class - not valid
        result = infer_num_classes_from_output(output_shapes)
        assert result is None

    def test_num_classes_too_large(self):
        """Should not infer if num_classes is too large (> 10000)."""
        output_shapes = {"output": [1, 50000]}  # Unlikely to be classes
        result = infer_num_classes_from_output(output_shapes)
        assert result is None


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
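Taken together, these cases constrain the shape heuristics tightly: 2-D outputs (and `[batch, 1, C]`) read as classification, `[batch, boxes, 4 + nc]` as detection, `[batch, C, H, W]` as segmentation, class counts are only accepted between 2 and 10,000, and conventionally named outputs such as `logits` win over arbitrary names. A sketch consistent with the tests follows; the thresholds and name preference are inferred from the assertions, not copied from the packaged implementation in haoline/report.py.

```python
# Sketch of the shape heuristics the tests above exercise (assumed, not shipped code).
from __future__ import annotations

from haoline.report import DatasetInfo  # dataclass shipped with the package


def infer_num_classes_sketch(output_shapes: dict[str, list]) -> DatasetInfo | None:
    if not output_shapes:
        return None

    # Prefer conventionally named outputs over arbitrary ones.
    name = "logits" if "logits" in output_shapes else next(iter(output_shapes))
    shape = output_shapes[name]
    if len(shape) < 2:
        return None

    def as_int(d: object) -> int | None:
        return d if isinstance(d, int) else None  # symbolic dims like "batch" -> None

    last = as_int(shape[-1])

    if len(shape) == 2 and last and 2 <= last <= 10_000:
        return DatasetInfo(task="classify", num_classes=last, source="output_shape")

    if len(shape) == 3:
        mid = as_int(shape[1])
        if mid == 1 and last and 2 <= last <= 10_000:        # [batch, 1, C]
            return DatasetInfo(task="classify", num_classes=last, source="output_shape")
        if mid and mid > 1 and last and 6 <= last <= 10_004:  # [batch, boxes, 4 + nc]
            return DatasetInfo(task="detect", num_classes=last - 4, source="output_shape")

    if len(shape) == 4:
        channels = as_int(shape[1])
        if channels and 2 <= channels <= 10_000:               # [batch, C, H, W]
            return DatasetInfo(task="segment", num_classes=channels, source="output_shape")

    return None
```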
haoline/tests/test_report.py
@@ -0,0 +1,383 @@
# Copyright (c) 2025 HaoLine Contributors
# SPDX-License-Identifier: MIT

"""
Unit tests for the report module (ModelInspector, InspectionReport).
"""

from __future__ import annotations

import json
import sys
import tempfile
from pathlib import Path

import numpy as np
import onnx
import pytest
from onnx import TensorProto, helper

sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from ..report import InspectionReport, ModelInspector


def create_simple_model() -> onnx.ModelProto:
    """Create a simple model for testing."""
    X = helper.make_tensor_value_info("X", TensorProto.FLOAT, [1, 3, 8, 8])

    W = helper.make_tensor(
        "W",
        TensorProto.FLOAT,
        [16, 3, 3, 3],
        np.random.randn(16, 3, 3, 3).astype(np.float32).flatten().tolist(),
    )

    Y = helper.make_tensor_value_info("Y", TensorProto.FLOAT, [1, 16, 6, 6])

    conv = helper.make_node("Conv", ["X", "W"], ["conv_out"], kernel_shape=[3, 3])
    relu = helper.make_node("Relu", ["conv_out"], ["Y"])

    graph = helper.make_graph([conv, relu], "test", [X], [Y], [W])

    model = helper.make_model(
        graph,
        opset_imports=[helper.make_opsetid("", 17)],
        producer_name="test_producer",
        producer_version="1.0",
    )
    return model


class TestModelInspector:
    """Tests for ModelInspector class."""

    def test_inspect_returns_report(self):
        """Inspect should return an InspectionReport."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            assert isinstance(report, InspectionReport)
            assert report.metadata is not None
            assert report.graph_summary is not None
            assert report.param_counts is not None
            assert report.flop_counts is not None
            assert report.memory_estimates is not None
        finally:
            model_path.unlink()

    def test_metadata_extraction(self):
        """Test that metadata is extracted correctly."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            assert report.metadata.producer_name == "test_producer"
            assert report.metadata.producer_version == "1.0"
            assert "ai.onnx" in report.metadata.opsets
        finally:
            model_path.unlink()

    def test_graph_summary(self):
        """Test graph summary extraction."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            assert report.graph_summary.num_nodes == 2  # Conv + Relu
            assert report.graph_summary.num_inputs == 1
            assert report.graph_summary.num_outputs == 1
            assert "Conv" in report.graph_summary.op_type_counts
            assert "Relu" in report.graph_summary.op_type_counts
        finally:
            model_path.unlink()
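The save/inspect/unlink boilerplate above repeats in every test in this file. A small pytest fixture could factor it out; the sketch below reuses the module's own `create_simple_model` and `ModelInspector` and is a suggestion, not part of the shipped test suite.

```python
# Sketch: shared fixture for the temp-model boilerplate used throughout this file.
import tempfile
from pathlib import Path

import onnx
import pytest


@pytest.fixture
def simple_report():
    model = create_simple_model()
    with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
        onnx.save(model, f.name)
        model_path = Path(f.name)
    try:
        yield ModelInspector().inspect(model_path)
    finally:
        model_path.unlink()
```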
class TestInspectionReport:
    """Tests for InspectionReport class."""

    def test_to_json(self):
        """Test JSON serialization."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            json_str = report.to_json()
            assert json_str is not None

            # Should be valid JSON
            data = json.loads(json_str)
            assert "metadata" in data
            assert "graph_summary" in data
            assert "param_counts" in data
        finally:
            model_path.unlink()

    def test_to_dict(self):
        """Test dictionary serialization."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            data = report.to_dict()
            assert isinstance(data, dict)
            assert "metadata" in data
            assert data["metadata"]["producer_name"] == "test_producer"
        finally:
            model_path.unlink()

    def test_to_markdown(self):
        """Test Markdown generation."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            md = report.to_markdown()
            assert isinstance(md, str)
            assert "# Model Card:" in md
            assert "## Metadata" in md
            assert "## Graph Summary" in md
        finally:
            model_path.unlink()

    def test_to_html_basic(self):
        """Test HTML generation produces valid HTML structure."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            html = report.to_html()
            assert isinstance(html, str)
            assert "<!DOCTYPE html>" in html
            assert "<html" in html
            assert "</html>" in html
            assert "HaoLine" in html
        finally:
            model_path.unlink()

    def test_to_html_contains_sections(self):
        """Test HTML contains expected sections."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            html = report.to_html()
            # Should have key sections
            assert "Model Details" in html
            assert "Metadata" in html
            assert "Graph Summary" in html
        finally:
            model_path.unlink()

    def test_to_html_with_llm_summary(self):
        """Test HTML includes LLM summary when provided."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            # Add mock LLM summary
            report.llm_summary = {
                "success": True,
                "short_summary": "Test short summary.",
                "detailed_summary": "Test detailed summary paragraph.",
                "model": "test-model",
            }

            html = report.to_html()
            assert "Executive Summary" in html
            assert "Test short summary" in html
            assert "Test detailed summary" in html
        finally:
            model_path.unlink()

    def test_to_markdown_with_llm_summary(self):
        """Test Markdown includes executive summary when LLM summary provided."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            # Add mock LLM summary
            report.llm_summary = {
                "success": True,
                "short_summary": "This is a test model for unit testing.",
                "detailed_summary": "This model contains Conv and ReLU operations for testing.",
                "model": "test-llm-model",
            }

            md = report.to_markdown()
            assert "## Executive Summary" in md
            assert "Executive Summary" in md
            assert "This is a test model for unit testing." in md
            assert "This model contains Conv and ReLU operations for testing." in md
            assert "*Generated by test-llm-model*" in md
        finally:
            model_path.unlink()

    def test_to_markdown_no_executive_summary_without_llm(self):
        """Test Markdown omits executive summary when no LLM summary."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            # No LLM summary set
            md = report.to_markdown()
            assert "## Executive Summary" not in md
        finally:
            model_path.unlink()

    def test_to_html_embeds_images(self):
        """Test HTML embeds images as base64 when provided."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            # Create a temp image file
            with tempfile.TemporaryDirectory() as tmpdir:
                # Create a minimal PNG (1x1 pixel)
                import base64

                # Minimal valid PNG
                png_data = base64.b64decode(
                    "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
                )
                img_path = Path(tmpdir) / "test.png"
                img_path.write_bytes(png_data)

                html = report.to_html(image_paths={"test_chart": img_path})
                assert "data:image/png;base64," in html
        finally:
            model_path.unlink()

    def test_format_number(self):
        """Test number formatting utility."""
        assert InspectionReport._format_number(1_000) == "1.00K"
        assert InspectionReport._format_number(1_000_000) == "1.00M"
        assert InspectionReport._format_number(1_000_000_000) == "1.00B"
        assert InspectionReport._format_number(500) == "500"

    def test_format_bytes(self):
        """Test byte formatting utility."""
        assert InspectionReport._format_bytes(1024) == "1.02 KB"
        assert InspectionReport._format_bytes(1024 * 1024) == "1.05 MB"
        assert InspectionReport._format_bytes(1024 * 1024 * 1024) == "1.07 GB"
        assert InspectionReport._format_bytes(500) == "500 bytes"
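The expected strings in the two formatting tests imply decimal (SI) divisors applied to byte counts, which is why 1024 bytes renders as "1.02 KB" rather than "1.00 KiB". Helpers consistent with those assertions would look like the sketch below; the real `_format_number` and `_format_bytes` are private methods of `InspectionReport` and may be implemented differently.

```python
# Sketches consistent with the expected values asserted above (assumed, not shipped code).
def format_number_sketch(n: int) -> str:
    for divisor, suffix in ((1_000_000_000, "B"), (1_000_000, "M"), (1_000, "K")):
        if n >= divisor:
            return f"{n / divisor:.2f}{suffix}"
    return str(n)


def format_bytes_sketch(n: int) -> str:
    # Decimal units: 1024 -> "1.02 KB", 1024**2 -> "1.05 MB", 1024**3 -> "1.07 GB".
    for divisor, unit in ((1_000_000_000, "GB"), (1_000_000, "MB"), (1_000, "KB")):
        if n >= divisor:
            return f"{n / divisor:.2f} {unit}"
    return f"{n} bytes"
```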
class TestReportRiskSignals:
    """Tests for risk signal inclusion in reports."""

    def test_report_includes_risk_signals(self):
        """Reports should include analyzed risk signals."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            # risk_signals is always a list (may be empty for small models)
            assert isinstance(report.risk_signals, list)
        finally:
            model_path.unlink()

    def test_report_includes_blocks(self):
        """Reports should include detected blocks."""
        model = create_simple_model()

        with tempfile.NamedTemporaryFile(suffix=".onnx", delete=False) as f:
            onnx.save(model, f.name)
            model_path = Path(f.name)

        try:
            inspector = ModelInspector()
            report = inspector.inspect(model_path)

            assert isinstance(report.detected_blocks, list)
            assert report.architecture_type in (
                "cnn",
                "mlp",
                "transformer",
                "hybrid",
                "unknown",
            )
        finally:
            model_path.unlink()


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
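For reference, a minimal end-to-end use of the API these tests exercise might look like the following; it assumes the haoline package is installed and that a `model.onnx` file exists, and the output file names are arbitrary.

```python
# Usage sketch based on the methods asserted above.
from pathlib import Path

from haoline.report import ModelInspector

report = ModelInspector().inspect(Path("model.onnx"))
print(report.to_markdown())                               # model card as Markdown
Path("report.html").write_text(report.to_html())          # self-contained HTML report
Path("report.json").write_text(report.to_json())          # machine-readable JSON
```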