regscale-cli 6.18.0.0__py3-none-any.whl → 6.19.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli has been flagged as potentially problematic; see the registry's advisory page for details.
- regscale/__init__.py +1 -1
- regscale/integrations/api_paginator.py +932 -0
- regscale/integrations/api_paginator_example.py +348 -0
- regscale/integrations/commercial/__init__.py +11 -10
- regscale/integrations/commercial/{qualys.py → qualys/__init__.py} +756 -105
- regscale/integrations/commercial/qualys/scanner.py +1051 -0
- regscale/integrations/commercial/qualys/variables.py +21 -0
- regscale/integrations/commercial/sicura/api.py +1 -0
- regscale/integrations/commercial/stigv2/click_commands.py +36 -8
- regscale/integrations/commercial/stigv2/stig_integration.py +63 -9
- regscale/integrations/commercial/tenablev2/__init__.py +9 -0
- regscale/integrations/commercial/tenablev2/authenticate.py +23 -2
- regscale/integrations/commercial/tenablev2/commands.py +779 -0
- regscale/integrations/commercial/tenablev2/jsonl_scanner.py +1999 -0
- regscale/integrations/commercial/tenablev2/sc_scanner.py +600 -0
- regscale/integrations/commercial/tenablev2/scanner.py +7 -5
- regscale/integrations/commercial/tenablev2/utils.py +21 -4
- regscale/integrations/commercial/tenablev2/variables.py +4 -0
- regscale/integrations/jsonl_scanner_integration.py +523 -142
- regscale/integrations/scanner_integration.py +102 -26
- regscale/integrations/transformer/__init__.py +17 -0
- regscale/integrations/transformer/data_transformer.py +445 -0
- regscale/integrations/transformer/mappings/__init__.py +8 -0
- regscale/integrations/variables.py +2 -0
- regscale/models/__init__.py +5 -2
- regscale/models/integration_models/cisa_kev_data.json +6 -6
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/asset.py +5 -2
- regscale/models/regscale_models/file.py +5 -2
- regscale/models/regscale_models/group.py +2 -1
- regscale/models/regscale_models/user_group.py +1 -1
- regscale/regscale.py +3 -1
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/METADATA +1 -1
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/RECORD +46 -30
- tests/regscale/core/test_version.py +22 -0
- tests/regscale/integrations/__init__.py +0 -0
- tests/regscale/integrations/test_api_paginator.py +597 -0
- tests/regscale/integrations/test_integration_mapping.py +60 -0
- tests/regscale/integrations/test_issue_creation.py +317 -0
- tests/regscale/integrations/test_issue_due_date.py +46 -0
- tests/regscale/integrations/transformer/__init__.py +0 -0
- tests/regscale/integrations/transformer/test_data_transformer.py +850 -0
- regscale/integrations/commercial/tenablev2/click.py +0 -1641
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/LICENSE +0 -0
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/WHEEL +0 -0
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,850 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
"""
|
|
4
|
+
Tests for the DataTransformer class in the transformer module.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
import tempfile
|
|
10
|
+
import xml.etree.ElementTree as ET
|
|
11
|
+
from typing import Dict, Any
|
|
12
|
+
from unittest.mock import patch
|
|
13
|
+
|
|
14
|
+
import pytest
|
|
15
|
+
|
|
16
|
+
from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
|
|
17
|
+
from regscale.integrations.transformer.data_transformer import DataTransformer, DataMapping
|
|
18
|
+
from regscale.models import regscale_models
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class TestDataTransformer:
|
|
22
|
+
"""Test cases for DataTransformer class."""
|
|
23
|
+
|
|
24
|
+
@pytest.fixture
|
|
25
|
+
def sample_mapping_data(self) -> Dict[str, Any]:
|
|
26
|
+
"""Return sample mapping data for testing."""
|
|
27
|
+
return {
|
|
28
|
+
"asset_mapping": {
|
|
29
|
+
"name": "asset.name",
|
|
30
|
+
"identifier": "asset.id",
|
|
31
|
+
"ip_address": "asset.ip",
|
|
32
|
+
"mac_address": "asset.mac",
|
|
33
|
+
"asset_type": "asset.type",
|
|
34
|
+
"fqdn": "asset.fqdn",
|
|
35
|
+
},
|
|
36
|
+
"finding_mapping": {
|
|
37
|
+
"title": "finding.title",
|
|
38
|
+
"description": "finding.description",
|
|
39
|
+
"plugin_name": "finding.plugin",
|
|
40
|
+
"plugin_id": "finding.plugin_id",
|
|
41
|
+
"severity": "finding.severity",
|
|
42
|
+
"category": "finding.category",
|
|
43
|
+
"cve": "finding.cve",
|
|
44
|
+
"cvss_v3_score": "finding.cvss_score",
|
|
45
|
+
"recommendation_for_mitigation": "finding.solution",
|
|
46
|
+
"identified_risk": "finding.risk",
|
|
47
|
+
"evidence": "finding.output",
|
|
48
|
+
},
|
|
49
|
+
"asset_defaults": {
|
|
50
|
+
"asset_owner_id": "",
|
|
51
|
+
"status": "Active (On Network)",
|
|
52
|
+
"asset_type": "Other",
|
|
53
|
+
"asset_category": "Hardware",
|
|
54
|
+
},
|
|
55
|
+
"finding_defaults": {"priority": "Medium", "status": "Open", "issue_type": "Risk"},
|
|
56
|
+
"severity_mapping": {
|
|
57
|
+
"Critical": "Critical",
|
|
58
|
+
"High": "High",
|
|
59
|
+
"Medium": "Moderate",
|
|
60
|
+
"Low": "Low",
|
|
61
|
+
"Info": "NotAssigned",
|
|
62
|
+
},
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
@pytest.fixture
|
|
66
|
+
def sample_json_file(self) -> str:
|
|
67
|
+
"""Create a temporary JSON file for testing."""
|
|
68
|
+
with tempfile.NamedTemporaryFile(delete=False, suffix=".json") as tmp:
|
|
69
|
+
tmp_path = tmp.name
|
|
70
|
+
mapping_data = {
|
|
71
|
+
"asset_mapping": {"name": "asset.name"},
|
|
72
|
+
"finding_mapping": {"title": "finding.title"},
|
|
73
|
+
"asset_defaults": {"status": "Active"},
|
|
74
|
+
"finding_defaults": {"status": "Open"},
|
|
75
|
+
"severity_mapping": {"High": "High"},
|
|
76
|
+
}
|
|
77
|
+
tmp.write(json.dumps(mapping_data).encode("utf-8"))
|
|
78
|
+
yield tmp_path
|
|
79
|
+
if os.path.exists(tmp_path):
|
|
80
|
+
os.unlink(tmp_path)
|
|
81
|
+
|
|
82
|
+
@pytest.fixture
|
|
83
|
+
def sample_asset_data(self) -> Dict[str, Any]:
|
|
84
|
+
"""Return sample asset data for testing."""
|
|
85
|
+
return {
|
|
86
|
+
"asset": {
|
|
87
|
+
"name": "Test Asset",
|
|
88
|
+
"id": "asset-123",
|
|
89
|
+
"ip": "192.168.1.1", # NOSONAR
|
|
90
|
+
"mac": "00:11:22:33:44:55",
|
|
91
|
+
"type": "Server",
|
|
92
|
+
"fqdn": "test.example.com",
|
|
93
|
+
},
|
|
94
|
+
"metadata": {"created": "2023-01-01", "updated": "2023-01-02"},
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
@pytest.fixture
|
|
98
|
+
def sample_finding_data(self) -> Dict[str, Any]:
|
|
99
|
+
"""Return sample finding data for testing."""
|
|
100
|
+
return {
|
|
101
|
+
"finding": {
|
|
102
|
+
"title": "Test Finding",
|
|
103
|
+
"description": "This is a test finding description",
|
|
104
|
+
"plugin": "Test Plugin",
|
|
105
|
+
"plugin_id": "123456",
|
|
106
|
+
"severity": "High",
|
|
107
|
+
"category": "Security",
|
|
108
|
+
"cve": "CVE-2023-12345",
|
|
109
|
+
"cvss_score": "7.5",
|
|
110
|
+
"solution": "Patch the system",
|
|
111
|
+
"risk": "Data breach risk",
|
|
112
|
+
"output": "Test output evidence",
|
|
113
|
+
},
|
|
114
|
+
"metadata": {"created": "2023-01-01", "updated": "2023-01-02"},
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
@pytest.fixture
|
|
118
|
+
def transformer(self, sample_mapping_data) -> DataTransformer:
|
|
119
|
+
"""Return a DataTransformer instance for testing."""
|
|
120
|
+
return DataTransformer(mapping_data=sample_mapping_data)
|
|
121
|
+
|
|
122
|
+
def test_init_with_mapping_data(self, sample_mapping_data):
|
|
123
|
+
"""Test initialization with mapping data."""
|
|
124
|
+
transformer = DataTransformer(mapping_data=sample_mapping_data)
|
|
125
|
+
|
|
126
|
+
# Verify mapping was loaded correctly
|
|
127
|
+
assert isinstance(transformer.mapping, DataMapping)
|
|
128
|
+
assert transformer.mapping.asset_mapping == sample_mapping_data["asset_mapping"]
|
|
129
|
+
assert transformer.mapping.finding_mapping == sample_mapping_data["finding_mapping"]
|
|
130
|
+
assert transformer.mapping.asset_defaults == sample_mapping_data["asset_defaults"]
|
|
131
|
+
assert transformer.mapping.finding_defaults == sample_mapping_data["finding_defaults"]
|
|
132
|
+
assert transformer.mapping.severity_mapping == sample_mapping_data["severity_mapping"]
|
|
133
|
+
|
|
134
|
+
# Verify scan_date is set
|
|
135
|
+
assert transformer.scan_date is not None
|
|
136
|
+
|
|
137
|
+
def test_init_with_mapping_file(self, sample_json_file):
|
|
138
|
+
"""Test initialization with mapping file."""
|
|
139
|
+
transformer = DataTransformer(mapping_file=sample_json_file)
|
|
140
|
+
|
|
141
|
+
# Verify mapping was loaded correctly
|
|
142
|
+
assert isinstance(transformer.mapping, DataMapping)
|
|
143
|
+
assert transformer.mapping.asset_mapping == {"name": "asset.name"}
|
|
144
|
+
assert transformer.mapping.finding_mapping == {"title": "finding.title"}
|
|
145
|
+
assert transformer.mapping.asset_defaults == {"status": "Active"}
|
|
146
|
+
assert transformer.mapping.finding_defaults == {"status": "Open"}
|
|
147
|
+
assert transformer.mapping.severity_mapping == {"High": "High"}
|
|
148
|
+
|
|
149
|
+
def test_json_decode_error_handling(self):
|
|
150
|
+
"""Test handling of JSON decode errors when loading mapping file."""
|
|
151
|
+
with tempfile.NamedTemporaryFile(delete=False, suffix=".json") as tmp:
|
|
152
|
+
tmp_path = tmp.name
|
|
153
|
+
tmp.write(b'{"invalid": "json') # Invalid JSON
|
|
154
|
+
|
|
155
|
+
try:
|
|
156
|
+
with patch("logging.Logger.error") as mock_error:
|
|
157
|
+
with pytest.raises(json.JSONDecodeError):
|
|
158
|
+
DataTransformer(mapping_file=tmp_path)
|
|
159
|
+
|
|
160
|
+
# Verify error was logged
|
|
161
|
+
assert mock_error.called
|
|
162
|
+
finally:
|
|
163
|
+
if os.path.exists(tmp_path):
|
|
164
|
+
os.unlink(tmp_path)
|
|
165
|
+
|
|
166
|
+
def test_init_with_no_mapping(self):
|
|
167
|
+
"""Test initialization with no mapping."""
|
|
168
|
+
with pytest.raises(ValueError) as excinfo:
|
|
169
|
+
DataTransformer()
|
|
170
|
+
|
|
171
|
+
assert "Either mapping_file or mapping_data must be provided" in str(excinfo.value)
|
|
172
|
+
|
|
173
|
+
def test_init_with_nonexistent_file(self):
|
|
174
|
+
"""Test initialization with nonexistent file."""
|
|
175
|
+
with pytest.raises(FileNotFoundError):
|
|
176
|
+
DataTransformer(mapping_file="nonexistent_file.json")
|
|
177
|
+
|
|
178
|
+
def test_get_data_value(self, transformer, sample_asset_data):
|
|
179
|
+
"""Test extracting values from nested data."""
|
|
180
|
+
# Test valid paths
|
|
181
|
+
assert transformer._get_data_value(sample_asset_data, "asset.name") == "Test Asset"
|
|
182
|
+
assert transformer._get_data_value(sample_asset_data, "asset.ip") == "192.168.1.1" # NOSONAR
|
|
183
|
+
assert transformer._get_data_value(sample_asset_data, "metadata.created") == "2023-01-01"
|
|
184
|
+
|
|
185
|
+
# Test invalid paths
|
|
186
|
+
assert transformer._get_data_value(sample_asset_data, "asset.nonexistent") is None
|
|
187
|
+
assert transformer._get_data_value(sample_asset_data, "nonexistent.field") is None
|
|
188
|
+
|
|
189
|
+
# Test default value
|
|
190
|
+
assert transformer._get_data_value(sample_asset_data, "asset.nonexistent", "default") == "default"
|
|
191
|
+
|
|
192
|
+
# Test with empty field path
|
|
193
|
+
assert transformer._get_data_value(sample_asset_data, "", "default") == "default"
|
|
194
|
+
|
|
195
|
+
# Test with list access
|
|
196
|
+
list_data = {"items": [{"id": "item1"}, {"id": "item2"}]}
|
|
197
|
+
assert transformer._get_data_value(list_data, "items.0.id") == "item1"
|
|
198
|
+
assert transformer._get_data_value(list_data, "items.1.id") == "item2"
|
|
199
|
+
assert transformer._get_data_value(list_data, "items.2.id", "default") == "default"
|
|
200
|
+
|
|
201
|
+
# Test error handling
|
|
202
|
+
assert transformer._get_data_value(None, "any.path", "default") == "default"
|
|
203
|
+
assert transformer._get_data_value({"key": None}, "key.subfield", "default") == "default"
|
|
204
|
+
|
|
205
|
+
def test_apply_mapping(self, transformer, sample_asset_data):
|
|
206
|
+
"""Test applying mapping to source data."""
|
|
207
|
+
mapping = {"name": "asset.name", "ip": "asset.ip", "created_date": "metadata.created"}
|
|
208
|
+
defaults = {"status": "Active", "type": "Server"}
|
|
209
|
+
|
|
210
|
+
result = transformer._apply_mapping(sample_asset_data, mapping, defaults)
|
|
211
|
+
|
|
212
|
+
# Verify defaults are applied
|
|
213
|
+
assert result["status"] == "Active"
|
|
214
|
+
assert result["type"] == "Server"
|
|
215
|
+
|
|
216
|
+
# Verify mappings are applied
|
|
217
|
+
assert result["name"] == "Test Asset"
|
|
218
|
+
assert result["ip"] == "192.168.1.1" # NOSONAR
|
|
219
|
+
assert result["created_date"] == "2023-01-01"
|
|
220
|
+
|
|
221
|
+
def test_parse_data_source_dict(self, transformer):
|
|
222
|
+
"""Test parsing a dictionary data source."""
|
|
223
|
+
data = {"key": "value"}
|
|
224
|
+
result = transformer._parse_data_source(data)
|
|
225
|
+
assert result == data
|
|
226
|
+
|
|
227
|
+
def test_parse_data_source_json_string(self, transformer):
|
|
228
|
+
"""Test parsing a JSON string data source."""
|
|
229
|
+
json_str = '{"key": "value"}'
|
|
230
|
+
result = transformer._parse_data_source(json_str)
|
|
231
|
+
assert result == {"key": "value"}
|
|
232
|
+
|
|
233
|
+
def test_parse_data_source_file_path(self, transformer):
|
|
234
|
+
"""Test parsing a file path data source."""
|
|
235
|
+
with tempfile.NamedTemporaryFile(delete=False, suffix=".json") as tmp:
|
|
236
|
+
tmp_path = tmp.name
|
|
237
|
+
tmp.write(b'{"key": "value_from_file"}')
|
|
238
|
+
|
|
239
|
+
try:
|
|
240
|
+
result = transformer._parse_data_source(tmp_path)
|
|
241
|
+
assert result == {"key": "value_from_file"}
|
|
242
|
+
finally:
|
|
243
|
+
if os.path.exists(tmp_path):
|
|
244
|
+
os.unlink(tmp_path)
|
|
245
|
+
|
|
246
|
+
def test_parse_data_source_unsupported_format(self, transformer):
|
|
247
|
+
"""Test parsing an unsupported data source format."""
|
|
248
|
+
with pytest.raises(ValueError) as excinfo:
|
|
249
|
+
transformer._parse_data_source(123) # Integer is not a supported format
|
|
250
|
+
|
|
251
|
+
assert "Unsupported data source type:" in str(excinfo.value)
|
|
252
|
+
|
|
253
|
+
def test_parse_data_source_bytes(self, transformer):
|
|
254
|
+
"""Test parsing a bytes data source."""
|
|
255
|
+
# Test with JSON bytes
|
|
256
|
+
json_bytes = b'{"key": "value_from_bytes"}'
|
|
257
|
+
result = transformer._parse_data_source(json_bytes)
|
|
258
|
+
assert result == {"key": "value_from_bytes"}
|
|
259
|
+
|
|
260
|
+
# Test with XML bytes
|
|
261
|
+
xml_bytes = b"<root><item>value</item></root>"
|
|
262
|
+
result = transformer._parse_data_source(xml_bytes)
|
|
263
|
+
assert "item" in result
|
|
264
|
+
|
|
265
|
+
# Test with invalid bytes
|
|
266
|
+
with pytest.raises(ValueError):
|
|
267
|
+
transformer._parse_data_source(b"not valid json or xml")
|
|
268
|
+
|
|
269
|
+
def test_parse_data_source_xml_string(self, transformer):
|
|
270
|
+
"""Test parsing an XML string data source."""
|
|
271
|
+
xml_str = '<root><item id="1">value</item></root>'
|
|
272
|
+
result = transformer._parse_data_source(xml_str)
|
|
273
|
+
# Test for the correct structure based on the actual implementation
|
|
274
|
+
assert "item" in result
|
|
275
|
+
assert "@id" in result["item"]
|
|
276
|
+
assert result["item"]["@id"] == "1"
|
|
277
|
+
assert "#text" in result["item"]
|
|
278
|
+
assert result["item"]["#text"] == "value"
|
|
279
|
+
|
|
280
|
+
def test_parse_data_source_unrecognized_string(self, transformer):
|
|
281
|
+
"""Test parsing an unrecognized string format."""
|
|
282
|
+
with pytest.raises(ValueError) as excinfo:
|
|
283
|
+
transformer._parse_data_source("not json or xml")
|
|
284
|
+
|
|
285
|
+
assert "Could not parse data source as JSON or XML" in str(excinfo.value)
|
|
286
|
+
|
|
287
|
+
def test_xml_to_dict(self, transformer):
|
|
288
|
+
"""Test converting XML element to dictionary."""
|
|
289
|
+
root = ET.fromstring('<root><item id="1">value</item><item id="2">value2</item></root>')
|
|
290
|
+
result = transformer._xml_to_dict(root)
|
|
291
|
+
|
|
292
|
+
assert "item" in result
|
|
293
|
+
assert isinstance(result["item"], list)
|
|
294
|
+
assert len(result["item"]) == 2
|
|
295
|
+
assert "@id" in result["item"][0]
|
|
296
|
+
assert result["item"][0]["@id"] == "1"
|
|
297
|
+
assert "#text" in result["item"][0]
|
|
298
|
+
assert result["item"][0]["#text"] == "value"
|
|
299
|
+
assert "@id" in result["item"][1]
|
|
300
|
+
assert result["item"][1]["@id"] == "2"
|
|
301
|
+
assert "#text" in result["item"][1]
|
|
302
|
+
assert result["item"][1]["#text"] == "value2"
|
|
303
|
+
|
|
304
|
+
def test_xml_to_dict_with_nested_elements(self, transformer):
|
|
305
|
+
"""Test converting XML with nested elements to dictionary."""
|
|
306
|
+
xml = """
|
|
307
|
+
<root>
|
|
308
|
+
<parent>
|
|
309
|
+
<child id="1">value1</child>
|
|
310
|
+
<child id="2">value2</child>
|
|
311
|
+
</parent>
|
|
312
|
+
<sibling>value3</sibling>
|
|
313
|
+
</root>
|
|
314
|
+
"""
|
|
315
|
+
root = ET.fromstring(xml)
|
|
316
|
+
result = transformer._xml_to_dict(root)
|
|
317
|
+
|
|
318
|
+
assert "parent" in result
|
|
319
|
+
assert "child" in result["parent"]
|
|
320
|
+
assert isinstance(result["parent"]["child"], list)
|
|
321
|
+
assert len(result["parent"]["child"]) == 2
|
|
322
|
+
assert "sibling" in result
|
|
323
|
+
assert result["sibling"] == "value3"
|
|
324
|
+
|
|
325
|
+
def test_xml_to_dict_with_text_only(self, transformer):
|
|
326
|
+
"""Test converting XML with only text content."""
|
|
327
|
+
root = ET.fromstring("<simple>just text</simple>")
|
|
328
|
+
result = transformer._xml_to_dict(root)
|
|
329
|
+
|
|
330
|
+
assert result == "just text"
|
|
331
|
+
|
|
332
|
+
def test_transform_to_asset(self, transformer, sample_asset_data):
|
|
333
|
+
"""Test transforming data to IntegrationAsset."""
|
|
334
|
+
asset = transformer.transform_to_asset(sample_asset_data)
|
|
335
|
+
|
|
336
|
+
# Verify asset properties
|
|
337
|
+
assert isinstance(asset, IntegrationAsset)
|
|
338
|
+
assert asset.name == "Test Asset"
|
|
339
|
+
assert asset.identifier == "asset-123"
|
|
340
|
+
assert asset.ip_address == "192.168.1.1" # NOSONAR
|
|
341
|
+
assert asset.mac_address == "00:11:22:33:44:55"
|
|
342
|
+
assert asset.asset_type == "Server"
|
|
343
|
+
assert asset.fqdn == "test.example.com"
|
|
344
|
+
|
|
345
|
+
# Verify defaults are applied
|
|
346
|
+
assert asset.status == "Active (On Network)"
|
|
347
|
+
assert asset.asset_category == "Hardware"
|
|
348
|
+
|
|
349
|
+
def test_transform_to_asset_with_plan_id(self, transformer, sample_asset_data):
|
|
350
|
+
"""Test transforming data to IntegrationAsset with plan ID."""
|
|
351
|
+
plan_id = 123
|
|
352
|
+
asset = transformer.transform_to_asset(sample_asset_data, plan_id=plan_id)
|
|
353
|
+
|
|
354
|
+
# Verify plan ID is set
|
|
355
|
+
assert asset.parent_id == plan_id
|
|
356
|
+
assert asset.parent_module == regscale_models.SecurityPlan.get_module_slug()
|
|
357
|
+
|
|
358
|
+
def test_transform_to_asset_with_missing_name_and_identifier(self, transformer):
|
|
359
|
+
"""Test transforming data with missing name and identifier but having IP."""
|
|
360
|
+
data = {"asset": {"ip": "192.168.1.100"}} # NOSONAR
|
|
361
|
+
asset = transformer.transform_to_asset(data)
|
|
362
|
+
|
|
363
|
+
# Verify default name is used and identifier is set to IP
|
|
364
|
+
assert asset.name == "Unknown Asset"
|
|
365
|
+
assert asset.identifier == "192.168.1.100" # NOSONAR
|
|
366
|
+
assert asset.ip_address == "192.168.1.100" # NOSONAR
|
|
367
|
+
|
|
368
|
+
def test_transform_to_asset_with_missing_name(self, transformer):
|
|
369
|
+
"""Test transforming data with missing name."""
|
|
370
|
+
data = {"asset": {"id": "asset-123"}}
|
|
371
|
+
asset = transformer.transform_to_asset(data)
|
|
372
|
+
|
|
373
|
+
# Verify default name is used
|
|
374
|
+
assert asset.name == "Unknown Asset"
|
|
375
|
+
assert asset.identifier == "asset-123"
|
|
376
|
+
|
|
377
|
+
def test_transform_to_finding(self, transformer, sample_finding_data):
|
|
378
|
+
"""Test transforming data to IntegrationFinding."""
|
|
379
|
+
finding = transformer.transform_to_finding(sample_finding_data)
|
|
380
|
+
|
|
381
|
+
# Verify finding properties
|
|
382
|
+
assert isinstance(finding, IntegrationFinding)
|
|
383
|
+
assert finding.title == "Test Finding"
|
|
384
|
+
assert finding.description == "This is a test finding description"
|
|
385
|
+
assert finding.plugin_name == "Test Plugin"
|
|
386
|
+
assert finding.plugin_id == "123456"
|
|
387
|
+
assert finding.severity == regscale_models.IssueSeverity.High
|
|
388
|
+
assert finding.category == "Security"
|
|
389
|
+
assert finding.cve == "CVE-2023-12345"
|
|
390
|
+
assert finding.cvss_v3_score == "7.5"
|
|
391
|
+
assert finding.recommendation_for_mitigation == "Patch the system"
|
|
392
|
+
assert finding.identified_risk == "Data breach risk"
|
|
393
|
+
assert finding.evidence == "Test output evidence"
|
|
394
|
+
|
|
395
|
+
# Verify defaults are applied
|
|
396
|
+
assert finding.priority == "Medium"
|
|
397
|
+
assert finding.status == regscale_models.IssueStatus.Open
|
|
398
|
+
assert finding.issue_type == "Risk"
|
|
399
|
+
|
|
400
|
+
def test_transform_to_finding_with_invalid_severity(self, transformer):
|
|
401
|
+
"""Test transforming data with invalid severity mapping."""
|
|
402
|
+
# Create data with unknown severity
|
|
403
|
+
data = {
|
|
404
|
+
"finding": {
|
|
405
|
+
"title": "Test Finding",
|
|
406
|
+
"description": "Description",
|
|
407
|
+
"severity": "UNKNOWN", # Unknown severity
|
|
408
|
+
"category": "Security",
|
|
409
|
+
}
|
|
410
|
+
}
|
|
411
|
+
|
|
412
|
+
with patch("logging.Logger.warning") as mock_warning:
|
|
413
|
+
# Create a patched version that simulates mapping but logs a warning
|
|
414
|
+
with patch.object(
|
|
415
|
+
transformer,
|
|
416
|
+
"_apply_mapping",
|
|
417
|
+
return_value={
|
|
418
|
+
"title": "Test Finding",
|
|
419
|
+
"description": "Description",
|
|
420
|
+
"severity": "INVALID_SEVERITY", # This will cause the warning
|
|
421
|
+
"category": "Security",
|
|
422
|
+
"plugin_name": "Test Finding",
|
|
423
|
+
"status": regscale_models.IssueStatus.Open,
|
|
424
|
+
"control_labels": [],
|
|
425
|
+
},
|
|
426
|
+
):
|
|
427
|
+
with patch.object(transformer.mapping, "severity_mapping", {"INVALID_SEVERITY": "InvalidMapping"}):
|
|
428
|
+
finding = transformer.transform_to_finding(data)
|
|
429
|
+
|
|
430
|
+
# Verify warning was logged
|
|
431
|
+
assert mock_warning.called
|
|
432
|
+
|
|
433
|
+
# Default severity should be used
|
|
434
|
+
assert finding.title == "Test Finding"
|
|
435
|
+
|
|
436
|
+
def test_transform_to_finding_with_asset_identifier(self, transformer, sample_finding_data):
|
|
437
|
+
"""Test transforming data to IntegrationFinding with asset identifier."""
|
|
438
|
+
asset_id = "asset-123"
|
|
439
|
+
finding = transformer.transform_to_finding(sample_finding_data, asset_identifier=asset_id)
|
|
440
|
+
|
|
441
|
+
# Verify asset identifier is set
|
|
442
|
+
assert finding.asset_identifier == asset_id
|
|
443
|
+
|
|
444
|
+
def test_transform_to_finding_with_missing_title(self, transformer):
|
|
445
|
+
"""Test transforming data with missing title."""
|
|
446
|
+
data = {
|
|
447
|
+
"finding": {
|
|
448
|
+
"description": "Description only",
|
|
449
|
+
"severity": "Low", # Add required severity field
|
|
450
|
+
"category": "Vulnerability", # Add required category field
|
|
451
|
+
}
|
|
452
|
+
}
|
|
453
|
+
|
|
454
|
+
# Mock the transform_to_finding method to add required fields
|
|
455
|
+
with patch.object(
|
|
456
|
+
transformer,
|
|
457
|
+
"_apply_mapping",
|
|
458
|
+
return_value={
|
|
459
|
+
"title": "Unknown Finding",
|
|
460
|
+
"description": "Description only",
|
|
461
|
+
"severity": regscale_models.IssueSeverity.Low,
|
|
462
|
+
"category": "Vulnerability",
|
|
463
|
+
"plugin_name": "Unknown Finding",
|
|
464
|
+
"status": regscale_models.IssueStatus.Open,
|
|
465
|
+
"control_labels": [],
|
|
466
|
+
},
|
|
467
|
+
):
|
|
468
|
+
finding = transformer.transform_to_finding(data)
|
|
469
|
+
|
|
470
|
+
# Verify default title is used
|
|
471
|
+
assert finding.title == "Unknown Finding"
|
|
472
|
+
assert finding.description == "Description only"
|
|
473
|
+
assert finding.plugin_name == "Unknown Finding"
|
|
474
|
+
assert finding.severity == regscale_models.IssueSeverity.Low
|
|
475
|
+
|
|
476
|
+
def test_transform_to_finding_with_missing_description(self, transformer):
|
|
477
|
+
"""Test transforming data with missing description."""
|
|
478
|
+
data = {
|
|
479
|
+
"finding": {
|
|
480
|
+
"title": "Title only",
|
|
481
|
+
"severity": "Medium", # Add required severity field
|
|
482
|
+
"category": "Vulnerability", # Add required category field
|
|
483
|
+
}
|
|
484
|
+
}
|
|
485
|
+
|
|
486
|
+
# Mock the transform_to_finding method to add required fields
|
|
487
|
+
with patch.object(
|
|
488
|
+
transformer,
|
|
489
|
+
"_apply_mapping",
|
|
490
|
+
return_value={
|
|
491
|
+
"title": "Title only",
|
|
492
|
+
"description": "No description available",
|
|
493
|
+
"severity": regscale_models.IssueSeverity.Moderate,
|
|
494
|
+
"category": "Vulnerability",
|
|
495
|
+
"plugin_name": "Title only",
|
|
496
|
+
"status": regscale_models.IssueStatus.Open,
|
|
497
|
+
"control_labels": [],
|
|
498
|
+
},
|
|
499
|
+
):
|
|
500
|
+
finding = transformer.transform_to_finding(data)
|
|
501
|
+
|
|
502
|
+
# Verify default description is used
|
|
503
|
+
assert finding.title == "Title only"
|
|
504
|
+
assert finding.description == "No description available"
|
|
505
|
+
assert finding.severity == regscale_models.IssueSeverity.Moderate
|
|
506
|
+
|
|
507
|
+
def test_transform_to_finding_with_missing_category(self, transformer):
|
|
508
|
+
"""Test transforming data with missing category."""
|
|
509
|
+
data = {"finding": {"title": "Test Finding", "description": "Description", "severity": "Medium"}}
|
|
510
|
+
|
|
511
|
+
# Mock the transform_to_finding method to add required fields but omit category
|
|
512
|
+
with patch.object(
|
|
513
|
+
transformer,
|
|
514
|
+
"_apply_mapping",
|
|
515
|
+
return_value={
|
|
516
|
+
"title": "Test Finding",
|
|
517
|
+
"description": "Description",
|
|
518
|
+
"severity": regscale_models.IssueSeverity.Moderate,
|
|
519
|
+
"plugin_name": "Test Finding",
|
|
520
|
+
"status": regscale_models.IssueStatus.Open,
|
|
521
|
+
"control_labels": [],
|
|
522
|
+
# Note: category is missing here
|
|
523
|
+
},
|
|
524
|
+
):
|
|
525
|
+
finding = transformer.transform_to_finding(data)
|
|
526
|
+
|
|
527
|
+
# Verify default category is used
|
|
528
|
+
assert finding.title == "Test Finding"
|
|
529
|
+
assert finding.category == "Vulnerability" # Default category
|
|
530
|
+
|
|
531
|
+
def test_transform_to_finding_with_scan_date(self, transformer, sample_finding_data):
|
|
532
|
+
"""Test transforming data to IntegrationFinding with custom scan date."""
|
|
533
|
+
# Set custom scan date
|
|
534
|
+
custom_scan_date = "2023-03-15T14:30:00Z"
|
|
535
|
+
|
|
536
|
+
# Mock the transform_to_finding method to include the scan date
|
|
537
|
+
with patch.object(
|
|
538
|
+
transformer,
|
|
539
|
+
"_apply_mapping",
|
|
540
|
+
return_value={
|
|
541
|
+
"title": "Test Finding",
|
|
542
|
+
"description": "Description",
|
|
543
|
+
"severity": regscale_models.IssueSeverity.Moderate,
|
|
544
|
+
"category": "Vulnerability",
|
|
545
|
+
"plugin_name": "Test Finding",
|
|
546
|
+
"status": regscale_models.IssueStatus.Open,
|
|
547
|
+
"control_labels": [],
|
|
548
|
+
"scan_date": custom_scan_date,
|
|
549
|
+
},
|
|
550
|
+
):
|
|
551
|
+
finding = transformer.transform_to_finding(sample_finding_data)
|
|
552
|
+
|
|
553
|
+
# Verify scan date is set from the data
|
|
554
|
+
assert finding.scan_date == custom_scan_date
|
|
555
|
+
|
|
556
|
+
def test_batch_transform_to_assets(self, transformer, sample_asset_data):
|
|
557
|
+
"""Test batch transforming data to IntegrationAssets."""
|
|
558
|
+
data_sources = [
|
|
559
|
+
sample_asset_data,
|
|
560
|
+
{"asset": {"name": "Asset 2", "id": "asset-456"}},
|
|
561
|
+
{"asset": {"name": "Asset 3", "id": "asset-789"}},
|
|
562
|
+
]
|
|
563
|
+
|
|
564
|
+
assets = list(transformer.batch_transform_to_assets(data_sources))
|
|
565
|
+
|
|
566
|
+
# Verify all assets were transformed
|
|
567
|
+
assert len(assets) == 3
|
|
568
|
+
assert assets[0].name == "Test Asset"
|
|
569
|
+
assert assets[1].name == "Asset 2"
|
|
570
|
+
assert assets[2].name == "Asset 3"
|
|
571
|
+
|
|
572
|
+
def test_batch_transform_to_assets_with_plan_id(self, transformer, sample_asset_data):
|
|
573
|
+
"""Test batch transforming data to IntegrationAssets with plan ID."""
|
|
574
|
+
data_sources = [sample_asset_data, {"asset": {"name": "Asset 2", "id": "asset-456"}}]
|
|
575
|
+
plan_id = 123
|
|
576
|
+
|
|
577
|
+
assets = list(transformer.batch_transform_to_assets(data_sources, plan_id=plan_id))
|
|
578
|
+
|
|
579
|
+
# Verify all assets were transformed
|
|
580
|
+
assert len(assets) == 2
|
|
581
|
+
assert assets[0].name == "Test Asset"
|
|
582
|
+
assert assets[0].parent_id == plan_id
|
|
583
|
+
assert assets[1].name == "Asset 2"
|
|
584
|
+
assert assets[1].parent_id == plan_id
|
|
585
|
+
|
|
586
|
+
def test_batch_transform_to_findings(self, transformer, sample_finding_data):
|
|
587
|
+
"""Test batch transforming data to IntegrationFindings."""
|
|
588
|
+
data_sources = [
|
|
589
|
+
sample_finding_data,
|
|
590
|
+
{"finding": {"title": "Finding 2", "severity": "Medium", "category": "Security"}},
|
|
591
|
+
{"finding": {"title": "Finding 3", "severity": "Low", "category": "Security"}},
|
|
592
|
+
]
|
|
593
|
+
|
|
594
|
+
findings = list(transformer.batch_transform_to_findings(data_sources))
|
|
595
|
+
|
|
596
|
+
# Verify all findings were transformed
|
|
597
|
+
assert len(findings) == 3
|
|
598
|
+
assert findings[0].title == "Test Finding"
|
|
599
|
+
assert findings[1].title == "Finding 2"
|
|
600
|
+
assert findings[2].title == "Finding 3"
|
|
601
|
+
|
|
602
|
+
def test_batch_transform_to_findings_with_asset_identifier(self, transformer, sample_finding_data):
|
|
603
|
+
"""Test batch transforming data to IntegrationFindings with asset identifier."""
|
|
604
|
+
data_sources = [
|
|
605
|
+
sample_finding_data,
|
|
606
|
+
{"finding": {"title": "Finding 2", "severity": "Medium", "category": "Security"}},
|
|
607
|
+
]
|
|
608
|
+
asset_id = "asset-123"
|
|
609
|
+
|
|
610
|
+
findings = list(transformer.batch_transform_to_findings(data_sources, asset_identifier=asset_id))
|
|
611
|
+
|
|
612
|
+
# Verify all findings were transformed with asset identifier
|
|
613
|
+
assert len(findings) == 2
|
|
614
|
+
assert findings[0].title == "Test Finding"
|
|
615
|
+
assert findings[0].asset_identifier == asset_id
|
|
616
|
+
assert findings[1].title == "Finding 2"
|
|
617
|
+
assert findings[1].asset_identifier == asset_id
|
|
618
|
+
|
|
619
|
+
def test_batch_transform_error_handling(self, transformer, sample_asset_data):
    """A malformed source should be logged and skipped; valid sources still transform."""
    bad_data = "This is not valid JSON or XML"
    data_sources = [sample_asset_data, bad_data]

    # Intercept error-level logging so we can confirm the failure was reported.
    with patch("logging.Logger.error") as mock_error:
        results = list(transformer.batch_transform_to_assets(data_sources))

    # The invalid entry triggered error logging...
    assert mock_error.called
    # ...while the valid asset was still produced.
    assert len(results) == 1
    assert results[0].name == "Test Asset"
|
|
634
|
+
|
|
635
|
+
def test_batch_transform_findings_error_handling(self, transformer, sample_finding_data):
    """A malformed source in a findings batch is logged and skipped, not fatal."""
    bad_data = "This is not valid JSON or XML"
    data_sources = [sample_finding_data, bad_data]

    # Intercept error-level logging so we can confirm the failure was reported.
    with patch("logging.Logger.error") as mock_error:
        results = list(transformer.batch_transform_to_findings(data_sources))

    # The invalid entry triggered error logging...
    assert mock_error.called
    # ...while the valid finding was still produced.
    assert len(results) == 1
    assert results[0].title == "Test Finding"
|
|
650
|
+
|
|
651
|
+
def test_get_data_value_exception_handling(self, transformer):
    """_get_data_value should fall back to the default on lookup exceptions."""

    class RaisingMapping:
        # Mapping stub whose item access always raises the configured exception.
        def __init__(self, exc_cls):
            self.exc_cls = exc_cls

        def __getitem__(self, key):
            raise self.exc_cls(f"Test {self.exc_cls.__name__}")

    # KeyError, TypeError and IndexError must all be swallowed, returning the default.
    for exc_cls in (KeyError, TypeError, IndexError):
        raising_data = RaisingMapping(exc_cls)
        assert transformer._get_data_value(raising_data, "any.path", "default") == "default"
|
|
678
|
+
|
|
679
|
+
def test_xml_to_dict_with_duplicate_child_tags(self, transformer):
    """Repeated child tags should be collected into a list by _xml_to_dict."""
    # Exercises the path where an existing single entry is promoted to a list.
    xml = """
    <root>
        <child>value1</child>
        <child>value2</child>
    </root>
    """
    converted = transformer._xml_to_dict(ET.fromstring(xml))

    assert "child" in converted
    assert isinstance(converted["child"], list)
    # Both occurrences are kept, in document order.
    assert converted["child"] == ["value1", "value2"]
|
|
697
|
+
|
|
698
|
+
def test_transform_to_asset_with_identifier_from_name(self, transformer):
    """When no identifier is mapped, the asset name is reused as the identifier."""
    payload = {"asset": {"name": "Test Asset Name"}}

    result = transformer.transform_to_asset(payload)

    assert result.name == "Test Asset Name"
    # Fallback path: identifier mirrors the name when absent from the source.
    assert result.identifier == "Test Asset Name"
|
|
707
|
+
|
|
708
|
+
def test_transform_to_finding_with_all_fields_missing(self, transformer):
    """Defaults should fill in every optional finding field left out of the mapping."""
    # Minimal valid source data: required fields only.
    data = {"finding": {"title": "Minimal Finding", "severity": "Critical"}}

    # Force the mapping step to return only the two required fields so that
    # every other field must come from transform_to_finding's defaults.
    minimal_mapping = {
        "title": "Minimal Finding",
        "severity": regscale_models.IssueSeverity.Critical,
    }
    with patch.object(transformer, "_apply_mapping", return_value=minimal_mapping):
        finding = transformer.transform_to_finding(data)

    # Mapped values pass straight through.
    assert finding.title == "Minimal Finding"
    assert finding.severity == regscale_models.IssueSeverity.Critical
    # Every unmapped field falls back to its default.
    assert finding.description == "No description available"
    assert finding.category == "Vulnerability"
    assert finding.control_labels == []
    assert finding.plugin_name == "Minimal Finding"
    assert finding.status == regscale_models.IssueStatus.Open
    assert finding.asset_identifier == ""
    assert finding.scan_date == transformer.scan_date
|
|
735
|
+
|
|
736
|
+
def test_xml_to_dict_with_existing_tag_becoming_list(self, transformer):
    """Test XML conversion where a tag must be promoted from a single item to a list.

    The previous version of this test re-implemented the single-to-list
    promotion branch inside the test body and asserted on its own
    manipulation, so it could never fail if ``_xml_to_dict`` regressed.
    Converting the whole document exercises the real code path instead:
    the first ``duplicateTag`` is stored as a single entry, and the
    second occurrence forces the promotion to a list.
    """
    xml_str = """
    <root>
        <duplicateTag id="first">first value</duplicateTag>
        <middle>middle content</middle>
        <duplicateTag id="second">second value</duplicateTag>
    </root>
    """
    root = ET.fromstring(xml_str)

    result = transformer._xml_to_dict(root)

    # The repeated tag was promoted to a list holding both occurrences in order.
    assert isinstance(result["duplicateTag"], list)
    assert len(result["duplicateTag"]) == 2
    assert result["duplicateTag"][0]["@id"] == "first"
    assert result["duplicateTag"][1]["@id"] == "second"
    # The unique sibling tag stays a single (non-list) entry.
    assert not isinstance(result["middle"], list)
|
|
774
|
+
|
|
775
|
+
def test_transform_to_finding_with_explicit_missing_title(self, transformer):
    """A mapping result with no title should produce the "Unknown Finding" fallback."""
    # Title is deliberately absent from the mapped data.
    mapping_without_title = {
        "description": "Description only",
        "severity": regscale_models.IssueSeverity.Low,
        "category": "Vulnerability",
        "status": regscale_models.IssueStatus.Open,
        "control_labels": [],
    }
    with patch.object(transformer, "_apply_mapping", return_value=mapping_without_title):
        finding = transformer.transform_to_finding({})

    # The default title is substituted for the missing one.
    assert finding.title == "Unknown Finding"
|
|
794
|
+
|
|
795
|
+
def test_get_data_value_deep_indexing_error(self, transformer):
    """Deep path lookups that raise internally should return the caller's default."""
    # Indexing into an empty list raises IndexError inside the lookup.
    empty_list_data = {"deep": {"path": {"list": []}}}
    assert transformer._get_data_value(empty_list_data, "deep.path.list.0", "default") == "default"

    # Traversing into a non-dict scalar raises TypeError inside the lookup.
    scalar_data = {"deep": {"path": 123}}
    assert transformer._get_data_value(scalar_data, "deep.path.subkey", "default") == "default"
|
|
807
|
+
|
|
808
|
+
def test_xml_to_dict_direct_append(self, transformer):
    """Appending a duplicate tag to an already-promoted list in _xml_to_dict.

    The previous version drove a hand-rolled copy of the append branch
    against a mock element, which verified the test's own code rather
    than the production method. Parsing three duplicate children makes
    the real implementation pass through all three states: the first
    child is stored as a single entry, the second promotes it to a
    list, and the third is appended to the existing list — the branch
    this test exists to cover.
    """
    xml_str = """
    <root>
        <item id="1">value1</item>
        <item id="2">value2</item>
        <item id="3">value3</item>
    </root>
    """
    result = transformer._xml_to_dict(ET.fromstring(xml_str))

    # All three occurrences are collected into one list, in document order.
    assert isinstance(result["item"], list)
    assert len(result["item"]) == 3
    assert [entry["@id"] for entry in result["item"]] == ["1", "2", "3"]
|
|
847
|
+
|
|
848
|
+
|
|
849
|
+
# Allow running this test module directly (outside a pytest invocation).
if __name__ == "__main__":
    pytest.main()
|