regscale-cli 6.26.0.0__py3-none-any.whl → 6.27.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of regscale-cli has been flagged as potentially problematic; see the registry's advisory page for details.

Files changed (95)
  1. regscale/_version.py +1 -1
  2. regscale/core/app/application.py +1 -1
  3. regscale/core/app/internal/evidence.py +419 -2
  4. regscale/dev/code_gen.py +24 -20
  5. regscale/integrations/commercial/jira.py +367 -126
  6. regscale/integrations/commercial/qualys/__init__.py +7 -8
  7. regscale/integrations/commercial/qualys/scanner.py +8 -3
  8. regscale/integrations/commercial/synqly/assets.py +17 -0
  9. regscale/integrations/commercial/synqly/vulnerabilities.py +45 -28
  10. regscale/integrations/commercial/tenablev2/cis_parsers.py +453 -0
  11. regscale/integrations/commercial/tenablev2/cis_scanner.py +447 -0
  12. regscale/integrations/commercial/tenablev2/commands.py +142 -1
  13. regscale/integrations/commercial/tenablev2/scanner.py +0 -1
  14. regscale/integrations/commercial/tenablev2/stig_parsers.py +113 -57
  15. regscale/integrations/commercial/wizv2/WizDataMixin.py +1 -1
  16. regscale/integrations/commercial/wizv2/click.py +44 -59
  17. regscale/integrations/commercial/wizv2/compliance/__init__.py +15 -0
  18. regscale/integrations/commercial/wizv2/{policy_compliance_helpers.py → compliance/helpers.py} +78 -60
  19. regscale/integrations/commercial/wizv2/compliance_report.py +10 -9
  20. regscale/integrations/commercial/wizv2/core/__init__.py +133 -0
  21. regscale/integrations/commercial/wizv2/{async_client.py → core/client.py} +3 -3
  22. regscale/integrations/commercial/wizv2/{constants.py → core/constants.py} +1 -17
  23. regscale/integrations/commercial/wizv2/core/file_operations.py +237 -0
  24. regscale/integrations/commercial/wizv2/fetchers/__init__.py +11 -0
  25. regscale/integrations/commercial/wizv2/{data_fetcher.py → fetchers/policy_assessment.py} +5 -9
  26. regscale/integrations/commercial/wizv2/issue.py +1 -1
  27. regscale/integrations/commercial/wizv2/models/__init__.py +0 -0
  28. regscale/integrations/commercial/wizv2/parsers/__init__.py +34 -0
  29. regscale/integrations/commercial/wizv2/{parsers.py → parsers/main.py} +1 -1
  30. regscale/integrations/commercial/wizv2/processors/__init__.py +11 -0
  31. regscale/integrations/commercial/wizv2/{finding_processor.py → processors/finding.py} +1 -1
  32. regscale/integrations/commercial/wizv2/reports.py +1 -1
  33. regscale/integrations/commercial/wizv2/sbom.py +1 -1
  34. regscale/integrations/commercial/wizv2/scanner.py +40 -100
  35. regscale/integrations/commercial/wizv2/utils/__init__.py +48 -0
  36. regscale/integrations/commercial/wizv2/{utils.py → utils/main.py} +116 -61
  37. regscale/integrations/commercial/wizv2/variables.py +89 -3
  38. regscale/integrations/compliance_integration.py +0 -46
  39. regscale/integrations/control_matcher.py +22 -3
  40. regscale/integrations/due_date_handler.py +14 -8
  41. regscale/integrations/public/fedramp/docx_parser.py +10 -1
  42. regscale/integrations/public/fedramp/fedramp_cis_crm.py +393 -340
  43. regscale/integrations/public/fedramp/fedramp_five.py +1 -1
  44. regscale/integrations/scanner_integration.py +127 -57
  45. regscale/models/integration_models/cisa_kev_data.json +132 -9
  46. regscale/models/integration_models/qualys.py +3 -4
  47. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  48. regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +24 -7
  49. regscale/models/integration_models/synqly_models/synqly_model.py +8 -1
  50. regscale/models/regscale_models/control_implementation.py +1 -1
  51. regscale/models/regscale_models/issue.py +0 -1
  52. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.0.dist-info}/METADATA +1 -17
  53. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.0.dist-info}/RECORD +93 -60
  54. tests/regscale/integrations/commercial/test_jira.py +481 -91
  55. tests/regscale/integrations/commercial/test_wiz.py +96 -200
  56. tests/regscale/integrations/commercial/wizv2/__init__.py +1 -1
  57. tests/regscale/integrations/commercial/wizv2/compliance/__init__.py +1 -0
  58. tests/regscale/integrations/commercial/wizv2/compliance/test_helpers.py +903 -0
  59. tests/regscale/integrations/commercial/wizv2/core/__init__.py +1 -0
  60. tests/regscale/integrations/commercial/wizv2/core/test_auth.py +701 -0
  61. tests/regscale/integrations/commercial/wizv2/core/test_client.py +1037 -0
  62. tests/regscale/integrations/commercial/wizv2/core/test_file_operations.py +989 -0
  63. tests/regscale/integrations/commercial/wizv2/fetchers/__init__.py +1 -0
  64. tests/regscale/integrations/commercial/wizv2/fetchers/test_policy_assessment.py +805 -0
  65. tests/regscale/integrations/commercial/wizv2/parsers/__init__.py +1 -0
  66. tests/regscale/integrations/commercial/wizv2/parsers/test_main.py +1153 -0
  67. tests/regscale/integrations/commercial/wizv2/processors/__init__.py +1 -0
  68. tests/regscale/integrations/commercial/wizv2/processors/test_finding.py +671 -0
  69. tests/regscale/integrations/commercial/wizv2/test_WizDataMixin.py +537 -0
  70. tests/regscale/integrations/commercial/wizv2/test_click_comprehensive.py +851 -0
  71. tests/regscale/integrations/commercial/wizv2/test_compliance_report_comprehensive.py +910 -0
  72. tests/regscale/integrations/commercial/wizv2/test_file_cleanup.py +283 -0
  73. tests/regscale/integrations/commercial/wizv2/test_file_operations.py +260 -0
  74. tests/regscale/integrations/commercial/wizv2/test_issue.py +1 -1
  75. tests/regscale/integrations/commercial/wizv2/test_issue_comprehensive.py +1203 -0
  76. tests/regscale/integrations/commercial/wizv2/test_reports.py +497 -0
  77. tests/regscale/integrations/commercial/wizv2/test_sbom.py +643 -0
  78. tests/regscale/integrations/commercial/wizv2/test_scanner_comprehensive.py +805 -0
  79. tests/regscale/integrations/commercial/wizv2/test_wiz_click_client_id.py +1 -1
  80. tests/regscale/integrations/commercial/wizv2/test_wiz_compliance_report.py +72 -29
  81. tests/regscale/integrations/commercial/wizv2/test_wiz_findings_comprehensive.py +364 -0
  82. tests/regscale/integrations/commercial/wizv2/test_wiz_inventory_comprehensive.py +644 -0
  83. tests/regscale/integrations/commercial/wizv2/test_wizv2.py +946 -78
  84. tests/regscale/integrations/commercial/wizv2/test_wizv2_utils.py +97 -202
  85. tests/regscale/integrations/commercial/wizv2/utils/__init__.py +1 -0
  86. tests/regscale/integrations/commercial/wizv2/utils/test_main.py +1523 -0
  87. tests/regscale/integrations/public/test_fedramp.py +301 -0
  88. tests/regscale/integrations/test_control_matcher.py +83 -0
  89. regscale/integrations/commercial/wizv2/policy_compliance.py +0 -3543
  90. tests/regscale/integrations/commercial/wizv2/test_wiz_policy_compliance.py +0 -750
  91. /regscale/integrations/commercial/wizv2/{wiz_auth.py → core/auth.py} +0 -0
  92. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.0.dist-info}/LICENSE +0 -0
  93. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.0.dist-info}/WHEEL +0 -0
  94. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.0.dist-info}/entry_points.txt +0 -0
  95. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,989 @@
1
+ #!/usr/bin/env python3
2
+ # -*- coding: utf-8 -*-
3
+ """
4
+ Comprehensive unit tests for FileOperations module.
5
+
6
+ Tests cover:
7
+ - JSON file loading and saving with all edge cases
8
+ - Cache validation by file age
9
+ - Load cache or fetch pattern with various scenarios
10
+ - Multi-file search operations
11
+ - Error handling and boundary conditions
12
+ - load_cached_findings with progress callbacks
13
+ """
14
+
15
+ import datetime
16
+ import json
17
+ import os
18
+ import tempfile
19
+ import time
20
+ from pathlib import Path
21
+ from unittest.mock import MagicMock, Mock, patch, mock_open
22
+ from enum import Enum
23
+
24
+ import pytest
25
+
26
+ from regscale.integrations.commercial.wizv2.core.file_operations import FileOperations
27
+
28
+
29
+ PATH = "regscale.integrations.commercial.wizv2.core.file_operations"
30
+
31
+
32
class TestFileOperationsJSONLoading:
    """Tests for FileOperations JSON loading behaviour."""

    def test_load_json_file_success(self):
        """A well-formed JSON file round-trips through load_json_file."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            expected = {"key": "value", "number": 42, "list": [1, 2, 3]}
            json.dump(expected, handle)
            file_path = handle.name

        try:
            assert FileOperations.load_json_file(file_path) == expected
        finally:
            os.unlink(file_path)

    def test_load_json_file_nonexistent(self):
        """A missing path yields None rather than raising."""
        assert FileOperations.load_json_file("/nonexistent/path/file.json") is None

    def test_load_json_file_invalid_json(self):
        """Malformed JSON content yields None."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            handle.write("not valid json{[")
            file_path = handle.name

        try:
            assert FileOperations.load_json_file(file_path) is None
        finally:
            os.unlink(file_path)

    def test_load_json_file_empty_file(self):
        """A zero-byte file yields None."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            handle.write("")
            file_path = handle.name

        try:
            assert FileOperations.load_json_file(file_path) is None
        finally:
            os.unlink(file_path)

    @patch(f"{PATH}.open", side_effect=OSError("Permission denied"))
    @patch(f"{PATH}.os.path.exists", return_value=True)
    def test_load_json_file_oserror(self, mock_exists, mock_file_open):
        """An OSError raised while opening the file yields None."""
        assert FileOperations.load_json_file("/test/path.json") is None

    def test_load_json_file_complex_data(self):
        """Deeply nested structures survive a load round-trip."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            expected = {
                "nested": {"deeply": {"nested": {"data": [1, 2, 3]}}},
                "list_of_dicts": [{"a": 1}, {"b": 2}, {"c": 3}],
                "null_value": None,
                "boolean": True,
            }
            json.dump(expected, handle)
            file_path = handle.name

        try:
            assert FileOperations.load_json_file(file_path) == expected
        finally:
            os.unlink(file_path)
101
+
102
+
103
class TestFileOperationsJSONSaving:
    """Tests for FileOperations JSON saving behaviour."""

    def test_save_json_file_success(self):
        """Data saved via save_json_file is readable back unchanged."""
        with tempfile.TemporaryDirectory() as workdir:
            target = os.path.join(workdir, "test.json")
            payload = {"key": "value", "number": 42}

            assert FileOperations.save_json_file(payload, target) is True
            assert os.path.exists(target)
            assert FileOperations.load_json_file(target) == payload

    def test_save_json_file_creates_directory(self):
        """With create_dir=True, missing parent directories are created."""
        with tempfile.TemporaryDirectory() as workdir:
            target = os.path.join(workdir, "level1", "level2", "test.json")
            payload = {"nested": True}

            assert FileOperations.save_json_file(payload, target, create_dir=True) is True
            assert os.path.exists(target)
            assert FileOperations.load_json_file(target) == payload

    def test_save_json_file_without_creating_directory(self):
        """With create_dir=False, a missing parent directory means failure."""
        with tempfile.TemporaryDirectory() as workdir:
            target = os.path.join(workdir, "nonexistent", "test.json")

            assert FileOperations.save_json_file({"test": "data"}, target, create_dir=False) is False
            assert not os.path.exists(target)

    def test_save_json_file_invalid_data(self):
        """Objects that json cannot serialize cause save to report failure."""
        with tempfile.TemporaryDirectory() as workdir:
            target = os.path.join(workdir, "test.json")

            class NonSerializable:
                pass

            assert FileOperations.save_json_file({"obj": NonSerializable()}, target) is False

    def test_save_json_file_empty_data(self):
        """Empty dicts and lists save and reload as themselves."""
        with tempfile.TemporaryDirectory() as workdir:
            # Exercise both empty container types through the same path.
            for empty, filename in (({}, "empty_dict.json"), ([], "empty_list.json")):
                target = os.path.join(workdir, filename)
                assert FileOperations.save_json_file(empty, target) is True
                assert FileOperations.load_json_file(target) == empty

    @patch(f"{PATH}.open", side_effect=PermissionError("Permission denied"))
    @patch(f"{PATH}.check_file_path")
    def test_save_json_file_permission_error(self, mock_check_path, mock_file_open):
        """A PermissionError while writing makes save return False."""
        assert FileOperations.save_json_file({"test": "data"}, "/test/path.json") is False

    def test_save_json_file_complex_unicode(self):
        """Non-ASCII content round-trips through save and load."""
        with tempfile.TemporaryDirectory() as workdir:
            target = os.path.join(workdir, "unicode.json")
            payload = {
                "english": "Hello",
                "japanese": "こんにちは",
                "emoji": "🚀🔥",
                "special": "Ñoño",
            }

            assert FileOperations.save_json_file(payload, target) is True
            assert FileOperations.load_json_file(target) == payload
193
+
194
+
195
class TestFileOperationsFileAge:
    """Tests for get_file_age."""

    def test_get_file_age_existing_file(self):
        """A freshly written file reports a small, non-None timedelta age."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as handle:
            handle.write("test")
            file_path = handle.name

        try:
            age = FileOperations.get_file_age(file_path)
            assert age is not None
            assert isinstance(age, datetime.timedelta)
            assert age.total_seconds() < 5
        finally:
            os.unlink(file_path)

    def test_get_file_age_nonexistent(self):
        """A missing file has no age (None)."""
        assert FileOperations.get_file_age("/nonexistent/file.json") is None

    @patch(f"{PATH}.os.path.exists", return_value=True)
    @patch(f"{PATH}.os.path.getmtime", side_effect=OSError("Permission denied"))
    def test_get_file_age_oserror(self, mock_getmtime, mock_exists):
        """An OSError from getmtime makes get_file_age return None."""
        assert FileOperations.get_file_age("/test/path.json") is None

    def test_get_file_age_old_file(self):
        """A back-dated mtime is reflected in the reported age."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as handle:
            handle.write("test")
            file_path = handle.name

        try:
            # Back-date the file's access/modification timestamps by two hours.
            stamp = time.time() - (2 * 3600)
            os.utime(file_path, (stamp, stamp))

            age = FileOperations.get_file_age(file_path)
            assert age is not None
            assert age.total_seconds() >= 7000  # At least ~2 hours
        finally:
            os.unlink(file_path)
240
+
241
+
242
class TestFileOperationsCacheValidation:
    """Tests for is_cache_valid (age-based cache freshness)."""

    def test_is_cache_valid_fresh_file(self):
        """A just-written file is within a one-hour freshness window."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as handle:
            handle.write('{"fresh": true}')
            file_path = handle.name

        try:
            assert FileOperations.is_cache_valid(file_path, max_age_hours=1) is True
        finally:
            os.unlink(file_path)

    def test_is_cache_valid_nonexistent(self):
        """A missing cache file is never valid."""
        assert FileOperations.is_cache_valid("/nonexistent/cache.json", max_age_hours=1) is False

    def test_is_cache_valid_old_file(self):
        """A file older than max_age_hours is invalid."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as handle:
            handle.write('{"old": true}')
            file_path = handle.name

        try:
            # Back-date the file's timestamps to ten hours ago.
            stamp = time.time() - (10 * 3600)
            os.utime(file_path, (stamp, stamp))

            assert FileOperations.is_cache_valid(file_path, max_age_hours=1) is False
        finally:
            os.unlink(file_path)

    def test_is_cache_valid_exact_boundary(self):
        """A file just inside the freshness window is still valid."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as handle:
            handle.write('{"boundary": true}')
            file_path = handle.name

        try:
            # 59 minutes 59 seconds old: one second inside a 1-hour window.
            stamp = time.time() - (3599)
            os.utime(file_path, (stamp, stamp))

            assert FileOperations.is_cache_valid(file_path, max_age_hours=1) is True
        finally:
            os.unlink(file_path)

    def test_is_cache_valid_custom_max_age(self):
        """A fresh file is valid under a range of max_age_hours settings."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as handle:
            handle.write('{"custom": true}')
            file_path = handle.name

        try:
            # 0.001 hours is 3.6 seconds, still comfortably newer than the file.
            for max_age in (0.001, 24, 168):  # 168 hours == 1 week
                assert FileOperations.is_cache_valid(file_path, max_age_hours=max_age) is True
        finally:
            os.unlink(file_path)
307
+
308
+
309
class TestFileOperationsLoadCacheOrFetch:
    """Tests for the load_cache_or_fetch caching pattern."""

    def test_load_cache_or_fetch_uses_valid_cache(self):
        """A fresh cache file is returned and the fetch function is skipped."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            json.dump({"cached": True}, handle)
            cache_path = handle.name

        try:
            calls = []

            def fetch_fn():
                calls.append(1)
                return {"fetched": True}

            result = FileOperations.load_cache_or_fetch(
                file_path=cache_path, fetch_fn=fetch_fn, max_age_hours=1, save_cache=False
            )

            assert result == {"cached": True}
            assert not calls  # fetch must never be invoked for a valid cache
        finally:
            os.unlink(cache_path)

    def test_load_cache_or_fetch_fetches_when_no_cache(self):
        """With no cache present, data is fetched and written to the cache."""
        with tempfile.TemporaryDirectory() as workdir:
            cache_path = os.path.join(workdir, "cache.json")

            result = FileOperations.load_cache_or_fetch(
                file_path=cache_path,
                fetch_fn=lambda: {"fetched": True},
                max_age_hours=1,
                save_cache=True,
            )

            assert result == {"fetched": True}
            assert os.path.exists(cache_path)
            assert FileOperations.load_json_file(cache_path) == {"fetched": True}

    def test_load_cache_or_fetch_invalid_cache(self):
        """An expired cache triggers a fetch and is overwritten."""
        with tempfile.TemporaryDirectory() as workdir:
            cache_path = os.path.join(workdir, "old_cache.json")

            # Seed an expired cache entry.
            with open(cache_path, "w") as handle:
                json.dump({"old": True}, handle)
            stamp = time.time() - (10 * 3600)
            os.utime(cache_path, (stamp, stamp))

            result = FileOperations.load_cache_or_fetch(
                file_path=cache_path,
                fetch_fn=lambda: {"fresh": True},
                max_age_hours=1,
                save_cache=True,
            )

            assert result == {"fresh": True}
            # The stale cache must have been replaced with the fetched data.
            assert FileOperations.load_json_file(cache_path) == {"fresh": True}

    def test_load_cache_or_fetch_without_saving(self):
        """With save_cache=False, fetched data is not written to disk."""
        with tempfile.TemporaryDirectory() as workdir:
            cache_path = os.path.join(workdir, "no_save.json")

            result = FileOperations.load_cache_or_fetch(
                file_path=cache_path,
                fetch_fn=lambda: {"not_saved": True},
                max_age_hours=1,
                save_cache=False,
            )

            assert result == {"not_saved": True}
            assert not os.path.exists(cache_path)

    def test_load_cache_or_fetch_corrupted_cache_refetches(self):
        """A cache file with invalid JSON falls through to the fetch function."""
        with tempfile.TemporaryDirectory() as workdir:
            cache_path = os.path.join(workdir, "corrupted.json")

            with open(cache_path, "w") as handle:
                handle.write("invalid json{[")

            result = FileOperations.load_cache_or_fetch(
                file_path=cache_path,
                fetch_fn=lambda: {"fetched_after_corruption": True},
                max_age_hours=1,
                save_cache=True,
            )

            assert result == {"fetched_after_corruption": True}

    def test_load_cache_or_fetch_returns_none_from_load(self):
        """If loading a valid-looking cache yields None, fetch is used instead."""
        with tempfile.TemporaryDirectory() as workdir:
            cache_path = os.path.join(workdir, "test.json")

            with open(cache_path, "w") as handle:
                json.dump({"test": "data"}, handle)

            calls = []

            def fetch_fn():
                calls.append(1)
                return {"fetched": True}

            # Force the loader to report None despite the file being valid.
            with patch(f"{PATH}.FileOperations.load_json_file", return_value=None):
                result = FileOperations.load_cache_or_fetch(
                    file_path=cache_path, fetch_fn=fetch_fn, max_age_hours=1, save_cache=True
                )

            assert result == {"fetched": True}
            assert calls  # fetch must have been invoked
436
+
437
+
438
class TestFileOperationsSearchSingleFile:
    """Tests for search_single_json_file."""

    @staticmethod
    def _match_by_id(item, identifier):
        """Shared matcher: compare an item's 'id' field against the identifier."""
        return item.get("id") == identifier

    def test_search_single_json_file_found(self):
        """An item present in the file is located by the matcher."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            records = [{"id": "1", "name": "Alice"}, {"id": "2", "name": "Bob"}, {"id": "3", "name": "Charlie"}]
            json.dump(records, handle)
            file_path = handle.name

        try:
            found = FileOperations.search_single_json_file("2", file_path, self._match_by_id)
            assert found is not None
            assert found["name"] == "Bob"
        finally:
            os.unlink(file_path)

    def test_search_single_json_file_not_found(self):
        """An identifier with no matching item yields None."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            json.dump([{"id": "1", "name": "Alice"}], handle)
            file_path = handle.name

        try:
            assert FileOperations.search_single_json_file("999", file_path, self._match_by_id) is None
        finally:
            os.unlink(file_path)

    def test_search_single_json_file_empty_list(self):
        """Searching an empty list yields None."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            json.dump([], handle)
            file_path = handle.name

        try:
            assert FileOperations.search_single_json_file("1", file_path, self._match_by_id) is None
        finally:
            os.unlink(file_path)

    def test_search_single_json_file_non_list_data(self):
        """A file whose top level is not a list yields None."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            json.dump({"id": "1", "name": "Not a list"}, handle)
            file_path = handle.name

        try:
            assert FileOperations.search_single_json_file("1", file_path, self._match_by_id) is None
        finally:
            os.unlink(file_path)

    def test_search_single_json_file_nonexistent(self):
        """Searching a nonexistent file yields None."""
        assert FileOperations.search_single_json_file("1", "/nonexistent.json", self._match_by_id) is None

    def test_search_single_json_file_first_match(self):
        """When several items match, the first one in file order wins."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            records = [
                {"id": "duplicate", "name": "First"},
                {"id": "duplicate", "name": "Second"},
            ]
            json.dump(records, handle)
            file_path = handle.name

        try:
            found = FileOperations.search_single_json_file("duplicate", file_path, self._match_by_id)
            assert found is not None
            assert found["name"] == "First"
        finally:
            os.unlink(file_path)
537
+
538
+
539
class TestFileOperationsSearchMultipleFiles:
    """Tests for search_json_files (multi-file search)."""

    @staticmethod
    def _match_by_id(item, identifier):
        """Shared matcher: compare an item's 'id' field against the identifier."""
        return item.get("id") == identifier

    @staticmethod
    def _write_json(path, data):
        """Write *data* to *path* as JSON."""
        with open(path, "w") as handle:
            json.dump(data, handle)

    def test_search_json_files_found_in_first_file(self):
        """A match in the first file is returned along with that file's path."""
        with tempfile.TemporaryDirectory() as workdir:
            file1 = os.path.join(workdir, "file1.json")
            file2 = os.path.join(workdir, "file2.json")
            self._write_json(file1, [{"id": "1", "name": "File1Item"}])
            self._write_json(file2, [{"id": "2", "name": "File2Item"}])

            found, origin = FileOperations.search_json_files("1", [file1, file2], self._match_by_id)

            assert found is not None
            assert found["name"] == "File1Item"
            assert origin == file1

    def test_search_json_files_found_in_second_file(self):
        """A match only in the second file is found with the correct source path."""
        with tempfile.TemporaryDirectory() as workdir:
            file1 = os.path.join(workdir, "file1.json")
            file2 = os.path.join(workdir, "file2.json")
            self._write_json(file1, [{"id": "1", "name": "File1Item"}])
            self._write_json(file2, [{"id": "2", "name": "File2Item"}])

            found, origin = FileOperations.search_json_files("2", [file1, file2], self._match_by_id)

            assert found is not None
            assert found["name"] == "File2Item"
            assert origin == file2

    def test_search_json_files_not_found(self):
        """When no file contains the item, both result and source are None."""
        with tempfile.TemporaryDirectory() as workdir:
            file1 = os.path.join(workdir, "file1.json")
            self._write_json(file1, [{"id": "1"}])

            found, origin = FileOperations.search_json_files("999", [file1], self._match_by_id)

            assert found is None
            assert origin is None

    def test_search_json_files_with_nonexistent_files(self):
        """Nonexistent files are skipped without raising."""
        found, origin = FileOperations.search_json_files(
            "1", ["/nonexistent1.json", "/nonexistent2.json"], self._match_by_id
        )

        assert found is None
        assert origin is None

    def test_search_json_files_mixed_existing_nonexisting(self):
        """Missing files in the list do not prevent a match in an existing one."""
        with tempfile.TemporaryDirectory() as workdir:
            file1 = os.path.join(workdir, "exists.json")
            self._write_json(file1, [{"id": "found", "name": "FoundItem"}])

            candidates = ["/nonexistent.json", file1, "/another_nonexistent.json"]
            found, origin = FileOperations.search_json_files("found", candidates, self._match_by_id)

            assert found is not None
            assert found["name"] == "FoundItem"
            assert origin == file1

    def test_search_json_files_stops_at_first_match(self):
        """Search returns the match from the earliest file and goes no further."""
        with tempfile.TemporaryDirectory() as workdir:
            file1 = os.path.join(workdir, "file1.json")
            file2 = os.path.join(workdir, "file2.json")
            # Both files contain the same id with different names.
            self._write_json(file1, [{"id": "duplicate", "name": "FirstFile"}])
            self._write_json(file2, [{"id": "duplicate", "name": "SecondFile"}])

            found, origin = FileOperations.search_json_files("duplicate", [file1, file2], self._match_by_id)

            assert found is not None
            assert found["name"] == "FirstFile"
            assert origin == file1

    def test_search_json_files_with_exception_in_file(self):
        """A corrupted file is skipped and later files are still searched."""
        with tempfile.TemporaryDirectory() as workdir:
            file1 = os.path.join(workdir, "corrupted.json")
            file2 = os.path.join(workdir, "valid.json")

            with open(file1, "w") as handle:
                handle.write("invalid json{[")
            self._write_json(file2, [{"id": "found", "name": "ValidItem"}])

            found, origin = FileOperations.search_json_files("found", [file1, file2], self._match_by_id)

            assert found is not None
            assert found["name"] == "ValidItem"
            assert origin == file2

    def test_search_json_files_with_exception_during_search(self):
        """An exception inside the per-file search yields (None, None)."""
        with tempfile.TemporaryDirectory() as workdir:
            file1 = os.path.join(workdir, "test.json")
            self._write_json(file1, [{"id": "test"}])

            # Force the per-file helper to blow up for every file.
            with patch(f"{PATH}.FileOperations.search_single_json_file", side_effect=Exception("Search error")):
                found, origin = FileOperations.search_json_files("test", [file1], self._match_by_id)

            assert found is None
            assert origin is None
693
+
694
+
695
class TestFileOperationsLoadCachedFindings:
    """Test load_cached_findings operations."""

    def test_load_cached_findings_success(self):
        """Test successfully loading cached findings from multiple files."""

        class QueryType(Enum):
            ISSUES = "issues"
            VULNERABILITIES = "vulnerabilities"

        with tempfile.TemporaryDirectory() as tmpdir:
            issues_file = os.path.join(tmpdir, "issues.json")
            vulns_file = os.path.join(tmpdir, "vulns.json")

            # Seed both cache files with known record counts.
            with open(issues_file, "w") as handle:
                json.dump([{"id": "issue1"}, {"id": "issue2"}], handle)
            with open(vulns_file, "w") as handle:
                json.dump([{"id": "vuln1"}, {"id": "vuln2"}, {"id": "vuln3"}], handle)

            results = FileOperations.load_cached_findings(
                [
                    {"type": QueryType.ISSUES, "file_path": issues_file},
                    {"type": QueryType.VULNERABILITIES, "file_path": vulns_file},
                ]
            )

            assert len(results) == 2
            # Each entry is (query name, loaded items, error); no errors expected here.
            assert results[0][0] == "issues"
            assert len(results[0][1]) == 2
            assert results[0][2] is None
            assert results[1][0] == "vulnerabilities"
            assert len(results[1][1]) == 3
            assert results[1][2] is None

    def test_load_cached_findings_with_progress_callback(self):
        """Test load_cached_findings with progress callback."""

        class QueryType(Enum):
            ISSUES = "issues"

        with tempfile.TemporaryDirectory() as tmpdir:
            issues_file = os.path.join(tmpdir, "issues.json")

            with open(issues_file, "w") as handle:
                json.dump([{"id": "issue1"}], handle)

            observed = []

            def record_progress(query_type, status):
                observed.append((query_type, status))

            results = FileOperations.load_cached_findings(
                [{"type": QueryType.ISSUES, "file_path": issues_file}],
                progress_callback=record_progress,
            )

            assert len(results) == 1
            # One "loading" notification followed by one "loaded" notification.
            assert len(observed) == 2
            assert observed[0] == ("issues", "loading")
            assert observed[1] == ("issues", "loaded")

    def test_load_cached_findings_file_not_exists(self):
        """Test load_cached_findings when file doesn't exist."""

        class QueryType(Enum):
            ISSUES = "issues"

        results = FileOperations.load_cached_findings(
            [{"type": QueryType.ISSUES, "file_path": "/nonexistent/issues.json"}]
        )

        # A missing cache file yields an empty result set with no error.
        assert len(results) == 1
        assert results[0][0] == "issues"
        assert results[0][1] == []
        assert results[0][2] is None

    def test_load_cached_findings_missing_file_path(self):
        """Test load_cached_findings with missing file_path in config."""

        class QueryType(Enum):
            ISSUES = "issues"

        # No "file_path" key at all in the config entry.
        results = FileOperations.load_cached_findings([{"type": QueryType.ISSUES}])

        assert len(results) == 1
        assert results[0][0] == "issues"
        assert results[0][1] == []
        assert results[0][2] is None

    def test_load_cached_findings_none_file_path(self):
        """Test load_cached_findings with None file_path."""

        class QueryType(Enum):
            ISSUES = "issues"

        # Explicit None path is treated the same as a missing one.
        results = FileOperations.load_cached_findings([{"type": QueryType.ISSUES, "file_path": None}])

        assert len(results) == 1
        assert results[0][0] == "issues"
        assert results[0][1] == []
        assert results[0][2] is None

    def test_load_cached_findings_load_returns_none(self):
        """Test load_cached_findings when load_json_file returns None."""

        class QueryType(Enum):
            ISSUES = "issues"

        with tempfile.TemporaryDirectory() as tmpdir:
            broken_file = os.path.join(tmpdir, "corrupted.json")

            # Unparseable content makes load_json_file fail.
            with open(broken_file, "w") as handle:
                handle.write("invalid json{[")

            results = FileOperations.load_cached_findings(
                [{"type": QueryType.ISSUES, "file_path": broken_file}]
            )

            assert len(results) == 1
            assert results[0][0] == "issues"
            assert results[0][1] == []
            # Corrupt JSON surfaces as an Exception in the error slot.
            assert isinstance(results[0][2], Exception)

    def test_load_cached_findings_exception_handling(self):
        """Test load_cached_findings handles exceptions during loading."""

        class QueryType(Enum):
            ISSUES = "issues"

        configs = [{"type": QueryType.ISSUES, "file_path": "/some/path.json"}]

        # Make the existence check itself raise to exercise the guard path.
        with patch(f"{PATH}.os.path.exists", side_effect=Exception("Unexpected error")):
            results = FileOperations.load_cached_findings(configs)

        assert len(results) == 1
        assert results[0][0] == "issues"
        assert results[0][1] == []
        assert isinstance(results[0][2], Exception)
        assert "Unexpected error" in str(results[0][2])

    def test_load_cached_findings_multiple_mixed_results(self):
        """Test load_cached_findings with mix of success, missing, and error."""

        class QueryType(Enum):
            ISSUES = "issues"
            VULNERABILITIES = "vulnerabilities"
            ASSETS = "assets"

        with tempfile.TemporaryDirectory() as tmpdir:
            good_file = os.path.join(tmpdir, "issues.json")
            broken_file = os.path.join(tmpdir, "corrupted.json")

            # One readable cache file.
            with open(good_file, "w") as handle:
                json.dump([{"id": "issue1"}], handle)

            # One unreadable cache file.
            with open(broken_file, "w") as handle:
                handle.write("invalid json")

            results = FileOperations.load_cached_findings(
                [
                    {"type": QueryType.ISSUES, "file_path": good_file},
                    {"type": QueryType.VULNERABILITIES, "file_path": "/nonexistent.json"},
                    {"type": QueryType.ASSETS, "file_path": broken_file},
                ]
            )

            assert len(results) == 3

            # Readable cache: data loaded, no error recorded.
            assert results[0][0] == "issues"
            assert len(results[0][1]) == 1
            assert results[0][2] is None

            # Missing cache: empty data, still no error.
            assert results[1][0] == "vulnerabilities"
            assert results[1][1] == []
            assert results[1][2] is None

            # Corrupt cache: empty data plus a recorded Exception.
            assert results[2][0] == "assets"
            assert results[2][1] == []
            assert isinstance(results[2][2], Exception)
885
+
886
+
887
+ class TestFileOperationsEdgeCases:
888
+ """Test edge cases and boundary conditions."""
889
+
890
+ def test_file_operations_with_symlinks(self):
891
+ """Test file operations work with symbolic links."""
892
+ with tempfile.TemporaryDirectory() as tmpdir:
893
+ real_file = os.path.join(tmpdir, "real.json")
894
+ symlink_file = os.path.join(tmpdir, "symlink.json")
895
+
896
+ # Create real file
897
+ test_data = {"symlink": "test"}
898
+ with open(real_file, "w") as f:
899
+ json.dump(test_data, f)
900
+
901
+ # Create symlink
902
+ os.symlink(real_file, symlink_file)
903
+
904
+ # Test loading through symlink
905
+ loaded = FileOperations.load_json_file(symlink_file)
906
+ assert loaded == test_data
907
+
908
+ # Test file age through symlink
909
+ age = FileOperations.get_file_age(symlink_file)
910
+ assert age is not None
911
+
912
+ def test_file_operations_with_very_large_json(self):
913
+ """Test file operations with large JSON data."""
914
+ with tempfile.TemporaryDirectory() as tmpdir:
915
+ file_path = os.path.join(tmpdir, "large.json")
916
+
917
+ # Create large dataset
918
+ large_data = [{"id": i, "data": "x" * 100} for i in range(1000)]
919
+
920
+ success = FileOperations.save_json_file(large_data, file_path)
921
+ assert success is True
922
+
923
+ loaded = FileOperations.load_json_file(file_path)
924
+ assert len(loaded) == 1000
925
+ assert loaded[0]["id"] == 0
926
+ assert loaded[999]["id"] == 999
927
+
928
+ def test_file_operations_with_special_characters_in_path(self):
929
+ """Test file operations with special characters in path."""
930
+ with tempfile.TemporaryDirectory() as tmpdir:
931
+ # Create directory with special chars
932
+ special_dir = os.path.join(tmpdir, "test-dir_with.special")
933
+ os.makedirs(special_dir)
934
+
935
+ file_path = os.path.join(special_dir, "test.json")
936
+ test_data = {"special": "chars"}
937
+
938
+ success = FileOperations.save_json_file(test_data, file_path)
939
+ assert success is True
940
+
941
+ loaded = FileOperations.load_json_file(file_path)
942
+ assert loaded == test_data
943
+
944
+ def test_cache_validation_with_zero_max_age(self):
945
+ """Test cache validation with zero max age."""
946
+ with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
947
+ f.write('{"test": true}')
948
+ temp_path = f.name
949
+
950
+ try:
951
+ # Even fresh file should be invalid with 0 max age
952
+ is_valid = FileOperations.is_cache_valid(temp_path, max_age_hours=0)
953
+ assert is_valid is False
954
+ finally:
955
+ os.unlink(temp_path)
956
+
957
+ def test_search_with_complex_match_function(self):
958
+ """Test search with complex matching logic."""
959
+ with tempfile.TemporaryDirectory() as tmpdir:
960
+ file_path = os.path.join(tmpdir, "complex.json")
961
+ test_data = [
962
+ {"id": 1, "tags": ["python", "testing"]},
963
+ {"id": 2, "tags": ["javascript", "frontend"]},
964
+ {"id": 3, "tags": ["python", "backend"]},
965
+ ]
966
+
967
+ with open(file_path, "w") as f:
968
+ json.dump(test_data, f)
969
+
970
+ def match_fn(item, identifier):
971
+ return identifier in item.get("tags", [])
972
+
973
+ result = FileOperations.search_single_json_file("python", file_path, match_fn)
974
+ assert result is not None
975
+ assert result["id"] == 1
976
+
977
+ def test_load_cache_or_fetch_with_none_return(self):
978
+ """Test load_cache_or_fetch when fetch_fn returns None."""
979
+ with tempfile.TemporaryDirectory() as tmpdir:
980
+ cache_path = os.path.join(tmpdir, "cache.json")
981
+
982
+ def fetch_fn():
983
+ return None
984
+
985
+ result = FileOperations.load_cache_or_fetch(
986
+ file_path=cache_path, fetch_fn=fetch_fn, max_age_hours=1, save_cache=True
987
+ )
988
+
989
+ assert result is None