regscale-cli 6.26.0.0__py3-none-any.whl → 6.27.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of regscale-cli might be problematic; see the registry's advisory page for more details.

Files changed (96):
  1. regscale/_version.py +1 -1
  2. regscale/core/app/application.py +1 -1
  3. regscale/core/app/internal/evidence.py +419 -2
  4. regscale/dev/code_gen.py +24 -20
  5. regscale/integrations/commercial/__init__.py +0 -1
  6. regscale/integrations/commercial/jira.py +367 -126
  7. regscale/integrations/commercial/qualys/__init__.py +7 -8
  8. regscale/integrations/commercial/qualys/scanner.py +8 -3
  9. regscale/integrations/commercial/synqly/assets.py +17 -0
  10. regscale/integrations/commercial/synqly/vulnerabilities.py +45 -28
  11. regscale/integrations/commercial/tenablev2/cis_parsers.py +453 -0
  12. regscale/integrations/commercial/tenablev2/cis_scanner.py +447 -0
  13. regscale/integrations/commercial/tenablev2/commands.py +142 -1
  14. regscale/integrations/commercial/tenablev2/scanner.py +0 -1
  15. regscale/integrations/commercial/tenablev2/stig_parsers.py +113 -57
  16. regscale/integrations/commercial/wizv2/WizDataMixin.py +1 -1
  17. regscale/integrations/commercial/wizv2/click.py +44 -59
  18. regscale/integrations/commercial/wizv2/compliance/__init__.py +15 -0
  19. regscale/integrations/commercial/wizv2/{policy_compliance_helpers.py → compliance/helpers.py} +78 -60
  20. regscale/integrations/commercial/wizv2/compliance_report.py +10 -9
  21. regscale/integrations/commercial/wizv2/core/__init__.py +133 -0
  22. regscale/integrations/commercial/wizv2/{async_client.py → core/client.py} +3 -3
  23. regscale/integrations/commercial/wizv2/{constants.py → core/constants.py} +1 -17
  24. regscale/integrations/commercial/wizv2/core/file_operations.py +237 -0
  25. regscale/integrations/commercial/wizv2/fetchers/__init__.py +11 -0
  26. regscale/integrations/commercial/wizv2/{data_fetcher.py → fetchers/policy_assessment.py} +5 -9
  27. regscale/integrations/commercial/wizv2/issue.py +1 -1
  28. regscale/integrations/commercial/wizv2/models/__init__.py +0 -0
  29. regscale/integrations/commercial/wizv2/parsers/__init__.py +34 -0
  30. regscale/integrations/commercial/wizv2/{parsers.py → parsers/main.py} +1 -1
  31. regscale/integrations/commercial/wizv2/processors/__init__.py +11 -0
  32. regscale/integrations/commercial/wizv2/{finding_processor.py → processors/finding.py} +1 -1
  33. regscale/integrations/commercial/wizv2/reports.py +1 -1
  34. regscale/integrations/commercial/wizv2/sbom.py +1 -1
  35. regscale/integrations/commercial/wizv2/scanner.py +40 -100
  36. regscale/integrations/commercial/wizv2/utils/__init__.py +48 -0
  37. regscale/integrations/commercial/wizv2/{utils.py → utils/main.py} +116 -61
  38. regscale/integrations/commercial/wizv2/variables.py +89 -3
  39. regscale/integrations/compliance_integration.py +0 -46
  40. regscale/integrations/control_matcher.py +22 -3
  41. regscale/integrations/due_date_handler.py +14 -8
  42. regscale/integrations/public/fedramp/docx_parser.py +10 -1
  43. regscale/integrations/public/fedramp/fedramp_cis_crm.py +393 -340
  44. regscale/integrations/public/fedramp/fedramp_five.py +1 -1
  45. regscale/integrations/scanner_integration.py +127 -57
  46. regscale/models/integration_models/cisa_kev_data.json +132 -9
  47. regscale/models/integration_models/qualys.py +3 -4
  48. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  49. regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +24 -7
  50. regscale/models/integration_models/synqly_models/synqly_model.py +8 -1
  51. regscale/models/regscale_models/control_implementation.py +1 -1
  52. regscale/models/regscale_models/issue.py +0 -1
  53. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.1.dist-info}/METADATA +1 -17
  54. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.1.dist-info}/RECORD +94 -61
  55. tests/regscale/integrations/commercial/test_jira.py +481 -91
  56. tests/regscale/integrations/commercial/test_wiz.py +96 -200
  57. tests/regscale/integrations/commercial/wizv2/__init__.py +1 -1
  58. tests/regscale/integrations/commercial/wizv2/compliance/__init__.py +1 -0
  59. tests/regscale/integrations/commercial/wizv2/compliance/test_helpers.py +903 -0
  60. tests/regscale/integrations/commercial/wizv2/core/__init__.py +1 -0
  61. tests/regscale/integrations/commercial/wizv2/core/test_auth.py +701 -0
  62. tests/regscale/integrations/commercial/wizv2/core/test_client.py +1037 -0
  63. tests/regscale/integrations/commercial/wizv2/core/test_file_operations.py +989 -0
  64. tests/regscale/integrations/commercial/wizv2/fetchers/__init__.py +1 -0
  65. tests/regscale/integrations/commercial/wizv2/fetchers/test_policy_assessment.py +805 -0
  66. tests/regscale/integrations/commercial/wizv2/parsers/__init__.py +1 -0
  67. tests/regscale/integrations/commercial/wizv2/parsers/test_main.py +1153 -0
  68. tests/regscale/integrations/commercial/wizv2/processors/__init__.py +1 -0
  69. tests/regscale/integrations/commercial/wizv2/processors/test_finding.py +671 -0
  70. tests/regscale/integrations/commercial/wizv2/test_WizDataMixin.py +537 -0
  71. tests/regscale/integrations/commercial/wizv2/test_click_comprehensive.py +851 -0
  72. tests/regscale/integrations/commercial/wizv2/test_compliance_report_comprehensive.py +910 -0
  73. tests/regscale/integrations/commercial/wizv2/test_file_cleanup.py +283 -0
  74. tests/regscale/integrations/commercial/wizv2/test_file_operations.py +260 -0
  75. tests/regscale/integrations/commercial/wizv2/test_issue.py +1 -1
  76. tests/regscale/integrations/commercial/wizv2/test_issue_comprehensive.py +1203 -0
  77. tests/regscale/integrations/commercial/wizv2/test_reports.py +497 -0
  78. tests/regscale/integrations/commercial/wizv2/test_sbom.py +643 -0
  79. tests/regscale/integrations/commercial/wizv2/test_scanner_comprehensive.py +805 -0
  80. tests/regscale/integrations/commercial/wizv2/test_wiz_click_client_id.py +1 -1
  81. tests/regscale/integrations/commercial/wizv2/test_wiz_compliance_report.py +72 -29
  82. tests/regscale/integrations/commercial/wizv2/test_wiz_findings_comprehensive.py +364 -0
  83. tests/regscale/integrations/commercial/wizv2/test_wiz_inventory_comprehensive.py +644 -0
  84. tests/regscale/integrations/commercial/wizv2/test_wizv2.py +946 -78
  85. tests/regscale/integrations/commercial/wizv2/test_wizv2_utils.py +97 -202
  86. tests/regscale/integrations/commercial/wizv2/utils/__init__.py +1 -0
  87. tests/regscale/integrations/commercial/wizv2/utils/test_main.py +1523 -0
  88. tests/regscale/integrations/public/test_fedramp.py +301 -0
  89. tests/regscale/integrations/test_control_matcher.py +83 -0
  90. regscale/integrations/commercial/wizv2/policy_compliance.py +0 -3543
  91. tests/regscale/integrations/commercial/wizv2/test_wiz_policy_compliance.py +0 -750
  92. /regscale/integrations/commercial/wizv2/{wiz_auth.py → core/auth.py} +0 -0
  93. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.1.dist-info}/LICENSE +0 -0
  94. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.1.dist-info}/WHEEL +0 -0
  95. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.1.dist-info}/entry_points.txt +0 -0
  96. {regscale_cli-6.26.0.0.dist-info → regscale_cli-6.27.0.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,283 @@
1
+ """Tests for file cleanup utilities."""
2
+
3
+ import os
4
+ import tempfile
5
+ import unittest
6
+ from pathlib import Path
7
+ from unittest.mock import patch, MagicMock
8
+
9
+ from regscale.integrations.commercial.wizv2.file_cleanup import ReportFileCleanup
10
+
11
+
12
class TestReportFileCleanup(unittest.TestCase):
    """Unit tests for the ReportFileCleanup helper."""

    def setUp(self):
        """Create a scratch directory and the prefix shared by all test files."""
        self.test_dir = tempfile.mkdtemp()
        self.file_prefix = "test_report_"

    def tearDown(self):
        """Best-effort removal of any leftover files, then the directory itself."""
        if os.path.exists(self.test_dir):
            for name in os.listdir(self.test_dir):
                try:
                    os.remove(os.path.join(self.test_dir, name))
                except Exception:
                    pass
            try:
                os.rmdir(self.test_dir)
            except Exception:
                pass

    def _create_test_file(self, filename: str, age_seconds: int = 0) -> str:
        """
        Create an empty file, optionally back-dating its modification time.

        :param str filename: Name of the file to create
        :param int age_seconds: How far in the past (seconds) the mtime should sit
        :return: Full path to the created file
        """
        full_path = os.path.join(self.test_dir, filename)
        Path(full_path).touch()
        if age_seconds > 0:
            import time

            stamp = time.time() - age_seconds
            os.utime(full_path, (stamp, stamp))
        return full_path

    def _matching_files(self):
        """Return the names in the scratch directory that carry the test prefix."""
        return [n for n in os.listdir(self.test_dir) if n.startswith(self.file_prefix)]

    def test_cleanup_no_files(self):
        """Cleanup of an empty directory must be a no-op, not an error."""
        ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix)

    def test_cleanup_directory_not_exists(self):
        """Cleanup of a missing directory must not raise."""
        missing_dir = os.path.join(self.test_dir, "non_existent")
        ReportFileCleanup.cleanup_old_files(missing_dir, self.file_prefix)

    def test_cleanup_keeps_recent_files(self):
        """All files survive while their count stays within keep_count."""
        for n in range(3):
            self._create_test_file(f"{self.file_prefix}{n}.csv")

        ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix, keep_count=5)

        self.assertEqual(len(self._matching_files()), 3)

    def test_cleanup_deletes_old_files(self):
        """Only the keep_count newest files survive a cleanup."""
        for n in range(10):
            self._create_test_file(f"{self.file_prefix}{n:02d}.csv", age_seconds=100 - n * 10)

        ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix, keep_count=3)

        self.assertEqual(len(self._matching_files()), 3)

    def test_cleanup_respects_extension_grouping(self):
        """keep_count applies per extension, not across the whole directory."""
        for n in range(6):
            self._create_test_file(f"{self.file_prefix}{n:02d}.csv", age_seconds=100 - n * 10)
        for n in range(6):
            self._create_test_file(f"{self.file_prefix}{n:02d}.json", age_seconds=100 - n * 10)

        ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix, keep_count=3)

        kept_csv = [n for n in os.listdir(self.test_dir) if n.endswith(".csv")]
        kept_json = [n for n in os.listdir(self.test_dir) if n.endswith(".json")]
        self.assertEqual(len(kept_csv), 3)
        self.assertEqual(len(kept_json), 3)

    def test_cleanup_with_custom_extensions(self):
        """Only the supplied extensions are considered for deletion."""
        self._create_test_file(f"{self.file_prefix}01.csv")
        self._create_test_file(f"{self.file_prefix}02.txt")
        self._create_test_file(f"{self.file_prefix}03.log")

        ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix, extensions=[".txt", ".log"], keep_count=1)

        survivors = os.listdir(self.test_dir)
        self.assertIn(f"{self.file_prefix}01.csv", survivors)
        self.assertIn(f"{self.file_prefix}02.txt", survivors)
        self.assertIn(f"{self.file_prefix}03.log", survivors)

    def test_cleanup_default_extensions(self):
        """Without an explicit list the default extensions are used."""
        self._create_test_file(f"{self.file_prefix}01.csv")
        self._create_test_file(f"{self.file_prefix}02.json")
        self._create_test_file(f"{self.file_prefix}03.jsonl")
        self._create_test_file(f"{self.file_prefix}04.txt")

        ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix, keep_count=5)

        self.assertEqual(len(os.listdir(self.test_dir)), 4)

    def test_cleanup_ignores_non_matching_prefix(self):
        """Files that do not share the prefix must never be touched."""
        self._create_test_file(f"{self.file_prefix}01.csv")
        self._create_test_file("other_prefix_01.csv")

        ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix, keep_count=0)

        self.assertIn("other_prefix_01.csv", os.listdir(self.test_dir))

    def test_cleanup_handles_delete_exception(self):
        """A failing os.remove is swallowed (logged) rather than propagated."""
        for n in range(10):
            self._create_test_file(f"{self.file_prefix}{n:02d}.csv", age_seconds=100 - n * 10)

        with patch("os.remove", side_effect=PermissionError("Permission denied")):
            # Must not raise — presumably a warning is logged instead.
            ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix, keep_count=3)

    def test_cleanup_handles_general_exception(self):
        """An unexpected directory-listing failure is handled gracefully."""
        with patch("os.path.exists", return_value=True):
            with patch("os.listdir", side_effect=PermissionError("Permission denied")):
                # Must not raise — presumably a warning is logged instead.
                ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix)

    def test_find_matching_files(self):
        """_find_matching_files returns only prefix-and-extension matches."""
        self._create_test_file(f"{self.file_prefix}01.csv")
        self._create_test_file(f"{self.file_prefix}02.json")
        self._create_test_file("other_file.csv")

        entries = ReportFileCleanup._find_matching_files(self.test_dir, self.file_prefix, [".csv", ".json"])

        self.assertEqual(len(entries), 2)
        names = [e[0] for e in entries]
        self.assertIn(f"{self.file_prefix}01.csv", names)
        self.assertIn(f"{self.file_prefix}02.json", names)

    def test_find_matching_files_multiple_extensions_per_file(self):
        """A file yields at most one entry even when several extensions match."""
        self._create_test_file(f"{self.file_prefix}01.csv")

        entries = ReportFileCleanup._find_matching_files(self.test_dir, self.file_prefix, [".csv", ".json"])

        # The lone .csv file must appear exactly once, tagged with ".csv".
        self.assertEqual(len(entries), 1)
        self.assertEqual(entries[0][2], ".csv")

    def test_group_files_by_extension(self):
        """Entries are bucketed under their extension."""
        entries = [
            ("file1.csv", "/path/file1.csv", ".csv"),
            ("file2.csv", "/path/file2.csv", ".csv"),
            ("file3.json", "/path/file3.json", ".json"),
        ]

        grouped = ReportFileCleanup._group_files_by_extension(entries)

        self.assertEqual(len(grouped), 2)
        self.assertEqual(len(grouped[".csv"]), 2)
        self.assertEqual(len(grouped[".json"]), 1)

    def test_group_files_by_extension_empty(self):
        """An empty entry list yields an empty mapping."""
        self.assertEqual(ReportFileCleanup._group_files_by_extension([]), {})

    def test_cleanup_files_by_extension(self):
        """Files beyond keep_count are removed and the removals counted."""
        tracked = []
        for n in range(5):
            created = self._create_test_file(f"{self.file_prefix}{n:02d}.csv", age_seconds=100 - n * 10)
            tracked.append((f"{self.file_prefix}{n:02d}.csv", created))

        deleted = ReportFileCleanup._cleanup_files_by_extension({".csv": tracked}, keep_count=2)

        self.assertEqual(deleted, 3)
        self.assertEqual(len(os.listdir(self.test_dir)), 2)

    def test_cleanup_files_by_extension_no_files_to_delete(self):
        """Nothing is removed when keep_count exceeds the file count."""
        tracked = []
        for n in range(3):
            created = self._create_test_file(f"{self.file_prefix}{n:02d}.csv")
            tracked.append((f"{self.file_prefix}{n:02d}.csv", created))

        deleted = ReportFileCleanup._cleanup_files_by_extension({".csv": tracked}, keep_count=5)

        self.assertEqual(deleted, 0)
        self.assertEqual(len(os.listdir(self.test_dir)), 3)

    def test_cleanup_files_by_extension_delete_all(self):
        """keep_count=0 removes every tracked file."""
        tracked = []
        for n in range(5):
            created = self._create_test_file(f"{self.file_prefix}{n:02d}.csv")
            tracked.append((f"{self.file_prefix}{n:02d}.csv", created))

        deleted = ReportFileCleanup._cleanup_files_by_extension({".csv": tracked}, keep_count=0)

        self.assertEqual(deleted, 5)
        self.assertEqual(len(os.listdir(self.test_dir)), 0)

    def test_cleanup_files_by_extension_with_delete_failure(self):
        """A failed delete does not abort the remaining deletions."""
        tracked = []
        for n in range(5):
            created = self._create_test_file(f"{self.file_prefix}{n:02d}.csv", age_seconds=100 - n * 10)
            tracked.append((f"{self.file_prefix}{n:02d}.csv", created))

        with patch("os.remove", side_effect=[None, PermissionError("Permission denied"), None]):
            deleted = ReportFileCleanup._cleanup_files_by_extension({".csv": tracked}, keep_count=2)
            # Processing continues past the failure; count stays non-negative.
            self.assertGreaterEqual(deleted, 0)

    def test_cleanup_files_sorted_by_modification_time(self):
        """With keep_count=1 the newest file is the one that survives."""
        self._create_test_file(f"{self.file_prefix}oldest.csv", age_seconds=1000)
        self._create_test_file(f"{self.file_prefix}middle.csv", age_seconds=500)
        self._create_test_file(f"{self.file_prefix}newest.csv", age_seconds=100)

        ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix, keep_count=1)

        survivors = os.listdir(self.test_dir)
        self.assertEqual(len(survivors), 1)
        self.assertIn(f"{self.file_prefix}newest.csv", survivors)

    def test_cleanup_multiple_extensions_independent(self):
        """Each extension group is trimmed to keep_count on its own."""
        for n in range(10):
            self._create_test_file(f"{self.file_prefix}{n:02d}.csv", age_seconds=200 - n * 10)
        for n in range(5):
            self._create_test_file(f"{self.file_prefix}{n:02d}.json", age_seconds=100 - n * 10)

        ReportFileCleanup.cleanup_old_files(self.test_dir, self.file_prefix, keep_count=3)

        kept_csv = [f for f in os.listdir(self.test_dir) if f.endswith(".csv")]
        kept_json = [f for f in os.listdir(self.test_dir) if f.endswith(".json")]

        self.assertEqual(len(kept_csv), 3)
        self.assertEqual(len(kept_json), 3)
282
# Allow the suite to be executed directly with `python <this file>`.
if __name__ == "__main__":
    unittest.main()
@@ -0,0 +1,260 @@
1
+ #!/usr/bin/env python3
2
+ # -*- coding: utf-8 -*-
3
+ """
4
+ Test suite for FileOperations module.
5
+
6
+ Tests cover:
7
+ - JSON file loading and saving
8
+ - Cache validation by file age
9
+ - Load cache or fetch pattern
10
+ - Multi-file search operations
11
+ """
12
+
13
+ import datetime
14
+ import json
15
+ import os
16
+ import tempfile
17
+ import pytest
18
+ from pathlib import Path
19
+
20
+ from regscale.integrations.commercial.wizv2.core.file_operations import FileOperations
21
+
22
+
23
class TestFileOperationsBasics:
    """Round-trip and failure-path tests for basic JSON file I/O."""

    def test_save_and_load_json_file(self):
        """Data written by save_json_file reads back unchanged."""
        with tempfile.TemporaryDirectory() as tmpdir:
            target = os.path.join(tmpdir, "test.json")
            payload = {"key": "value", "number": 42, "list": [1, 2, 3]}

            # Persist, then confirm the file landed on disk.
            assert FileOperations.save_json_file(payload, target) is True
            assert os.path.exists(target)

            # Read it back and compare to the original payload.
            assert FileOperations.load_json_file(target) == payload

    def test_load_json_file_nonexistent(self):
        """A missing path yields None rather than an exception."""
        assert FileOperations.load_json_file("/nonexistent/path/file.json") is None

    def test_save_json_file_creates_directory(self):
        """create_dir=True makes missing parent directories on demand."""
        with tempfile.TemporaryDirectory() as tmpdir:
            deep_path = os.path.join(tmpdir, "level1", "level2", "test.json")
            payload = {"nested": True}

            assert FileOperations.save_json_file(payload, deep_path, create_dir=True) is True
            assert os.path.exists(deep_path)
            assert FileOperations.load_json_file(deep_path) == payload
59
+
60
class TestFileOperationsCaching:
    """Tests for file-age checks and the load-cache-or-fetch pattern."""

    def test_get_file_age_existing_file(self):
        """A freshly written file reports a very small timedelta age."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as handle:
            handle.write("test")
            tmp_name = handle.name

        try:
            age = FileOperations.get_file_age(tmp_name)
            assert age is not None
            assert isinstance(age, datetime.timedelta)
            # Created a moment ago, so the age must be tiny.
            assert age.total_seconds() < 5
        finally:
            os.unlink(tmp_name)

    def test_get_file_age_nonexistent(self):
        """Age of a missing file is None."""
        assert FileOperations.get_file_age("/nonexistent/file.json") is None

    def test_is_cache_valid_fresh_file(self):
        """A just-written cache file is valid within the age window."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as handle:
            handle.write('{"fresh": true}')
            tmp_name = handle.name

        try:
            assert FileOperations.is_cache_valid(tmp_name, max_age_hours=1) is True
        finally:
            os.unlink(tmp_name)

    def test_is_cache_valid_nonexistent(self):
        """A missing cache file is never valid."""
        assert FileOperations.is_cache_valid("/nonexistent/cache.json", max_age_hours=1) is False

    def test_load_cache_or_fetch_uses_cache(self):
        """A valid cache short-circuits the fetch callback entirely."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            json.dump({"cached": True}, handle)
            tmp_name = handle.name

        try:
            fetch_called = False

            def fetch_fn():
                nonlocal fetch_called
                fetch_called = True
                return {"fetched": True}

            outcome = FileOperations.load_cache_or_fetch(
                file_path=tmp_name, fetch_fn=fetch_fn, max_age_hours=1, save_cache=False
            )

            assert outcome == {"cached": True}
            # The callback must not run while the cache is fresh.
            assert fetch_called is False
        finally:
            os.unlink(tmp_name)

    def test_load_cache_or_fetch_fetches_when_no_cache(self):
        """With no cache on disk the data is fetched and then persisted."""
        with tempfile.TemporaryDirectory() as tmpdir:
            cache_path = os.path.join(tmpdir, "cache.json")

            def fetch_fn():
                return {"fetched": True}

            outcome = FileOperations.load_cache_or_fetch(
                file_path=cache_path, fetch_fn=fetch_fn, max_age_hours=1, save_cache=True
            )

            assert outcome == {"fetched": True}
            assert os.path.exists(cache_path)  # fetch result was written as cache

            # The file on disk must hold exactly what the fetch returned.
            assert FileOperations.load_json_file(cache_path) == {"fetched": True}
142
class TestFileOperationsSearch:
    """Tests for locating an item in one or several JSON files."""

    @staticmethod
    def _match_fn(item, identifier):
        """Match an item whose "id" field equals the identifier."""
        return item.get("id") == identifier

    def test_search_single_json_file_found(self):
        """An item present in the file is found and returned."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            records = [{"id": "1", "name": "Alice"}, {"id": "2", "name": "Bob"}, {"id": "3", "name": "Charlie"}]
            json.dump(records, handle)
            tmp_name = handle.name

        try:
            found = FileOperations.search_single_json_file("2", tmp_name, self._match_fn)
            assert found is not None
            assert found["name"] == "Bob"
        finally:
            os.unlink(tmp_name)

    def test_search_single_json_file_not_found(self):
        """An absent identifier yields None."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as handle:
            json.dump([{"id": "1", "name": "Alice"}], handle)
            tmp_name = handle.name

        try:
            assert FileOperations.search_single_json_file("999", tmp_name, self._match_fn) is None
        finally:
            os.unlink(tmp_name)

    def test_search_json_files_multiple_files(self):
        """Both the match and the file it came from are reported."""
        with tempfile.TemporaryDirectory() as tmpdir:
            paths = [os.path.join(tmpdir, f"file{i}.json") for i in (1, 2, 3)]
            contents = [
                [{"id": "1", "name": "File1Item"}],
                [{"id": "2", "name": "File2Item"}],
                [{"id": "3", "name": "File3Item"}],
            ]
            for path, data in zip(paths, contents):
                with open(path, "w") as handle:
                    json.dump(data, handle)

            # The identifier "2" lives in the second file.
            found, origin = FileOperations.search_json_files("2", paths, self._match_fn)

            assert found is not None
            assert found["name"] == "File2Item"
            assert origin == paths[1]

    def test_search_json_files_not_found(self):
        """No match across any file yields (None, None)."""
        with tempfile.TemporaryDirectory() as tmpdir:
            only_file = os.path.join(tmpdir, "file1.json")

            with open(only_file, "w") as handle:
                json.dump([{"id": "1"}], handle)

            found, origin = FileOperations.search_json_files("999", [only_file], self._match_fn)

            assert found is None
            assert origin is None
224
class TestFileOperationsEdgeCases:
    """Error-handling paths: unserializable input, malformed files, missing files."""

    def test_save_json_file_invalid_data(self):
        """Non-serializable payloads make save_json_file return False."""
        with tempfile.TemporaryDirectory() as tmpdir:
            target = os.path.join(tmpdir, "test.json")

            # json cannot encode an arbitrary object, so the save must fail.
            class NonSerializable:
                pass

            assert FileOperations.save_json_file({"obj": NonSerializable()}, target) is False

    def test_load_json_file_invalid_json(self):
        """Malformed JSON content loads as None instead of raising."""
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as handle:
            handle.write("not valid json{[")
            tmp_name = handle.name

        try:
            assert FileOperations.load_json_file(tmp_name) is None
        finally:
            os.unlink(tmp_name)

    def test_search_json_files_with_nonexistent_files(self):
        """Missing files are skipped and the search returns (None, None)."""

        def match_fn(item, identifier):
            return item.get("id") == identifier

        found, origin = FileOperations.search_json_files("1", ["/nonexistent1.json", "/nonexistent2.json"], match_fn)

        assert found is None
        assert origin is None
@@ -1,7 +1,7 @@
1
1
  import pytest
2
2
  from unittest.mock import patch
3
3
  from regscale.integrations.commercial.wizv2.issue import WizIssue
4
- from regscale.integrations.commercial.wizv2.constants import WizVulnerabilityType
4
+ from regscale.integrations.commercial.wizv2.core.constants import WizVulnerabilityType
5
5
  from regscale.models import IssueSeverity, IssueStatus
6
6
 
7
7