regscale-cli 6.17.0.0__py3-none-any.whl → 6.19.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli might be problematic. Click here for more details.
- regscale/__init__.py +1 -1
- regscale/core/app/api.py +5 -0
- regscale/core/login.py +3 -0
- regscale/integrations/api_paginator.py +932 -0
- regscale/integrations/api_paginator_example.py +348 -0
- regscale/integrations/commercial/__init__.py +11 -10
- regscale/integrations/commercial/burp.py +4 -0
- regscale/integrations/commercial/{qualys.py → qualys/__init__.py} +756 -105
- regscale/integrations/commercial/qualys/scanner.py +1051 -0
- regscale/integrations/commercial/qualys/variables.py +21 -0
- regscale/integrations/commercial/sicura/api.py +1 -0
- regscale/integrations/commercial/stigv2/click_commands.py +36 -8
- regscale/integrations/commercial/stigv2/stig_integration.py +63 -9
- regscale/integrations/commercial/tenablev2/__init__.py +9 -0
- regscale/integrations/commercial/tenablev2/authenticate.py +23 -2
- regscale/integrations/commercial/tenablev2/commands.py +779 -0
- regscale/integrations/commercial/tenablev2/jsonl_scanner.py +1999 -0
- regscale/integrations/commercial/tenablev2/sc_scanner.py +600 -0
- regscale/integrations/commercial/tenablev2/scanner.py +7 -5
- regscale/integrations/commercial/tenablev2/utils.py +21 -4
- regscale/integrations/commercial/tenablev2/variables.py +4 -0
- regscale/integrations/jsonl_scanner_integration.py +523 -142
- regscale/integrations/scanner_integration.py +102 -26
- regscale/integrations/transformer/__init__.py +17 -0
- regscale/integrations/transformer/data_transformer.py +445 -0
- regscale/integrations/transformer/mappings/__init__.py +8 -0
- regscale/integrations/variables.py +2 -0
- regscale/models/__init__.py +5 -2
- regscale/models/integration_models/cisa_kev_data.json +63 -7
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/asset.py +5 -2
- regscale/models/regscale_models/file.py +5 -2
- regscale/regscale.py +3 -1
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/RECORD +47 -31
- tests/regscale/core/test_version.py +22 -0
- tests/regscale/integrations/__init__.py +0 -0
- tests/regscale/integrations/test_api_paginator.py +597 -0
- tests/regscale/integrations/test_integration_mapping.py +60 -0
- tests/regscale/integrations/test_issue_creation.py +317 -0
- tests/regscale/integrations/test_issue_due_date.py +46 -0
- tests/regscale/integrations/transformer/__init__.py +0 -0
- tests/regscale/integrations/transformer/test_data_transformer.py +850 -0
- regscale/integrations/commercial/tenablev2/click.py +0 -1637
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,597 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
"""
|
|
4
|
+
Tests for the ApiPaginator class.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
import tempfile
|
|
10
|
+
from typing import Dict, List, Any
|
|
11
|
+
from unittest.mock import patch
|
|
12
|
+
|
|
13
|
+
import pytest
|
|
14
|
+
import responses
|
|
15
|
+
|
|
16
|
+
from regscale.integrations.api_paginator import ApiPaginator, HTTPS_PREFIX
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class TestApiPaginator:
    """Test cases for the ApiPaginator class (offset/page/token/cursor/custom pagination)."""

    @pytest.fixture
    def base_url(self) -> str:
        """Return a base URL for testing."""
        return "https://api.example.com"

    @pytest.fixture
    def auth_headers(self) -> Dict[str, str]:
        """Return authentication headers for testing."""
        return {"Authorization": "Bearer test-token"}

    @pytest.fixture
    def temp_output_file(self) -> str:
        """Yield a temporary JSONL file path and remove the file afterwards.

        NOTE: the original version requested pytest's ``tmp_path`` fixture and then
        immediately shadowed it with the NamedTemporaryFile name; the unused
        dependency has been dropped.
        """
        with tempfile.NamedTemporaryFile(delete=False, suffix=".jsonl") as tmp:
            file_path = tmp.name
        yield file_path
        if os.path.exists(file_path):
            os.unlink(file_path)

    @pytest.fixture
    def mock_response_data(self) -> List[Dict[str, Any]]:
        """Return mock API response data for testing."""
        return [
            {"id": 1, "name": "Item 1"},
            {"id": 2, "name": "Item 2"},
            {"id": 3, "name": "Item 3"},
        ]

    @pytest.fixture
    def paginator(self, base_url, auth_headers) -> ApiPaginator:
        """Return an ApiPaginator instance for testing."""
        return ApiPaginator(
            base_url=base_url,
            auth_headers=auth_headers,
            page_size=2,
        )

    @pytest.fixture
    def file_paginator(self, base_url, auth_headers, temp_output_file) -> ApiPaginator:
        """Return an ApiPaginator instance with file output for testing."""
        return ApiPaginator(
            base_url=base_url,
            auth_headers=auth_headers,
            page_size=2,
            output_file=temp_output_file,
        )

    def test_init(self, base_url, auth_headers, temp_output_file):
        """Test initialization with default and custom parameters."""
        # Test with minimal parameters: defaults documented by ApiPaginator
        paginator = ApiPaginator(base_url=base_url, auth_headers={})
        assert paginator.base_url == base_url
        assert paginator.auth_headers == {}
        assert paginator.page_size == 100
        assert paginator.throttle_rate is None
        assert paginator.timeout == 30
        assert paginator.ssl_verify is True
        assert paginator.output_file is None

        # Test with custom parameters
        paginator = ApiPaginator(
            base_url=base_url,
            auth_headers=auth_headers,
            page_size=50,
            throttle_rate=0.5,
            timeout=10,
            ssl_verify=False,
            output_file=temp_output_file,
        )
        assert paginator.base_url == base_url
        assert paginator.auth_headers == auth_headers
        assert paginator.page_size == 50
        # abs(0.5) was a no-op on a positive literal; compare directly
        assert paginator.throttle_rate == 0.5
        assert paginator.timeout == 10
        assert paginator.ssl_verify is False
        assert paginator.output_file == temp_output_file

    def test_create_session(self, paginator):
        """Test creation of requests session."""
        session = paginator.session

        # Verify session has the correct headers
        assert paginator.auth_headers.items() <= session.headers.items()

        # Verify HTTPS adapter is mounted
        assert HTTPS_PREFIX in session.adapters

        # For ssl_verify=True, both HTTP and HTTPS adapters are present
        # We no longer check for HTTP adapter absence
        assert session.adapters.get(HTTPS_PREFIX) is not None

    @responses.activate
    def test_offset_pagination(self, paginator, base_url, mock_response_data):
        """Test offset-based pagination."""
        # First page
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=mock_response_data[:2],
            status=200,
        )

        # Second page
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=mock_response_data[2:],
            status=200,
        )

        # Third page (empty, to end pagination)
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=[],
            status=200,
        )

        results = list(
            paginator.fetch_paginated_results(
                endpoint="items",
                pagination_type="offset",
            )
        )

        assert len(results) == 3
        assert results == mock_response_data

        # Verify that offset was incremented correctly
        request_params = [r.request.params for r in responses.calls]
        assert request_params[0]["offset"] == "0"
        assert request_params[0]["limit"] == "2"
        assert request_params[1]["offset"] == "2"
        assert request_params[1]["limit"] == "2"
        # We don't need to check the third request, as it might not be made
        # if the API returns an empty list for the second page

    @responses.activate
    def test_page_pagination(self, paginator, base_url, mock_response_data):
        """Test page-based pagination."""
        # First page
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=mock_response_data[:2],
            status=200,
        )

        # Second page
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=mock_response_data[2:],
            status=200,
        )

        # Third page (empty, to end pagination)
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=[],
            status=200,
        )

        results = list(
            paginator.fetch_paginated_results(
                endpoint="items",
                pagination_type="page",
            )
        )

        assert len(results) == 3
        assert results == mock_response_data

        # Verify that page was incremented correctly (page numbering starts at 1)
        request_params = [r.request.params for r in responses.calls]
        assert request_params[0]["page"] == "1"
        assert request_params[0]["per_page"] == "2"
        assert request_params[1]["page"] == "2"
        assert request_params[1]["per_page"] == "2"
        # We don't need to check the third request as it might not be made
        # if the API returns an empty list for the second page

    @responses.activate
    def test_token_pagination(self, paginator, base_url, mock_response_data):
        """Test token-based pagination."""
        # First page with next token
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json={"data": mock_response_data[:2], "nextToken": "token123"},
            status=200,
        )

        # Second page with no next token (end of pagination)
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json={"data": mock_response_data[2:], "nextToken": None},
            status=200,
        )

        results = list(
            paginator.fetch_paginated_results(
                endpoint="items",
                pagination_type="token",
                data_path="data",
            )
        )

        assert len(results) == 3
        assert results == mock_response_data

        # Verify token was passed in second request only
        request_params = [r.request.params for r in responses.calls]
        assert "next_token" not in request_params[0]
        assert request_params[1]["next_token"] == "token123"

    @responses.activate
    def test_cursor_pagination(self, paginator, base_url, mock_response_data):
        """Test cursor-based pagination."""
        # First page with next cursor
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json={
                "data": mock_response_data[:2],
                "paging": {"cursors": {"after": "cursor123"}},
            },
            status=200,
        )

        # Second page with no next cursor (end of pagination)
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json={"data": mock_response_data[2:], "paging": {"cursors": {}}},
            status=200,
        )

        results = list(
            paginator.fetch_paginated_results(
                endpoint="items",
                pagination_type="cursor",
                data_path="data",
            )
        )

        assert len(results) == 3
        assert results == mock_response_data

        # Verify cursor was passed in second request only
        request_params = [r.request.params for r in responses.calls]
        assert "cursor" not in request_params[0]
        assert request_params[1]["cursor"] == "cursor123"

    @responses.activate
    def test_custom_pagination(self, paginator, base_url, mock_response_data):
        """Test custom pagination via a caller-supplied next-page extractor."""
        # First page with custom next page info
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json={
                "items": mock_response_data[:2],
                "metadata": {"has_more": True, "next_page": "/items?page=2"},
            },
            status=200,
        )

        # Second page with no more pages
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json={
                "items": mock_response_data[2:],
                "metadata": {"has_more": False},
            },
            status=200,
        )

        # Custom next page extractor: returns the next URL or None to stop
        def next_page_extractor(result):
            next_url = result.get("metadata", {}).get("next_page")
            return next_url

        results = list(
            paginator.fetch_paginated_results(
                endpoint="items",
                pagination_type="custom",
                data_path="items",
                next_page_extractor=next_page_extractor,
            )
        )

        assert len(results) == 3
        assert results == mock_response_data

    @responses.activate
    def test_file_output(self, file_paginator, base_url, mock_response_data, temp_output_file):
        """Test writing results to a JSONL file."""
        # Add mock responses
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=mock_response_data,
            status=200,
        )

        # Second page (empty, to end pagination)
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=[],
            status=200,
        )

        # Fetch results
        results = list(
            file_paginator.fetch_paginated_results(
                endpoint="items",
                pagination_type="offset",
            )
        )

        assert len(results) == 3
        assert results == mock_response_data

        # Verify file content: one JSON document per line
        with open(temp_output_file, "r") as f:
            file_data = [json.loads(line) for line in f]
        assert file_data == mock_response_data

    @responses.activate
    def test_max_pages(self, paginator, base_url, mock_response_data):
        """Test max_pages parameter limits the number of requests."""
        # First page
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=mock_response_data[:2],
            status=200,
        )

        # Second page
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=mock_response_data[2:],
            status=200,
        )

        # Set max_pages directly on the paginator
        paginator.max_pages = 1

        # We should only get the first page due to max_pages=1
        results = list(
            paginator.fetch_paginated_results(
                endpoint="items",
                pagination_type="offset",
            )
        )

        assert len(results) == 2
        assert results == mock_response_data[:2]
        assert len(responses.calls) == 1

    @responses.activate
    def test_throttling(self, paginator, base_url, mock_response_data):
        """Test request throttling sleeps between page requests."""
        # Set throttle_rate
        paginator.throttle_rate = 0.01  # Small value for testing

        # First page
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=mock_response_data[:2],
            status=200,
        )

        # Second page
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=mock_response_data[2:],
            status=200,
        )

        # Third page (empty, to end pagination)
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json=[],
            status=200,
        )

        with patch("time.sleep") as mock_sleep:
            _ = list(
                paginator.fetch_paginated_results(
                    endpoint="items",
                    pagination_type="offset",
                )
            )

        # Throttling only occurs after the first request.
        # It's only called once since we have two page requests.
        assert mock_sleep.call_count == 1

    @responses.activate
    def test_error_handling(self, paginator, base_url):
        """Test error handling in requests: a 500 yields no results and logs."""
        # Response with error status
        responses.add(
            responses.GET,
            f"{base_url}/items",
            status=500,
            json={"error": "Server error"},
        )

        # Log error should be called
        with patch("logging.Logger.error") as mock_error:
            with patch("logging.Logger.debug") as mock_debug:  # Mock debug too
                # The paginator handles the error internally and returns None;
                # no RetryError escapes _make_request.
                results = list(
                    paginator.fetch_paginated_results(
                        endpoint="items",
                        pagination_type="offset",
                    )
                )

        # Verify the logs were called
        assert mock_error.called
        assert mock_debug.called
        # Verify that we attempted to make the call
        assert len(responses.calls) > 0
        # Result should be empty since error occurred
        assert len(results) == 0

    @responses.activate
    def test_data_path_navigation(self, paginator, base_url, mock_response_data):
        """Test navigating to data using a dotted data path."""
        # API returns data in a nested structure
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json={"response": {"items": mock_response_data}},
            status=200,
        )

        # Second call with empty data to end pagination
        responses.add(
            responses.GET,
            f"{base_url}/items",
            json={"response": {"items": []}},
            status=200,
        )

        results = list(
            paginator.fetch_paginated_results(
                endpoint="items",
                pagination_type="offset",
                data_path="response.items",
            )
        )

        assert len(results) == 3
        assert results == mock_response_data

    @responses.activate
    def test_fetch_all_concurrent(self, paginator, base_url, mock_response_data):
        """Test fetching multiple endpoints concurrently."""
        # Mock responses for endpoint1
        responses.add(
            responses.GET,
            f"{base_url}/endpoint1",
            json=mock_response_data[:2],
            status=200,
        )
        responses.add(
            responses.GET,
            f"{base_url}/endpoint1",
            json=[],
            status=200,
        )

        # Mock responses for endpoint2
        responses.add(
            responses.GET,
            f"{base_url}/endpoint2",
            json=mock_response_data[2:],
            status=200,
        )
        responses.add(
            responses.GET,
            f"{base_url}/endpoint2",
            json=[],
            status=200,
        )

        results = list(
            paginator.fetch_all_concurrent(
                endpoints=["endpoint1", "endpoint2"],
            )
        )

        assert len(results) == 3
        # Concurrent fetch order is nondeterministic, so sort before comparing
        assert sorted(results, key=lambda x: x["id"]) == mock_response_data

    def test_read_jsonl_file(self, file_paginator, temp_output_file, mock_response_data):
        """Test reading from a JSONL file."""
        # Write test data to the file
        with open(temp_output_file, "w") as f:
            for item in mock_response_data:
                f.write(json.dumps(item) + "\n")

        # Use the static method directly
        results = list(ApiPaginator.read_jsonl_file(temp_output_file))
        assert results == mock_response_data

    def test_clear_output_file(self, file_paginator, temp_output_file):
        """Test clearing the output file deletes it."""
        # Write something to the file
        with open(temp_output_file, "w") as f:
            f.write("test data\n")

        # File should have content
        assert os.path.getsize(temp_output_file) > 0

        # Clear the file
        file_paginator.clear_output_file()

        # File should be deleted
        assert not os.path.exists(temp_output_file)

    def test_extract_value_from_paths(self, paginator):
        """Test extracting values from a nested structure."""
        # Test nested structure
        data = {
            "user": {
                "profile": {
                    "name": "Test User",
                    "email": "test@example.com",
                },
                "settings": {
                    "notifications": True,
                },
            },
            "metadata": {
                "created_at": "2023-01-01",
            },
        }

        # Create path lists for testing
        name_paths = [["user", "profile", "name"]]
        notifications_paths = [["user", "settings", "notifications"]]
        date_paths = [["metadata", "created_at"]]
        missing_paths = [["user", "age"]]
        multiple_paths = [["missing"], ["user", "profile", "name"]]

        # Test valid paths (values are returned stringified)
        assert paginator._extract_value_from_paths(data, name_paths) == "Test User"
        assert paginator._extract_value_from_paths(data, notifications_paths) == "True"
        assert paginator._extract_value_from_paths(data, date_paths) == "2023-01-01"

        # Test non-existent paths
        assert paginator._extract_value_from_paths(data, missing_paths) is None

        # Test multiple paths (should find the first valid one)
        assert paginator._extract_value_from_paths(data, multiple_paths) == "Test User"
|
|
594
|
+
|
|
595
|
+
|
|
596
|
+
if __name__ == "__main__":
    # Propagate pytest's exit status to the shell instead of discarding it.
    raise SystemExit(pytest.main())
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
from unittest.mock import MagicMock
|
|
3
|
+
|
|
4
|
+
import pytest
|
|
5
|
+
|
|
6
|
+
from regscale.core.app.application import Application
|
|
7
|
+
from regscale.integrations.integration_override import IntegrationOverride
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@pytest.fixture()
def config():
    """Return a sample configuration with finding-field overrides for tenable_sc."""
    tenable_sc_mapping = {
        "severity": "risk_level",
        "description": "details",
        "remediation": "rando",
        "title": "default",
    }
    return {"findingFromMapping": {"tenable_sc": tenable_sc_mapping}}
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def test_integration_mapping_load(config):
    """Verify IntegrationOverride resolves mapped fields and returns None otherwise."""
    # Build an Application and swap in the test configuration
    app = Application()
    app.config = config
    assert "findingFromMapping" in app.config

    # Initialize the override helper with the patched app
    override = IntegrationOverride(app)

    # A "default" value is treated as no mapping at all
    assert override.mapping_exists("tenable_sc", "title") is False

    # Existing mappings resolve to their configured source fields
    expected_mappings = {
        "severity": "risk_level",
        "description": "details",
        "remediation": "rando",
    }
    for field_name, source_field in expected_mappings.items():
        assert override.load("tenable_sc", field_name) == source_field

    # Missing mappings — including the "default" title, unknown fields,
    # unknown integrations, and fully-None arguments — all yield None
    for integration, field_name in (
        ("tenable_sc", "title"),
        ("tenable_sc", "non_existing_field"),
        ("non_existing_integration", "severity"),
        (None, None),
    ):
        assert override.load(integration, field_name) is None
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def test_no_config():
    """Loading from an application with an empty config always yields None."""
    app_stub = MagicMock()
    app_stub.config = {}
    override = IntegrationOverride(app_stub)
    for integration, field_name in (("tenable_sc", "remediation"), (None, None)):
        assert override.load(integration, field_name) is None
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def test_singleton():
    """IntegrationOverride acts as a singleton: repeated construction returns one object."""
    app = Application()
    first_instance = IntegrationOverride(app)
    second_instance = IntegrationOverride(app)
    assert first_instance is second_instance
|