regscale-cli 6.17.0.0-py3-none-any.whl → 6.19.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli might be problematic.
- regscale/__init__.py +1 -1
- regscale/core/app/api.py +5 -0
- regscale/core/login.py +3 -0
- regscale/integrations/api_paginator.py +932 -0
- regscale/integrations/api_paginator_example.py +348 -0
- regscale/integrations/commercial/__init__.py +11 -10
- regscale/integrations/commercial/burp.py +4 -0
- regscale/integrations/commercial/{qualys.py → qualys/__init__.py} +756 -105
- regscale/integrations/commercial/qualys/scanner.py +1051 -0
- regscale/integrations/commercial/qualys/variables.py +21 -0
- regscale/integrations/commercial/sicura/api.py +1 -0
- regscale/integrations/commercial/stigv2/click_commands.py +36 -8
- regscale/integrations/commercial/stigv2/stig_integration.py +63 -9
- regscale/integrations/commercial/tenablev2/__init__.py +9 -0
- regscale/integrations/commercial/tenablev2/authenticate.py +23 -2
- regscale/integrations/commercial/tenablev2/commands.py +779 -0
- regscale/integrations/commercial/tenablev2/jsonl_scanner.py +1999 -0
- regscale/integrations/commercial/tenablev2/sc_scanner.py +600 -0
- regscale/integrations/commercial/tenablev2/scanner.py +7 -5
- regscale/integrations/commercial/tenablev2/utils.py +21 -4
- regscale/integrations/commercial/tenablev2/variables.py +4 -0
- regscale/integrations/jsonl_scanner_integration.py +523 -142
- regscale/integrations/scanner_integration.py +102 -26
- regscale/integrations/transformer/__init__.py +17 -0
- regscale/integrations/transformer/data_transformer.py +445 -0
- regscale/integrations/transformer/mappings/__init__.py +8 -0
- regscale/integrations/variables.py +2 -0
- regscale/models/__init__.py +5 -2
- regscale/models/integration_models/cisa_kev_data.json +63 -7
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/asset.py +5 -2
- regscale/models/regscale_models/file.py +5 -2
- regscale/regscale.py +3 -1
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/RECORD +47 -31
- tests/regscale/core/test_version.py +22 -0
- tests/regscale/integrations/__init__.py +0 -0
- tests/regscale/integrations/test_api_paginator.py +597 -0
- tests/regscale/integrations/test_integration_mapping.py +60 -0
- tests/regscale/integrations/test_issue_creation.py +317 -0
- tests/regscale/integrations/test_issue_due_date.py +46 -0
- tests/regscale/integrations/transformer/__init__.py +0 -0
- tests/regscale/integrations/transformer/test_data_transformer.py +850 -0
- regscale/integrations/commercial/tenablev2/click.py +0 -1637
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.17.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/top_level.txt +0 -0
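
The headline change is the conversion of the Qualys integration from a single module into a package: qualys.py becomes qualys/__init__.py, gaining scanner.py and variables.py siblings. Because Python resolves regscale.integrations.commercial.qualys to the package's __init__.py, existing imports keep working unchanged. A sketch of the equivalence (illustrative; the import targets are taken from the diff below and assume the package is installed):

# Before (6.17): regscale/integrations/commercial/qualys.py          (a module)
# After  (6.19): regscale/integrations/commercial/qualys/__init__.py (a package)
#                regscale/integrations/commercial/qualys/scanner.py
#                regscale/integrations/commercial/qualys/variables.py

# Both layouts satisfy the same import, so existing callers are unaffected:
from regscale.integrations.commercial import qualys

# The package layout additionally exposes the new submodules:
from regscale.integrations.commercial.qualys.scanner import QualysTotalCloudJSONLIntegration
from regscale.integrations.commercial.qualys.variables import QualysVariables

The full diff of that file follows.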
regscale/integrations/commercial/{qualys.py → qualys/__init__.py}

@@ -1,13 +1,15 @@
-
-
-"""
+"""
+Qualys integration module for RegScale CLI.
+"""
+
+import logging
 import os
 import pprint
 import traceback
 from asyncio import sleep
-from datetime import datetime,
+from datetime import datetime, timezone, timedelta
 from json import JSONDecodeError
-from typing import Any,
+from typing import Optional, Any, Union, Tuple
 from urllib.parse import urljoin

 import click
@@ -15,53 +17,678 @@ import requests
 import xmltodict
 from pathlib import Path
 from requests import Session
-from rich.progress import TaskID
+from rich.progress import TaskID, Progress, TextColumn, BarColumn, SpinnerColumn, TimeElapsedColumn

-from regscale.core.app.logz import create_logger
 from regscale.core.app.utils.app_utils import (
     check_file_path,
     check_license,
-    create_progress_object,
-    error_and_exit,
     get_current_datetime,
     save_data_to,
+    error_and_exit,
+    create_progress_object,
 )
 from regscale.core.app.utils.file_utils import download_from_s3
-from regscale.
-from regscale.
-from regscale.
+from regscale.integrations.commercial.qualys.scanner import QualysTotalCloudJSONLIntegration
+from regscale.integrations.commercial.qualys.variables import QualysVariables
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+from regscale.integrations.variables import ScannerVariables
+from regscale.models import Asset, Search, regscale_models, Issue
+from regscale.models.app_models.click import regscale_ssp_id, save_output_to, NotRequiredIf
 from regscale.models.integration_models.flat_file_importer import FlatFileImporter
 from regscale.models.integration_models.qualys import (
-    Qualys,
     QualysContainerScansImporter,
     QualysWasScansImporter,
     QualysPolicyScansImporter,
+    Qualys,
 )
-from regscale.models.integration_models.qualys_scanner import QualysTotalCloudIntegration
-
-####################################################################################################
-#
-# Qualys API Documentation:
-# https://qualysguard.qg2.apps.qualys.com/qwebhelp/fo_portal/api_doc/index.htm
-#
-####################################################################################################
-

-#
-logger =
-
-# create progress object to add tasks to for real time updates
+# Create logger for this module
+logger = logging.getLogger("regscale")
 job_progress = create_progress_object()
 HEADERS = {"X-Requested-With": "RegScale CLI"}
+
+# Import the Qualys API session object and headers from the main qualys module
 QUALYS_API = Session()


-# Create group to handle Qualys commands
 @click.group()
 def qualys():
     """Performs actions from the Qualys API"""


+def _prepare_qualys_params(include_tags: str, exclude_tags: str) -> dict:
+    """
+    Prepare parameters for Qualys API request.
+
+    :param str include_tags: Tags to include in the filter
+    :param str exclude_tags: Tags to exclude in the filter
+    :return: Dictionary of parameters for the API request
+    :rtype: dict
+    """
+    params = {
+        "action": "list",
+        "show_asset_id": "1",
+        "show_tags": "1",
+    }
+
+    if exclude_tags or include_tags:
+        params["use_tags"] = "1"
+        if exclude_tags:
+            params["tag_set_exclude"] = exclude_tags
+        if include_tags:
+            params["tag_set_include"] = include_tags
+
+    return params
+
+
+def _setup_progress_tracking(integration, progress):
+    """
+    Set up progress tracking for assets and findings.
+
+    :param integration: Scanner integration instance
+    :param progress: Progress bar instance
+    :return: Tuple of task IDs and counts for assets and findings
+    :rtype: tuple
+    """
+    # Count assets and findings for progress tracking
+    asset_count = sum(1 for _ in open(integration.ASSETS_FILE, "r") if _.strip())
+    finding_count = sum(1 for _ in open(integration.FINDINGS_FILE, "r") if _.strip())
+
+    logger.info(f"Found {asset_count} assets and {finding_count} findings in JSONL files")
+
+    # Create tasks
+    asset_task = progress.add_task(f"[green]Importing {asset_count} assets to RegScale...", total=asset_count)
+    finding_task = progress.add_task(f"[yellow]Importing {finding_count} findings to RegScale...", visible=False)
+
+    return asset_task, finding_task, asset_count, finding_count
+
+
+def _import_assets(integration, assets_iterator, progress, asset_task):
+    """
+    Import assets to RegScale with progress tracking.
+
+    :param integration: Scanner integration instance
+    :param assets_iterator: Iterator of assets
+    :param progress: Progress bar instance to track progress
+    :param asset_task: Task ID for asset progress
+    :return: Number of assets imported
+    :rtype: int
+    """
+    if not _verify_assets_file_exists(integration):
+        return 0
+
+    try:
+        wrapped_iterator = _create_asset_progress_tracker(assets_iterator, progress, asset_task)
+        assets_imported = integration.update_regscale_assets(wrapped_iterator)
+        logger.info(f"Imported {assets_imported} assets to RegScale")
+        return assets_imported
+    except Exception as e:
+        logger.error(f"Error importing assets to RegScale: {str(e)}")
+        logger.error(traceback.format_exc())
+        return 0
+
+
+def _verify_assets_file_exists(integration):
+    """
+    Verify that the assets file exists and is not empty.
+
+    :param integration: Scanner integration instance
+    :return: True if the file exists and is not empty, False otherwise
+    :rtype: bool
+    """
+    if not os.path.exists(integration.ASSETS_FILE) or os.path.getsize(integration.ASSETS_FILE) == 0:
+        logger.warning(f"Assets file {integration.ASSETS_FILE} is empty or does not exist")
+        return False
+    return True
+
+
+def _create_asset_progress_tracker(assets_iter, progress, asset_task):
+    """
+    Create a generator that tracks progress of asset processing.
+
+    :param assets_iter: Iterator of assets
+    :param progress: Progress bar instance
+    :param asset_task: Task ID for asset progress
+    :return: Generator that yields assets and updates progress
+    """
+    count = 0
+    asset_ids = []
+
+    try:
+        for asset in assets_iter:
+            count += 1
+            if asset and hasattr(asset, "identifier"):
+                asset_ids.append(asset.identifier)
+            progress.update(asset_task, advance=1)
+            yield asset
+
+        _log_asset_results(count, asset_ids)
+    except Exception as e:
+        logger.error(f"Error while yielding assets: {str(e)}")
+        logger.error(traceback.format_exc())
+
+
+def _log_asset_results(count, asset_ids):
+    """
+    Log the results of asset processing.
+
+    :param count: Number of assets processed
+    :param asset_ids: List of asset IDs
+    """
+    if count == 0:
+        logger.warning("No assets were yielded from the JSONL file")
+    else:
+        sample_ids = asset_ids[:5]
+        truncation_indicator = ", ..." if len(asset_ids) > 5 else ""
+        sample_ids_str = ", ".join(sample_ids)
+        logger.debug(f"Yielded {count} assets to RegScale with IDs: {sample_ids_str}{truncation_indicator}")
+
+
+def _import_findings(integration, progress, finding_task):
+    """
+    Import findings to RegScale with progress tracking.
+
+    :param integration: Scanner integration instance
+    :param progress: Progress bar instance
+    :param finding_task: Task ID for finding progress
+    :return: Number of findings imported
+    :rtype: int
+    """
+    progress.update(finding_task, visible=True)
+    total_findings = _count_findings_in_file(integration)
+
+    if total_findings > 0:
+        progress.update(finding_task, total=total_findings)
+
+    findings_yielded = 0
+    try:
+        # Create findings iterator directly from the file
+        tracked_iterator = _create_finding_progress_tracker(
+            integration._yield_items_from_jsonl(integration.FINDINGS_FILE, IntegrationFinding), progress, finding_task
+        )
+
+        findings_imported = integration.update_regscale_findings(tracked_iterator)
+        logger.info(f"Successfully imported {findings_imported} findings to RegScale")
+
+        # Update final count for nonlocal reference from the tracked_iterator
+        findings_yielded = getattr(tracked_iterator, "count", 0)
+
+        # Ensure progress is complete
+        if findings_yielded > 0:
+            progress.update(finding_task, completed=findings_yielded)
+
+        return findings_imported
+    except Exception as e:
+        logger.error(f"Error during RegScale findings import: {str(e)}")
+        return 0
+
+
+def _count_findings_in_file(integration):
+    """
+    Count the findings in the JSONL file.
+
+    :param integration: Scanner integration instance
+    :return: Number of findings in the file
+    :rtype: int
+    """
+    try:
+        if os.path.exists(integration.FINDINGS_FILE):
+            with open(integration.FINDINGS_FILE, "r") as f:
+                total_findings = sum(1 for line in f if line.strip())
+            logger.info(f"Found {total_findings} findings in JSONL file")
+            return total_findings
+        else:
+            logger.warning(f"Findings file {integration.FINDINGS_FILE} does not exist")
+            return 0
+    except Exception as e:
+        logger.error(f"Error counting findings in JSONL file: {str(e)}")
+        return 0
+
+
+def _create_finding_progress_tracker(findings_iter, progress, finding_task):
+    """
+    Create a generator function that tracks progress of finding processing.
+
+    :param findings_iter: Iterator of findings
+    :param progress: Progress bar instance
+    :param finding_task: Task ID for finding progress
+    :return: Generator that yields findings and updates progress
+    """
+    tracker = FindingProgressTracker(findings_iter, progress, finding_task)
+    return tracker
+
+
+class FindingProgressTracker:
+    """Class to track progress of finding processing with proper object reference."""
+
+    def __init__(self, findings_iter, progress, finding_task):
+        self.findings_iter = findings_iter
+        self.progress = progress
+        self.finding_task = finding_task
+        self.count = 0
+        self.finding_ids = []
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        try:
+            finding = next(self.findings_iter)
+            self.count += 1
+            if finding and hasattr(finding, "external_id"):
+                self.finding_ids.append(finding.external_id)
+            self.progress.update(self.finding_task, advance=1)
+            return finding
+        except StopIteration:
+            self._log_finding_results()
+            raise
+        except Exception as e:
+            logger.error(f"Error yielding findings: {str(e)}")
+            logger.debug(f"Findings yielded before error: {self.count}")
+            raise
+
+    def _log_finding_results(self):
+        """Log the results of finding processing."""
+        if self.count == 0:
+            logger.warning("No findings were yielded from the JSONL file")
+        else:
+            logger.info(f"Yielded {self.count} findings to RegScale")
+            sample_ids = self.finding_ids[:5]
+            truncation_indicator = ", ..." if len(self.finding_ids) > 5 else ""
+            sample_ids_str = ", ".join(sample_ids)
+            logger.debug(f"Sample finding IDs: {sample_ids_str}{truncation_indicator}")
+
+
+@click.command(name="import_total_cloud")
+@regscale_ssp_id()
+@click.option(
+    "--include_tags",
+    "-t",
+    type=click.STRING,
+    required=False,
+    default=None,
+    help="Include tags in the import comma separated string of tag names or ids, defaults to None.",
+)
+@click.option(
+    "--exclude_tags",
+    "-e",
+    type=click.STRING,
+    required=False,
+    default=None,
+    help="Exclude tags in the import comma separated string of tag names or ids, defaults to None.",
+)
+@click.option(
+    "--vulnerability-creation",
+    "-v",
+    type=click.Choice(["NoIssue", "IssueCreation", "PoamCreation"], case_sensitive=False),
+    required=False,
+    default=None,
+    help="Specify how vulnerabilities are processed: NoIssue, IssueCreation, or PoamCreation.",
+)
+@click.option(
+    "--ssl-verify/--no-ssl-verify",
+    default=None,
+    required=False,
+    help="Enable/disable SSL certificate verification for API calls.",
+)
+def import_total_cloud(
+    regscale_ssp_id: int, include_tags: str, exclude_tags: str, vulnerability_creation: str, ssl_verify: bool
+):
+    """
+    Import Qualys Total Cloud Assets and Vulnerabilities using JSONL scanner implementation.
+
+    This command uses the JSONLScannerIntegration class for improved efficiency and memory management.
+    """
+    try:
+        # Configure scanner variables and fetch data
+        _configure_scanner_variables(vulnerability_creation, ssl_verify)
+        response_data = _fetch_qualys_api_data(include_tags, exclude_tags)
+        if not response_data:
+            return
+
+        # Initialize and run integration
+        integration = _initialize_integration(regscale_ssp_id, response_data, vulnerability_creation, ssl_verify)
+        _run_integration_import(integration)
+
+        logger.info("Qualys Total Cloud data imported successfully with JSONL scanner.")
+    except Exception:
+        error_message = traceback.format_exc()
+        logger.error("Error occurred while processing Qualys data with JSONL scanner")
+        logger.error(error_message)
+
+
+def _configure_scanner_variables(vulnerability_creation, ssl_verify):
+    """Configure scanner variables with appropriate precedence.
+
+    :param str vulnerability_creation: Vulnerability creation mode from command line
+    :param bool ssl_verify: SSL verification setting from command line
+    """
+    # Configure vulnerability creation mode
+    _configure_vulnerability_creation(vulnerability_creation)
+
+    # Configure SSL verification
+    _configure_ssl_verification(ssl_verify)
+
+
+def _configure_vulnerability_creation(vulnerability_creation):
+    """Configure vulnerability creation mode with appropriate precedence.
+
+    :param str vulnerability_creation: Vulnerability creation mode from command line
+    """
+    if vulnerability_creation:
+        # Command line option takes precedence
+        ScannerVariables.vulnerabilityCreation = vulnerability_creation
+        logger.info(f"Setting vulnerability creation mode from command line: {vulnerability_creation}")
+    elif hasattr(QualysVariables, "vulnerabilityCreation") and ScannerVariables.vulnerabilityCreation:
+        # Use Qualys-specific setting if available
+        logger.info(f"Using Qualys-specific vulnerability creation mode: {ScannerVariables.vulnerabilityCreation}")
+    else:
+        # Fall back to global ScannerVariables
+        logger.info(f"Using global vulnerability creation mode: {ScannerVariables.vulnerabilityCreation}")
+
+
+def _configure_ssl_verification(ssl_verify):
+    """Configure SSL verification setting with appropriate precedence.
+
+    :param bool ssl_verify: SSL verification setting from command line
+    """
+    if ssl_verify is not None:
+        # Command line option takes precedence
+        ScannerVariables.sslVerify = ssl_verify
+        logger.info(f"Setting SSL verification from command line: {ssl_verify}")
+    elif hasattr(QualysVariables, "sslVerify") and QualysVariables.sslVerify is not None:
+        # Use Qualys-specific setting
+        logger.info(f"Using Qualys-specific SSL verification setting: {ScannerVariables.sslVerify}")
+    else:
+        # Fall back to global ScannerVariables
+        logger.info(f"Using global SSL verification setting: {ScannerVariables.sslVerify}")
+
+
+def _fetch_qualys_api_data(include_tags, exclude_tags):
+    """Fetch data from Qualys API.
+
+    :param str include_tags: Tags to include in the query
+    :param str exclude_tags: Tags to exclude from the query
+    :return: Parsed XML data or None if request failed
+    """
+    qualys_url, qualys_api = _get_qualys_api()
+    params = _prepare_qualys_params(include_tags, exclude_tags)
+
+    logger.info("Fetching Qualys Total Cloud data with JSONL scanner...")
+    response = qualys_api.get(
+        url=urljoin(qualys_url, "/api/2.0/fo/asset/host/vm/detection/"),
+        headers=HEADERS,
+        params=params,
+        verify=ScannerVariables.sslVerify,  # Apply SSL verification setting
+    )
+
+    if not response or not response.ok:
+        logger.error(
+            f"Received unexpected response from Qualys API: {response.status_code}: {response.text if response.text else 'response is null'}"
+        )
+        return None
+
+    # Process API response
+    logger.info("Total cloud data fetched. Processing with JSONL scanner...")
+    return xmltodict.parse(response.text)
+
+
+def _initialize_integration(regscale_ssp_id, response_data, vulnerability_creation, ssl_verify):
+    """Initialize the scanner integration with appropriate settings.
+
+    :param int regscale_ssp_id: RegScale SSP ID
+    :param dict response_data: Parsed XML data from API
+    :param str vulnerability_creation: Vulnerability creation mode
+    :param bool ssl_verify: SSL verification setting
+    :return: Initialized integration object
+    """
+    # Build integration kwargs
+    integration_kwargs = {
+        "plan_id": regscale_ssp_id,
+        "xml_data": response_data,
+        "vulnerability_creation": vulnerability_creation or ScannerVariables.vulnerabilityCreation,
+        "ssl_verify": ssl_verify if ssl_verify is not None else ScannerVariables.sslVerify,
+    }
+
+    # Add thread workers if available
+    if hasattr(ScannerVariables, "threadMaxWorkers"):
+        integration_kwargs["max_workers"] = ScannerVariables.threadMaxWorkers
+        logger.debug(f"Using thread max workers: {ScannerVariables.threadMaxWorkers}")
+
+    # Initialize and return integration
+    integration = QualysTotalCloudJSONLIntegration(**integration_kwargs)
+    return integration
+
+
+def _run_integration_import(integration):
+    """Run the integration import process with progress tracking.
+
+    :param QualysTotalCloudJSONLIntegration integration: Initialized integration object
+    """
+    assets_iterator, _ = integration.fetch_assets_and_findings()
+    logger.info("Syncing assets to RegScale...")
+
+    # Set up progress reporting and import data
+    with Progress(
+        SpinnerColumn(),
+        TextColumn("[bold blue]{task.description}"),
+        BarColumn(),
+        TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+        TextColumn("•"),
+        TimeElapsedColumn(),
+        console=None,
+    ) as progress:
+        # Set up progress tasks
+        asset_task, finding_task, _, _ = _setup_progress_tracking(integration, progress)
+
+        # Import assets and findings
+        _import_assets(integration, assets_iterator, progress, asset_task)
+        _import_findings(integration, progress, finding_task)
+
+
+@click.command(name="import_total_cloud_xml")
+@regscale_ssp_id()
+@click.option(
+    "--xml_file",
+    "-f",
+    type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
+    required=True,
+    help="Path to Qualys Total Cloud XML file to process.",
+)
+def import_total_cloud_from_xml(regscale_ssp_id: int, xml_file: str):
+    """
+    Import Qualys Total Cloud Assets and Vulnerabilities from an existing XML file using JSONL scanner.
+
+    This command processes an existing XML file instead of making an API call, useful for testing.
+    """
+    try:
+        logger.info(f"Processing Qualys Total Cloud XML file: {xml_file}")
+
+        # Parse the XML file
+        with open(xml_file, "r") as f:
+            xml_content = f.read()
+        response_data = xmltodict.parse(xml_content)
+
+        # Initialize the JSONLScannerIntegration implementation
+        integration = QualysTotalCloudJSONLIntegration(
+            plan_id=regscale_ssp_id, xml_data=response_data, file_path=xml_file
+        )
+
+        # Process data and generate JSONL files
+        if not _process_xml_to_jsonl(integration):
+            return
+
+        # Count and validate items for progress tracking
+        asset_count, finding_count = _count_jsonl_items(integration)
+        if asset_count == 0 and finding_count == 0:
+            logger.error("No assets or findings found in the processed data")
+            return
+
+        # Set up progress tracking and import to RegScale
+        _import_to_regscale(integration, asset_count, finding_count)
+
+        logger.info("Qualys Total Cloud XML file imported successfully with JSONL scanner.")
+    except Exception:
+        error_message = traceback.format_exc()
+        logger.error("Error occurred while processing Qualys XML file with JSONL scanner")
+        logger.error(error_message)
+
+
+def _process_xml_to_jsonl(integration):
+    """Process XML data and generate JSONL files."""
+    try:
+        logger.info("Fetching assets and findings...")
+        integration.fetch_assets_and_findings()
+
+        # Validate JSONL files
+        for file_path, file_type in [(integration.ASSETS_FILE, "Assets"), (integration.FINDINGS_FILE, "Findings")]:
+            if not os.path.exists(file_path) or os.path.getsize(file_path) == 0:
+                logger.error(f"{file_type} file not created or is empty: {file_path}")
+                return False
+
+        logger.info("Successfully created JSONL files.")
+        logger.info(f"Assets file size: {os.path.getsize(integration.ASSETS_FILE)} bytes")
+        logger.info(f"Findings file size: {os.path.getsize(integration.FINDINGS_FILE)} bytes")
+        return True
+    except Exception as e:
+        logger.error(f"Error fetching assets and findings: {str(e)}")
+        logger.error(traceback.format_exc())
+        return False
+
+
+def _count_jsonl_items(integration):
+    """Count assets and findings in JSONL files."""
+    # Count assets
+    assets_count = 0
+    asset_ids = []
+    try:
+        assets_iterator = integration._yield_items_from_jsonl(integration.ASSETS_FILE, IntegrationAsset)
+        for asset in assets_iterator:
+            assets_count += 1
+            if asset and hasattr(asset, "identifier"):
+                asset_ids.append(asset.identifier)
+
+        logger.debug(f"Asset IDs: {', '.join(asset_ids[:5])}{', ...' if len(asset_ids) > 5 else ''}")
+    except Exception as e:
+        logger.error(f"Error counting assets: {str(e)}")
+
+    # Count findings
+    findings_count = 0
+    try:
+        findings_iterator = integration._yield_items_from_jsonl(integration.FINDINGS_FILE, IntegrationFinding)
+        findings_count = sum(1 for _ in findings_iterator)
+    except Exception as e:
+        logger.error(f"Error counting findings: {str(e)}")
+
+    logger.info(f"Found {assets_count} assets and {findings_count} findings in JSONL files")
+    return assets_count, findings_count
+
+
+def _create_progress_bar():
+    """Create a progress bar that doesn't reset at 100%."""
+    from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TimeElapsedColumn
+
+    class NonResettingProgress(Progress):
+        def update(self, task_id, **fields):
+            """Update a task."""
+            task = self._tasks[task_id]
+            completed_old = task.completed
+
+            # Update the task with the provided fields
+            for field_name, value in fields.items():
+                if field_name == "completed" and value is True:
+                    if task.total is not None:
+                        task.completed = task.total
+                else:
+                    setattr(task, field_name, value)
+
+            # Prevent task from being reset to 0% after reaching 100%
+            if completed_old == task.total and task.completed < task.total:
+                task.completed = task.total
+
+            self.refresh()
+
+    return NonResettingProgress(
+        SpinnerColumn(),
+        TextColumn("[bold blue]{task.description}"),
+        BarColumn(),
+        TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+        TextColumn("•"),
+        TimeElapsedColumn(),
+        console=None,
+    )
+
+
+def _track_progress_generator(iterator, progress_bar, task_id, id_attribute=None):
+    """Create a generator that tracks progress as items are yielded."""
+    processed = 0
+    item_ids = []
+
+    for item in iterator:
+        processed += 1
+
+        if id_attribute and hasattr(item, id_attribute):
+            item_ids.append(getattr(item, id_attribute))
+
+        progress_bar.update(task_id, completed=processed)
+        yield item
+
+    # Log debugging information
+    item_type = "items"
+    if id_attribute == "identifier":
+        item_type = "assets"
+    elif id_attribute == "external_id":
+        item_type = "findings"
+
+    logger.debug(f"Yielded {processed} {item_type} to RegScale")
+
+    if processed == 0:
+        logger.error(f"WARNING: No {item_type} were yielded to RegScale!")
+    elif item_ids:
+        logger.debug(f"First 10 {item_type} IDs: {item_ids[:10]}")
+
+
+def _import_to_regscale(integration, asset_count, finding_count):
+    """Import assets and findings to RegScale with progress tracking."""
+    progress = _create_progress_bar()
+
+    with progress:
+        # Create tasks
+        asset_task = progress.add_task(f"[green]Importing {asset_count} assets to RegScale...", total=asset_count)
+        finding_task = progress.add_task(
+            f"[yellow]Importing {finding_count} findings to RegScale...", visible=False, total=finding_count
+        )
+
+        # Import assets
+        try:
+            assets_iterator = integration._yield_items_from_jsonl(integration.ASSETS_FILE, IntegrationAsset)
+            tracked_assets = _track_progress_generator(assets_iterator, progress, asset_task, "identifier")
+            assets_imported = integration.update_regscale_assets(tracked_assets)
+            logger.info(f"Imported {assets_imported} assets to RegScale")
+            # Ensure the progress shows complete
+            progress.update(asset_task, completed=asset_count)
+        except Exception as e:
+            logger.error(f"Error importing assets to RegScale: {str(e)}")
+            logger.error(traceback.format_exc())
+            # Mark the task as completed even if there was an error
+            progress.update(asset_task, completed=asset_count)
+
+        # Import findings
+        progress.update(finding_task, visible=True)
+        try:
+            findings_iterator = integration._yield_items_from_jsonl(integration.FINDINGS_FILE, IntegrationFinding)
+            tracked_findings = _track_progress_generator(findings_iterator, progress, finding_task, "external_id")
+            findings_imported = integration.update_regscale_findings(tracked_findings)
+            logger.info(f"Imported {findings_imported} findings to RegScale")
+            # Ensure progress shows complete
+            progress.update(finding_task, completed=finding_count)
+        except Exception as e:
+            logger.error(f"Error importing findings to RegScale: {str(e)}")
+            logger.error(traceback.format_exc())
+            # Mark as completed even if there was an error
+            progress.update(finding_task, completed=finding_count)
+
+
 @qualys.command(name="export_scans")
 @save_output_to()
 @click.option(
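The block above threads progress reporting through lazy iterators rather than materialized lists: _track_progress_generator wraps a generator, while FindingProgressTracker does the same job as an iterator class so that its count attribute stays readable after consumption (the import code reads it back via getattr(tracked_iterator, "count", 0)). A minimal, self-contained sketch of the underlying pattern — illustrative only, not the package's code:

from rich.progress import Progress


def tracked(items, progress, task_id):
    """Yield items unchanged, advancing the progress task once per item."""
    for item in items:
        progress.update(task_id, advance=1)
        yield item


with Progress() as progress:
    task = progress.add_task("Importing...", total=1000)
    # The consumer drives the generator, so the bar advances as items stream
    # through without ever holding the full collection in memory.
    total = sum(tracked(range(1000), progress, task))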
@@ -172,6 +799,54 @@ def import_qualys_scans(
 )


+@qualys.command(name="import_policy_scans")
+@FlatFileImporter.common_scanner_options(
+    message="File path to the folder containing policy .csv files to process to RegScale.",
+    prompt="File path for Qualys files",
+    import_name="qualys_policy_scan",
+)
+@click.option(
+    "--skip_rows",
+    type=click.INT,
+    help="The number of rows in the file to skip to get to the column headers, defaults to 5.",
+    default=5,
+)
+def import_policy_scans(
+    folder_path: os.PathLike[str],
+    regscale_ssp_id: int,
+    scan_date: datetime,
+    mappings_path: Path,
+    disable_mapping: bool,
+    skip_rows: int,
+    s3_bucket: str,
+    s3_prefix: str,
+    aws_profile: str,
+    upload_file: bool,
+):
+    """
+    Import Qualys policy scans from a CSV file into a RegScale Security Plan as assets and vulnerabilities.
+    """
+    process_files_with_importer(
+        folder_path=str(folder_path),
+        importer_class=QualysPolicyScansImporter,
+        regscale_ssp_id=regscale_ssp_id,
+        importer_args={
+            "plan_id": regscale_ssp_id,
+            "name": "QualysPolicyScan",
+            "parent_id": regscale_ssp_id,
+            "parent_module": "securityplans",
+            "scan_date": scan_date,
+        },
+        mappings_path=str(mappings_path),
+        disable_mapping=disable_mapping,
+        skip_rows=skip_rows,
+        s3_bucket=s3_bucket,
+        s3_prefix=s3_prefix,
+        aws_profile=aws_profile,
+        upload_file=upload_file,
+    )
+
+
 @qualys.command(name="save_results")
 @save_output_to()
 @click.option(
@@ -348,80 +1023,6 @@ def import_was_scans(
 )


-@qualys.command(name="import_total_cloud")
-@regscale_ssp_id()
-@click.option(
-    "--include_tags",
-    "-t",
-    type=click.STRING,
-    required=False,
-    default=None,
-    help="Include tags in the import comma seperated string of tag names or ids, defaults to None.",
-)
-@click.option(
-    "--exclude_tags",
-    "-e",
-    type=click.STRING,
-    required=False,
-    default=None,
-    help="Exclude tags in the import comma seperated string of tag names or ids, defaults to None.",
-)
-def import_total_cloud_assets_and_vulnerabilities(regscale_ssp_id: int, include_tags: str, exclude_tags: str):
-    """
-    Import Qualys Total Cloud Assets and Vulnerabilities into RegScale via API."""
-    import_total_cloud_data_from_qualys_api(
-        security_plan_id=regscale_ssp_id, include_tags=include_tags, exclude_tags=exclude_tags
-    )
-
-
-@qualys.command(name="import_policy_scans")
-@FlatFileImporter.common_scanner_options(
-    message="File path to the folder containing policy .csv files to process to RegScale.",
-    prompt="File path for Qualys files",
-    import_name="qualys_policy_scan",
-)
-@click.option(
-    "--skip_rows",
-    type=click.INT,
-    help="The number of rows in the file to skip to get to the column headers, defaults to 5.",
-    default=5,
-)
-def import_policy_scans(
-    folder_path: os.PathLike[str],
-    regscale_ssp_id: int,
-    scan_date: datetime,
-    mappings_path: Path,
-    disable_mapping: bool,
-    skip_rows: int,
-    s3_bucket: str,
-    s3_prefix: str,
-    aws_profile: str,
-    upload_file: bool,
-):
-    """
-    Import Qualys policy scans from a CSV file into a RegScale Security Plan as assets and vulnerabilities.
-    """
-    process_files_with_importer(
-        folder_path=str(folder_path),
-        importer_class=QualysPolicyScansImporter,
-        regscale_ssp_id=regscale_ssp_id,
-        importer_args={
-            "plan_id": regscale_ssp_id,
-            "name": "QualysPolicyScan",
-            "parent_id": regscale_ssp_id,
-            "parent_module": "securityplans",
-            "scan_date": scan_date,
-        },
-        mappings_path=str(mappings_path),
-        disable_mapping=disable_mapping,
-        skip_rows=skip_rows,
-        s3_bucket=s3_bucket,
-        s3_prefix=s3_prefix,
-        aws_profile=aws_profile,
-        upload_file=upload_file,
-    )
-
-
 def process_files_with_importer(
     regscale_ssp_id: int,
     folder_path: str,
@@ -731,12 +1332,12 @@ def _get_qualys_api():
 def import_total_cloud_data_from_qualys_api(security_plan_id: int, include_tags: str, exclude_tags: str):
     """
     Function to get the total cloud data from Qualys API
+
     :param int security_plan_id: The ID of the plan to get the data for
     :param str include_tags: The tags to include in the data
     :param str exclude_tags: The tags to exclude from the data
     """
     try:
-
         qualys_url, QUALYS_API = _get_qualys_api()
         params = {
             "action": "list",
@@ -749,19 +1350,44 @@ def import_total_cloud_data_from_qualys_api(security_plan_id: int, include_tags:
             params["tag_set_exclude"] = exclude_tags
         if include_tags:
             params["tag_set_include"] = include_tags
+
         logger.info("Fetching Qualys Total Cloud data...")
         response = QUALYS_API.get(
             url=urljoin(qualys_url, "/api/2.0/fo/asset/host/vm/detection/"),
             headers=HEADERS,
             params=params,
         )
+
         if response and response.ok:
             logger.info("Total cloud data fetched. processing...")
             response_data = xmltodict.parse(response.text)
-            qt = QualysTotalCloudIntegration(plan_id=security_plan_id, xml_data=response_data)
-            qt.fetch_assets()
-            qt.fetch_findings()

+            # Create artifacts directory if it doesn't exist
+            os.makedirs("./artifacts", exist_ok=True)
+
+            # Write the XML data to a temporary file
+            temp_xml_file = "./artifacts/qualys_temp_data.xml"
+            with open(temp_xml_file, "w") as f:
+                f.write(response.text)
+
+            logger.info(f"Saved Qualys response data to {temp_xml_file}")
+
+            # Initialize the JSONLScannerIntegration implementation with the file path
+            integration = QualysTotalCloudJSONLIntegration(
+                plan_id=security_plan_id, xml_data=response_data, file_path=temp_xml_file
+            )
+
+            # Process assets and findings in one pass for efficiency
+            assets_iterator, findings_iterator = integration.fetch_assets_and_findings()
+
+            # Update RegScale with the processed data
+            logger.info("Syncing assets to RegScale...")
+            integration.update_regscale_assets(assets_iterator)
+
+            logger.info("Syncing findings to RegScale...")
+            integration.update_regscale_findings(findings_iterator)
+
+            logger.info("Qualys Total Cloud data imported successfully.")
         else:
             logger.error(
                 f"Received unexpected response from Qualys API: {response.status_code}: {response.text if response.text else 'response is null'}"
@@ -1451,3 +2077,28 @@ def get_asset_groups_from_qualys() -> list:
             f"Unable to retrieve asset groups from Qualys.\nReceived: #{response.status_code}: {response.text}"
         )
     return asset_groups
+
+
+__all__ = [
+    "QualysTotalCloudJSONLIntegration",
+    "import_total_cloud",
+    "import_total_cloud_from_xml",
+    "save_results",
+    "sync_qualys",
+    "get_asset_groups",
+    "import_container_scans",
+    "import_was_scans",
+    "import_policy_scans",
+]
+
+# Register commands with the qualys group
+qualys.add_command(import_total_cloud)
+qualys.add_command(import_total_cloud_from_xml)
+qualys.add_command(export_past_scans)
+qualys.add_command(import_scans)
+qualys.add_command(import_policy_scans)
+qualys.add_command(save_results)
+qualys.add_command(sync_qualys)
+qualys.add_command(get_asset_groups)
+qualys.add_command(import_container_scans)
+qualys.add_command(import_was_scans)