regscale-cli 6.20.1.1__py3-none-any.whl → 6.20.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli has been flagged as possibly problematic.
- regscale/__init__.py +1 -1
- regscale/core/app/utils/variables.py +5 -3
- regscale/integrations/commercial/__init__.py +2 -0
- regscale/integrations/commercial/burp.py +14 -0
- regscale/integrations/commercial/grype/commands.py +8 -1
- regscale/integrations/commercial/grype/scanner.py +2 -1
- regscale/integrations/commercial/jira.py +290 -133
- regscale/integrations/commercial/opentext/commands.py +14 -5
- regscale/integrations/commercial/opentext/scanner.py +3 -2
- regscale/integrations/commercial/qualys/__init__.py +3 -3
- regscale/integrations/commercial/stigv2/click_commands.py +6 -37
- regscale/integrations/commercial/tenablev2/commands.py +12 -4
- regscale/integrations/commercial/tenablev2/sc_scanner.py +21 -1
- regscale/integrations/commercial/tenablev2/sync_compliance.py +3 -0
- regscale/integrations/commercial/trivy/commands.py +11 -4
- regscale/integrations/commercial/trivy/scanner.py +2 -1
- regscale/integrations/jsonl_scanner_integration.py +8 -1
- regscale/integrations/public/cisa.py +58 -63
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +88 -93
- regscale/integrations/scanner_integration.py +22 -6
- regscale/models/app_models/click.py +49 -1
- regscale/models/integration_models/burp.py +11 -8
- regscale/models/integration_models/cisa_kev_data.json +142 -21
- regscale/models/integration_models/flat_file_importer/__init__.py +36 -176
- regscale/models/integration_models/jira_task_sync.py +27 -0
- regscale/models/integration_models/qualys.py +6 -7
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/control_implementation.py +39 -2
- regscale/models/regscale_models/regscale_model.py +49 -1
- regscale/models/regscale_models/task.py +1 -0
- regscale/regscale.py +1 -4
- regscale/utils/string.py +13 -0
- {regscale_cli-6.20.1.1.dist-info → regscale_cli-6.20.2.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.20.1.1.dist-info → regscale_cli-6.20.2.0.dist-info}/RECORD +38 -38
- regscale/integrations/commercial/synqly_jira.py +0 -840
- {regscale_cli-6.20.1.1.dist-info → regscale_cli-6.20.2.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.20.1.1.dist-info → regscale_cli-6.20.2.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.20.1.1.dist-info → regscale_cli-6.20.2.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.20.1.1.dist-info → regscale_cli-6.20.2.0.dist-info}/top_level.txt +0 -0
regscale/integrations/commercial/opentext/commands.py

@@ -27,9 +27,10 @@ def web_inspect():
 
 @web_inspect.command(name="import_scans")
 @FlatFileImporter.common_scanner_options(
-    message="File path to the folder containing
-    prompt="File path for
-    import_name="
+    message="File path to the folder containing OpenText WebInspect .xml files to process to RegScale.",
+    prompt="File path for OpenText WebInspect files",
+    import_name="opentext",
+    support_component=True,
 )
 @click.option(
     "--destination",

@@ -41,7 +42,7 @@ def web_inspect():
 @click.option(
     "--file_pattern",
     "-fp",
-    help="[Optional] File pattern to match (e.g., '*.
+    help="[Optional] File pattern to match (e.g., '*.xml')",
     required=False,
 )
 def import_scans(

@@ -49,6 +50,7 @@ def import_scans(
     file_pattern: str,
     folder_path: Path,
     regscale_ssp_id: int,
+    component_id: int,
     scan_date: datetime,
     mappings_path: Path,
     disable_mapping: bool,

@@ -63,8 +65,15 @@ def import_scans(
     # Use the new WebInspectIntegration class to sync assets and findings
     if s3_bucket and not folder_path:
         folder_path = s3_bucket
+
+    if not regscale_ssp_id and not component_id:
+        raise click.UsageError(
+            "You must provide either a --regscale_ssp_id or a --component_id to import OpenText scans."
+        )
+
     wi = WebInspectIntegration(
-        plan_id=regscale_ssp_id,
+        plan_id=component_id if component_id else regscale_ssp_id,
+        is_component=True if component_id else False,
         file_path=str(folder_path) if folder_path else None,
         s3_bucket=s3_bucket,
         s3_prefix=s3_prefix,
regscale/integrations/commercial/opentext/scanner.py

@@ -48,6 +48,7 @@ class WebInspectIntegration(JSONLScannerIntegration):
         kwargs["read_files_only"] = True
         self.disable_mapping = kwargs["disable_mapping"] = True
         self.set_scan_date(kwargs.get("scan_date"))
+        self.is_component = kwargs.get("is_component", False)
         # logger.debug(f"scan_date: {self.scan_date}"
         super().__init__(*args, **kwargs)
         logger.debug(f"WebInspectIntegration initialized with scan date: {self.scan_date}")

@@ -400,7 +401,7 @@ class WebInspectIntegration(JSONLScannerIntegration):
             asset_type="Other",
             asset_category="Hardware",
             parent_id=self.plan_id,
-            parent_module="securityplans",
+            parent_module="securityplans" if not self.is_component else "components",
         )

         # Get the host from the first issue

@@ -416,7 +417,7 @@ class WebInspectIntegration(JSONLScannerIntegration):
             asset_type="Other",
             asset_category="Hardware",
             parent_id=self.plan_id,
-            parent_module="securityplans",
+            parent_module="securityplans" if not self.is_component else "components",
             fqdn=url,
         )
 
regscale/integrations/commercial/qualys/__init__.py

@@ -718,7 +718,7 @@ def export_past_scans(save_output_to: Path, days: int, export: bool = True):
 
 @qualys.command(name="import_scans")
 @FlatFileImporter.common_scanner_options(
-    message="File path to the folder containing
+    message="File path to the folder containing Qualys .csv files to process to RegScale.",
     prompt="File path for Qualys files",
     import_name="qualys",
 )

@@ -740,7 +740,7 @@ def import_scans(
     aws_profile: str,
     upload_file: bool,
 ):
-    """Import scans from Qualys"""
+    """Import .csv scans from Qualys"""
     import_qualys_scans(
         folder_path=folder_path,
         regscale_ssp_id=regscale_ssp_id,

@@ -801,7 +801,7 @@ def import_qualys_scans(
 
 @qualys.command(name="import_policy_scans")
 @FlatFileImporter.common_scanner_options(
-    message="File path to the folder containing policy .csv files to process to RegScale.",
+    message="File path to the folder containing Qualys policy .csv files to process to RegScale.",
     prompt="File path for Qualys files",
     import_name="qualys_policy_scan",
 )
regscale/integrations/commercial/stigv2/click_commands.py

@@ -7,7 +7,7 @@ RegScale STIG Integration
 import click
 
 from regscale.integrations.commercial.stigv2.stig_integration import StigIntegration
-from regscale.models.app_models.click import
+from regscale.models.app_models.click import ssp_or_component_id
 
 
 @click.group(name="stigv2")

@@ -16,24 +16,7 @@ def stigv2():
 
 
 @stigv2.command(name="sync_findings")
-@click.option(
-    "-p",
-    "--regscale_ssp_id",
-    type=click.INT,
-    help="The ID number from RegScale of the System Security Plan",
-    prompt="Enter RegScale System Security Plan ID",
-    cls=NotRequiredIf,
-    not_required_if=["component_id"],
-)
-@click.option(
-    "-c",
-    "--component_id",
-    type=click.INT,
-    help="The ID number from RegScale of the Component",
-    prompt="Enter RegScale Component ID",
-    cls=NotRequiredIf,
-    not_required_if=["regscale_ssp_id"],
-)
+@ssp_or_component_id()
 @click.option(
     "-d",
     "--stig_directory",

@@ -46,28 +29,14 @@ def sync_findings(regscale_ssp_id, component_id, stig_directory):
     """Sync GCP Findings to RegScale."""
     if component_id:
         StigIntegration.sync_findings(plan_id=component_id, path=stig_directory, is_component=True)
-
+    elif regscale_ssp_id:
         StigIntegration.sync_findings(plan_id=regscale_ssp_id, path=stig_directory, is_component=False)
+    else:
+        raise click.UsageError("Either --regscale_ssp_id or --component_id must be provided.")
 
 
 @stigv2.command(name="sync_assets")
-@click.option(
-    "-p",
-    "--regscale_ssp_id",
-    type=click.INT,
-    help="The ID number from RegScale of the System Security Plan to sync assets to.",
-    cls=NotRequiredIf,
-    not_required_if=["component_id"],
-)
-@click.option(
-    "-c",
-    "--component_id",
-    type=click.INT,
-    help="The ID number from RegScale of the Component to sync assets to.",
-    cls=NotRequiredIf,
-    not_required_if=["regscale_ssp_id"],
-    default=None,
-)
+@ssp_or_component_id()
 @click.option(
     "-d",
     "--stig_directory",
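The new ssp_or_component_id helper lives in regscale/models/app_models/click.py (+49 -1), which this diff view does not expand. Judging from the option blocks it replaces above, it is presumably a small decorator factory that attaches the mutually exclusive --regscale_ssp_id / --component_id options; a minimal sketch under that assumption (option text and the NotRequiredIf class are taken from the removed code, the real helper may differ):

import click


def ssp_or_component_id():
    """Attach mutually exclusive --regscale_ssp_id / --component_id options to a click command."""

    def decorator(func):
        # NotRequiredIf is the existing click.Option subclass already used by the removed option blocks above
        func = click.option(
            "-c",
            "--component_id",
            type=click.INT,
            help="The ID number from RegScale of the Component",
            cls=NotRequiredIf,
            not_required_if=["regscale_ssp_id"],
            default=None,
        )(func)
        func = click.option(
            "-p",
            "--regscale_ssp_id",
            type=click.INT,
            help="The ID number from RegScale of the System Security Plan",
            cls=NotRequiredIf,
            not_required_if=["component_id"],
        )(func)
        return func

    return decorator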
regscale/integrations/commercial/tenablev2/commands.py

@@ -30,7 +30,7 @@ from regscale.integrations.commercial.tenablev2.jsonl_scanner import TenableSCJs
 from regscale.integrations.commercial.tenablev2.sc_scanner import SCIntegration
 from regscale.integrations.commercial.tenablev2.variables import TenableVariables
 from regscale.models import regscale_id, regscale_ssp_id
-from regscale.models.app_models.click import file_types, hidden_file_path, save_output_to
+from regscale.models.app_models.click import file_types, hidden_file_path, save_output_to, ssp_or_component_id
 from regscale.models.regscale_models import SecurityPlan
 
 logger = logging.getLogger("regscale")

@@ -468,7 +468,7 @@ def get_queries() -> list:
     prompt="Enter Tenable query ID",
     required=True,
 )
-@
+@ssp_or_component_id()
 @click.option(
     "--scan_date",
     "-sd",

@@ -476,7 +476,7 @@ def get_queries() -> list:
     help="The scan date of the file.",
     required=False,
 )
-def query_vuln(query_id: int, regscale_ssp_id: int, scan_date: datetime = None):
+def query_vuln(query_id: int, regscale_ssp_id: int, component_id: int, scan_date: datetime = None):
     """Query Tenable SC vulnerabilities and sync assets to RegScale."""
     try:
         # Validate license

@@ -485,7 +485,15 @@ def query_vuln(query_id: int, regscale_ssp_id: int, scan_date: datetime = None):
         console.print("[bold]Starting Tenable SC vulnerability query...[/bold]")
 
         # Use the SCIntegration class method to fetch vulnerabilities by query ID
-
+        if component_id:
+            sc_integration = SCIntegration(plan_id=component_id, scan_date=scan_date, is_component=True)
+        elif regscale_ssp_id:
+            sc_integration = SCIntegration(plan_id=regscale_ssp_id, scan_date=scan_date)
+        else:
+            raise click.UsageError(
+                "You must provide either a --regscale_ssp_id or a --component_id to query Tenable vulnerabilities."
+            )
+
         sc_integration.fetch_vulns_query(query_id=query_id)
 
         console.print("[bold green]Tenable SC vulnerability query complete.[/bold green]")
regscale/integrations/commercial/tenablev2/sc_scanner.py

@@ -61,9 +61,20 @@ class SCIntegration(ScannerIntegration):
         super().__init__(*args, **kwargs)
         self.scan_date = kwargs.get("scan_date")
         self.plan_id = kwargs.get("plan_id")
+        self.is_component = kwargs.get("is_component", False) is True
         self.client = None
         self.closed_count = 0
-        self.batch_size = kwargs.get("batch_size", 1000)
+        self.batch_size = kwargs.get("batch_size", 1000)
+        if self.is_component:
+            from regscale.validation.record import validate_regscale_object
+
+            if validate_regscale_object(
+                parent_id=self.plan_id, parent_module=regscale_models.Component.get_module_string()
+            ):
+                component = regscale_models.Component.get_object(self.plan_id)
+                self.component_title = component.title
+            else:
+                self.component_title = None
 
     def authenticate(self) -> None:
         """Authenticate to Tenable SC."""

@@ -255,6 +266,13 @@ class SCIntegration(ScannerIntegration):
             status="Active (On Network)" if asset.family.type else "Off-Network",
             asset_type="Other",
             asset_category="Hardware",
+            parent_id=self.plan_id,
+            parent_module=(
+                regscale_models.Component.get_module_string()
+                if self.is_component
+                else regscale_models.SecurityPlan.get_module_string()
+            ),
+            component_names=[self.component_title],
         )

     def is_empty(self, file_path: Path) -> bool:

@@ -337,6 +355,7 @@ class SCIntegration(ScannerIntegration):
             plan_id=self.plan_id,
             integration_assets=(asset for sublist in iterables[0] for asset in sublist),
             asset_count=assets_count,
+            is_component=self.is_component,
         )

         # Sync findings

@@ -344,6 +363,7 @@ class SCIntegration(ScannerIntegration):
             plan_id=self.plan_id,
             integration_findings=(finding for sublist in iterables[1] for finding in sublist),
             finding_count=findings_count,
+            is_component=self.is_component,
         )

         logger.info(f"Successfully synced {assets_count} assets and {findings_count} findings")
regscale/integrations/commercial/tenablev2/sync_compliance.py

@@ -55,6 +55,9 @@ def sync_compliance_data(ssp_id: int, catalog_id: int, framework: str, offline:
     failing_controls: Dict = dict()
     for findings in compliance_data:
         asset_check = AssetCheck(**findings)
+        if not asset_check.reference:
+            logger.warning(f"Asset check {asset_check.check_name} has no references, skipping.")
+            continue
         for ref in asset_check.reference:
             if ref.framework not in framework_controls:
                 framework_controls[ref.framework] = []
regscale/integrations/commercial/trivy/commands.py

@@ -19,9 +19,10 @@ def trivy():
 
 @trivy.command("import_scans")
 @FlatFileImporter.common_scanner_options(
-    message="File path to the folder containing
-    prompt="File path for
-    import_name="
+    message="File path to the folder containing Trivy .json files to process to RegScale.",
+    prompt="File path for Trivy files",
+    import_name="trivy",
+    support_component=True,
 )
 @click.option(
     "--destination",

@@ -41,6 +42,7 @@ def import_scans(
     file_pattern: str,
     folder_path: Path,
     regscale_ssp_id: int,
+    component_id: int,
     scan_date: datetime,
     mappings_path: Path,
     disable_mapping: bool,

@@ -56,8 +58,13 @@ def import_scans(
 
     if s3_bucket and not folder_path:
         folder_path = s3_bucket
+
+    if not regscale_ssp_id and not component_id:
+        raise click.UsageError("You must provide either a --regscale_ssp_id or a --component_id to import Trivy scans.")
+
     ti = TrivyIntegration(
-        plan_id=regscale_ssp_id,
+        plan_id=component_id if component_id else regscale_ssp_id,
+        is_component=True if component_id else False,
         file_path=str(folder_path) if folder_path else None,
         s3_bucket=s3_bucket,
         s3_prefix=s3_prefix,
regscale/integrations/commercial/trivy/scanner.py

@@ -53,6 +53,7 @@ class TrivyIntegration(JSONLScannerIntegration):
         self.scan_date = kwargs.get("scan_date") if "scan_date" in kwargs else None
         if self.scan_date:
             self.scan_date = self.clean_scan_date(self.scan_date)
+        self.is_component = kwargs.get("is_component", False)
         super().__init__(*args, **kwargs)
 
     def is_valid_file(self, data: Any, file_path: Union[Path, str]) -> Tuple[bool, Optional[Dict[str, Any]]]:

@@ -128,7 +129,7 @@ class TrivyIntegration(JSONLScannerIntegration):
             notes=f"{os.path.basename(file_path_str)}",
             other_tracking_number=artifact_name,
             parent_id=self.plan_id,
-            parent_module="securityplans",
+            parent_module="securityplans" if not self.is_component else "components",
             fqdn=artifact_name,
         )
 
regscale/integrations/jsonl_scanner_integration.py

@@ -61,6 +61,7 @@ class JSONLScannerIntegration(ScannerIntegration):
 
         # plan_id is required for all integrations
         super().__init__(**kwargs)
+        self.is_component = kwargs.get("is_component", False)
         # Extract S3-related kwargs
         self.s3_bucket = kwargs.get("s3_bucket", None)
         self.s3_prefix = kwargs.get("s3_prefix", "")

@@ -127,7 +128,11 @@ class JSONLScannerIntegration(ScannerIntegration):
         logger.info(f"Creating ScanHistory with scan_date: {scan_date}")
         scan_history = regscale_models.ScanHistory(
             parentId=self.plan_id,
-            parentModule=
+            parentModule=(
+                regscale_models.Component.get_module_string()
+                if self.is_component
+                else regscale_models.SecurityPlan.get_module_string()
+            ),
             scanningTool=self.title,
             scanDate=scan_date,
             createdById=self.assessor_id,

@@ -1255,6 +1260,7 @@ class JSONLScannerIntegration(ScannerIntegration):
             use_jsonl_file=True,
             asset_count=total_assets,
             scan_date=self.scan_date,
+            is_component=self.is_component,
         )

         logger.info("Syncing %d findings to RegScale", total_findings)

@@ -1264,6 +1270,7 @@ class JSONLScannerIntegration(ScannerIntegration):
             use_jsonl_file=True,
             finding_count=total_findings,
             scan_date=self.scan_date,
+            is_component=self.is_component,
         )

         logger.info("Assets and findings sync complete")
regscale/integrations/public/cisa.py

@@ -7,7 +7,7 @@ import logging
 import re
 from concurrent.futures import ALL_COMPLETED, ThreadPoolExecutor, wait
 from datetime import date, datetime
-from typing import List, Optional, Tuple, Any, Dict
+from typing import List, Optional, Tuple, Any, Dict, Union
 from urllib.error import URLError
 from urllib.parse import urlparse
 

@@ -23,6 +23,7 @@ from regscale.core.app.application import Application
 from regscale.core.app.internal.login import is_valid
 from regscale.models import Link, Threat
 from regscale.core.app.utils.app_utils import error_and_exit
+from regscale.utils.string import extract_url
 
 logger = logging.getLogger("regscale")
 console = Console()

@@ -71,22 +72,9 @@ def update_regscale_links(threats: List[Threat]) -> None:
     :rtype: None
     """
 
-    # extract url from html string using regex
-    def extract_url(html: str) -> str:
-        """
-        Extract URL from HTML string
-
-        :param str html: HTML string
-        :return: URL
-        :rtype: str
-        """
-        url = re.findall(r"(?P<url>https?://[^\s]+)", html)
-        return url[0].replace('"', "") if url else None
-
     links = []
     for threat in threats:
-        url
-        if threat.description:
+        if url := extract_url(threat.description):
             link = Link(
                 parentID=threat.id,
                 parentModule="threats",
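The nested extract_url helper removed above is replaced by an import from regscale/utils/string.py (+13 lines), which is not expanded in this diff. Based on the removed implementation, the extracted helper is presumably close to the following sketch (the exact signature and docstring in 6.20.2.0 are assumptions):

import re
from typing import Optional


def extract_url(html: str) -> Optional[str]:
    """
    Extract the first URL from an HTML string.

    :param str html: HTML string
    :return: URL, or None if no URL is found
    :rtype: Optional[str]
    """
    urls = re.findall(r"(?P<url>https?://[^\s]+)", html)
    return urls[0].replace('"', "") if urls else None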
@@ -115,9 +103,8 @@ def process_threats(threats: list[Threat], unique_threats: set[str], reg_threats
             update_dict = threat.dict()
             update_dict = merge_old(update_dict, old_dict)
             update_threats.append(update_dict)  # Update
-
-
-        insert_threats.append(threat.dict())  # Post
+        elif threat:
+            insert_threats.append(threat.dict())  # Post
     return insert_threats, update_threats
 
 

@@ -218,29 +205,29 @@ def build_threat(app: Application, detailed_link: str, short_description: str, t
     :rtype: Threat
     """
     dat = parse_details(detailed_link)
-    [... previous implementation (23 removed lines) not captured in this diff view ...]
+    if not dat:
+        return None
+
+    date_created = dat[0]
+    vulnerability = dat[1]
+    mitigation = dat[2]
+    notes = dat[3]
+
+    return Threat(
+        uuid=Threat.xstr(None),
+        title=title,
+        threatType="Specific",
+        threatOwnerId=app.config["userId"],
+        dateIdentified=date_created,
+        targetType="Other",
+        source="Open Source",
+        description=short_description or f"""<p><a href="{detailed_link}" title="">{detailed_link}</a></p>""",
+        vulnerabilityAnalysis="".join(vulnerability),
+        mitigations="".join(mitigation),
+        notes="".join(notes),
+        dateCreated=date_created,
+        status="Initial Report/Notification",
+    )
 
 
 def filter_elements(element: Tag) -> Optional[Tag]:

@@ -332,7 +319,8 @@ def parse_details(link: str) -> Optional[Tuple[str, list, list, list]]:
     mitigation = []
     notes = []
     detailed_soup = gen_soup(link)
-    date_created
+    if not (date_created := fuzzy_find_date(detailed_soup)):
+        return None
     last_header = None
     last_h3 = None
     nav_string = ""

@@ -357,9 +345,8 @@
         notes.append(DEFAULT_STR)
     if len(mitigation) == 0:
         mitigation.append(DEFAULT_STR)
-
-
-    return None
+
+    return date_created, unique(vulnerability), unique(mitigation), unique(notes)
 
 
 def fuzzy_find_date(detailed_soup: BeautifulSoup, location: int = 2, attempts: int = 0) -> str:

@@ -396,7 +383,7 @@ def fuzzy_find_date(detailed_soup: BeautifulSoup, location: int = 2, attempts: i
     return fuzzy_dt
 
 
-def gen_soup(url: str) -> BeautifulSoup:
+def gen_soup(url: Union[str, Tuple[str, ...]]) -> BeautifulSoup:
     """
     Generate a BeautifulSoup instance for the given URL
 

@@ -404,7 +391,7 @@ def gen_soup(url: str) -> BeautifulSoup:
     :raises: URLError if URL is invalid
     :rtype: BeautifulSoup
     """
-    if isinstance(url,
+    if isinstance(url, tuple):
         url = url[0]
     if is_url(url):
         req = Api().get(url)

@@ -445,7 +432,7 @@ def pull_cisa_kev() -> Dict[Any, Any]:
     result = []
 
     # Get URL from config or use default
-    if "
+    if "cisaKev" in config:
         cisa_url = config["cisaKev"]
     else:
         cisa_url = CISA_KEV_URL

@@ -504,7 +491,7 @@ def update_regscale(data: dict) -> None:
     threats_updated = []
     new_threats = [dat for dat in data["vulnerabilities"] if dat not in matching_threats]
     console.print(f"Found {len(new_threats)} new threats from CISA")
-    if
+    if new_threats:
         for rec in new_threats:
             threat = Threat(
                 uuid=Threat.xstr(None),

@@ -558,20 +545,28 @@ def merge_old(update_vuln: dict, old_vuln: dict) -> dict:
     :return: A merged vulnerability dictionary
     :rtype: dict
     """
-    [... previous implementation (14 removed lines) not captured in this diff view ...]
+    fields_to_preserve = [
+        "id",
+        "uuid",
+        "status",
+        "source",
+        "threatType",
+        "threatOwnerId",
+        "notes",
+        "targetType",
+        "dateCreated",
+        "isPublic",
+        "investigated",
+        "investigationResults",
+    ]
+    merged = update_vuln.copy()
+
+    # Preserve specified fields from old dictionary if they exist
+    for field in fields_to_preserve:
+        if field in old_vuln:
+            merged[field] = old_vuln[field]
+
+    return merged
 
 
 def insert_or_upd_threat(threat: dict, app: Application, threat_id: int = None) -> requests.Response:
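For reference, the rewritten merge_old keeps the old record's identity and triage fields while taking everything else from the update dictionary; a quick illustration with made-up values:

old = {"id": 42, "status": "Under Investigation", "description": "old text"}
new = {"status": "Initial Report/Notification", "description": "updated text"}

merge_old(new, old)
# -> {"status": "Under Investigation", "description": "updated text", "id": 42}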
|