regscale-cli 6.20.10.0__py3-none-any.whl → 6.21.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli might be problematic.
- regscale/_version.py +1 -1
- regscale/core/app/application.py +12 -5
- regscale/core/app/internal/set_permissions.py +58 -27
- regscale/integrations/commercial/__init__.py +1 -2
- regscale/integrations/commercial/amazon/common.py +79 -2
- regscale/integrations/commercial/aws/cli.py +183 -9
- regscale/integrations/commercial/aws/scanner.py +544 -9
- regscale/integrations/commercial/cpe.py +18 -1
- regscale/integrations/commercial/nessus/scanner.py +2 -0
- regscale/integrations/commercial/sonarcloud.py +35 -36
- regscale/integrations/commercial/synqly/ticketing.py +51 -0
- regscale/integrations/commercial/tenablev2/jsonl_scanner.py +2 -1
- regscale/integrations/commercial/wizv2/async_client.py +10 -3
- regscale/integrations/commercial/wizv2/click.py +102 -26
- regscale/integrations/commercial/wizv2/constants.py +249 -1
- regscale/integrations/commercial/wizv2/issue.py +2 -2
- regscale/integrations/commercial/wizv2/parsers.py +3 -2
- regscale/integrations/commercial/wizv2/policy_compliance.py +1858 -0
- regscale/integrations/commercial/wizv2/scanner.py +15 -21
- regscale/integrations/commercial/wizv2/utils.py +258 -85
- regscale/integrations/commercial/wizv2/variables.py +4 -3
- regscale/integrations/compliance_integration.py +1455 -0
- regscale/integrations/integration_override.py +15 -6
- regscale/integrations/public/fedramp/fedramp_five.py +1 -1
- regscale/integrations/public/fedramp/markdown_parser.py +7 -1
- regscale/integrations/scanner_integration.py +193 -37
- regscale/models/app_models/__init__.py +1 -0
- regscale/models/integration_models/amazon_models/inspector_scan.py +32 -57
- regscale/models/integration_models/aqua.py +92 -78
- regscale/models/integration_models/cisa_kev_data.json +117 -5
- regscale/models/integration_models/defenderimport.py +64 -59
- regscale/models/integration_models/ecr_models/ecr.py +100 -147
- regscale/models/integration_models/flat_file_importer/__init__.py +52 -38
- regscale/models/integration_models/ibm.py +29 -47
- regscale/models/integration_models/nexpose.py +156 -68
- regscale/models/integration_models/prisma.py +46 -66
- regscale/models/integration_models/qualys.py +99 -93
- regscale/models/integration_models/snyk.py +229 -158
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/veracode.py +15 -20
- regscale/{integrations/commercial/wizv2/models.py → models/integration_models/wizv2.py} +4 -12
- regscale/models/integration_models/xray.py +276 -82
- regscale/models/regscale_models/control_implementation.py +14 -12
- regscale/models/regscale_models/file.py +4 -0
- regscale/models/regscale_models/issue.py +123 -0
- regscale/models/regscale_models/milestone.py +1 -1
- regscale/models/regscale_models/rbac.py +22 -0
- regscale/models/regscale_models/regscale_model.py +4 -2
- regscale/models/regscale_models/security_plan.py +1 -1
- regscale/utils/graphql_client.py +3 -1
- {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/METADATA +9 -9
- {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/RECORD +64 -60
- tests/fixtures/test_fixture.py +58 -2
- tests/regscale/core/test_app.py +5 -3
- tests/regscale/core/test_version_regscale.py +5 -3
- tests/regscale/integrations/test_integration_mapping.py +522 -40
- tests/regscale/integrations/test_issue_due_date.py +1 -1
- tests/regscale/integrations/test_update_finding_dates.py +336 -0
- tests/regscale/integrations/test_wiz_policy_compliance_affected_controls.py +154 -0
- tests/regscale/models/test_asset.py +406 -50
- {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/top_level.txt +0 -0
@@ -2,8 +2,6 @@
 A simple singleton class that loads custom integration mappings, if available
 """

-from regscale.core.app.application import Application
-
 # pylint: disable=C0415

@@ -74,9 +72,12 @@ class IntegrationOverride:
         :return: The mapped field name
         :rtype: Optional[str]
         """
-        if integration and self.mapping_exists(integration, field_name):
+        if integration and field_name and self.mapping_exists(integration, field_name):
             integration_map = self.mapping.get(integration.lower(), {})
-
+            # Find the actual key that matches case-insensitively
+            for key in integration_map.keys():
+                if key.lower() == field_name.lower():
+                    return integration_map.get(key)
         return None

     def mapping_exists(self, integration: str, field_name: str) -> bool:
@@ -88,8 +89,16 @@ class IntegrationOverride:
         :return: Whether the mapping exists
         :rtype: bool
         """
+        if not integration or not field_name:
+            return False
         the_map = self.mapping.get(integration.lower())
-
+        if not the_map:
+            return False
+        # Find the actual key that matches case-insensitively
+        for key in the_map.keys():
+            if key.lower() == field_name.lower():
+                return the_map.get(key) != "default"
+        return False

     def field_map_validation(self, obj: Any, model_type: str) -> Optional[str]:
         """
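The two hunks above harden IntegrationOverride against None arguments and make the mapping lookup case-insensitive, with a value of "default" treated as "no mapping". A minimal standalone sketch of that lookup pattern follows; the lookup_field helper and the mapping data are illustrative only, not the package's API:

    from typing import Dict, Optional

    def lookup_field(
        mapping: Dict[str, Dict[str, str]], integration: Optional[str], field_name: Optional[str]
    ) -> Optional[str]:
        """Case-insensitive field lookup; 'default' entries count as unmapped."""
        if not integration or not field_name:
            return None
        integration_map = mapping.get(integration.lower(), {})
        for key, value in integration_map.items():
            if key.lower() == field_name.lower() and value != "default":
                return value
        return None

    # Hypothetical mapping data for illustration only
    mapping = {"wiz": {"IPAddress": "primary_ip", "Name": "default"}}
    print(lookup_field(mapping, "Wiz", "ipaddress"))  # -> "primary_ip"
    print(lookup_field(mapping, "Wiz", "name"))       # -> None ("default" means unmapped)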
@@ -131,7 +140,7 @@ class IntegrationOverride:
             },
         }
         # The type an associated fields we are able to override. Limited for now.
-        supported_fields = {"asset": {"ipAddress", "name", "fqdn", "dns"}}
+        supported_fields = {"asset": {"ipAddress", "name", "fqdn", "dns"}, "issue": {"dateFirstDetected"}}
         if regscale_field not in supported_fields.get(model_type.lower(), set()):
             return match

@@ -1764,7 +1764,7 @@ def update_existing_control(

     # Convert the model to a dict and back to a model to workaround these odd 400 errors.
     try:
-
+        control.save()
     except Exception as e:
         logger.warning(f"Error updating control: {control.id} - {e}")

@@ -7,6 +7,7 @@ import re
 import logging
 import zipfile # Assuming you need this for other file handling
 import pypandoc
+import re
 from collections import defaultdict
 from typing import Dict, TextIO, Optional, Tuple
 from regscale.models import ProfileMapping
@@ -108,7 +109,12 @@ class MDDocParser:
         """
         # Extract control ID and clean it
         html_free_line = self.clean_html_and_newlines(line)
-
+        # Use regex to find "what" case-insensitively and split
+        pattern = re.compile(r"what", re.IGNORECASE)
+        if pattern.search(html_free_line):
+            clean_line = pattern.split(html_free_line)[0].strip()
+        else:
+            clean_line = html_free_line
         if not clean_line:
             return None
         clean_control_id_from_line = clean_line.strip()
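The MDDocParser hunk above replaces a literal split with a case-insensitive regex, so a control heading keeps only the text before the word "what" no matter how it is capitalized. A short sketch of the same pattern; the sample line is invented:

    import re

    pattern = re.compile(r"what", re.IGNORECASE)
    line = "AC-2 WHAT is the solution and how is it implemented?"
    clean_line = pattern.split(line)[0].strip() if pattern.search(line) else line
    print(clean_line)  # -> "AC-2"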
@@ -28,7 +28,7 @@ from regscale.integrations.commercial.durosuite.variables import DuroSuiteVariab
 from regscale.integrations.commercial.stig_mapper_integration.mapping_engine import StigMappingEngine as STIGMapper
 from regscale.integrations.public.cisa import pull_cisa_kev
 from regscale.integrations.variables import ScannerVariables
-from regscale.models import DateTimeEncoder, OpenIssueDict, regscale_models
+from regscale.models import DateTimeEncoder, OpenIssueDict, Property, regscale_models
 from regscale.utils.threading import ThreadSafeDict, ThreadSafeList

 logger = logging.getLogger(__name__)
@@ -407,7 +407,7 @@ class IntegrationFinding:
     issue_type: str = "Risk"
     date_created: str = dataclasses.field(default_factory=get_current_datetime)
     date_last_updated: str = dataclasses.field(default_factory=get_current_datetime)
-    due_date: str = dataclasses.field(default_factory=lambda: date_str(days_from_today(60)))
+    due_date: str = ""  # dataclasses.field(default_factory=lambda: date_str(days_from_today(60)))
     external_id: str = ""
     gaps: str = ""
     observations: str = ""
@@ -423,6 +423,9 @@ class IntegrationFinding:
     planned_milestone_changes: Optional[str] = None
     adjusted_risk_rating: Optional[str] = None
     risk_adjustment: str = "No"
+
+    # Compliance fields
+    assessment_id: Optional[int] = None
     operational_requirements: Optional[str] = None
     deviation_rationale: Optional[str] = None
     is_cwe: bool = False
@@ -456,13 +459,16 @@ class IntegrationFinding:
     source_rule_id: Optional[str] = None
     vulnerability_type: Optional[str] = None

-    #
+    # CoalFire POAM
     basis_for_adjustment: Optional[str] = None
     poam_id: Optional[str] = None

     # Additional fields from Wiz integration
     vpr_score: Optional[float] = None

+    # Extra data field for miscellaneous data
+    extra_data: Dict[str, Any] = dataclasses.field(default_factory=dict)
+
     def __post_init__(self):
         """Validate and adjust types after initialization."""
         # Set default date values if empty
@@ -1609,6 +1615,23 @@ class ScannerIntegration(ABC):
         issue.securityPlanId = self.plan_id if not self.is_component else None
         issue.identification = finding.identification
         issue.dateFirstDetected = finding.first_seen
+        # Ensure a due date is always set using configured policy defaults (e.g., FedRAMP)
+        if not finding.due_date:
+            try:
+                base_created = finding.date_created or issue.dateCreated
+                finding.due_date = issue_due_date(
+                    severity=finding.severity,
+                    created_date=base_created,
+                    title=self.title,
+                )
+            except Exception:
+                # Final fallback to a Low severity default if anything goes wrong
+                base_created = finding.date_created or issue.dateCreated
+                finding.due_date = issue_due_date(
+                    severity=regscale_models.IssueSeverity.Low,
+                    created_date=base_created,
+                    title=self.title,
+                )
         issue.dueDate = finding.due_date
         issue.description = description
         issue.sourceReport = finding.source_report or self.title
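The hunk above guarantees every issue receives a due date: when the finding arrives without one it is derived from severity and creation date via issue_due_date, and a Low-severity default is used if that lookup raises. The sketch below mirrors only the shape of that fallback; the DAYS_BY_SEVERITY table and fallback_due_date helper are hypothetical stand-ins for the package's configuration-driven logic:

    from datetime import datetime, timedelta

    # Hypothetical severity-to-days table; the real windows come from CLI configuration (e.g., FedRAMP)
    DAYS_BY_SEVERITY = {"Critical": 30, "High": 30, "Moderate": 90, "Low": 180}

    def fallback_due_date(severity: str, created: str) -> str:
        """Creation date plus a severity-based remediation window."""
        days = DAYS_BY_SEVERITY.get(severity, DAYS_BY_SEVERITY["Low"])
        created_dt = datetime.strptime(created, "%Y-%m-%d %H:%M:%S")
        return (created_dt + timedelta(days=days)).strftime("%Y-%m-%d %H:%M:%S")

    print(fallback_due_date("High", "2025-01-01 00:00:00"))  # -> 2025-01-31 00:00:00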
@@ -1619,15 +1642,21 @@ class ScannerIntegration(ABC):
         issue.integrationFindingId = self.get_finding_identifier(finding)
         issue.poamComments = finding.poam_comments
         issue.cve = finding.cve
+        issue.assessmentId = finding.assessment_id
         control_id = self.get_control_implementation_id_for_cci(finding.cci_ref) if finding.cci_ref else None
         issue.controlId = control_id  # TODO REMOVE
         # Add the control implementation ids and the cci ref if it exists
         # Get control implementation ID for CCI if it exists
         # Only add CCI control ID if it exists
         cci_control_ids = [control_id] if control_id is not None else []
-
+        # Ensure failed control labels (e.g., AC-4(21)) are present in affectedControls
+        if finding.affected_controls:
+            issue.affectedControls = finding.affected_controls
+        elif finding.control_labels:
+            issue.affectedControls = ", ".join(sorted({cl for cl in finding.control_labels if cl}))

         issue.controlImplementationIds = list(set(finding._control_implementation_ids + cci_control_ids))  # noqa
+        # Always ensure isPoam reflects current settings, even when updating existing issues
         issue.isPoam = is_poam
         issue.basisForAdjustment = (
             finding.basis_for_adjustment if finding.basis_for_adjustment else f"{self.title} import"
@@ -1644,17 +1673,22 @@ class ScannerIntegration(ABC):
         issue.operationalRequirement = finding.operational_requirements
         issue.deviationRationale = finding.deviation_rationale
         issue.dateLastUpdated = get_current_datetime()
+        ## set affected controls if they exist
+        issue.affectedControls = finding.affected_controls

         if finding.cve:
-            issue = self.
+            issue = self.lookup_kev_and_update_issue(cve=finding.cve, issue=issue, cisa_kevs=self._kev_data)

         if existing_issue:
-            logger.debug("Saving
+            logger.debug("Saving Old Issue: %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
             issue.save(bulk=True)
+            logger.debug("Saved existing issue %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
+
         else:
             issue = issue.create_or_update(
                 bulk_update=True, defaults={"otherIdentifier": self._get_other_identifier(finding, is_poam)}
             )
+        self.extra_data_to_properties(finding, issue.id)

         self._handle_property_and_milestone_creation(issue, finding, existing_issue)
         return issue
@@ -1730,6 +1764,35 @@ class ScannerIntegration(ABC):
         else:
             logger.debug("No milestone created for issue %s from finding %s", issue.id, finding.external_id)

+    @staticmethod
+    def extra_data_to_properties(finding: IntegrationFinding, issue_id: int) -> None:
+        """
+        Adds extra data to properties for an issue in a separate thread
+
+        :param IntegrationFinding finding: The finding data
+        :param int issue_id: The ID of the issue
+        :rtype: None
+        """
+
+        def _create_property():
+            """Create the property in a separate thread"""
+            if not finding.extra_data:
+                return
+            try:
+                Property(
+                    key="source_file_path",
+                    value=finding.extra_data.get("source_file_path"),
+                    parentId=issue_id,
+                    parentModule="issues",
+                ).create()
+            except Exception as exc:
+                # Log any errors that occur in the thread
+                logger.error(f"Error creating property for issue {issue_id}: {exc}")
+
+        # Start the property creation in a separate thread
+        thread = threading.Thread(target=_create_property, daemon=True)
+        thread.start()
+
     @staticmethod
     def get_consolidated_asset_identifier(
         finding: IntegrationFinding,
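The new extra_data_to_properties method above runs a single property create on a daemon thread so the main issue-processing loop is never blocked by one extra REST call. A generic sketch of that fire-and-forget pattern; save_property below is a hypothetical stand-in for the real Property(...).create() call:

    import logging
    import threading

    logger = logging.getLogger(__name__)

    def save_property(key: str, value: str, parent_id: int) -> None:
        """Stand-in for the real API call, assumed for illustration."""
        logger.info("Saved %s=%s for parent %d", key, value, parent_id)

    def save_in_background(extra_data: dict, issue_id: int) -> None:
        def _worker() -> None:
            if not extra_data:
                return
            try:
                save_property("source_file_path", extra_data.get("source_file_path"), issue_id)
            except Exception as exc:  # errors are logged, never raised into the caller
                logger.error("Error creating property for issue %d: %s", issue_id, exc)

        # Daemon thread: the process will not wait for it on exit, so the work is best-effort
        threading.Thread(target=_worker, daemon=True).start()

    save_in_background({"source_file_path": "/tmp/scan.json"}, 42)

The daemon flag is a trade-off: shutdown is never delayed, but a property created in the final moments of a run can be lost.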
@@ -1776,7 +1839,7 @@ class ScannerIntegration(ABC):
         return None

     @staticmethod
-    def
+    def lookup_kev_and_update_issue(
         cve: str, issue: regscale_models.Issue, cisa_kevs: Optional[ThreadSafeDict[str, Any]] = None
     ) -> regscale_models.Issue:
         """
@@ -1804,7 +1867,14 @@ class ScannerIntegration(ABC):
             None,
         )
         if kev_data:
-            issue
+            # If kev due date is before the issue date created, add the difference to the date created
+            calculated_due_date = ScannerIntegration._calculate_kev_due_date(kev_data, issue.dateCreated)
+            if calculated_due_date:
+                issue.dueDate = calculated_due_date
+            else:
+                issue.dueDate = convert_datetime_to_regscale_string(
+                    datetime.strptime(kev_data["dueDate"], "%Y-%m-%d")
+                )
             issue.kevList = "Yes"

         return issue
@@ -2656,6 +2726,9 @@ class ScannerIntegration(ABC):
         """
         # Do not close issues from other tools
         if issue.sourceReport != self.title:
+            logger.debug(
+                "Skipping issue %d from different source: %s (expected: %s)", issue.id, issue.sourceReport, self.title
+            )
             return False

         # If the issue has a vulnerability ID, check if it's still current for any asset
@@ -2667,14 +2740,19 @@ class ScannerIntegration(ABC):

         # Check if the issue's vulnerability is still current for any asset
         # If it is, we shouldn't close the issue
-
-            mapping.assetId in current_vulnerabilities
-
-
-
-
+        for mapping in vuln_mappings:
+            if mapping.assetId in current_vulnerabilities:
+                if issue.vulnerabilityId in current_vulnerabilities[mapping.assetId]:
+                    logger.debug(
+                        "Issue %d has current vulnerability %d for asset %d",
+                        issue.id,
+                        issue.vulnerabilityId,
+                        mapping.assetId,
+                    )
+                    return False

         # If we've checked all conditions and found no current vulnerabilities, we should close it
+        logger.debug("Issue %d has no current vulnerabilities, marking for closure", issue.id)
         return True

     @staticmethod
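The rewritten closure check above walks the issue's vulnerability mappings and keeps the issue open if any mapped asset still reports that vulnerability ID. A minimal sketch of the underlying membership test, with made-up asset and vulnerability IDs:

    # asset id -> set of vulnerability ids still reported by the current scan (illustrative data)
    current_vulnerabilities = {101: {5001, 5002}, 102: {5003}}

    def should_close(vulnerability_id: int, asset_ids: list) -> bool:
        """Close only if no mapped asset still reports this vulnerability."""
        for asset_id in asset_ids:
            if vulnerability_id in current_vulnerabilities.get(asset_id, set()):
                return False
        return True

    print(should_close(5002, [101, 102]))  # False: asset 101 still reports it
    print(should_close(5999, [101, 102]))  # True: no asset reports it any more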
@@ -2957,6 +3035,65 @@ class ScannerIntegration(ABC):
             finding.first_seen = self.scan_date
         return existing_vuln

+    def _update_first_seen_date(
+        self, finding: IntegrationFinding, existing_vuln: Optional[regscale_models.VulnerabilityMapping]
+    ) -> None:
+        """
+        Update the first_seen date based on existing vulnerability mapping or scan date.
+
+        :param IntegrationFinding finding: The integration finding
+        :param Optional[regscale_models.VulnerabilityMapping] existing_vuln: The existing vulnerability mapping
+        :return: None
+        :rtype: None
+        """
+        if existing_vuln and existing_vuln.firstSeen:
+            finding.first_seen = existing_vuln.firstSeen
+        elif not finding.first_seen:
+            finding.first_seen = self.scan_date
+
+    def _update_date_created(self, finding: IntegrationFinding, issue: Optional[regscale_models.Issue]) -> None:
+        """
+        Update the date_created based on issue or scan date.
+
+        :param IntegrationFinding finding: The integration finding
+        :param Optional[regscale_models.Issue] issue: The existing issue
+        :return: None
+        :rtype: None
+        """
+        if issue and issue.dateFirstDetected:
+            finding.date_created = issue.dateFirstDetected
+        elif not finding.date_created:
+            finding.date_created = self.scan_date
+
+    def _update_due_date(self, finding: IntegrationFinding) -> None:
+        """
+        Update the due_date based on severity and configuration.
+
+        :param IntegrationFinding finding: The integration finding
+        :return: None
+        :rtype: None
+        """
+        finding.due_date = issue_due_date(
+            severity=finding.severity,
+            created_date=finding.date_created or self.scan_date,
+            title=self.title,
+            config=self.app.config,
+        )
+
+    def _update_last_seen_date(self, finding: IntegrationFinding) -> None:
+        """
+        Update the last_seen date if scan date is after first_seen.
+
+        :param IntegrationFinding finding: The integration finding
+        :return: None
+        :rtype: None
+        """
+        scan_date = date_obj(self.scan_date)
+        first_seen = date_obj(finding.first_seen)
+
+        if scan_date and first_seen and scan_date >= first_seen:
+            finding.last_seen = self.scan_date
+
     def update_finding_dates(
         self,
         finding: IntegrationFinding,
@@ -2972,28 +3109,17 @@ class ScannerIntegration(ABC):
         :return: The updated integration finding
         :rtype: IntegrationFinding
         """
-        if
-        if
-
-
-
-
-
-
-
-
-
-                title=self.title,
-                config=self.app.config,
-            )
-        else:
-            finding.first_seen = existing_vuln.firstSeen if existing_vuln else finding.first_seen
-            if issue:
-                finding.date_created = issue.dateFirstDetected or finding.date_created
-        scan_date = date_obj(self.scan_date)
-        first_seen = date_obj(finding.first_seen)
-        if scan_date and first_seen and scan_date >= first_seen:
-            finding.last_seen = self.scan_date
+        if finding.due_date:
+            # If due_date is already set, only update last_seen if needed
+            self._update_last_seen_date(finding)
+            return finding
+
+        # Update dates for new findings
+        self._update_first_seen_date(finding, existing_vuln)
+        self._update_date_created(finding, issue)
+        self._update_due_date(finding)
+        self._update_last_seen_date(finding)
+
         return finding

     def update_scan(self, scan_history: regscale_models.ScanHistory) -> None:
@@ -3047,7 +3173,9 @@ class ScannerIntegration(ABC):
         :rtype: None
         """
         # Method is deprecated - using update_control_implementation_status_after_close instead
-
+        logger.warning(
+            "update_control_implementation_status is deprecated - using update_control_implementation_status_after_close instead"
+        )

     def update_regscale_checklists(self, findings: List[IntegrationFinding]) -> int:
         """
@@ -3130,3 +3258,31 @@ class ScannerIntegration(ABC):
             impact=finding.impact,
             recommendationForMitigation=finding.recommendation_for_mitigation,
         ).create()
+
+    @staticmethod
+    def _calculate_kev_due_date(kev_data: dict, issue_date_created: str) -> Optional[str]:
+        """
+        Calculate the due date for a KEV issue based on the difference between
+        KEV due date and date added, then add that difference to the issue creation date.
+
+        :param dict kev_data: KEV data containing dueDate and dateAdded
+        :param str issue_date_created: The issue creation date string
+        :return: Calculated due date as a RegScale formatted string or None
+        :rtype: Optional[str]
+        """
+        from datetime import datetime
+
+        from regscale.core.app.utils.app_utils import convert_datetime_to_regscale_string
+
+        if datetime.strptime(kev_data["dueDate"], "%Y-%m-%d") < datetime.strptime(
+            issue_date_created, "%Y-%m-%d %H:%M:%S"
+        ):
+            # diff kev due date and kev dateAdded
+            diff = datetime.strptime(kev_data["dueDate"], "%Y-%m-%d") - datetime.strptime(
+                kev_data["dateAdded"], "%Y-%m-%d"
+            )
+            # add diff to issue.dateCreated
+            return convert_datetime_to_regscale_string(
+                datetime.strptime(issue_date_created, "%Y-%m-%d %H:%M:%S") + diff
+            )
+        return None
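_calculate_kev_due_date above only re-bases the due date when the CISA KEV deadline already precedes the issue's creation date; in that case it reuses the KEV's original remediation window (dueDate minus dateAdded) counted from the issue's creation. A worked example of that arithmetic with made-up KEV values, standard library only:

    from datetime import datetime

    # Illustrative KEV entry: a 21-day remediation window that expired long ago
    kev_data = {"dateAdded": "2024-03-01", "dueDate": "2024-03-22"}
    issue_created = "2025-01-10 08:30:00"

    kev_due = datetime.strptime(kev_data["dueDate"], "%Y-%m-%d")
    created = datetime.strptime(issue_created, "%Y-%m-%d %H:%M:%S")

    if kev_due < created:
        window = kev_due - datetime.strptime(kev_data["dateAdded"], "%Y-%m-%d")
        print(created + window)  # 2025-01-31 08:30:00, the same 21-day window re-based on creation
    else:
        print(kev_due)  # KEV deadline is still ahead of the issue, so it is used as-is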
@@ -3,18 +3,17 @@ AWS Inspector Scan information
 """

 import re
-from typing import List, Optional, Tuple
-
 from pathlib import Path
+from typing import List, Optional, Tuple

 from regscale.core.app.application import Application
 from regscale.core.app.logz import create_logger
-from regscale.core.app.utils.app_utils import
+from regscale.core.app.utils.app_utils import is_valid_fqdn
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
 from regscale.models import ImportValidater
 from regscale.models.integration_models.amazon_models.inspector import InspectorRecord
 from regscale.models.integration_models.flat_file_importer import FlatFileImporter
-from regscale.models.regscale_models
-from regscale.models.regscale_models.vulnerability import Vulnerability
+from regscale.models.regscale_models import Asset, AssetStatus, IssueStatus, Vulnerability


 class InspectorScan(FlatFileImporter):
@@ -101,26 +100,17 @@ class InspectorScan(FlatFileImporter):
         # Container Image, Virtual Machine (VM), etc.
         asset_type = self.amazon_type_map().get(dat.resource_type, "Other")

-        return
-
-
-
-
-
-
-
-
-
-
-            "scanningTool": self.name,
-            "assetOwnerId": self.config["userId"],
-            "assetType": asset_type,
-            "fqdn": hostname if is_valid_fqdn(hostname) else None,
-            "operatingSystem": Asset.find_os(distro),
-            "systemAdministratorId": self.config["userId"],
-            "parentId": self.attributes.parent_id,
-            "parentModule": self.attributes.parent_module,
-        }
+        return IntegrationAsset(
+            identifier=hostname,
+            name=hostname,
+            ip_address="0.0.0.0",
+            cpu=0,
+            ram=0,
+            status=AssetStatus.Active.value,
+            asset_type=asset_type,
+            asset_category="Software",
+            operating_system=Asset.find_os(distro),
+            fqdn=hostname if is_valid_fqdn(hostname) else None,
         )

     def create_vuln(self, dat: Optional[InspectorRecord] = None, **kwargs) -> Optional[Vulnerability]:
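create_asset now returns an IntegrationAsset instead of a raw field dictionary, which hands deduplication and upserts over to the shared scanner pipeline. A hedged construction sketch that mirrors the keyword arguments used in the hunk above; it assumes regscale-cli is installed, and the hostname and operating system values are invented:

    from regscale.integrations.scanner_integration import IntegrationAsset
    from regscale.models.regscale_models import AssetStatus

    asset = IntegrationAsset(
        identifier="ip-10-0-0-12.ec2.internal",  # invented hostname
        name="ip-10-0-0-12.ec2.internal",
        ip_address="0.0.0.0",
        cpu=0,
        ram=0,
        status=AssetStatus.Active.value,
        asset_type="Virtual Machine (VM)",
        asset_category="Software",
        operating_system="Amazon Linux 2",       # invented OS string
        fqdn=None,
    )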
@@ -132,44 +122,29 @@ class InspectorScan(FlatFileImporter):
         :rtype: Optional[Vulnerability]
         """
         hostname = dat.resource_id
-        distro = dat.platform
         cve: str = dat.vulnerability_id
         description: str = dat.description
         title = dat.title if dat.title else dat.description
         aws_severity = dat.severity
         severity = self.severity_mapper(aws_severity)
-
-
-
-        if dat and asset_match:
-            return Vulnerability(
-                id=0,
-                scanId=0,  # set later
-                parentId=asset.id,
-                parentModule="assets",
-                ipAddress="0.0.0.0",  # No ip address available
-                lastSeen=dat.last_seen,
-                firstSeen=dat.first_seen,
-                daysOpen=None,
-                dns=hostname,
-                mitigated=None,
-                operatingSystem=(Asset.find_os(distro) if Asset.find_os(distro) else None),
-                severity=severity,
-                plugInName=dat.title,
-                plugInId=self.convert_cve_string_to_int(dat.vulnerability_id),
-                cve=cve,
-                vprScore=None,
-                tenantsId=0,
-                title=f"{description} on asset {asset.name}",
+        if dat:
+            return IntegrationFinding(
+                title=title,
                 description=description,
-
-
-
-
-
-
-
-
+                severity=self.determine_severity(severity),
+                status=IssueStatus.Open.value,
+                ip_address="0.0.0.0",
+                plugin_text=title,
+                plugin_name=dat.title,
+                plugin_id=self.convert_cve_string_to_int(dat.vulnerability_id),
+                asset_identifier=hostname,
+                remediation=dat.remediation,
+                cve=cve,
+                first_seen=dat.first_seen,
+                last_seen=dat.last_seen,
+                scan_date=self.scan_date,
+                category="Software",
+                control_labels=[],
             )
         return None
