regscale-cli 6.20.10.0__py3-none-any.whl → 6.21.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of regscale-cli might be problematic. See the registry's advisory page for more details.

Files changed (37)
  1. regscale/_version.py +1 -1
  2. regscale/core/app/application.py +12 -5
  3. regscale/core/app/internal/set_permissions.py +58 -27
  4. regscale/integrations/commercial/nessus/scanner.py +2 -0
  5. regscale/integrations/commercial/sonarcloud.py +35 -36
  6. regscale/integrations/commercial/synqly/ticketing.py +51 -0
  7. regscale/integrations/integration_override.py +15 -6
  8. regscale/integrations/scanner_integration.py +163 -35
  9. regscale/models/integration_models/amazon_models/inspector_scan.py +32 -57
  10. regscale/models/integration_models/aqua.py +92 -78
  11. regscale/models/integration_models/cisa_kev_data.json +47 -4
  12. regscale/models/integration_models/defenderimport.py +64 -59
  13. regscale/models/integration_models/ecr_models/ecr.py +100 -147
  14. regscale/models/integration_models/flat_file_importer/__init__.py +52 -38
  15. regscale/models/integration_models/ibm.py +29 -47
  16. regscale/models/integration_models/nexpose.py +156 -68
  17. regscale/models/integration_models/prisma.py +46 -66
  18. regscale/models/integration_models/qualys.py +99 -93
  19. regscale/models/integration_models/snyk.py +229 -158
  20. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  21. regscale/models/integration_models/veracode.py +15 -20
  22. regscale/models/integration_models/xray.py +276 -82
  23. regscale/models/regscale_models/control_implementation.py +14 -12
  24. regscale/models/regscale_models/milestone.py +1 -1
  25. regscale/models/regscale_models/rbac.py +22 -0
  26. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.0.0.dist-info}/METADATA +1 -1
  27. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.0.0.dist-info}/RECORD +37 -36
  28. tests/fixtures/test_fixture.py +58 -2
  29. tests/regscale/core/test_app.py +5 -3
  30. tests/regscale/integrations/test_integration_mapping.py +522 -40
  31. tests/regscale/integrations/test_issue_due_date.py +1 -1
  32. tests/regscale/integrations/test_update_finding_dates.py +336 -0
  33. tests/regscale/models/test_asset.py +406 -50
  34. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.0.0.dist-info}/LICENSE +0 -0
  35. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.0.0.dist-info}/WHEEL +0 -0
  36. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.0.0.dist-info}/entry_points.txt +0 -0
  37. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.0.0.dist-info}/top_level.txt +0 -0
@@ -28,7 +28,7 @@ from regscale.integrations.commercial.durosuite.variables import DuroSuiteVariab
28
28
  from regscale.integrations.commercial.stig_mapper_integration.mapping_engine import StigMappingEngine as STIGMapper
29
29
  from regscale.integrations.public.cisa import pull_cisa_kev
30
30
  from regscale.integrations.variables import ScannerVariables
31
- from regscale.models import DateTimeEncoder, OpenIssueDict, regscale_models
31
+ from regscale.models import DateTimeEncoder, OpenIssueDict, Property, regscale_models
32
32
  from regscale.utils.threading import ThreadSafeDict, ThreadSafeList
33
33
 
34
34
  logger = logging.getLogger(__name__)
@@ -407,7 +407,7 @@ class IntegrationFinding:
407
407
  issue_type: str = "Risk"
408
408
  date_created: str = dataclasses.field(default_factory=get_current_datetime)
409
409
  date_last_updated: str = dataclasses.field(default_factory=get_current_datetime)
410
- due_date: str = dataclasses.field(default_factory=lambda: date_str(days_from_today(60)))
410
+ due_date: str = "" # dataclasses.field(default_factory=lambda: date_str(days_from_today(60)))
411
411
  external_id: str = ""
412
412
  gaps: str = ""
413
413
  observations: str = ""
@@ -463,6 +463,9 @@ class IntegrationFinding:
463
463
  # Additional fields from Wiz integration
464
464
  vpr_score: Optional[float] = None
465
465
 
466
+ # Extra data field for miscellaneous data
467
+ extra_data: Dict[str, Any] = dataclasses.field(default_factory=dict)
468
+
466
469
  def __post_init__(self):
467
470
  """Validate and adjust types after initialization."""
468
471
  # Set default date values if empty
@@ -1646,15 +1649,18 @@ class ScannerIntegration(ABC):
1646
1649
  issue.dateLastUpdated = get_current_datetime()
1647
1650
 
1648
1651
  if finding.cve:
1649
- issue = self.lookup_kev_and_upate_issue(cve=finding.cve, issue=issue, cisa_kevs=self._kev_data)
1652
+ issue = self.lookup_kev_and_update_issue(cve=finding.cve, issue=issue, cisa_kevs=self._kev_data)
1650
1653
 
1651
1654
  if existing_issue:
1652
- logger.debug("Saving existing issue %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
1655
+ logger.debug("Saving Old Issue: %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
1653
1656
  issue.save(bulk=True)
1657
+ logger.debug("Saved existing issue %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
1658
+
1654
1659
  else:
1655
1660
  issue = issue.create_or_update(
1656
1661
  bulk_update=True, defaults={"otherIdentifier": self._get_other_identifier(finding, is_poam)}
1657
1662
  )
1663
+ self.extra_data_to_properties(finding, issue.id)
1658
1664
 
1659
1665
  self._handle_property_and_milestone_creation(issue, finding, existing_issue)
1660
1666
  return issue
@@ -1730,6 +1736,35 @@ class ScannerIntegration(ABC):
1730
1736
  else:
1731
1737
  logger.debug("No milestone created for issue %s from finding %s", issue.id, finding.external_id)
1732
1738
 
1739
+ @staticmethod
1740
+ def extra_data_to_properties(finding: IntegrationFinding, issue_id: int) -> None:
1741
+ """
1742
+ Adds extra data to properties for an issue in a separate thread
1743
+
1744
+ :param IntegrationFinding finding: The finding data
1745
+ :param int issue_id: The ID of the issue
1746
+ :rtype: None
1747
+ """
1748
+
1749
+ def _create_property():
1750
+ """Create the property in a separate thread"""
1751
+ if not finding.extra_data:
1752
+ return
1753
+ try:
1754
+ Property(
1755
+ key="source_file_path",
1756
+ value=finding.extra_data.get("source_file_path"),
1757
+ parentId=issue_id,
1758
+ parentModule="issues",
1759
+ ).create()
1760
+ except Exception as exc:
1761
+ # Log any errors that occur in the thread
1762
+ logger.error(f"Error creating property for issue {issue_id}: {exc}")
1763
+
1764
+ # Start the property creation in a separate thread
1765
+ thread = threading.Thread(target=_create_property, daemon=True)
1766
+ thread.start()
1767
+
1733
1768
  @staticmethod
1734
1769
  def get_consolidated_asset_identifier(
1735
1770
  finding: IntegrationFinding,
@@ -1776,7 +1811,7 @@ class ScannerIntegration(ABC):
1776
1811
  return None
1777
1812
 
1778
1813
  @staticmethod
1779
- def lookup_kev_and_upate_issue(
1814
+ def lookup_kev_and_update_issue(
1780
1815
  cve: str, issue: regscale_models.Issue, cisa_kevs: Optional[ThreadSafeDict[str, Any]] = None
1781
1816
  ) -> regscale_models.Issue:
1782
1817
  """
@@ -1804,7 +1839,14 @@ class ScannerIntegration(ABC):
1804
1839
  None,
1805
1840
  )
1806
1841
  if kev_data:
1807
- issue.dueDate = convert_datetime_to_regscale_string(datetime.strptime(kev_data["dueDate"], "%Y-%m-%d"))
1842
+ # If kev due date is before the issue date created, add the difference to the date created
1843
+ calculated_due_date = ScannerIntegration._calculate_kev_due_date(kev_data, issue.dateCreated)
1844
+ if calculated_due_date:
1845
+ issue.dueDate = calculated_due_date
1846
+ else:
1847
+ issue.dueDate = convert_datetime_to_regscale_string(
1848
+ datetime.strptime(kev_data["dueDate"], "%Y-%m-%d")
1849
+ )
1808
1850
  issue.kevList = "Yes"
1809
1851
 
1810
1852
  return issue
@@ -2656,6 +2698,9 @@ class ScannerIntegration(ABC):
2656
2698
  """
2657
2699
  # Do not close issues from other tools
2658
2700
  if issue.sourceReport != self.title:
2701
+ logger.debug(
2702
+ "Skipping issue %d from different source: %s (expected: %s)", issue.id, issue.sourceReport, self.title
2703
+ )
2659
2704
  return False
2660
2705
 
2661
2706
  # If the issue has a vulnerability ID, check if it's still current for any asset
@@ -2667,14 +2712,19 @@ class ScannerIntegration(ABC):
2667
2712
 
2668
2713
  # Check if the issue's vulnerability is still current for any asset
2669
2714
  # If it is, we shouldn't close the issue
2670
- if any(
2671
- mapping.assetId in current_vulnerabilities
2672
- and issue.vulnerabilityId in current_vulnerabilities[mapping.assetId]
2673
- for mapping in vuln_mappings
2674
- ):
2675
- return False
2715
+ for mapping in vuln_mappings:
2716
+ if mapping.assetId in current_vulnerabilities:
2717
+ if issue.vulnerabilityId in current_vulnerabilities[mapping.assetId]:
2718
+ logger.debug(
2719
+ "Issue %d has current vulnerability %d for asset %d",
2720
+ issue.id,
2721
+ issue.vulnerabilityId,
2722
+ mapping.assetId,
2723
+ )
2724
+ return False
2676
2725
 
2677
2726
  # If we've checked all conditions and found no current vulnerabilities, we should close it
2727
+ logger.debug("Issue %d has no current vulnerabilities, marking for closure", issue.id)
2678
2728
  return True
2679
2729
 
2680
2730
  @staticmethod
@@ -2957,6 +3007,65 @@ class ScannerIntegration(ABC):
2957
3007
  finding.first_seen = self.scan_date
2958
3008
  return existing_vuln
2959
3009
 
3010
+ def _update_first_seen_date(
3011
+ self, finding: IntegrationFinding, existing_vuln: Optional[regscale_models.VulnerabilityMapping]
3012
+ ) -> None:
3013
+ """
3014
+ Update the first_seen date based on existing vulnerability mapping or scan date.
3015
+
3016
+ :param IntegrationFinding finding: The integration finding
3017
+ :param Optional[regscale_models.VulnerabilityMapping] existing_vuln: The existing vulnerability mapping
3018
+ :return: None
3019
+ :rtype: None
3020
+ """
3021
+ if existing_vuln and existing_vuln.firstSeen:
3022
+ finding.first_seen = existing_vuln.firstSeen
3023
+ elif not finding.first_seen:
3024
+ finding.first_seen = self.scan_date
3025
+
3026
+ def _update_date_created(self, finding: IntegrationFinding, issue: Optional[regscale_models.Issue]) -> None:
3027
+ """
3028
+ Update the date_created based on issue or scan date.
3029
+
3030
+ :param IntegrationFinding finding: The integration finding
3031
+ :param Optional[regscale_models.Issue] issue: The existing issue
3032
+ :return: None
3033
+ :rtype: None
3034
+ """
3035
+ if issue and issue.dateFirstDetected:
3036
+ finding.date_created = issue.dateFirstDetected
3037
+ elif not finding.date_created:
3038
+ finding.date_created = self.scan_date
3039
+
3040
+ def _update_due_date(self, finding: IntegrationFinding) -> None:
3041
+ """
3042
+ Update the due_date based on severity and configuration.
3043
+
3044
+ :param IntegrationFinding finding: The integration finding
3045
+ :return: None
3046
+ :rtype: None
3047
+ """
3048
+ finding.due_date = issue_due_date(
3049
+ severity=finding.severity,
3050
+ created_date=finding.date_created or self.scan_date,
3051
+ title=self.title,
3052
+ config=self.app.config,
3053
+ )
3054
+
3055
+ def _update_last_seen_date(self, finding: IntegrationFinding) -> None:
3056
+ """
3057
+ Update the last_seen date if scan date is after first_seen.
3058
+
3059
+ :param IntegrationFinding finding: The integration finding
3060
+ :return: None
3061
+ :rtype: None
3062
+ """
3063
+ scan_date = date_obj(self.scan_date)
3064
+ first_seen = date_obj(finding.first_seen)
3065
+
3066
+ if scan_date and first_seen and scan_date >= first_seen:
3067
+ finding.last_seen = self.scan_date
3068
+
2960
3069
  def update_finding_dates(
2961
3070
  self,
2962
3071
  finding: IntegrationFinding,
@@ -2972,28 +3081,17 @@ class ScannerIntegration(ABC):
2972
3081
  :return: The updated integration finding
2973
3082
  :rtype: IntegrationFinding
2974
3083
  """
2975
- if not finding.due_date:
2976
- if not existing_vuln:
2977
- finding.first_seen = self.scan_date
2978
- finding.date_created = self.scan_date
2979
- # From @mlongworth:
2980
- # It also appears that the suspense date (due date for remediation) is set based upon the import date rather
2981
- # than the scan date. This calculation needs to be based upon scan date e.g. scan date of 2/5/2024 severity
2982
- # High, should set the due date for remediation in the POA&M to 4/5/2024.
2983
- finding.due_date = issue_due_date(
2984
- severity=finding.severity,
2985
- created_date=finding.date_created or self.scan_date,
2986
- title=self.title,
2987
- config=self.app.config,
2988
- )
2989
- else:
2990
- finding.first_seen = existing_vuln.firstSeen if existing_vuln else finding.first_seen
2991
- if issue:
2992
- finding.date_created = issue.dateFirstDetected or finding.date_created
2993
- scan_date = date_obj(self.scan_date)
2994
- first_seen = date_obj(finding.first_seen)
2995
- if scan_date and first_seen and scan_date >= first_seen:
2996
- finding.last_seen = self.scan_date
3084
+ if finding.due_date:
3085
+ # If due_date is already set, only update last_seen if needed
3086
+ self._update_last_seen_date(finding)
3087
+ return finding
3088
+
3089
+ # Update dates for new findings
3090
+ self._update_first_seen_date(finding, existing_vuln)
3091
+ self._update_date_created(finding, issue)
3092
+ self._update_due_date(finding)
3093
+ self._update_last_seen_date(finding)
3094
+
2997
3095
  return finding
2998
3096
 
2999
3097
  def update_scan(self, scan_history: regscale_models.ScanHistory) -> None:
@@ -3047,7 +3145,9 @@ class ScannerIntegration(ABC):
3047
3145
  :rtype: None
3048
3146
  """
3049
3147
  # Method is deprecated - using update_control_implementation_status_after_close instead
3050
- pass
3148
+ logger.warning(
3149
+ "update_control_implementation_status is deprecated - using update_control_implementation_status_after_close instead"
3150
+ )
3051
3151
 
3052
3152
  def update_regscale_checklists(self, findings: List[IntegrationFinding]) -> int:
3053
3153
  """
@@ -3130,3 +3230,31 @@ class ScannerIntegration(ABC):
3130
3230
  impact=finding.impact,
3131
3231
  recommendationForMitigation=finding.recommendation_for_mitigation,
3132
3232
  ).create()
3233
+
3234
+ @staticmethod
3235
+ def _calculate_kev_due_date(kev_data: dict, issue_date_created: str) -> Optional[str]:
3236
+ """
3237
+ Calculate the due date for a KEV issue based on the difference between
3238
+ KEV due date and date added, then add that difference to the issue creation date.
3239
+
3240
+ :param dict kev_data: KEV data containing dueDate and dateAdded
3241
+ :param str issue_date_created: The issue creation date string
3242
+ :return: Calculated due date as a RegScale formatted string or None
3243
+ :rtype: Optional[str]
3244
+ """
3245
+ from datetime import datetime
3246
+
3247
+ from regscale.core.app.utils.app_utils import convert_datetime_to_regscale_string
3248
+
3249
+ if datetime.strptime(kev_data["dueDate"], "%Y-%m-%d") < datetime.strptime(
3250
+ issue_date_created, "%Y-%m-%d %H:%M:%S"
3251
+ ):
3252
+ # diff kev due date and kev dateAdded
3253
+ diff = datetime.strptime(kev_data["dueDate"], "%Y-%m-%d") - datetime.strptime(
3254
+ kev_data["dateAdded"], "%Y-%m-%d"
3255
+ )
3256
+ # add diff to issue.dateCreated
3257
+ return convert_datetime_to_regscale_string(
3258
+ datetime.strptime(issue_date_created, "%Y-%m-%d %H:%M:%S") + diff
3259
+ )
3260
+ return None
@@ -3,18 +3,17 @@ AWS Inspector Scan information
3
3
  """
4
4
 
5
5
  import re
6
- from typing import List, Optional, Tuple
7
-
8
6
  from pathlib import Path
7
+ from typing import List, Optional, Tuple
9
8
 
10
9
  from regscale.core.app.application import Application
11
10
  from regscale.core.app.logz import create_logger
12
- from regscale.core.app.utils.app_utils import get_current_datetime, is_valid_fqdn
11
+ from regscale.core.app.utils.app_utils import is_valid_fqdn
12
+ from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
13
13
  from regscale.models import ImportValidater
14
14
  from regscale.models.integration_models.amazon_models.inspector import InspectorRecord
15
15
  from regscale.models.integration_models.flat_file_importer import FlatFileImporter
16
- from regscale.models.regscale_models.asset import Asset
17
- from regscale.models.regscale_models.vulnerability import Vulnerability
16
+ from regscale.models.regscale_models import Asset, AssetStatus, IssueStatus, Vulnerability
18
17
 
19
18
 
20
19
  class InspectorScan(FlatFileImporter):
@@ -101,26 +100,17 @@ class InspectorScan(FlatFileImporter):
101
100
  # Container Image, Virtual Machine (VM), etc.
102
101
  asset_type = self.amazon_type_map().get(dat.resource_type, "Other")
103
102
 
104
- return Asset(
105
- **{
106
- "id": 0,
107
- "name": hostname,
108
- "awsIdentifier": hostname,
109
- "ipAddress": "",
110
- "isPublic": True,
111
- "status": "Active (On Network)",
112
- "assetCategory": "Hardware",
113
- "bLatestScan": True,
114
- "bAuthenticatedScan": True,
115
- "scanningTool": self.name,
116
- "assetOwnerId": self.config["userId"],
117
- "assetType": asset_type,
118
- "fqdn": hostname if is_valid_fqdn(hostname) else None,
119
- "operatingSystem": Asset.find_os(distro),
120
- "systemAdministratorId": self.config["userId"],
121
- "parentId": self.attributes.parent_id,
122
- "parentModule": self.attributes.parent_module,
123
- }
103
+ return IntegrationAsset(
104
+ identifier=hostname,
105
+ name=hostname,
106
+ ip_address="0.0.0.0",
107
+ cpu=0,
108
+ ram=0,
109
+ status=AssetStatus.Active.value,
110
+ asset_type=asset_type,
111
+ asset_category="Software",
112
+ operating_system=Asset.find_os(distro),
113
+ fqdn=hostname if is_valid_fqdn(hostname) else None,
124
114
  )
125
115
 
126
116
  def create_vuln(self, dat: Optional[InspectorRecord] = None, **kwargs) -> Optional[Vulnerability]:
@@ -132,44 +122,29 @@ class InspectorScan(FlatFileImporter):
132
122
  :rtype: Optional[Vulnerability]
133
123
  """
134
124
  hostname = dat.resource_id
135
- distro = dat.platform
136
125
  cve: str = dat.vulnerability_id
137
126
  description: str = dat.description
138
127
  title = dat.title if dat.title else dat.description
139
128
  aws_severity = dat.severity
140
129
  severity = self.severity_mapper(aws_severity)
141
- config = self.attributes.app.config
142
- asset_match = [asset for asset in self.data["assets"] if asset.name == hostname]
143
- asset = asset_match[0] if asset_match else None
144
- if dat and asset_match:
145
- return Vulnerability(
146
- id=0,
147
- scanId=0, # set later
148
- parentId=asset.id,
149
- parentModule="assets",
150
- ipAddress="0.0.0.0", # No ip address available
151
- lastSeen=dat.last_seen,
152
- firstSeen=dat.first_seen,
153
- daysOpen=None,
154
- dns=hostname,
155
- mitigated=None,
156
- operatingSystem=(Asset.find_os(distro) if Asset.find_os(distro) else None),
157
- severity=severity,
158
- plugInName=dat.title,
159
- plugInId=self.convert_cve_string_to_int(dat.vulnerability_id),
160
- cve=cve,
161
- vprScore=None,
162
- tenantsId=0,
163
- title=f"{description} on asset {asset.name}",
130
+ if dat:
131
+ return IntegrationFinding(
132
+ title=title,
164
133
  description=description,
165
- plugInText=title,
166
- createdById=config["userId"],
167
- lastUpdatedById=config["userId"],
168
- dateCreated=get_current_datetime(),
169
- extra_data={
170
- "solution": dat.remediation,
171
- "proof": dat.finding_arn,
172
- },
134
+ severity=self.determine_severity(severity),
135
+ status=IssueStatus.Open.value,
136
+ ip_address="0.0.0.0",
137
+ plugin_text=title,
138
+ plugin_name=dat.title,
139
+ plugin_id=self.convert_cve_string_to_int(dat.vulnerability_id),
140
+ asset_identifier=hostname,
141
+ remediation=dat.remediation,
142
+ cve=cve,
143
+ first_seen=dat.first_seen,
144
+ last_seen=dat.last_seen,
145
+ scan_date=self.scan_date,
146
+ category="Software",
147
+ control_labels=[],
173
148
  )
174
149
  return None
175
150
 
@@ -10,10 +10,12 @@ from regscale.core.app.application import Application
10
10
  from regscale.core.app.logz import create_logger
11
11
  from regscale.core.app.utils.app_utils import get_current_datetime, is_valid_fqdn
12
12
  from regscale.core.utils.date import datetime_str
13
+ from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
13
15
  from regscale.models.app_models import ImportValidater
14
16
  from regscale.models.integration_models.flat_file_importer import FlatFileImporter
15
- from regscale.models.regscale_models.asset import Asset
16
- from regscale.models.regscale_models.vulnerability import Vulnerability
17
+ from regscale.models.regscale_models import AssetStatus, IssueSeverity, IssueStatus
17
19
 
18
20
 
19
21
  class Aqua(FlatFileImporter):
@@ -36,6 +38,7 @@ class Aqua(FlatFileImporter):
36
38
  self.vendor_cvss_v2_severity = "Vendor CVSS v2 Severity"
37
39
  self.vendor_cvss_v3_severity = "Vendor CVSS v3 Severity"
38
40
  self.vendor_cvss_v3_score = "Vendor CVSS v3 Score"
41
+ self.vendor_cvss_v2_score = "Vendor CVSS v2 Score"
39
42
  self.nvd_cvss_v2_severity = "NVD CVSS v2 Severity"
40
43
  self.nvd_cvss_v3_severity = "NVD CVSS v3 Severity"
41
44
  self.required_headers = [
@@ -63,40 +66,32 @@ class Aqua(FlatFileImporter):
63
66
  **kwargs,
64
67
  )
65
68
 
66
- def create_asset(self, dat: Optional[dict] = None) -> Optional[Asset]:
69
+ def create_asset(self, dat: Optional[dict] = None) -> Optional[IntegrationAsset]:
67
70
  """
68
- Create an asset from a row in the Aqua file
71
+ Create an IntegrationAsset from a row in the Aqua file
69
72
 
70
73
  :param Optional[dict] dat: Data row from CSV file, defaults to None
71
- :return: RegScale Asset object or None
72
- :rtype: Optional[Asset]
74
+ :return: RegScale IntegrationAsset object or None
75
+ :rtype: Optional[IntegrationAsset]
73
76
  """
74
77
  name = self.mapping.get_value(dat, self.image_name)
75
78
  if not name:
76
79
  return None
77
80
  os = self.mapping.get_value(dat, self.OS)
78
- return Asset(
79
- **{
80
- "id": 0,
81
- "name": name,
82
- "description": "",
83
- "operatingSystem": Asset.find_os(os),
84
- "operatingSystemVersion": os,
85
- "ipAddress": "0.0.0.0",
86
- "isPublic": True,
87
- "status": "Active (On Network)",
88
- "assetCategory": "Hardware",
89
- "bLatestScan": True,
90
- "bAuthenticatedScan": True,
91
- "scanningTool": self.name,
92
- "assetOwnerId": self.config["userId"],
93
- "assetType": "Other",
94
- "fqdn": name if is_valid_fqdn(name) else None,
95
- "systemAdministratorId": self.config["userId"],
96
- "parentId": self.attributes.parent_id,
97
- "parentModule": self.attributes.parent_module,
98
- "extra_data": {"software_inventory": self.generate_software_inventory(name)},
99
- }
81
+ return IntegrationAsset(
82
+ identifier=name,
83
+ name=name,
84
+ ip_address="0.0.0.0",
85
+ cpu=0,
86
+ ram=0,
87
+ status=AssetStatus.Active.value,
88
+ asset_type="Other",
89
+ asset_category="Hardware",
90
+ scanning_tool=self.name,
91
+ fqdn=name if is_valid_fqdn(name) else None,
92
+ operating_system=os,
93
+ other_tracking_number=name,
94
+ software_inventory=self.generate_software_inventory(name),
100
95
  )
101
96
 
102
97
  def generate_software_inventory(self, name: str) -> List[dict]:
@@ -137,64 +132,83 @@ class Aqua(FlatFileImporter):
137
132
  :rtype: str
138
133
  """
139
134
  self.logger.info(f"Unable to determine date for the '{field}' field, falling back to current date and time.")
140
- return get_current_datetime()
135
+ return self.scan_date
136
+
137
+ def determine_first_seen(self, dat: dict) -> str:
138
+ """
139
+ Determine the first seen date and time of the vulnerability
140
+
141
+ :param dict dat: Data row from CSV file
142
+ :return: The first seen date and time
143
+ :rtype: str
144
+ """
145
+ first_detected = datetime_str(
146
+ self.mapping.get_value(dat, self.integration_mapping.load("aqua", "dateFirstDetected"))
147
+ )
148
+ ffi = datetime_str(self.mapping.get_value(dat, self.ffi)) or self.current_datetime_w_log(self.ffi)
149
+ if first_detected and first_detected != ffi:
150
+ ffi = first_detected
151
+ return ffi
141
152
 
142
- def create_vuln(self, dat: Optional[dict] = None, **kwargs: dict) -> Optional[Vulnerability]:
153
+ def create_vuln(self, dat: Optional[dict] = None, **kwargs: dict) -> Optional[IntegrationFinding]:
143
154
  """
144
- Create a vulnerability from a row in the Aqua csv file
155
+ Create a IntegrationFinding from a row in the Aqua csv file
145
156
 
146
157
  :param Optional[dict] dat: Data row from CSV file, defaults to None
147
158
  :param dict **kwargs: Additional keyword arguments
148
- :return: RegScale Vulnerability object or None
149
- :rtype: Optional[Vulnerability]
159
+ :return: RegScale IntegrationFinding object or None
160
+ :rtype: Optional[IntegrationFinding]
150
161
  """
151
162
 
152
- hostname = self.mapping.get_value(dat, self.image_name)
163
+ try:
164
+ hostname = self.mapping.get_value(dat, self.image_name)
153
165
 
154
- # Custom Integration Mapping fields
155
- remediation = self.mapping.get_value(dat, self.integration_mapping.load("aqua", "remediation")) or (
156
- self.mapping.get_value(dat, self.description) or "Upgrade affected package"
157
- ) # OLDTODO: BMC would like this to use "Solution" column
158
- description = self.mapping.get_value(dat, self.integration_mapping.load("aqua", "description")) or (
159
- self.mapping.get_value(dat, self.description)
160
- )
161
- title = self.mapping.get_value(dat, self.integration_mapping.load("aqua", "title")) or (
162
- description[:255] if description else f"Vulnerability on {hostname}"
163
- ) # OLDTODO: BMC Would like the CVE here
164
-
165
- regscale_vuln = None
166
- severity = self.determine_cvss_severity(dat)
167
- config = self.attributes.app.config
168
- asset_match = [asset for asset in self.data["assets"] if asset.name == hostname]
169
- asset = asset_match[0] if asset_match else None
170
- if asset_match and self.validate(ix=kwargs.get("index"), dat=dat):
171
- regscale_vuln = Vulnerability(
172
- id=0,
173
- scanId=0,
174
- parentId=asset.id,
175
- parentModule="assets",
176
- ipAddress="0.0.0.0",
177
- lastSeen=datetime_str(self.mapping.get_value(dat, self.last_image_scan))
178
- or self.current_datetime_w_log(self.last_image_scan),
179
- firstSeen=datetime_str(self.mapping.get_value(dat, self.ffi)) or self.current_datetime_w_log(self.ffi),
180
- daysOpen=None,
181
- dns=hostname,
182
- mitigated=None,
183
- operatingSystem=asset.operatingSystem,
184
- severity=severity,
185
- plugInName=description,
186
- cve=self.mapping.get_value(dat, self.vuln_title),
187
- cvsSv3BaseScore=self.mapping.get_value(dat, self.vendor_cvss_v3_score, 0.0),
188
- tenantsId=0,
189
- title=title,
190
- description=description,
191
- plugInText=self.mapping.get_value(dat, self.vuln_title),
192
- createdById=config["userId"],
193
- lastUpdatedById=config["userId"],
194
- dateCreated=get_current_datetime(),
195
- extra_data={"solution": remediation},
166
+ # Custom Integration Mapping fields
167
+ remediation = self.mapping.get_value(dat, self.integration_mapping.load("aqua", "remediation")) or (
168
+ self.mapping.get_value(dat, self.description) or "Upgrade affected package"
169
+ ) # OLDTODO: BMC would like this to use "Solution" column
170
+ description = self.mapping.get_value(dat, self.integration_mapping.load("aqua", "description")) or (
171
+ self.mapping.get_value(dat, self.description)
196
172
  )
197
- return regscale_vuln
173
+ title = self.mapping.get_value(dat, self.integration_mapping.load("aqua", "title")) or (
174
+ description[:255] if description else f"Vulnerability on {hostname}"
175
+ ) # OLDTODO: BMC Would like the CVE here
176
+
177
+ cvss3_score = self.mapping.get_value(dat, self.vendor_cvss_v3_score) or 0.0
178
+ cvss_v2_score = self.mapping.get_value(dat, self.vendor_cvss_v2_score) or 0.0
179
+
180
+ regscale_finding = None
181
+ severity = self.determine_cvss_severity(dat)
182
+ # Create IntegrationFinding if we have valid data and asset match
183
+
184
+ if dat:
185
+ return IntegrationFinding(
186
+ control_labels=[], # Add an empty list for control_labels
187
+ title=title,
188
+ description=description,
189
+ ip_address="0.0.0.0",
190
+ cve=self.mapping.get_value(dat, self.vuln_title, "").upper(),
191
+ severity=severity,
192
+ asset_identifier=hostname,
193
+ plugin_name=description,
194
+ plugin_id=self.mapping.get_value(dat, self.vuln_title),
195
+ cvss_score=cvss_v2_score or 0.0,
196
+ cvss_v3_score=cvss3_score or 0.0,
197
+ cvss_v2_score=cvss_v2_score or 0.0,
198
+ plugin_text=self.mapping.get_value(dat, self.vuln_title),
199
+ remediation=remediation,
200
+ category="Hardware",
201
+ status=IssueStatus.Open,
202
+ first_seen=self.determine_first_seen(dat),
203
+ last_seen=datetime_str(self.mapping.get_value(dat, self.last_image_scan))
204
+ or self.current_datetime_w_log(self.last_image_scan),
205
+ vulnerability_type="Vulnerability Scan",
206
+ baseline=f"{self.name} Host",
207
+ )
208
+ return regscale_finding
209
+ except AttributeError as e:
210
+ self.logger.error(f"Error creating finding: {e}")
211
+ return None
198
212
 
199
213
  def determine_cvss_severity(self, dat: dict) -> str:
200
214
  """
@@ -217,7 +231,7 @@ class Aqua(FlatFileImporter):
217
231
  severity = self.mapping.get_value(dat, key).lower()
218
232
  break
219
233
 
220
- return severity
234
+ return self.determine_severity(severity)
221
235
 
222
236
  def validate(self, ix: Optional[int], dat: dict) -> bool:
223
237
  """