regscale-cli 6.16.1.0__py3-none-any.whl → 6.16.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of regscale-cli might be problematic.

Files changed (46)
  1. regscale/__init__.py +1 -1
  2. regscale/core/app/internal/login.py +1 -1
  3. regscale/core/app/internal/poam_editor.py +1 -1
  4. regscale/core/app/utils/api_handler.py +4 -11
  5. regscale/integrations/commercial/__init__.py +2 -2
  6. regscale/integrations/commercial/ad.py +1 -1
  7. regscale/integrations/commercial/crowdstrike.py +0 -1
  8. regscale/integrations/commercial/grype/__init__.py +3 -0
  9. regscale/integrations/commercial/grype/commands.py +72 -0
  10. regscale/integrations/commercial/grype/scanner.py +390 -0
  11. regscale/integrations/commercial/import_all/import_all_cmd.py +2 -2
  12. regscale/integrations/commercial/opentext/__init__.py +6 -0
  13. regscale/integrations/commercial/opentext/commands.py +77 -0
  14. regscale/integrations/commercial/opentext/scanner.py +449 -85
  15. regscale/integrations/commercial/qualys.py +50 -61
  16. regscale/integrations/commercial/servicenow.py +1 -0
  17. regscale/integrations/commercial/snyk.py +2 -2
  18. regscale/integrations/commercial/synqly/ticketing.py +29 -0
  19. regscale/integrations/commercial/trivy/__init__.py +5 -0
  20. regscale/integrations/commercial/trivy/commands.py +74 -0
  21. regscale/integrations/commercial/trivy/scanner.py +276 -0
  22. regscale/integrations/commercial/veracode.py +1 -1
  23. regscale/integrations/commercial/wizv2/utils.py +1 -1
  24. regscale/integrations/jsonl_scanner_integration.py +869 -0
  25. regscale/integrations/public/fedramp/fedramp_common.py +4 -4
  26. regscale/integrations/public/fedramp/inventory_items.py +3 -3
  27. regscale/integrations/scanner_integration.py +225 -59
  28. regscale/models/integration_models/cisa_kev_data.json +65 -7
  29. regscale/models/integration_models/{flat_file_importer.py → flat_file_importer/__init__.py} +29 -8
  30. regscale/models/integration_models/snyk.py +141 -15
  31. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  32. regscale/models/integration_models/tenable_models/integration.py +42 -7
  33. regscale/models/integration_models/veracode.py +91 -48
  34. regscale/models/regscale_models/regscale_model.py +1 -1
  35. regscale/models/regscale_models/user.py +3 -4
  36. regscale/models/regscale_models/vulnerability.py +21 -0
  37. regscale/utils/version.py +3 -5
  38. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.3.0.dist-info}/METADATA +3 -3
  39. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.3.0.dist-info}/RECORD +43 -38
  40. regscale/integrations/commercial/grype.py +0 -165
  41. regscale/integrations/commercial/opentext/click.py +0 -99
  42. regscale/integrations/commercial/trivy.py +0 -162
  43. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.3.0.dist-info}/LICENSE +0 -0
  44. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.3.0.dist-info}/WHEEL +0 -0
  45. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.3.0.dist-info}/entry_points.txt +0 -0
  46. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.3.0.dist-info}/top_level.txt +0 -0

regscale/integrations/commercial/qualys.py
@@ -27,9 +27,9 @@ from regscale.core.app.utils.app_utils import (
     save_data_to,
 )
 from regscale.core.app.utils.file_utils import download_from_s3
-from regscale.models.app_models.click import regscale_ssp_id
 from regscale.models import Asset, Issue, Search, regscale_models
 from regscale.models.app_models.click import NotRequiredIf, save_output_to
+from regscale.models.app_models.click import regscale_ssp_id
 from regscale.models.integration_models.flat_file_importer import FlatFileImporter
 from regscale.models.integration_models.qualys import (
     Qualys,
@@ -611,11 +611,7 @@ def get_scan_results(scans: Any, task: TaskID) -> dict:
     :return: dictionary of detailed Qualys scans
     :rtype: dict
     """
-    app = check_license()
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    qualys_url, QUALYS_API = _get_qualys_api()

     scan_data = {}
     # check number of scans requested
@@ -636,7 +632,7 @@ def get_scan_results(scans: Any, task: TaskID) -> dict:
     }
     # get the scan data via API
     res = QUALYS_API.get(
-        url=urljoin(config["qualysUrl"], "/api/2.0/fo/scan/"),
+        url=urljoin(qualys_url, "/api/2.0/fo/scan/"),
         headers=HEADERS,
         params=params,
     )
@@ -664,11 +660,7 @@ def get_detailed_scans(days: int) -> list:
     :return: list of results from Qualys API
     :rtype: list
     """
-    app = check_license()
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    qualys_url, QUALYS_API = _get_qualys_api()

     today = datetime.now()
     scan_date = today - timedelta(days=days)
@@ -684,12 +676,12 @@ def get_detailed_scans(days: int) -> list:
         "scan_datetime_since": scan_date.strftime("%Y-%m-%dT%H:%I:%S%ZZ"),
     }
     res = QUALYS_API.get(
-        url=urljoin(config["qualysUrl"], "/api/2.0/fo/scan/summary/"),
+        url=urljoin(qualys_url, "/api/2.0/fo/scan/summary/"),
         headers=HEADERS,
         params=params,
     )
     response = QUALYS_API.get(
-        url=urljoin(config["qualysUrl"], "/api/2.0/fo/scan/vm/summary/"),
+        url=urljoin(qualys_url, "/api/2.0/fo/scan/vm/summary/"),
         headers=HEADERS,
         params=params2,
     )
@@ -708,6 +700,34 @@ def get_detailed_scans(days: int) -> list:
     return res_data


+def _get_config():
+    """
+    Get the Qualys configuration
+
+    :return: Qualys configuration
+    :rtype: dict
+    """
+    app = check_license()
+    config = app.config
+    return config
+
+
+def _get_qualys_api():
+    """
+    Get the Qualys API session
+
+    :return: Qualys API session
+    :rtype: Session
+    """
+    config = _get_config()
+
+    # set the auth for the QUALYS_API session
+    QUALYS_API.auth = (config.get("qualysUserName"), config.get("qualysPassword"))
+    QUALYS_API.verify = config.get("qualysVerify", True)
+    qualys_url = config.get("qualysUrl")
+    return qualys_url, QUALYS_API
+
+
 def import_total_cloud_data_from_qualys_api(security_plan_id: int, include_tags: str, exclude_tags: str):
     """
     Function to get the total cloud data from Qualys API
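
The remaining hunks in this file swap the repeated check_license()/credential setup for these two helpers. Below is a minimal standalone sketch of the same pattern; the hard-coded config dict and the plain requests.Session are illustrative stand-ins, not RegScale's actual Application/check_license wiring or its QUALYS_API object.

```python
# Standalone sketch of the consolidation pattern introduced in qualys.py:
# one helper configures the shared session once, every caller reuses it.
# _CONFIG and the module-level Session below are assumptions for illustration.
from urllib.parse import urljoin

import requests

QUALYS_API = requests.Session()
_CONFIG = {
    "qualysUserName": "user",
    "qualysPassword": "secret",
    "qualysUrl": "https://qualysapi.example.com/",
    "qualysVerify": True,
}


def _get_qualys_api():
    """Attach auth and TLS-verify settings to the shared session, return it with the base URL."""
    QUALYS_API.auth = (_CONFIG.get("qualysUserName"), _CONFIG.get("qualysPassword"))
    QUALYS_API.verify = _CONFIG.get("qualysVerify", True)
    return _CONFIG.get("qualysUrl"), QUALYS_API


qualys_url, session = _get_qualys_api()
# Callers then build endpoint URLs against the returned base URL, as the hunks below do:
print(urljoin(qualys_url, "/api/2.0/fo/scan/"))
```

The new qualysVerify key, defaulting to True, also appears to let a deployment relax TLS verification in one place instead of at every call site.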
@@ -717,10 +737,7 @@ def import_total_cloud_data_from_qualys_api(security_plan_id: int, include_tags:
     """
     try:

-        app = check_license()
-        config = app.config
-        # set the auth for the QUALYS_API session
-        QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+        qualys_url, QUALYS_API = _get_qualys_api()
         params = {
             "action": "list",
             "show_asset_id": "1",
@@ -733,7 +750,7 @@ def import_total_cloud_data_from_qualys_api(security_plan_id: int, include_tags:
         if include_tags:
             params["tag_set_include"] = include_tags
         response = QUALYS_API.get(
-            url=urljoin(config["qualysUrl"], "/api/2.0/fo/asset/host/vm/detection/"),
+            url=urljoin(qualys_url, "/api/2.0/fo/asset/host/vm/detection/"),
            headers=HEADERS,
            params=params,
        )
@@ -761,17 +778,13 @@ def get_scans_summary(scan_choice: str) -> dict:
     :return: Detailed summary of scans from Qualys API as a dictionary
     :rtype: dict
     """
-    app = check_license()
-    config = app.config
+    qualys_url, QUALYS_API = _get_qualys_api()
     urls = []

-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
-
     # set up variables for function
     scan_data = {}
     responses = []
-    scan_url = urljoin(config["qualysUrl"], "/api/2.0/fo/scan/")
+    scan_url = urljoin(qualys_url, "/api/2.0/fo/scan/")

     # set up parameters for the scans query
     params = {"action": "list"}
@@ -812,11 +825,7 @@ def get_scan_details(days: int) -> list:
     :return: Detailed summary of scans from Qualys API as a dictionary
     :rtype: list
     """
-    app = check_license()
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    qualys_url, QUALYS_API = _get_qualys_api()
     # get since date for API call
     since_date = datetime.now() - timedelta(days=days)
     # set up data and parameters for the scans query
@@ -835,12 +844,12 @@ def get_scan_details(days: int) -> list:
         "scan_datetime_since": since_date.strftime("%Y-%m-%dT%H:%M:%SZ"),
     }
     res = QUALYS_API.get(
-        url=urljoin(config["qualysUrl"], "/api/2.0/fo/scan/summary/"),
+        url=urljoin(qualys_url, "/api/2.0/fo/scan/summary/"),
         headers=headers,
         params=params,
     )
     response = QUALYS_API.get(
-        url=urljoin(config["qualysUrl"], "/api/2.0/fo/scan/vm/summary/"),
+        url=urljoin(qualys_url, "/api/2.0/fo/scan/vm/summary/"),
         headers=headers,
         params=params2,
     )
@@ -874,11 +883,7 @@ def sync_qualys_assets_and_vulns(
     :param Optional[Union[int, str]] asset_group_filter: Filter the Qualys assets by an asset group ID or name, if any
     :rtype: None
     """
-    app = check_license()
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    config = _get_config()

     # Get the assets from RegScale with the provided SSP ID
     logger.info("Getting assets from RegScale for SSP #%s...", ssp_id)
@@ -1086,14 +1091,10 @@ def get_qualys_assets_and_scan_results(
     :return: list of dictionaries containing asset data
     :rtype: list
     """
-    app = check_license()
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    qualys_url, QUALYS_API = _get_qualys_api()
     # set url
     if not url:
-        url = urljoin(config["qualysUrl"], "api/2.0/fo/asset/host/vm/detection?action=list&show_asset_id=1")
+        url = urljoin(qualys_url, "api/2.0/fo/asset/host/vm/detection?action=list&show_asset_id=1")

     # check if an asset group filter was provided and append it to the url
     if asset_group_filter:
@@ -1143,12 +1144,7 @@ def get_issue_data_for_assets(asset_list: list) -> Tuple[list[dict], int]:
     :return: Updated asset list of Qualys assets and their associated vulnerabilities, total number of vulnerabilities
     :rtype: Tuple[list[dict], int]
     """
-    app = check_license()
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
-
+    config = _get_config()
     with job_progress:
         issues = {}
         for asset in asset_list:
@@ -1285,9 +1281,9 @@ def lookup_asset(asset_list: list, asset_id: str = None) -> list[Asset]:
     :rtype: list[Asset]
     """
     if asset_id:
-        results = [Asset(**asset) for asset in asset_list if getattr(asset, "qualysId", None) == asset_id]
+        results = [asset for asset in asset_list if getattr(asset, "qualysId", None) == asset_id]
     else:
-        results = [Asset(**asset) for asset in asset_list]
+        results = [asset for asset in asset_list]
     # Return unique list
     return list(set(results)) or []

@@ -1322,11 +1318,7 @@ def create_regscale_issue_from_vuln(
     :return: list of RegScale issues to update, and a list of issues to be created
     :rtype: Tuple[list[Issue], list[Issue]]
     """
-    app = check_license()
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    config = _get_config()
     default_status = config["issues"]["qualys"]["status"]
     regscale_issues = []
     regscale_existing_issues = Issue.get_all_by_parent(parent_id=regscale_ssp_id, parent_module="securityplans")
@@ -1441,13 +1433,10 @@ def get_asset_groups_from_qualys() -> list:
     :return: list of assets from Qualys
     :rtype: list
     """
-    app = check_license()
-    config = app.config
     asset_groups = []

-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
-    response = QUALYS_API.get(url=urljoin(config["qualysUrl"], "api/2.0/fo/asset/group?action=list"), headers=HEADERS)
+    qualys_url, QUALYS_API = _get_qualys_api()
+    response = QUALYS_API.get(url=urljoin(qualys_url, "api/2.0/fo/asset/group?action=list"), headers=HEADERS)
     if response.ok:
         logger.debug(response.text)
         try:

regscale/integrations/commercial/servicenow.py
@@ -741,6 +741,7 @@ def create_snow_tag(snow_config: ServiceNowConfig, tag_name: str) -> Optional[di
         "active": True,
         "sys_class_name": "tag",
         "type": "Standard",
+        "viewable_by": "everyone",
     }
     url = urljoin(snow_config.url, "api/now/table/label")
     response = snow_api.post(
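
For context, the new "viewable_by": "everyone" key lands in the label payload that create_snow_tag posts to ServiceNow's label table. The sketch below rebuilds that payload in isolation; only the four keys and the api/now/table/label path are confirmed by the diff, while the "name" field, instance URL, and tag value are illustrative assumptions.

```python
# Illustrative reconstruction of the tag payload after this change. The "name" key and
# the example instance URL are assumptions; the other keys appear verbatim in the diff.
from urllib.parse import urljoin

tag_name = "regscale-finding"  # hypothetical tag label
payload = {
    "name": tag_name,  # assumed: the label's display name
    "active": True,
    "sys_class_name": "tag",
    "type": "Standard",
    "viewable_by": "everyone",  # added in 6.16.3.0, presumably so the tag is visible to all users
}
url = urljoin("https://example.service-now.com/", "api/now/table/label")
print(url)
print(payload)
```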

regscale/integrations/commercial/snyk.py
@@ -18,7 +18,7 @@ def snyk():

 @snyk.command(name="import_snyk")
 @FlatFileImporter.common_scanner_options(
-    message="File path to the folder containing Snyk .xlsx files to process to RegScale.",
+    message="File path to the folder containing Snyk .xlsx or .json files to process to RegScale.",
     prompt="File path for Snyk files",
     import_name="snyk",
 )
@@ -77,7 +77,7 @@ def import_synk_files(
     FlatFileImporter.import_files(
         import_type=Snyk,
         import_name="Snyk",
-        file_types=".xlsx",
+        file_types=[".xlsx", ".json"],
         folder_path=folder_path,
         regscale_ssp_id=regscale_ssp_id,
         scan_date=scan_date,

regscale/integrations/commercial/synqly/ticketing.py
@@ -13,6 +13,17 @@ def ticketing() -> None:
     pass


+@ticketing.command(name="sync_autotask")
+@regscale_id()
+@regscale_module()
+def sync_autotask(regscale_id: int, regscale_module: str) -> None:
+    """Sync Ticketing data between Autotask and RegScale."""
+    from regscale.models.integration_models.synqly_models.connectors import Ticketing
+
+    ticketing_autotask = Ticketing("autotask")
+    ticketing_autotask.run_sync(regscale_id=regscale_id, regscale_module=regscale_module)
+
+
 @ticketing.command(name="sync_jira")
 @regscale_id()
 @regscale_module()
@@ -111,6 +122,24 @@ def sync_servicenow(regscale_id: int, regscale_module: str, issue_type: str, def
     )


+@ticketing.command(name="sync_servicenow_sir")
+@regscale_id()
+@regscale_module()
+@click.option(
+    "--issue_type",
+    type=click.STRING,
+    help="servicenow_sir issue type",
+    required=True,
+    prompt="servicenow_sir issue type",
+)
+def sync_servicenow_sir(regscale_id: int, regscale_module: str, issue_type: str) -> None:
+    """Sync Ticketing data between Servicenow Sir and RegScale."""
+    from regscale.models.integration_models.synqly_models.connectors import Ticketing
+
+    ticketing_servicenow_sir = Ticketing("servicenow_sir")
+    ticketing_servicenow_sir.run_sync(regscale_id=regscale_id, regscale_module=regscale_module, issue_type=issue_type)
+
+
 @ticketing.command(name="sync_torq")
 @regscale_id()
 @regscale_module()

regscale/integrations/commercial/trivy/__init__.py
@@ -0,0 +1,5 @@
+"""
+Module for processing Trivy scan results and loading them into RegScale.
+"""
+
+from .commands import import_scans

regscale/integrations/commercial/trivy/commands.py
@@ -0,0 +1,74 @@
+"""
+This module contains the command line interface for the Trivy scanner integration.
+"""
+
+from datetime import datetime
+from typing import Optional
+
+import click
+from pathlib import Path
+
+from regscale.models.integration_models.flat_file_importer import FlatFileImporter
+
+
+@click.group()
+def trivy():
+    """Performs actions from the Trivy scanner integration."""
+    pass
+
+
+@trivy.command("import_scans")
+@FlatFileImporter.common_scanner_options(
+    message="File path to the folder containing JFrog XRay .json files to process to RegScale.",
+    prompt="File path for Grype files",
+    import_name="grype",
+)
+@click.option(
+    "--destination",
+    "-d",
+    help="Path to download the files to. If not provided, files will be downloaded to the temporary directory.",
+    type=click.Path(exists=True, dir_okay=True),
+    required=False,
+)
+@click.option(
+    "--file_pattern",
+    "-fp",
+    help="[Optional] File pattern to match (e.g., '*.json')",
+    required=False,
+)
+def import_scans(
+    destination: Optional[Path],
+    file_pattern: str,
+    folder_path: Path,
+    regscale_ssp_id: int,
+    scan_date: datetime,
+    mappings_path: Path,
+    disable_mapping: bool,
+    s3_bucket: str,
+    s3_prefix: str,
+    aws_profile: str,
+    upload_file: bool,
+) -> None:
+    """
+    Process Trivy scan results from a folder containing Trivy scan files and load into RegScale.
+    """
+    from regscale.integrations.commercial.trivy.scanner import TrivyIntegration
+
+    if s3_bucket and not folder_path:
+        folder_path = s3_bucket
+    ti = TrivyIntegration(
+        plan_id=regscale_ssp_id,
+        file_path=str(folder_path) if folder_path else None,
+        s3_bucket=s3_bucket,
+        s3_prefix=s3_prefix,
+        aws_profile=aws_profile,
+        scan_date=scan_date,
+        mappings_path=str(mappings_path) if mappings_path else None,
+        disable_mapping=disable_mapping,
+        download_destination=destination,
+        file_pattern=file_pattern,
+        read_files_only=True,
+        upload_file=upload_file,
+    )
+
+    ti.sync_assets_and_findings()

regscale/integrations/commercial/trivy/scanner.py
@@ -0,0 +1,276 @@
+"""
+Trivy Scanner Integration for RegScale.
+
+This module provides integration between Trivy scanner and RegScale,
+allowing you to import Trivy scan results into RegScale as assets and findings.
+"""
+
+import logging
+import os
+from typing import Any, Dict, List, Optional, Union, Tuple, TypeVar
+
+from pathlib import Path
+
+from regscale.core.app.utils.parser_utils import safe_datetime_str
+from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+from regscale.models import IssueSeverity, AssetStatus, IssueStatus
+
+logger = logging.getLogger("regscale")
+
+# Define generic types for items that can be written to file
+T = TypeVar("T")
+ItemType = TypeVar("ItemType", IntegrationAsset, IntegrationFinding)
+
+
+class TrivyIntegration(JSONLScannerIntegration):
+    """Class for handling Trivy scanner integration."""
+
+    title: str = "Trivy"
+    asset_identifier_field: str = "otherTrackingNumber"
+    finding_severity_map: Dict[str, Any] = {
+        "CRITICAL": IssueSeverity.Critical.value,
+        "HIGH": IssueSeverity.High.value,
+        "MEDIUM": IssueSeverity.Moderate.value,
+        "LOW": IssueSeverity.Low.value,
+        "UNKNOWN": IssueSeverity.High.value,
+        "NEGLIGIBLE": IssueSeverity.High.value,
+    }
+
+    # Constants for file paths
+    ASSETS_FILE = "./artifacts/trivy_assets.jsonl"
+    FINDINGS_FILE = "./artifacts/trivy_findings.jsonl"
+
+    def __init__(self, *args, **kwargs):
+        """
+        Initialize the TrivyIntegration object.
+
+        :param Any kwargs: Keyword arguments
+        """
+        kwargs["read_files_only"] = True
+        kwargs["file_pattern"] = "*.json"
+        self.disable_mapping = kwargs["disable_mapping"] = True
+        super().__init__(*args, **kwargs)
+
+    def is_valid_file(self, data: Any, file_path: Union[Path, str]) -> Tuple[bool, Optional[Dict[str, Any]]]:
+        """
+        Check if the provided data is a valid Trivy scan result.
+
+        Validates that the data is from a Trivy JSON file with the required structure.
+        Logs a warning with the file path and returns (False, None) if invalid.
+
+        :param Any data: Data parsed from the file (string content when read_files_only is True, or file path otherwise)
+        :param Union[Path, str] file_path: Path to the file being processed
+        :return: Tuple of (is_valid, validated_data) where validated_data is the parsed JSON if valid
+        :rtype: Tuple[bool, Optional[Dict[str, Any]]]
+        """
+
+        # Check Trivy-specific structure
+        if not isinstance(data, dict):
+            logger.warning(f"File {file_path} is not a dict, skipping")
+            return False, None
+
+        if "Results" not in data:
+            logger.warning(f"File {file_path} has no 'Results' key, skipping")
+            return False, None
+
+        if not isinstance(data.get("Results"), list):
+            logger.warning(f"File {file_path} 'Results' is not a list, skipping")
+            return False, None
+
+        if "Metadata" not in data:
+            logger.warning(f"File {file_path} has no 'Metadata' key, skipping")
+            return False, None
+
+        return True, data
+
+    def parse_asset(self, file_path: Union[Path, str], data: Dict[str, Any]) -> IntegrationAsset:
+        """
+        Parse a single asset from Trivy scan data.
+
+        :param Union[Path, str] file_path: Path to the file containing the asset data
+        :param Dict[str, Any] data: The parsed JSON data
+        :return: IntegrationAsset object
+        :rtype: IntegrationAsset
+        """
+        # Convert path to string if it's not already
+        file_path_str = str(file_path)
+
+        # Get metadata and OS information
+        metadata = data.get("Metadata", {})
+        os_data = metadata.get("OS", {})
+
+        # Determine identifier from file name or data
+        if "sha256-" in file_path_str:
+            # Extract the sha256 from the filename
+            base_name = os.path.basename(file_path_str)
+            identifier = "sha256-" + base_name.split("sha256-")[1].split(".json")[0]
+        else:
+            identifier = metadata.get("ImageID", "Unknown")
+
+        # Get artifact name for other tracking number and fqdn
+        artifact_name = data.get("ArtifactName", identifier)
+
+        # Create and return the asset
+        return IntegrationAsset(
+            identifier=identifier,
+            name=identifier,
+            ip_address="0.0.0.0",
+            cpu=0,
+            ram=0,
+            status=AssetStatus.Active,
+            asset_type="Other",
+            asset_category="Software",
+            operating_system=f"{os_data.get('Family', '')} {os_data.get('Name', '')}",
+            notes=f"{os.path.basename(file_path_str)}",
+            other_tracking_number=artifact_name,
+            parent_id=self.plan_id,
+            parent_module="securityplans",
+            fqdn=artifact_name,
+        )
+
+    def _get_findings_data_from_file(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
+        """
+        Extract findings data from Trivy file data.
+
+        :param Dict[str, Any] data: The data from the Trivy file
+        :return: List of finding items
+        :rtype: List[Dict[str, Any]]
+        """
+        if not data or not isinstance(data, dict):
+            return []
+
+        findings = []
+
+        # Process all results
+        for result in data.get("Results", []):
+            if not isinstance(result, dict):
+                continue
+
+            # Extract vulnerabilities from the result
+            vulnerabilities = result.get("Vulnerabilities", [])
+            if not isinstance(vulnerabilities, list):
+                continue
+
+            # Add each vulnerability to the findings list
+            findings.extend(vulnerabilities)
+
+        return findings
+
+    def parse_finding(self, asset_identifier: str, data: Dict[str, Any], item: Dict[str, Any]) -> IntegrationFinding:
+        """
+        Parse a single finding from Trivy scan data.
+
+        :param str asset_identifier: The identifier of the asset this finding belongs to
+        :param Dict[str, Any] data: The parsed JSON data (for metadata)
+        :param Dict[str, Any] item: The finding data
+        :return: IntegrationFinding object
+        :rtype: IntegrationFinding
+        """
+        # Get scan date from the finding or use current time
+        scan_date = safe_datetime_str(data.get("CreatedAt"))
+
+        # Process severity
+        severity_str = item.get("Severity", "UNKNOWN")
+        severity_value = self.finding_severity_map.get(severity_str.upper(), IssueSeverity.High.value)
+        try:
+            severity = IssueSeverity(severity_value)
+        except ValueError:
+            severity = IssueSeverity.High
+
+        # Get CVSS fields
+        cvss_fields = self._get_cvss_score(item)
+
+        # Get data source information
+        data_source = item.get("DataSource", {})
+        plugin_name = data_source.get("Name", self.title)
+        plugin_id = data_source.get("ID", self.title)
+
+        metadata = data.get("Metadata", {})
+        os_family = metadata.get("OS", {}).get("Family", "")
+        os_name = metadata.get("OS", {}).get("Name", "")
+        if os_family and os_name == "unknown":
+            affected_os = "unknown"
+        else:
+            affected_os = f"{os_family} {os_name}"
+
+        # Set image digest from artifact name
+        artifact_name = data.get("ArtifactName", "")
+        image_digest = ""
+        if "@" in artifact_name:
+            image_digest = artifact_name.split("@")[1]
+
+        build_version = (
+            metadata.get("ImageConfig", {}).get("config", {}).get("Labels", {}).get("io.buildah.version", "")
+        )
+        pkg_name = item.get("PkgName", "")
+        cve = item.get("VulnerabilityID", "")
+
+        # Create and return the finding
+        return IntegrationFinding(
+            title=f"{cve}: {pkg_name}" if cve else pkg_name,
+            description=item.get("Description", "No description available"),
+            severity=severity,
+            status=IssueStatus.Open,
+            cvss_v3_score=cvss_fields.get("V3Score"),
+            cvss_v3_vector=cvss_fields.get("V3Vector") or "",
+            cvss_v2_score=cvss_fields.get("V2Score"),
+            cvss_v2_vector=cvss_fields.get("V2Vector") or "",
+            plugin_name=plugin_name,
+            plugin_id=plugin_id,
+            asset_identifier=asset_identifier,
+            cve=cve,
+            first_seen=scan_date,
+            last_seen=scan_date,
+            scan_date=scan_date,
+            date_created=scan_date,
+            category="Software",
+            control_labels=[],
+            installed_versions=item.get("InstalledVersion", ""),
+            affected_os=affected_os,
+            affected_packages=item.get("PkgID", ""),
+            image_digest=image_digest,
+            package_path=item.get("PkgIdentifier", {}).get("PURL", ""),
+            build_version=build_version,
+            fixed_versions=item.get("FixedVersion", ""),
+            fix_status=item.get("Status", ""),
+        )
+
+    @staticmethod
+    def _get_cvss_score(finding: Dict) -> dict:
+        """
+        Get the CVSS v3 and v2 scores and vectors from the cvss data.
+
+        :param Dict finding: The cvss data
+        :return: The CVSS fields
+        :rtype: dict
+        """
+        values = {
+            "V3Score": None,
+            "V2Score": None,
+            "V3Vector": None,
+            "V2Vector": None,
+        }
+
+        if cvs := finding.get("CVSS"):
+            if nvd := cvs.get("nvd"):
+                values["V3Score"] = nvd.get("V3Score", None)
+                values["V3Vector"] = nvd.get("V3Vector", None)
+                values["V2Score"] = nvd.get("V2Score", None)
+                values["V2Vector"] = nvd.get("V2Vector", None)
+            elif redhat := cvs.get("redhat"):
+                values["V3Score"] = redhat.get("V3Score", None)
+                values["V3Vector"] = redhat.get("V3Vector", None)
+                values["V2Score"] = redhat.get("V2Score", None)
+                values["V2Vector"] = redhat.get("V2Vector", None)
+            elif ghsa := cvs.get("ghsa"):
+                values["V3Score"] = ghsa.get("V3Score", None)
+                values["V3Vector"] = ghsa.get("V3Vector", None)
+                values["V2Score"] = ghsa.get("V2Score", None)
+                values["V2Vector"] = ghsa.get("V2Vector", None)
+            elif bitnami := cvs.get("bitnami"):
+                values["V3Score"] = bitnami.get("V3Score", None)
+                values["V3Vector"] = bitnami.get("V3Vector", None)
+                values["V2Score"] = bitnami.get("V2Score", None)
+                values["V2Vector"] = bitnami.get("V2Vector", None)
+        return values
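
A small, self-contained sketch of how _get_cvss_score walks a Trivy finding's CVSS block, preferring nvd, then redhat, ghsa, and bitnami. The sample finding is fabricated, and the loop is a condensed equivalent of the if/elif chain above rather than the package's exact code.

```python
# Condensed, runnable equivalent of TrivyIntegration._get_cvss_score: take the V2/V3
# scores and vectors from the first CVSS source present, in nvd -> redhat -> ghsa ->
# bitnami order. The sample finding below is made up for illustration.
from typing import Dict, Optional


def pick_cvss(finding: Dict) -> Dict[str, Optional[object]]:
    values: Dict[str, Optional[object]] = {"V3Score": None, "V2Score": None, "V3Vector": None, "V2Vector": None}
    cvss = finding.get("CVSS") or {}
    for source in ("nvd", "redhat", "ghsa", "bitnami"):
        if data := cvss.get(source):
            for key in values:
                values[key] = data.get(key)
            break
    return values


sample_finding = {
    "VulnerabilityID": "CVE-2024-0001",
    "PkgName": "openssl",
    "Severity": "HIGH",
    "CVSS": {
        "redhat": {
            "V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H",
            "V3Score": 7.5,
        }
    },
}

# Only the redhat entry exists here, so its V3 score/vector are returned and V2 stays None.
print(pick_cvss(sample_finding))
```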

regscale/integrations/commercial/veracode.py
@@ -83,7 +83,7 @@ def import_veracode_data(
     FlatFileImporter.import_files(
         import_type=Veracode,
         import_name="Veracode",
-        file_types=[".xml", ".xlsx"],
+        file_types=[".xml", ".xlsx", ".json"],
         folder_path=folder_path,
         regscale_ssp_id=regscale_ssp_id,
         scan_date=scan_date,