regscale-cli 6.21.1.0__py3-none-any.whl → 6.21.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (35)
  1. regscale/_version.py +1 -1
  2. regscale/core/app/application.py +8 -0
  3. regscale/integrations/commercial/__init__.py +8 -8
  4. regscale/integrations/commercial/import_all/import_all_cmd.py +2 -2
  5. regscale/integrations/commercial/microsoft_defender/__init__.py +0 -0
  6. regscale/integrations/commercial/{defender.py → microsoft_defender/defender.py} +38 -612
  7. regscale/integrations/commercial/microsoft_defender/defender_api.py +286 -0
  8. regscale/integrations/commercial/microsoft_defender/defender_constants.py +80 -0
  9. regscale/integrations/commercial/microsoft_defender/defender_scanner.py +168 -0
  10. regscale/integrations/commercial/qualys/__init__.py +24 -86
  11. regscale/integrations/commercial/qualys/containers.py +2 -0
  12. regscale/integrations/commercial/qualys/scanner.py +7 -2
  13. regscale/integrations/commercial/sonarcloud.py +110 -71
  14. regscale/integrations/commercial/wizv2/click.py +4 -1
  15. regscale/integrations/commercial/wizv2/data_fetcher.py +401 -0
  16. regscale/integrations/commercial/wizv2/finding_processor.py +295 -0
  17. regscale/integrations/commercial/wizv2/policy_compliance.py +1471 -204
  18. regscale/integrations/commercial/wizv2/policy_compliance_helpers.py +564 -0
  19. regscale/integrations/commercial/wizv2/scanner.py +4 -4
  20. regscale/integrations/compliance_integration.py +213 -61
  21. regscale/integrations/public/fedramp/fedramp_five.py +92 -7
  22. regscale/integrations/scanner_integration.py +27 -4
  23. regscale/models/__init__.py +1 -1
  24. regscale/models/integration_models/cisa_kev_data.json +79 -3
  25. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  26. regscale/models/regscale_models/issue.py +29 -9
  27. regscale/models/regscale_models/milestone.py +15 -14
  28. {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/METADATA +1 -1
  29. {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/RECORD +33 -28
  30. tests/regscale/test_authorization.py +0 -65
  31. tests/regscale/test_init.py +0 -96
  32. {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/LICENSE +0 -0
  33. {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/WHEEL +0 -0
  34. {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/entry_points.txt +0 -0
  35. {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/top_level.txt +0 -0
@@ -36,6 +36,7 @@ from regscale.integrations.scanner_integration import IntegrationAsset, Integrat
  from regscale.integrations.variables import ScannerVariables
  from regscale.models import Asset, Issue, Search, regscale_models
  from regscale.models.app_models.click import NotRequiredIf, regscale_ssp_id, save_output_to, ssp_or_component_id
+ from regscale.models.integration_models.ecr_models.data import Finding
  from regscale.models.integration_models.flat_file_importer import FlatFileImporter
  from regscale.models.integration_models.qualys import (
  Qualys,
@@ -73,9 +74,9 @@ def _prepare_qualys_params(include_tags: str, exclude_tags: str) -> dict:
  "show_asset_id": "1",
  "show_tags": "1",
  }
-
  if exclude_tags or include_tags:
  params["use_tags"] = "1"
+ params["tag_set_by"] = "name"
  if exclude_tags:
  params["tag_set_exclude"] = exclude_tags
  if include_tags:
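
The hunk above adds a `tag_set_by` parameter so the `tag_set_include`/`tag_set_exclude` values are interpreted as tag names rather than tag IDs. A minimal sketch of the parameter dict the helper now produces, assuming example tag names "prod" and "legacy" and reconstructing the surrounding keys from the removed import_total_cloud_data_from_qualys_api helper later in this diff (a sketch, not the exact function body):

    def prepare_qualys_params_sketch(include_tags: str, exclude_tags: str) -> dict:
        # reconstruction of _prepare_qualys_params based on the hunks in this diff
        params = {"action": "list", "show_asset_id": "1", "show_tags": "1"}
        if exclude_tags or include_tags:
            params["use_tags"] = "1"
            params["tag_set_by"] = "name"  # new in 6.21.2.1: filter tags by name
        if exclude_tags:
            params["tag_set_exclude"] = exclude_tags
        if include_tags:
            params["tag_set_include"] = include_tags
        return params

    print(prepare_qualys_params_sketch(include_tags="prod", exclude_tags="legacy"))
    # {'action': 'list', 'show_asset_id': '1', 'show_tags': '1', 'use_tags': '1',
    #  'tag_set_by': 'name', 'tag_set_exclude': 'legacy', 'tag_set_include': 'prod'}
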
@@ -179,12 +180,12 @@ def _log_asset_results(count, asset_ids):
  :param asset_ids: List of asset IDs
  """
  if count == 0:
- logger.warning("No assets were yielded from the JSONL file")
+ logger.warning("No assets were created/updated from the JSONL file")
  else:
  sample_ids = asset_ids[:5]
  truncation_indicator = ", ..." if len(asset_ids) > 5 else ""
  sample_ids_str = ", ".join(sample_ids)
- logger.debug(f"Yielded {count} assets to RegScale with IDs: {sample_ids_str}{truncation_indicator}")
+ logger.debug(f"Created/updated {count} assets to RegScale with IDs: {sample_ids_str}{truncation_indicator}")


  def _import_findings(integration, progress, finding_task):
@@ -270,6 +271,7 @@ class FindingProgressTracker:
  self.finding_task = finding_task
  self.count = 0
  self.finding_ids = []
+ self.output_final_log: bool = False

  def __iter__(self):
  return self
@@ -284,22 +286,22 @@ class FindingProgressTracker:
  return finding
  except StopIteration:
  self._log_finding_results()
- raise
+ return
  except Exception as e:
- logger.error(f"Error yielding findings: {str(e)}")
- logger.debug(f"Findings yielded before error: {self.count}")
- raise
+ logger.debug(f"Findings created/updated before error: {self.count}")
+ error_and_exit(f"Error creating/updating findings: {str(e)}")

  def _log_finding_results(self):
  """Log the results of finding processing."""
  if self.count == 0:
- logger.warning("No findings were yielded from the JSONL file")
- else:
- logger.info(f"Yielded {self.count} findings to RegScale")
+ logger.warning("No findings were created/updated from the JSONL file")
+ elif not self.output_final_log:
+ logger.info(f"Created/Updated {self.count} findings to RegScale")
  sample_ids = self.finding_ids[:5]
  truncation_indicator = ", ..." if len(self.finding_ids) > 5 else ""
  sample_ids_str = ", ".join(sample_ids)
  logger.debug(f"Sample finding IDs: {sample_ids_str}{truncation_indicator}")
+ self.output_final_log = True


  @click.command(name="import_total_cloud")
@@ -318,7 +320,7 @@ class FindingProgressTracker:
  type=click.STRING,
  required=False,
  default=None,
- help="Exclude tags in the import comma separated string of tag names or ids, defaults to None.",
+ help="Exclude tags in the import comma separated string of tag names or ids, defaults to None. If used, --include_tags must also be provided.",
  )
  @click.option(
  "--vulnerability-creation",
@@ -368,10 +370,14 @@ def import_total_cloud(
  logger.warning("SSP #%i is not a valid RegScale Security Plan.", regscale_ssp_id)
  return
  else:
- raise click.UsageError(
+ error_and_exit(
  "You must provide either a --regscale_ssp_id or a --component_id to import Qualys Total Cloud data."
  )

+ # exclude tags must have include_tags
+ if exclude_tags and not include_tags:
+ error_and_exit("You must provide --include_tags when using --exclude_tags to import Qualys Total Cloud data.")
+
  containers_lst = []
  try:
  # Configure scanner variables and fetch data
@@ -382,7 +388,8 @@ def import_total_cloud(

  if containers:
  # Fetch containers and container findings
- containers_lst = fetch_all_vulnerabilities()
+ params = _prepare_qualys_params(include_tags, exclude_tags)
+ containers_lst = fetch_all_vulnerabilities(filters=params)

  # Initialize and run integration
  integration = _initialize_integration(
@@ -599,7 +606,7 @@ def import_total_cloud_from_xml(xml_file: str, regscale_ssp_id: int = None, comp
  logger.warning("SSP #%i is not a valid RegScale Security Plan.", regscale_ssp_id)
  return
  else:
- raise click.UsageError(
+ error_and_exit(
  "You must provide either a --regscale_ssp_id or a --component_id to import Qualys Total Cloud data."
  )

@@ -734,10 +741,10 @@ def _track_progress_generator(iterator, progress_bar, task_id, id_attribute=None
  elif id_attribute == "external_id":
  item_type = "findings"

- logger.debug(f"Yielded {processed} {item_type} to RegScale")
+ logger.debug(f"Created/updated {processed} {item_type} in RegScale")

  if processed == 0:
- logger.error(f"WARNING: No {item_type} were yielded to RegScale!")
+ logger.error(f"WARNING: No {item_type} were created/updated in RegScale!")
  elif item_ids:
  logger.debug(f"First 10 {item_type} IDs: {item_ids[:10]}")

@@ -978,7 +985,7 @@ def _resolve_plan_and_component(regscale_ssp_id: int = None, component_id: int =
  Returns (plan_id, is_component)
  """
  if (regscale_ssp_id is None and component_id is None) or (regscale_ssp_id and component_id):
- raise click.UsageError("You must provide either --regscale_ssp_id or --component_id, but not both.")
+ error_and_exit("You must provide either --regscale_ssp_id or --component_id, but not both.")
  is_component = component_id is not None
  plan_id = component_id if is_component else regscale_ssp_id
  return plan_id, is_component
@@ -1455,75 +1462,6 @@ def _get_qualys_api():
  return qualys_url, QUALYS_API


- def import_total_cloud_data_from_qualys_api(security_plan_id: int, include_tags: str, exclude_tags: str):
- """
- Function to get the total cloud data from Qualys API
-
- :param int security_plan_id: The ID of the plan to get the data for
- :param str include_tags: The tags to include in the data
- :param str exclude_tags: The tags to exclude from the data
- """
- try:
- qualys_url, QUALYS_API = _get_qualys_api()
- params = {
- "action": "list",
- "show_asset_id": "1",
- "show_tags": "1",
- }
- if exclude_tags or include_tags:
- params["use_tags"] = "1"
- if exclude_tags:
- params["tag_set_exclude"] = exclude_tags
- if include_tags:
- params["tag_set_include"] = include_tags
-
- logger.info("Fetching Qualys Total Cloud data...")
- response = QUALYS_API.get(
- url=urljoin(qualys_url, "/api/2.0/fo/asset/host/vm/detection/"),
- headers=HEADERS,
- params=params,
- )
-
- if response and response.ok:
- logger.info("Total cloud data fetched. processing...")
- response_data = xmltodict.parse(response.text)
-
- # Create artifacts directory if it doesn't exist
- os.makedirs("./artifacts", exist_ok=True)
-
- # Write the XML data to a temporary file
- temp_xml_file = "./artifacts/qualys_temp_data.xml"
- with open(temp_xml_file, "w") as f:
- f.write(response.text)
-
- logger.info(f"Saved Qualys response data to {temp_xml_file}")
-
- # Initialize the JSONLScannerIntegration implementation with the file path
- integration = QualysTotalCloudJSONLIntegration(
- plan_id=security_plan_id, xml_data=response_data, file_path=temp_xml_file
- )
-
- # Process assets and findings in one pass for efficiency
- assets_iterator, findings_iterator = integration.fetch_assets_and_findings()
-
- # Update RegScale with the processed data
- logger.info("Syncing assets to RegScale...")
- integration.update_regscale_assets(assets_iterator)
-
- logger.info("Syncing findings to RegScale...")
- integration.update_regscale_findings(findings_iterator)
-
- logger.info("Qualys Total Cloud data imported successfully.")
- else:
- logger.error(
- f"Received unexpected response from Qualys API: {response.status_code}: {response.text if response.text else 'response is null'}"
- )
- except Exception:
- error_message = traceback.format_exc()
- logger.error("Error occurred while processing Qualys data")
- logger.error(error_message)
-
-
  def get_scans_summary(scan_choice: str) -> dict:
  """
  Get all scans from Qualys Host
@@ -213,6 +213,8 @@ def _fetch_paginated_data(endpoint: str, filters: Optional[Dict] = None, limit:

  # Clear params for subsequent requests since they're in the URL
  params = {}
+ progress.update(task, total=len(all_items))
+ progress.update(task, completed=len(all_items))

  except Exception as e:
  logger.error("Error fetching data from %s: %s", current_url if current_url else "N/A", e)
@@ -1096,6 +1096,11 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
  first_found = self._convert_timestamp_to_date_str(first_found)
  last_found = self._convert_timestamp_to_date_str(last_found)

+ # sometimes cvss3Info is not a dict, so we ensure it is
+ cvs3_info = vuln.get("cvss3Info")
+ if not isinstance(cvs3_info, dict):
+ cvs3_info = {}
+
  cve = next(iter(vuln.get("cveids", [])), "")
  # Create finding object
  return IntegrationFinding(
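
The new isinstance guard handles Qualys records where `cvss3Info` is present but not a mapping (for example an empty string), which previously raised `AttributeError` on `.get`. A short illustration with made-up detection records:

    records = [
        {"cvss3Info": {"baseScore": 7.5, "temporalScore": 6.8}},
        {"cvss3Info": ""},  # present but not a dict: the old code crashed here
        {},                 # key missing entirely: already safe before this change
    ]
    for vuln in records:
        cvs3_info = vuln.get("cvss3Info")
        if not isinstance(cvs3_info, dict):
            cvs3_info = {}
        print(cvs3_info.get("baseScore"), cvs3_info.get("temporalScore"))
    # 7.5 6.8 / None None / None None
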
@@ -1109,8 +1114,8 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
  category="Vulnerability",
  plugin_name=cve or f"QID-{qid}",
  control_labels=[f"QID-{qid}"],
- cvss_v3_base_score=vuln.get("cvss3Info", {}).get("baseScore"),
- cvss_v3_vector=vuln.get("cvss3Info", {}).get("temporalScore"),
+ cvss_v3_base_score=cvs3_info.get("baseScore"),
+ cvss_v3_vector=cvs3_info.get("temporalScore"),
  first_seen=first_found,
  last_seen=last_found,
  evidence=vuln.get("result", "No evidence available"),
@@ -13,6 +13,7 @@ import requests # type: ignore
  from regscale.core.app.api import Api
  from regscale.core.app.application import Application
  from regscale.core.app.utils.app_utils import (
+ create_progress_object,
  days_between,
  error_and_exit,
  get_current_datetime,
@@ -24,12 +25,16 @@ from regscale.models.regscale_models.issue import Issue
  logger = logging.getLogger("regscale")


- def get_sonarcloud_results(config: dict, branch: Optional[str] = None) -> list[list[dict]]:
+ def get_sonarcloud_results(
+ config: dict, organization: Optional[str] = None, branch: Optional[str] = None, project_key: Optional[str] = None
+ ) -> list[list[dict]]:
  """
  Retrieve Sonarcloud Results from the Sonarcloud.io API

  :param dict config: RegScale CLI configuration
+ :param Optional[str] organization: Organization name to filter results, defaults to None
  :param Optional[str] branch: Branch name to filter results, defaults to None
+ :param Optional[str] project_key: SonarCloud Project Key, defaults to None
  :return: json response data from API GET request
  :rtype: list[list[dict]]
  """
@@ -44,10 +49,19 @@ def get_sonarcloud_results(config: dict, branch: Optional[str] = None) -> list[l
  "statuses": "OPEN, CONFIRMED, REOPENED",
  "ps": 500,
  }
+ if organization and project_key:
+ params["componentKeys"] = project_key
+ if organization:
+ params["organization"] = organization
  if branch:
  params["branch"] = branch
+ if project_key:
+ params["projectKeys"] = project_key
  # GET request pulls in data to check results size
+ logger.info("Fetching issues from SonarCloud/Qube...")
  r = requests.get(url, auth=(str(token), ""), params=params)
+ if r.status_code != 200:
+ error_and_exit(f"Sonarcloud API call failed with status code {r.status_code}: {r.reason}\n{r.text}")
  # if the status code does not equal 200
  if r and not r.ok:
  # exit the script gracefully
@@ -57,13 +71,12 @@ def get_sonarcloud_results(config: dict, branch: Optional[str] = None) -> list[l
  # find the total results number
  total = data["paging"]["total"]
  complete.extend(data.get("issues", []))
- logger.info(f"Found {total} issue(s) from SonarCloud/Qube.")
  # find the number of results in each result page
  size = data["paging"]["pageSize"]
  # calculate the number of pages to iterate through sequentially
  pages = math.ceil(total / size)
  # loop through each page number
- for i in range(1, pages + 1, 1):
+ for i in range(2, pages + 1, 1):
  # parameters to pass to the API call
  params["p"] = str(i)
  # for each page make a GET request to pull in the data
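
The loop now starts at page 2 because the initial request already appended page 1's issues to `complete`; under the old `range(1, pages + 1)` the first page was fetched and counted twice. The paging arithmetic, using an assumed response of 1,230 issues at 500 per page:

    import math

    paging = {"total": 1230, "pageSize": 500}  # hypothetical first response
    pages = math.ceil(paging["total"] / paging["pageSize"])
    print(pages, list(range(2, pages + 1)))  # 3 [2, 3] -- page 1 is already collected
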
@@ -79,56 +92,56 @@ def get_sonarcloud_results(config: dict, branch: Optional[str] = None) -> list[l
  return complete


- def build_data(api: Api, branch: Optional[str] = None) -> list[dict]:
+ def build_data(
+ api: Api, organization: Optional[str] = None, branch: Optional[str] = None, project_key: Optional[str] = None
+ ) -> list[dict]:
  """
  Build vulnerability alert data list
+
  :param Api api: API object
+ :param Optional[str] organization: Organization name to filter results, defaults to None
  :param Optional[str] branch: Branch name to filter results, defaults to None
+ :param Optional[str] project_key: SonarCloud Project Key, defaults to None
  :return: vulnerability data list
  :rtype: list[dict]
  """
  # execute GET request
- data = get_sonarcloud_results(config=api.config, branch=branch)
+ data = get_sonarcloud_results(config=api.config, organization=organization, branch=branch, project_key=project_key)
  # create empty list to hold json response dicts
  vulnerability_data_list = []
  # loop through the lists in API response data
- for result in data:
+ for issue in data:
  # loop through the list of dicts in the API response data
- for i, issue in enumerate(result):
- # format datetime stamp to use with days_between function
- create_date = issue["creationDate"][0:19] + "Z"
- # build vulnerability list
- vulnerability_data_list.append(
- {
- "key": issue["key"],
- "severity": issue["severity"],
- "component": issue["component"],
- "status": issue["status"],
- "message": issue["message"],
- "creationDate": issue["creationDate"][0:19],
- "updateDate": issue["updateDate"][0:19],
- "type": issue["type"],
- "days_elapsed": days_between(vuln_time=create_date),
- }
- )
+ # format datetime stamp to use with days_between function
+ create_date = issue["creationDate"][0:19] + "Z"
+ # build vulnerability list
+ vulnerability_data_list.append(
+ {
+ "key": issue["key"],
+ "severity": issue["severity"],
+ "component": issue["component"],
+ "status": issue["status"],
+ "message": issue["message"],
+ "creationDate": issue["creationDate"][0:19],
+ "updateDate": issue["updateDate"][0:19],
+ "type": issue["type"],
+ "days_elapsed": days_between(vuln_time=create_date),
+ }
+ )
  return vulnerability_data_list


- def build_dataframes(api: Api) -> str:
+ def build_dataframes(sonar_data: list[dict]) -> str:
  """
  Build pandas dataframes from vulnerability alert data list

- :param Api api: API object
+ :param list[dict] sonar_data: SonarCloud alerts and issues data
  :return: dataframe as an HTML table
  :rtype: str
  """
  import pandas as pd # Optimize import performance

- # create vulnerability data list
- vuln_data_list = build_data(api=api)
-
- # for vulnerability in vuln_data_list:
- df = pd.DataFrame(vuln_data_list)
+ df = pd.DataFrame(sonar_data)
  # sort dataframe by severity
  df.sort_values(by=["severity"], inplace=True)
  # reset and drop the index
@@ -139,10 +152,12 @@ def build_dataframes(api: Api) -> str:


  def create_alert_assessment(
- api: Api, parent_id: Optional[int] = None, parent_module: Optional[str] = None
+ sonar_data: list[dict], api: Api, parent_id: Optional[int] = None, parent_module: Optional[str] = None
  ) -> Optional[int]:
  """
  Create Assessment containing SonarCloud alerts
+
+ :param list[dict] sonar_data: SonarCloud alerts and issues data
  :param Api api: API object
  :param Optional[int] parent_id: Parent ID of the assessment, defaults to None
  :param Optional[str] parent_module: Parent module of the assessment, defaults to None
@@ -150,7 +165,7 @@ def create_alert_assessment(
  :rtype: Optional[int]
  """
  # create the assessment report HTML table
- df_output = build_dataframes(api)
+ df_output = build_dataframes(sonar_data)
  # build assessment model data
  assessment_data = Assessment(
  leadAssessorId=api.config["userId"],
@@ -169,11 +184,9 @@ def create_alert_assessment(
  if parent_id and parent_module:
  assessment_data.parentId = parent_id
  assessment_data.parentModule = parent_module
- # create vulnerability data list
- vuln_data_list = build_data(api)
  # if assessmentResult is changed to Pass / Fail then status has to be
  # changed to complete and a completion date has to be passed
- for vulnerability in vuln_data_list:
+ for vulnerability in sonar_data:
  if vulnerability["severity"] == "CRITICAL" and vulnerability["days_elapsed"] >= 10:
  assessment_data.status = "Complete"
  assessment_data.actualFinish = get_current_datetime()
@@ -182,60 +195,68 @@ def create_alert_assessment(
  # create a new assessment in RegScale
  if new_assessment := assessment_data.create():
  # log assessment creation result
- api.logger.info("Assessment was created successfully")
+ api.logger.debug("Assessment was created successfully")
  return new_assessment.id
  else:
- api.logger.info("Assessment was not created")
+ api.logger.debug("Assessment was not created")
  return None


  def create_alert_issues(
- parent_id: Optional[int] = None, parent_module: Optional[str] = None, branch: Optional[str] = None
+ parent_id: Optional[int] = None,
+ parent_module: Optional[str] = None,
+ organization: Optional[str] = None,
+ branch: Optional[str] = None,
+ project_key: Optional[str] = None,
  ) -> None:
  """
  Create child issues from the alert assessment
+
  :param Optional[int] parent_id: Parent ID record to associate the assessment to, defaults to None
  :param Optional[str] parent_module: Parent module to associate the assessment to, defaults to None
+ :param Optional[str] organization: Organization name to filter results, defaults to None
  :param Optional[str] branch: Branch name to filter results, defaults to None
+ :param Optional[str] project_key: SonarCloud Project Key, defaults to None
  :rtype: None
  """
  # set environment and application configuration
  app = Application()
  api = Api()
+ sonar_data = build_data(api=api, organization=organization, branch=branch, project_key=project_key)
  # execute POST request and return new assessment ID
- assessment_id = create_alert_assessment(api=api, parent_id=parent_id, parent_module=parent_module)
+ assessment_id = create_alert_assessment(
+ sonar_data=sonar_data, api=api, parent_id=parent_id, parent_module=parent_module
+ )

  # create vulnerability data list
- vuln_data_list = build_data(api, branch)
  # loop through each vulnerability alert in the list
- for vulnerability in vuln_data_list:
- # create issue model
- issue_data = Issue(
- title="Sonarcloud Code Scan", # Required
- dateCreated=get_current_datetime("%Y-%m-%dT%H:%M:%S"),
- description=vulnerability["message"],
- severityLevel=Issue.assign_severity(vulnerability["severity"]), # Required
- issueOwnerId=app.config["userId"], # Required
- dueDate=get_current_datetime(),
- identification="Code scan assessment",
- status="Open",
- assessmentId=assessment_id,
- createdBy=app.config["userId"],
- lastUpdatedById=app.config["userId"],
- dateLastUpdated=get_current_datetime(),
- parentId=assessment_id,
- parentModule="assessments",
- )
- # create assessment child issue via POST
- iss = api.post(
- f'{app.config["domain"]}/api/issues',
- json=issue_data.dict(),
- )
- # log issue creation result
- if iss.ok:
- logger.info("Issue created successfully.")
- else:
- logger.info("Issue was not created.")
+ with create_progress_object() as progress:
+ task = progress.add_task("Creating/updating issue(s) in RegScale...", total=len(sonar_data))
+ for vulnerability in sonar_data:
+ # create issue model
+ issue_data = Issue(
+ title="Sonarcloud Code Scan", # Required
+ dateCreated=get_current_datetime("%Y-%m-%dT%H:%M:%S"),
+ description=vulnerability["message"],
+ severityLevel=Issue.assign_severity(vulnerability["severity"]), # Required
+ dueDate=Issue.get_due_date(
+ severity=vulnerability["severity"].lower(), config=app.config, key="sonarcloud"
+ ),
+ identification="Code scan assessment",
+ status="Open",
+ assessmentId=assessment_id,
+ parentId=parent_id or assessment_id,
+ parentModule=parent_module or "assessments",
+ sourceReport="SonarCloud/Qube",
+ otherIdentifier=vulnerability["key"],
+ )
+ # log issue creation result
+ if issue_data.create_or_update(bulk_create=True, bulk_update=True):
+ logger.debug("Issue was created/updated successfully")
+ else:
+ logger.debug("Issue was not created.")
+ progress.advance(task)
+ Issue.bulk_save(progress)


  @click.group()
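
The rewritten loop replaces the per-issue POST to /api/issues with the model-layer bulk path: each Issue is queued through `create_or_update(bulk_create=True, bulk_update=True)` and written out once with `Issue.bulk_save(progress)`. A minimal usage sketch of that queue-then-flush pattern, using the method names exactly as they appear in this diff (not verified against the SDK, and with example alert data):

    from regscale.core.app.utils.app_utils import create_progress_object
    from regscale.models.regscale_models.issue import Issue

    alerts = [{"key": "AX-1", "severity": "MAJOR", "message": "Hard-coded credential"}]  # example data

    with create_progress_object() as progress:
        task = progress.add_task("Creating/updating issue(s) in RegScale...", total=len(alerts))
        for alert in alerts:
            issue = Issue(
                title="Sonarcloud Code Scan",
                description=alert["message"],
                severityLevel=Issue.assign_severity(alert["severity"]),
                status="Open",
                otherIdentifier=alert["key"],
            )
            # queues the record for a bulk write instead of issuing an immediate POST
            issue.create_or_update(bulk_create=True, bulk_update=True)
            progress.advance(task)
        Issue.bulk_save(progress)  # single bulk flush to RegScale
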
@@ -249,11 +270,29 @@ def sonarcloud() -> None:
  @sonarcloud.command(name="sync_alerts")
  @regscale_id(required=False, default=None)
  @regscale_module(required=False, default=None)
- @click.option("--branch", help="Branch name to filter results, defaults to None")
+ @click.option(
+ "--organization",
+ "-o",
+ type=click.STRING,
+ help="Organization name to filter results, defaults to None",
+ default=None,
+ )
+ @click.option("--branch", "-b", type=click.STRING, help="Branch name to filter results, defaults to None", default=None)
+ @click.option("--project_key", "-p", type=click.STRING, help="SonarCloud Project Key, defaults to None", default=None)
  def create_alerts(
- regscale_id: Optional[int] = None, regscale_module: Optional[str] = None, branch: Optional[str] = None
+ regscale_id: Optional[int] = None,
+ regscale_module: Optional[str] = None,
+ organization: Optional[str] = None,
+ branch: Optional[str] = None,
+ project_key: Optional[str] = None,
  ) -> None:
  """
  Create a child assessment and child issues in RegScale from SonarCloud alerts.
  """
- create_alert_issues(regscale_id, regscale_module, branch)
+ create_alert_issues(
+ parent_id=regscale_id,
+ parent_module=regscale_module,
+ organization=organization,
+ branch=branch,
+ project_key=project_key,
+ )
@@ -10,7 +10,7 @@ import click

  from regscale.integrations.commercial.wizv2.variables import WizVariables
  from regscale.models import regscale_id
- from regscale.models.app_models.click import regscale_ssp_id
+ from regscale.models.app_models.click import regscale_ssp_id, regscale_module

  logger = logging.getLogger("regscale")

@@ -337,6 +337,7 @@ def add_report_evidence(
  required=True,
  )
  @regscale_id(help="RegScale will create and update control assessments as children of this record.")
+ @regscale_module(required=True, default="securityplans", prompt=False)
  @click.option( # type: ignore
  "--client_id",
  "-i",
@@ -401,6 +402,7 @@ def add_report_evidence(
  def sync_compliance(
  wiz_project_id,
  regscale_id,
+ regscale_module,
  client_id,
  client_secret,
  framework_id,
@@ -463,6 +465,7 @@ def sync_compliance(
  client_id=client_id,
  client_secret=client_secret,
  framework_id=resolved_framework_id,
+ regscale_module=regscale_module,
  create_poams=create_poams,
  cache_duration_minutes=cache_duration,
  force_refresh=refresh,