regscale-cli 6.16.4.0__py3-none-any.whl → 6.17.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of regscale-cli might be problematic.

Files changed (32)
  1. regscale/__init__.py +1 -1
  2. regscale/core/app/api.py +4 -1
  3. regscale/core/app/utils/regscale_utils.py +2 -3
  4. regscale/dev/code_gen.py +10 -7
  5. regscale/integrations/commercial/aws/inventory/base.py +0 -2
  6. regscale/integrations/commercial/durosuite/api.py +20 -9
  7. regscale/integrations/commercial/opentext/scanner.py +2 -2
  8. regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +40 -21
  9. regscale/integrations/commercial/sap/tenable/scanner.py +41 -15
  10. regscale/integrations/commercial/sicura/api.py +9 -1
  11. regscale/integrations/commercial/synqly/edr.py +84 -0
  12. regscale/integrations/commercial/tenablev2/click.py +20 -2
  13. regscale/integrations/commercial/tenablev2/scanner.py +1 -1
  14. regscale/integrations/scanner_integration.py +84 -30
  15. regscale/models/integration_models/cisa_kev_data.json +100 -10
  16. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  17. regscale/models/integration_models/synqly_models/connectors/__init__.py +1 -0
  18. regscale/models/integration_models/synqly_models/connectors/edr.py +137 -0
  19. regscale/models/integration_models/synqly_models/ocsf_mapper.py +61 -11
  20. regscale/models/integration_models/synqly_models/synqly_model.py +8 -5
  21. regscale/models/regscale_models/file.py +3 -1
  22. regscale/models/regscale_models/master_assessment.py +127 -0
  23. regscale/models/regscale_models/regscale_model.py +2 -4
  24. regscale/models/regscale_models/risk.py +26 -31
  25. regscale/models/regscale_models/supply_chain.py +5 -5
  26. regscale/regscale.py +2 -0
  27. {regscale_cli-6.16.4.0.dist-info → regscale_cli-6.17.0.0.dist-info}/METADATA +1 -1
  28. {regscale_cli-6.16.4.0.dist-info → regscale_cli-6.17.0.0.dist-info}/RECORD +32 -29
  29. {regscale_cli-6.16.4.0.dist-info → regscale_cli-6.17.0.0.dist-info}/LICENSE +0 -0
  30. {regscale_cli-6.16.4.0.dist-info → regscale_cli-6.17.0.0.dist-info}/WHEEL +0 -0
  31. {regscale_cli-6.16.4.0.dist-info → regscale_cli-6.17.0.0.dist-info}/entry_points.txt +0 -0
  32. {regscale_cli-6.16.4.0.dist-info → regscale_cli-6.17.0.0.dist-info}/top_level.txt +0 -0
regscale/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "6.16.4.0"
+ __version__ = "6.17.0.0"
regscale/core/app/api.py CHANGED
@@ -33,6 +33,7 @@ class Api:
  app: "Application"
  _retry_log: str = "Retrying request with new token."
  _no_res_text: str = "No response text available"
+ _ssl_warning_displayed: bool = False

  def __init__(
  self,
@@ -59,7 +60,9 @@ class Api:
  self.logger = logging.getLogger("regscale")
  self.verify = ScannerVariables.sslVerify
  if not self.verify:
- self.logger.warning("SSL Verification has been disabled.")
+ if not Api._ssl_warning_displayed:
+ self.logger.warning("SSL Verification has been disabled.")
+ Api._ssl_warning_displayed = True
  r_session.verify = False
  disable_warnings(InsecureRequestWarning)
  if self.config and "timeout" in self.config:
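Note (illustrative, not part of the package diff): the api.py change above adds a class-level flag so the "SSL Verification has been disabled" warning is logged once per process instead of once per Api instance. A minimal standalone sketch of the same warn-once pattern, using hypothetical names:

    import logging

    logger = logging.getLogger("example")


    class Client:
        # Class-level flag shared by every instance, so the warning fires at most once per process.
        _ssl_warning_displayed: bool = False

        def __init__(self, verify_ssl: bool = True) -> None:
            self.verify = verify_ssl
            if not self.verify and not Client._ssl_warning_displayed:
                logger.warning("SSL Verification has been disabled.")
                Client._ssl_warning_displayed = True


    # Only the first insecure client triggers the warning.
    Client(verify_ssl=False)
    Client(verify_ssl=False)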
regscale/core/app/utils/regscale_utils.py CHANGED
@@ -6,6 +6,7 @@
  import json
  import os
  import re
+ import warnings
  from typing import Any, Optional
  from urllib.parse import urljoin

@@ -17,8 +18,6 @@ from regscale.core.app.logz import create_logger
  from regscale.core.app.utils.app_utils import convert_to_string, error_and_exit, get_file_name, get_file_type
  from regscale.models import Data
  from regscale.models.regscale_models.modules import Modules
- import warnings
-

  logger = create_logger()

@@ -165,7 +164,7 @@ def get_all_from_module(api: Api, module: str, timeout: int = 300) -> list[dict]
  elif module == "risks":
  from regscale.models.regscale_models.risk import Risk

- all_risks = Risk().fetch_all_risks(api.app)
+ all_risks = Risk().fetch_all_risks()
  regscale_data = [risk.dict() for risk in all_risks]
  else:
  logger.warning(
regscale/dev/code_gen.py CHANGED
@@ -8,7 +8,7 @@ if TYPE_CHECKING:
  from regscale.models.integration_models.synqly_models.connector_types import ConnectorType
  from regscale.models.integration_models.synqly_models.param import Param

- SUPPORTED_CONNECTORS = [ConnectorType.Ticketing, ConnectorType.Vulnerabilities, ConnectorType.Assets]
+ SUPPORTED_CONNECTORS = [ConnectorType.Ticketing, ConnectorType.Vulnerabilities, ConnectorType.Assets, ConnectorType.Edr]


  def generate_dags() -> None:
@@ -136,7 +136,10 @@ def _build_description(configs: dict[str, dict], connector: str) -> str:
  and "download_attachment" in capabilities
  ):
  sync_attachments.append(integration_name)
- description = f"Sync {connector.capitalize()} data between {', '.join(integrations)} and RegScale data."
+ if connector.lower() == ConnectorType.Edr.lower():
+ description = f"Sync endpoints, apps, and alerts data between {', '.join(integrations)} and RegScale data."
+ else:
+ description = f"Sync {connector.capitalize()} data between {', '.join(integrations)} and RegScale data."
  if sync_attachments:
  description += f" You are also able to sync attachments between {', '.join(sync_attachments)} and RegScale."
  return description
@@ -395,14 +398,14 @@ def _build_all_params(integration_name: str, connector: str) -> tuple[list[str],
  "scan_date=scan_date",
  "all_scans=all_scans",
  ]
- elif connector == ConnectorType.Assets:
- click_options = ["@regscale_ssp_id()"]
- function_params = ["regscale_ssp_id: int"]
- function_kwargs = ["regscale_ssp_id=regscale_ssp_id"]
- else:
+ elif connector == ConnectorType.Ticketing:
  click_options = ["@regscale_id()", "@regscale_module()"]
  function_params = ["regscale_id: int", "regscale_module: str"]
  function_kwargs = ["regscale_id=regscale_id", "regscale_module=regscale_module"]
+ else:
+ click_options = ["@regscale_ssp_id()"]
+ function_params = ["regscale_ssp_id: int"]
+ function_kwargs = ["regscale_ssp_id=regscale_ssp_id"]
  return click_options, function_params, function_kwargs

regscale/integrations/commercial/aws/inventory/base.py CHANGED
@@ -1,7 +1,5 @@
  """Base classes for AWS resource collection."""

- import datetime
- import json
  import logging
  from typing import Any, Dict, TYPE_CHECKING

regscale/integrations/commercial/durosuite/api.py CHANGED
@@ -13,6 +13,8 @@ from pydantic import BaseModel, Field
  from requests.exceptions import Timeout, ConnectionError as RequestsConnectionError

  logger = logging.getLogger("regscale")
+ API_DEVICES = "/api/devices"
+ API_GROUPS = "/api/groups"


  def retry_with_backoff(retries=3, backoff_in_seconds=1):
@@ -201,6 +203,8 @@ class DuroSuite:
  :return: Response data or None if request failed
  :rtype: Optional[Union[dict, str]]
  """
+ from regscale.integrations.variables import ScannerVariables
+
  url = urljoin(self.base_url, endpoint)
  headers = {}
  if self.api_key:
@@ -213,7 +217,14 @@ class DuroSuite:

  try:
  response = requests.request(
- method, url, headers=headers, json=data, params=params, verify=True, timeout=60, files=files
+ method,
+ url,
+ headers=headers,
+ json=data,
+ params=params,
+ verify=ScannerVariables.sslVerify,
+ timeout=60,
+ files=files,
  )

  if response.status_code == 403:
@@ -229,7 +240,7 @@ class DuroSuite:
  logger.error(f"Resource not found: {url}")
  try:
  return response.json()
- except: # noqa: E722
+ except requests.exceptions.JSONDecodeError: # noqa: E722
  response.raise_for_status()
  else:
  response.raise_for_status()
@@ -455,7 +466,7 @@ class DuroSuite:
  :rtype: List[Device]
  """
  try:
- response = self._make_request("GET", "/api/devices")
+ response = self._make_request("GET", API_DEVICES)
  # Handle 404 "No devices available" response
  if not response or (isinstance(response, dict) and "detail" in response):
  logger.debug(f"No devices found: {response}")
@@ -473,7 +484,7 @@ class DuroSuite:
  :return: API response
  :rtype: Optional[Dict[str, Any]]
  """
- return self._make_request("PUT", "/api/devices", params=device_data.model_dump())
+ return self._make_request("PUT", API_DEVICES, params=device_data.model_dump())

  def add_new_device(self, device_data: Dict[str, Any]) -> Optional[Device]:
  """
@@ -494,7 +505,7 @@ class DuroSuite:
  logger.debug(f"Creating device with data: {request_data}")

  # Create device first
- response = self._make_request("POST", "/api/devices", data=request_data)
+ response = self._make_request("POST", API_DEVICES, data=request_data)

  if not response:
  logger.error("Failed to create device - empty response")
@@ -516,7 +527,7 @@ class DuroSuite:
  self._make_request("POST", "/api/devices/vars", data=var_data)

  # Get all devices and find our newly created one
- all_devices = self._make_request("GET", "/api/devices")
+ all_devices = self._make_request("GET", API_DEVICES)
  if not all_devices:
  logger.error("Failed to get devices list")
  return None
@@ -636,7 +647,7 @@ class DuroSuite:
  :rtype: List[Group]
  """
  try:
- response = self._make_request("GET", "/api/groups")
+ response = self._make_request("GET", API_GROUPS)
  if not response:
  return []

@@ -670,7 +681,7 @@ class DuroSuite:
  :return: API response
  :rtype: Optional[Dict[str, Any]]
  """
- return self._make_request("PUT", "/api/groups", params=group_data)
+ return self._make_request("PUT", API_GROUPS, params=group_data)

  def add_new_group(self, group_data: Dict[str, Any]) -> Optional[Group]:
  """
@@ -689,7 +700,7 @@ class DuroSuite:

  logger.debug(f"Creating group with data: {request_data}")

- response = self._make_request("POST", "/api/groups", data=request_data)
+ response = self._make_request("POST", API_GROUPS, data=request_data)

  if not response:
  logger.error("Failed to create group - empty response")
regscale/integrations/commercial/opentext/scanner.py CHANGED
@@ -14,7 +14,7 @@ from typing import Any, Dict, List, Optional, Union, Tuple, cast, Iterator, Set

  from pathlib import Path

- from regscale.core.app.utils.app_utils import check_license
+ from regscale.core.app.utils.app_utils import check_license, get_current_datetime
  from regscale.core.app.utils.file_utils import find_files, read_file
  from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
  from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding, issue_due_date
@@ -486,7 +486,7 @@ class WebInspectIntegration(JSONLScannerIntegration):
  severity = IssueSeverity.High

  if self.scan_date is None:
- self.scan_date = self.file_date
+ self.scan_date = self.file_date or get_current_datetime()

  title = item.get("Name", "")
  plugin_id = item.get("VulnerabilityID", "")
regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py CHANGED
@@ -1,19 +1,20 @@
  import csv
  import logging
- from typing import Any, Dict, Iterator, Tuple
+ from typing import Any, Dict, Iterator, Tuple, Optional

  from regscale.core.app.utils.app_utils import get_current_datetime
- from regscale.core.app.utils.parser_utils import safe_datetime_str, safe_float
+ from regscale.core.app.utils.parser_utils import safe_float
  from regscale.integrations.scanner_integration import (
  IntegrationAsset,
  IntegrationFinding,
  ScannerIntegration,
- issue_due_date,
  )
  from regscale.integrations.variables import ScannerVariables
  from regscale.models import regscale_models

  logger = logging.getLogger("regscale")
+ IMAGE_NAME = "Image name"
+ IMAGE_TAG = "Image tag"


  class SAPConcurSysDigScanner(ScannerIntegration):
@@ -26,40 +27,59 @@ class SAPConcurSysDigScanner(ScannerIntegration):
  "low": regscale_models.IssueSeverity.Low,
  }

- def parse_assets(self, asset: Dict[str, Any]) -> IntegrationAsset:
+ def parse_assets(self, asset: Dict[str, Any]) -> Optional[IntegrationAsset]:
  """
  Parse a single asset from the vulnerability data.

  :param Dict[str, Any] asset: A dictionary containing the asset data
- :return: An IntegrationAsset object with parsed data
- :rtype: IntegrationAsset
+ :return: An IntegrationAsset object with parsed data, or None if the asset doesn't have an identifier or name
+ :rtype: Optional[IntegrationAsset]
  """
  name = (
- asset.get("Image name", None) + ":" + asset.get("Image tag", None)
- if (asset.get("Image name") and asset.get("Image tag"))
+ asset.get(IMAGE_NAME, None) + ":" + asset.get(IMAGE_TAG, None)
+ if (asset.get(IMAGE_NAME) and asset.get(IMAGE_TAG))
  else None
  )
- return IntegrationAsset(
- name=name,
- identifier=name
+ identifier = (
+ name
  or asset.get("Container name")
  or asset.get("Cluster name")
  or asset.get("Pod")
- or asset.get("Namespace"),
+ or asset.get("Namespace")
+ )
+ if name is None or identifier is None:
+ return None
+ return IntegrationAsset(
+ name=name,
+ identifier=identifier,
  asset_type="Other", # Sysdig primarily concerns itself with containers
  asset_category=regscale_models.AssetCategory.Hardware,
  asset_owner_id=ScannerVariables.userId,
- status="Active (On Network)",
+ status=regscale_models.AssetStatus.Active,
  mac_address="",
  fqdn="",
  ip_address="",
- operating_system="",
+ operating_system=None,
  aws_identifier="",
  vlan_id="",
  location="",
  software_inventory=[],
  )

+ @staticmethod
+ def _get_row_count(**kwargs) -> int:
+ """
+ Get the number of rows in the CSV file.
+
+ :param kwargs: Arbitrary keyword arguments
+ :return: The number of rows in the CSV file
+ :rtype: int
+ """
+ if path := kwargs.get("path"):
+ with open(path, "r", newline="") as csvfile:
+ reader = csv.DictReader(csvfile)
+ return sum(1 for _ in reader)
+
  def fetch_assets(self, *args: Tuple, **kwargs: dict) -> Iterator[IntegrationAsset]:
  """
  Fetch assets from a CSV file and yield IntegrationAsset objects.
@@ -74,12 +94,12 @@ class SAPConcurSysDigScanner(ScannerIntegration):
  raise ValueError("Path is required")

  logger.info(f"Fetching assets from {path}")
- self.num_assets_to_process = 0
+ self.num_assets_to_process = self._get_row_count(path=path)
  with open(path, "r", newline="") as csvfile:
  reader = csv.DictReader(csvfile)
  for row in reader:
- self.num_assets_to_process += 1
- yield self.parse_assets(row)
+ if asset := self.parse_assets(row):
+ yield asset

  def fetch_findings(self, *args: Tuple, **kwargs: dict) -> Iterator[IntegrationFinding]:
  """
@@ -96,11 +116,10 @@ class SAPConcurSysDigScanner(ScannerIntegration):

  logger.info(f"Fetching findings from {path}")

- self.num_findings_to_process = 0
+ self.num_findings_to_process = self._get_row_count(path=path)
  with open(path, "r", newline="") as csvfile:
  reader = csv.DictReader(csvfile)
  for row in reader:
- self.num_findings_to_process += 1
  yield from self.parse_findings(finding=row, kwargs=kwargs)

  def parse_findings(self, finding: Dict[str, Any], **kwargs: dict) -> Iterator[IntegrationFinding]:
@@ -136,8 +155,8 @@ class SAPConcurSysDigScanner(ScannerIntegration):
  :rtype: IntegrationFinding
  """
  asset_name = (
- finding.get("Image name", None) + ":" + finding.get("Image tag", None)
- if (finding.get("Image name") and finding.get("Image tag"))
+ finding.get(IMAGE_NAME, None) + ":" + finding.get(IMAGE_TAG, None)
+ if (finding.get(IMAGE_NAME) and finding.get(IMAGE_TAG))
  else None
  )
  asset_id = (
regscale/integrations/commercial/sap/tenable/scanner.py CHANGED
@@ -1,7 +1,12 @@
+ """
+ SAP Concur flat file Scanner Integration
+ """
+
  import csv
  import logging
- from typing import Any, Dict, Iterator, Tuple
+ from typing import Any, Dict, Iterator, Optional, Tuple

+ from regscale.core.app.utils.app_utils import error_and_exit
  from regscale.core.app.utils.parser_utils import safe_datetime_str, safe_float, safe_int
  from regscale.integrations.scanner_integration import (
  IntegrationAsset,
@@ -18,6 +23,7 @@ IP_ADDRESS_ANONYMIZED = "IP Address (Anonymized)"
  DNS_NAME = "DNS Name"
  LAST_OBSERVED = "Last Observed"
  CVSS_V3_BASE_SCORE = "CVSS V3 Base Score"
+ FIRST_DISCOVERD = "First Discovered"


  class SAPConcurScanner(ScannerIntegration):
@@ -31,23 +37,30 @@ class SAPConcurScanner(ScannerIntegration):
  }

  @staticmethod
- def parse_assets(asset: Dict[str, Any]) -> IntegrationAsset:
+ def parse_assets(asset: Dict[str, Any]) -> Optional[IntegrationAsset]:
  """
  Parse a single asset from the vulnerability data.

  :param Dict[str, Any] asset: A dictionary containing the asset data
- :return: An IntegrationAsset object with parsed data
- :rtype: IntegrationAsset
+ :return: An IntegrationAsset object with parsed data, or None if the asset doesn't have an identifier or name
+ :rtype: Optional[IntegrationAsset]
  """
+ from regscale.models import AssetStatus
+
  ip_address = asset.get(IP_ADDRESS_ANONYMIZED, "")
  external_id = asset.get("Host ID") or ip_address # Use Host ID if available, otherwise use IP address
+ name = asset.get(DNS_NAME) or ip_address # Use Host Name if available, otherwise use IP address
+
+ if not name or not external_id:
+ logger.debug("Skipping asset due to missing name or external_id: %s", asset)
+ return None

  return IntegrationAsset(
  name=asset.get(DNS_NAME) or ip_address,
  identifier=external_id,
  asset_type="Server",
  asset_category="Infrastructure",
- status="Active (On Network)",
+ status=AssetStatus.Active,
  date_last_updated=safe_datetime_str(asset.get(LAST_OBSERVED)),
  ip_address=ip_address,
  mac_address=asset.get("MAC Address"),
@@ -75,6 +88,20 @@ class SAPConcurScanner(ScannerIntegration):
  notes=f"NetBIOS Name: {asset.get('NetBIOS Name', '')}, Repository: {asset.get('Repository', '')}",
  )

+ @staticmethod
+ def _get_row_count(**kwargs) -> int:
+ """
+ Get the number of rows in the CSV file.
+
+ :param kwargs: Arbitrary keyword arguments
+ :return: The number of rows in the CSV file
+ :rtype: int
+ """
+ if path := kwargs.get("path"):
+ with open(path, "r", newline="") as csvfile:
+ reader = csv.DictReader(csvfile)
+ return sum(1 for _ in reader)
+
  def fetch_assets(self, *args: Tuple, **kwargs: dict) -> Iterator[IntegrationAsset]:
  """
  Fetch assets from a CSV file and yield IntegrationAsset objects.
@@ -86,15 +113,15 @@ class SAPConcurScanner(ScannerIntegration):
  """
  path: str = kwargs.get("path", "")
  if not path:
- raise ValueError("Path is required")
+ error_and_exit("Path is required")

  logger.info(f"Fetching assets from {path}")
- self.num_assets_to_process = 0
+ self.num_assets_to_process = self._get_row_count(path=path)
  with open(path, "r", newline="") as csvfile:
  reader = csv.DictReader(csvfile)
  for row in reader:
- self.num_assets_to_process += 1
- yield self.parse_assets(row)
+ if asset := self.parse_assets(row):
+ yield asset

  def fetch_findings(self, *args: Tuple, **kwargs: dict) -> Iterator[IntegrationFinding]:
  """
@@ -107,15 +134,14 @@ class SAPConcurScanner(ScannerIntegration):
  """
  path: str = kwargs.get("path", "")
  if not path:
- raise ValueError("Path is required")
+ error_and_exit("Path is required")

  logger.info(f"Fetching findings from {path}")

- self.num_findings_to_process = 0
+ self.num_findings_to_process = self._get_row_count(path=path)
  with open(path, "r", newline="") as csvfile:
  reader = csv.DictReader(csvfile)
  for row in reader:
- self.num_assets_to_process += 1
  yield from self.parse_findings(row)

  def parse_findings(self, finding: Dict[str, Any]) -> Iterator[IntegrationFinding]:
@@ -159,7 +185,7 @@ class SAPConcurScanner(ScannerIntegration):
  description=finding.get("Description", ""),
  status=regscale_models.IssueStatus.Open,
  priority=finding.get("Vulnerability Priority Rating", "Medium"),
- first_seen=safe_datetime_str(finding.get("First Discovered")),
+ first_seen=safe_datetime_str(finding.get(FIRST_DISCOVERD)),
  last_seen=safe_datetime_str(finding.get(LAST_OBSERVED)),
  cve=cve,
  cvss_v3_score=safe_float(finding.get(CVSS_V3_BASE_SCORE)),
@@ -169,9 +195,9 @@ class SAPConcurScanner(ScannerIntegration):
  dns=finding.get(DNS_NAME),
  issue_title=f"Vulnerability {finding.get('Plugin Name')} found",
  issue_type="Risk",
- date_created=safe_datetime_str(finding.get("First Discovered")),
+ date_created=safe_datetime_str(finding.get(FIRST_DISCOVERD)),
  date_last_updated=safe_datetime_str(finding.get(LAST_OBSERVED)),
- due_date=issue_due_date(severity=severity, created_date=safe_datetime_str(finding.get("First Discovered"))),
+ due_date=issue_due_date(severity=severity, created_date=safe_datetime_str(finding.get(FIRST_DISCOVERD))),
  external_id=finding.get("Plugin"),
  gaps="",
  observations="",
regscale/integrations/commercial/sicura/api.py CHANGED
@@ -13,6 +13,8 @@ from urllib.parse import urljoin, urlencode
  import requests
  from pydantic import BaseModel, Field, RootModel
  from requests.exceptions import Timeout, ConnectionError as RequestsConnectionError
+ from urllib3 import disable_warnings
+ from urllib3.exceptions import InsecureRequestWarning

  from regscale.integrations.commercial.sicura.variables import SicuraVariables

@@ -207,9 +209,15 @@ class SicuraAPI:
  Initialize Sicura API client.

  """
+ from regscale.integrations.variables import ScannerVariables
+
  self.base_url = SicuraVariables.sicuraURL.rstrip("/")
  self.session = requests.Session()
- self.csrf_token = None
+ self.verify = ScannerVariables.sslVerify
+ if not self.verify:
+ logger.warning("SSL Verification has been disabled for Sicura API requests.")
+ self.session.verify = False
+ disable_warnings(InsecureRequestWarning)

  @retry_with_backoff(retries=3, backoff_in_seconds=1)
  def _make_request(
regscale/integrations/commercial/synqly/edr.py CHANGED
@@ -0,0 +1,84 @@
+ # flake8: noqa E501
+ # pylint: disable=line-too-long
+
+ """Edr connector commands for the RegScale CLI"""
+
+ import click
+ from regscale.models import regscale_ssp_id
+
+
+ @click.group()
+ def edr() -> None:
+ """Edr connector commands for the RegScale CLI"""
+ pass
+
+
+ @edr.command(name="sync_crowdstrike")
+ @regscale_ssp_id()
+ @click.option(
+ "--url",
+ type=click.STRING,
+ help="The root domain where your CrowdStrike Falcon tenant is located.",
+ required=False,
+ )
+ def sync_crowdstrike(regscale_ssp_id: int, url: str) -> None:
+ """Sync Edr from Crowdstrike to RegScale."""
+ from regscale.models.integration_models.synqly_models.connectors import Edr
+
+ edr_crowdstrike = Edr("crowdstrike")
+ edr_crowdstrike.run_sync(regscale_ssp_id=regscale_ssp_id, url=url)
+
+
+ @edr.command(name="sync_defender")
+ @regscale_ssp_id()
+ def sync_defender(regscale_ssp_id: int) -> None:
+ """Sync Edr from Defender to RegScale."""
+ from regscale.models.integration_models.synqly_models.connectors import Edr
+
+ edr_defender = Edr("defender")
+ edr_defender.run_sync(regscale_ssp_id=regscale_ssp_id)
+
+
+ @edr.command(name="sync_malwarebytes")
+ @regscale_ssp_id()
+ @click.option(
+ "--url",
+ type=click.STRING,
+ help="URL for the Malwarebytes EDR Provider",
+ required=False,
+ )
+ def sync_malwarebytes(regscale_ssp_id: int, url: str) -> None:
+ """Sync Edr from Malwarebytes to RegScale."""
+ from regscale.models.integration_models.synqly_models.connectors import Edr
+
+ edr_malwarebytes = Edr("malwarebytes")
+ edr_malwarebytes.run_sync(regscale_ssp_id=regscale_ssp_id, url=url)
+
+
+ @edr.command(name="sync_sentinelone")
+ @regscale_ssp_id()
+ def sync_sentinelone(regscale_ssp_id: int) -> None:
+ """Sync Edr from Sentinelone to RegScale."""
+ from regscale.models.integration_models.synqly_models.connectors import Edr
+
+ edr_sentinelone = Edr("sentinelone")
+ edr_sentinelone.run_sync(regscale_ssp_id=regscale_ssp_id)
+
+
+ @edr.command(name="sync_sophos")
+ @regscale_ssp_id()
+ @click.option(
+ "--url",
+ type=click.STRING,
+ help="Optional root domain where your Sophos tenant is located.",
+ required=False,
+ )
+ def sync_sophos(regscale_ssp_id: int, url: str) -> None:
+ """Sync Edr from Sophos to RegScale."""
+ from regscale.models.integration_models.synqly_models.connectors import Edr
+
+ edr_sophos = Edr("sophos")
+ edr_sophos.run_sync(regscale_ssp_id=regscale_ssp_id, url=url)
+
+
+ # pylint: enable=line-too-long
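Note (illustrative, not part of the package diff): each new Click command above simply builds an Edr connector for a provider name and calls run_sync. A sketch of the equivalent direct call, with a placeholder Security Plan ID and tenant URL:

    from regscale.models.integration_models.synqly_models.connectors import Edr

    # Provider name matches the CLI command, e.g. "crowdstrike", "defender", "sophos".
    edr_connector = Edr("crowdstrike")
    # regscale_ssp_id is the RegScale Security Plan ID; url is the optional tenant root domain.
    edr_connector.run_sync(regscale_ssp_id=123, url="https://example.crowdstrike.com")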
regscale/integrations/commercial/tenablev2/click.py CHANGED
@@ -46,6 +46,7 @@ from regscale.core.app.utils.app_utils import (
  check_license,
  create_progress_object,
  epoch_to_datetime,
+ error_and_exit,
  format_dict_to_html,
  get_current_datetime,
  regscale_string_to_epoch,
@@ -971,12 +972,13 @@ def gen_tsc(config: dict) -> "TenableSC":
  :return: Tenable client
  :rtype: "TenableSC"
  """
+ from restfly.errors import APIError
  from tenable.sc import TenableSC

  if not config:
  app = Application()
  config = app.config
- return TenableSC(
+ res = TenableSC(
  url=config["tenableUrl"],
  access_key=config["tenableAccessKey"],
  secret_key=config["tenableSecretKey"],
@@ -984,6 +986,11 @@ def gen_tsc(config: dict) -> "TenableSC":
  product=REGSCALE_CLI,
  build=__version__,
  )
+ try:
+ res.status.status()
+ except APIError:
+ error_and_exit("Unable to authenticate with Tenable SC. Please check your credentials.", False)
+ return res


  def gen_tio(config: dict) -> "TenableIO":
@@ -995,9 +1002,10 @@ def gen_tio(config: dict) -> "TenableIO":
  :rtype: "TenableIO"
  """

+ from restfly.errors import UnauthorizedError
  from tenable.io import TenableIO

- return TenableIO(
+ res = TenableIO(
  url=config["tenableUrl"],
  access_key=config["tenableAccessKey"],
  secret_key=config["tenableSecretKey"],
@@ -1006,6 +1014,16 @@
  product=REGSCALE_CLI,
  build=__version__,
  )
+ try:
+ # Check a quick API to make sure we have access
+ res.scans.list(last_modified=datetime.now())
+ except UnauthorizedError:
+ error_and_exit(
+ "Unable to authenticate with Tenable Vulnerability Management (IO). Please check your credentials.", False
+ )
+
+ return res
+

  def get_controls(catalog_id: int) -> List[Dict]:
  """