regscale-cli 6.16.4.1__py3-none-any.whl → 6.17.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli might be problematic.
- regscale/__init__.py +1 -1
- regscale/core/app/api.py +4 -1
- regscale/core/app/utils/regscale_utils.py +2 -3
- regscale/dev/code_gen.py +10 -7
- regscale/integrations/commercial/aws/inventory/base.py +0 -2
- regscale/integrations/commercial/durosuite/api.py +20 -9
- regscale/integrations/commercial/opentext/scanner.py +2 -2
- regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +40 -21
- regscale/integrations/commercial/sap/tenable/scanner.py +41 -15
- regscale/integrations/commercial/sicura/api.py +9 -1
- regscale/integrations/commercial/synqly/edr.py +84 -0
- regscale/integrations/commercial/tenablev2/click.py +20 -2
- regscale/integrations/commercial/tenablev2/scanner.py +1 -1
- regscale/integrations/scanner_integration.py +80 -27
- regscale/models/integration_models/cisa_kev_data.json +100 -10
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/synqly_models/connectors/__init__.py +1 -0
- regscale/models/integration_models/synqly_models/connectors/edr.py +137 -0
- regscale/models/integration_models/synqly_models/ocsf_mapper.py +61 -11
- regscale/models/integration_models/synqly_models/synqly_model.py +8 -5
- regscale/models/regscale_models/file.py +3 -1
- regscale/models/regscale_models/master_assessment.py +127 -0
- regscale/models/regscale_models/regscale_model.py +2 -4
- regscale/models/regscale_models/risk.py +26 -31
- regscale/models/regscale_models/supply_chain.py +5 -5
- regscale/regscale.py +2 -0
- {regscale_cli-6.16.4.1.dist-info → regscale_cli-6.17.0.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.16.4.1.dist-info → regscale_cli-6.17.0.0.dist-info}/RECORD +32 -29
- {regscale_cli-6.16.4.1.dist-info → regscale_cli-6.17.0.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.16.4.1.dist-info → regscale_cli-6.17.0.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.16.4.1.dist-info → regscale_cli-6.17.0.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.16.4.1.dist-info → regscale_cli-6.17.0.0.dist-info}/top_level.txt +0 -0
regscale/models/integration_models/synqly_models/connectors/edr.py (new file)
@@ -0,0 +1,137 @@
+"""Vulnerabilities Connector Model"""
+
+from typing import Iterator, Optional
+
+from pydantic import ConfigDict
+
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding, ScannerIntegration
+from regscale.models.integration_models.synqly_models.synqly_model import SynqlyModel
+from regscale.models.regscale_models import IssueSeverity
+
+
+class EDRIntegration(ScannerIntegration):
+    title = "EDR Connector Integration"
+    # Required fields from ScannerIntegration
+    asset_identifier_field = "otherTrackingNumber"
+    finding_severity_map = {
+        "Critical": IssueSeverity.Critical,
+        "High": IssueSeverity.High,
+        "Medium": IssueSeverity.Moderate,
+        "Low": IssueSeverity.Low,
+    }
+
+    def fetch_assets(self, *args, **kwargs) -> Iterator[IntegrationAsset]:
+        """
+        Fetches assets from Synqly
+
+        :yields: Iterator[IntegrationAsset]
+        """
+        integration_assets = kwargs.get("integration_assets")
+        for asset in integration_assets:
+            yield asset
+
+    def fetch_findings(self, *args, **kwargs) -> Iterator[IntegrationFinding]:
+        """
+        Fetches findings from the Synqly
+
+        :yields: Iterator[IntegrationFinding]
+        """
+        integration_findings = kwargs.get("integration_findings")
+        for finding in integration_findings:
+            yield finding
+
+
+class Edr(SynqlyModel):
+    """Edr Connector Model"""
+
+    model_config = ConfigDict(populate_by_name=True, use_enum_values=True, arbitrary_types_allowed=True)
+
+    integration_id: str = ""
+    scanner_integration: Optional["EDRIntegration"] = None
+    provider: str = ""
+    can_fetch_apps: bool = False
+    can_fetch_alerts: bool = False
+    can_fetch_endpoints: bool = False
+
+    def __init__(self, integration: str, **kwargs):
+        SynqlyModel.__init__(self, connector_type=self.__class__.__name__, integration=integration, **kwargs)
+        self.integration_id = f"{self._connector_type.lower()}_{self.integration.lower()}"
+        integration_company = self.integration.split("_")[0] if "_" in self.integration else self.integration  # noqa
+        self.provider = integration_company
+        self.can_fetch_apps = "query_applications" in self.capabilities
+        self.can_fetch_alerts = "query_alerts" in self.capabilities
+        self.can_fetch_endpoints = "query_endpoints" in self.capabilities
+
+    def integration_sync(self, regscale_ssp_id: int, **kwargs) -> None:
+        """
+        Runs the integration sync process
+
+        :param int regscale_ssp_id: The RegScale SSP ID
+        :rtype: None
+        """
+        self.logger.info(f"Fetching alert data from {self.integration_name}...")
+        alerts = (
+            self.fetch_integration_data(
+                func=self.tenant.engine_client.edr.query_alerts,
+                **kwargs,
+            )
+            if self.can_fetch_alerts
+            else []
+        )
+        self.logger.info(f"Fetching application data from {self.integration_name}...")
+        apps = (
+            self.fetch_integration_data(func=self.tenant.engine_client.edr.query_applications, **kwargs)
+            if self.can_fetch_apps
+            else []
+        )
+        endpoints = (
+            self.fetch_integration_data(func=self.tenant.engine_client.edr.query_endpoints, **kwargs)
+            if self.can_fetch_endpoints
+            else []
+        )
+
+        edr_data = {"alert(s)": alerts, "app(s)": apps, "endpoint(s)": endpoints}
+        integration_assets: list[IntegrationAsset] = []
+        integration_findings: list[IntegrationFinding] = []
+
+        for name, data in edr_data.items():
+            if data:
+                self.logger.info(f"Mapping {len(data)} {self.provider} {name} to RegScale data...")
+                self.app.thread_manager.submit_tasks_from_list(
+                    func=self.mapper.to_regscale,
+                    items=data,
+                    args=None,
+                    connector=self,
+                    regscale_ssp_id=regscale_ssp_id,
+                    **kwargs,
+                )
+                if mapped_data := self.app.thread_manager.execute_and_verify(return_passed=True):
+                    if isinstance(mapped_data[0], IntegrationFinding):
+                        self.logger.info(f"Mapped {len(mapped_data)} {self.provider} {name} to RegScale finding(s).")
+                        integration_findings.extend(mapped_data)
+                    elif isinstance(mapped_data[0], IntegrationAsset):
+                        self.logger.info(f"Mapped {len(mapped_data)} {self.provider} {name} to RegScale asset(s).")
+                        integration_assets.extend(mapped_data)
+
+        self.scanner_integration = EDRIntegration(plan_id=regscale_ssp_id)
+        self.scanner_integration.sync_assets(
+            title=f"{self.integration_name} EDR",
+            plan_id=regscale_ssp_id,
+            integration_assets=integration_assets,
+            asset_count=len(integration_assets),
+        )
+        self.scanner_integration.sync_findings(
+            title=f"{self.integration_name} EDR",
+            plan_id=regscale_ssp_id,
+            integration_findings=integration_findings,
+            finding_count=len(integration_findings),
+        )
+        self.logger.info(f"[green]Sync from {self.integration_name} to RegScale completed.")
+
+    def run_sync(self, *args, **kwargs) -> None:
+        """
+        Syncs RegScale issues with Vulnerability connector using Synqly
+
+        :rtype: None
+        """
+        self.run_integration_sync(*args, **kwargs)
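For orientation, a minimal usage sketch of the new connector class, not taken from the diff: the integration name ("crowdstrike_edr") and the SSP ID are placeholders, and run_integration_sync (inherited from SynqlyModel) is assumed to forward its keyword arguments through to integration_sync shown above.

from regscale.models.integration_models.synqly_models.connectors.edr import Edr

# Placeholder integration id and SSP id; real values depend on the Synqly
# provider configuration and the target RegScale security plan.
connector = Edr(integration="crowdstrike_edr")
if connector.can_fetch_alerts or connector.can_fetch_apps or connector.can_fetch_endpoints:
    connector.run_sync(regscale_ssp_id=123)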
regscale/models/integration_models/synqly_models/ocsf_mapper.py
@@ -4,7 +4,7 @@ from datetime import datetime
 from typing import Any, Union, TYPE_CHECKING, Optional

 if TYPE_CHECKING:
-    from regscale.models.integration_models.synqly_models.connectors import Ticketing, Vulnerabilities
+    from regscale.models.integration_models.synqly_models.connectors import Edr, Ticketing, Vulnerabilities
 from synqly.engine.resources.ocsf.resources.v_1_1_0.resources.securityfinding import ResourceDetails, Vulnerability

 from synqly import engine
@@ -17,6 +17,9 @@ from regscale.core.app.utils.app_utils import (
 )
 from synqly.engine.resources.ticketing.types.ticket import Ticket
 from synqly.engine.resources.vulnerabilities.types import Asset as OCSFAsset, SecurityFinding
+from synqly.engine.resources.events.types import Event_DetectionFinding as Alerts
+from synqly.engine.resources.ocsf.resources.v_1_3_0.resources.softwareinfo import SoftwareInfo
+from synqly.engine.resources.ocsf.resources.v_1_3_0.resources.inventoryinfo import InventoryInfo
 from regscale.models.regscale_models import Issue
 from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding

@@ -39,21 +42,27 @@ class Mapper:
         else:
             error_and_exit(f"Unsupported object type {type(regscale_object)}")

-    def to_regscale(self, ocsf_object: Any, connector: Union["Ticketing", "Vulnerabilities"], **kwargs) -> Any:
+    def to_regscale(self, ocsf_object: Any, connector: Union["Edr", "Ticketing", "Vulnerabilities"], **kwargs) -> Any:
         """
         Convert OCSF object to RegScale object

         :param Any ocsf_object: OCSF object to convert to a RegScale object
-        :param Union["Ticketing", "Vulnerabilities"] connector: Connector class object
+        :param Union["Edr", "Ticketing", "Vulnerabilities"] connector: Connector class object
         :return: The comparable RegScale object
         :rtype: Any
         """
         if isinstance(ocsf_object, Ticket):
             return self._ticket_to_regscale(connector, ocsf_object, **kwargs)
-        elif
+        elif (
+            isinstance(ocsf_object, OCSFAsset)
+            or isinstance(ocsf_object, SoftwareInfo)
+            or isinstance(ocsf_object, InventoryInfo)
+        ):
             return self._ocsf_asset_to_regscale(connector, ocsf_object, **kwargs)
         elif isinstance(ocsf_object, SecurityFinding):
             return self._security_finding_to_regscale(connector, ocsf_object, **kwargs)
+        elif isinstance(ocsf_object, Alerts):
+            return self._security_alert_to_regscale(connector, ocsf_object, **kwargs)
         else:
             error_and_exit(f"Unsupported object type {type(ocsf_object)}")

@@ -94,12 +103,14 @@ class Mapper:
         return regscale_issue

     @staticmethod
-    def _ocsf_asset_to_regscale(
+    def _ocsf_asset_to_regscale(
+        connector: Union["Edr", "Vulnerabilities"], asset: Union[InventoryInfo, OCSFAsset, SoftwareInfo], **kwargs
+    ) -> IntegrationAsset:
         """
         Convert OCSF Asset to RegScale Asset

-        :param Vulnerabilities connector: Vulnerabilities connector class object
-        :param OCSFAsset asset: OCSF Asset to convert to an IntegrationAsset
+        :param Union["Edr", "Vulnerabilities"] connector: Edr or Vulnerabilities connector class object
+        :param Union[InventoryInfo, OCSFAsset, SoftwareInfo] asset: OCSF Asset data to convert to an IntegrationAsset
         :return: The comparable IntegrationAsset object
         :rtype: IntegrationAsset
         """
@@ -113,11 +124,17 @@ class Mapper:
             ]
         else:
             software_inventory = []
+        if isinstance(asset, OCSFAsset):  # this is a hardware asset
+            name = device_data.name or device_data.hostname or f"{connector.provider} Asset: {device_data.instance_uid}"
+            category = AssetCategory.Hardware
+        else:
+            name = device_data.name or device_data.hostname or f"{connector.provider} Asset: {device_data.uid}"
+            category = AssetCategory.Software
         return IntegrationAsset(
-            name=
+            name=name,
             identifier=device_data.uid,
-            asset_type=device_data.type,
-            asset_category=
+            asset_type=device_data.type or "Other",
+            asset_category=category,
             parent_id=kwargs.pop("regscale_ssp_id"),
             parent_module=SecurityPlan.get_module_string(),
             mac_address=device_data.mac,
@@ -129,7 +146,7 @@ class Mapper:
             serial_number=device_data.hw_info.serial_number if device_data.hw_info else None,
             cpu=device_data.hw_info.cpu_cores if device_data.hw_info else None,
             ram=device_data.hw_info.ram_size if device_data.hw_info else None,
-            operating_system=os_data.name if os_data else None,
+            operating_system=os_data.name or os_data.type if os_data else None,
             os_version=os_data.version if os_data else None,
             software_inventory=software_inventory,
         )
@@ -252,6 +269,39 @@ class Mapper:

         return findings

+    def _security_alert_to_regscale(self, connector: "Edr", finding: Alerts, **_) -> IntegrationFinding:
+        """
+        Converts an OCSF Event_DetectionFinding (Alerts) to a RegScale IntegrationFinding
+
+        :param "Edr" connector: Edr connector class object
+        :param Alerts finding: OCSF Event_DetectionFinding (Alerts) to convert to an IntegrationFinding
+        :return: Comparable IntegrationFinding object
+        :rtype: IntegrationFinding
+        """
+        from regscale.models.regscale_models.issue import IssueStatus
+
+        asset = finding.device
+        vuln = finding.finding_info
+
+        return IntegrationFinding(
+            control_labels=[],
+            category=f"{connector.integration_name} Vulnerability",
+            title=vuln.title,
+            plugin_name=connector.integration_name,
+            severity=Issue.assign_severity(finding.severity),  # type: ignore
+            description=vuln.desc or finding.comment,
+            status=IssueStatus.Open,
+            first_seen=self._datetime_to_str(vuln.first_seen_time_dt),
+            last_seen=self._datetime_to_str(vuln.last_seen_time_dt),
+            ip_address=asset.ip,
+            plugin_id=vuln.product_uid,
+            dns=asset.hostname,
+            issue_title=vuln.title,
+            impact=finding.impact,
+            asset_identifier=asset.uid,
+            source_report=connector.integration_name,
+        )
+
     def _datetime_to_str(self, date_time: Optional[datetime] = None) -> str:
         """
         Convert a datetime object to a string
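One incidental note on the new dispatch branch: the three chained isinstance calls are equivalent to a single call with a tuple of types. A self-contained illustration with stand-in classes (these are not the real OCSF types):

class OCSFAsset: ...
class SoftwareInfo: ...
class InventoryInfo: ...

obj = SoftwareInfo()
# Form used in the hunk above: one isinstance per type, joined with "or".
chained = isinstance(obj, OCSFAsset) or isinstance(obj, SoftwareInfo) or isinstance(obj, InventoryInfo)
# Equivalent single check against a tuple of types.
tupled = isinstance(obj, (OCSFAsset, SoftwareInfo, InventoryInfo))
assert chained == tupled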
regscale/models/integration_models/synqly_models/synqly_model.py
@@ -41,7 +41,7 @@ class SynqlyModel(BaseModel, ABC):
     terminated: Optional[bool] = False
     app: Application = Field(default_factory=Application, alias="app")
     api: Api = Field(default_factory=Api, alias="api")
-    logger: logging.Logger = Field(default=logging.getLogger("
+    logger: logging.Logger = Field(default=logging.getLogger("regscale"), alias="logger")
     job_progress: Optional[Progress] = None
     integration: str = ""
     integration_name: str = Field(default="", description="This stores the proper name of the integration for logging.")
@@ -176,8 +176,8 @@ class SynqlyModel(BaseModel, ABC):
             raise ModuleNotFoundError(f"Unable to find the config object for {self._connector_type}_{self.integration}")
         check_attributes = [self.required_secrets, self.required_params]
         for attribute in check_attributes:
-            for secret in attribute:
-                if
+            for secret, param in attribute.items():
+                if not param.optional:
                     kwargs.update(
                         self._update_config_and_kwargs(
                             attribute=attribute,  # type: ignore
@@ -196,8 +196,8 @@ class SynqlyModel(BaseModel, ABC):
             error_and_exit("Please provide the required secrets mentioned above.")
         self.integration_config = config_object(
             type=f"{self._connector_type.lower()}_{self.integration.lower()}",
-            url=kwargs.get("url"),
             credential=self._get_auth_method(**kwargs),
+            **kwargs,
         )

     def _get_auth_method(self, **kwargs) -> Any:
@@ -236,7 +236,10 @@ class SynqlyModel(BaseModel, ABC):
         :rtype: dict
         """
         if key not in kwargs and not config.get(f"{self._connector_type}_{self.integration}_{key}"):
-            if
+            if attribute[key].default:
+                kwargs[key] = attribute[key].default
+                config[f"{self._connector_type}_{self.integration}_{key}"] = attribute[key].default
+            elif not skip_prompts:
                 self.logger.info(f"Enter the {key} for {self.integration}. Description: {attribute[key].description}")
                 provided_secret = input(f"{key}: ")
                 kwargs[key] = provided_secret
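A minimal sketch of the new fallback order in _update_config_and_kwargs: use a parameter's declared default when one exists, otherwise prompt (unless prompts are skipped). The Param stand-in below only assumes the .default, .description, and .optional attributes that the hunks above read; it is not the real Synqly spec object, and the "region" key and "us-east-1" value are illustrative.

from dataclasses import dataclass
from typing import Optional

@dataclass
class Param:  # stand-in for a Synqly secret/parameter spec
    default: Optional[str] = None
    description: str = ""
    optional: bool = False

def resolve(key: str, attribute: dict, kwargs: dict, config: dict, skip_prompts: bool) -> None:
    # Mirrors the new branch: prefer the declared default, else prompt the user.
    if key not in kwargs and not config.get(key):
        if attribute[key].default:
            kwargs[key] = attribute[key].default
            config[key] = attribute[key].default
        elif not skip_prompts:
            kwargs[key] = input(f"{key}: ")

params = {"region": Param(default="us-east-1", description="API region")}
kw, cfg = {}, {}
resolve("region", params, kw, cfg, skip_prompts=True)
assert kw["region"] == "us-east-1" and cfg["region"] == "us-east-1"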
regscale/models/regscale_models/file.py
@@ -11,7 +11,7 @@ import os
 import sys
 from io import BytesIO
 from pathlib import Path
-from typing import Optional,
+from typing import Optional, Tuple, Union

 from pydantic import BaseModel, ConfigDict, Field
 from requests import JSONDecodeError
@@ -375,6 +375,8 @@ class File(BaseModel):
             file_type_header = "text/xml"
         elif file_type == ".gz":
             file_type_header = "application/gzip"
+        elif file_type == ".msg":
+            file_type_header = "application/vnd.ms-outlook"
         else:
             logger = logging.getLogger("regscale")
             logger.warning(f"Unacceptable file type for upload: {file_type}")
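A condensed, self-contained restatement of the extension-to-content-type branch this hunk extends; only the .gz case and the newly added .msg case appear in the diff, and the fallback string below is a placeholder for the warning path in File, not the library's actual behavior.

def content_type_for(file_type: str) -> str:
    # Mirrors the elif chain in File: Outlook .msg attachments are now accepted.
    if file_type == ".gz":
        return "application/gzip"
    if file_type == ".msg":
        return "application/vnd.ms-outlook"  # added in 6.17.0.0
    return "application/octet-stream"  # placeholder for the "unacceptable file type" path

assert content_type_for(".msg") == "application/vnd.ms-outlook"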
regscale/models/regscale_models/master_assessment.py (new file)
@@ -0,0 +1,127 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""Model for a RegScale Assessment"""
+from typing import List, Optional
+
+from pydantic import ConfigDict, Field
+
+from regscale.models.regscale_models.assessment import Assessment
+from regscale.models.regscale_models.regscale_model import RegScaleModel
+
+
+class MasterAssessment(RegScaleModel):
+    """
+    Model for a RegScale Assessment
+    """
+
+    _module_slug = "masterassessments"
+    _unique_fields = [["title", "parentId", "parentModule"]]
+
+    id: int = Field(default=0, description="Internal identifier for the assessment")
+    # Relationship and metadata fields
+    parentId: int = Field(description="ID of the parent record")
+    parentModule: str = Field(description="Module of the parent record")
+
+    leadAssessorId: str = Field(default="", description="Identifier for the lead assessor")
+    title: str = Field(default=None, description="Title of the assessment")
+    instructions: str = Field(default=None, description="Instructions for the assessment")
+    plannedFinish: str = Field(default=None, description="Planned finish date of the assessment")
+    status: str = Field(default=None, description="Current status of the assessment")
+    actualFinish: Optional[str] = Field(default=None, description="Actual finish date of the assessment")
+    dateAdjustedForCorrections: bool = Field(
+        default=False, description="Flag indicating if dates were adjusted for corrections"
+    )
+    percentComplete: float = Field(default=0.0, description="Percentage of assessment completed", ge=0, le=100)
+    complianceScore: Optional[int] = Field(default=None, description="Compliance score of the assessment")
+
+    # Assessment documentation
+    assessmentPlan: Optional[str] = Field(default=None, description="The assessment plan document")
+    methodology: Optional[str] = Field(default=None, description="Methodology used for the assessment")
+    rulesOfEngagement: Optional[str] = Field(default=None, description="Rules of engagement for the assessment")
+    disclosures: Optional[str] = Field(default=None, description="Disclosures related to the assessment")
+    scopeIncludes: Optional[str] = Field(default=None, description="What is included in the assessment scope")
+    scopeExcludes: Optional[str] = Field(default=None, description="What is excluded from the assessment scope")
+    limitationsOfLiability: Optional[str] = Field(
+        default=None, description="Limitations of liability for the assessment"
+    )
+    documentsReviewed: Optional[str] = Field(default=None, description="Documents reviewed during the assessment")
+    activitiesObserved: Optional[str] = Field(default=None, description="Activities observed during the assessment")
+    fixedDuringAssessment: Optional[str] = Field(default=None, description="Issues fixed during the assessment")
+    summaryOfResults: Optional[str] = Field(default=None, description="Summary of assessment results")
+    assessmentReport: Optional[str] = Field(default=None, description="Full assessment report")
+
+    # FedRAMP specific fields
+    fedrampAssessmentType: Optional[str] = Field(default=None, description="Type of FedRAMP assessment")
+    fedrampSignificantChanges: Optional[str] = Field(default=None, description="Significant changes for FedRAMP")
+    fedrampSubmitter: Optional[str] = Field(default=None, description="FedRAMP submitter information")
+    fedrampScope: Optional[str] = Field(default=None, description="FedRAMP assessment scope")
+    fedrampVulnerabilitySampling: Optional[str] = Field(
+        default=None, description="FedRAMP vulnerability sampling methodology"
+    )
+    fedrampTestingSampling: Optional[str] = Field(default=None, description="FedRAMP testing sampling methodology")
+    fedrampTestingNotification: Optional[str] = Field(default=None, description="FedRAMP testing notification details")
+    fedrampTestingDays: Optional[int] = Field(default=None, description="Number of FedRAMP testing days")
+    fedrampDatabaseDiscrepanicies: Optional[str] = Field(default=None, description="FedRAMP database discrepancies")
+    fedrampDatabaseDiscrepaniciesDescription: Optional[str] = Field(
+        default=None, description="Description of FedRAMP database discrepancies"
+    )
+    fedrampWebDiscrepanicies: Optional[str] = Field(default=None, description="FedRAMP web discrepancies")
+    fedrampWebDiscrepaniciesDescription: Optional[str] = Field(
+        default=None, description="Description of FedRAMP web discrepancies"
+    )
+    fedrampContainerDiscrepanicies: Optional[str] = Field(default=None, description="FedRAMP container discrepancies")
+    fedrampContainerDiscrepaniciesDescription: Optional[str] = Field(
+        default=None, description="Description of FedRAMP container discrepancies"
+    )
+
+    @staticmethod
+    def _get_additional_endpoints() -> dict[str, str]:
+        """
+        Get additional endpoints for the Assessments model.
+
+        :return: A dictionary of additional endpoints
+        :rtype: dict[str, str]
+        """
+        return ConfigDict(  # type: ignore
+            get_all_by_master="/api/{model_slug}/getAllByMaster/{masterAssessmentId}",
+            get_history="/api/{model_slug}/getHistory/{parentId}/{parentModuleName}",
+        )
+
+    @classmethod
+    def get_history(cls, parent_id: int, parent_module: str) -> List[Assessment]:
+        """
+        Get a list of master assessments ordered by planned finish date.
+
+        :param int parent_id: The ID of the parent record
+        :param str parent_module: The module of the parent record
+        :return: A list of assessments
+        :rtype: List[Assessment]
+        """
+        response = cls._get_api_handler().get(
+            endpoint=cls.get_endpoint("get_history").format(parentId=parent_id, parentModuleName=parent_module)
+        )
+        master_assessments = []
+        if response and response.ok:
+            for ci in response.json():
+                if ci := cls.get_object(object_id=ci["id"]):
+                    master_assessments.append(ci)
+        return master_assessments
+
+    @classmethod
+    def get_all_by_master(cls, master_assessment_id: int) -> List["Assessment"]:
+        """
+        Get a list of assessments by id.
+
+        :param int master_assessment_id: The ID of the master assessment
+        :return: A list of assessments
+        :rtype: List[Assessment]
+        """
+        response = cls._get_api_handler().get(
+            endpoint=cls.get_endpoint("get_all_by_master").format(masterAssessmentId=master_assessment_id)
+        )
+        assessments = []
+        if response and response.ok:
+            for ci in response.json():
+                if ci := cls.get_object(object_id=ci["id"]):
+                    assessments.append(ci)
+        return assessments
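A hedged usage sketch of the two class methods on the new model; the IDs and the "securityplans" module string are illustrative, and both calls need an authenticated RegScale API configuration at runtime.

from regscale.models.regscale_models.master_assessment import MasterAssessment

# Child assessments under a master assessment (id 42 is a placeholder).
children = MasterAssessment.get_all_by_master(master_assessment_id=42)

# Master assessment history for a parent record; the module string is an assumption.
history = MasterAssessment.get_history(parent_id=10, parent_module="securityplans")
print(len(children), len(history))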
regscale/models/regscale_models/regscale_model.py
@@ -1315,8 +1315,7 @@ class RegScaleModel(BaseModel, ABC):
            :param Optional[Progress] progress: Optional progress context for tracking
            :param Optional[bool] remove_progress_bar: Whether to remove the progress bar after completion, defaults to False
            """
-            # noqa: F824
-            nonlocal results
+            nonlocal results  # noqa: F824
            create_job = None
            if progress:
                create_job = progress.add_task(
@@ -1373,8 +1372,7 @@ class RegScaleModel(BaseModel, ABC):
            :param Optional[Progress] progress: Optional progress context for tracking
            :param Optional[bool] remove_progress_bar: Whether to remove the progress bar after completion, defaults to False
            """
-            # noqa: F824
-            nonlocal results
+            nonlocal results  # noqa: F824
            update_job = None
            if progress:
                update_job = progress.add_task(
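Context for the moved comment: a noqa directive is only honored on the flagged line itself, and F824 is the pyflakes "dead code" warning for a global/nonlocal declaration whose name is never reassigned in that scope (mutating the object does not count). A small self-contained illustration of the pattern being suppressed:

def collect():
    results = []

    def worker(item):
        # F824 fires because "results" is only mutated via .append(), never rebound;
        # the declaration is harmless but technically dead, hence the inline noqa.
        nonlocal results  # noqa: F824
        results.append(item)

    for i in range(3):
        worker(i)
    return results

assert collect() == [0, 1, 2]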
regscale/models/regscale_models/risk.py
@@ -9,7 +9,6 @@ from regscale.core.app.api import Api
 from regscale.core.app.application import Application
 from regscale.models.regscale_models.regscale_model import RegScaleModel

-
 logger = logging.getLogger("regscale")


@@ -52,7 +51,6 @@ class Risk(RegScaleModel):
     comments: Optional[str] = None
     riskTier: Optional[str] = None
     title: Optional[str] = None
-    recommendations: Optional[str] = None
     impactDescription: Optional[str] = None
     inherentRiskScore: Optional[float] = None
     residualRiskScore: Optional[float] = None
@@ -79,11 +77,10 @@ class Risk(RegScaleModel):
     parentRiskModelId: Optional[int] = None

     @staticmethod
-    def fetch_all_risks(
+    def fetch_all_risks() -> list["Risk"]:
         """
         Fetches all risks from RegScale

-        :param Application app: Application object
         :return: List of Risks from RegScale
         :rtype: list[Risk]
         """
@@ -113,7 +110,6 @@ class Risk(RegScaleModel):
     def get_sort_position_dict(cls) -> dict:
         """
         Overrides the base method.
-
         :return: dict The sort position in the list of properties
         :rtype: dict
         """
@@ -153,31 +149,30 @@ class Risk(RegScaleModel):
             "comments": 30,
             "riskTier": 31,
             "title": 32,
-            (20 dictionary entries whose text was truncated in the extracted diff)
-            "riskOwnerId": 53,
+            "impactDescription": 33,
+            "inherentRiskScore": 34,
+            "residualRiskScore": 35,
+            "targetRiskScore": 36,
+            "difference": 37,
+            "futureCosts": 38,
+            "costToMitigate": 39,
+            "controlId": 40,
+            "assessmentId": 41,
+            "requirementId": 42,
+            "securityPlanId": 43,
+            "projectId": 44,
+            "supplyChainId": 45,
+            "policyId": 46,
+            "componentId": 47,
+            "incidentId": 48,
+            "riskAssessmentFrequency": 49,
+            "dateLastAssessed": 50,
+            "nextAssessmentDueDate": 51,
+            "riskOwnerId": 52,
             "isPublic": -1,
-            "averageRiskScore":
-            "weightedRiskScore":
-            "parentRiskModelId":
+            "averageRiskScore": 53,
+            "weightedRiskScore": 54,
+            "parentRiskModelId": 55,
         }

     # pylint: disable=W0613
@@ -215,7 +210,7 @@ class Risk(RegScaleModel):
             "componentId": "",
             "incidentId": "",
         }
-        if field_name in lookup_fields
+        if field_name in lookup_fields:
             return lookup_fields[field_name]
         return ""

@@ -247,7 +242,7 @@ class Risk(RegScaleModel):
         :rtype: list
         """
         # This query returns all risks and doesn't use the parent ID or module provided.
-        return cls.fetch_all_risks(
+        return cls.fetch_all_risks()

     @classmethod
     def use_query(cls) -> bool:
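The Application parameter is gone from fetch_all_risks, so existing callers simply drop the argument. A short sketch (needs a configured RegScale API at runtime; the print is illustrative):

from regscale.models.regscale_models.risk import Risk

# Previously fetch_all_risks took an Application object; it is now parameterless.
risks = Risk.fetch_all_risks()
print(f"Fetched {len(risks)} risk(s) from RegScale")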
regscale/models/regscale_models/supply_chain.py
@@ -91,21 +91,21 @@ class SupplyChain(RegScaleModel):
     _module_slug = "supplychain"

     title: str
+    contractType: Union[SupplyChainContractType, str]  # Required
+    strategicTier: Union[SupplyChainTier, str]  # Required
+    fips: Union[SupplyChainFipsImpact, str]  # Required
     id: Optional[int] = 0
     status: Union[SupplyChainStatus, str] = SupplyChainStatus.Pending
     uuid: Optional[str] = None
-    fips: Optional[Union[SupplyChainFipsImpact, str]] = None
     contractNumber: Optional[str] = None
     isPublic: bool = True
     parentId: int = 0
     parentModule: Optional[str] = None
-    orgId: Optional[int] =
-    facilityId: Optional[int] =
+    orgId: Optional[int] = None
+    facilityId: Optional[int] = None
     contractValue: Optional[int] = 0
     fundedAmount: Optional[int] = 0
     actualCosts: Optional[int] = 0
-    strategicTier: Optional[Union[SupplyChainTier, str]] = None
-    contractType: Optional[Union[SupplyChainContractType, str]] = None
     scope: Optional[str] = None
     startDate: Optional[str] = None
     endDate: Optional[str] = None
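With contractType, strategicTier, and fips now declared without defaults, constructing a SupplyChain requires all three alongside title. A hedged sketch; the string values below are placeholders because the members of SupplyChainContractType, SupplyChainTier, and SupplyChainFipsImpact are not shown in this hunk, though each field also accepts a plain str per its Union type.

from regscale.models.regscale_models.supply_chain import SupplyChain

chain = SupplyChain(
    title="Example vendor contract",
    contractType="Firm Fixed Price",  # Union[SupplyChainContractType, str]; placeholder value
    strategicTier="Tier 1",           # Union[SupplyChainTier, str]; placeholder value
    fips="Moderate",                  # Union[SupplyChainFipsImpact, str]; placeholder value
)
print(chain.status)  # defaults to SupplyChainStatus.Pending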
regscale/regscale.py
@@ -149,6 +149,7 @@ aqua = import_command_with_timing(COMMERCIAL, "aqua")
 awsv2 = import_command_with_timing(COMMERCIAL, "aws")
 azure = import_command_with_timing(COMMERCIAL, "azure")
 burp = import_command_with_timing(COMMERCIAL, "burp")
+edr = import_command_with_timing("regscale.integrations.commercial.synqly.edr", "edr")
 assets = import_command_with_timing("regscale.integrations.commercial.synqly.assets", "assets")
 vulnerabilities = import_command_with_timing(
     "regscale.integrations.commercial.synqly.vulnerabilities", "vulnerabilities"
@@ -766,6 +767,7 @@ cli.add_command(aqua)  # Add Aqua ECR support
 cli.add_command(awsv2)  # add AWS support
 cli.add_command(azure)  # add Azure Integration
 cli.add_command(burp)  # add Burp File Integration
+cli.add_command(edr)  # add Edr connector
 cli.add_command(assets)  # add Assets connector
 cli.add_command(vulnerabilities)  # add Vulnerabilities connector
 cli.add_command(ticketing)  # add Ticketing connector