regscale-cli 6.20.2.0__py3-none-any.whl → 6.20.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli might be problematic.
- regscale/__init__.py +1 -1
- regscale/integrations/commercial/__init__.py +13 -0
- regscale/integrations/commercial/axonius/__init__.py +0 -0
- regscale/integrations/commercial/axonius/axonius_integration.py +70 -0
- regscale/integrations/commercial/jira.py +6 -12
- regscale/integrations/commercial/synqly/assets.py +10 -0
- regscale/integrations/commercial/wizv2/constants.py +4 -0
- regscale/integrations/commercial/wizv2/scanner.py +67 -14
- regscale/integrations/commercial/wizv2/utils.py +24 -10
- regscale/integrations/commercial/wizv2/variables.py +7 -0
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +67 -13
- regscale/integrations/scanner_integration.py +8 -2
- regscale/integrations/variables.py +1 -0
- regscale/models/app_models/import_validater.py +3 -1
- regscale/models/integration_models/axonius_models/__init__.py +0 -0
- regscale/models/integration_models/axonius_models/connectors/__init__.py +3 -0
- regscale/models/integration_models/axonius_models/connectors/assets.py +111 -0
- regscale/models/integration_models/cisa_kev_data.json +66 -6
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/__init__.py +2 -1
- regscale/models/regscale_models/issue.py +1 -0
- regscale/models/regscale_models/risk_issue_mapping.py +61 -0
- regscale/utils/graphql_client.py +4 -4
- {regscale_cli-6.20.2.0.dist-info → regscale_cli-6.20.3.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.20.2.0.dist-info → regscale_cli-6.20.3.0.dist-info}/RECORD +29 -23
- {regscale_cli-6.20.2.0.dist-info → regscale_cli-6.20.3.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.20.2.0.dist-info → regscale_cli-6.20.3.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.20.2.0.dist-info → regscale_cli-6.20.3.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.20.2.0.dist-info → regscale_cli-6.20.3.0.dist-info}/top_level.txt +0 -0
regscale/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "6.20.2.0"
+__version__ = "6.20.3.0"
regscale/integrations/commercial/__init__.py
CHANGED
@@ -57,6 +57,19 @@ def aws():
     show_mapping(aws, "aws_inspector")


+@click.group(
+    cls=LazyGroup,
+    lazy_subcommands={
+        "sync_assets": "regscale.integrations.commercial.axonius.axonius_integration.sync_assets",
+        "sync_findings": "regscale.integrations.commercial.axonius.axonius_integration.sync_findings",
+    },
+    name="axonius",
+)
+def axonius():
+    """Axonius Integration"""
+    pass
+
+
 @click.group(
     cls=LazyGroup,
     lazy_subcommands={
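For context, a lazy Click group defers importing a subcommand's module until that command is actually invoked, which keeps CLI startup fast as integrations accumulate. RegScale's own LazyGroup is not shown in this diff; the sketch below is the standard pattern from the Click documentation, not the package's actual implementation:

    import importlib

    import click


    class LazyGroup(click.Group):
        """Click group that imports subcommands only when they are invoked."""

        def __init__(self, *args, lazy_subcommands=None, **kwargs):
            super().__init__(*args, **kwargs)
            # Maps command name -> "module.path.attribute" to import on demand.
            self.lazy_subcommands = lazy_subcommands or {}

        def list_commands(self, ctx):
            return sorted(super().list_commands(ctx) + list(self.lazy_subcommands))

        def get_command(self, ctx, cmd_name):
            if cmd_name in self.lazy_subcommands:
                module_path, attr = self.lazy_subcommands[cmd_name].rsplit(".", 1)
                return getattr(importlib.import_module(module_path), attr)
            return super().get_command(ctx, cmd_name)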
regscale/integrations/commercial/axonius/__init__.py
File without changes
regscale/integrations/commercial/axonius/axonius_integration.py
ADDED
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""Axonius integration for RegScale CLI to sync assets"""
+
+# Standard python imports
+import click
+from regscale.core.app.utils.parser_utils import safe_datetime_str
+from regscale.core.app.api import Api
+from regscale.core.app.logz import create_logger
+from regscale.core.app.utils.app_utils import (
+    check_file_path,
+    check_license,
+    compute_hashes_in_directory,
+    convert_datetime_to_regscale_string,
+    create_progress_object,
+    error_and_exit,
+    get_current_datetime,
+    save_data_to,
+)
+from regscale.core.app.utils.regscale_utils import verify_provided_module
+from regscale.models import regscale_id, regscale_module
+from regscale.models.regscale_models.file import File
+from regscale.models.regscale_models.issue import Issue
+from regscale.models.regscale_models.task import Task
+from regscale.utils.threading.threadhandler import create_threads, thread_assignment
+from regscale.models import regscale_ssp_id
+
+import pandas as pd
+import requests
+import datetime
+from datetime import date
+import warnings
+import json
+from urllib.parse import urljoin
+
+warnings.filterwarnings("ignore")
+
+
+####################################################################################################
+#
+# SYNC ASSETS WITH AXONIUS
+# AXONIUS API Docs: https://developer.axonius.com/docs/overview
+#
+####################################################################################################
+
+
+# Create group to handle Axonius integration
+@click.group()
+def axonius():
+    """Sync assets between Axonius and RegScale."""
+
+
+@axonius.command(name="sync_assets")
+@regscale_ssp_id()
+def sync_assets(regscale_ssp_id: int) -> None:
+    """Sync Assets from Axonius into RegScale."""
+    from regscale.models.integration_models.axonius_models.connectors.assets import AxoniusIntegration
+
+    scanner = AxoniusIntegration(plan_id=regscale_ssp_id)
+    scanner.sync_assets(plan_id=regscale_ssp_id)
+
+
+@axonius.command(name="sync_findings")
+@regscale_ssp_id()
+def sync_findings(regscale_ssp_id: int) -> None:
+    """Sync Assets from Axonius into RegScale."""
+    from regscale.models.integration_models.axonius_models.connectors.assets import AxoniusIntegration
+
+    scanner = AxoniusIntegration(plan_id=regscale_ssp_id)
+    scanner.sync_findings(plan_id=regscale_ssp_id)
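A hedged usage sketch: the commands above can also be driven programmatically, mirroring their bodies exactly (the plan ID is illustrative):

    from regscale.models.integration_models.axonius_models.connectors.assets import AxoniusIntegration

    # Mirrors what `regscale axonius sync_assets` / `sync_findings` do internally.
    scanner = AxoniusIntegration(plan_id=123)  # 123 is an illustrative SSP ID
    scanner.sync_assets(plan_id=123)
    scanner.sync_findings(plan_id=123)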
regscale/integrations/commercial/jira.py
CHANGED
@@ -297,10 +297,6 @@ def sync_regscale_and_jira(
             len(update_counter),
             output_str,
         )
-        if sync_tasks_only:
-            # set the updated flag to True for the tasks that were updated, to prevent them from being updated again during the sync_regscale_objects_to_jira function
-            for task in regscale_objects_to_update:
-                task.extra_data["updated"] = True
     else:
         logger.info("No %s(s) need to be updated in RegScale.", output_str)
@@ -540,14 +536,10 @@ def process_tasks_for_sync(
     close_tasks = []
     for jira_issue in jira_issues:
         task = create_regscale_task_from_jira(config, jira_issue, parent_id, parent_module)
-        if task not in existing_tasks:
-            insert_tasks.append(task)
-        else:
-            existing_task = next(
-                (t for t in existing_tasks if t == task and not t.extra_data.get("updated", False)), None
-            )
+        existing_task = next((t for t in existing_tasks if t == task and not t.extra_data.get("updated", False)), None)
+        if existing_task:
             task.id = existing_task.id
-            if (jira_issue.fields.status.name.lower() == "done" and
+            if (jira_issue.fields.status.name.lower() == "done" and existing_task.status not in closed_statuses) or (
                 task.status in closed_statuses and task != existing_task
             ):
                 task.status = "Closed"
@@ -556,6 +548,8 @@ def process_tasks_for_sync(
                 close_tasks.append(task)
             elif task != existing_task:
                 update_tasks.append(task)
+        else:
+            insert_tasks.append(task)
         progress.update(progress_task, advance=1)
     return insert_tasks, update_tasks, close_tasks
@@ -617,7 +611,7 @@ def create_and_update_regscale_tasks(
         }
         for _ in as_completed(task_futures):
            progress.update(progress_task, advance=1)
-    return len(insert_tasks), len(update_tasks), len(close_tasks)
+    return len(insert_tasks), len(update_tasks) + len(update_counter), len(close_tasks)


 def task_and_attachments_sync(
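The net effect of the process_tasks_for_sync changes is a three-way classification per Jira issue: close, update, or insert. A runnable toy version of that decision, with dicts standing in for RegScale Task objects and the status checks collapsed into simple comparisons (not the package's actual code):

    def classify(task, existing_tasks, closed_statuses):
        # Match only tasks not already touched during this sync run.
        existing = next(
            (t for t in existing_tasks if t["key"] == task["key"] and not t.get("updated")),
            None,
        )
        if existing:
            if task["status"] in closed_statuses and task != existing:
                return "close"
            if task != existing:
                return "update"
            return "unchanged"
        return "insert"

    print(classify({"key": "J-1", "status": "Open"}, [], {"Closed"}))                                    # insert
    print(classify({"key": "J-1", "status": "Closed"}, [{"key": "J-1", "status": "Open"}], {"Closed"}))  # close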
regscale/integrations/commercial/synqly/assets.py
CHANGED
@@ -23,6 +23,16 @@ def sync_armis_centrix(regscale_ssp_id: int) -> None:
     assets_armis_centrix.run_sync(regscale_ssp_id=regscale_ssp_id)


+@assets.command(name="sync_axonius")
+@regscale_ssp_id()
+def sync_axonius(regscale_ssp_id: int) -> None:
+    """Sync Assets from Axonius to RegScale."""
+    from regscale.models.integration_models.synqly_models.connectors import Assets
+
+    assets_axonius = Assets("axonius")
+    assets_axonius.run_sync(regscale_ssp_id=regscale_ssp_id)
+
+
 @assets.command(name="sync_crowdstrike")
 @regscale_ssp_id()
 @click.option(
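This Synqly-backed command is a second route to Axonius asset data alongside the native integration above; programmatically it reduces to the two calls in the new command body (the plan ID is illustrative):

    from regscale.models.integration_models.synqly_models.connectors import Assets

    # Mirrors the body of the new sync_axonius command above.
    assets_axonius = Assets("axonius")
    assets_axonius.run_sync(regscale_ssp_id=123)  # 123 is an illustrative SSP ID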
regscale/integrations/commercial/wizv2/scanner.py
CHANGED
@@ -5,7 +5,7 @@ import json
 import logging
 import os
 import re
-from typing import Any, Dict, Iterator, List, Optional
+from typing import Any, Dict, Iterator, List, Optional, Union

 from regscale.core.app.utils.app_utils import check_file_path, get_current_datetime
 from regscale.core.utils import get_base_protocol_from_port
@@ -60,7 +60,13 @@ class WizVulnerabilityIntegration(ScannerIntegration):
     wiz_token = None

     @staticmethod
-    def get_variables():
+    def get_variables() -> Dict[str, Any]:
+        """
+        Returns default variables for first and filterBy for Wiz GraphQL queries.
+
+        :return: Default variables for Wiz queries
+        :rtype: Dict[str, Any]
+        """
         return {
             "first": 100,
             "filterBy": {},
@@ -115,7 +121,6 @@ class WizVulnerabilityIntegration(ScannerIntegration):
                 topic_key=wiz_vulnerability_type["topic_key"],
                 file_path=wiz_vulnerability_type["file_path"],
             )
-            self.num_findings_to_process += len(nodes)
             yield from self.parse_findings(nodes, wiz_vulnerability_type["type"])
         logger.info("Finished fetching Wiz findings.")
@@ -131,8 +136,8 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         :rtype: Iterator[IntegrationFinding]
         """
         for node in nodes:
-            finding = self.parse_finding(node, vulnerability_type)
-            if finding:
+            if finding := self.parse_finding(node, vulnerability_type):
+                self.num_findings_to_process += 1
                 yield finding

     @classmethod
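Together with the removal of `self.num_findings_to_process += len(nodes)` in the previous hunk, this moves the counter inside the loop so only nodes that actually parse are counted. A standalone illustration of the `if x := f(...)` idiom (Python 3.8+):

    def parse(node):
        # Stands in for parse_finding: returns None when a node is unusable.
        return node.get("id")

    count = 0
    for node in [{"id": "a"}, {}, {"id": "b"}]:
        if parsed := parse(node):
            count += 1  # counted only when parsing succeeded
    print(count)  # 2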
@@ -232,17 +237,24 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         self.num_assets_to_process = len(nodes)

         for node in nodes:
-            asset = self.parse_asset(node)
-            if asset:
+            if asset := self.parse_asset(node):
                 yield asset

     @staticmethod
-    def get_filter_by(filter_by_override, wiz_project_id):
+    def get_filter_by(filter_by_override: Union[str, Dict[str, Any]], wiz_project_id: str) -> Dict[str, Any]:
+        """
+        Constructs the filter_by dictionary for fetching assets
+
+        :param Union[str, Dict[str, Any]] filter_by_override: Override for the filter_by dictionary
+        :param str wiz_project_id: The Wiz project ID
+        :return: The filter_by dictionary
+        :rtype: Dict[str, Any]
+        """
         if filter_by_override:
             return json.loads(filter_by_override) if isinstance(filter_by_override, str) else filter_by_override
         filter_by = {"project": wiz_project_id}
         if WizVariables.wizLastInventoryPull and not WizVariables.wizFullPullLimitHours:
-            filter_by["updatedAt"] = {"after": WizVariables.wizLastInventoryPull}
+            filter_by["updatedAt"] = {"after": WizVariables.wizLastInventoryPull}  # type: ignore
         return filter_by

     def parse_asset(self, node: Dict[str, Any]) -> Optional[IntegrationAsset]:
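The override handling accepts either a JSON string or an already-built dict; a minimal self-contained sketch of that branch:

    import json

    def get_filter_by(filter_by_override, wiz_project_id):
        # String overrides are parsed as JSON; dict overrides pass through;
        # otherwise default to filtering by project.
        if filter_by_override:
            return json.loads(filter_by_override) if isinstance(filter_by_override, str) else filter_by_override
        return {"project": wiz_project_id}

    print(get_filter_by('{"status": "OPEN"}', "proj-1"))  # {'status': 'OPEN'}
    print(get_filter_by(None, "proj-1"))                  # {'project': 'proj-1'}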
@@ -278,6 +290,14 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         software_name = self.get_software_name(software_name_dict, wiz_entity_properties, node)
         software_vendor = self.get_software_vendor(software_name_dict, wiz_entity_properties, node)

+        if WizVariables.useWizHardwareAssetTypes and node.get("graphEntity", {}).get("technologies", []):
+            technologies = node.get("graphEntity", {}).get("technologies", [])
+            deployment_models: set[str] = {
+                tech.get("deploymentModel") for tech in technologies if tech.get("deploymentModel")
+            }
+        else:
+            deployment_models = set()
+
         return IntegrationAsset(
             name=name,
             external_id=node.get("name"),
@@ -288,7 +308,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
             asset_owner_id=ScannerVariables.userId,
             parent_id=self.plan_id,
             parent_module=regscale_models.SecurityPlan.get_module_slug(),
-            asset_category=map_category(node.get("type", "")),
+            asset_category=map_category(deployment_models or node.get("type", "")),
             date_last_updated=wiz_entity.get("lastSeen", ""),
             management_type=handle_management_type(wiz_entity_properties),
             status=self.map_wiz_status(wiz_entity_properties.get("status")),
@@ -326,7 +346,14 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         )

     @staticmethod
-    def get_ports_and_protocols(wiz_entity_properties):
+    def get_ports_and_protocols(wiz_entity_properties: dict) -> List[Dict[str, Union[int, str]]]:
+        """
+        Extracts ports and protocols from Wiz entity properties using the "portStart", "portEnd", and "protocol" keys.
+
+        :param dict wiz_entity_properties: Dictionary containing Wiz entity properties
+        :return: A list of dictionaries containing start_port, end_port, and protocol
+        :rtype: List[Dict[str, Union[int, str]]]
+        """
         start_port = wiz_entity_properties.get("portStart")
         if start_port:
             end_port = wiz_entity_properties.get("portEnd") or start_port
@@ -337,19 +364,45 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         return []

     @staticmethod
-    def get_software_vendor(software_name_dict, wiz_entity_properties, node):
+    def get_software_vendor(software_name_dict: dict, wiz_entity_properties: dict, node: dict) -> Optional[str]:
+        """
+        Gets the software vendor from the software name dictionary or Wiz entity properties.
+
+        :param dict software_name_dict: Dictionary containing software name and vendor
+        :param dict wiz_entity_properties: Properties of the Wiz entity
+        :param dict node: Node dictionary
+        :return: Software vendor
+        :rtype: Optional[str]
+        """
         if map_category(node.get("type")) == regscale_models.AssetCategory.Software:
             return software_name_dict.get("software_vendor") or wiz_entity_properties.get("cloudPlatform")
         return None

     @staticmethod
-    def get_software_version(wiz_entity_properties, node):
+    def get_software_version(wiz_entity_properties: dict, node: dict) -> Optional[str]:
+        """
+        Gets the software version from the Wiz entity properties or handles it based on the node type.
+
+        :param dict wiz_entity_properties: Properties of the Wiz entity
+        :param dict node: Node dictionary
+        :return: Software version
+        :rtype: Optional[str]
+        """
         if map_category(node.get("type")) == regscale_models.AssetCategory.Software:
             return handle_software_version(wiz_entity_properties, map_category(node.get("type"))) or "1.0"
         return None

     @staticmethod
-    def get_software_name(software_name_dict, wiz_entity_properties, node):
+    def get_software_name(software_name_dict: dict, wiz_entity_properties: dict, node: dict) -> Optional[str]:
+        """
+        Gets the software name from the software name dictionary or Wiz entity properties.
+
+        :param dict software_name_dict: Dictionary containing software name and vendor
+        :param dict wiz_entity_properties: Properties of the Wiz entity
+        :param dict node: Node dictionary
+        :return: Software name
+        :rtype: Optional[str]
+        """
         if map_category(node.get("type")) == regscale_models.AssetCategory.Software:
             return software_name_dict.get("software_name") or wiz_entity_properties.get("nativeType")
         return None
regscale/integrations/commercial/wizv2/utils.py
CHANGED
@@ -10,7 +10,7 @@ import logging
 import time
 import traceback
 from contextlib import closing
-from typing import Dict, List, Any, Optional
+from typing import Dict, List, Any, Optional, Union
 from zipfile import ZipFile

 import cachetools
@@ -111,22 +111,36 @@ def create_asset_type(asset_type: str) -> str:
     return asset_type


-def map_category(asset_string: str) -> regscale_models.AssetCategory:
+def map_category(asset_string: Union[set[str], str]) -> regscale_models.AssetCategory:
     """
-    category
+    Map the asset category based on the asset string. If the asset string is not found in the wizHardwareAssetTypes or
+    in the AssetCategory enum, it will be mapped to "Software"

-    :param str asset_string:
-
+    :param Union[set[str], str] asset_string: Set of strings from the Wiz asset's technologies.deploymentModel or
+        the node's type
+    :return: RegScale AssetCategory
     :rtype: regscale_models.AssetCategory
     """
     try:
-        if asset_string
-
-
+        if isinstance(asset_string, set):
+            hardware_count = sum(
+                asset.lower() == type.lower() for type in WizVariables.wizHardwareAssetTypes for asset in asset_string
+            )
+            software_count = len(asset_string) - hardware_count
+            return (
+                regscale_models.AssetCategory.Hardware
+                if hardware_count > software_count
+                else regscale_models.AssetCategory.Software
+            )
+        if asset_string in WizVariables.wizHardwareAssetTypes:
+            return regscale_models.AssetCategory.Hardware
+        elif asset_category := getattr(regscale_models.AssetCategory, asset_string):
+            return asset_category
+        return regscale_models.AssetCategory.Software
     except (KeyError, AttributeError) as ex:
         # why map AssetCategory of everything is software?
-        logger.debug("Unable to find %s in AssetType enum \n", ex)
-        return regscale_models.AssetCategory.Software
+        logger.debug("Unable to find %s in AssetType enum. Defaulting to Software\n", ex)
+        return regscale_models.AssetCategory.Software


 def convert_first_seen_to_days(first_seen: str) -> int:
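When map_category receives a set of deployment models, it takes a simple majority vote against the configured hardware types, with ties going to Software. A self-contained sketch, assuming the default wizHardwareAssetTypes from variables.py below:

    HARDWARE_TYPES = ["SERVER_APPLICATION", "CLIENT_APPLICATION", "VIRTUAL_APPLIANCE"]  # defaults from variables.py

    def vote(deployment_models):
        # Count models that case-insensitively match a configured hardware type.
        hardware = sum(m.lower() == t.lower() for t in HARDWARE_TYPES for m in deployment_models)
        software = len(deployment_models) - hardware
        return "Hardware" if hardware > software else "Software"

    print(vote({"SERVER_APPLICATION", "CLOUD_SERVICE"}))                       # Software (tie)
    print(vote({"SERVER_APPLICATION", "VIRTUAL_APPLIANCE", "CLOUD_SERVICE"}))  # Hardware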
regscale/integrations/commercial/wizv2/variables.py
CHANGED
@@ -37,3 +37,10 @@ class WizVariables(metaclass=RsVariablesMeta):
     wizClientId: RsVariableType(str, "", sensitive=True)  # type: ignore
     wizClientSecret: RsVariableType(str, "", sensitive=True)  # type: ignore
     wizLastInventoryPull: RsVariableType(str, "2022-01-01T00:00:00Z", required=False)  # type: ignore
+    useWizHardwareAssetTypes: RsVariableType(bool, False, required=False)  # type: ignore
+    wizHardwareAssetTypes: RsVariableType(
+        list,
+        '["SERVER_APPLICATION", "CLIENT_APPLICATION", "VIRTUAL_APPLIANCE"]',
+        default=["SERVER_APPLICATION", "CLIENT_APPLICATION", "VIRTUAL_APPLIANCE"],
+        required=False,
+    )  # type: ignore
regscale/integrations/public/fedramp/fedramp_cis_crm.py
CHANGED
@@ -22,7 +22,7 @@ from regscale.core.app.utils.app_utils import create_progress_object, error_and_
 from regscale.core.utils.graphql import GraphQLQuery
 from regscale.integrations.public.fedramp.parts_mapper import PartMapper
 from regscale.integrations.public.fedramp.ssp_logger import SSPLogger
-from regscale.models import ControlObjective, ImplementationObjective, Parameter, Profile
+from regscale.models import ControlObjective, ImplementationObjective, ImportValidater, Parameter, Profile
 from regscale.models.regscale_models import (
     ControlImplementation,
     File,
@@ -39,6 +39,7 @@ if TYPE_CHECKING:
     import pandas as pd

 from functools import lru_cache
+from tempfile import gettempdir

 T = TypeVar("T")
@@ -278,8 +279,8 @@ def map_origination(control_id: str, cis_data: dict) -> dict:
     for origination_str, bool_key in origination_mapping.items():
         if origination_str in control_origination:
             result[bool_key] = True
-
-
+    if control_origination not in result["record_text"]:
+        result["record_text"] += control_origination

     return result
@@ -351,6 +352,9 @@ def update_imp_objective(
     for objective in objectives:
         current_pair = (objective.id, imp.id)
         if current_pair not in existing_pairs:
+            if objective.securityControlId != imp.controlID:
+                # This is a bad match, do not save.
+                continue
             imp_obj = ImplementationObjective(
                 id=0,
                 uuid="",
@@ -437,6 +441,7 @@ def parse_control_details(
     control_imp.bServiceProviderSystemSpecific = origination_bool["bServiceProviderSystemSpecific"]
     control_imp.bServiceProviderHybrid = origination_bool["bServiceProviderHybrid"]
     control_imp.bConfiguredByCustomer = origination_bool["bConfiguredByCustomer"]
+    control_imp.bShared = origination_bool["bShared"]
     control_imp.bProvidedByCustomer = origination_bool["bProvidedByCustomer"]
     control_imp.responsibility = get_responsibility(origination_bool)
     logger.debug(f"Control Implementation Responsibility: {control_imp.responsibility}")
@@ -822,7 +827,6 @@ def parse_crm_worksheet(file_path: click.Path, crm_sheet_name: str, version: Lit
     :return: Formatted CRM content
     :rtype: dict
     """
-    pd = get_pandas()
     logger.info("Parsing CRM worksheet...")
     formatted_crm = {}
@@ -832,13 +836,19 @@ def parse_crm_worksheet(file_path: click.Path, crm_sheet_name: str, version: Lit
     # Value for rev4
     skip_rows = 3

-
-
-
+    validator = ImportValidater(
+        file_path=file_path,
+        disable_mapping=True,
+        required_headers=[],
+        mapping_file_path=gettempdir(),
+        prompt=False,
+        ignore_unnamed=True,
+        worksheet_name=crm_sheet_name,
+        warn_extra_headers=False,
     )

     # find index of row where the first column == Control ID
-    skip_rows = determine_skip_row(original_df=
+    skip_rows = determine_skip_row(original_df=validator.data, text_to_find=CONTROL_ID, original_skip=skip_rows)

     logger.debug(f"Skipping {skip_rows} rows in CRM worksheet")
@@ -850,13 +860,43 @@ def parse_crm_worksheet(file_path: click.Path, crm_sheet_name: str, version: Lit
     ]

     try:
+        # Verify that the columns are in the dataframe
+        header_row = validator.data.iloc[skip_rows - 1 :].iloc[0]
+
+        # Check if we have enough columns
+        if len(header_row) < len(usecols):
+            error_and_exit(
+                f"Not enough columns found in CRM worksheet. Expected {len(usecols)} columns but found {len(header_row)}."
+            )
+
+        # Verify each required column exists in the correct position
+        missing_columns = []
+        for i, expected_col in enumerate(usecols):
+            if header_row.iloc[i] != expected_col:
+                missing_columns.append(
+                    f"Expected '{expected_col}' at position {i + 1} but found '{header_row.iloc[i]}'"
+                )
+
+        if missing_columns:
+            error_msg = "Required columns not found in the CRM worksheet:\n" + "\n".join(missing_columns)
+            error_and_exit(error_msg)
+
+        logger.debug("Verified all required columns exist in CRM worksheet")
+
         # Reindex the dataframe and skip some rows
-        data =
+        data = validator.data.iloc[skip_rows:]
+
+        # Keep only the first three columns
+        data = data.iloc[:, :3]
+
+        # Rename the columns to match usecols
         data.columns = usecols
+        logger.debug(f"Kept only required columns: {', '.join(usecols)}")
+
     except KeyError as e:
         error_and_exit(f"KeyError: {e} - One or more columns specified in usecols are not found in the dataframe.")
     except Exception as e:
-        error_and_exit(f"An error occurred: {e}")
+        error_and_exit(f"An error occurred while processing CRM worksheet: {str(e)}")
     # Filter rows where "Can Be Inherited from CSP" is not equal to "No"
     exclude_no = data[data[CAN_BE_INHERITED_CSP] != "No"]

@@ -896,10 +936,21 @@ def parse_cis_worksheet(file_path: click.Path, cis_sheet_name: str) -> dict:
     pd = get_pandas()
     logger.info("Parsing CIS worksheet...")
     skip_rows = 2
-    # Parse the worksheet named 'CIS GovCloud U.S.+DoD (H)', skipping the initial rows
-    original_cis = pd.read_excel(file_path, sheet_name=cis_sheet_name)

-
+    validator = ImportValidater(
+        file_path=file_path,
+        disable_mapping=True,
+        required_headers=[],
+        mapping_file_path=gettempdir(),
+        prompt=False,
+        ignore_unnamed=True,
+        worksheet_name=cis_sheet_name,
+        warn_extra_headers=False,
+    )
+    skip_rows = determine_skip_row(original_df=validator.data, text_to_find=CONTROL_ID, original_skip=skip_rows)
+
+    # Parse the worksheet named 'CIS GovCloud U.S.+DoD (H)', skipping the initial rows
+    original_cis = validator.data

     cis_df = original_cis.iloc[skip_rows:].reset_index(drop=True)

@@ -912,6 +963,9 @@ def parse_cis_worksheet(file_path: click.Path, cis_sheet_name: str) -> dict:
     # Reset the index
     cis_df.reset_index(drop=True, inplace=True)

+    # Only keep the first 13 columns
+    cis_df = cis_df.iloc[:, :13]
+
     # Rename columns to standardize names
     cis_df.columns = [
         CONTROL_ID,
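The new verification step is positional: it compares the first usable row against the expected column names rather than relying on pandas header inference. A standalone sketch of the idea (the column names are illustrative, not the worksheet's real headers):

    import pandas as pd

    usecols = ["Control ID", "Can Be Inherited from CSP", "Customer Responsibility"]  # illustrative
    df = pd.DataFrame([
        ["Control ID", "Can Be Inherited from CSP", "Customer Responsibility"],  # header row
        ["AC-1", "Yes", "Document the access control policy."],                  # data row
    ])

    header_row = df.iloc[0]
    problems = [
        f"Expected '{col}' at position {i + 1} but found '{header_row.iloc[i]}'"
        for i, col in enumerate(usecols)
        if header_row.iloc[i] != col
    ]
    if problems:
        raise SystemExit("\n".join(problems))

    data = df.iloc[1:].iloc[:, :3]  # skip the header row, keep the first three columns
    data.columns = usecols
    print(data)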
regscale/integrations/scanner_integration.py
CHANGED
@@ -425,6 +425,8 @@ class IntegrationFinding:
     operational_requirements: Optional[str] = None
     deviation_rationale: Optional[str] = None
     is_cwe: bool = False
+    affected_controls: Optional[str] = None
+    identification: Optional[str] = "Vulnerability Assessment"

     poam_comments: Optional[str] = None
     vulnerability_id: Optional[int] = None
@@ -1613,7 +1615,7 @@ class ScannerIntegration(ABC):
         issue.severityLevel = finding.severity
         issue.issueOwnerId = self.assessor_id
         issue.securityPlanId = self.plan_id if not self.is_component else None
-        issue.identification = "Vulnerability Assessment"
+        issue.identification = finding.identification
         issue.dateFirstDetected = finding.first_seen
         issue.dueDate = finding.due_date
         issue.description = description
@@ -1631,6 +1633,7 @@ class ScannerIntegration(ABC):
         # Get control implementation ID for CCI if it exists
         # Only add CCI control ID if it exists
         cci_control_ids = [control_id] if control_id is not None else []
+        issue.affectedControls = finding.affected_controls

         issue.controlImplementationIds = list(set(finding._control_implementation_ids + cci_control_ids))  # noqa
         issue.isPoam = is_poam
@@ -1793,7 +1796,10 @@ class ScannerIntegration(ABC):
         :return: True if the issue should be a POAM, False otherwise
         :rtype: bool
         """
-        if ScannerVariables.vulnerabilityCreation.lower() == "poamcreation":
+        if (
+            ScannerVariables.vulnerabilityCreation.lower() == "poamcreation"
+            or ScannerVariables.complianceCreation.lower() == "poam"
+        ):
             return True
         if finding.due_date < get_current_datetime():
             return True
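With the new complianceCreation variable (added to ScannerVariables below), either global setting now forces POAM creation, and overdue findings still do. A minimal sketch of the decision, with the settings passed as plain strings:

    def is_poam(vulnerability_creation, compliance_creation, due_date, now):
        # Either global setting forces POAM creation...
        if vulnerability_creation.lower() == "poamcreation" or compliance_creation.lower() == "poam":
            return True
        # ...otherwise a finding past its due date becomes a POAM.
        return due_date < now

    print(is_poam("IssueCreation", "POAM", "2025-01-01", "2024-06-01"))        # True
    print(is_poam("IssueCreation", "Assessment", "2025-01-01", "2024-06-01"))  # False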
regscale/integrations/variables.py
CHANGED
@@ -25,3 +25,4 @@ class ScannerVariables(metaclass=RsVariablesMeta):
     issueDueDates: RsVariableType(dict, "dueDates", default="{'high': 60, 'moderate': 120, 'low': 364}", required=False)  # type: ignore # noqa: F722,F821
     maxRetries: RsVariableType(int, "3", default=3, required=False)  # type: ignore
     timeout: RsVariableType(int, "60", default=60, required=False)  # type: ignore
+    complianceCreation: RsVariableType(str, "Assessment|Issue|POAM", default="Assessment", required=False)  # type: ignore # noqa: F722,F821
regscale/models/app_models/import_validater.py
CHANGED
@@ -58,6 +58,7 @@ class ImportValidater:
         skip_rows: Optional[int] = None,
         prompt: bool = True,
         ignore_unnamed: bool = False,
+        warn_extra_headers: bool = True,
     ):
         self.ignore_unnamed = ignore_unnamed
         self.prompt = prompt
@@ -74,6 +75,7 @@ class ImportValidater:
         self.keys = keys
         self.worksheet_name = worksheet_name
         self.skip_rows = skip_rows
+        self.warn_extra_headers = warn_extra_headers
         if self.file_type not in self._supported_types:
             raise ValidationException(
                 f"Unsupported file type: {self.file_type}, supported types are: {', '.join(self._supported_types)}",
@@ -148,7 +150,7 @@ class ImportValidater:
             raise ValidationException(
                 f"{', '.join([f'`{header}`' for header in missing_headers])} header(s) not found in {self.file_path}"
             )
-        if extra_headers:
+        if extra_headers and self.warn_extra_headers:
             logger.warning("Extra headers found in the file: %s", ", ".join(extra_headers))

         if self.disable_mapping:
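A hedged usage sketch of the new flag, mirroring the call sites added in fedramp_cis_crm.py (the file path and sheet name are illustrative):

    from tempfile import gettempdir

    from regscale.models import ImportValidater

    # Mapping disabled, no required headers, extra-header warnings suppressed.
    validator = ImportValidater(
        file_path="crm_workbook.xlsx",   # illustrative path
        disable_mapping=True,
        required_headers=[],
        mapping_file_path=gettempdir(),
        prompt=False,
        ignore_unnamed=True,
        worksheet_name="CRM",            # illustrative sheet name
        warn_extra_headers=False,
    )
    df = validator.data  # parsed worksheet as a DataFrame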
File without changes