regscale-cli 6.16.2.0__py3-none-any.whl → 6.16.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli might be problematic.
- regscale/__init__.py +1 -1
- regscale/core/app/internal/control_editor.py +26 -2
- regscale/core/app/internal/model_editor.py +39 -26
- regscale/core/app/utils/api_handler.py +4 -11
- regscale/integrations/commercial/crowdstrike.py +0 -1
- regscale/integrations/commercial/grype/scanner.py +37 -29
- regscale/integrations/commercial/opentext/commands.py +2 -0
- regscale/integrations/commercial/opentext/scanner.py +45 -31
- regscale/integrations/commercial/qualys.py +52 -61
- regscale/integrations/commercial/servicenow.py +1 -0
- regscale/integrations/commercial/sicura/commands.py +9 -14
- regscale/integrations/commercial/snyk.py +2 -2
- regscale/integrations/commercial/synqly/ticketing.py +29 -0
- regscale/integrations/commercial/tenablev2/click.py +25 -13
- regscale/integrations/commercial/tenablev2/scanner.py +12 -3
- regscale/integrations/commercial/trivy/scanner.py +14 -6
- regscale/integrations/commercial/veracode.py +1 -1
- regscale/integrations/commercial/wizv2/click.py +15 -37
- regscale/integrations/jsonl_scanner_integration.py +120 -16
- regscale/integrations/public/fedramp/click.py +8 -8
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +499 -106
- regscale/integrations/public/fedramp/ssp_logger.py +2 -9
- regscale/integrations/scanner_integration.py +67 -27
- regscale/models/integration_models/cisa_kev_data.json +86 -12
- regscale/models/integration_models/flat_file_importer/__init__.py +29 -8
- regscale/models/integration_models/snyk.py +141 -15
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/tenable_models/integration.py +23 -3
- regscale/models/integration_models/veracode.py +91 -48
- regscale/models/regscale_models/control_implementation.py +18 -0
- regscale/models/regscale_models/control_objective.py +2 -1
- regscale/models/regscale_models/facility.py +10 -26
- regscale/models/regscale_models/functional_roles.py +38 -0
- regscale/models/regscale_models/issue.py +3 -1
- regscale/models/regscale_models/parameter.py +21 -3
- regscale/models/regscale_models/profile.py +22 -0
- regscale/models/regscale_models/profile_mapping.py +48 -3
- regscale/models/regscale_models/regscale_model.py +2 -0
- regscale/models/regscale_models/risk.py +38 -30
- regscale/models/regscale_models/security_plan.py +1 -0
- regscale/models/regscale_models/supply_chain.py +1 -1
- regscale/models/regscale_models/user.py +19 -6
- regscale/utils/threading/__init__.py +1 -0
- regscale/utils/threading/threadsafe_list.py +10 -0
- regscale/utils/threading/threadsafe_set.py +116 -0
- regscale/utils/version.py +3 -5
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/RECORD +52 -50
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/top_level.txt +0 -0
regscale/integrations/commercial/qualys.py

@@ -27,9 +27,9 @@ from regscale.core.app.utils.app_utils import (
     save_data_to,
 )
 from regscale.core.app.utils.file_utils import download_from_s3
-from regscale.models.app_models.click import regscale_ssp_id
 from regscale.models import Asset, Issue, Search, regscale_models
 from regscale.models.app_models.click import NotRequiredIf, save_output_to
+from regscale.models.app_models.click import regscale_ssp_id
 from regscale.models.integration_models.flat_file_importer import FlatFileImporter
 from regscale.models.integration_models.qualys import (
     Qualys,
@@ -611,11 +611,7 @@ def get_scan_results(scans: Any, task: TaskID) -> dict:
     :return: dictionary of detailed Qualys scans
     :rtype: dict
     """
-
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    qualys_url, QUALYS_API = _get_qualys_api()
 
     scan_data = {}
     # check number of scans requested
@@ -636,7 +632,7 @@ def get_scan_results(scans: Any, task: TaskID) -> dict:
     }
     # get the scan data via API
     res = QUALYS_API.get(
-        url=urljoin(
+        url=urljoin(qualys_url, "/api/2.0/fo/scan/"),
         headers=HEADERS,
         params=params,
     )
@@ -664,11 +660,7 @@ def get_detailed_scans(days: int) -> list:
     :return: list of results from Qualys API
     :rtype: list
     """
-
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    qualys_url, QUALYS_API = _get_qualys_api()
 
     today = datetime.now()
     scan_date = today - timedelta(days=days)
@@ -684,12 +676,12 @@ def get_detailed_scans(days: int) -> list:
         "scan_datetime_since": scan_date.strftime("%Y-%m-%dT%H:%I:%S%ZZ"),
     }
     res = QUALYS_API.get(
-        url=urljoin(
+        url=urljoin(qualys_url, "/api/2.0/fo/scan/summary/"),
         headers=HEADERS,
         params=params,
     )
     response = QUALYS_API.get(
-        url=urljoin(
+        url=urljoin(qualys_url, "/api/2.0/fo/scan/vm/summary/"),
         headers=HEADERS,
         params=params2,
     )
@@ -708,6 +700,34 @@ def get_detailed_scans(days: int) -> list:
     return res_data
 
 
+def _get_config():
+    """
+    Get the Qualys configuration
+
+    :return: Qualys configuration
+    :rtype: dict
+    """
+    app = check_license()
+    config = app.config
+    return config
+
+
+def _get_qualys_api():
+    """
+    Get the Qualys API session
+
+    :return: Qualys API session
+    :rtype: Session
+    """
+    config = _get_config()
+
+    # set the auth for the QUALYS_API session
+    QUALYS_API.auth = (config.get("qualysUserName"), config.get("qualysPassword"))
+    QUALYS_API.verify = config.get("sslVerify", True)
+    qualys_url = config.get("qualysUrl")
+    return qualys_url, QUALYS_API
+
+
 def import_total_cloud_data_from_qualys_api(security_plan_id: int, include_tags: str, exclude_tags: str):
     """
     Function to get the total cloud data from Qualys API
@@ -717,10 +737,7 @@ def import_total_cloud_data_from_qualys_api(security_plan_id: int, include_tags:
     """
     try:
 
-
-        config = app.config
-        # set the auth for the QUALYS_API session
-        QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+        qualys_url, QUALYS_API = _get_qualys_api()
         params = {
             "action": "list",
             "show_asset_id": "1",
@@ -732,12 +749,14 @@ def import_total_cloud_data_from_qualys_api(security_plan_id: int, include_tags:
             params["tag_set_exclude"] = exclude_tags
         if include_tags:
             params["tag_set_include"] = include_tags
+        logger.info("Fetching Qualys Total Cloud data...")
         response = QUALYS_API.get(
-            url=urljoin(
+            url=urljoin(qualys_url, "/api/2.0/fo/asset/host/vm/detection/"),
             headers=HEADERS,
             params=params,
         )
         if response and response.ok:
+            logger.info("Total cloud data fetched. processing...")
             response_data = xmltodict.parse(response.text)
             qt = QualysTotalCloudIntegration(plan_id=security_plan_id, xml_data=response_data)
             qt.fetch_assets()
@@ -761,17 +780,13 @@ def get_scans_summary(scan_choice: str) -> dict:
     :return: Detailed summary of scans from Qualys API as a dictionary
     :rtype: dict
     """
-
-    config = app.config
+    qualys_url, QUALYS_API = _get_qualys_api()
     urls = []
 
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
-
     # set up variables for function
     scan_data = {}
     responses = []
-    scan_url = urljoin(
+    scan_url = urljoin(qualys_url, "/api/2.0/fo/scan/")
 
     # set up parameters for the scans query
     params = {"action": "list"}
@@ -812,11 +827,7 @@ def get_scan_details(days: int) -> list:
     :return: Detailed summary of scans from Qualys API as a dictionary
     :rtype: list
     """
-
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    qualys_url, QUALYS_API = _get_qualys_api()
     # get since date for API call
     since_date = datetime.now() - timedelta(days=days)
     # set up data and parameters for the scans query
@@ -835,12 +846,12 @@ def get_scan_details(days: int) -> list:
         "scan_datetime_since": since_date.strftime("%Y-%m-%dT%H:%M:%SZ"),
     }
     res = QUALYS_API.get(
-        url=urljoin(
+        url=urljoin(qualys_url, "/api/2.0/fo/scan/summary/"),
        headers=headers,
        params=params,
    )
    response = QUALYS_API.get(
-        url=urljoin(
+        url=urljoin(qualys_url, "/api/2.0/fo/scan/vm/summary/"),
        headers=headers,
        params=params2,
    )
@@ -874,11 +885,7 @@ def sync_qualys_assets_and_vulns(
     :param Optional[Union[int, str]] asset_group_filter: Filter the Qualys assets by an asset group ID or name, if any
     :rtype: None
     """
-
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    config = _get_config()
 
     # Get the assets from RegScale with the provided SSP ID
     logger.info("Getting assets from RegScale for SSP #%s...", ssp_id)
@@ -1086,14 +1093,10 @@ def get_qualys_assets_and_scan_results(
     :return: list of dictionaries containing asset data
     :rtype: list
     """
-
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    qualys_url, QUALYS_API = _get_qualys_api()
     # set url
     if not url:
-        url = urljoin(
+        url = urljoin(qualys_url, "api/2.0/fo/asset/host/vm/detection?action=list&show_asset_id=1")
 
     # check if an asset group filter was provided and append it to the url
     if asset_group_filter:
@@ -1143,12 +1146,7 @@ def get_issue_data_for_assets(asset_list: list) -> Tuple[list[dict], int]:
     :return: Updated asset list of Qualys assets and their associated vulnerabilities, total number of vulnerabilities
     :rtype: Tuple[list[dict], int]
     """
-
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
-
+    config = _get_config()
     with job_progress:
         issues = {}
         for asset in asset_list:
@@ -1285,9 +1283,9 @@ def lookup_asset(asset_list: list, asset_id: str = None) -> list[Asset]:
     :rtype: list[Asset]
     """
     if asset_id:
-        results = [
+        results = [asset for asset in asset_list if getattr(asset, "qualysId", None) == asset_id]
     else:
-        results = [
+        results = [asset for asset in asset_list]
     # Return unique list
     return list(set(results)) or []
 
@@ -1322,11 +1320,7 @@ def create_regscale_issue_from_vuln(
     :return: list of RegScale issues to update, and a list of issues to be created
     :rtype: Tuple[list[Issue], list[Issue]]
     """
-
-    config = app.config
-
-    # set the auth for the QUALYS_API session
-    QUALYS_API.auth = (config["qualysUserName"], config["qualysPassword"])
+    config = _get_config()
     default_status = config["issues"]["qualys"]["status"]
     regscale_issues = []
     regscale_existing_issues = Issue.get_all_by_parent(parent_id=regscale_ssp_id, parent_module="securityplans")
@@ -1441,13 +1435,10 @@ def get_asset_groups_from_qualys() -> list:
     :return: list of assets from Qualys
     :rtype: list
     """
-    app = check_license()
-    config = app.config
     asset_groups = []
 
-
-
-    response = QUALYS_API.get(url=urljoin(config["qualysUrl"], "api/2.0/fo/asset/group?action=list"), headers=HEADERS)
+    qualys_url, QUALYS_API = _get_qualys_api()
+    response = QUALYS_API.get(url=urljoin(qualys_url, "api/2.0/fo/asset/group?action=list"), headers=HEADERS)
     if response.ok:
         logger.debug(response.text)
         try:

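Every qualys.py hunk above replaces the per-function `config = app.config` / `QUALYS_API.auth = (...)` boilerplate with the new `_get_config()` and `_get_qualys_api()` helpers, which also set SSL verification from `sslVerify` and hand back the Qualys base URL. A minimal standalone sketch of that pattern, with `requests.Session` standing in for the module's `QUALYS_API` object and an inline dict standing in for `app.config` (all names and values here are illustrative, not taken from the package):

```python
from urllib.parse import urljoin

import requests


def get_qualys_session(config: dict) -> tuple:
    """Stand-in for the new _get_qualys_api() helper: one place to set auth and SSL."""
    session = requests.Session()
    session.auth = (config.get("qualysUserName"), config.get("qualysPassword"))
    session.verify = config.get("sslVerify", True)
    return config.get("qualysUrl"), session


config = {"qualysUserName": "user", "qualysPassword": "secret", "qualysUrl": "https://qualysapi.example.com"}
qualys_url, session = get_qualys_session(config)
# Callers then build endpoint URLs the same way the patched functions do:
print(urljoin(qualys_url, "/api/2.0/fo/scan/"))  # -> https://qualysapi.example.com/api/2.0/fo/scan/
```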
regscale/integrations/commercial/servicenow.py

@@ -741,6 +741,7 @@ def create_snow_tag(snow_config: ServiceNowConfig, tag_name: str) -> Optional[di
         "active": True,
         "sys_class_name": "tag",
         "type": "Standard",
+        "viewable_by": "everyone",
     }
     url = urljoin(snow_config.url, "api/now/table/label")
     response = snow_api.post(

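The single servicenow.py change adds `"viewable_by": "everyone"` to the label payload built by `create_snow_tag`, presumably so the created tag is visible to all users rather than only its creator. A hedged sketch of the resulting request body; only the fields shown in the hunk are confirmed, while the `name` key and the helper function are assumptions for illustration:

```python
def build_label_payload(tag_name: str) -> dict:
    """Hypothetical helper showing the ServiceNow label payload after this change."""
    return {
        "name": tag_name,           # assumption: the tag name passed to create_snow_tag
        "active": True,
        "sys_class_name": "tag",
        "type": "Standard",
        "viewable_by": "everyone",  # new in 6.16.4.0
    }


print(build_label_payload("regscale-sync"))
```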
regscale/integrations/commercial/sicura/commands.py

@@ -5,11 +5,10 @@ This module contains the Click command group for Sicura.
 import logging
 
 import click
-
+
 from regscale.models import regscale_id
 
 logger = logging.getLogger("regscale")
-console = Console()
 
 
 @click.group()
@@ -23,7 +22,7 @@ def sicura():
 
 @sicura.command(name="sync_assets")
 @regscale_id(help="RegScale will create and update assets as children of this record.")
-def sync_assets(regscale_id):
+def sync_assets(regscale_id: int):
     """
     Sync Sicura assets to RegScale.
 
@@ -36,20 +35,18 @@ def sync_assets(regscale_id):
             plan_id=regscale_id,
         )
 
-
-
-        integration.sync_assets(plan_id=regscale_id)
+        # Using import_assets method which handles the synchronization
+        integration.sync_assets(plan_id=regscale_id)
 
-
+        logger.info("[bold green]Sicura asset synchronization complete.")
 
     except Exception as e:
         logger.error(f"Error syncing assets: {e}", exc_info=True)
-        console.print(f"[bold red]Error syncing assets: {e}")
 
 
 @sicura.command(name="sync_findings")
 @regscale_id(help="RegScale will create and update findings as children of this record.")
-def sync_findings(regscale_id):
+def sync_findings(regscale_id: int):
     """
     Sync Sicura findings to RegScale.
 
@@ -62,12 +59,10 @@ def sync_findings(regscale_id):
             plan_id=regscale_id,
         )
 
-
-
-        integration.sync_findings(plan_id=regscale_id)
+        # Using import_findings method which handles the synchronization
+        integration.sync_findings(plan_id=regscale_id)
 
-
+        logger.info("[bold green]Finding synchronization complete.")
 
     except Exception as e:
         logger.error(f"Error syncing findings: {e}", exc_info=True)
-        console.print(f"[bold red]Error syncing findings: {e}")

regscale/integrations/commercial/snyk.py

@@ -18,7 +18,7 @@ def snyk():
 
 @snyk.command(name="import_snyk")
 @FlatFileImporter.common_scanner_options(
-    message="File path to the folder containing Snyk .xlsx files to process to RegScale.",
+    message="File path to the folder containing Snyk .xlsx or .json files to process to RegScale.",
     prompt="File path for Snyk files",
     import_name="snyk",
 )
@@ -77,7 +77,7 @@ def import_synk_files(
     FlatFileImporter.import_files(
         import_type=Snyk,
         import_name="Snyk",
-        file_types=".xlsx",
+        file_types=[".xlsx", ".json"],
         folder_path=folder_path,
         regscale_ssp_id=regscale_ssp_id,
         scan_date=scan_date,

regscale/integrations/commercial/synqly/ticketing.py

@@ -13,6 +13,17 @@ def ticketing() -> None:
     pass
 
 
+@ticketing.command(name="sync_autotask")
+@regscale_id()
+@regscale_module()
+def sync_autotask(regscale_id: int, regscale_module: str) -> None:
+    """Sync Ticketing data between Autotask and RegScale."""
+    from regscale.models.integration_models.synqly_models.connectors import Ticketing
+
+    ticketing_autotask = Ticketing("autotask")
+    ticketing_autotask.run_sync(regscale_id=regscale_id, regscale_module=regscale_module)
+
+
 @ticketing.command(name="sync_jira")
 @regscale_id()
 @regscale_module()
@@ -111,6 +122,24 @@ def sync_servicenow(regscale_id: int, regscale_module: str, issue_type: str, def
     )
 
 
+@ticketing.command(name="sync_servicenow_sir")
+@regscale_id()
+@regscale_module()
+@click.option(
+    "--issue_type",
+    type=click.STRING,
+    help="servicenow_sir issue type",
+    required=True,
+    prompt="servicenow_sir issue type",
+)
+def sync_servicenow_sir(regscale_id: int, regscale_module: str, issue_type: str) -> None:
+    """Sync Ticketing data between Servicenow Sir and RegScale."""
+    from regscale.models.integration_models.synqly_models.connectors import Ticketing
+
+    ticketing_servicenow_sir = Ticketing("servicenow_sir")
+    ticketing_servicenow_sir.run_sync(regscale_id=regscale_id, regscale_module=regscale_module, issue_type=issue_type)
+
+
 @ticketing.command(name="sync_torq")
 @regscale_id()
 @regscale_module()

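Both new commands follow the existing connector pattern: instantiate `Ticketing("<provider>")` and call `run_sync(...)`. A hypothetical way to exercise `sync_autotask` through Click's test runner; the `--regscale_id` / `--regscale_module` flag names and the `securityplans` module value are assumptions about what the `regscale_id()` and `regscale_module()` decorators register:

```python
from click.testing import CliRunner

from regscale.integrations.commercial.synqly.ticketing import ticketing

runner = CliRunner()
# Sync Autotask tickets for RegScale record 123 (flag names assumed, see note above).
result = runner.invoke(
    ticketing,
    ["sync_autotask", "--regscale_id", "123", "--regscale_module", "securityplans"],
)
print(result.exit_code, result.output)
```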
regscale/integrations/commercial/tenablev2/click.py

@@ -4,7 +4,7 @@
 
 import queue
 from concurrent.futures import wait
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING
 
 from regscale.integrations.integration_override import IntegrationOverride
 
@@ -262,13 +262,17 @@ def get_queries() -> list:
     required=True,
 )
 @regscale_ssp_id()
+@click.option(
+    "--scan_date",
+    "-sd",
+    type=click.DateTime(formats=["%Y-%m-%d"]),
+    help="The scan date of the file.",
+    required=False,
+)
 # Add Prompt for RegScale SSP name
-def query_vuln(query_id: int, regscale_ssp_id: int):
+def query_vuln(query_id: int, regscale_ssp_id: int, scan_date: datetime = None):
     """Query Tenable vulnerabilities and sync assets to RegScale."""
-    q_vuln(
-        query_id=query_id,
-        ssp_id=regscale_ssp_id,
-    )
+    q_vuln(query_id=query_id, ssp_id=regscale_ssp_id, scan_date=scan_date)
 
 
 @io.command(name="sync_assets")
@@ -300,14 +304,20 @@ def query_assets(regscale_ssp_id: int, tags: Optional[List[Tuple[str, str]]] = N
     required=False,
     callback=validate_tags,
 )
-
-
+@click.option(
+    "--scan_date",
+    "-sd",
+    type=click.DateTime(formats=["%Y-%m-%d"]),
+    help="The scan date of the file.",
+    required=False,
+)
+def query_vulns(regscale_ssp_id: int, tags: Optional[List[Tuple[str, str]]] = None, scan_date: datetime = None):
     """
     Query Tenable vulnerabilities and sync assets, vulnerabilities and issues to RegScale.
     """
     from regscale.integrations.commercial.tenablev2.scanner import TenableIntegration
 
-    TenableIntegration.sync_findings(plan_id=regscale_ssp_id, tags=tags)
+    TenableIntegration.sync_findings(plan_id=regscale_ssp_id, tags=tags, scan_date=scan_date)
 
 
 def validate_regscale_security_plan(parent_id: int) -> bool:
@@ -409,18 +419,19 @@ def process_vulnerabilities(counts: collections.Counter, reg_assets: list, ssp_i
     return update_assets
 
 
-def q_vuln(query_id: int, ssp_id: int) -> list:
+def q_vuln(query_id: int, ssp_id: int, scan_date: datetime = None) -> list:
     """
     Query Tenable vulnerabilities
 
     :param int query_id: Tenable query ID
     :param int ssp_id: RegScale System Security Plan ID
+    :param datetime scan_date: Scan date, defaults to None
     :return: List of queries from Tenable
     :rtype: list
     """
     check_license()
     # At SSP level, provide a list of vulnerabilities and the counts of each
-    fetch_vulns(query_id=query_id, regscale_ssp_id=ssp_id)
+    fetch_vulns(query_id=query_id, regscale_ssp_id=ssp_id, scan_date=scan_date)
 
 
 def process_vuln(counts: collections.Counter, reg_assets: list, ssp_id: int, vuln: TenableAsset) -> list:
@@ -708,18 +719,19 @@ def fetch_assets(ssp_id: int) -> list[TenableIOAsset]:
     return assets
 
 
-def fetch_vulns(query_id: int = 0, regscale_ssp_id: int = 0):
+def fetch_vulns(query_id: int = 0, regscale_ssp_id: int = 0, scan_date: datetime = None):
     """
     Fetch vulnerabilities from Tenable by query ID
 
     :param int query_id: Tenable query ID, defaults to 0
     :param int regscale_ssp_id: RegScale System Security Plan ID, defaults to 0
+    :param datetime scan_date: Scan date, defaults to None
     """
 
     client = gen_client()
     if query_id and client._env_base == "TSC":
         vulns = client.analysis.vulns(query_id=query_id)
-        sc = SCIntegration(plan_id=regscale_ssp_id)
+        sc = SCIntegration(plan_id=regscale_ssp_id, scan_date=scan_date)
         # Create pickle file to cache data
         # make sure folder exists
         with tempfile.TemporaryDirectory() as temp_dir:

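Both Tenable commands gain an optional `--scan_date` that Click parses with `click.DateTime(formats=["%Y-%m-%d"])` and then forwards through `q_vuln()` / `fetch_vulns()` to `SCIntegration`, or through `TenableIntegration.sync_findings()`. A small self-contained demo of how that option type behaves; the command name and echo output are mine, only the option wiring mirrors the hunks above:

```python
import click


@click.command()
@click.option(
    "--scan_date",
    "-sd",
    type=click.DateTime(formats=["%Y-%m-%d"]),  # same option type as the new Tenable flags
    required=False,
)
def demo(scan_date):
    # Click converts "2024-05-01" into datetime.datetime(2024, 5, 1, 0, 0); omitting the flag yields None.
    click.echo(f"scan_date={scan_date!r}")


if __name__ == "__main__":
    demo()  # e.g. python demo.py --scan_date 2024-05-01
```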
regscale/integrations/commercial/tenablev2/scanner.py

@@ -6,9 +6,9 @@ import datetime
 import json
 import linecache
 import logging
-from pathlib import Path
 from typing import Any, Dict, Iterator, List, Optional, Tuple
 
+from pathlib import Path
 from tenable.errors import TioExportsError
 
 from regscale.core.app.utils.app_utils import get_current_datetime
@@ -18,7 +18,12 @@ from regscale.integrations.commercial.tenablev2.authenticate import gen_tio
 from regscale.integrations.commercial.tenablev2.stig_parsers import parse_stig_output
 from regscale.integrations.commercial.tenablev2.utils import get_last_pull_epoch
 from regscale.integrations.commercial.tenablev2.variables import TenableVariables
-from regscale.integrations.scanner_integration import
+from regscale.integrations.scanner_integration import (
+    IntegrationAsset,
+    IntegrationFinding,
+    ScannerIntegration,
+    issue_due_date,
+)
 from regscale.integrations.variables import ScannerVariables
 from regscale.models import regscale_models
 
@@ -44,9 +49,10 @@ class TenableIntegration(ScannerIntegration):
         :param int plan_id: The ID of the security plan
         :param int tenant_id: The ID of the tenant, defaults to 1
         """
-        super().__init__(plan_id, tenant_id)
+        super().__init__(plan_id, tenant_id, **kwargs)
         self.client = None
         self.tags = tags or []
+        self.scan_date = kwargs.get("scan_date", get_current_datetime())
 
     def authenticate(self) -> None:
         """Authenticate to Tenable."""
@@ -482,6 +488,9 @@ class TenableIntegration(ScannerIntegration):
             poam_comments=None,
             vulnerable_asset=asset_id,
             source_rule_id=str(plugin.get("id", "")),
+            due_date=issue_due_date(
+                severity=severity, created_date=self.scan_date, title="tenable", config=self.app.config
+            ),
         )
         if is_stig:
             integration_finding = parse_stig_output(output=plugin_output, finding=integration_finding)

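On the scanner side, the constructor now forwards `**kwargs` to `ScannerIntegration` and keeps a `scan_date` that defaults to the current time, which later feeds `issue_due_date(...)` for each finding. A toy reproduction of just that default handling, with `datetime.now()` standing in for the package's `get_current_datetime()` (class name and prints are illustrative only):

```python
from datetime import datetime


class ScanDateSketch:
    """Illustrative only: mirrors how TenableIntegration.__init__ captures scan_date."""

    def __init__(self, **kwargs):
        # Use the caller-supplied scan date when present, otherwise fall back to "now".
        self.scan_date = kwargs.get("scan_date", datetime.now())


print(ScanDateSketch().scan_date)                                # no CLI value -> current time
print(ScanDateSketch(scan_date=datetime(2024, 5, 1)).scan_date)  # CLI-provided date wins
```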
regscale/integrations/commercial/trivy/scanner.py

@@ -13,7 +13,7 @@ from pathlib import Path
 
 from regscale.core.app.utils.parser_utils import safe_datetime_str
 from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
-from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding, issue_due_date
 from regscale.models import IssueSeverity, AssetStatus, IssueStatus
 
 logger = logging.getLogger("regscale")
@@ -50,6 +50,9 @@ class TrivyIntegration(JSONLScannerIntegration):
         kwargs["read_files_only"] = True
         kwargs["file_pattern"] = "*.json"
         self.disable_mapping = kwargs["disable_mapping"] = True
+        self.scan_date = kwargs.get("scan_date") if "scan_date" in kwargs else None
+        if self.scan_date:
+            self.scan_date = self.clean_scan_date(self.scan_date)
         super().__init__(*args, **kwargs)
 
     def is_valid_file(self, data: Any, file_path: Union[Path, str]) -> Tuple[bool, Optional[Dict[str, Any]]]:
@@ -167,8 +170,10 @@ class TrivyIntegration(JSONLScannerIntegration):
         :return: IntegrationFinding object
         :rtype: IntegrationFinding
         """
+        created_date = safe_datetime_str(data.get("CreatedAt"))
         # Get scan date from the finding or use current time
-        scan_date
+        if self.scan_date is None:
+            self.scan_date = created_date
 
         # Process severity
         severity_str = item.get("Severity", "UNKNOWN")
@@ -220,10 +225,10 @@ class TrivyIntegration(JSONLScannerIntegration):
             plugin_id=plugin_id,
             asset_identifier=asset_identifier,
             cve=cve,
-            first_seen=
-            last_seen=scan_date,
-            scan_date=scan_date,
-            date_created=
+            first_seen=safe_datetime_str(data.get("CreatedAt")),
+            last_seen=self.scan_date,
+            scan_date=self.scan_date,
+            date_created=item.get("CreatedAt"),
             category="Software",
             control_labels=[],
             installed_versions=item.get("InstalledVersion", ""),
@@ -234,6 +239,9 @@ class TrivyIntegration(JSONLScannerIntegration):
             build_version=build_version,
             fixed_versions=item.get("FixedVersion", ""),
             fix_status=item.get("Status", ""),
+            due_date=issue_due_date(
+                severity=severity, created_date=created_date, title="trivy", config=self.app.config
+            ),
         )
 
     @staticmethod

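The Trivy integration applies the same idea in two steps: a `scan_date` passed via kwargs is normalized with `clean_scan_date()`, and when none is given the report's own `CreatedAt` timestamp is used for `first_seen`, `last_seen`, and `scan_date`. A hypothetical standalone version of that fallback; the helper name and string types are mine, not the package's:

```python
from typing import Optional


def resolve_scan_date(cli_scan_date: Optional[str], report_created_at: Optional[str]) -> Optional[str]:
    """Prefer an explicitly supplied scan date; otherwise use the report's CreatedAt."""
    return cli_scan_date if cli_scan_date is not None else report_created_at


print(resolve_scan_date(None, "2024-05-01T10:30:00Z"))          # falls back to the report timestamp
print(resolve_scan_date("2024-06-01", "2024-05-01T10:30:00Z"))  # explicit date wins
```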
regscale/integrations/commercial/veracode.py

@@ -83,7 +83,7 @@ def import_veracode_data(
     FlatFileImporter.import_files(
         import_type=Veracode,
         import_name="Veracode",
-        file_types=[".xml", ".xlsx"],
+        file_types=[".xml", ".xlsx", ".json"],
         folder_path=folder_path,
         regscale_ssp_id=regscale_ssp_id,
         scan_date=scan_date,