regscale-cli 6.20.10.0__py3-none-any.whl → 6.21.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of regscale-cli might be problematic.

Files changed (64)
  1. regscale/_version.py +1 -1
  2. regscale/core/app/application.py +12 -5
  3. regscale/core/app/internal/set_permissions.py +58 -27
  4. regscale/integrations/commercial/__init__.py +1 -2
  5. regscale/integrations/commercial/amazon/common.py +79 -2
  6. regscale/integrations/commercial/aws/cli.py +183 -9
  7. regscale/integrations/commercial/aws/scanner.py +544 -9
  8. regscale/integrations/commercial/cpe.py +18 -1
  9. regscale/integrations/commercial/nessus/scanner.py +2 -0
  10. regscale/integrations/commercial/sonarcloud.py +35 -36
  11. regscale/integrations/commercial/synqly/ticketing.py +51 -0
  12. regscale/integrations/commercial/tenablev2/jsonl_scanner.py +2 -1
  13. regscale/integrations/commercial/wizv2/async_client.py +10 -3
  14. regscale/integrations/commercial/wizv2/click.py +102 -26
  15. regscale/integrations/commercial/wizv2/constants.py +249 -1
  16. regscale/integrations/commercial/wizv2/issue.py +2 -2
  17. regscale/integrations/commercial/wizv2/parsers.py +3 -2
  18. regscale/integrations/commercial/wizv2/policy_compliance.py +1858 -0
  19. regscale/integrations/commercial/wizv2/scanner.py +15 -21
  20. regscale/integrations/commercial/wizv2/utils.py +258 -85
  21. regscale/integrations/commercial/wizv2/variables.py +4 -3
  22. regscale/integrations/compliance_integration.py +1455 -0
  23. regscale/integrations/integration_override.py +15 -6
  24. regscale/integrations/public/fedramp/fedramp_five.py +1 -1
  25. regscale/integrations/public/fedramp/markdown_parser.py +7 -1
  26. regscale/integrations/scanner_integration.py +193 -37
  27. regscale/models/app_models/__init__.py +1 -0
  28. regscale/models/integration_models/amazon_models/inspector_scan.py +32 -57
  29. regscale/models/integration_models/aqua.py +92 -78
  30. regscale/models/integration_models/cisa_kev_data.json +117 -5
  31. regscale/models/integration_models/defenderimport.py +64 -59
  32. regscale/models/integration_models/ecr_models/ecr.py +100 -147
  33. regscale/models/integration_models/flat_file_importer/__init__.py +52 -38
  34. regscale/models/integration_models/ibm.py +29 -47
  35. regscale/models/integration_models/nexpose.py +156 -68
  36. regscale/models/integration_models/prisma.py +46 -66
  37. regscale/models/integration_models/qualys.py +99 -93
  38. regscale/models/integration_models/snyk.py +229 -158
  39. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  40. regscale/models/integration_models/veracode.py +15 -20
  41. regscale/{integrations/commercial/wizv2/models.py → models/integration_models/wizv2.py} +4 -12
  42. regscale/models/integration_models/xray.py +276 -82
  43. regscale/models/regscale_models/control_implementation.py +14 -12
  44. regscale/models/regscale_models/file.py +4 -0
  45. regscale/models/regscale_models/issue.py +123 -0
  46. regscale/models/regscale_models/milestone.py +1 -1
  47. regscale/models/regscale_models/rbac.py +22 -0
  48. regscale/models/regscale_models/regscale_model.py +4 -2
  49. regscale/models/regscale_models/security_plan.py +1 -1
  50. regscale/utils/graphql_client.py +3 -1
  51. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/METADATA +9 -9
  52. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/RECORD +64 -60
  53. tests/fixtures/test_fixture.py +58 -2
  54. tests/regscale/core/test_app.py +5 -3
  55. tests/regscale/core/test_version_regscale.py +5 -3
  56. tests/regscale/integrations/test_integration_mapping.py +522 -40
  57. tests/regscale/integrations/test_issue_due_date.py +1 -1
  58. tests/regscale/integrations/test_update_finding_dates.py +336 -0
  59. tests/regscale/integrations/test_wiz_policy_compliance_affected_controls.py +154 -0
  60. tests/regscale/models/test_asset.py +406 -50
  61. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/LICENSE +0 -0
  62. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/WHEEL +0 -0
  63. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/entry_points.txt +0 -0
  64. {regscale_cli-6.20.10.0.dist-info → regscale_cli-6.21.1.0.dist-info}/top_level.txt +0 -0

regscale/integrations/commercial/cpe.py

@@ -109,6 +109,23 @@ def extract_product_name_and_version(cpe_string: str) -> Dict:
     :rtype: Dict
     """
     # convert to version 2.3 if 2.2
+    # TODO: Note this is an incomplete conversion as the additional properties
+    # in the URI format (which is still supported in 2.3) are separated by
+    # tildes (~) after the final colon. We should extend this to support them
+    # at some point to be safe. Example from NISTIR7697 the 2.3 dictionary
+    # specification:
+    #
+    # WFN:
+    # wfn:[part="o",vendor="microsoft",product="windows_vista",version="6\.0",
+    #      update="sp1",edition=NA,language=NA,sw_edition="home_premium",
+    #      target_sw=NA,target_hw="x64",other=NA]
+    #
+    # WFN bound to a URI:
+    # cpe:/o:microsoft:windows_vista:6.0:sp1:~-~home_premium~-~x64~-
+    #
+    # WFN bound to a formatted string:
+    # cpe:2.3:o:microsoft:windows_vista:6.0:sp1:-:-:home_premium:-:x64:-
+    #
     if cpe_string.startswith("cpe:/"):
         cpe_string = cpe_string.replace("cpe:/", "cpe:2.3:")

@@ -117,7 +134,7 @@ def extract_product_name_and_version(cpe_string: str) -> Dict:

     # Extract the product name and version
     # parts[3] is the product name, parts[4] is the version
-    part = parts[2]
+    part = parts[2] if len(parts) > 2 else None
     logger.debug(f"part: {part}")
     vendor_name = parts[3] if len(parts) > 3 else None
     product_name = parts[4] if len(parts) > 4 else None
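
The guarded indexing above avoids an IndexError when a CPE string has fewer colon-separated fields than expected. A minimal standalone sketch of the same pattern, assuming a hypothetical `parse_cpe_fields` helper (not the function in `cpe.py`):

```python
from typing import Dict, Optional


def parse_cpe_fields(cpe_string: str) -> Dict[str, Optional[str]]:
    """Hypothetical helper: split a CPE string and tolerate missing fields."""
    # Normalize the legacy 2.2 URI prefix to the 2.3 formatted-string prefix,
    # mirroring the replace() call in the diff above.
    if cpe_string.startswith("cpe:/"):
        cpe_string = cpe_string.replace("cpe:/", "cpe:2.3:")
    parts = cpe_string.split(":")
    # Index only when the field exists; otherwise fall back to None.
    return {
        "part": parts[2] if len(parts) > 2 else None,
        "vendor": parts[3] if len(parts) > 3 else None,
        "product": parts[4] if len(parts) > 4 else None,
        "version": parts[5] if len(parts) > 5 else None,
    }


print(parse_cpe_fields("cpe:2.3:o:microsoft:windows_vista:6.0:sp1:-:-:home_premium:-:x64:-"))
# {'part': 'o', 'vendor': 'microsoft', 'product': 'windows_vista', 'version': '6.0'}
```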

regscale/integrations/commercial/nessus/scanner.py

@@ -83,6 +83,8 @@ class NessusIntegration(ScannerIntegration):
         for file in iterate_files(file_collection):
             content = read_file(file)
             root = ET.fromstring(content)
+            if scan_dt := nfr.scan.scan_time_start(root):
+                self.scan_date = scan_dt.strftime("%Y-%m-%d")
             for nessus_asset in nfr.scan.report_hosts(root):
                 asset_name = nfr.host.report_host_name(nessus_asset)
                 for nessus_vulnerability in root.iterfind(f"./Report/ReportHost[@name='{asset_name}']/ReportItem"):
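
The two added lines set the integration's scan date only when the Nessus report actually carries a scan start time, using an assignment expression as the guard. A small sketch of that guard-and-format pattern, with a hypothetical `parse_scan_start` helper standing in for the nessus-file-reader call:

```python
from datetime import datetime
from typing import Optional


def parse_scan_start(raw: Optional[str]) -> Optional[datetime]:
    """Hypothetical stand-in for the nessus-file-reader scan_time_start() call."""
    try:
        return datetime.strptime(raw, "%a %b %d %H:%M:%S %Y")
    except (TypeError, ValueError):
        return None


scan_date = "unknown"
# Assignment expression: bind and test in one step, so scan_date is only
# overwritten when a start time could actually be parsed.
if scan_dt := parse_scan_start("Tue Mar 12 10:15:30 2024"):
    scan_date = scan_dt.strftime("%Y-%m-%d")
print(scan_date)  # 2024-03-12
```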

regscale/integrations/commercial/sonarcloud.py

@@ -2,100 +2,93 @@
 # -*- coding: utf-8 -*-
 """RegScale SonarCloud Integration"""

-# standard python imports
+import logging
 import math
-import sys
 from typing import Optional
+from urllib.parse import urljoin

 import click
 import requests  # type: ignore

 from regscale.core.app.api import Api
 from regscale.core.app.application import Application
-from regscale.core.app.logz import create_logger
 from regscale.core.app.utils.app_utils import (
-    get_current_datetime,
     days_between,
+    error_and_exit,
+    get_current_datetime,
 )
 from regscale.models import regscale_id, regscale_module
 from regscale.models.regscale_models.assessment import Assessment
 from regscale.models.regscale_models.issue import Issue

-# create logger function to log to the console
-logger = create_logger()
+logger = logging.getLogger("regscale")


-def get_sonarcloud_results(config: dict) -> list[list[dict]]:
+def get_sonarcloud_results(config: dict, branch: Optional[str] = None) -> list[list[dict]]:
     """
     Retrieve Sonarcloud Results from the Sonarcloud.io API
+
     :param dict config: RegScale CLI configuration
+    :param Optional[str] branch: Branch name to filter results, defaults to None
     :return: json response data from API GET request
     :rtype: list[list[dict]]
     """
+    # create an empty list to hold multiple pages of data
+    complete = []
     # api endpoint
-    url = "https://sonarcloud.io/api/issues/search"
+    url = urljoin(config["sonarUrl"], "/api/issues/search")
     # SONAR_TOKEN from Sonarcloud
     token = config["sonarToken"]
     # arguments to pass to the API call
-    query = {
-        "organization": "regscale",
-        "projects": "RegScale_regscale",
-        "branch": "main",
-        "projectKey": "RegScale_regscale",
+    params = {
         "statuses": "OPEN, CONFIRMED, REOPENED",
-        "createdInLast": "1m",
         "ps": 500,
     }
+    if branch:
+        params["branch"] = branch
     # GET request pulls in data to check results size
-    r = requests.get(url, auth=(str(token), ""), params=query)
+    r = requests.get(url, auth=(str(token), ""), params=params)
     # if the status code does not equal 200
     if r and not r.ok:
         # exit the script gracefully
-        sys.exit("Sonarcloud API call failed please check the configuration")
+        error_and_exit(f"Sonarcloud API call failed please check the configuration\n{r.status_code}: {r.text}")
     # pull in response data to a dictionary
     data = r.json()
     # find the total results number
     total = data["paging"]["total"]
+    complete.extend(data.get("issues", []))
+    logger.info(f"Found {total} issue(s) from SonarCloud/Qube.")
     # find the number of results in each result page
     size = data["paging"]["pageSize"]
     # calculate the number of pages to iterate through sequentially
     pages = math.ceil(total / size)
-    # create an empty list to hold multiple pages of data
-    complete = []
     # loop through each page number
     for i in range(1, pages + 1, 1):
         # parameters to pass to the API call
-        filters = {
-            "organization": "regscale",
-            "projects": "RegScale_regscale",
-            "branch": "main",
-            "projectKey": "RegScale_regscale",
-            "statuses": "OPEN, CONFIRMED, REOPENED",
-            "createdInLast": "1m",
-            "ps": 500,
-            "p": f"{i}",
-        }
+        params["p"] = str(i)
         # for each page make a GET request to pull in the data
-        r = requests.get(url, auth=(str(token), ""), params=filters)
+        r = requests.get(url, auth=(str(token), ""), params=params)
        # pull in response data to a dictionary
         data = r.json()
         # extract only the issues from the data
         issues = data["issues"]
         # add each page to the total results page
-        complete.append(issues)
+        complete.extend(issues)
     # return the list of json response objects for use
+    logger.info(f"Retrieved {len(complete)}/{total} issue(s) from SonarCloud/Qube.")
     return complete


-def build_data(api: Api) -> list[dict]:
+def build_data(api: Api, branch: Optional[str] = None) -> list[dict]:
     """
     Build vulnerability alert data list
     :param Api api: API object
+    :param Optional[str] branch: Branch name to filter results, defaults to None
     :return: vulnerability data list
     :rtype: list[dict]
     """
     # execute GET request
-    data = get_sonarcloud_results(config=api.config)
+    data = get_sonarcloud_results(config=api.config, branch=branch)
     # create empty list to hold json response dicts
     vulnerability_data_list = []
     # loop through the lists in API response data

@@ -196,11 +189,14 @@ def create_alert_assessment(
     return None


-def create_alert_issues(parent_id: Optional[int] = None, parent_module: Optional[str] = None) -> None:
+def create_alert_issues(
+    parent_id: Optional[int] = None, parent_module: Optional[str] = None, branch: Optional[str] = None
+) -> None:
     """
     Create child issues from the alert assessment
     :param Optional[int] parent_id: Parent ID record to associate the assessment to, defaults to None
     :param Optional[str] parent_module: Parent module to associate the assessment to, defaults to None
+    :param Optional[str] branch: Branch name to filter results, defaults to None
     :rtype: None
     """
     # set environment and application configuration

@@ -210,7 +206,7 @@ def create_alert_issues(parent_id: Optional[int] = None, parent_module: Optional
     assessment_id = create_alert_assessment(api=api, parent_id=parent_id, parent_module=parent_module)

     # create vulnerability data list
-    vuln_data_list = build_data(api)
+    vuln_data_list = build_data(api, branch)
     # loop through each vulnerability alert in the list
     for vulnerability in vuln_data_list:
         # create issue model

@@ -253,8 +249,11 @@ def sonarcloud() -> None:
 @sonarcloud.command(name="sync_alerts")
 @regscale_id(required=False, default=None)
 @regscale_module(required=False, default=None)
-def create_alerts(regscale_id: Optional[int] = None, regscale_module: Optional[str] = None) -> None:
+@click.option("--branch", help="Branch name to filter results, defaults to None")
+def create_alerts(
+    regscale_id: Optional[int] = None, regscale_module: Optional[str] = None, branch: Optional[str] = None
+) -> None:
     """
     Create a child assessment and child issues in RegScale from SonarCloud alerts.
     """
-    create_alert_issues(regscale_id, regscale_module)
+    create_alert_issues(regscale_id, regscale_module, branch)
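
The rewrite above builds one `params` dict, takes the host from `config["sonarUrl"]`, and pages through `/api/issues/search`, extending a single results list rather than appending nested lists. A minimal sketch of that pagination pattern with `requests`; `fetch_all_issues`, the base URL, and the token value are placeholders, not RegScale helpers:

```python
import math
from typing import Optional
from urllib.parse import urljoin

import requests


def fetch_all_issues(base_url: str, token: str, branch: Optional[str] = None) -> list:
    """Page through the SonarCloud/SonarQube issues search API and return every issue."""
    url = urljoin(base_url, "/api/issues/search")
    params = {"statuses": "OPEN, CONFIRMED, REOPENED", "ps": 500}
    if branch:
        params["branch"] = branch

    # First request collects page 1 and tells us the total result count.
    resp = requests.get(url, auth=(token, ""), params=params, timeout=30)
    resp.raise_for_status()
    data = resp.json()
    issues = list(data.get("issues", []))

    total = data["paging"]["total"]
    page_size = data["paging"]["pageSize"]
    # Remaining pages are fetched sequentially by bumping the "p" parameter.
    for page in range(2, math.ceil(total / page_size) + 1):
        params["p"] = str(page)
        resp = requests.get(url, auth=(token, ""), params=params, timeout=30)
        resp.raise_for_status()
        issues.extend(resp.json().get("issues", []))
    return issues


# Example (placeholder token): fetch_all_issues("https://sonarcloud.io", "SONAR_TOKEN", branch="main")
```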

regscale/integrations/commercial/synqly/ticketing.py

@@ -100,6 +100,57 @@ def sync_jira(
     )


+@ticketing.command(name="sync_jira_service_management")
+@regscale_id()
+@regscale_module()
+@click.option(
+    "--project",
+    type=click.STRING,
+    help="jira_service_management project",
+    required=True,
+    prompt="jira_service_management project",
+)
+@click.option(
+    "--default_issue_type",
+    type=click.STRING,
+    help="Default issue type when creating tickets.",
+    required=False,
+)
+@click.option(
+    "--default_project",
+    type=click.STRING,
+    help="Default project when listing, creating, or editing tickets.",
+    required=False,
+)
+@click.option(
+    "--sync_attachments",
+    type=click.BOOL,
+    help="Whether to sync attachments between Jira Service Management and RegScale",
+    required=False,
+    default=True,
+)
+def sync_jira_service_management(
+    regscale_id: int,
+    regscale_module: str,
+    project: str,
+    default_issue_type: str,
+    default_project: str,
+    sync_attachments: bool,
+) -> None:
+    """Sync Ticketing data between Jira Service Management and RegScale."""
+    from regscale.models.integration_models.synqly_models.connectors import Ticketing
+
+    ticketing_jira_service_management = Ticketing("jira_service_management")
+    ticketing_jira_service_management.run_sync(
+        regscale_id=regscale_id,
+        regscale_module=regscale_module,
+        project=project,
+        default_issue_type=default_issue_type,
+        default_project=default_project,
+        sync_attachments=sync_attachments,
+    )
+
+
 @ticketing.command(name="sync_pagerduty")
 @regscale_id()
 @regscale_module()
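
The new `sync_jira_service_management` command follows the same click option pattern as the other ticketing commands: a prompted, required `--project` plus a `click.BOOL` option for attachments. An illustrative toy command (not part of regscale-cli; all names here are made up) showing how those option types behave when invoked:

```python
import click
from click.testing import CliRunner


@click.command()
@click.option("--project", type=click.STRING, required=True, prompt="project")
@click.option("--sync_attachments", type=click.BOOL, required=False, default=True)
def demo_sync(project: str, sync_attachments: bool) -> None:
    # Echo the parsed values so the coercion is visible.
    click.echo(f"project={project} sync_attachments={sync_attachments}")


runner = CliRunner()
# click.BOOL accepts strings like "true"/"false"/"1"/"0" and coerces them to bool;
# because --project is supplied, the prompt is skipped.
result = runner.invoke(demo_sync, ["--project", "OPS", "--sync_attachments", "false"])
print(result.output)  # project=OPS sync_attachments=False
```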

regscale/integrations/commercial/tenablev2/jsonl_scanner.py

@@ -446,7 +446,7 @@ class TenableSCJsonlScanner(JSONLScannerIntegration):
             # If no findings were created, return a basic finding
             # Get the IP from the vulnerability directly rather than using passed asset_identifier
             finding_asset_id = vuln.ip or asset_identifier
-
+            logger.debug(item)
             return IntegrationFinding(
                 title=item.get("pluginName", "Unknown Finding"),
                 description=item.get("description", "No description available"),

@@ -456,6 +456,7 @@ class TenableSCJsonlScanner(JSONLScannerIntegration):
                 category="Vulnerability",
                 scan_date=self.scan_date,
                 plugin_name=item.get("pluginName", UNKNOWN_PLUGIN),
+                control_labels=item.get("controlLabels", []),
             )

         except Exception as e:

regscale/integrations/commercial/wizv2/async_client.py

@@ -10,6 +10,7 @@ import anyio
 import httpx

 from regscale.core.app.utils.app_utils import error_and_exit
+from regscale.integrations.variables import ScannerVariables

 logger = logging.getLogger("regscale")

@@ -72,7 +73,9 @@ class AsyncWizGraphQLClient:
         logger.debug(f"Variables: {variables}")

         try:
-            async with httpx.AsyncClient(timeout=self.timeout) as client:
+            # Get SSL verify setting from scanner variables config
+            ssl_verify = getattr(ScannerVariables, "sslVerify", True)
+            async with httpx.AsyncClient(timeout=self.timeout, verify=ssl_verify) as client:
                 if progress_callback:
                     progress_callback(task_name, "requesting")

@@ -81,7 +84,7 @@ class AsyncWizGraphQLClient:
                 if progress_callback:
                     progress_callback(task_name, "processing")

-                if response.raise_for_status():
+                if not response.is_success:
                     error_and_exit(
                         f"Received non-200 response from GraphQL API: {response.status_code}: {response.text}"
                     )

@@ -304,6 +307,7 @@ def run_async_queries(
     query_configs: List[Dict[str, Any]],
     progress_tracker: Optional[Any] = None,
     max_concurrent: int = 5,
+    timeout: int = 60,
 ) -> List[Tuple[str, List[Dict[str, Any]], Optional[Exception]]]:
     """
     Convenience function to run async queries from synchronous code.

@@ -313,12 +317,15 @@ def run_async_queries(
     :param List[Dict[str, Any]] query_configs: Query configurations
     :param Optional[Any] progress_tracker: Progress tracker
     :param int max_concurrent: Maximum concurrent requests
+    :param int timeout: Request timeout in seconds
     :return: Query results
     :rtype: List[Tuple[str, List[Dict[str, Any]], Optional[Exception]]]
     """

     async def _run():
-        client = AsyncWizGraphQLClient(endpoint=endpoint, headers=headers, max_concurrent=max_concurrent)
+        client = AsyncWizGraphQLClient(
+            endpoint=endpoint, headers=headers, max_concurrent=max_concurrent, timeout=timeout
+        )
         return await client.execute_concurrent_queries(query_configs, progress_tracker)

     # Use anyio.run for better compatibility
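
The client changes pass an explicit `verify` flag and a `timeout` through to `httpx.AsyncClient`, and test `response.is_success` rather than using the return value of `raise_for_status()` as a boolean. A minimal sketch of that pattern against a placeholder GraphQL endpoint, driven with `anyio.run` as the module is:

```python
import anyio
import httpx

ENDPOINT = "https://example.com/graphql"  # placeholder endpoint, not the Wiz API URL
QUERY = "query { __typename }"


async def post_graphql(ssl_verify: bool = True, timeout: int = 60) -> dict:
    # verify=False disables TLS verification (e.g. behind an intercepting proxy);
    # timeout bounds the whole request.
    async with httpx.AsyncClient(timeout=timeout, verify=ssl_verify) as client:
        response = await client.post(ENDPOINT, json={"query": QUERY})
        # is_success is True only for 2xx responses; raise_for_status() is meant
        # to raise on error, not to be used as a truthy success check.
        if not response.is_success:
            raise RuntimeError(f"GraphQL request failed: {response.status_code}: {response.text}")
        return response.json()


if __name__ == "__main__":
    print(anyio.run(post_graphql))
```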

regscale/integrations/commercial/wizv2/click.py

@@ -9,7 +9,7 @@ from typing import Optional
 import click

 from regscale.integrations.commercial.wizv2.variables import WizVariables
-from regscale.models import regscale_id, regscale_module
+from regscale.models import regscale_id
 from regscale.models.app_models.click import regscale_ssp_id

 logger = logging.getLogger("regscale")

@@ -333,12 +333,10 @@ def add_report_evidence(
     "--wiz_project_id",
     "-p",
     prompt="Enter the Wiz project ID",
-    help="Enter the Wiz Project ID. Options include: projects, \
-    policies, supplychain, securityplans, components.",
+    help="Enter the Wiz Project ID for policy compliance sync.",
     required=True,
 )
-@regscale_id(help="RegScale will create and update issues as children of this record.")
-@regscale_module()
+@regscale_id(help="RegScale will create and update control assessments as children of this record.")
 @click.option(  # type: ignore
     "--client_id",
     "-i",

@@ -356,44 +354,122 @@ def add_report_evidence(
     required=False,
 )
 @click.option(  # type: ignore
-    "--catalog_id",
-    "-c",
-    help="RegScale Catalog ID for the selected framework.",
-    hide_input=False,
+    "--framework_id",
+    "-f",
+    help="Wiz framework ID or shorthand (e.g., 'nist', 'aws', 'wf-id-4'). Use --list-frameworks to see options. Default: wf-id-4 (NIST SP 800-53 Rev 5)",
+    default="wf-id-4",
     required=False,
-    default=None,
 )
 @click.option(  # type: ignore
-    "--framework",
-    "-f",
-    type=click.Choice(["CSF", "NIST800-53R5", "NIST800-53R4"], case_sensitive=False),  # type: ignore
-    help="Choose either one of the Frameworks",
-    default="NIST800-53R5",
-    required=True,
+    "--list-frameworks",
+    "-lf",
+    is_flag=True,
+    help="List all available framework options and shortcuts",
+    default=False,
+)
+@click.option(  # type: ignore
+    "--create-issues/--no-create-issues",
+    "-ci/-ni",
+    default=True,
+    help="Create issues for failed policy assessments (default: enabled)",
+)
+@click.option(  # type: ignore
+    "--update-control-status/--no-update-control-status",
+    "-ucs/-nucs",
+    default=True,
+    help="Update control implementation status based on assessment results (default: enabled)",
+)
+@click.option(  # type: ignore
+    "--create-poams/--no-create-poams",
+    "-cp/-ncp",
+    default=False,
+    help="Mark created issues as POAMs (default: disabled)",
+)
+@click.option(  # type: ignore
+    "--refresh/--no-refresh",
+    "-r/-nr",
+    default=False,
+    help="Force refresh and ignore cached data (default: use cache if available)",
+)
+@click.option(  # type: ignore
+    "--cache-duration",
+    "-cd",
+    type=click.INT,
+    default=1440,
+    help="Cache duration in minutes - reuse cached data if newer than this (default: 1440 minutes / 1 day)",
 )
 def sync_compliance(
     wiz_project_id,
     regscale_id,
-    regscale_module,
     client_id,
     client_secret,
-    catalog_id,
-    framework,
+    framework_id,
+    list_frameworks,
+    create_issues,
+    update_control_status,
+    create_poams,
+    refresh,
+    cache_duration,
 ):
-    """Sync compliance posture from Wiz to RegScale"""
-    from regscale.integrations.commercial.wizv2.utils import _sync_compliance
+    """
+    Sync policy compliance assessments from Wiz to RegScale.
+
+    This command fetches policy assessment data from Wiz and creates:
+    - Control assessments based on policy compliance results
+    - Issues for failed policy assessments (if --create-issues enabled)
+    - Updates to control implementation status (if --update-control-status enabled)
+    - JSON output file with policy compliance data in artifacts/wiz/ directory
+    - Cached framework mapping for improved performance
+
+    CACHING:
+    By default, the command will reuse cached policy data if it's newer than the cache
+    duration (default: 60 minutes). Use --refresh to force fresh data from Wiz.
+    Use --cache-duration to adjust how long cached data is considered valid.
+    """
+    from regscale.integrations.commercial.wizv2.policy_compliance import (
+        WizPolicyComplianceIntegration,
+        list_available_frameworks,
+        resolve_framework_id,
+    )
+
+    # Handle --list-frameworks flag
+    if list_frameworks:
+        click.echo(list_available_frameworks())
+        return

+    # Use environment variables if not provided
     if not client_secret:
         client_secret = WizVariables.wizClientSecret
     if not client_id:
         client_id = WizVariables.wizClientId

-    _sync_compliance(
+    # Resolve framework ID using the enhanced framework resolution
+    try:
+        resolved_framework_id = resolve_framework_id(framework_id.lower())
+        if resolved_framework_id != framework_id:
+            from regscale.integrations.commercial.wizv2.policy_compliance import FRAMEWORK_MAPPINGS
+
+            framework_name = FRAMEWORK_MAPPINGS.get(resolved_framework_id, resolved_framework_id)
+            click.echo(f"🔍 Resolved '{framework_id}' to '{framework_name}' ({resolved_framework_id})")
+    except ValueError as e:
+        click.echo(f"❌ {str(e)}")
+        click.echo("\nUse --list-frameworks to see all available options.")
+        return
+
+    # Create and run the policy compliance integration
+    policy_integration = WizPolicyComplianceIntegration(
+        plan_id=regscale_id,
         wiz_project_id=wiz_project_id,
-        regscale_id=regscale_id,
-        regscale_module=regscale_module,
         client_id=client_id,
         client_secret=client_secret,
-        catalog_id=catalog_id,
-        framework=framework,
+        framework_id=resolved_framework_id,
+        create_poams=create_poams,
+        cache_duration_minutes=cache_duration,
+        force_refresh=refresh,
+    )
+
+    # Run the policy compliance sync
+    policy_integration.sync_policy_compliance(
+        create_issues=create_issues,
+        update_control_status=update_control_status,
     )
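
The new `sync_compliance` flow resolves shorthand framework names (e.g. 'nist') to Wiz framework IDs through `resolve_framework_id` and a `FRAMEWORK_MAPPINGS` table, treating `ValueError` as "unknown framework". A rough sketch of how such a resolver can work; the alias table and everything other than the `wf-id-4` entry shown in the help text above are illustrative, not the contents of `policy_compliance.py`:

```python
# Illustrative resolver: maps framework IDs to display names and accepts
# shorthand aliases; unknown input raises ValueError, mirroring how the
# command above catches ValueError and points users at --list-frameworks.
FRAMEWORK_MAPPINGS = {
    "wf-id-4": "NIST SP 800-53 Rev 5",  # example entry; the real table lives in policy_compliance.py
}
FRAMEWORK_ALIASES = {
    "nist": "wf-id-4",  # hypothetical alias table for this sketch
}


def resolve_framework_id(value: str) -> str:
    """Return a Wiz framework ID for either a full ID or a known shorthand alias."""
    value = value.strip().lower()
    if value in FRAMEWORK_MAPPINGS:
        return value
    if value in FRAMEWORK_ALIASES:
        return FRAMEWORK_ALIASES[value]
    raise ValueError(f"Unknown framework '{value}'.")


print(resolve_framework_id("nist"))     # wf-id-4
print(resolve_framework_id("wf-id-4"))  # wf-id-4
```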