regscale-cli 6.20.9.1__py3-none-any.whl → 6.21.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of regscale-cli might be problematic; see the registry's advisory page for more details.

Files changed (56)
  1. regscale/_version.py +1 -1
  2. regscale/core/app/application.py +12 -5
  3. regscale/core/app/internal/set_permissions.py +58 -27
  4. regscale/integrations/commercial/defender.py +9 -0
  5. regscale/integrations/commercial/nessus/scanner.py +2 -0
  6. regscale/integrations/commercial/sonarcloud.py +35 -36
  7. regscale/integrations/commercial/synqly/ticketing.py +51 -0
  8. regscale/integrations/commercial/wizv2/async_client.py +325 -0
  9. regscale/integrations/commercial/wizv2/constants.py +756 -0
  10. regscale/integrations/commercial/wizv2/scanner.py +1301 -89
  11. regscale/integrations/commercial/wizv2/utils.py +280 -36
  12. regscale/integrations/commercial/wizv2/variables.py +2 -10
  13. regscale/integrations/integration_override.py +15 -6
  14. regscale/integrations/scanner_integration.py +221 -37
  15. regscale/integrations/variables.py +1 -0
  16. regscale/models/integration_models/amazon_models/inspector_scan.py +32 -57
  17. regscale/models/integration_models/aqua.py +92 -78
  18. regscale/models/integration_models/cisa_kev_data.json +47 -4
  19. regscale/models/integration_models/defenderimport.py +64 -59
  20. regscale/models/integration_models/ecr_models/ecr.py +100 -147
  21. regscale/models/integration_models/flat_file_importer/__init__.py +52 -38
  22. regscale/models/integration_models/ibm.py +29 -47
  23. regscale/models/integration_models/nexpose.py +156 -68
  24. regscale/models/integration_models/prisma.py +46 -66
  25. regscale/models/integration_models/qualys.py +99 -93
  26. regscale/models/integration_models/snyk.py +229 -158
  27. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  28. regscale/models/integration_models/veracode.py +15 -20
  29. regscale/models/integration_models/xray.py +276 -82
  30. regscale/models/regscale_models/__init__.py +13 -0
  31. regscale/models/regscale_models/classification.py +23 -0
  32. regscale/models/regscale_models/control_implementation.py +14 -12
  33. regscale/models/regscale_models/cryptography.py +56 -0
  34. regscale/models/regscale_models/deviation.py +4 -4
  35. regscale/models/regscale_models/group.py +3 -2
  36. regscale/models/regscale_models/interconnection.py +1 -1
  37. regscale/models/regscale_models/issue.py +140 -41
  38. regscale/models/regscale_models/milestone.py +40 -0
  39. regscale/models/regscale_models/property.py +0 -1
  40. regscale/models/regscale_models/rbac.py +22 -0
  41. regscale/models/regscale_models/regscale_model.py +29 -18
  42. regscale/models/regscale_models/team.py +55 -0
  43. {regscale_cli-6.20.9.1.dist-info → regscale_cli-6.21.0.0.dist-info}/METADATA +1 -1
  44. {regscale_cli-6.20.9.1.dist-info → regscale_cli-6.21.0.0.dist-info}/RECORD +56 -49
  45. tests/fixtures/test_fixture.py +58 -2
  46. tests/regscale/core/test_app.py +5 -3
  47. tests/regscale/integrations/test_integration_mapping.py +522 -40
  48. tests/regscale/integrations/test_issue_due_date.py +1 -1
  49. tests/regscale/integrations/test_property_and_milestone_creation.py +684 -0
  50. tests/regscale/integrations/test_update_finding_dates.py +336 -0
  51. tests/regscale/models/test_asset.py +406 -50
  52. tests/regscale/models/test_report.py +105 -29
  53. {regscale_cli-6.20.9.1.dist-info → regscale_cli-6.21.0.0.dist-info}/LICENSE +0 -0
  54. {regscale_cli-6.20.9.1.dist-info → regscale_cli-6.21.0.0.dist-info}/WHEEL +0 -0
  55. {regscale_cli-6.20.9.1.dist-info → regscale_cli-6.21.0.0.dist-info}/entry_points.txt +0 -0
  56. {regscale_cli-6.20.9.1.dist-info → regscale_cli-6.21.0.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,325 @@
1
+ #!/usr/bin/env python3
2
+ # -*- coding: utf-8 -*-
3
+ """Async GraphQL client for Wiz integration with concurrent query processing."""
4
+
5
import asyncio
import logging
from typing import Any, Callable, Dict, List, Optional, Tuple

import anyio
import httpx

from regscale.core.app.utils.app_utils import error_and_exit
13
+
14
+ logger = logging.getLogger("regscale")
15
+
16
+
17
+ class AsyncWizGraphQLClient:
18
+ """
19
+ Async GraphQL client optimized for concurrent Wiz API queries.
20
+
21
+ This client can execute multiple GraphQL queries concurrently, significantly
22
+ improving performance when fetching different finding types from Wiz.
23
+ """
24
+
25
+ def __init__(
26
+ self,
27
+ endpoint: str,
28
+ headers: Optional[Dict[str, str]] = None,
29
+ timeout: float = 30.0,
30
+ max_concurrent: int = 5,
31
+ ):
32
+ """
33
+ Initialize the async GraphQL client.
34
+
35
+ :param str endpoint: GraphQL endpoint URL
36
+ :param Optional[Dict[str, str]] headers: HTTP headers for requests
37
+ :param float timeout: Request timeout in seconds
38
+ :param int max_concurrent: Maximum concurrent requests
39
+ """
40
+ self.endpoint = endpoint
41
+ self.headers = headers or {}
42
+ self.timeout = timeout
43
+ self.max_concurrent = max_concurrent
44
+ self._semaphore = anyio.Semaphore(max_concurrent)
45
+
46
+ async def execute_query(
47
+ self,
48
+ query: str,
49
+ variables: Optional[Dict[str, Any]] = None,
50
+ progress_callback: Optional[callable] = None,
51
+ task_name: str = "GraphQL Query",
52
+ ) -> Dict[str, Any]:
53
+ """
54
+ Execute a single GraphQL query asynchronously.
55
+
56
+ :param str query: GraphQL query string
57
+ :param Optional[Dict[str, Any]] variables: Query variables
58
+ :param Optional[callable] progress_callback: Callback for progress updates
59
+ :param str task_name: Name for progress tracking
60
+ :return: Query response data
61
+ :rtype: Dict[str, Any]
62
+ """
63
+ async with self._semaphore: # Limit concurrent requests
64
+ if progress_callback:
65
+ progress_callback(task_name, "starting")
66
+
67
+ payload = {"query": query, "variables": variables or {}}
68
+
69
+ # Debug logging for authentication and payload
70
+ logger.debug(f"Async GraphQL request to {self.endpoint}")
71
+ logger.debug(f"Headers: {self.headers}")
72
+ logger.debug(f"Variables: {variables}")
73
+
74
+ try:
75
+ async with httpx.AsyncClient(timeout=self.timeout) as client:
76
+ if progress_callback:
77
+ progress_callback(task_name, "requesting")
78
+
79
+ response = await client.post(self.endpoint, json=payload, headers=self.headers)
80
+
81
+ if progress_callback:
82
+ progress_callback(task_name, "processing")
83
+
84
+ if response.raise_for_status():
85
+ error_and_exit(
86
+ f"Received non-200 response from GraphQL API: {response.status_code}: {response.text}"
87
+ )
88
+ result = response.json()
89
+
90
+ if "errors" in result:
91
+ error_msg = f"GraphQL errors: {result['errors']}"
92
+ logger.error(error_msg)
93
+ error_and_exit(error_msg)
94
+
95
+ if progress_callback:
96
+ progress_callback(task_name, "completed")
97
+
98
+ return result.get("data", {})
99
+
100
+ except httpx.HTTPError as e:
101
+ error_msg = f"HTTP error in {task_name}: {str(e)}"
102
+ logger.error(error_msg)
103
+ if progress_callback:
104
+ progress_callback(task_name, "failed")
105
+ error_and_exit(error_msg)
106
+ except Exception as e:
107
+ error_msg = f"Error in {task_name}: {str(e)}"
108
+ logger.error(error_msg)
109
+ if progress_callback:
110
+ progress_callback(task_name, "failed")
111
+ error_and_exit(error_msg)
112
+
113
+ async def execute_paginated_query(
114
+ self,
115
+ query: str,
116
+ variables: Dict[str, Any],
117
+ topic_key: str,
118
+ progress_callback: Optional[callable] = None,
119
+ task_name: str = "Paginated Query",
120
+ ) -> List[Dict[str, Any]]:
121
+ """
122
+ Execute a paginated GraphQL query, fetching all pages.
123
+
124
+ :param str query: GraphQL query string
125
+ :param Dict[str, Any] variables: Query variables
126
+ :param str topic_key: Key to extract nodes from response
127
+ :param Optional[callable] progress_callback: Callback for progress updates
128
+ :param str task_name: Name for progress tracking
129
+ :return: All nodes from all pages
130
+ :rtype: List[Dict[str, Any]]
131
+ """
132
+ all_nodes = []
133
+ has_next_page = True
134
+ after_cursor = None
135
+ page_count = 0
136
+
137
+ while has_next_page:
138
+ page_count += 1
139
+ current_variables = variables.copy()
140
+ current_variables["after"] = after_cursor
141
+
142
+ page_task_name = f"{task_name} (Page {page_count})"
143
+
144
+ try:
145
+ data = await self.execute_query(
146
+ query=query,
147
+ variables=current_variables,
148
+ progress_callback=progress_callback,
149
+ task_name=page_task_name,
150
+ )
151
+
152
+ topic_data = data.get(topic_key, {})
153
+ nodes = topic_data.get("nodes", [])
154
+ page_info = topic_data.get("pageInfo", {})
155
+
156
+ all_nodes.extend(nodes)
157
+
158
+ has_next_page = page_info.get("hasNextPage", False)
159
+ after_cursor = page_info.get("endCursor")
160
+
161
+ if progress_callback:
162
+ progress_callback(
163
+ task_name,
164
+ f"fetched_page_{page_count}",
165
+ {"nodes_count": len(nodes), "total_nodes": len(all_nodes)},
166
+ )
167
+
168
+ except Exception as e:
169
+ logger.error(f"Error fetching page {page_count} for {task_name}: {str(e)}")
170
+ break
171
+
172
+ return all_nodes
173
+
174
+ def _create_progress_callback(self, progress_tracker: Any, task_id: Any, query_type: str) -> callable:
175
+ """
176
+ Create a progress callback function for query execution tracking.
177
+
178
+ :param Any progress_tracker: Progress tracker instance
179
+ :param Any task_id: Task ID for progress updates
180
+ :param str query_type: Type of query being executed
181
+ :return: Progress callback function
182
+ :rtype: callable
183
+ """
184
+
185
+ def progress_callback(task_name: str, status: str, extra_data: Dict = None):
186
+ status_messages = {
187
+ "starting": f"[yellow]Starting {query_type}...",
188
+ "requesting": f"[blue]Querying {query_type}...",
189
+ "processing": f"[magenta]Processing {query_type}...",
190
+ "completed": f"[green]✓ Completed {query_type}",
191
+ "failed": f"[red]✗ Failed {query_type}",
192
+ }
193
+
194
+ if status in status_messages:
195
+ progress_tracker.update(task_id, description=status_messages[status])
196
+ elif status.startswith("fetched_page_") and extra_data:
197
+ progress_tracker.update(
198
+ task_id, description=f"[cyan]{query_type}: {extra_data['total_nodes']} nodes fetched"
199
+ )
200
+
201
+ return progress_callback
202
+
203
+ async def _execute_single_query_config(
204
+ self, config: Dict[str, Any], progress_tracker: Optional[Any] = None
205
+ ) -> Tuple[str, List[Dict[str, Any]], Optional[Exception]]:
206
+ """
207
+ Execute a single query configuration with progress tracking.
208
+
209
+ :param Dict[str, Any] config: Query configuration
210
+ :param Optional[Any] progress_tracker: Progress tracker for UI updates
211
+ :return: Tuple of (query_type, results, error)
212
+ :rtype: Tuple[str, List[Dict[str, Any]], Optional[Exception]]
213
+ """
214
+ query_type = config["type"].value
215
+ query = config["query"]
216
+ variables = config.get("variables", {})
217
+ topic_key = config["topic_key"]
218
+
219
+ # Setup progress tracking if available
220
+ progress_callback = None
221
+ task_id = None
222
+
223
+ if progress_tracker:
224
+ task_id = progress_tracker.add_task(f"[yellow]Fetching {query_type}...", total=None)
225
+ progress_callback = self._create_progress_callback(progress_tracker, task_id, query_type)
226
+
227
+ try:
228
+ # Execute paginated query
229
+ nodes = await self.execute_paginated_query(
230
+ query=query,
231
+ variables=variables,
232
+ topic_key=topic_key,
233
+ progress_callback=progress_callback,
234
+ task_name=query_type,
235
+ )
236
+
237
+ # Update progress on success
238
+ if task_id and progress_tracker:
239
+ progress_tracker.update(
240
+ task_id, description=f"[green]✓ {query_type}: {len(nodes)} nodes", completed=1, total=1
241
+ )
242
+
243
+ return query_type, nodes, None
244
+
245
+ except Exception as e:
246
+ # Update progress on failure
247
+ if task_id and progress_tracker:
248
+ progress_tracker.update(
249
+ task_id, description=f"[red]✗ {query_type}: {str(e)[:50]}...", completed=1, total=1
250
+ )
251
+ return query_type, [], e
252
+
253
+ def _process_concurrent_results(
254
+ self, results: List[Any], query_configs: List[Dict[str, Any]]
255
+ ) -> List[Tuple[str, List[Dict[str, Any]], Optional[Exception]]]:
256
+ """
257
+ Process results from concurrent query execution.
258
+
259
+ :param List[Any] results: Raw results from asyncio.gather
260
+ :param List[Dict[str, Any]] query_configs: Original query configurations
261
+ :return: Processed results
262
+ :rtype: List[Tuple[str, List[Dict[str, Any]], Optional[Exception]]]
263
+ """
264
+ processed_results = []
265
+
266
+ for i, result in enumerate(results):
267
+ if isinstance(result, Exception):
268
+ query_type = query_configs[i]["type"].value
269
+ processed_results.append((query_type, [], result))
270
+ else:
271
+ processed_results.append(result)
272
+
273
+ return processed_results
274
+
275
+ async def execute_concurrent_queries(
276
+ self, query_configs: List[Dict[str, Any]], progress_tracker: Optional[Any] = None
277
+ ) -> List[Tuple[str, List[Dict[str, Any]], Optional[Exception]]]:
278
+ """
279
+ Execute multiple GraphQL queries concurrently.
280
+
281
+ :param List[Dict[str, Any]] query_configs: List of query configurations
282
+ :param Optional[Any] progress_tracker: Progress tracker for UI updates
283
+ :return: List of (query_type, results, error) tuples
284
+ :rtype: List[Tuple[str, List[Dict[str, Any]], Optional[Exception]]]
285
+ """
286
+ logger.info(f"Starting {len(query_configs)} concurrent GraphQL queries...")
287
+
288
+ # Create tasks for concurrent execution
289
+ tasks = [self._execute_single_query_config(config, progress_tracker) for config in query_configs]
290
+
291
+ # Execute all queries concurrently
292
+ results = await asyncio.gather(*tasks, return_exceptions=True)
293
+
294
+ # Process and return results
295
+ processed_results = self._process_concurrent_results(results, query_configs)
296
+
297
+ logger.info(f"Completed {len(query_configs)} concurrent queries")
298
+ return processed_results
299
+
300
+
def run_async_queries(
    endpoint: str,
    headers: Dict[str, str],
    query_configs: List[Dict[str, Any]],
    progress_tracker: Optional[Any] = None,
    max_concurrent: int = 5,
) -> List[Tuple[str, List[Dict[str, Any]], Optional[Exception]]]:
    """
    Convenience function to run async queries from synchronous code.

    :param str endpoint: GraphQL endpoint URL
    :param Dict[str, str] headers: HTTP headers
    :param List[Dict[str, Any]] query_configs: Query configurations
    :param Optional[Any] progress_tracker: Progress tracker
    :param int max_concurrent: Maximum concurrent requests
    :return: Query results
    :rtype: List[Tuple[str, List[Dict[str, Any]], Optional[Exception]]]
    """

    async def _run_all():
        # Build the client inside the coroutine so it is created on the
        # event loop that will drive it.
        gql_client = AsyncWizGraphQLClient(
            endpoint=endpoint,
            headers=headers,
            max_concurrent=max_concurrent,
        )
        return await gql_client.execute_concurrent_queries(query_configs, progress_tracker)

    # anyio.run starts an event loop compatible with the anyio primitives
    # used inside the client, then blocks until the coroutine finishes.
    return anyio.run(_run_all)