regscale-cli 6.18.0.0__py3-none-any.whl → 6.19.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of regscale-cli might be problematic.

Files changed (45)
  1. regscale/__init__.py +1 -1
  2. regscale/integrations/api_paginator.py +932 -0
  3. regscale/integrations/api_paginator_example.py +348 -0
  4. regscale/integrations/commercial/__init__.py +11 -10
  5. regscale/integrations/commercial/{qualys.py → qualys/__init__.py} +756 -105
  6. regscale/integrations/commercial/qualys/scanner.py +1051 -0
  7. regscale/integrations/commercial/qualys/variables.py +21 -0
  8. regscale/integrations/commercial/sicura/api.py +1 -0
  9. regscale/integrations/commercial/stigv2/click_commands.py +36 -8
  10. regscale/integrations/commercial/stigv2/stig_integration.py +63 -9
  11. regscale/integrations/commercial/tenablev2/__init__.py +9 -0
  12. regscale/integrations/commercial/tenablev2/authenticate.py +23 -2
  13. regscale/integrations/commercial/tenablev2/commands.py +779 -0
  14. regscale/integrations/commercial/tenablev2/jsonl_scanner.py +1999 -0
  15. regscale/integrations/commercial/tenablev2/sc_scanner.py +600 -0
  16. regscale/integrations/commercial/tenablev2/scanner.py +7 -5
  17. regscale/integrations/commercial/tenablev2/utils.py +21 -4
  18. regscale/integrations/commercial/tenablev2/variables.py +4 -0
  19. regscale/integrations/jsonl_scanner_integration.py +523 -142
  20. regscale/integrations/scanner_integration.py +102 -26
  21. regscale/integrations/transformer/__init__.py +17 -0
  22. regscale/integrations/transformer/data_transformer.py +445 -0
  23. regscale/integrations/transformer/mappings/__init__.py +8 -0
  24. regscale/integrations/variables.py +2 -0
  25. regscale/models/__init__.py +5 -2
  26. regscale/models/integration_models/cisa_kev_data.json +5 -5
  27. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  28. regscale/models/regscale_models/asset.py +5 -2
  29. regscale/models/regscale_models/file.py +5 -2
  30. regscale/regscale.py +3 -1
  31. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/METADATA +1 -1
  32. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/RECORD +44 -28
  33. tests/regscale/core/test_version.py +22 -0
  34. tests/regscale/integrations/__init__.py +0 -0
  35. tests/regscale/integrations/test_api_paginator.py +597 -0
  36. tests/regscale/integrations/test_integration_mapping.py +60 -0
  37. tests/regscale/integrations/test_issue_creation.py +317 -0
  38. tests/regscale/integrations/test_issue_due_date.py +46 -0
  39. tests/regscale/integrations/transformer/__init__.py +0 -0
  40. tests/regscale/integrations/transformer/test_data_transformer.py +850 -0
  41. regscale/integrations/commercial/tenablev2/click.py +0 -1641
  42. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/LICENSE +0 -0
  43. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/WHEEL +0 -0
  44. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/entry_points.txt +0 -0
  45. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.0.dist-info}/top_level.txt +0 -0
regscale/integrations/api_paginator_example.py (new file)
@@ -0,0 +1,348 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Example using ApiPaginator with JSONLScannerIntegration.
+
+ This file demonstrates how to use the ApiPaginator class to fetch paginated API responses
+ and integrate them with the JSONLScannerIntegration class.
+ """
+
+ import logging
+ import os
+ from typing import Dict, Any, Union, Tuple, Iterator, Optional
+
+ import click
+ from pathlib import Path
+
+ from regscale.integrations.api_paginator import ApiPaginator
+ from regscale.integrations.jsonl_scanner_integration import (
+     JSONLScannerIntegration,
+     IntegrationAsset,
+     IntegrationFinding,
+ )
+ from regscale.models import IssueSeverity, IssueStatus, AssetStatus
+
+ logger = logging.getLogger("regscale")
+
+
+ class ApiScannerExample(JSONLScannerIntegration):
+     """
+     Example integration class that uses ApiPaginator to fetch data.
+
+     This class demonstrates how to combine ApiPaginator with JSONLScannerIntegration
+     to create a full API-based scanner integration.
+     """
+
+     # Class constants
+     title = "API Scanner Example"
+     asset_identifier_field = "otherTrackingNumber"
+
+     # Custom file paths
+     ASSETS_FILE = "./artifacts/api_example_assets.jsonl"
+     FINDINGS_FILE = "./artifacts/api_example_findings.jsonl"
+
+     # Severity mapping
+     finding_severity_map = {
+         "CRITICAL": IssueSeverity.Critical,
+         "HIGH": IssueSeverity.High,
+         "MEDIUM": IssueSeverity.Moderate,
+         "LOW": IssueSeverity.Low,
+         "INFO": IssueSeverity.Low,
+     }
+
+     def __init__(self, *args, **kwargs):
+         """
+         Initialize the ApiScannerExample integration.
+
+         Args:
+             api_url (str): Base URL for the API
+             api_key (str): API key for authentication
+             api_token (str): API token for authentication
+         """
+         # Extract API-specific parameters
+         self.api_url = kwargs.pop("api_url", None)
+         self.api_key = kwargs.pop("api_key", None)
+         self.api_token = kwargs.pop("api_token", None)
+
+         # Set file pattern
+         kwargs["file_pattern"] = "*.json"
+
+         # Call parent initializer
+         super().__init__(*args, **kwargs)
+
+         # Create API paginator
+         self.paginator = self._create_paginator()
+
+     def _create_paginator(self) -> ApiPaginator:
+         """
+         Create an ApiPaginator instance configured for this integration.
+
+         Returns:
+             ApiPaginator: Configured paginator instance
+         """
+         if not self.api_url:
+             raise ValueError("API URL is required")
+
+         # Create authentication headers
+         auth_headers = {}
+         if self.api_key:
+             auth_headers["X-API-Key"] = self.api_key
+         if self.api_token:
+             auth_headers["Authorization"] = f"Bearer {self.api_token}"
+
+         # Create paginator with default settings
+         return ApiPaginator(
+             base_url=self.api_url,
+             auth_headers=auth_headers,
+             output_file=None,  # We'll set this per-operation
+             page_size=100,
+             timeout=30,
+             retry_attempts=3,
+             ssl_verify=self.ssl_verify,
+         )
+
+     def fetch_assets_from_api(self) -> None:
+         """
+         Fetch assets from the API and write them to the ASSETS_FILE.
+         """
+         logger.info(f"Fetching assets from API {self.api_url}")
+
+         # Ensure artifacts directory exists
+         os.makedirs(os.path.dirname(self.ASSETS_FILE), exist_ok=True)
+
+         # Configure paginator for this operation
+         self.paginator.output_file = self.ASSETS_FILE
+         self.paginator.clear_output_file()  # Clear any existing data
+
+         # Fetch assets with pagination
+         try:
+             self.paginator.fetch_paginated_results(
+                 endpoint="assets", params={"type": "all"}, data_path="items", pagination_type="offset"
+             )
+             logger.info(f"Successfully wrote assets to {self.ASSETS_FILE}")
+         except Exception as e:
+             logger.error(f"Error fetching assets: {str(e)}")
+             raise
+
+     def fetch_findings_from_api(self) -> None:
+         """
+         Fetch findings from the API and write them to the FINDINGS_FILE.
+         """
+         logger.info(f"Fetching findings from API {self.api_url}")
+
+         # Ensure artifacts directory exists
+         os.makedirs(os.path.dirname(self.FINDINGS_FILE), exist_ok=True)
+
+         # Configure paginator for this operation
+         self.paginator.output_file = self.FINDINGS_FILE
+         self.paginator.clear_output_file()  # Clear any existing data
+
+         # Fetch findings with pagination
+         try:
+             self.paginator.fetch_paginated_results(
+                 endpoint="findings", params={"status": "open"}, data_path="items", pagination_type="offset"
+             )
+             logger.info(f"Successfully wrote findings to {self.FINDINGS_FILE}")
+         except Exception as e:
+             logger.error(f"Error fetching findings: {str(e)}")
+             raise
+
+     def fetch_assets_and_findings_from_api(self) -> Tuple[str, str]:
+         """
+         Fetch both assets and findings from the API.
+
+         Returns:
+             Tuple[str, str]: Paths to the assets and findings files
+         """
+         self.fetch_assets_from_api()
+         self.fetch_findings_from_api()
+         return self.ASSETS_FILE, self.FINDINGS_FILE
+
+     def find_valid_files(self, path: Union[Path, str]) -> Iterator[Tuple[Union[Path, str], Dict[str, Any]]]:
+         """
+         Override to use our API fetcher instead of looking for files.
+
+         Args:
+             path (Union[Path, str]): Path to search for files (ignored)
+
+         Returns:
+             Iterator: Iterator of (path, data) tuples
+         """
+         # Instead of searching for files, we'll fetch directly from the API
+         # This demonstrates how to inject the API fetching into the JSONLScannerIntegration flow
+
+         # Fetch data from API and use the JSONL files as our source
+         self.fetch_assets_and_findings_from_api()
+
+         # Use the parent class's file finding method to process the JSONL files we created
+         artifacts_dir = Path("./artifacts")
+         if artifacts_dir.exists():
+             yield from super().find_valid_files(artifacts_dir)
+
+     def is_valid_file(self, data: Any, file_path: Union[Path, str]) -> Tuple[bool, Optional[Dict[str, Any]]]:
+         """
+         Validate that the file has the expected structure.
+
+         Args:
+             data (Any): Data parsed from the file
+             file_path (Union[Path, str]): Path to the file
+
+         Returns:
+             Tuple[bool, Optional[Dict[str, Any]]]: (is_valid, validated_data)
+         """
+         if not isinstance(data, dict):
+             return False, None
+
+         # Simple validation - this would be more specific in a real integration
+         return True, data
+
+     def parse_asset(self, file_path: Union[Path, str], data: Dict[str, Any]) -> IntegrationAsset:
+         """
+         Parse a single asset from source data.
+
+         Args:
+             file_path (Union[Path, str]): Path to the file
+             data (Dict[str, Any]): The parsed data
+
+         Returns:
+             IntegrationAsset: Parsed asset object
+         """
+         # Extract basic asset information
+         asset_id = data.get("id", "unknown")
+         asset_name = data.get("name", f"Asset {asset_id}")
+         asset_type = data.get("type", "Other")
+
+         return IntegrationAsset(
+             identifier=asset_id,
+             name=asset_name,
+             asset_type=asset_type,
+             asset_category=data.get("category", "Software"),
+             ip_address=data.get("ipAddress", ""),
+             status=AssetStatus.Active,
+             parent_id=self.plan_id,
+             parent_module="securityplans",
+             source_data=data,
+         )
+
+     def parse_finding(self, asset_identifier: str, data: Dict[str, Any], item: Dict[str, Any]) -> IntegrationFinding:
+         """
+         Parse a single finding from source data.
+
+         Args:
+             asset_identifier (str): Identifier of the asset this finding belongs to
+             data (Dict[str, Any]): The asset data
+             item (Dict[str, Any]): The finding data
+
+         Returns:
+             IntegrationFinding: Parsed finding object
+         """
+         # Map severity from source to RegScale severity
+         raw_severity = item.get("severity", "MEDIUM")
+         severity = self.finding_severity_map.get(raw_severity, IssueSeverity.Moderate)
+
+         return IntegrationFinding(
+             title=item.get("title", "Unknown Finding"),
+             description=item.get("description", "No description available"),
+             severity=severity,
+             status=IssueStatus.Open,
+             asset_identifier=asset_identifier,
+             category="Vulnerability",
+             control_labels=[],  # Add control labels if applicable
+             plugin_name=self.title,
+             cve=item.get("cveId", ""),
+             cvss_v3_score=item.get("cvssScore", None),
+             recommendation_for_mitigation=item.get("remediation", ""),
+             scan_date=self.scan_date,
+         )
+
+     def _get_findings_data_from_file(self, data: Dict[str, Any]) -> list:
+         """
+         Extract findings data from file data.
+
+         Args:
+             data (Dict[str, Any]): The data from the file
+
+         Returns:
+             list: List of finding items
+         """
+         # In our example, findings are directly in the data
+         # In a real integration, this might navigate a more complex structure
+         return [data] if data else []
+
+
+ @click.group()
+ def api_scanner():
+     """API Scanner Integration commands."""
+     pass
+
+
+ @api_scanner.command(name="sync_assets")
+ @click.option("--api-url", required=True, help="Base URL for the API")
+ @click.option("--api-key", help="API key for authentication")
+ @click.option("--api-token", help="API token for authentication")
+ @click.option("--plan-id", required=True, type=int, help="RegScale ID to create assets under")
+ def sync_assets(api_url: str, api_key: str, api_token: str, plan_id: int):
+     """Sync assets from API to RegScale."""
+     integration = ApiScannerExample(
+         plan_id=plan_id,
+         api_url=api_url,
+         api_key=api_key,
+         api_token=api_token,
+     )
+
+     # Fetch assets from API
+     integration.fetch_assets_from_api()
+
+     # Process assets
+     assets = integration.fetch_assets()
+     count = integration.update_regscale_assets(assets)
+
+     logger.info(f"Synchronized {count} assets from API to RegScale")
+
+
+ @api_scanner.command(name="sync_findings")
+ @click.option("--api-url", required=True, help="Base URL for the API")
+ @click.option("--api-key", help="API key for authentication")
+ @click.option("--api-token", help="API token for authentication")
+ @click.option("--plan-id", required=True, type=int, help="RegScale ID to create findings under")
+ def sync_findings(api_url: str, api_key: str, api_token: str, plan_id: int):
+     """Sync findings from API to RegScale."""
+     integration = ApiScannerExample(
+         plan_id=plan_id,
+         api_url=api_url,
+         api_key=api_key,
+         api_token=api_token,
+     )
+
+     # Fetch findings from API
+     integration.fetch_findings_from_api()
+
+     # Process findings
+     findings = integration.fetch_findings()
+     count = integration.update_regscale_findings(findings)
+
+     logger.info(f"Synchronized {count} findings from API to RegScale")
+
+
+ @api_scanner.command(name="sync_all")
+ @click.option("--api-url", required=True, help="Base URL for the API")
+ @click.option("--api-key", help="API key for authentication")
+ @click.option("--api-token", help="API token for authentication")
+ @click.option("--plan-id", required=True, type=int, help="RegScale ID for synchronization")
+ def sync_all(api_url: str, api_key: str, api_token: str, plan_id: int):
+     """Sync both assets and findings from API to RegScale."""
+     integration = ApiScannerExample(
+         plan_id=plan_id,
+         api_url=api_url,
+         api_key=api_key,
+         api_token=api_token,
+     )
+
+     # Fetch and process both assets and findings
+     integration.sync_assets_and_findings()
+
+     logger.info("Synchronized assets and findings from API to RegScale")
+
+
+ if __name__ == "__main__":
+     api_scanner()
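
Since the example module above registers its own Click group, it can be driven without the rest of the CLI. The snippet below is a minimal, hypothetical driver using Click's test runner; the API URL and plan ID are placeholders, and it assumes the `api_scanner` group behaves exactly as defined in the new file.

    # Hypothetical driver for the example module above; not part of the package.
    from click.testing import CliRunner

    from regscale.integrations.api_paginator_example import api_scanner

    runner = CliRunner()
    # Placeholder API URL and RegScale plan ID.
    result = runner.invoke(
        api_scanner,
        ["sync_all", "--api-url", "https://scanner.example.com/api", "--plan-id", "123"],
    )
    print(result.exit_code, result.output)
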
regscale/integrations/commercial/__init__.py
@@ -287,13 +287,14 @@ show_mapping(prisma, "prisma", "csv")
      lazy_subcommands={
          "export_scans": "regscale.integrations.commercial.qualys.export_past_scans",
          "import_scans": "regscale.integrations.commercial.qualys.import_scans",
-         "import_container_scans": "regscale.integrations.commercial.qualys.import_container_scans",
-         "import_was_scans": "regscale.integrations.commercial.qualys.import_was_scans",
-         "import_policy_scans": "regscale.integrations.commercial.qualys.import_policy_scans",
-         "save_results": "regscale.integrations.commercial.qualys.save_results",
-         "sync_qualys": "regscale.integrations.commercial.qualys.sync_qualys",
-         "get_asset_groups": "regscale.integrations.commercial.qualys.get_asset_groups",
-         "import_total_cloud": "regscale.integrations.commercial.qualys.import_total_cloud_assets_and_vulnerabilities",
+         "import_container_scans": "regscale.integrations.commercial.qualys.__init__.import_container_scans",
+         "import_was_scans": "regscale.integrations.commercial.qualys.__init__.import_was_scans",
+         "import_policy_scans": "regscale.integrations.commercial.qualys.__init__.import_policy_scans",
+         "save_results": "regscale.integrations.commercial.qualys.__init__.save_results",
+         "sync_qualys": "regscale.integrations.commercial.qualys.__init__.sync_qualys",
+         "get_asset_groups": "regscale.integrations.commercial.qualys.__init__.get_asset_groups",
+         "import_total_cloud_xml": "regscale.integrations.commercial.qualys.__init__.import_total_cloud_from_xml",
+         "import_total_cloud": "regscale.integrations.commercial.qualys.__init__.import_total_cloud",
      },
      name="qualys",
  )
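
This hunk, and the tenable hunk that follows, only repoints the dotted import strings handed to the CLI's LazyGroup: the qualys entries now reference the new `qualys` package's `__init__` module, and the tenable entries move from the removed `click.py` to the new `commands.py`. The RegScale LazyGroup implementation itself is not part of this diff; the sketch below is an assumed approximation, following the lazy-loading pattern from Click's documentation, of how such a mapping is typically resolved by splitting each entry into a module path and a command attribute that is imported only when the subcommand runs.

    # Assumed sketch of lazy subcommand resolution; not the actual regscale LazyGroup source.
    import importlib

    import click


    class LazyGroupSketch(click.Group):
        def __init__(self, *args, lazy_subcommands=None, **kwargs):
            super().__init__(*args, **kwargs)
            # Maps CLI command name -> "package.module.command_attribute".
            self.lazy_subcommands = lazy_subcommands or {}

        def list_commands(self, ctx):
            return sorted(super().list_commands(ctx) + list(self.lazy_subcommands))

        def get_command(self, ctx, cmd_name):
            if cmd_name in self.lazy_subcommands:
                module_path, attr = self.lazy_subcommands[cmd_name].rsplit(".", 1)
                # Import is deferred until the subcommand is actually invoked.
                return getattr(importlib.import_module(module_path), attr)
            return super().get_command(ctx, cmd_name)

Deferring the imports this way keeps top-level commands such as `regscale --help` responsive, since heavy integration modules are loaded only when their subcommand is actually used.
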
@@ -426,9 +427,9 @@ def stig():
  @click.group(
      cls=LazyGroup,
      lazy_subcommands={
-         "io": "regscale.integrations.commercial.tenablev2.click.io",
-         "sc": "regscale.integrations.commercial.tenablev2.click.sc",
-         "nessus": "regscale.integrations.commercial.tenablev2.click.nessus",
+         "io": "regscale.integrations.commercial.tenablev2.commands.io",
+         "sc": "regscale.integrations.commercial.tenablev2.commands.sc",
+         "nessus": "regscale.integrations.commercial.tenablev2.commands.nessus",
      },
      name="tenable",
  )