regscale-cli 6.18.0.0-py3-none-any.whl → 6.19.0.1-py3-none-any.whl

This diff shows the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release.
Files changed (47)
  1. regscale/__init__.py +1 -1
  2. regscale/integrations/api_paginator.py +932 -0
  3. regscale/integrations/api_paginator_example.py +348 -0
  4. regscale/integrations/commercial/__init__.py +11 -10
  5. regscale/integrations/commercial/{qualys.py → qualys/__init__.py} +756 -105
  6. regscale/integrations/commercial/qualys/scanner.py +1051 -0
  7. regscale/integrations/commercial/qualys/variables.py +21 -0
  8. regscale/integrations/commercial/sicura/api.py +1 -0
  9. regscale/integrations/commercial/stigv2/click_commands.py +36 -8
  10. regscale/integrations/commercial/stigv2/stig_integration.py +63 -9
  11. regscale/integrations/commercial/tenablev2/__init__.py +9 -0
  12. regscale/integrations/commercial/tenablev2/authenticate.py +23 -2
  13. regscale/integrations/commercial/tenablev2/commands.py +779 -0
  14. regscale/integrations/commercial/tenablev2/jsonl_scanner.py +1999 -0
  15. regscale/integrations/commercial/tenablev2/sc_scanner.py +600 -0
  16. regscale/integrations/commercial/tenablev2/scanner.py +7 -5
  17. regscale/integrations/commercial/tenablev2/utils.py +21 -4
  18. regscale/integrations/commercial/tenablev2/variables.py +4 -0
  19. regscale/integrations/jsonl_scanner_integration.py +523 -142
  20. regscale/integrations/scanner_integration.py +102 -26
  21. regscale/integrations/transformer/__init__.py +17 -0
  22. regscale/integrations/transformer/data_transformer.py +445 -0
  23. regscale/integrations/transformer/mappings/__init__.py +8 -0
  24. regscale/integrations/variables.py +2 -0
  25. regscale/models/__init__.py +5 -2
  26. regscale/models/integration_models/cisa_kev_data.json +6 -6
  27. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  28. regscale/models/regscale_models/asset.py +5 -2
  29. regscale/models/regscale_models/file.py +5 -2
  30. regscale/models/regscale_models/group.py +2 -1
  31. regscale/models/regscale_models/user_group.py +1 -1
  32. regscale/regscale.py +3 -1
  33. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/METADATA +1 -1
  34. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/RECORD +46 -30
  35. tests/regscale/core/test_version.py +22 -0
  36. tests/regscale/integrations/__init__.py +0 -0
  37. tests/regscale/integrations/test_api_paginator.py +597 -0
  38. tests/regscale/integrations/test_integration_mapping.py +60 -0
  39. tests/regscale/integrations/test_issue_creation.py +317 -0
  40. tests/regscale/integrations/test_issue_due_date.py +46 -0
  41. tests/regscale/integrations/transformer/__init__.py +0 -0
  42. tests/regscale/integrations/transformer/test_data_transformer.py +850 -0
  43. regscale/integrations/commercial/tenablev2/click.py +0 -1641
  44. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/LICENSE +0 -0
  45. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/WHEEL +0 -0
  46. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/entry_points.txt +0 -0
  47. {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/top_level.txt +0 -0
regscale/integrations/transformer/data_transformer.py (new file)
@@ -0,0 +1,445 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Data Transformer Module
+
+ This module provides a DataTransformer class that can transform data from
+ various formats (JSON, XML, dict) into IntegrationAsset and IntegrationFinding objects
+ using a mapping file.
+ """
+
+ import json
+ import logging
+ import os
+ import xml.etree.ElementTree as ET
+ from dataclasses import dataclass
+ from typing import Any, Dict, Iterator, List, Optional, Union
+
+ from regscale.core.app.utils.app_utils import get_current_datetime
+ from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+ from regscale.models import regscale_models
+
+ logger = logging.getLogger("regscale")
+
+
+ @dataclass
+ class DataMapping:
+     """
+     Data structure to hold mapping configuration for transforming data.
+     """
+
+     asset_mapping: Dict[str, str]
+     finding_mapping: Dict[str, str]
+     asset_defaults: Dict[str, Any]
+     finding_defaults: Dict[str, Any]
+     severity_mapping: Dict[str, str]
+
+
+ class DataTransformer:
+     """
+     Transforms data from various formats (JSON, XML, dict) into IntegrationAsset and
+     IntegrationFinding objects using a mapping file.
+
+     This class provides functionality to:
+     1. Load mapping configurations
+     2. Transform source data into IntegrationAsset objects
+     3. Transform source data into IntegrationFinding objects
+     4. Handle different input formats (JSON, XML, dict)
+
+     The mapping file should contain mappings for both assets and findings,
+     as well as default values and transformations.
+     """
+
+     def __init__(self, mapping_file: Optional[str] = None, mapping_data: Optional[Dict[str, Any]] = None):
+         """
+         Initialize the DataTransformer with a mapping file or mapping data.
+
+         Args:
+             mapping_file (Optional[str]): Path to the mapping file (JSON format)
+             mapping_data (Optional[Dict[str, Any]]): Mapping data as a dictionary
+
+         Raises:
+             ValueError: If neither mapping_file nor mapping_data is provided
+         """
+         if not mapping_file and not mapping_data:
+             raise ValueError("Either mapping_file or mapping_data must be provided")
+
+         self.mapping = self._load_mapping(mapping_file, mapping_data)
+         self.scan_date = get_current_datetime()
+
+     def _load_mapping(self, mapping_file: Optional[str], mapping_data: Optional[Dict[str, Any]]) -> DataMapping:
+         """
+         Load mapping configuration from a file or dictionary.
+
+         Args:
+             mapping_file (Optional[str]): Path to the mapping file
+             mapping_data (Optional[Dict[str, Any]]): Mapping data as a dictionary
+
+         Returns:
+             DataMapping: The loaded mapping configuration
+
+         Raises:
+             FileNotFoundError: If the mapping file does not exist
+             json.JSONDecodeError: If the mapping file is not valid JSON
+         """
+         if mapping_file and os.path.exists(mapping_file):
+             logger.info(f"Loading mapping from file: {mapping_file}")
+             try:
+                 with open(mapping_file, "r") as f:
+                     data = json.load(f)
+             except json.JSONDecodeError as e:
+                 logger.error(f"Error parsing mapping file {mapping_file}: {str(e)}")
+                 raise
+         elif mapping_data:
+             logger.info("Using provided mapping data")
+             data = mapping_data
+         else:
+             raise FileNotFoundError(f"Mapping file {mapping_file} not found")
+
+         return DataMapping(
+             asset_mapping=data.get("asset_mapping", {}),
+             finding_mapping=data.get("finding_mapping", {}),
+             asset_defaults=data.get("asset_defaults", {}),
+             finding_defaults=data.get("finding_defaults", {}),
+             severity_mapping=data.get("severity_mapping", {}),
+         )
+
+     def _get_data_value(self, data: Dict[str, Any], field_path: str, default: Any = None) -> Any:
+         """
+         Extract a value from nested data using a dot-notation path.
+
+         Args:
+             data (Dict[str, Any]): The data to extract from
+             field_path (str): The path to the field (e.g., 'asset.info.name')
+             default (Any): Default value if the field is not found
+
+         Returns:
+             Any: The extracted value or the default
+         """
+         if not field_path:
+             return default
+
+         parts = field_path.split(".")
+         current = data
+
+         try:
+             for part in parts:
+                 if isinstance(current, dict) and part in current:
+                     current = current[part]
+                 elif isinstance(current, list) and part.isdigit():
+                     index = int(part)
+                     if 0 <= index < len(current):
+                         current = current[index]
+                     else:
+                         return default
+                 else:
+                     return default
+             return current
+         except (KeyError, TypeError, IndexError):
+             return default
+
+     def _apply_mapping(
+         self, source_data: Dict[str, Any], mapping: Dict[str, str], defaults: Dict[str, Any]
+     ) -> Dict[str, Any]:
+         """
+         Apply mapping configuration to source data.
+
+         Args:
+             source_data (Dict[str, Any]): The source data to transform
+             mapping (Dict[str, str]): The field mapping (target_field -> source_field_path)
+             defaults (Dict[str, Any]): Default values for fields
+
+         Returns:
+             Dict[str, Any]: The transformed data
+         """
+         result = {}
+
+         # Apply defaults first
+         for field, value in defaults.items():
+             result[field] = value
+
+         # Apply mappings, overriding defaults if needed
+         for target_field, source_path in mapping.items():
+             value = self._get_data_value(source_data, source_path)
+             if value is not None:
+                 result[target_field] = value
+
+         return result
+
+     def _parse_data_source(self, data_source: Union[str, Dict[str, Any], bytes]) -> Dict[str, Any]:
+         """
+         Parse the data source into a dictionary.
+
+         Args:
+             data_source: The data source (JSON string, XML string, dictionary, or file path)
+
+         Returns:
+             Dict[str, Any]: The parsed data
+
+         Raises:
+             ValueError: If the data source format is not recognized
+         """
+         if isinstance(data_source, dict):
+             return data_source
+
+         if isinstance(data_source, str):
+             # Check if it's a file path
+             if os.path.exists(data_source):
+                 with open(data_source, "r") as f:
+                     content = f.read()
+             else:
+                 content = data_source
+
+             # Try to parse as JSON
+             try:
+                 return json.loads(content)
+             except json.JSONDecodeError:
+                 pass
+
+             # Try to parse as XML
+             try:
+                 root = ET.fromstring(content)
+                 return self._xml_to_dict(root)
+             except ET.ParseError:
+                 pass
+
+             raise ValueError(f"Could not parse data source as JSON or XML: {data_source[:50]}...")
+
+         if isinstance(data_source, bytes):
+             # Try to parse as JSON
+             try:
+                 return json.loads(data_source)
+             except json.JSONDecodeError:
+                 pass
+
+             # Try to parse as XML
+             try:
+                 root = ET.fromstring(data_source.decode("utf-8"))
+                 return self._xml_to_dict(root)
+             except ET.ParseError:
+                 pass
+
+             raise ValueError("Could not parse data source as JSON or XML")
+
+         raise ValueError(f"Unsupported data source type: {type(data_source)}")
+
+     def _xml_to_dict(self, element: ET.Element) -> Dict[str, Any]:
+         """
+         Convert an XML element to a dictionary.
+
+         Args:
+             element (ET.Element): The XML element to convert
+
+         Returns:
+             Dict[str, Any]: The converted dictionary
+         """
+         result = {}
+
+         # Add attributes
+         for key, value in element.attrib.items():
+             result[f"@{key}"] = value
+
+         # Add children
+         for child in element:
+             child_data = self._xml_to_dict(child)
+
+             if child.tag in result:
+                 if isinstance(result[child.tag], list):
+                     result[child.tag].append(child_data)
+                 else:
+                     result[child.tag] = [result[child.tag], child_data]
+             else:
+                 result[child.tag] = child_data
+
+         # Add text content
+         if element.text and element.text.strip():
+             if result:
+                 result["#text"] = element.text.strip()
+             else:
+                 return element.text.strip()
+
+         return result
+
+     def transform_to_asset(
+         self, data_source: Union[str, Dict[str, Any], bytes], plan_id: Optional[int] = None
+     ) -> IntegrationAsset:
+         """
+         Transform data source into an IntegrationAsset object.
+
+         Args:
+             data_source: The data source to transform
+             plan_id (Optional[int]): The ID of the security plan
+
+         Returns:
+             IntegrationAsset: The transformed asset
+         """
+         data = self._parse_data_source(data_source)
+
+         # Apply mapping
+         mapped_data = self._apply_mapping(data, self.mapping.asset_mapping, self.mapping.asset_defaults)
+
+         # Ensure required fields have values
+         if "name" not in mapped_data:
+             mapped_data["name"] = "Unknown Asset"
+
+         if "identifier" not in mapped_data:
+             if "ip_address" in mapped_data:
+                 mapped_data["identifier"] = mapped_data["ip_address"]
+             else:
+                 mapped_data["identifier"] = mapped_data["name"]
+
+         # Add plan ID if provided
+         if plan_id:
+             mapped_data["parent_id"] = plan_id
+             mapped_data["parent_module"] = regscale_models.SecurityPlan.get_module_slug()
+
+         # Create IntegrationAsset
+         return IntegrationAsset(**mapped_data)
+
+     def transform_to_finding(
+         self, data_source: Union[str, Dict[str, Any], bytes], asset_identifier: Optional[str] = None
+     ) -> IntegrationFinding:
+         """
+         Transform data source into an IntegrationFinding object.
+
+         Args:
+             data_source: The data source to transform
+             asset_identifier (Optional[str]): The identifier of the associated asset
+
+         Returns:
+             IntegrationFinding: The transformed finding
+         """
+         data = self._parse_data_source(data_source)
+
+         # Apply mapping
+         mapped_data = self._apply_mapping(data, self.mapping.finding_mapping, self.mapping.finding_defaults)
+
+         # Map severity if needed
+         if "severity" in mapped_data:
+             raw_severity = str(mapped_data["severity"])
+             if raw_severity in self.mapping.severity_mapping:
+                 mapped_severity = self.mapping.severity_mapping[raw_severity]
+                 try:
+                     mapped_data["severity"] = getattr(regscale_models.IssueSeverity, mapped_severity)
+                 except AttributeError:
+                     logger.warning(f"Invalid severity mapping: {mapped_severity}")
+
+         # Ensure required fields have values
+         if "title" not in mapped_data:
+             mapped_data["title"] = "Unknown Finding"
+
+         if "description" not in mapped_data:
+             mapped_data["description"] = "No description available"
+
+         if "category" not in mapped_data:
+             mapped_data["category"] = "Vulnerability"
+
+         if "control_labels" not in mapped_data:
+             mapped_data["control_labels"] = []
+
+         if "plugin_name" not in mapped_data:
+             mapped_data["plugin_name"] = mapped_data["title"]
+
+         if "status" not in mapped_data:
+             mapped_data["status"] = regscale_models.IssueStatus.Open
+
+         # Set the asset identifier
+         if asset_identifier:
+             mapped_data["asset_identifier"] = asset_identifier
+         elif "asset_identifier" not in mapped_data:
+             mapped_data["asset_identifier"] = ""
+
+         # Set scan date
+         if "scan_date" not in mapped_data:
+             mapped_data["scan_date"] = self.scan_date
+
+         # Create IntegrationFinding
+         return IntegrationFinding(**mapped_data)
+
+     def batch_transform_to_assets(
+         self, data_sources: List[Union[str, Dict[str, Any], bytes]], plan_id: Optional[int] = None
+     ) -> Iterator[IntegrationAsset]:
+         """
+         Transform multiple data sources into IntegrationAsset objects.
+
+         Args:
+             data_sources: List of data sources to transform
+             plan_id (Optional[int]): The ID of the security plan
+
+         Yields:
+             IntegrationAsset: The transformed assets
+         """
+         for data_source in data_sources:
+             try:
+                 yield self.transform_to_asset(data_source, plan_id)
+             except Exception as e:
+                 logger.error(f"Error transforming data source to asset: {str(e)}")
+
+     def batch_transform_to_findings(
+         self, data_sources: List[Union[str, Dict[str, Any], bytes]], asset_identifier: Optional[str] = None
+     ) -> Iterator[IntegrationFinding]:
+         """
+         Transform multiple data sources into IntegrationFinding objects.
+
+         Args:
+             data_sources: List of data sources to transform
+             asset_identifier (Optional[str]): The identifier of the associated asset
+
+         Yields:
+             IntegrationFinding: The transformed findings
+         """
+         for data_source in data_sources:
+             try:
+                 yield self.transform_to_finding(data_source, asset_identifier)
+             except Exception as e:
+                 logger.error(f"Error transforming data source to finding: {str(e)}")
+
+
+ # Example Tenable SC mapping file structure for reference
+ TENABLE_SC_MAPPING = {
+     "asset_mapping": {
+         "name": "dnsName",
+         "identifier": "ip",
+         "ip_address": "ip",
+         "mac_address": "macAddress",
+         "asset_type": "family.type",
+         "asset_category": "Hardware",
+         "fqdn": "dnsName",
+         "status": "", # Will be handled by transformation logic
+     },
+     "finding_mapping": {
+         "title": "pluginName",
+         "description": "description",
+         "plugin_name": "pluginName",
+         "plugin_id": "pluginID",
+         "severity": "severity.name",
+         "category": "family.name",
+         "cve": "cve",
+         "cvss_v3_score": "cvssV3BaseScore",
+         "cvss_v2_score": "cvssV2BaseScore",
+         "cvss_v3_vector": "cvssV3Vector",
+         "cvss_v2_vector": "cvssV2Vector",
+         "recommendation_for_mitigation": "solution",
+         "identified_risk": "risk_factor",
+         "evidence": "output",
+     },
+     "asset_defaults": {
+         "asset_owner_id": "", # Will be set by ScannerIntegration
+         "status": "Active (On Network)",
+         "asset_type": "Other",
+         "asset_category": "Hardware",
+     },
+     "finding_defaults": {"priority": "Medium", "status": "Open", "issue_type": "Risk"},
+     "severity_mapping": {
+         "Critical": "Critical",
+         "High": "High",
+         "Medium": "Moderate",
+         "Low": "Low",
+         "Info": "NotAssigned",
+         "4": "Critical",
+         "3": "High",
+         "2": "Moderate",
+         "1": "Low",
+         "0": "NotAssigned",
+     },
+ }
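To make the new module concrete, here is a minimal usage sketch based only on the API visible in the hunk above. The mapping dict mirrors the structure of TENABLE_SC_MAPPING, but the sample scanner record and its field names are invented for illustration, and running it assumes an installed regscale-cli environment where the imports in this file resolve.

    # Hypothetical usage of the DataTransformer added in this release.
    # Only the DataTransformer API comes from the diff; the mapping and
    # the sample record below are fabricated for illustration.
    from regscale.integrations.transformer.data_transformer import DataTransformer

    mapping = {
        "asset_mapping": {"name": "host.dns", "identifier": "host.ip", "ip_address": "host.ip"},
        "finding_mapping": {"title": "plugin.name", "severity": "plugin.severity"},
        "asset_defaults": {
            "asset_owner_id": "",  # set later by ScannerIntegration, per TENABLE_SC_MAPPING
            "status": "Active (On Network)",
            "asset_type": "Other",
            "asset_category": "Hardware",
        },
        "finding_defaults": {"priority": "Medium", "status": "Open", "issue_type": "Risk"},
        "severity_mapping": {"High": "High", "Medium": "Moderate", "Low": "Low"},
    }

    # Fabricated scanner record; the dot-notation paths above walk these nested keys.
    record = {
        "host": {"dns": "web01.example.com", "ip": "10.0.0.5"},
        "plugin": {"name": "Outdated TLS", "severity": "Medium"},
    }

    transformer = DataTransformer(mapping_data=mapping)
    asset = transformer.transform_to_asset(record, plan_id=123)
    finding = transformer.transform_to_finding(record, asset_identifier=asset.identifier)
    print(asset.name, finding.severity)  # "Medium" maps to IssueSeverity.Moderate

The same transformer instance also accepts JSON or XML strings, file paths, and bytes through _parse_data_source, so the batch_transform_to_* generators can iterate over heterogeneous inputs.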
regscale/integrations/transformer/mappings/__init__.py (new file)
@@ -0,0 +1,8 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Mapping configurations for DataTransformer.
+
+ This package contains mapping files used by DataTransformer to convert
+ external data formats into IntegrationAsset and IntegrationFinding objects.
+ """
regscale/integrations/variables.py
@@ -23,3 +23,5 @@ class ScannerVariables(metaclass=RsVariablesMeta):
      incrementPoamIdentifier: RsVariableType(bool, "true|false", default=False, required=False) # type: ignore # noqa: F722,F821
      sslVerify: RsVariableType(bool, "true|false", default=True, required=False) # type: ignore # noqa: F722,F821
      issueDueDates: RsVariableType(dict, "dueDates", default="{'high': 60, 'moderate': 120, 'low': 364}", required=False) # type: ignore # noqa: F722,F821
+     maxRetries: RsVariableType(int, "3", default=3, required=False) # type: ignore
+     timeout: RsVariableType(int, "60", default=60, required=False) # type: ignore
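A hedged sketch of how downstream integration code might consume the two new variables. This assumes the RsVariablesMeta metaclass resolves declared variables to plain class-attribute access; the retry loop and URL are invented for illustration and are not part of the diff.

    # Hypothetical consumer of the new ScannerVariables fields; assumes the
    # metaclass exposes maxRetries/timeout/sslVerify as class attributes.
    import requests

    from regscale.integrations.variables import ScannerVariables

    def fetch_with_retries(url: str) -> requests.Response:
        last_error: Exception = RuntimeError("no attempts made")
        for _attempt in range(ScannerVariables.maxRetries):  # new in 6.19.0.1, default 3
            try:
                return requests.get(
                    url,
                    timeout=ScannerVariables.timeout,  # new in 6.19.0.1, default 60 seconds
                    verify=ScannerVariables.sslVerify,
                )
            except requests.RequestException as exc:
                last_error = exc
        raise last_error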
regscale/models/__init__.py
@@ -2,6 +2,9 @@
  # -*- coding: utf-8 -*-
  """standard python imports"""
  from regscale.models.app_models.click import regscale_id, regscale_module, regscale_ssp_id
- from .regscale_models import *
- from .integration_models import *
  from .app_models import *
+ from .integration_models import *
+ from .regscale_models import *
+
+ # Import v2 models
+ # from .v2 import *
regscale/models/integration_models/cisa_kev_data.json
@@ -1,7 +1,7 @@
  {
    "title": "CISA Catalog of Known Exploited Vulnerabilities",
-   "catalogVersion": "2025.04.17",
-   "dateReleased": "2025-04-17T17:01:44.4538Z",
+   "catalogVersion": "2025.04.25",
+   "dateReleased": "2025-04-25T18:02:32.6749Z",
    "count": 1323,
    "vulnerabilities": [
      {
@@ -109,13 +109,13 @@
      "cveID": "CVE-2025-30406",
      "vendorProject": "Gladinet",
      "product": "CentreStack",
-     "vulnerabilityName": "Gladinet CentreStack Use of Hard-coded Cryptographic Key Vulnerability",
+     "vulnerabilityName": "Gladinet CentreStack and Triofox Use of Hard-coded Cryptographic Key Vulnerability",
      "dateAdded": "2025-04-08",
-     "shortDescription": "Gladinet CentreStack contains a use of hard-coded cryptographic key vulnerability in the way that the application manages keys used for ViewState integrity verification. Successful exploitation allows an attacker to forge ViewState payloads for server-side deserialization, allowing for remote code execution.",
+     "shortDescription": "Gladinet CentreStack and Triofox contains a use of hard-coded cryptographic key vulnerability in the way that the application manages keys used for ViewState integrity verification. Successful exploitation allows an attacker to forge ViewState payloads for server-side deserialization, allowing for remote code execution.",
      "requiredAction": "Apply mitigations per vendor instructions, follow applicable BOD 22-01 guidance for cloud services, or discontinue use of the product if mitigations are unavailable.",
      "dueDate": "2025-04-29",
      "knownRansomwareCampaignUse": "Unknown",
-     "notes": "https:\/\/gladinetsupport.s3.us-east-1.amazonaws.com\/gladinet\/securityadvisory-cve-2005.pdf ; https:\/\/www.centrestack.com\/p\/gce_latest_release.html ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2025-30406",
+     "notes": "https:\/\/gladinetsupport.s3.us-east-1.amazonaws.com\/gladinet\/securityadvisory-cve-2005.pdf ; https:\/\/gladinetsupport.s3.us-east-1.amazonaws.com\/gladinet\/securityadvisory-cve-2025-triofox.pdf ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2025-30406",
      "cwes": [
        "CWE-321"
      ]
@@ -6664,7 +6664,7 @@
      "shortDescription": "Intel ethernet diagnostics driver for Windows IQVW32.sys and IQVW64.sys contain an unspecified vulnerability that allows for a denial-of-service (DoS).",
      "requiredAction": "Apply updates per vendor instructions.",
      "dueDate": "2023-03-03",
-     "knownRansomwareCampaignUse": "Unknown",
+     "knownRansomwareCampaignUse": "Known",
      "notes": "https:\/\/www.intel.com\/content\/www\/us\/en\/security-center\/advisory\/intel-sa-00051.html; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2015-2291",
      "cwes": [
        "CWE-20"