regscale-cli 6.20.4.1__py3-none-any.whl → 6.20.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli has been flagged as potentially problematic; consult the registry's advisory page for details.
- regscale/__init__.py +1 -1
- regscale/_version.py +39 -0
- regscale/core/app/internal/__init__.py +13 -0
- regscale/core/app/internal/model_editor.py +3 -3
- regscale/core/app/internal/set_permissions.py +173 -0
- regscale/core/app/utils/file_utils.py +11 -1
- regscale/core/app/utils/regscale_utils.py +34 -129
- regscale/core/utils/date.py +86 -30
- regscale/integrations/commercial/defender.py +3 -0
- regscale/integrations/commercial/qualys/__init__.py +40 -14
- regscale/integrations/commercial/qualys/containers.py +324 -0
- regscale/integrations/commercial/qualys/scanner.py +203 -8
- regscale/integrations/commercial/synqly/edr.py +10 -0
- regscale/integrations/commercial/wizv2/click.py +11 -7
- regscale/integrations/commercial/wizv2/constants.py +28 -0
- regscale/integrations/commercial/wizv2/issue.py +3 -2
- regscale/integrations/commercial/wizv2/parsers.py +23 -0
- regscale/integrations/commercial/wizv2/scanner.py +89 -30
- regscale/integrations/commercial/wizv2/utils.py +208 -75
- regscale/integrations/commercial/wizv2/variables.py +2 -1
- regscale/integrations/commercial/wizv2/wiz_auth.py +3 -3
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +98 -20
- regscale/integrations/public/fedramp/fedramp_docx.py +2 -3
- regscale/integrations/scanner_integration.py +7 -2
- regscale/models/integration_models/cisa_kev_data.json +187 -5
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/__init__.py +2 -0
- regscale/models/regscale_models/asset.py +1 -1
- regscale/models/regscale_models/catalog.py +16 -0
- regscale/models/regscale_models/file.py +2 -1
- regscale/models/regscale_models/form_field_value.py +59 -1
- regscale/models/regscale_models/issue.py +47 -0
- regscale/models/regscale_models/modules.py +88 -1
- regscale/models/regscale_models/organization.py +30 -0
- regscale/models/regscale_models/regscale_model.py +20 -6
- regscale/models/regscale_models/security_control.py +47 -0
- regscale/models/regscale_models/security_plan.py +32 -0
- regscale/models/regscale_models/vulnerability.py +3 -3
- regscale/models/regscale_models/vulnerability_mapping.py +2 -2
- regscale/regscale.py +2 -0
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/RECORD +49 -44
- tests/fixtures/test_fixture.py +33 -4
- tests/regscale/core/test_app.py +53 -32
- tests/regscale/test_init.py +94 -0
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/top_level.txt +0 -0
|
@@ -7,41 +7,43 @@ import os
|
|
|
7
7
|
import pprint
|
|
8
8
|
import traceback
|
|
9
9
|
from asyncio import sleep
|
|
10
|
-
from datetime import datetime,
|
|
10
|
+
from datetime import datetime, timedelta, timezone
|
|
11
11
|
from json import JSONDecodeError
|
|
12
|
-
from
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Any, Optional, Tuple, Union
|
|
13
14
|
from urllib.parse import urljoin
|
|
14
15
|
|
|
15
16
|
import click
|
|
16
17
|
import requests
|
|
17
18
|
import xmltodict
|
|
18
|
-
from pathlib import Path
|
|
19
19
|
from requests import Session
|
|
20
|
-
from rich.progress import
|
|
20
|
+
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskID, TextColumn, TimeElapsedColumn
|
|
21
21
|
|
|
22
22
|
from regscale.core.app.utils.app_utils import (
|
|
23
23
|
check_file_path,
|
|
24
24
|
check_license,
|
|
25
|
+
create_progress_object,
|
|
26
|
+
error_and_exit,
|
|
25
27
|
get_current_datetime,
|
|
26
28
|
save_data_to,
|
|
27
|
-
error_and_exit,
|
|
28
|
-
create_progress_object,
|
|
29
29
|
)
|
|
30
30
|
from regscale.core.app.utils.file_utils import download_from_s3
|
|
31
|
+
from regscale.integrations.commercial.qualys.containers import fetch_all_vulnerabilities
|
|
31
32
|
from regscale.integrations.commercial.qualys.qualys_error_handler import QualysErrorHandler
|
|
32
33
|
from regscale.integrations.commercial.qualys.scanner import QualysTotalCloudJSONLIntegration
|
|
33
34
|
from regscale.integrations.commercial.qualys.variables import QualysVariables
|
|
34
35
|
from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
|
|
35
36
|
from regscale.integrations.variables import ScannerVariables
|
|
36
|
-
from regscale.models import Asset, Search, regscale_models
|
|
37
|
-
from regscale.models.app_models.click import regscale_ssp_id, save_output_to
|
|
37
|
+
from regscale.models import Asset, Issue, Search, regscale_models
|
|
38
|
+
from regscale.models.app_models.click import NotRequiredIf, regscale_ssp_id, save_output_to
|
|
38
39
|
from regscale.models.integration_models.flat_file_importer import FlatFileImporter
|
|
39
40
|
from regscale.models.integration_models.qualys import (
|
|
41
|
+
Qualys,
|
|
40
42
|
QualysContainerScansImporter,
|
|
41
|
-
QualysWasScansImporter,
|
|
42
43
|
QualysPolicyScansImporter,
|
|
43
|
-
|
|
44
|
+
QualysWasScansImporter,
|
|
44
45
|
)
|
|
46
|
+
from regscale.validation.record import validate_regscale_object
|
|
45
47
|
|
|
46
48
|
# Create logger for this module
|
|
47
49
|
logger = logging.getLogger("regscale")
|
|
@@ -332,14 +334,29 @@ class FindingProgressTracker:
|
|
|
332
334
|
required=False,
|
|
333
335
|
help="Enable/disable SSL certificate verification for API calls.",
|
|
334
336
|
)
|
|
337
|
+
@click.option(
|
|
338
|
+
"--containers",
|
|
339
|
+
type=click.BOOL,
|
|
340
|
+
help="To disable fetching containers, use False. Defaults to True.",
|
|
341
|
+
default=True,
|
|
342
|
+
)
|
|
335
343
|
def import_total_cloud(
|
|
336
|
-
regscale_ssp_id: int,
|
|
344
|
+
regscale_ssp_id: int,
|
|
345
|
+
include_tags: str,
|
|
346
|
+
exclude_tags: str,
|
|
347
|
+
vulnerability_creation: str,
|
|
348
|
+
ssl_verify: bool,
|
|
349
|
+
containers: bool,
|
|
337
350
|
):
|
|
338
351
|
"""
|
|
339
352
|
Import Qualys Total Cloud Assets and Vulnerabilities using JSONL scanner implementation.
|
|
340
353
|
|
|
341
354
|
This command uses the JSONLScannerIntegration class for improved efficiency and memory management.
|
|
342
355
|
"""
|
|
356
|
+
if not validate_regscale_object(regscale_ssp_id, "securityplans"):
|
|
357
|
+
logger.warning("SSP #%i is not a valid RegScale Security Plan.", regscale_ssp_id)
|
|
358
|
+
return
|
|
359
|
+
containers_lst = []
|
|
343
360
|
try:
|
|
344
361
|
# Configure scanner variables and fetch data
|
|
345
362
|
_configure_scanner_variables(vulnerability_creation, ssl_verify)
|
|
@@ -347,8 +364,14 @@ def import_total_cloud(
|
|
|
347
364
|
if not response_data:
|
|
348
365
|
return
|
|
349
366
|
|
|
367
|
+
if containers:
|
|
368
|
+
# Fetch containers and container findings
|
|
369
|
+
containers_lst = fetch_all_vulnerabilities()
|
|
370
|
+
|
|
350
371
|
# Initialize and run integration
|
|
351
|
-
integration = _initialize_integration(
|
|
372
|
+
integration = _initialize_integration(
|
|
373
|
+
regscale_ssp_id, response_data, vulnerability_creation, ssl_verify, containers_lst
|
|
374
|
+
)
|
|
352
375
|
_run_integration_import(integration)
|
|
353
376
|
|
|
354
377
|
logger.info("Qualys Total Cloud data imported successfully with JSONL scanner.")
|
|
@@ -460,13 +483,14 @@ def _fetch_qualys_api_data(include_tags, exclude_tags):
|
|
|
460
483
|
return None
|
|
461
484
|
|
|
462
485
|
|
|
463
|
-
def _initialize_integration(regscale_ssp_id, response_data, vulnerability_creation, ssl_verify):
|
|
486
|
+
def _initialize_integration(regscale_ssp_id, response_data, vulnerability_creation, ssl_verify, containers):
|
|
464
487
|
"""Initialize the scanner integration with appropriate settings.
|
|
465
488
|
|
|
466
489
|
:param int regscale_ssp_id: RegScale SSP ID
|
|
467
490
|
:param dict response_data: Parsed XML data from API
|
|
468
491
|
:param str vulnerability_creation: Vulnerability creation mode
|
|
469
492
|
:param bool ssl_verify: SSL verification setting
|
|
493
|
+
:param list containers: List of containers
|
|
470
494
|
:return: Initialized integration object
|
|
471
495
|
"""
|
|
472
496
|
# Build integration kwargs
|
|
@@ -475,6 +499,7 @@ def _initialize_integration(regscale_ssp_id, response_data, vulnerability_creati
|
|
|
475
499
|
"xml_data": response_data,
|
|
476
500
|
"vulnerability_creation": vulnerability_creation or ScannerVariables.vulnerabilityCreation,
|
|
477
501
|
"ssl_verify": ssl_verify if ssl_verify is not None else ScannerVariables.sslVerify,
|
|
502
|
+
"containers": containers,
|
|
478
503
|
}
|
|
479
504
|
|
|
480
505
|
# Add thread workers if available
|
|
@@ -634,7 +659,7 @@ def _count_jsonl_items(integration):
|
|
|
634
659
|
|
|
635
660
|
def _create_progress_bar():
|
|
636
661
|
"""Create a progress bar that doesn't reset at 100%."""
|
|
637
|
-
from rich.progress import Progress, SpinnerColumn, TextColumn,
|
|
662
|
+
from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
|
|
638
663
|
|
|
639
664
|
class NonResettingProgress(Progress):
|
|
640
665
|
def update(self, task_id, **fields):
|
|
@@ -1120,6 +1145,7 @@ def process_files_with_importer(
|
|
|
1120
1145
|
:param Optional[bool] upload_file: Whether to upload the file to RegScale after processing, defaults to True.
|
|
1121
1146
|
"""
|
|
1122
1147
|
import csv
|
|
1148
|
+
|
|
1123
1149
|
from openpyxl import Workbook
|
|
1124
1150
|
|
|
1125
1151
|
if s3_bucket:
|
|
@@ -0,0 +1,324 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Container operations module for Qualys CS API integration.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
import traceback
|
|
7
|
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
8
|
+
from functools import lru_cache
|
|
9
|
+
from json import JSONDecodeError
|
|
10
|
+
from typing import Dict, List, Optional
|
|
11
|
+
from urllib.parse import urljoin
|
|
12
|
+
|
|
13
|
+
from requests import RequestException
|
|
14
|
+
from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
|
|
15
|
+
|
|
16
|
+
# Create logger for this module
|
|
17
|
+
logger = logging.getLogger("regscale")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@lru_cache(maxsize=1)
def auth_cs_api() -> tuple[str, dict]:
    """
    Authenticate with the Qualys CS API using form-based authentication.

    The result is cached for the life of the process (``lru_cache``), so the
    token is fetched once and reused by subsequent callers.

    :raises ValueError: If ``qualysUrl`` is missing from the configuration
    :raises RequestException: If the authentication request is rejected
    :return: A tuple of the gateway base URL and a dictionary of request headers
    :rtype: tuple[str, dict]
    """
    from . import QUALYS_API, _get_config  # noqa: C0415

    config = _get_config()
    qualys_url = config.get("qualysUrl")
    user = config.get("qualysUserName")
    password = config.get("qualysPassword")

    # Qualys requires this header on API calls
    auth_headers = {"X-Requested-With": "RegScale CLI"}

    # Form data for token-based authentication
    auth_data = {"username": user, "password": password, "permissions": "true", "token": "true"}

    # The CS API is served from the gateway host, e.g.
    # https://gateway.qg3.apps.qualys.com/auth
    # Fail fast with a clear message instead of letting urljoin(None, ...) blow up.
    if not qualys_url:
        raise ValueError("qualysUrl is not configured; cannot authenticate with the Qualys CS API.")
    base_url = qualys_url.replace("qualysguard", "gateway")

    try:
        # Make authentication request
        auth_url = urljoin(base_url, "/auth")
        response = QUALYS_API.post(url=auth_url, headers=auth_headers, data=auth_data)

        if response.ok:
            logger.info("Successfully authenticated with Qualys CS API")

            # Parse the response to extract the JWT token; the body is the raw token text
            try:
                response_text = response.content.decode("utf-8")

                # Add Authorization Bearer header
                auth_headers["Authorization"] = f"Bearer {response_text}"

                logger.debug("Added Authorization Bearer header to auth_headers")
            except (UnicodeDecodeError, AttributeError) as e:
                # Best effort: continue without the Authorization header if parsing fails
                logger.warning("Could not decode response content for Authorization header: %s", e)
                logger.debug(
                    "Response content type: %s, length: %s",
                    type(response.content),
                    len(response.content) if hasattr(response.content, "__len__") else "unknown",
                )
        else:
            raise RequestException(f"Authentication failed with status code: {response.status_code}")

    except Exception as e:
        logger.error("Error during authentication: %s", e)
        raise

    return base_url, auth_headers
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def _make_api_request(current_url: str, headers: dict, params: Optional[Dict] = None) -> dict:
    """
    Make an API request to the Qualys CS API and return its JSON payload.

    On failure (non-2xx status or unparseable JSON) an empty ``{"data": []}``
    payload is returned instead of raising. The raw response object is always
    attached under the ``_response`` key so callers can inspect headers
    (e.g. ``Link`` for pagination).

    :param str current_url: The URL for the API request
    :param dict headers: Headers to include in the request
    :param Dict params: Optional query parameters for pagination
    :return: Response data containing items and the response object
    :rtype: dict
    """
    from . import QUALYS_API  # noqa: C0415

    # Make API request
    response = QUALYS_API.get(url=current_url, headers=headers, params=params)

    # Validate response
    if not response.ok:
        logger.error("API request failed: %s - %s", response.status_code, response.text)
        return {"data": [], "_response": response}

    try:
        response_data = response.json()
        # Some endpoints return a bare JSON list; normalize to the dict shape
        # callers expect so the "_response" assignment below cannot TypeError.
        if not isinstance(response_data, dict):
            response_data = {"data": response_data}
        response_data["_response"] = response  # Include response object for headers
        return response_data
    except JSONDecodeError as e:
        logger.error("Failed to parse JSON response: %s", e)
        return {"data": [], "_response": response}
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def _parse_link_header(link_header: str) -> Optional[str]:
|
|
115
|
+
"""
|
|
116
|
+
Parse the Link header to find the next page URL.
|
|
117
|
+
|
|
118
|
+
:param str link_header: The Link header value
|
|
119
|
+
:return: The next page URL or None if not found
|
|
120
|
+
:rtype: Optional[str]
|
|
121
|
+
"""
|
|
122
|
+
if not link_header:
|
|
123
|
+
logger.debug("No Link header found, assuming no more pages")
|
|
124
|
+
return None
|
|
125
|
+
|
|
126
|
+
# Parse the Link header to find the next page URL
|
|
127
|
+
# Format: <url>;rel=next
|
|
128
|
+
for link in link_header.split(","):
|
|
129
|
+
link = link.strip()
|
|
130
|
+
if "rel=next" in link:
|
|
131
|
+
# Extract URL from <url>;rel=next format
|
|
132
|
+
url_start = link.find("<") + 1
|
|
133
|
+
url_end = link.find(">")
|
|
134
|
+
if 0 < url_start < url_end:
|
|
135
|
+
return link[url_start:url_end]
|
|
136
|
+
|
|
137
|
+
logger.debug("No next page URL found in Link header")
|
|
138
|
+
return None
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def _fetch_paginated_data(endpoint: str, filters: Optional[Dict] = None, limit: int = 100) -> List[Dict]:
    """
    Generic function to fetch paginated data from the Qualys CS API.

    Follows the ``Link`` response header until no next page is advertised.
    Errors are logged and the items collected so far are returned rather
    than raised.

    :param str endpoint: The API endpoint (e.g., 'containers/list', 'images/list')
    :param Optional[Dict] filters: Filters to apply to the request
    :param int limit: Number of items to fetch per page
    :return: A list of items from all pages
    :rtype: List[Dict]
    """
    all_items: List[Dict] = []
    page: int = 1
    # Initialized up front so the error handler below can always reference it,
    # even when auth_cs_api() raises before the first request URL is built.
    current_url: Optional[str] = None

    try:
        # Get authentication
        base_url, headers = auth_cs_api()

        # Prepare base parameters
        params = {"limit": limit}

        # Add filters if provided
        if filters:
            params.update(filters)

        # Track the current URL for pagination
        current_url = urljoin(base_url, f"/csapi/v1.3/{endpoint}")

        # Create progress bar for pagination
        progress = Progress(
            SpinnerColumn(),
            TextColumn("[bold blue]{task.description}"),
            BarColumn(),
            TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
            TextColumn("•"),
            TimeElapsedColumn(),
            console=None,
        )

        with progress:
            task = progress.add_task(f"[green]Fetching {endpoint} data...", total=None)  # Unknown total for pagination

            while current_url:
                # Make API request
                response_data = _make_api_request(current_url, headers, params)

                # Extract items from current page
                current_items = response_data.get("data", [])
                all_items.extend(current_items)

                # Update progress description with current status
                progress.update(
                    task, description=f"[green]Fetching {endpoint} data... (Page {page}, Total: {len(all_items)})"
                )

                logger.debug("Fetched page: %s items (Total so far: %s)", page, len(all_items))

                # Check for next page using the Link header
                response = response_data.get("_response")
                if not response or not hasattr(response, "headers"):
                    # If no response object available, assume single page
                    break

                link_header = response.headers.get("link", "")
                next_url = _parse_link_header(link_header)

                if not next_url:
                    break

                # Update current URL for next iteration
                current_url = next_url
                page += 1

                # Clear params for subsequent requests since they're in the URL
                params = {}

    except Exception as e:
        # current_url may still be None if authentication failed; fall back to the endpoint name
        logger.error("Error fetching data from %s: %s", current_url or endpoint, e)
        logger.debug(traceback.format_exc())

    logger.info("Completed: Fetched %s total items from %s", len(all_items), endpoint)
    return all_items
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
def fetch_all_containers(filters: Optional[Dict] = None, limit: int = 100) -> List[Dict]:
    """
    Retrieve every container record from the Qualys CS API, following pagination.

    :param Optional[Dict] filters: Filters to apply to the containers
    :param int limit: Number of containers to fetch per page
    :return: A list of containers
    :rtype: List[Dict]
    """
    return _fetch_paginated_data(endpoint="containers/list", filters=filters, limit=limit)
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
def fetch_all_images(filters: Optional[Dict] = None, limit: int = 100) -> List[Dict]:
    """
    Retrieve every image record from the Qualys CS API, following pagination.

    :param Optional[Dict] filters: Filters to apply to the images
    :param int limit: Number of images to fetch per page
    :return: A list of images
    :rtype: List[Dict]
    """
    return _fetch_paginated_data(endpoint="images/list", filters=filters, limit=limit)
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
def fetch_container_vulns(container_sha: str) -> List[Dict]:
    """
    Retrieve the vulnerability list for a single container from the Qualys CS API.

    :param str container_sha: The SHA of the container
    :return: A list of vulnerabilities
    :rtype: List[Dict]
    """
    base_url, headers = auth_cs_api()
    vuln_url = urljoin(base_url, f"/csapi/v1.3/containers/{container_sha}/vuln")
    payload = _make_api_request(vuln_url, headers)
    details = payload.get("details", {})
    return details.get("vulns", [])
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
def fetch_all_vulnerabilities(filters: Optional[Dict] = None, limit: int = 100, max_workers: int = 10) -> List[Dict]:
    """
    Fetch all containers and attach each container's vulnerability list.

    Containers are fetched first (paginated); their vulnerabilities are then
    fetched concurrently, up to ``max_workers`` threads at a time, and stored
    on each container dict under the ``"vulnerabilities"`` key.

    :param Optional[Dict] filters: Filters to apply to the containers
    :param int limit: Number of containers to fetch per page
    :param int max_workers: Maximum number of worker threads for concurrent vulnerability fetching
    :return: A list of containers with vulnerabilities
    :rtype: List[Dict]
    """
    containers = fetch_all_containers(filters, limit)

    if not containers:
        logger.info("No containers found to fetch vulnerabilities for")
        return containers

    # Progress bar shared across the worker pool
    progress = Progress(
        SpinnerColumn(),
        TextColumn("[bold blue]{task.description}"),
        BarColumn(),
        TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
        TextColumn("•"),
        TimeElapsedColumn(),
        console=None,
    )

    def _vulns_for(container):
        """Fetch the vulnerability list for one container, tolerating failures."""
        sha = container.get("sha")
        if not sha:
            logger.warning("Container missing SHA, skipping vulnerability fetch")
            return container, []
        try:
            found = fetch_container_vulns(sha)
            logger.debug("Fetched %s vulnerabilities for container %s...", len(found), sha[:8])
            return container, found
        except Exception as exc:
            logger.error("Error fetching vulnerabilities for container %s: %s", sha, exc)
            return container, []

    with progress:
        task = progress.add_task(
            f"[yellow]Fetching vulnerabilities for {len(containers)} containers...", total=len(containers)
        )

        # Fan out one future per container, attaching results as they complete
        with ThreadPoolExecutor(max_workers=max_workers) as pool:
            pending = [pool.submit(_vulns_for, container) for container in containers]
            for done in as_completed(pending):
                container, vulns = done.result()
                container["vulnerabilities"] = vulns
                progress.update(task, advance=1)

    logger.info("Completed fetching vulnerabilities for %s containers using %s workers", len(containers), max_workers)
    return containers