regscale-cli 6.20.4.1__py3-none-any.whl → 6.20.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (33)
  1. regscale/__init__.py +1 -1
  2. regscale/core/app/internal/model_editor.py +3 -3
  3. regscale/core/app/utils/regscale_utils.py +37 -0
  4. regscale/core/utils/date.py +26 -3
  5. regscale/integrations/commercial/defender.py +3 -0
  6. regscale/integrations/commercial/qualys/__init__.py +40 -14
  7. regscale/integrations/commercial/qualys/containers.py +324 -0
  8. regscale/integrations/commercial/qualys/scanner.py +203 -8
  9. regscale/integrations/commercial/synqly/edr.py +10 -0
  10. regscale/integrations/commercial/wizv2/click.py +2 -2
  11. regscale/integrations/commercial/wizv2/constants.py +13 -0
  12. regscale/integrations/commercial/wizv2/issue.py +3 -2
  13. regscale/integrations/commercial/wizv2/scanner.py +5 -1
  14. regscale/integrations/commercial/wizv2/utils.py +118 -72
  15. regscale/integrations/public/fedramp/fedramp_cis_crm.py +98 -20
  16. regscale/models/integration_models/cisa_kev_data.json +140 -3
  17. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  18. regscale/models/regscale_models/catalog.py +16 -0
  19. regscale/models/regscale_models/file.py +2 -1
  20. regscale/models/regscale_models/form_field_value.py +59 -1
  21. regscale/models/regscale_models/issue.py +47 -0
  22. regscale/models/regscale_models/organization.py +30 -0
  23. regscale/models/regscale_models/regscale_model.py +13 -5
  24. regscale/models/regscale_models/security_control.py +47 -0
  25. regscale/models/regscale_models/security_plan.py +32 -0
  26. {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.5.0.dist-info}/METADATA +1 -1
  27. {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.5.0.dist-info}/RECORD +33 -31
  28. tests/fixtures/test_fixture.py +33 -4
  29. tests/regscale/core/test_app.py +53 -32
  30. {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.5.0.dist-info}/LICENSE +0 -0
  31. {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.5.0.dist-info}/WHEEL +0 -0
  32. {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.5.0.dist-info}/entry_points.txt +0 -0
  33. {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.5.0.dist-info}/top_level.txt +0 -0
regscale/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = "6.20.4.1"
+__version__ = "6.20.5.0"
regscale/core/app/internal/model_editor.py CHANGED
@@ -1365,7 +1365,7 @@ def build_object_field_list(obj: object) -> None:
     """
     # Build the list of fields for the model type
     pos_dict = obj.get_sort_position_dict()
-    field_names = obj.model_fields.keys()
+    field_names = obj.__class__.model_fields.keys()
     extra_fields = obj.get_extra_fields()
     include_field_list = obj.get_include_fields()
     for item in include_field_list:
@@ -1375,7 +1375,7 @@ def build_object_field_list(obj: object) -> None:
         field_makeup = FieldMakeup(
             cur_field,
             convert_property_to_column_label(cur_field),
-            get_field_data_type(obj.model_fields[cur_field]),
+            get_field_data_type(obj.__class__.model_fields[cur_field]),
         )
         field_makeup.sort_order = find_sort_pos(cur_field, pos_dict)
         field_makeup.enum_values = obj.get_enum_values(cur_field)
@@ -1444,7 +1444,7 @@ def is_field_required(obj: object, field_name: str) -> bool:
     :return: bool indicating if the field is required
     :rtype: bool
     """
-    field_info = obj.model_fields[field_name]
+    field_info = obj.__class__.model_fields[field_name]
     if field_info.annotation == dict:
         return True
     if field_info.annotation == int:
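
These three changes all follow the same pattern: read `model_fields` from the class rather than the instance, which lines up with recent Pydantic 2.x releases deprecating instance-level access. A minimal stand-alone sketch (the `Demo` model is hypothetical, not a RegScale model):

```python
from pydantic import BaseModel


class Demo(BaseModel):
    title: str
    severity: int = 0


obj = Demo(title="example")
# Instance access (obj.model_fields) is deprecated in newer Pydantic 2.x;
# the class attribute returns the same name -> FieldInfo mapping.
print(list(obj.__class__.model_fields))  # ['title', 'severity']
```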
regscale/core/app/utils/regscale_utils.py CHANGED
@@ -19,6 +19,8 @@ from regscale.core.app.utils.app_utils import convert_to_string, error_and_exit,
 from regscale.models import Data
 from regscale.models.regscale_models.modules import Modules
 
+from regscale.models.regscale_models.form_field_value import FormFieldValue
+
 logger = create_logger()
 
 
@@ -316,3 +318,38 @@ def create_properties(
 
     new_props = Property.batch_create(properties)
     return len(new_props) > 0
+
+
+def normalize_controlid(name: str) -> str:
+    """
+    Normalizes a control Id String
+    e.g. AC-01(02) -> ac-1.2
+    AC-01a.[02] -> ac-1.a.2
+
+    :param str name: Control Id String to normalize
+    :return: normalized Control Id String
+    :rtype: str
+    """
+    # AC-01(02)
+    # AC-01a.[02] vs. AC-1a.2
+    new_string = name.replace(" ", "")
+    new_string = new_string.replace("(", ".")  # AC-01.02) #AC-01a.[02]
+    new_string = new_string.replace(")", "")  # AC-01.02 #AC-01.a.[02]
+    new_string = new_string.replace("[", "")  # AC-01.02 #AC-01.a.02]
+    new_string = new_string.replace("]", "")  # AC-01.02 #AC-01.a.02
+
+    parts = new_string.split(".")
+    new_string = ""
+    for part in parts:
+        part = part.lstrip("0")
+        new_string += f"{part}."
+    new_string = new_string.rstrip(".")  # AC-01.2 #AC-01.a.2
+
+    parts = new_string.split("-")
+    new_string = ""
+    for part in parts:
+        part = part.lstrip("0")
+        new_string += f"{part}-"
+    new_string = new_string.rstrip("-")  # AC-1.2 #AC-1.a.2
+
+    return new_string.lower()
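
A quick sketch of how the new helper behaves, with outputs traced by hand through the steps above:

```python
from regscale.core.app.utils.regscale_utils import normalize_controlid

# Parentheses become dots, brackets are dropped, and leading zeros are
# stripped from each dot- and dash-delimited segment before lowercasing.
assert normalize_controlid("AC-01(02)") == "ac-1.2"
assert normalize_controlid("AC-02") == "ac-2"
```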
regscale/core/utils/date.py CHANGED
@@ -4,11 +4,10 @@
 
 import datetime
 import logging
-from typing import List, Union, Optional
+from typing import Any, List, Optional, Union
 
+from dateutil.parser import ParserError, parse
 from pandas import Timestamp
-from dateutil.parser import parse, ParserError
-
 
 logger = logging.getLogger("regscale")
 default_date_format = "%Y-%m-%dT%H:%M:%S%z"
@@ -236,3 +235,27 @@ def normalize_date(dt: str, fmt: str) -> str:
     except ValueError:
         return dt
     return dt
+
+
+def normalize_timestamp(timestamp_value: Any) -> int:
+    """
+    Normalize timestamp to seconds, handling both seconds and milliseconds.
+
+    :param Any timestamp_value: The timestamp value to normalize
+    :return: Timestamp in seconds
+    :raises ValueError: If the timestamp is invalid
+    :rtype: int
+    """
+    if isinstance(timestamp_value, str):
+        if not timestamp_value.isdigit():
+            raise ValueError(f"Invalid timestamp value: {timestamp_value}")
+        timestamp_int = int(timestamp_value)
+    elif isinstance(timestamp_value, (int, float)):
+        timestamp_int = int(timestamp_value)
+    else:
+        raise ValueError(f"Invalid timestamp value type: {type(timestamp_value)}, defaulting to current datetime")
+
+    # Determine if it's epoch seconds or milliseconds based on magnitude
+    if timestamp_int > 9999999999:  # Likely milliseconds (13+ digits)
+        return timestamp_int // 1000
+    return timestamp_int
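
A short usage sketch of the new helper; the magnitude check means 13-digit epoch-millisecond values collapse to seconds while 10-digit values pass through:

```python
from regscale.core.utils.date import normalize_timestamp

assert normalize_timestamp(1700000000) == 1700000000        # already seconds
assert normalize_timestamp(1700000000123) == 1700000000     # milliseconds -> seconds
assert normalize_timestamp("1700000000123") == 1700000000   # numeric strings accepted
```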
regscale/integrations/commercial/defender.py CHANGED
@@ -27,6 +27,7 @@ from regscale.core.app.utils.app_utils import (
     uncamel_case,
     save_data_to,
 )
+from regscale.models.app_models.click import NotRequiredIf
 from regscale.models import regscale_id, regscale_module, regscale_ssp_id, Asset, Component, File, Issue
 from regscale.models.integration_models.defender_data import DefenderData
 from regscale.models.integration_models.flat_file_importer import FlatFileImporter
@@ -128,6 +129,8 @@ def sync_cloud_resources(regscale_ssp_id: int):
     help="The name of the saved query to export from Microsoft Defender for Cloud resource graph queries.",
     prompt="Enter the name of the query to export",
     default=None,
+    cls=NotRequiredIf,
+    not_required_if=["all_queries"],
 )
 @click.option(
     "--no_upload",
regscale/integrations/commercial/qualys/__init__.py CHANGED
@@ -7,41 +7,43 @@ import os
 import pprint
 import traceback
 from asyncio import sleep
-from datetime import datetime, timezone, timedelta
+from datetime import datetime, timedelta, timezone
 from json import JSONDecodeError
-from typing import Optional, Any, Union, Tuple
+from pathlib import Path
+from typing import Any, Optional, Tuple, Union
 from urllib.parse import urljoin
 
 import click
 import requests
 import xmltodict
-from pathlib import Path
 from requests import Session
-from rich.progress import TaskID, Progress, TextColumn, BarColumn, SpinnerColumn, TimeElapsedColumn
+from rich.progress import BarColumn, Progress, SpinnerColumn, TaskID, TextColumn, TimeElapsedColumn
 
 from regscale.core.app.utils.app_utils import (
     check_file_path,
     check_license,
+    create_progress_object,
+    error_and_exit,
     get_current_datetime,
     save_data_to,
-    error_and_exit,
-    create_progress_object,
 )
 from regscale.core.app.utils.file_utils import download_from_s3
+from regscale.integrations.commercial.qualys.containers import fetch_all_vulnerabilities
 from regscale.integrations.commercial.qualys.qualys_error_handler import QualysErrorHandler
 from regscale.integrations.commercial.qualys.scanner import QualysTotalCloudJSONLIntegration
 from regscale.integrations.commercial.qualys.variables import QualysVariables
 from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
 from regscale.integrations.variables import ScannerVariables
-from regscale.models import Asset, Search, regscale_models, Issue
-from regscale.models.app_models.click import regscale_ssp_id, save_output_to, NotRequiredIf
+from regscale.models import Asset, Issue, Search, regscale_models
+from regscale.models.app_models.click import NotRequiredIf, regscale_ssp_id, save_output_to
 from regscale.models.integration_models.flat_file_importer import FlatFileImporter
 from regscale.models.integration_models.qualys import (
+    Qualys,
     QualysContainerScansImporter,
-    QualysWasScansImporter,
     QualysPolicyScansImporter,
-    Qualys,
+    QualysWasScansImporter,
 )
+from regscale.validation.record import validate_regscale_object
 
 # Create logger for this module
 logger = logging.getLogger("regscale")
@@ -332,14 +334,29 @@ class FindingProgressTracker:
     required=False,
     help="Enable/disable SSL certificate verification for API calls.",
 )
+@click.option(
+    "--containers",
+    type=click.BOOL,
+    help="To disable fetching containers, use False. Defaults to True.",
+    default=True,
+)
 def import_total_cloud(
-    regscale_ssp_id: int, include_tags: str, exclude_tags: str, vulnerability_creation: str, ssl_verify: bool
+    regscale_ssp_id: int,
+    include_tags: str,
+    exclude_tags: str,
+    vulnerability_creation: str,
+    ssl_verify: bool,
+    containers: bool,
 ):
     """
     Import Qualys Total Cloud Assets and Vulnerabilities using JSONL scanner implementation.
 
     This command uses the JSONLScannerIntegration class for improved efficiency and memory management.
     """
+    if not validate_regscale_object(regscale_ssp_id, "securityplans"):
+        logger.warning("SSP #%i is not a valid RegScale Security Plan.", regscale_ssp_id)
+        return
+    containers_lst = []
     try:
         # Configure scanner variables and fetch data
         _configure_scanner_variables(vulnerability_creation, ssl_verify)
@@ -347,8 +364,14 @@ def import_total_cloud(
         if not response_data:
             return
 
+        if containers:
+            # Fetch containers and container findings
+            containers_lst = fetch_all_vulnerabilities()
+
         # Initialize and run integration
-        integration = _initialize_integration(regscale_ssp_id, response_data, vulnerability_creation, ssl_verify)
+        integration = _initialize_integration(
+            regscale_ssp_id, response_data, vulnerability_creation, ssl_verify, containers_lst
+        )
         _run_integration_import(integration)
 
         logger.info("Qualys Total Cloud data imported successfully with JSONL scanner.")
@@ -460,13 +483,14 @@ def _fetch_qualys_api_data(include_tags, exclude_tags):
     return None
 
 
-def _initialize_integration(regscale_ssp_id, response_data, vulnerability_creation, ssl_verify):
+def _initialize_integration(regscale_ssp_id, response_data, vulnerability_creation, ssl_verify, containers):
     """Initialize the scanner integration with appropriate settings.
 
     :param int regscale_ssp_id: RegScale SSP ID
     :param dict response_data: Parsed XML data from API
     :param str vulnerability_creation: Vulnerability creation mode
     :param bool ssl_verify: SSL verification setting
+    :param list containers: List of containers
     :return: Initialized integration object
     """
     # Build integration kwargs
@@ -475,6 +499,7 @@ def _initialize_integration(regscale_ssp_id, response_data, vulnerability_creati
         "xml_data": response_data,
         "vulnerability_creation": vulnerability_creation or ScannerVariables.vulnerabilityCreation,
         "ssl_verify": ssl_verify if ssl_verify is not None else ScannerVariables.sslVerify,
+        "containers": containers,
     }
 
     # Add thread workers if available
@@ -634,7 +659,7 @@ def _count_jsonl_items(integration):
 
 def _create_progress_bar():
     """Create a progress bar that doesn't reset at 100%."""
-    from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TimeElapsedColumn
+    from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
 
     class NonResettingProgress(Progress):
         def update(self, task_id, **fields):
@@ -1120,6 +1145,7 @@ def process_files_with_importer(
     :param Optional[bool] upload_file: Whether to upload the file to RegScale after processing, defaults to True.
     """
     import csv
+
     from openpyxl import Workbook
 
     if s3_bucket:
regscale/integrations/commercial/qualys/containers.py ADDED
@@ -0,0 +1,324 @@
+"""
+Container operations module for Qualys CS API integration.
+"""
+
+import logging
+import traceback
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from functools import lru_cache
+from json import JSONDecodeError
+from typing import Dict, List, Optional
+from urllib.parse import urljoin
+
+from requests import RequestException
+from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
+
+# Create logger for this module
+logger = logging.getLogger("regscale")
+
+
+@lru_cache(maxsize=1)
+def auth_cs_api() -> tuple[str, dict]:
+    """
+    Authenticate the Qualys CS API using form-based authentication
+
+    :return: A tuple of the base URL and a dictionary of headers
+    :rtype: tuple[str, dict]
+    """
+    from . import QUALYS_API, _get_config  # noqa: C0415
+
+    config = _get_config()
+    qualys_url = config.get("qualysUrl")
+    user = config.get("qualysUserName")
+    password = config.get("qualysPassword")
+
+    # Update headers to match the curl command
+    auth_headers = {"X-Requested-With": "RegScale CLI"}
+
+    # Prepare form data for authentication
+    auth_data = {"username": user, "password": password, "permissions": "true", "token": "true"}
+
+    try:
+        # Make authentication request
+        # https://gateway.qg3.apps.qualys.com/auth
+
+        if qualys_url:
+            base_url = qualys_url.replace("qualysguard", "gateway")
+        else:
+            base_url = qualys_url
+
+        auth_url = urljoin(base_url, "/auth")
+        response = QUALYS_API.post(url=auth_url, headers=auth_headers, data=auth_data)
+
+        if response.ok:
+            logger.info("Successfully authenticated with Qualys CS API")
+
+            # Parse the response to extract the JWT token
+            try:
+                response_text = response.content.decode("utf-8")
+                # The response should contain the JWT token
+                # You might need to parse JSON or extract the token from the response
+                # For now, let's assume the token is in the response text
+
+                # Add Authorization Bearer header
+                auth_headers["Authorization"] = f"Bearer {response_text}"
+
+                logger.debug("Added Authorization Bearer header to auth_headers")
+            except (UnicodeDecodeError, AttributeError) as e:
+                logger.warning("Could not decode response content for Authorization header: %s", e)
+                logger.debug(
+                    "Response content type: %s, length: %s",
+                    type(response.content),
+                    len(response.content) if hasattr(response.content, "__len__") else "unknown",
+                )
+                # Continue without Authorization header if parsing fails
+        else:
+            raise RequestException(f"Authentication failed with status code: {response.status_code}")
+
+    except Exception as e:
+        logger.error("Error during authentication: %s", e)
+        raise
+
+    return base_url, auth_headers
+
+
+def _make_api_request(current_url: str, headers: dict, params: Optional[Dict] = None) -> dict:
+    """
+    Make API request to fetch containers from Qualys CS API
+
+    :param str current_url: The URL for the API request
+    :param dict headers: Headers to include in the request
+    :param Dict params: Optional query parameters for pagination
+    :return: Response data containing containers and response object
+    :rtype: dict
+    """
+    from . import QUALYS_API  # noqa: C0415
+
+    # Make API request
+    response = QUALYS_API.get(url=current_url, headers=headers, params=params)
+
+    # Validate response
+    if not response.ok:
+        logger.error("API request failed: %s - %s", response.status_code, response.text)
+        return {"data": [], "_response": response}
+
+    try:
+        response_data = response.json()
+        response_data["_response"] = response  # Include response object for headers
+        return response_data
+    except JSONDecodeError as e:
+        logger.error("Failed to parse JSON response: %s", e)
+        return {"data": [], "_response": response}
+
+
+def _parse_link_header(link_header: str) -> Optional[str]:
+    """
+    Parse the Link header to find the next page URL.
+
+    :param str link_header: The Link header value
+    :return: The next page URL or None if not found
+    :rtype: Optional[str]
+    """
+    if not link_header:
+        logger.debug("No Link header found, assuming no more pages")
+        return None
+
+    # Parse the Link header to find the next page URL
+    # Format: <url>;rel=next
+    for link in link_header.split(","):
+        link = link.strip()
+        if "rel=next" in link:
+            # Extract URL from <url>;rel=next format
+            url_start = link.find("<") + 1
+            url_end = link.find(">")
+            if 0 < url_start < url_end:
+                return link[url_start:url_end]
+
+    logger.debug("No next page URL found in Link header")
+    return None
+
+
+def _fetch_paginated_data(endpoint: str, filters: Optional[Dict] = None, limit: int = 100) -> List[Dict]:
+    """
+    Generic function to fetch paginated data from Qualys CS API
+
+    :param str endpoint: The API endpoint (e.g., 'containers/list', 'images/list')
+    :param Optional[Dict] filters: Filters to apply to the request
+    :param int limit: Number of items to fetch per page
+    :return: A list of items from all pages
+    :rtype: List[Dict]
+    """
+    all_items = []
+    page: int = 1
+
+    try:
+        # Get authentication
+        base_url, headers = auth_cs_api()
+
+        # Prepare base parameters
+        params = {"limit": limit}
+
+        # Add filters if provided
+        if filters:
+            params.update(filters)
+
+        # Track the current URL for pagination
+        current_url = urljoin(base_url, f"/csapi/v1.3/{endpoint}")
+
+        # Create progress bar for pagination
+        progress = Progress(
+            SpinnerColumn(),
+            TextColumn("[bold blue]{task.description}"),
+            BarColumn(),
+            TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+            TextColumn("•"),
+            TimeElapsedColumn(),
+            console=None,
+        )
+
+        with progress:
+            task = progress.add_task(f"[green]Fetching {endpoint} data...", total=None)  # Unknown total for pagination
+
+            while current_url:
+                # Make API request
+                response_data = _make_api_request(current_url, headers, params)
+
+                # Extract items from current page
+                current_items = response_data.get("data", [])
+                all_items.extend(current_items)
+
+                # Update progress description with current status
+                progress.update(
+                    task, description=f"[green]Fetching {endpoint} data... (Page {page}, Total: {len(all_items)})"
+                )
+
+                logger.debug("Fetched page: %s items (Total so far: %s)", page, len(all_items))
+
+                # Check for next page using the Link header
+                response = response_data.get("_response")
+                if not response or not hasattr(response, "headers"):
+                    # If no response object available, assume single page
+                    break
+
+                link_header = response.headers.get("link", "")
+                next_url = _parse_link_header(link_header)
+
+                if not next_url:
+                    break
+
+                # Update current URL for next iteration
+                current_url = next_url
+                page += 1
+
+                # Clear params for subsequent requests since they're in the URL
+                params = {}
+
+    except Exception as e:
+        logger.error("Error fetching data from %s: %s", current_url, e)
+        logger.debug(traceback.format_exc())
+
+    logger.info("Completed: Fetched %s total items from %s", len(all_items), endpoint)
+    return all_items
+
+
+def fetch_all_containers(filters: Optional[Dict] = None, limit: int = 100) -> List[Dict]:
+    """
+    Fetch all containers from Qualys CS API with pagination
+
+    :param Optional[Dict] filters: Filters to apply to the containers
+    :param int limit: Number of containers to fetch per page
+    :return: A list of containers
+    :rtype: List[Dict]
+    """
+    return _fetch_paginated_data("containers/list", filters, limit)
+
+
+def fetch_all_images(filters: Optional[Dict] = None, limit: int = 100) -> List[Dict]:
+    """
+    Fetch all images from Qualys CS API with pagination
+
+    :param Optional[Dict] filters: Filters to apply to the images
+    :param int limit: Number of images to fetch per page
+    :return: A list of images
+    :rtype: List[Dict]
+    """
+    return _fetch_paginated_data("images/list", filters, limit)
+
+
+def fetch_container_vulns(container_sha: str) -> List[Dict]:
+    """
+    Fetch vulnerabilities for a specific container from Qualys CS API
+
+    :param str container_sha: The SHA of the container
+    :return: A list of vulnerabilities
+    :rtype: List[Dict]
+    """
+    base_url, headers = auth_cs_api()
+    current_url = urljoin(base_url, f"/csapi/v1.3/containers/{container_sha}/vuln")
+    response_data = _make_api_request(current_url, headers)
+    return response_data.get("details", {}).get("vulns", [])
+
+
+def fetch_all_vulnerabilities(filters: Optional[Dict] = None, limit: int = 100, max_workers: int = 10) -> List[Dict]:
+    """
+    Fetch all containers and a list of vulnerabilities for each container from Qualys CS API with pagination
+
+    :param Optional[Dict] filters: Filters to apply to the containers
+    :param int limit: Number of containers to fetch per page
+    :param int max_workers: Maximum number of worker threads for concurrent vulnerability fetching
+    :return: A list of containers with vulnerabilities
+    :rtype: List[Dict]
+    """
+    containers = fetch_all_containers(filters, limit)
+
+    if not containers:
+        logger.info("No containers found to fetch vulnerabilities for")
+        return containers
+
+    # Create progress bar for fetching vulnerabilities
+    progress = Progress(
+        SpinnerColumn(),
+        TextColumn("[bold blue]{task.description}"),
+        BarColumn(),
+        TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+        TextColumn("•"),
+        TimeElapsedColumn(),
+        console=None,
+    )
+
+    def fetch_container_vulns_with_progress(container):
+        """Helper function to fetch vulnerabilities for a single container with progress tracking."""
+        container_sha = container.get("sha")
+        if not container_sha:
+            logger.warning("Container missing SHA, skipping vulnerability fetch")
+            return container, []
+
+        try:
+            vulns = fetch_container_vulns(container_sha)
+            logger.debug("Fetched %s vulnerabilities for container %s...", len(vulns), container_sha[:8])
+            return container, vulns
+        except Exception as e:
+            logger.error("Error fetching vulnerabilities for container %s: %s", container_sha, e)
+            return container, []
+
+    with progress:
+        task = progress.add_task(
+            f"[yellow]Fetching vulnerabilities for {len(containers)} containers...", total=len(containers)
+        )
+
+        # Use ThreadPoolExecutor for concurrent processing
+        with ThreadPoolExecutor(max_workers=max_workers) as executor:
+            # Submit all tasks
+            future_to_container = {}
+            for container in containers:
+                future = executor.submit(fetch_container_vulns_with_progress, container)
+                future_to_container[future] = container
+
+            # Process completed tasks and update progress
+            for future in as_completed(future_to_container):
+                container, vulns = future.result()
+                container["vulnerabilities"] = vulns
+                progress.update(task, advance=1)
+
+    logger.info("Completed fetching vulnerabilities for %s containers using %s workers", len(containers), max_workers)
+    return containers
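
To close, a minimal sketch of driving the new module directly (outside the CLI): pagination follows `<url>;rel=next` Link headers, and `fetch_all_vulnerabilities` annotates each container dict in place. The example URL is illustrative only:

```python
from regscale.integrations.commercial.qualys.containers import (
    _parse_link_header,
    fetch_all_vulnerabilities,
)

# Link-header pagination: the next-page URL is extracted from <url>;rel=next
next_url = _parse_link_header("<https://example.com/csapi/v1.3/containers/list?page=2>;rel=next")
assert next_url == "https://example.com/csapi/v1.3/containers/list?page=2"

# Fetch every container, then its vulnerabilities across worker threads;
# each container dict gains a "vulnerabilities" key.
containers = fetch_all_vulnerabilities(limit=50, max_workers=5)
for c in containers:
    print(c.get("sha"), len(c.get("vulnerabilities", [])))
```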