scanoss 1.27.1__py3-none-any.whl → 1.43.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. protoc_gen_swagger/options/annotations_pb2.py +18 -12
  2. protoc_gen_swagger/options/annotations_pb2.pyi +48 -0
  3. protoc_gen_swagger/options/annotations_pb2_grpc.py +20 -0
  4. protoc_gen_swagger/options/openapiv2_pb2.py +110 -99
  5. protoc_gen_swagger/options/openapiv2_pb2.pyi +1317 -0
  6. protoc_gen_swagger/options/openapiv2_pb2_grpc.py +20 -0
  7. scanoss/__init__.py +1 -1
  8. scanoss/api/common/v2/scanoss_common_pb2.py +49 -22
  9. scanoss/api/common/v2/scanoss_common_pb2_grpc.py +25 -0
  10. scanoss/api/components/v2/scanoss_components_pb2.py +68 -43
  11. scanoss/api/components/v2/scanoss_components_pb2_grpc.py +83 -22
  12. scanoss/api/cryptography/v2/scanoss_cryptography_pb2.py +136 -47
  13. scanoss/api/cryptography/v2/scanoss_cryptography_pb2_grpc.py +650 -33
  14. scanoss/api/dependencies/v2/scanoss_dependencies_pb2.py +56 -37
  15. scanoss/api/dependencies/v2/scanoss_dependencies_pb2_grpc.py +64 -12
  16. scanoss/api/geoprovenance/v2/scanoss_geoprovenance_pb2.py +74 -31
  17. scanoss/api/geoprovenance/v2/scanoss_geoprovenance_pb2_grpc.py +252 -13
  18. scanoss/api/licenses/__init__.py +23 -0
  19. scanoss/api/licenses/v2/__init__.py +23 -0
  20. scanoss/api/licenses/v2/scanoss_licenses_pb2.py +84 -0
  21. scanoss/api/licenses/v2/scanoss_licenses_pb2_grpc.py +302 -0
  22. scanoss/api/scanning/v2/scanoss_scanning_pb2.py +32 -21
  23. scanoss/api/scanning/v2/scanoss_scanning_pb2_grpc.py +49 -8
  24. scanoss/api/semgrep/v2/scanoss_semgrep_pb2.py +50 -23
  25. scanoss/api/semgrep/v2/scanoss_semgrep_pb2_grpc.py +151 -16
  26. scanoss/api/vulnerabilities/v2/scanoss_vulnerabilities_pb2.py +78 -31
  27. scanoss/api/vulnerabilities/v2/scanoss_vulnerabilities_pb2_grpc.py +282 -18
  28. scanoss/cli.py +1000 -186
  29. scanoss/components.py +80 -50
  30. scanoss/constants.py +7 -1
  31. scanoss/cryptography.py +89 -55
  32. scanoss/csvoutput.py +13 -7
  33. scanoss/cyclonedx.py +141 -9
  34. scanoss/data/build_date.txt +1 -1
  35. scanoss/data/osadl-copyleft.json +133 -0
  36. scanoss/delta.py +197 -0
  37. scanoss/export/__init__.py +23 -0
  38. scanoss/export/dependency_track.py +227 -0
  39. scanoss/file_filters.py +2 -163
  40. scanoss/filecount.py +37 -38
  41. scanoss/gitlabqualityreport.py +214 -0
  42. scanoss/header_filter.py +563 -0
  43. scanoss/inspection/policy_check/__init__.py +0 -0
  44. scanoss/inspection/policy_check/dependency_track/__init__.py +0 -0
  45. scanoss/inspection/policy_check/dependency_track/project_violation.py +479 -0
  46. scanoss/inspection/{policy_check.py → policy_check/policy_check.py} +65 -72
  47. scanoss/inspection/policy_check/scanoss/__init__.py +0 -0
  48. scanoss/inspection/{copyleft.py → policy_check/scanoss/copyleft.py} +89 -73
  49. scanoss/inspection/{undeclared_component.py → policy_check/scanoss/undeclared_component.py} +52 -46
  50. scanoss/inspection/summary/__init__.py +0 -0
  51. scanoss/inspection/summary/component_summary.py +170 -0
  52. scanoss/inspection/{license_summary.py → summary/license_summary.py} +62 -12
  53. scanoss/inspection/summary/match_summary.py +341 -0
  54. scanoss/inspection/utils/file_utils.py +44 -0
  55. scanoss/inspection/utils/license_utils.py +57 -71
  56. scanoss/inspection/utils/markdown_utils.py +63 -0
  57. scanoss/inspection/{inspect_base.py → utils/scan_result_processor.py} +53 -67
  58. scanoss/osadl.py +125 -0
  59. scanoss/scanner.py +135 -253
  60. scanoss/scanners/folder_hasher.py +47 -32
  61. scanoss/scanners/scanner_hfh.py +50 -18
  62. scanoss/scanoss_settings.py +33 -3
  63. scanoss/scanossapi.py +23 -25
  64. scanoss/scanossbase.py +1 -1
  65. scanoss/scanossgrpc.py +543 -289
  66. scanoss/services/dependency_track_service.py +132 -0
  67. scanoss/spdxlite.py +11 -4
  68. scanoss/threadeddependencies.py +19 -18
  69. scanoss/threadedscanning.py +10 -0
  70. scanoss/utils/scanoss_scan_results_utils.py +41 -0
  71. scanoss/winnowing.py +71 -19
  72. {scanoss-1.27.1.dist-info → scanoss-1.43.1.dist-info}/METADATA +8 -5
  73. scanoss-1.43.1.dist-info/RECORD +110 -0
  74. scanoss/inspection/component_summary.py +0 -94
  75. scanoss-1.27.1.dist-info/RECORD +0 -87
  76. {scanoss-1.27.1.dist-info → scanoss-1.43.1.dist-info}/WHEEL +0 -0
  77. {scanoss-1.27.1.dist-info → scanoss-1.43.1.dist-info}/entry_points.txt +0 -0
  78. {scanoss-1.27.1.dist-info → scanoss-1.43.1.dist-info}/licenses/LICENSE +0 -0
  79. {scanoss-1.27.1.dist-info → scanoss-1.43.1.dist-info}/top_level.txt +0 -0
@@ -6,6 +6,7 @@ from typing import Dict, List, Literal, Optional
 
  from progress.bar import Bar
 
+ from scanoss.constants import DEFAULT_HFH_DEPTH
  from scanoss.file_filters import FileFilters
  from scanoss.scanoss_settings import ScanossSettings
  from scanoss.scanossbase import ScanossBase
@@ -15,8 +16,6 @@ from scanoss.utils.simhash import WordFeatureSet, fingerprint, simhash, vectoriz
 
  MINIMUM_FILE_COUNT = 8
  MINIMUM_CONCATENATED_NAME_LENGTH = 32
- MAXIMUM_FILE_NAME_LENGTH = 32
-
 
  class DirectoryNode:
  """
@@ -72,6 +71,12 @@ class FolderHasher:
 
  It builds a directory tree (DirectoryNode) and computes the associated
  hash data for the folder.
+
+ Args:
+ scan_dir (str): The directory to be hashed.
+ config (FolderHasherConfig): Configuration parameters for the folder hasher.
+ scanoss_settings (Optional[ScanossSettings]): Optional settings for Scanoss.
+ depth (int): How many levels to hash from the root directory (default: 1).
  """
 
  def __init__(
@@ -79,6 +84,7 @@ class FolderHasher:
  scan_dir: str,
  config: FolderHasherConfig,
  scanoss_settings: Optional[ScanossSettings] = None,
+ depth: int = DEFAULT_HFH_DEPTH,
  ):
  self.base = ScanossBase(
  debug=config.debug,
@@ -101,6 +107,7 @@ class FolderHasher:
 
  self.scan_dir = scan_dir
  self.tree = None
+ self.depth = depth
 
  def hash_directory(self, path: str) -> dict:
  """
@@ -123,7 +130,10 @@ class FolderHasher:
 
  return tree
 
- def _build_root_node(self, path: str) -> DirectoryNode:
+ def _build_root_node(
+ self,
+ path: str,
+ ) -> DirectoryNode:
  """
  Build a directory tree from the given path with file information.
 
@@ -140,47 +150,48 @@ class FolderHasher:
  root_node = DirectoryNode(str(root))
 
  all_files = [
- f for f in root.rglob('*') if f.is_file() and len(f.name.encode('utf-8')) <= MAXIMUM_FILE_NAME_LENGTH
+ f for f in root.rglob('*') if f.is_file()
  ]
  filtered_files = self.file_filters.get_filtered_files_from_files(all_files, str(root))
 
  # Sort the files by name to ensure the hash is the same for the same folder
  filtered_files.sort()
 
- bar = Bar('Hashing files...', max=len(filtered_files))
- for file_path in filtered_files:
- try:
- file_path_obj = Path(file_path) if isinstance(file_path, str) else file_path
- full_file_path = file_path_obj if file_path_obj.is_absolute() else root / file_path_obj
+ bar_ctx = Bar('Hashing files...', max=len(filtered_files))
 
- self.base.print_debug(f'\nHashing file {str(full_file_path)}')
+ with bar_ctx as bar:
+ full_file_path = ''
+ for file_path in filtered_files:
+ try:
+ file_path_obj = Path(file_path) if isinstance(file_path, str) else file_path
+ full_file_path = file_path_obj if file_path_obj.is_absolute() else root / file_path_obj
 
- file_bytes = full_file_path.read_bytes()
- key = CRC64.get_hash_buff(file_bytes)
- key_str = ''.join(f'{b:02x}' for b in key)
- rel_path = str(full_file_path.relative_to(root))
+ self.base.print_debug(f'\nHashing file {str(full_file_path)}')
 
- file_item = DirectoryFile(rel_path, key, key_str)
+ file_bytes = full_file_path.read_bytes()
+ key = CRC64.get_hash_buff(file_bytes)
+ key_str = ''.join(f'{b:02x}' for b in key)
+ rel_path = str(full_file_path.relative_to(root))
 
- current_node = root_node
- for part in Path(rel_path).parent.parts:
- child_path = str(Path(current_node.path) / part)
- if child_path not in current_node.children:
- current_node.children[child_path] = DirectoryNode(child_path)
- current_node = current_node.children[child_path]
- current_node.files.append(file_item)
+ file_item = DirectoryFile(rel_path, key, key_str)
 
- root_node.files.append(file_item)
+ current_node = root_node
+ for part in Path(rel_path).parent.parts:
+ child_path = str(Path(current_node.path) / part)
+ if child_path not in current_node.children:
+ current_node.children[child_path] = DirectoryNode(child_path)
+ current_node = current_node.children[child_path]
+ current_node.files.append(file_item)
 
- except Exception as e:
- self.base.print_debug(f'Skipping file {full_file_path}: {str(e)}')
+ root_node.files.append(file_item)
 
- bar.next()
+ except Exception as e:
+ self.base.print_debug(f'Skipping file {full_file_path}: {str(e)}')
 
- bar.finish()
+ bar.next()
  return root_node
 
- def _hash_calc_from_node(self, node: DirectoryNode) -> dict:
+ def _hash_calc_from_node(self, node: DirectoryNode, current_depth: int = 1) -> dict:
  """
  Recursively compute folder hash data for a directory node.
 
@@ -189,12 +200,13 @@ class FolderHasher:
 
  Args:
  node (DirectoryNode): The directory node to compute the hash for.
+ current_depth (int): The current depth level (1-based, root is depth 1).
 
  Returns:
  dict: The computed hash data for the node.
  """
  hash_data = self._hash_calc(node)
-
+
  # Safely calculate relative path
  try:
  node_path = Path(node.path).resolve()
@@ -204,13 +216,18 @@ class FolderHasher:
  # If relative_to fails, use the node path as is or a fallback
  rel_path = Path(node.path).name if node.path else Path('.')
 
+ # Only process children if we haven't reached the depth limit
+ children = []
+ if current_depth < self.depth:
+ children = [self._hash_calc_from_node(child, current_depth + 1) for child in node.children.values()]
+
  return {
  'path_id': str(rel_path),
  'sim_hash_names': f'{hash_data["name_hash"]:02x}' if hash_data['name_hash'] is not None else None,
  'sim_hash_content': f'{hash_data["content_hash"]:02x}' if hash_data['content_hash'] is not None else None,
  'sim_hash_dir_names': f'{hash_data["dir_hash"]:02x}' if hash_data['dir_hash'] is not None else None,
  'lang_extensions': hash_data['lang_extensions'],
- 'children': [self._hash_calc_from_node(child) for child in node.children.values()],
+ 'children': children,
  }
 
  def _hash_calc(self, node: DirectoryNode) -> dict:
@@ -237,8 +254,6 @@ class FolderHasher:
 
  for file in node.files:
  key_str = file.key_str
- if key_str in processed_hashes:
- continue
 
  file_name = os.path.basename(file.path)
 
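The new depth parameter caps how deep the folder-hash tree returned by _hash_calc_from_node goes: the root is depth 1, and children are only expanded while current_depth < self.depth, so depth=1 reports only the root folder. A standalone sketch of that rule (the walk() helper and the sample tree are illustrative only, not part of the package):

def walk(node: dict, max_depth: int, current_depth: int = 1) -> dict:
    """Mirror the 'current_depth < max_depth' check used in _hash_calc_from_node."""
    children = []
    if current_depth < max_depth:
        children = [walk(child, max_depth, current_depth + 1) for child in node.get('children', [])]
    return {'path_id': node['path_id'], 'children': children}

tree = {'path_id': '.', 'children': [{'path_id': 'src', 'children': [{'path_id': 'src/api', 'children': []}]}]}
print(walk(tree, max_depth=1))  # {'path_id': '.', 'children': []}
print(walk(tree, max_depth=2))  # keeps 'src' but drops 'src/api'
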
@@ -29,7 +29,12 @@ from typing import Dict, Optional
 
  from progress.spinner import Spinner
 
- from scanoss.constants import DEFAULT_HFH_RANK_THRESHOLD
+ from scanoss.constants import (
+ DEFAULT_HFH_DEPTH,
+ DEFAULT_HFH_MIN_ACCEPTED_SCORE,
+ DEFAULT_HFH_RANK_THRESHOLD,
+ DEFAULT_HFH_RECURSIVE_THRESHOLD,
+ )
  from scanoss.cyclonedx import CycloneDx
  from scanoss.file_filters import FileFilters
  from scanoss.scanners.folder_hasher import FolderHasher
@@ -48,13 +53,17 @@ class ScannerHFH:
  and calculates simhash values based on file names and content to detect folder-level similarities.
  """
 
- def __init__(
+ def __init__( # noqa: PLR0913
  self,
  scan_dir: str,
  config: ScannerConfig,
  client: Optional[ScanossGrpc] = None,
  scanoss_settings: Optional[ScanossSettings] = None,
  rank_threshold: int = DEFAULT_HFH_RANK_THRESHOLD,
+ depth: int = DEFAULT_HFH_DEPTH,
+ recursive_threshold: float = DEFAULT_HFH_RECURSIVE_THRESHOLD,
+ min_accepted_score: float = DEFAULT_HFH_MIN_ACCEPTED_SCORE,
+ use_grpc: bool = False,
  ):
  """
  Initialize the ScannerHFH.
@@ -65,6 +74,9 @@ class ScannerHFH:
  client (ScanossGrpc): gRPC client for communicating with the scanning service.
  scanoss_settings (Optional[ScanossSettings]): Optional settings for Scanoss.
  rank_threshold (int): Get results with rank below this threshold (default: 5).
+ depth (int): How many levels to scan (default: 1).
+ recursive_threshold (float): Minimum score threshold to consider a match (default: 0.25).
+ min_accepted_score (float): Only show results with a score at or above this threshold (default: 0.15).
  """
  self.base = ScanossBase(
  debug=config.debug,
@@ -87,12 +99,29 @@ class ScannerHFH:
  scan_dir=scan_dir,
  config=config,
  scanoss_settings=scanoss_settings,
+ depth=depth,
  )
 
  self.scan_dir = scan_dir
  self.client = client
  self.scan_results = None
  self.rank_threshold = rank_threshold
+ self.recursive_threshold = recursive_threshold
+ self.min_accepted_score = min_accepted_score
+ self.use_grpc = use_grpc
+
+ def _execute_grpc_scan(self, hfh_request: Dict) -> None:
+ """
+ Execute folder hash scan.
+
+ Args:
+ hfh_request: Request dictionary for the gRPC call
+ """
+ try:
+ self.scan_results = self.client.folder_hash_scan(hfh_request, self.use_grpc)
+ except Exception as e:
+ self.base.print_stderr(f'Error during folder hash scan: {e}')
+ self.scan_results = None
 
  def scan(self) -> Optional[Dict]:
  """
@@ -102,29 +131,23 @@ class ScannerHFH:
  Optional[Dict]: The folder hash response from the gRPC client, or None if an error occurs.
  """
  hfh_request = {
- 'root': self.folder_hasher.hash_directory(self.scan_dir),
+ 'root': self.folder_hasher.hash_directory(path=self.scan_dir),
  'rank_threshold': self.rank_threshold,
+ 'recursive_threshold': self.recursive_threshold,
+ 'min_accepted_score': self.min_accepted_score,
  }
 
- spinner = Spinner('Scanning folder...')
- stop_spinner = False
+ spinner_ctx = Spinner('Scanning folder...')
 
- def spin():
- while not stop_spinner:
+ with spinner_ctx as spinner:
+ grpc_thread = threading.Thread(target=self._execute_grpc_scan, args=(hfh_request,))
+ grpc_thread.start()
+
+ while grpc_thread.is_alive():
  spinner.next()
  time.sleep(0.1)
 
- spinner_thread = threading.Thread(target=spin)
- spinner_thread.start()
-
- try:
- response = self.client.folder_hash_scan(hfh_request)
- if response:
- self.scan_results = response
- finally:
- stop_spinner = True
- spinner_thread.join()
- spinner.finish()
+ grpc_thread.join()
 
  return self.scan_results
 
@@ -194,7 +217,12 @@ class ScannerHFHPresenter(AbstractPresenter):
  ]
  }
 
+ get_vulnerabilities_json_request = {
+ 'components': [{'purl': purl, 'requirement': best_match_version['version']}],
+ }
+
  decorated_scan_results = self.scanner.client.get_dependencies(get_dependencies_json_request)
+ vulnerabilities = self.scanner.client.get_vulnerabilities_json(get_vulnerabilities_json_request)
 
  cdx = CycloneDx(self.base.debug)
  scan_results = {}
@@ -205,6 +233,10 @@ class ScannerHFHPresenter(AbstractPresenter):
  error_msg = 'ERROR: Failed to produce CycloneDX output'
  self.base.print_stderr(error_msg)
  return None
+
+ if vulnerabilities:
+ cdx_output = cdx.append_vulnerabilities(cdx_output, vulnerabilities, purl)
+
  return json.dumps(cdx_output, indent=2)
  except Exception as e:
  self.base.print_stderr(f'ERROR: Failed to get license information: {e}')
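The scan() refactor above moves the blocking folder_hash_scan call into a worker thread (_execute_grpc_scan) and lets the main thread drive the spinner until that worker finishes, instead of spinning in a separate thread that is stopped with a flag. A minimal standalone sketch of the same pattern, with do_scan() standing in for the gRPC call and assuming a progress release where Spinner works as a context manager, as the updated code does:

import threading
import time

from progress.spinner import Spinner

def do_scan() -> None:
    time.sleep(2)  # stand-in for the blocking folder_hash_scan() request

with Spinner('Scanning folder...') as spinner:
    worker = threading.Thread(target=do_scan)
    worker.start()
    while worker.is_alive():  # keep the spinner moving while the worker runs
        spinner.next()
        time.sleep(0.1)
    worker.join()
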
@@ -172,7 +172,7 @@ class ScanossSettings(ScanossBase):
 
  def _get_bom(self):
  """
- Get the Billing of Materials from the settings file
+ Get the Bill of Materials from the settings file
  Returns:
  dict: If using scanoss.json
  list: If using SBOM.json
@@ -196,6 +196,17 @@ class ScanossSettings(ScanossBase):
  return self._get_bom()
  return self._get_bom().get('include', [])
 
+
+ def get_bom_exclude(self) -> List[BomEntry]:
+ """
+ Get the list of components to exclude from the scan
+ Returns:
+ list: List of components to exclude from the scan
+ """
+ if self.settings_file_type == 'legacy':
+ return self._get_bom()
+ return self._get_bom().get('exclude', [])
+
  def get_bom_remove(self) -> List[BomEntry]:
  """
  Get the list of components to remove from the scan
@@ -225,8 +236,8 @@ class ScanossSettings(ScanossBase):
  if not self.data:
  return None
  return {
- 'scan_type': self.scan_type,
  'assets': json.dumps(self._get_sbom_assets()),
+ 'scan_type': self.scan_type,
  }
 
  def _get_sbom_assets(self):
@@ -235,7 +246,18 @@ class ScanossSettings(ScanossBase):
  Returns:
  List: List of SBOM assets
  """
- if self.scan_type == 'identify':
+
+ if self.settings_file_type == 'new':
+ if len(self.get_bom_include()):
+ self.scan_type = 'identify'
+ include_bom_entries = self._remove_duplicates(self.normalize_bom_entries(self.get_bom_include()))
+ return {"components": include_bom_entries}
+ elif len(self.get_bom_exclude()):
+ self.scan_type = 'blacklist'
+ exclude_bom_entries = self._remove_duplicates(self.normalize_bom_entries(self.get_bom_exclude()))
+ return {"components": exclude_bom_entries}
+
+ if self.settings_file_type == 'legacy' and self.scan_type == 'identify': # sbom-identify.json
  include_bom_entries = self._remove_duplicates(self.normalize_bom_entries(self.get_bom_include()))
  replace_bom_entries = self._remove_duplicates(self.normalize_bom_entries(self.get_bom_replace()))
  self.print_debug(
@@ -244,6 +266,14 @@ class ScanossSettings(ScanossBase):
  f'From Replace list: {[entry["purl"] for entry in replace_bom_entries]} \n'
  )
  return include_bom_entries + replace_bom_entries
+
+ if self.settings_file_type == 'legacy' and self.scan_type == 'blacklist': # sbom-identify.json
+ exclude_bom_entries = self._remove_duplicates(self.normalize_bom_entries(self.get_bom_exclude()))
+ self.print_debug(
+ f"Scan type set to 'blacklist'. Adding {len(exclude_bom_entries)} components as context to the scan. \n" # noqa: E501
+ f'From Exclude list: {[entry["purl"] for entry in exclude_bom_entries]} \n')
+ return exclude_bom_entries
+
  return self.normalize_bom_entries(self.get_bom_remove())
 
  @staticmethod
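For a 'new'-style settings file, _get_sbom_assets() now checks the include list first and then falls back to the new exclude list: a non-empty include keeps scan_type 'identify', while a non-empty exclude switches it to 'blacklist' and sends those components as scan context under a 'components' key. A small sketch of that selection, using a hypothetical purl; only the two keys read by get_bom_include() and get_bom_exclude() are shown:

# Shape of the dict returned by _get_bom() when only the exclude list is populated.
exclude_bom = {
    'include': [],
    'exclude': [{'purl': 'pkg:github/scanoss/engine'}],  # hypothetical component to blacklist
}

def pick_scan_type(bom: dict) -> str:
    """Mirror the new branch order in _get_sbom_assets(): include wins over exclude."""
    if len(bom.get('include', [])):
        return 'identify'
    if len(bom.get('exclude', [])):
        return 'blacklist'
    return 'remove'  # placeholder for the existing fall-through to the remove list

print(pick_scan_type(exclude_bom))  # 'blacklist'
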
scanoss/scanossapi.py CHANGED
@@ -22,23 +22,23 @@ SPDX-License-Identifier: MIT
  THE SOFTWARE.
  """
 
+ import http.client as http_client
  import logging
  import os
  import sys
  import time
+ import uuid
  from json.decoder import JSONDecodeError
+
  import requests
- import uuid
- import http.client as http_client
  import urllib3
-
  from pypac import PACSession
  from pypac.parser import PACFile
  from urllib3.exceptions import InsecureRequestWarning
 
- from .scanossbase import ScanossBase
  from . import __version__
-
+ from .constants import DEFAULT_TIMEOUT, MIN_TIMEOUT
+ from .scanossbase import ScanossBase
 
  DEFAULT_URL = 'https://api.osskb.org/scan/direct' # default free service URL
  DEFAULT_URL2 = 'https://api.scanoss.com/scan/direct' # default premium service URL
@@ -52,7 +52,7 @@ class ScanossApi(ScanossBase):
  Currently support posting scan requests to the SCANOSS streaming API
  """
 
- def __init__( # noqa: PLR0913, PLR0915
+ def __init__( # noqa: PLR0912, PLR0913, PLR0915
  self,
  scan_format: str = None,
  flags: str = None,
@@ -61,7 +61,7 @@ class ScanossApi(ScanossBase):
  debug: bool = False,
  trace: bool = False,
  quiet: bool = False,
- timeout: int = 180,
+ timeout: int = DEFAULT_TIMEOUT,
  ver_details: str = None,
  ignore_cert_errors: bool = False,
  proxy: str = None,
@@ -78,7 +78,7 @@ class ScanossApi(ScanossBase):
  :param api_key: API Key (default None)
  :param debug: Enable debug (default False)
  :param trace: Enable trace (default False)
- :param quiet: Enable quite mode (default False)
+ :param quiet: Enable quiet mode (default False)
 
  To set a custom certificate use:
  REQUESTS_CA_BUNDLE=/path/to/cert.pem
@@ -87,30 +87,28 @@ class ScanossApi(ScanossBase):
  HTTPS_PROXY='http://<ip>:<port>'
  """
  super().__init__(debug, trace, quiet)
- self.url = url
- self.api_key = api_key
  self.sbom = None
  self.scan_format = scan_format if scan_format else 'plain'
  self.flags = flags
- self.timeout = timeout if timeout > 5 else 180
+ self.timeout = timeout if timeout > MIN_TIMEOUT else DEFAULT_TIMEOUT
  self.retry_limit = retry if retry >= 0 else 5
  self.ignore_cert_errors = ignore_cert_errors
  self.req_headers = req_headers if req_headers else {}
  self.headers = {}
-
+ # Set the correct URL/API key combination
+ self.url = url if url else SCANOSS_SCAN_URL
+ self.api_key = api_key if api_key else SCANOSS_API_KEY
+ if self.api_key and not url and not os.environ.get('SCANOSS_SCAN_URL'):
+ self.url = DEFAULT_URL2 # API key specific and no alternative URL, so use the default premium
  if ver_details:
  self.headers['x-scanoss-client'] = ver_details
  if self.api_key:
  self.headers['X-Session'] = self.api_key
  self.headers['x-api-key'] = self.api_key
- self.headers['User-Agent'] = f'scanoss-py/{__version__}'
- self.headers['user-agent'] = f'scanoss-py/{__version__}'
- self.load_generic_headers()
-
- self.url = url if url else SCANOSS_SCAN_URL
- self.api_key = api_key if api_key else SCANOSS_API_KEY
- if self.api_key and not url and not os.environ.get('SCANOSS_SCAN_URL'):
- self.url = DEFAULT_URL2 # API key specific and no alternative URL, so use the default premium
+ user_agent = f'scanoss-py/{__version__}'
+ self.headers['User-Agent'] = user_agent
+ self.headers['user-agent'] = user_agent
+ self.load_generic_headers(url)
 
  if self.trace:
  logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
@@ -133,7 +131,7 @@ class ScanossApi(ScanossBase):
  if self.proxies:
  self.session.proxies = self.proxies
 
- def scan(self, wfp: str, context: str = None, scan_id: int = None):
+ def scan(self, wfp: str, context: str = None, scan_id: int = None): # noqa: PLR0912, PLR0915
  """
  Scan the specified WFP and return the JSON object
  :param wfp: WFP to scan
@@ -192,7 +190,7 @@ class ScanossApi(ScanossBase):
  else:
  self.print_stderr(f'Warning: No response received from {self.url}. Retrying...')
  time.sleep(5)
- elif r.status_code == 503: # Service limits have most likely been reached
+ elif r.status_code == requests.codes.service_unavailable: # Service limits most likely reached
  self.print_stderr(
  f'ERROR: SCANOSS API rejected the scan request ({request_id}) due to '
  f'service limits being exceeded'
@@ -202,7 +200,7 @@ class ScanossApi(ScanossBase):
  f'ERROR: {r.status_code} - The SCANOSS API request ({request_id}) rejected '
  f'for {self.url} due to service limits being exceeded.'
  )
- elif r.status_code >= 400:
+ elif r.status_code >= requests.codes.bad_request:
  if retry > self.retry_limit: # No response retry_limit or more times, fail
  self.save_bad_req_wfp(scan_files, request_id, scan_id)
  raise Exception(
@@ -269,7 +267,7 @@ class ScanossApi(ScanossBase):
  self.sbom = sbom
  return self
 
- def load_generic_headers(self):
+ def load_generic_headers(self, url):
  """
  Adds custom headers from req_headers to the headers collection.
 
@@ -279,7 +277,7 @@ class ScanossApi(ScanossBase):
  if self.req_headers: # Load generic headers
  for key, value in self.req_headers.items():
  if key == 'x-api-key': # Set premium URL if x-api-key header is set
- if not self.url and not os.environ.get('SCANOSS_SCAN_URL'):
+ if not url and not os.environ.get('SCANOSS_SCAN_URL'):
  self.url = DEFAULT_URL2 # API key specific and no alternative URL, so use the default premium
  self.api_key = value
  self.headers[key] = value
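The constructor reshuffle above resolves the URL/API-key combination before the generic headers are loaded, passes the caller-supplied url into load_generic_headers(), and replaces the hard-coded timeout literals with constants. A condensed sketch of the resulting selection rules; DEFAULT_TIMEOUT and MIN_TIMEOUT come from scanoss.constants and their real values are not shown in this diff, so the literals they replaced (180 and 5) are used as placeholders, and the SCANOSS_SCAN_URL module fallback is simplified to DEFAULT_URL:

import os
from typing import Optional, Tuple

DEFAULT_URL = 'https://api.osskb.org/scan/direct'     # default free service URL
DEFAULT_URL2 = 'https://api.scanoss.com/scan/direct'  # default premium service URL
DEFAULT_TIMEOUT = 180  # placeholder value
MIN_TIMEOUT = 5        # placeholder value

def resolve(url: Optional[str], api_key: Optional[str], timeout: int) -> Tuple[str, int]:
    effective_url = url if url else DEFAULT_URL
    # An API key with no explicit URL (and no SCANOSS_SCAN_URL override) routes to the premium endpoint
    if api_key and not url and not os.environ.get('SCANOSS_SCAN_URL'):
        effective_url = DEFAULT_URL2
    effective_timeout = timeout if timeout > MIN_TIMEOUT else DEFAULT_TIMEOUT
    return effective_url, effective_timeout

# Assuming SCANOSS_SCAN_URL is not set in the environment:
print(resolve(None, 'my-api-key', 3))  # ('https://api.scanoss.com/scan/direct', 180)
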
scanoss/scanossbase.py CHANGED
@@ -50,7 +50,7 @@ class ScanossBase:
 
  def print_msg(self, *args, **kwargs):
  """
- Print message if quite mode is not enabled
+ Print message if quiet mode is not enabled
  """
  if not self.quiet:
  self.print_stderr(*args, **kwargs)