fosslight-util 1.4.47__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fosslight_util/compare_yaml.py +18 -11
- fosslight_util/constant.py +11 -0
- fosslight_util/convert_excel_to_yaml.py +1 -1
- fosslight_util/correct.py +47 -91
- fosslight_util/cover.py +19 -6
- fosslight_util/help.py +3 -4
- fosslight_util/oss_item.py +148 -155
- fosslight_util/output_format.py +7 -5
- fosslight_util/parsing_yaml.py +45 -30
- fosslight_util/read_excel.py +29 -35
- fosslight_util/set_log.py +20 -3
- fosslight_util/spdx_licenses.py +2 -1
- fosslight_util/write_excel.py +88 -156
- fosslight_util/write_opossum.py +14 -20
- fosslight_util/write_scancodejson.py +31 -31
- fosslight_util/write_spdx.py +30 -35
- fosslight_util/write_txt.py +2 -1
- fosslight_util/write_yaml.py +43 -54
- {fosslight_util-1.4.47.dist-info → fosslight_util-2.0.0.dist-info}/METADATA +7 -8
- fosslight_util-2.0.0.dist-info/RECORD +31 -0
- {fosslight_util-1.4.47.dist-info → fosslight_util-2.0.0.dist-info}/WHEEL +1 -1
- fosslight_util-1.4.47.dist-info/RECORD +0 -31
- {fosslight_util-1.4.47.dist-info → fosslight_util-2.0.0.dist-info}/LICENSE +0 -0
- {fosslight_util-1.4.47.dist-info → fosslight_util-2.0.0.dist-info}/entry_points.txt +0 -0
- {fosslight_util-1.4.47.dist-info → fosslight_util-2.0.0.dist-info}/top_level.txt +0 -0
fosslight_util/set_log.py
CHANGED

@@ -12,6 +12,8 @@ import platform
 from . import constant as constant
 from lastversion import lastversion
 import coloredlogs
+from typing import Tuple
+from logging import Logger


 def init_check_latest_version(pkg_version="", main_package_name=""):

@@ -32,6 +34,21 @@ def init_check_latest_version(pkg_version="", main_package_name=""):
         logger.debug('Cannot check the latest version:' + str(error))


+def get_os_version():
+
+    logger = logging.getLogger(constant.LOGGER_NAME)
+
+    os_version = platform.system() + " " + platform.release()
+    if os_version == "Windows 10":
+        try:
+            windows_build = sys.getwindowsversion().build
+            if windows_build >= 22000:
+                os_version = "Windows 11"
+        except Exception as error:
+            logger.debug(str(error))
+    return os_version
+
+
 class CustomAdapter(logging.LoggerAdapter):
     def __init__(self, logger, extra):
         super(CustomAdapter, self).__init__(logger, {})

@@ -41,8 +58,8 @@ class CustomAdapter(logging.LoggerAdapter):
         return '[%s] %s' % (self.extra, msg), kwargs


-def init_log(log_file, create_file=True, stream_log_level=logging.INFO,
-
+def init_log(log_file: str, create_file: bool = True, stream_log_level: int = logging.INFO, file_log_level: int = logging.DEBUG,
+             main_package_name: str = "", path_to_analyze: str = "", path_to_exclude: list = []) -> Tuple[Logger, dict]:

     logger = logging.getLogger(constant.LOGGER_NAME)


@@ -70,7 +87,7 @@ def init_log(log_file, create_file=True, stream_log_level=logging.INFO,
     _result_log = {
         "Tool Info": main_package_name,
         "Python version": _PYTHON_VERSION,
-        "OS":
+        "OS": get_os_version(),
     }
     if main_package_name != "":
         pkg_info = main_package_name
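For orientation, a minimal usage sketch of the retyped init_log entry point above, assuming fosslight-util 2.0.0 is installed and that the returned dict is the _result_log populated in the diff; the log path and package name are illustrative, not taken from the diff:

    from fosslight_util.set_log import init_log

    # Illustrative values only.
    logger, result_log = init_log("test_result/fosslight_log.txt",
                                  main_package_name="fosslight_source")
    logger.info(f"OS recorded in the result log: {result_log.get('OS', '')}")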
fosslight_util/spdx_licenses.py
CHANGED

@@ -8,6 +8,7 @@ import os
 import sys
 import json
 import traceback
+from typing import Tuple

 _resources_dir = 'resources'
 _licenses_json_file = 'licenses.json'

@@ -34,7 +35,7 @@ def get_license_from_nick():
     return licenses


-def get_spdx_licenses_json():
+def get_spdx_licenses_json() -> Tuple[bool, str, str]:
     success = True
     error_msg = ''
     licenses = ''
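The change here is limited to the added return annotation; a short sketch of the call shape it documents (the three values correspond to the success, error_msg and licenses variables initialized above):

    from fosslight_util.spdx_licenses import get_spdx_licenses_json

    success, error_msg, licenses = get_spdx_licenses_json()
    if not success:
        print(f"Failed to load SPDX license data: {error_msg}")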
fosslight_util/write_excel.py
CHANGED

@@ -7,160 +7,101 @@ import csv
 import time
 import logging
 import os
-import platform
 import pandas as pd
-import copy
 from pathlib import Path
-
+from fosslight_util.constant import LOGGER_NAME, SHEET_NAME_FOR_SCANNER, FOSSLIGHT_BINARY
 from jsonmerge import merge
-from fosslight_util.cover import CoverItem


 _HEADER = {'BIN (': ['ID', 'Binary Path', 'Source Code Path',
                      'NOTICE.html', 'OSS Name', 'OSS Version',
                      'License', 'Download Location', 'Homepage',
                      'Copyright Text', 'Exclude', 'Comment'],
-           'SRC': ['ID', 'Source Path', 'OSS Name',
-                   '
-                   '
-                   'Comment'],
+           'SRC': ['ID', 'Source Path', 'OSS Name', 'OSS Version',
+                   'License', 'Download Location', 'Homepage',
+                   'Copyright Text', 'Exclude', 'Comment'],
            'BIN': ['ID', 'Binary Path', 'OSS Name', 'OSS Version',
                    'License', 'Download Location', 'Homepage',
-                   'Copyright Text', 'Exclude', 'Comment'
+                   'Copyright Text', 'Exclude', 'Comment',
+                   'Vulnerability Link', 'TLSH', 'SHA1'],
+           'DEP': ['ID', 'Package URL', 'OSS Name', 'OSS Version',
+                   'License', 'Download Location', 'Homepage',
+                   'Copyright Text', 'Exclude', 'Comment',
+                   'Depends On']}
+
+BIN_HIDE_HEADER = {'TLSH', "SHA1"}
 _OUTPUT_FILE_PREFIX = "FOSSLight-Report_"
-_EMPTY_ITEM_MSG = "* There is no item"\
-                  " to print in FOSSLight-Report.\n"
 IDX_FILE = 0
 IDX_EXCLUDE = 7
-logger = logging.getLogger(
+logger = logging.getLogger(LOGGER_NAME)
 COVER_SHEET_NAME = 'Scanner Info'


-def
-    success = True
-    error_msg = ""
-    success_csv = True
-    error_msg_csv = ""
-    output_files = ""
-    output_csv = ""
-
-    is_not_null, sheet_list = remove_empty_sheet(sheet_list)
-
-    if is_not_null:
-        output_dir = os.path.dirname(filename_without_extension)
-        Path(output_dir).mkdir(parents=True, exist_ok=True)
-
-        success, error_msg = write_result_to_excel(f"{filename_without_extension}.xlsx",
-                                                   sheet_list,
-                                                   extended_header,
-                                                   hide_header)
-
-        if ignore_os or platform.system() != "Windows":
-            success_csv, error_msg_csv, output_csv = write_result_to_csv(f"{filename_without_extension}.csv",
-                                                                         sheet_list, True, extended_header)
-        if success:
-            output_files = f"{filename_without_extension}.xlsx"
-        else:
-            error_msg = "[Error] Writing excel:" + error_msg
-        if success_csv:
-            if output_csv:
-                output_files = f"{output_files}, {output_csv}" if output_files else output_csv
-        else:
-            error_msg += "\n[Error] Writing csv:" + error_msg_csv
-    else:
-        success = False
-        error_msg = _EMPTY_ITEM_MSG
-
-    return (success and success_csv), error_msg, output_files
-
-
-def remove_empty_sheet(sheet_items):
-    skip_sheet_name = []
-    cnt_sheet_to_print = 0
-    final_sheet_to_print = {}
-    success = False
-    try:
-        if sheet_items:
-            for sheet_name, sheet_content in sheet_items.items():
-                if len(sheet_content) > 0:
-                    final_sheet_to_print[sheet_name] = sheet_content
-                    cnt_sheet_to_print += 1
-                else:
-                    skip_sheet_name.append(sheet_name)
-            if cnt_sheet_to_print != 0:
-                success = True
-            if len(skip_sheet_name) > 0:
-                logger.warn("* Empty sheet(not printed):" + str(skip_sheet_name))
-    except Exception as ex:
-        logger.warn("* Warning:"+str(ex))
-
-    return success, final_sheet_to_print
-
-
-def get_header_row(sheet_name, sheet_content, extended_header={}):
+def get_header_row(sheet_name, extended_header={}):
     selected_header = []

     merged_headers = merge(_HEADER, extended_header)

-    selected_header = merged_headers.get(sheet_name)
+    selected_header = merged_headers.get(sheet_name, [])
     if not selected_header:
         for header_key in merged_headers.keys():
             if sheet_name.startswith(header_key):
                 selected_header = merged_headers[header_key]
                 break
-
-    if not selected_header:
-        selected_header = sheet_content.pop(0)
-    return selected_header, sheet_content
+    return selected_header


-def write_result_to_csv(output_file,
+def write_result_to_csv(output_file, scan_item, separate_sheet=False, extended_header={}):
     success = True
     error_msg = ""
     file_extension = ".csv"
     output = ""

     try:
- [removed lines 123-146 not shown in this view]
+        output_files = []
+        output_dir = os.path.dirname(output_file)
+        Path(output_dir).mkdir(parents=True, exist_ok=True)
+        if separate_sheet:
+            filename = os.path.splitext(os.path.basename(output_file))[0]
+            separate_output_file = os.path.join(output_dir, filename)
+
+        merge_sheet = []
+        for scanner_name, _ in scan_item.file_items.items():
+            row_num = 1
+            sheet_name = ""
+            if scanner_name.lower() in SHEET_NAME_FOR_SCANNER:
+                sheet_name = SHEET_NAME_FOR_SCANNER[scanner_name.lower()]
+            elif extended_header:
+                sheet_name = list(extended_header.keys())[0]
+            sheet_content_without_header = scan_item.get_print_array(scanner_name)
+            header_row = get_header_row(sheet_name, extended_header)
+
+            if 'Copyright Text' in header_row:
+                idx = header_row.index('Copyright Text')-1
+                for item in sheet_content_without_header:
+                    item[idx] = item[idx].replace('\n', ', ')
+            if not separate_sheet:
+                merge_sheet.extend(sheet_content_without_header)
+                if scanner_name == list(scan_item.file_items.keys())[-1]:
+                    sheet_content_without_header = merge_sheet
                 else:
- [removed lines 148-163 not shown in this view]
+                    continue
+            else:
+                output_file = separate_output_file + "_" + sheet_name + file_extension
+            try:
+                sheet_content_without_header = sorted(sheet_content_without_header,
+                                                      key=lambda x: (x[IDX_EXCLUDE], x[IDX_FILE] == "", x[IDX_FILE]))
+            except Exception:
+                pass
+            with open(output_file, 'w', newline='') as file:
+                writer = csv.writer(file, delimiter='\t')
+                writer.writerow(header_row)
+                for row_item in sheet_content_without_header:
+                    row_item.insert(0, row_num)
+                    writer.writerow(row_item)
+                    row_num += 1
+                output_files.append(output_file)
+        if output_files:
+            output = ", ".join(output_files)
     except Exception as ex:
         error_msg = str(ex)
         success = False

@@ -168,7 +109,7 @@ def write_result_to_csv(output_file, sheet_list_origin, separate_sheet=False, ex
     return success, error_msg, output


-def write_result_to_excel(out_file_name,
+def write_result_to_excel(out_file_name, scan_item, extended_header={}, hide_header={}):
     success = True
     error_msg = ""

@@ -177,21 +118,37 @@ def write_result_to_excel(out_file_name, sheet_list, extended_header={}, hide_he
         Path(output_dir).mkdir(parents=True, exist_ok=True)

         workbook = xlsxwriter.Workbook(out_file_name)
- [removed lines 180-184 not shown in this view]
+        write_cover_sheet(workbook, scan_item.cover)
+        if scan_item.file_items and len(scan_item.file_items.keys()) > 0:
+            for scanner_name, _ in scan_item.file_items.items():
+                sheet_name = ""
+                if scanner_name.lower() in SHEET_NAME_FOR_SCANNER:
+                    sheet_name = SHEET_NAME_FOR_SCANNER[scanner_name.lower()]
+                elif extended_header:
+                    sheet_name = list(extended_header.keys())[0]
+                sheet_content_without_header = scan_item.get_print_array(scanner_name)
+                selected_header = get_header_row(sheet_name, extended_header)
                 try:
                     sheet_content_without_header = sorted(sheet_content_without_header,
                                                           key=lambda x: (x[IDX_EXCLUDE], x[IDX_FILE] == "", x[IDX_FILE]))
                 except Exception:
                     pass
+                if sheet_name:
+                    worksheet = create_worksheet(workbook, sheet_name, selected_header)
+                    write_result_to_sheet(worksheet, sheet_content_without_header)
+                    if (scanner_name == FOSSLIGHT_BINARY) and (not hide_header):
+                        hide_header = BIN_HIDE_HEADER
+                    if hide_header:
+                        hide_column(worksheet, selected_header, hide_header)
+
+        for sheet_name, content in scan_item.external_sheets.items():
+            if len(content) > 0:
+                selected_header = content.pop(0)
                 worksheet = create_worksheet(workbook, sheet_name, selected_header)
-                write_result_to_sheet(worksheet,
-
+                write_result_to_sheet(worksheet, content)
                 if hide_header:
                     hide_column(worksheet, selected_header, hide_header)
+
         workbook.close()
     except Exception as ex:
         error_msg = str(ex)

@@ -240,37 +197,12 @@ def create_worksheet(workbook, sheet_name, header_row):
         current_time = str(time.time())
         sheet_name = current_time
     worksheet = workbook.add_worksheet(sheet_name)
-
-
+    if header_row:
+        for col_num, value in enumerate(header_row):
+            worksheet.write(0, col_num, value)
     return worksheet


-def merge_cover_comment(find_excel_dir, merge_files=''):
-    FIND_EXTENSION = '.xlsx'
-    merge_comment = []
-    cover_comment = ''
-    try:
-        files = os.listdir(find_excel_dir)
-
-        if len([name for name in files if name.endswith(FIND_EXTENSION)]) > 0:
-            for file in files:
-                if merge_files:
-                    if file not in merge_files:
-                        continue
-                if file.endswith(FIND_EXTENSION):
-                    file = os.path.join(find_excel_dir, file)
-                    df_excel = pd.read_excel(file, sheet_name=COVER_SHEET_NAME, index_col=0, engine='openpyxl')
-                    if not df_excel.empty:
-                        tool_name = df_excel.loc[CoverItem.tool_name_key].values[0]
-                        comment = df_excel.loc[CoverItem.comment_key].values[0]
-                        merge_comment.append(str(f"[{tool_name}] {comment}"))
-            cover_comment = '\n'.join(merge_comment)
-    except Exception as ex:
-        logger.warning(f'Fail to merge comment of Scanner info: {str(ex)}')
-
-    return cover_comment
-
-
 def merge_excels(find_excel_dir, final_out, merge_files='', cover=''):
     success = True
     msg = ""
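The report writers above now take a scan-result object instead of a dict of sheet rows. A hedged sketch of the new call shape, assuming scan_item is a populated fosslight_util.oss_item.ScannerItem (its construction is outside this diff) and that write_result_to_excel still returns (success, error_msg) as the removed 1.4.47 caller did:

    from fosslight_util.write_excel import write_result_to_excel, write_result_to_csv

    def export_reports(scan_item, out_base="test_result/FOSSLight-Report"):
        # out_base is an illustrative path prefix; per the diff, both writers
        # create the output directory themselves.
        ok_xlsx, err_xlsx = write_result_to_excel(f"{out_base}.xlsx", scan_item)
        ok_csv, err_csv, csv_files = write_result_to_csv(f"{out_base}.csv", scan_item,
                                                         separate_sheet=True)
        return ok_xlsx and ok_csv, (err_xlsx + err_csv).strip(), csv_files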
fosslight_util/write_opossum.py
CHANGED

@@ -11,9 +11,8 @@ import logging
 from datetime import datetime
 from pathlib import Path
 import traceback
-from
-
-import fosslight_util.constant as constant
+from fosslight_util.constant import LOGGER_NAME, FOSSLIGHT_BINARY, FOSSLIGHT_DEPENDENCY, FOSSLIGHT_SOURCE
+from typing import Dict, Optional, Tuple


 PACKAGE = {

@@ -30,7 +29,7 @@ PACKAGE = {
 }

 _attributionConfidence = 80
-logger = logging.getLogger(
+logger = logging.getLogger(LOGGER_NAME)


 class AttributionItem():

@@ -51,7 +50,7 @@ class AttributionItem():
         self.excludeFromNotice = False

         self.source_name = source_name
-        if source_name ==
+        if source_name == FOSSLIGHT_DEPENDENCY:
             self.preSelected = True
         else:
             self.preSelected = False

@@ -113,12 +112,12 @@ class Attribution(AttributionItem):
         dict[licenseName] = self.licenseName
         dict[preSelected] = self.preSelected

-        if self.source_name ==
+        if self.source_name == FOSSLIGHT_SOURCE or FOSSLIGHT_BINARY:
             dict[copyright] = self.copyright
             dict[packageName] = self.packageName
             dict[packageVersion] = self.packageVersion
             dict[url] = self.url
-        elif self.source_name ==
+        elif self.source_name == FOSSLIGHT_DEPENDENCY:
             dict[copyright] = self.copyright
             dict[packageName] = self.packageName
             dict[packageVersion] = self.packageVersion

@@ -165,7 +164,7 @@ def make_frequentlicenses():
     return frequentLicenses, success, error_msg


-def write_opossum(filename,
+def write_opossum(filename: str, scan_item) -> Tuple[bool, str]:
     success = True
     error_msg = ''
     dict = {}

@@ -176,7 +175,7 @@ def write_opossum(filename, sheet_list):
     _filesWithChildren_key = 'filesWithChildren'
     _attributionBreakpoints_key = 'attributionBreakpoints'

-    if
+    if scan_item:
         output_dir = os.path.dirname(filename)
         Path(output_dir).mkdir(parents=True, exist_ok=True)

@@ -189,14 +188,9 @@ def write_opossum(filename, sheet_list):
         filesWithChildren_list = []
         attributionBreakpoints_list = []
         try:
-            for
-
-
-                else:
-                    logger.warning("Not supported scanner(sheet_name):" + sheet_name)
-                    continue
-
-                ret_resources_attribution = make_resources_and_attributions(sheet_contents, scanner, resources, fc_list)
+            for scanner_name, _ in scan_item.file_items.items():
+                sheet_contents = scan_item.get_print_array(scanner_name)
+                ret_resources_attribution = make_resources_and_attributions(sheet_contents, scanner_name, resources, fc_list)
                 success, rsc, ea, ra, fl, ab = ret_resources_attribution
                 if success:
                     dict[_resources_key].update(rsc)

@@ -255,14 +249,14 @@ def make_resources_and_attributions(sheet_items, scanner, resources, fc_list):
             items = items[0:9]
             path, oss_name, oss_version, license, url, homepage, copyright, exclude, comment = items

-            if scanner ==
+            if scanner == FOSSLIGHT_SOURCE:
                 if (os.path.join(os.sep, path) + os.sep) not in fc_list:
                     resources = make_resources(path, resources)
                 attribution = Attribution(scanner, license, exclude, copyright, oss_name, oss_version, url)
-            elif scanner ==
+            elif scanner == FOSSLIGHT_BINARY:
                 resources = make_resources(path, resources)
                 attribution = Attribution(scanner, license, exclude, copyright, oss_name, oss_version, url)
-            elif scanner ==
+            elif scanner == FOSSLIGHT_DEPENDENCY:
                 try:
                     packageType = PACKAGE[path]
                 except Exception:
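write_opossum follows the same pattern, taking the scan-result object directly; a minimal sketch under the same ScannerItem assumption, with an illustrative output path:

    from fosslight_util.write_opossum import write_opossum

    def export_opossum(scan_item, out_file="test_result/opossum_input.json"):
        # Per the diff, write_opossum creates the output directory itself.
        success, error_msg = write_opossum(out_file, scan_item)
        return success, error_msg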
fosslight_util/write_scancodejson.py
CHANGED

@@ -6,59 +6,59 @@
 import logging
 import os
 import json
-
-from fosslight_util.oss_item import
+from fosslight_util.constant import LOGGER_NAME
+from fosslight_util.oss_item import ScannerItem
 from typing import List

-logger = logging.getLogger(
+logger = logging.getLogger(LOGGER_NAME)
 EMPTY_FILE_PATH = '-'


-def write_scancodejson(output_dir: str, output_filename: str, oss_list: List[
+def write_scancodejson(output_dir: str, output_filename: str, oss_list: List[ScannerItem]):
     json_output = {}
     json_output['headers'] = []
     json_output['summary'] = {}
     json_output['license_detections'] = []
     json_output['files'] = []

-    for
- [removed lines 25-33 not shown in this view]
-        json_output['files'] = add_item_in_files(oi, item_path, json_output['files'])
+    for file_items in oss_list.file_items.values():
+        for fi in file_items:
+            if fi.exclude:
+                continue
+            if fi.oss_items and (all(oss_item.exclude for oss_item in fi.oss_items)):
+                continue
+            if not fi.source_name_or_path:
+                fi.source_name_or_path = EMPTY_FILE_PATH
+            json_output['files'] = add_item_in_files(fi, json_output['files'])
+
     with open(os.path.join(output_dir, output_filename), 'w') as f:
         json.dump(json_output, f, sort_keys=False, indent=4)


-def append_oss_item_in_filesitem(
-
-
-
+def append_oss_item_in_filesitem(oss_items, files_item):
+    for oi in oss_items:
+        if oi.exclude:
+            continue
         oss_item = {}
-        oss_item['name'] =
-        oss_item['version'] =
-        oss_item['license'] =
-        oss_item['copyright'] =
-        oss_item['download_location'] =
-        oss_item['comment'] =
+        oss_item['name'] = oi.name
+        oss_item['version'] = oi.version
+        oss_item['license'] = oi.license
+        oss_item['copyright'] = oi.copyright
+        oss_item['download_location'] = oi.download_location
+        oss_item['comment'] = oi.comment
         files_item['oss'].append(oss_item)
+
     return files_item


-def add_item_in_files(
+def add_item_in_files(file_item, files_list):
     files_item = {}
-    files_item['path'] =
-    files_item['name'] = os.path.basename(
-    files_item['is_binary'] =
-    files_item['base_name'], files_item['extension'] = os.path.splitext(os.path.basename(
+    files_item['path'] = file_item.source_name_or_path
+    files_item['name'] = os.path.basename(file_item.source_name_or_path)
+    files_item['is_binary'] = file_item.is_binary
+    files_item['base_name'], files_item['extension'] = os.path.splitext(os.path.basename(file_item.source_name_or_path))
     files_item['oss'] = []
-    files_item = append_oss_item_in_filesitem(
+    files_item = append_oss_item_in_filesitem(file_item.oss_items, files_item)
     files_list.append(files_item)

     return files_list