ScriptCollection 3.5.14-py3-none-any.whl → 3.5.16-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ScriptCollection/Executables.py +13 -0
- ScriptCollection/ScriptCollectionCore.py +227 -75
- ScriptCollection/TasksForCommonProjectStructure.py +21 -13
- {ScriptCollection-3.5.14.dist-info → ScriptCollection-3.5.16.dist-info}/METADATA +1 -1
- {ScriptCollection-3.5.14.dist-info → ScriptCollection-3.5.16.dist-info}/RECORD +8 -8
- {ScriptCollection-3.5.14.dist-info → ScriptCollection-3.5.16.dist-info}/entry_points.txt +1 -0
- {ScriptCollection-3.5.14.dist-info → ScriptCollection-3.5.16.dist-info}/WHEEL +0 -0
- {ScriptCollection-3.5.14.dist-info → ScriptCollection-3.5.16.dist-info}/top_level.txt +0 -0
ScriptCollection/Executables.py
CHANGED
@@ -356,3 +356,16 @@ def ChangeFileExtensions() -> int:
     args = parser.parse_args()
     ScriptCollectionCore().change_file_extensions(args.folder, args.source_extension, args.target_extension, args.recursive, args.ignore_case)
     return 0
+
+
+def GenerateARC42ReferenceTemplate() -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-f', '--folder', required=False)
+    parser.add_argument('-p', '--productname', required=False)
+    args = parser.parse_args()
+
+    folder = args.folder
+    if folder is None:
+        folder = os.getcwd()
+    ScriptCollectionCore().generate_arc42_reference_template(folder, args.productname)
+    return 0
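The new executable is a thin wrapper: it parses `-f`/`--folder` and `-p`/`--productname`, falls back to the current working directory, and delegates to `ScriptCollectionCore.generate_arc42_reference_template`. A minimal sketch of the equivalent direct call (the import path mirrors the wheel layout; the product name is an example):

    import os
    from ScriptCollection.ScriptCollectionCore import ScriptCollectionCore

    # Equivalent to "scgeneratearc42referencetemplate -f <repo> -p MyProduct";
    # without -f the executable falls back to os.getcwd().
    repository = os.getcwd()  # example: root of the repository to document
    ScriptCollectionCore().generate_arc42_reference_template(repository, "MyProduct")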
ScriptCollection/ScriptCollectionCore.py
CHANGED
@@ -1,9 +1,10 @@
+import sys
 from datetime import timedelta, datetime
 import json
 import binascii
 import filecmp
-import time
 import hashlib
+import time
 from io import BytesIO
 import itertools
 import math
@@ -29,7 +30,7 @@ from .ProgramRunnerBase import ProgramRunnerBase
 from .ProgramRunnerPopen import ProgramRunnerPopen
 from .ProgramRunnerEpew import ProgramRunnerEpew, CustomEpewArgument

-version = "3.5.14"
+version = "3.5.16"
 __version__ = version


@@ -40,11 +41,12 @@ class ScriptCollectionCore:
     mock_program_calls: bool = False
     # The purpose of this property is to use it when testing your code which uses scriptcollection for external program-calls.
     execute_program_really_if_no_mock_call_is_defined: bool = False
-    __mocked_program_calls: list =
+    __mocked_program_calls: list = None
     program_runner: ProgramRunnerBase = None

     def __init__(self):
         self.program_runner = ProgramRunnerPopen()
+        self.__mocked_program_calls = list[ScriptCollectionCore.__MockProgramCall]()

     @staticmethod
     @GeneralUtilities.check_arguments
@@ -536,8 +538,7 @@ class ScriptCollectionCore:
         for file_or_folder, item_type in items.items():
             truncated_file = file_or_folder[path_prefix:]
             if (filter_function is None or filter_function(folder, truncated_file)):
-                owner_and_permisssion = self.get_file_owner_and_file_permission(
-                    file_or_folder)
+                owner_and_permisssion = self.get_file_owner_and_file_permission(file_or_folder)
                 user = owner_and_permisssion[0]
                 permissions = owner_and_permisssion[1]
                 lines.append(f"{truncated_file};{item_type};{user};{permissions}")
@@ -562,8 +563,7 @@ class ScriptCollectionCore:
             foldername = os.path.basename(subfolder)
             if ".git" in foldername:
                 new_name = foldername.replace(".git", ".gitx")
-                subfolder2 = os.path.join(
-                    str(Path(subfolder).parent), new_name)
+                subfolder2 = os.path.join(str(Path(subfolder).parent), new_name)
                 os.rename(subfolder, subfolder2)
                 renamed_items[subfolder2] = subfolder
             else:
@@ -954,13 +954,11 @@ class ScriptCollectionCore:
             with (open(full_path, "rb").read()) as text_io_wrapper:
                 content = text_io_wrapper
                 path_in_iso = '/' + files_directory + \
-                    self.__adjust_folder_name(
-                        full_path[len(folder)::1]).upper()
+                    self.__adjust_folder_name(full_path[len(folder)::1]).upper()
                 if path_in_iso not in created_directories:
                     iso.add_directory(path_in_iso)
                     created_directories.append(path_in_iso)
-                iso.add_fp(BytesIO(content), len(content),
-                           path_in_iso + '/' + file.upper() + ';1')
+                iso.add_fp(BytesIO(content), len(content), path_in_iso + '/' + file.upper() + ';1')
         iso.write(iso_file)
         iso.close()

@@ -1008,8 +1006,7 @@ class ScriptCollectionCore:
                 new_file_name = os.path.join(
                     os.path.dirname(file), new_file_name_without_path)
                 os.rename(file, new_file_name)
-                GeneralUtilities.append_line_to_file(namemappingfile, os.path.basename(
-                    file) + ";" + new_file_name_without_path + ";" + hash_value)
+                GeneralUtilities.append_line_to_file(namemappingfile, os.path.basename(file) + ";" + new_file_name_without_path + ";" + hash_value)
         else:
             raise ValueError(f"Directory not found: '{inputfolder}'")

@@ -1026,15 +1023,12 @@ class ScriptCollectionCore:
         for line in reversed(lines):
             if not GeneralUtilities.string_is_none_or_whitespace(line):
                 if "RunningHealthy (" in line: # TODO use regex
-                    GeneralUtilities.write_message_to_stderr(
-                        f"Healthy running due to line '{line}' in file '{file}'.")
+                    GeneralUtilities.write_message_to_stderr(f"Healthy running due to line '{line}' in file '{file}'.")
                     return 0
                 else:
-                    GeneralUtilities.write_message_to_stderr(
-                        f"Not healthy running due to line '{line}' in file '{file}'.")
+                    GeneralUtilities.write_message_to_stderr(f"Not healthy running due to line '{line}' in file '{file}'.")
                     return 1
-        GeneralUtilities.write_message_to_stderr(
-            f"No valid line found for healthycheck in file '{file}'.")
+        GeneralUtilities.write_message_to_stderr(f"No valid line found for healthycheck in file '{file}'.")
         return 2

     @GeneralUtilities.check_arguments
@@ -1103,12 +1097,11 @@ class ScriptCollectionCore:
     @GeneralUtilities.check_arguments
     def get_file_permission(self, file: str) -> str:
         """This function returns an usual octet-triple, for example "700"."""
-        ls_output = self.
+        ls_output: str = self.run_ls_for_folder(file)
         return self.__get_file_permission_helper(ls_output)

     @GeneralUtilities.check_arguments
-    def __get_file_permission_helper(self,
-        permissions = ' '.join(ls_output.split()).split(' ')[0][1:]
+    def __get_file_permission_helper(self, permissions: str) -> str:
         return str(self.__to_octet(permissions[0:3]))+str(self.__to_octet(permissions[3:6]))+str(self.__to_octet(permissions[6:9]))

     @GeneralUtilities.check_arguments
@@ -1125,31 +1118,44 @@ class ScriptCollectionCore:
     @GeneralUtilities.check_arguments
     def get_file_owner(self, file: str) -> str:
         """This function returns the user and the group in the format "user:group"."""
-        ls_output = self.
+        ls_output: str = self.run_ls_for_folder(file)
         return self.__get_file_owner_helper(ls_output)

     @GeneralUtilities.check_arguments
     def __get_file_owner_helper(self, ls_output: str) -> str:
-
-
-            return f"{splitted[2]}:{splitted[3]}"
-        except Exception as exception:
-            raise ValueError(
-                f"ls-output '{ls_output}' not parsable") from exception
+        splitted = ls_output.split()
+        return f"{splitted[2]}:{splitted[3]}"

     @GeneralUtilities.check_arguments
     def get_file_owner_and_file_permission(self, file: str) -> str:
-        ls_output = self.
+        ls_output: str = self.run_ls_for_folder(file)
         return [self.__get_file_owner_helper(ls_output), self.__get_file_permission_helper(ls_output)]

     @GeneralUtilities.check_arguments
-    def
-
-        GeneralUtilities.assert_condition(os.path.isfile(
-
-        GeneralUtilities.assert_condition(
-        GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(
-
+    def run_ls_for_folder(self, file_or_folder: str) -> str:
+        file_or_folder = file_or_folder.replace("\\", "/")
+        GeneralUtilities.assert_condition(os.path.isfile(file_or_folder) or os.path.isdir(file_or_folder), f"Can not execute 'ls -ld' because '{file_or_folder}' does not exist.")
+        ls_result = self.run_program_argsasarray("ls", ["-ld", file_or_folder])
+        GeneralUtilities.assert_condition(ls_result[0] == 0, f"'ls -ld {file_or_folder}' resulted in exitcode {str(ls_result[0])}. StdErr: {ls_result[2]}")
+        GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(ls_result[1]), f"'ls -ld' of '{file_or_folder}' had an empty output. StdErr: '{ls_result[2]}'")
+        GeneralUtilities.write_message_to_stdout(ls_result[1])
+        output = ls_result[1]
+        result = output.replace("\n", "")
+        result = ' '.join(result.split()) # reduce multiple whitespaces to one
+        return result
+
+    @GeneralUtilities.check_arguments
+    def run_ls_for_folder_content(self, file_or_folder: str) -> list[str]:
+        file_or_folder = file_or_folder.replace("\\", "/")
+        GeneralUtilities.assert_condition(os.path.isfile(file_or_folder) or os.path.isdir(file_or_folder), f"Can not execute 'ls -la' because '{file_or_folder}' does not exist.")
+        ls_result = self.run_program_argsasarray("ls", ["-la", file_or_folder])
+        GeneralUtilities.assert_condition(ls_result[0] == 0, f"'ls -la {file_or_folder}' resulted in exitcode {str(ls_result[0])}. StdErr: {ls_result[2]}")
+        GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(ls_result[1]), f"'ls -la' of '{file_or_folder}' had an empty output. StdErr: '{ls_result[2]}'")
+        GeneralUtilities.write_message_to_stdout(ls_result[1])
+        output = ls_result[1]
+        result = output.split("\n")[3:] # skip the lines with "Total", "." and ".."
+        result = [' '.join(line.split()) for line in result] # reduce multiple whitespaces to one
+        return result

     @GeneralUtilities.check_arguments
     def set_permission(self, file_or_folder: str, permissions: str, recursive: bool = False) -> None:
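The helpers above parse a single normalized `ls -ld` line: the owner helper takes whitespace-separated fields 2 and 3 ("user:group"), and the permission helper converts each rwx triple of the mode string into one octal digit. A self-contained sketch of that parsing; `to_octet` is a hypothetical stand-in for the private `__to_octet` helper, which is not part of this diff:

    # Minimal sketch of the parsing done by the owner/permission helpers above;
    # to_octet is a hypothetical stand-in for the private __to_octet helper.
    def to_octet(rwx: str) -> int:
        return sum(v for c, v in zip(rwx, (4, 2, 1)) if c != "-")

    ls_output = "drwxr-x--- 2 alice developers 4096 Jan  1 12:00 /data/project"
    fields = " ".join(ls_output.split()).split(" ")
    owner = f"{fields[2]}:{fields[3]}"   # "alice:developers"
    permissions = fields[0][1:]          # "rwxr-x---" (mode string without the file-type character)
    octal = f"{to_octet(permissions[0:3])}{to_octet(permissions[3:6])}{to_octet(permissions[6:9])}"
    print(owner, octal)                  # alice:developers 750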
@@ -1189,31 +1195,28 @@ class ScriptCollectionCore:
         return popen

     @staticmethod
-    @GeneralUtilities.check_arguments
     def __enqueue_output(file, queue):
         for line in iter(file.readline, ''):
             queue.put(line)
         file.close()

     @staticmethod
-    @GeneralUtilities.check_arguments
     def __read_popen_pipes(p: Popen):
-
         with ThreadPoolExecutor(2) as pool:
-            q_stdout
+            q_stdout = Queue()
+            q_stderr = Queue()

             pool.submit(ScriptCollectionCore.__enqueue_output, p.stdout, q_stdout)
             pool.submit(ScriptCollectionCore.__enqueue_output, p.stderr, q_stderr)
-            while
-                time.sleep(0.
-
-
-
-                out_line = ''
-                err_line = ''
-
+            while (p.poll() is None) or (not q_stdout.empty()) or (not q_stderr.empty()):
+                time.sleep(0.01)
+                out_line = None
+                err_line = None
                 try:
                     out_line = q_stdout.get_nowait()
+                except Empty:
+                    pass
+                try:
                     err_line = q_stderr.get_nowait()
                 except Empty:
                     pass
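`__read_popen_pipes` uses the common queue-per-pipe pattern: one background task per pipe drains it into a `Queue`, while the main loop polls both queues with `get_nowait()` until the process has exited and both queues are empty. A standalone sketch of the same pattern on a plain `subprocess.Popen`, independent of ScriptCollection:

    # Standalone sketch of the queue-per-pipe pattern used by __read_popen_pipes.
    import subprocess
    import sys
    import time
    from concurrent.futures import ThreadPoolExecutor
    from queue import Empty, Queue

    def enqueue(stream, queue: Queue) -> None:
        # drain one pipe into a queue so the main loop never blocks on readline()
        for line in iter(stream.readline, ''):
            queue.put(line)
        stream.close()

    proc = subprocess.Popen(
        [sys.executable, "-c", "import sys; print('out'); print('err', file=sys.stderr)"],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
    q_out, q_err = Queue(), Queue()
    with ThreadPoolExecutor(2) as pool:
        pool.submit(enqueue, proc.stdout, q_out)
        pool.submit(enqueue, proc.stderr, q_err)
        while proc.poll() is None or not q_out.empty() or not q_err.empty():
            time.sleep(0.01)
            for queue, name in ((q_out, "stdout"), (q_err, "stderr")):
                try:
                    print(name, queue.get_nowait().rstrip("\n"))
                except Empty:
                    pass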
@@ -1258,25 +1261,56 @@ class ScriptCollectionCore:

             with self.__run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive) as process:

+                if log_file is not None:
+                    GeneralUtilities.ensure_file_exists(log_file)
                 pid = process.pid
-                for
-
-
-
-
-
-
-
-
-
-
-
-
-
+                for out_line_plain, err_line_plain in ScriptCollectionCore.__read_popen_pipes(process):  # see https://stackoverflow.com/a/57084403/3905529
+
+                    if out_line_plain is not None:
+                        out_line: str = None
+                        if isinstance(out_line_plain, str):
+                            out_line = out_line_plain
+                        elif isinstance(out_line_plain, bytes):
+                            out_line = GeneralUtilities.bytes_to_string(out_line_plain)
+                        else:
+                            raise ValueError(f"Unknown type of output: {str(type(out_line_plain))}")
+
+                        if out_line is not None and GeneralUtilities.string_has_content(out_line):
+                            if out_line.endswith("\n"):
+                                out_line = out_line[:-1]
+                            if print_live_output:
+                                print(out_line, end='\n', file=sys.stdout, flush=True)
+                            if 0 < len(stdout):
+                                stdout = stdout+"\n"
+                            stdout = stdout+out_line
+                            if log_file is not None:
+                                GeneralUtilities.append_line_to_file(log_file, out_line)
+
+                    if err_line_plain is not None:
+                        err_line: str = None
+                        if isinstance(err_line_plain, str):
+                            err_line = err_line_plain
+                        elif isinstance(err_line_plain, bytes):
+                            err_line = GeneralUtilities.bytes_to_string(err_line_plain)
+                        else:
+                            raise ValueError(f"Unknown type of output: {str(type(err_line_plain))}")
+                        if err_line is not None and GeneralUtilities.string_has_content(err_line):
+                            if err_line.endswith("\n"):
+                                err_line = err_line[:-1]
+                            if print_live_output:
+                                print(err_line, end='\n', file=sys.stderr, flush=True)
+                            if 0 < len(stderr):
+                                stderr = stderr+"\n"
+                            stderr = stderr+err_line
+                            if log_file is not None:
+                                GeneralUtilities.append_line_to_file(log_file, err_line)
+
+                exit_code = process.returncode

         if throw_exception_if_exitcode_is_not_zero and exit_code != 0:
             raise ValueError(f"Program '{working_directory}>{program} {arguments_for_log_as_string}' resulted in exitcode {exit_code}. (StdOut: '{stdout}', StdErr: '{stderr}')")

+        GeneralUtilities.assert_condition(exit_code is not None, f"Exitcode of program-run of '{info_for_log}' is None.")
         result = (exit_code, stdout, stderr, pid)
         return result
     except Exception as e:
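The value assembled here is an `(exit_code, stdout, stderr, pid)` tuple; with `throw_exception_if_exitcode_is_not_zero` a nonzero exit code raises a `ValueError` instead. A hedged usage sketch (only the positional program/arguments/working-directory parameters and the return tuple are taken from this diff; other keyword arguments may differ):

    from ScriptCollection.ScriptCollectionCore import ScriptCollectionCore

    sc = ScriptCollectionCore()
    # run_program_argsasarray returns (exit_code, stdout, stderr, pid) as assembled above.
    exit_code, stdout, stderr, pid = sc.run_program_argsasarray("git", ["status"], ".")
    print(exit_code, pid)
    print(stdout)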
@@ -1321,8 +1355,7 @@ class ScriptCollectionCore:
     @GeneralUtilities.check_arguments
     def verify_no_pending_mock_program_calls(self):
         if (len(self.__mocked_program_calls) > 0):
-            raise AssertionError(
-                "The following mock-calls were not called:\n"+",\n ".join([self.__format_mock_program_call(r) for r in self.__mocked_program_calls]))
+            raise AssertionError("The following mock-calls were not called:\n"+",\n ".join([self.__format_mock_program_call(r) for r in self.__mocked_program_calls]))

     @GeneralUtilities.check_arguments
     def __format_mock_program_call(self, r) -> str:
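`verify_no_pending_mock_program_calls` is meant for tests that drive code using ScriptCollection for external program calls: it fails if a registered mock call was never consumed. A rough test-teardown sketch; how mock calls are registered is not part of this diff, so that step is only indicated by a comment:

    # Sketch of a test that verifies all expected external program calls were made.
    import unittest

    from ScriptCollection.ScriptCollectionCore import ScriptCollectionCore

    class ExternalCallTest(unittest.TestCase):
        def setUp(self):
            self.sc = ScriptCollectionCore()
            self.sc.mock_program_calls = True  # property shown earlier in this file
            # ... register the expected program calls here (registration API not shown in this diff)

        def tearDown(self):
            # fails the test if a registered external program call was never made
            self.sc.verify_no_pending_mock_program_calls()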
@@ -1357,8 +1390,7 @@ class ScriptCollectionCore:
                 result = mock_call
                 break
         if result is None:
-            raise LookupError(
-                f"Tried to execute mock-call '{workingdirectory}>{program} {argument}' but no mock-call was defined for that execution")
+            raise LookupError(f"Tried to execute mock-call '{workingdirectory}>{program} {argument}' but no mock-call was defined for that execution")
         else:
             self.__mocked_program_calls.remove(result)
             return (result.exit_code, result.stdout, result.stderr, result.pid)
@@ -1407,8 +1439,7 @@ class ScriptCollectionCore:

     @GeneralUtilities.check_arguments
     def check_system_time_with_default_tolerance(self) -> None:
-        self.check_system_time(
-            self.__get_default_tolerance_for_system_time_equals_internet_time())
+        self.check_system_time(self.__get_default_tolerance_for_system_time_equals_internet_time())

     @GeneralUtilities.check_arguments
     def __get_default_tolerance_for_system_time_equals_internet_time(self) -> timedelta:
@@ -1439,8 +1470,7 @@ class ScriptCollectionCore:
         current_commit = self.git_get_commit_id(repository_folder)
         current_commit_is_on_latest_tag = id_of_latest_tag == current_commit
         if current_commit_is_on_latest_tag:
-            result = self.increment_version(
-                result, False, False, True)
+            result = self.increment_version(result, False, False, True)
         else:
             result = "0.1.0"
         return result
@@ -1637,16 +1667,14 @@ chmod {permission} {link_file}
         # copy binaries
         usr_bin_folder = os.path.join(packagecontent_data_folder, "usr/bin")
         GeneralUtilities.ensure_directory_exists(usr_bin_folder)
-        usr_bin_content_folder = os.path.join(
-            usr_bin_folder, tool_content_folder_name)
+        usr_bin_content_folder = os.path.join(usr_bin_folder, tool_content_folder_name)
         GeneralUtilities.copy_content_of_folder(bin_folder, usr_bin_content_folder)

         # create debfile
         deb_filename = f"{toolname}.deb"
         self.run_program_argsasarray("tar", ["czf", f"../{entireresult_content_folder_name}/control.tar.gz", "*"], packagecontent_control_folder, verbosity=verbosity)
         self.run_program_argsasarray("tar", ["czf", f"../{entireresult_content_folder_name}/data.tar.gz", "*"], packagecontent_data_folder, verbosity=verbosity)
-        self.run_program_argsasarray("ar", ["r", deb_filename, "debian-binary", "control.tar.gz",
-                                     "data.tar.gz"], packagecontent_entireresult_folder, verbosity=verbosity)
+        self.run_program_argsasarray("ar", ["r", deb_filename, "debian-binary", "control.tar.gz", "data.tar.gz"], packagecontent_entireresult_folder, verbosity=verbosity)
         result_file = os.path.join(packagecontent_entireresult_folder, deb_filename)
         shutil.copy(result_file, os.path.join(deb_output_folder, deb_filename))

@@ -1711,3 +1739,127 @@ chmod {permission} {link_file}
         if recursive:
             for subfolder in GeneralUtilities.get_direct_folders_of_folder(folder):
                 self.change_file_extensions(subfolder, from_extension, to_extension, recursive, ignore_case)
+
+    @GeneralUtilities.check_arguments
+    def __add_chapter(self, main_reference_file, reference_content_folder, number: int, chaptertitle: str, content: str = None):
+        if content is None:
+            content = "TXDX add content here"
+        filename = str(number).zfill(2)+"_"+chaptertitle.replace(' ', '-')
+        file = f"{reference_content_folder}/{filename}.md"
+        full_title = f"{number}. {chaptertitle}"
+
+        GeneralUtilities.append_line_to_file(main_reference_file, f"- [{full_title}](./{filename}.md)")
+
+        GeneralUtilities.ensure_file_exists(file)
+        GeneralUtilities.write_text_to_file(file, f"""# {full_title}
+
+{content}
+""".replace("XDX", "ODO"))
+
+    @GeneralUtilities.check_arguments
+    def generate_arc42_reference_template(self, repository: str, productname: str = None):
+        productname: str
+        if productname is None:
+            productname = os.path.basename(repository)
+        reference_root_folder = f"{repository}/Other/Resources/Reference"
+        reference_content_folder = reference_root_folder + "/Technical"
+        if os.path.isdir(reference_root_folder):
+            raise ValueError(f"The folder '{reference_root_folder}' does already exist.")
+        GeneralUtilities.ensure_directory_exists(reference_root_folder)
+        GeneralUtilities.ensure_directory_exists(reference_content_folder)
+        main_reference_file = f"{reference_root_folder}/Reference.md"
+        GeneralUtilities.ensure_file_exists(main_reference_file)
+        GeneralUtilities.write_text_to_file(main_reference_file, f"""# {productname}
+
+TXDX add minimal service-description here.
+
+## Technical documentation
+
+""".replace("XDX", "ODO"))
+        self.__add_chapter(main_reference_file, reference_content_folder, 1, 'Introduction and Goals', """## Overview
+
+TXDX
+
+# Quality goals
+
+TXDX
+
+# Stakeholder
+
+| Name | How to contact | Reason |
+| ---- | -------------- | ------ |""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 2, 'Constraints', """## Technical constraints
+
+| Constraint-identifier | Constraint | Reason |
+| --------------------- | ---------- | ------ |
+
+## Organizational constraints
+
+| Constraint-identifier | Constraint | Reason |
+| --------------------- | ---------- | ------ |""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 3, 'Context and Scope', """## Context
+
+TXDX
+
+## Scope
+
+TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 4, 'Solution Strategy', """TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 5, 'Building Block View', """TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 6, 'Runtime View', """TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 7, 'Deployment View', """## Infrastructure-overview
+
+TXDX
+
+## Infrastructure-requirements
+
+TXDX
+
+## Deployment-proecsses
+
+TXDX
+""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 8, 'Crosscutting Concepts', """TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 9, 'Architectural Decisions', """## Decision-board
+
+| Decision-identifier | Date | Decision | Reason and notes |
+| ------------------- | ---- | -------- | ---------------- |""") # empty because there are no decsions yet
+        self.__add_chapter(main_reference_file, reference_content_folder, 10, 'Quality Requirements', """TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 11, 'Risks and Technical Debt', """## Risks
+
+Currently there are no known risks.
+
+## Technical debts
+
+Currently there are no technical depts.""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 12, 'Glossary', """## Terms
+
+| Term | Meaning |
+| ---- | ------- |
+
+## Abbreviations
+
+| Abbreviation | Meaning |
+| ------------ | ------- |""")
+
+        GeneralUtilities.append_to_file(main_reference_file, """
+
+## Responsibilities
+
+| Responsibility | Name and contact-information |
+| --------------- | ---------------------------- |
+| Pdocut-owner | TXDX |
+| Product-manager | TXDX |
+| Support | TXDX |
+
+## License & Pricing
+
+TXDX
+
+## External resources
+
+- [Repository](TXDX)
+- [Productive-System](TXDX)
+- [QualityCheck-system](TXDX)
+
+""")
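`generate_arc42_reference_template` refuses to run if `Other/Resources/Reference` already exists, then writes `Reference.md` plus twelve numbered chapter files under `Technical/`, each linked from the main file. A small sketch that reproduces only the chapter file names, following the `zfill(2)` naming rule shown above:

    # Chapter file names produced by __add_chapter for the twelve arc42 chapters.
    chapters = [
        (1, 'Introduction and Goals'), (2, 'Constraints'), (3, 'Context and Scope'),
        (4, 'Solution Strategy'), (5, 'Building Block View'), (6, 'Runtime View'),
        (7, 'Deployment View'), (8, 'Crosscutting Concepts'), (9, 'Architectural Decisions'),
        (10, 'Quality Requirements'), (11, 'Risks and Technical Debt'), (12, 'Glossary'),
    ]
    for number, title in chapters:
        print(f"Other/Resources/Reference/Technical/{str(number).zfill(2)}_{title.replace(' ', '-')}.md")
    # e.g. Other/Resources/Reference/Technical/01_Introduction-and-Goals.md
    #      ...  Other/Resources/Reference/Technical/12_Glossary.md
    # plus Other/Resources/Reference/Reference.md as the entry point.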
ScriptCollection/TasksForCommonProjectStructure.py
CHANGED
@@ -241,11 +241,11 @@ class TasksForCommonProjectStructure:
             GeneralUtilities.ensure_directory_exists(web_folder)
             GeneralUtilities.copy_content_of_folder(web_relase_folder, web_folder)
         elif target == "windows":
-
+            windows_release_folder = os.path.join(src_folder, "build/windows/x64/runner/Release")
             windows_folder = os.path.join(artifacts_folder, "BuildResult_Windows")
             GeneralUtilities.ensure_directory_does_not_exist(windows_folder)
             GeneralUtilities.ensure_directory_exists(windows_folder)
-            GeneralUtilities.copy_content_of_folder(
+            GeneralUtilities.copy_content_of_folder(windows_release_folder, windows_folder)
         elif target == "ios":
             raise ValueError("building for ios is not implemented yet")
         elif target == "appbundle":
@@ -1391,7 +1391,7 @@ class TasksForCommonProjectStructure:

         # Check codeunit-conformity
         # TODO check if foldername=="<codeunitname>[.codeunit.xml]" == <codeunitname> in file
-        supported_codeunitspecificationversion = "2.
+        supported_codeunitspecificationversion = "2.9.0" # should always be the latest version of the ProjectTemplates-repository
         codeunit_file = os.path.join(codeunit_folder, f"{codeunit_name}.codeunit.xml")
         if not os.path.isfile(codeunit_file):
             raise ValueError(f'Codeunitfile "{codeunit_file}" does not exist.')
@@ -1420,6 +1420,14 @@ class TasksForCommonProjectStructure:
         if codeunit_name != codeunit_name_in_codeunit_file:
             raise ValueError(f"The folder-name ('{codeunit_name}') is not equal to the codeunit-name ('{codeunit_name_in_codeunit_file}').")

+        # Check owner-name
+        codeunit_ownername_in_codeunit_file = root.xpath('//cps:codeunit/cps:codeunitownername/text()', namespaces=namespaces)[0]
+        GeneralUtilities.assert_condition(GeneralUtilities.string_has_content(codeunit_ownername_in_codeunit_file), "No valid name for codeunitowner given.")
+
+        # Check owner-emailaddress
+        codeunit_owneremailaddress_in_codeunit_file = root.xpath('//cps:codeunit/cps:codeunitowneremailaddress/text()', namespaces=namespaces)[0]
+        GeneralUtilities.assert_condition(GeneralUtilities.string_has_content(codeunit_owneremailaddress_in_codeunit_file), "No valid email-address for codeunitowner given.")
+
         # Check for mandatory files
         files = ["Other/Build/Build.py", "Other/QualityCheck/Linting.py", "Other/Reference/GenerateReference.py"]
         if self.codeunit_has_testable_sourcecode(codeunit_file):
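The two new assertions read `codeunitownername` and `codeunitowneremailaddress` from the `.codeunit.xml` file via XPath with the `cps` namespace prefix and require non-empty values. A minimal sketch of the same lookup on an inline document; the namespace URI and surrounding XML structure are illustrative assumptions, since the actual schema lives in the ProjectTemplates repository, not in this diff:

    # Minimal sketch of the owner checks; only the element names and XPath expressions come from the diff.
    from lxml import etree

    xml = b"""<codeunit xmlns="https://example.invalid/codeunit-schema">
      <codeunitownername>Jane Doe</codeunitownername>
      <codeunitowneremailaddress>jane.doe@example.com</codeunitowneremailaddress>
    </codeunit>"""
    root = etree.fromstring(xml)
    namespaces = {"cps": "https://example.invalid/codeunit-schema"}  # assumed namespace URI
    owner = root.xpath('//cps:codeunit/cps:codeunitownername/text()', namespaces=namespaces)[0]
    email = root.xpath('//cps:codeunit/cps:codeunitowneremailaddress/text()', namespaces=namespaces)[0]
    assert owner.strip() and email.strip()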
@@ -2238,16 +2246,16 @@ class TasksForCommonProjectStructure:
             raise ValueError("Can not download FFMPEG.")

     @GeneralUtilities.check_arguments
-    def ensure_plantuml_is_available(self,
-        self.ensure_file_from_github_assets_is_available(
+    def ensure_plantuml_is_available(self, target_folder: str) -> None:
+        self.ensure_file_from_github_assets_is_available(target_folder, "plantuml", "plantuml", "PlantUML", "plantuml.jar", lambda latest_version: "plantuml.jar")

     @GeneralUtilities.check_arguments
-    def ensure_androidappbundletool_is_available(self,
-        self.ensure_file_from_github_assets_is_available(
+    def ensure_androidappbundletool_is_available(self, target_folder: str) -> None:
+        self.ensure_file_from_github_assets_is_available(target_folder, "google", "bundletool", "AndroidAppBundleTool", "bundletool.jar", lambda latest_version: f"bundletool-all-{latest_version}.jar")

     @GeneralUtilities.check_arguments
-    def ensure_file_from_github_assets_is_available(self,
-        resource_folder = os.path.join(
+    def ensure_file_from_github_assets_is_available(self, target_folder: str, githubuser: str, githubprojectname: str, resource_name: str, local_filename: str, get_filename_on_github) -> None:
+        resource_folder = os.path.join(target_folder, "Other", "Resources", resource_name)
         internet_connection_is_available = GeneralUtilities.internet_connection_is_available()
         file = f"{resource_folder}/{local_filename}"
         file_exists = os.path.isfile(file)
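The refactored helper is generic: the caller supplies the GitHub user and project, the resource name used for the `Other/Resources/<resource_name>` folder, the local file name, and a callback mapping the latest release version to the asset name on GitHub; the two wrappers above show real parameterizations. A hedged example for a made-up tool (the user, project, and asset names are invented for illustration):

    from ScriptCollection.TasksForCommonProjectStructure import TasksForCommonProjectStructure

    t = TasksForCommonProjectStructure()
    # Hypothetical: fetch "sometool-<version>-cli.jar" from github.com/exampleuser/sometool
    # into <repo>/Other/Resources/SomeTool/sometool.jar if it is not already there.
    t.ensure_file_from_github_assets_is_available(
        "/path/to/repository",          # target_folder (example path)
        "exampleuser", "sometool",      # GitHub user and project (invented)
        "SomeTool", "sometool.jar",     # resource folder name and local file name
        lambda latest_version: f"sometool-{latest_version}-cli.jar")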
@@ -2266,10 +2274,10 @@ class TasksForCommonProjectStructure:
             raise ValueError(f"Can not download {resource_name}.")

     @GeneralUtilities.check_arguments
-    def generate_svg_files_from_plantuml_files(self,
-        self.ensure_plantuml_is_available(
-        plant_uml_folder = os.path.join(
-        files_folder = os.path.join(
+    def generate_svg_files_from_plantuml_files(self, target_folder: str) -> None:
+        self.ensure_plantuml_is_available(target_folder)
+        plant_uml_folder = os.path.join(target_folder, "Other", "Resources", "PlantUML")
+        files_folder = os.path.join(target_folder, "Other/Reference")
         sc = ScriptCollectionCore()
         for file in GeneralUtilities.get_all_files_of_folder(files_folder):
             if file.endswith(".plantuml"):
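With the new signature only the repository folder is needed: PlantUML is fetched into `Other/Resources/PlantUML` on demand and the `*.plantuml` files in `Other/Reference` are rendered to SVG. A short sketch (the path is an example):

    from ScriptCollection.TasksForCommonProjectStructure import TasksForCommonProjectStructure

    # Renders the *.plantuml files in <repo>/Other/Reference to SVG,
    # downloading plantuml.jar into <repo>/Other/Resources/PlantUML first if needed.
    TasksForCommonProjectStructure().generate_svg_files_from_plantuml_files("/path/to/repository")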
{ScriptCollection-3.5.14.dist-info → ScriptCollection-3.5.16.dist-info}/RECORD
CHANGED
@@ -1,16 +1,16 @@
-ScriptCollection/Executables.py,sha256=
+ScriptCollection/Executables.py,sha256=Ui9omPvmBjnsgFZXpeXMoZXmk17sjPyO1mS_7B7w8aY,19951
 ScriptCollection/GeneralUtilities.py,sha256=AElXv2NO30cTw-Qs3qVmO-YCOQ5FvBQM3RZMywuKQ_Y,35121
 ScriptCollection/ProcessesRunner.py,sha256=3mu4ZxzZleQo0Op6o9EYTCFiJfb6kx5ov2YfZfT89mU,1395
 ScriptCollection/ProgramRunnerBase.py,sha256=7QAjoqOz6XPmJH19F2k-Z1fFQB_uZnPFvn-T54IJcHQ,2324
 ScriptCollection/ProgramRunnerEpew.py,sha256=C2Rs3YWOWWWJct7XmKphp5CF1tf0j4Fp-ljV2drLTfs,6349
 ScriptCollection/ProgramRunnerPopen.py,sha256=G3LgQUVCfaq7XjBsGzalElH31Hbr0etttGR2_H87YzA,3512
 ScriptCollection/RPStream.py,sha256=NRRHL3YSP3D9MuAV2jB_--0KUKCsvJGxeKnxgrRZ9kY,1545
-ScriptCollection/ScriptCollectionCore.py,sha256=
-ScriptCollection/TasksForCommonProjectStructure.py,sha256
+ScriptCollection/ScriptCollectionCore.py,sha256=AAUVqYuYN-1YDeyxzIO8TE7Ip4uxw8IJQBEPBh3mHW0,100035
+ScriptCollection/TasksForCommonProjectStructure.py,sha256=uSVSRCqtSRW7yYXkduNAFxVMY-tsGYH9Lh0WCHhd1oE,184749
 ScriptCollection/UpdateCertificates.py,sha256=Eynbgu7k9jLxApP2D_8Il77B6BFjJap6K7oTeEAZYbk,7790
 ScriptCollection/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ScriptCollection-3.5.
-ScriptCollection-3.5.
-ScriptCollection-3.5.
-ScriptCollection-3.5.
-ScriptCollection-3.5.
+ScriptCollection-3.5.16.dist-info/METADATA,sha256=YT_oAn2upPHtHIkXZWIpdAyILnQdy32sCxZTtLJRGz0,7680
+ScriptCollection-3.5.16.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ScriptCollection-3.5.16.dist-info/entry_points.txt,sha256=yASwR6hWZ_b5d4W49YeX1htD8ngfWbwgjpfQiJdtUAU,2322
+ScriptCollection-3.5.16.dist-info/top_level.txt,sha256=hY2hOVH0V0Ce51WB76zKkIWTUNwMUdHo4XDkR2vYVwg,17
+ScriptCollection-3.5.16.dist-info/RECORD,,
{ScriptCollection-3.5.14.dist-info → ScriptCollection-3.5.16.dist-info}/entry_points.txt
CHANGED
@@ -11,6 +11,7 @@ sccreateisofilewithobfuscatedfiles = ScriptCollection.Executables:CreateISOFileW
 sccreatesimplemergewithoutrelease = ScriptCollection.Executables:CreateSimpleMergeWithoutRelease
 scextractpdfpages = ScriptCollection.Executables:ExtractPDFPages
 scfilenameobfuscator = ScriptCollection.Executables:FilenameObfuscator
+scgeneratearc42referencetemplate = ScriptCollection.Executables:GenerateARC42ReferenceTemplate
 scgeneratecertificate = ScriptCollection.Executables:GenerateCertificate
 scgeneratecertificateauthority = ScriptCollection.Executables:GenerateCertificateAuthority
 scgeneratecertificatesignrequest = ScriptCollection.Executables:GenerateCertificateSignRequest
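The added line registers `GenerateARC42ReferenceTemplate` as the `scgeneratearc42referencetemplate` console script. A quick way to confirm the entry point once 3.5.16 is installed (standard library only; the `group`/`name` selection requires Python 3.10+):

    from importlib.metadata import entry_points

    # Look up the console script added in this version (requires ScriptCollection 3.5.16 installed).
    (ep,) = entry_points(group="console_scripts", name="scgeneratearc42referencetemplate")
    print(ep.value)  # ScriptCollection.Executables:GenerateARC42ReferenceTemplate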
{ScriptCollection-3.5.14.dist-info → ScriptCollection-3.5.16.dist-info}/WHEEL
File without changes
{ScriptCollection-3.5.14.dist-info → ScriptCollection-3.5.16.dist-info}/top_level.txt
File without changes