ScriptCollection 3.5.13__py3-none-any.whl → 3.5.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ScriptCollection/Executables.py +13 -0
- ScriptCollection/GeneralUtilities.py +1 -1
- ScriptCollection/ProgramRunnerPopen.py +10 -10
- ScriptCollection/ScriptCollectionCore.py +296 -242
- ScriptCollection/TasksForCommonProjectStructure.py +12 -12
- {ScriptCollection-3.5.13.dist-info → ScriptCollection-3.5.15.dist-info}/METADATA +1 -1
- ScriptCollection-3.5.15.dist-info/RECORD +16 -0
- {ScriptCollection-3.5.13.dist-info → ScriptCollection-3.5.15.dist-info}/WHEEL +1 -1
- {ScriptCollection-3.5.13.dist-info → ScriptCollection-3.5.15.dist-info}/entry_points.txt +1 -0
- ScriptCollection-3.5.13.dist-info/RECORD +0 -16
- {ScriptCollection-3.5.13.dist-info → ScriptCollection-3.5.15.dist-info}/top_level.txt +0 -0
ScriptCollection/Executables.py
CHANGED
@@ -356,3 +356,16 @@ def ChangeFileExtensions() -> int:
     args = parser.parse_args()
     ScriptCollectionCore().change_file_extensions(args.folder, args.source_extension, args.target_extension, args.recursive, args.ignore_case)
     return 0
+
+
+def GenerateARC42ReferenceTemplate() -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-f', '--folder', required=False)
+    parser.add_argument('-p', '--productname', required=False)
+    args = parser.parse_args()
+
+    folder = args.folder
+    if folder is None:
+        folder = os.getcwd()
+    ScriptCollectionCore().generate_arc42_reference_template(folder, args.productname)
+    return 0
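The new `GenerateARC42ReferenceTemplate` entry point reads an optional `--folder` and `--productname`, falls back to the current working directory, and delegates to `ScriptCollectionCore.generate_arc42_reference_template` (added further below in `ScriptCollectionCore.py`). A rough programmatic equivalent of what the entry point does — the import path is assumed from the wheel layout, and the console-script name added to `entry_points.txt` is not visible in this diff:

```python
# Sketch only: mirrors the argparse entry point shown above.
import os
from ScriptCollection.ScriptCollectionCore import ScriptCollectionCore  # assumed import path

folder = os.getcwd()  # used when --folder is omitted
ScriptCollectionCore().generate_arc42_reference_template(folder, "MyProduct")  # "MyProduct" is a placeholder
```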
ScriptCollection/GeneralUtilities.py
CHANGED
@@ -67,7 +67,7 @@ class GeneralUtilities:
         @functools.wraps(func)
         def new_func(*args, **kwargs):
             warnings.simplefilter('always', DeprecationWarning)
-            warnings.warn(f"Call to deprecated function {func.__name__}",
+            warnings.warn(f"Call to deprecated function {func.__name__}", category=DeprecationWarning, stacklevel=2)
             warnings.simplefilter('default', DeprecationWarning)
             return func(*args, **kwargs)
         return new_func
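The only change in `GeneralUtilities.py` passes `category` and `stacklevel` to `warnings.warn`, so the message is classified as a `DeprecationWarning` and attributed to the caller of the deprecated function rather than to the decorator wrapper. A minimal self-contained sketch of the same pattern (not the package's own code):

```python
import functools
import warnings


def deprecated(func):
    """Mark a function as deprecated; stacklevel=2 points the warning at the caller."""
    @functools.wraps(func)
    def new_func(*args, **kwargs):
        warnings.simplefilter('always', DeprecationWarning)
        warnings.warn(f"Call to deprecated function {func.__name__}", category=DeprecationWarning, stacklevel=2)
        warnings.simplefilter('default', DeprecationWarning)
        return func(*args, **kwargs)
    return new_func


@deprecated
def old_api() -> int:
    return 42


old_api()  # emits "DeprecationWarning: Call to deprecated function old_api" at this call site
```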
ScriptCollection/ProgramRunnerPopen.py
CHANGED
@@ -7,7 +7,7 @@ from .ProgramRunnerBase import ProgramRunnerBase
 class ProgramRunnerPopen(ProgramRunnerBase):

     @GeneralUtilities.check_arguments
-    def run_program_argsasarray_async_helper(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, custom_argument: object = None, interactive:bool=False) -> Popen:
+    def run_program_argsasarray_async_helper(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, custom_argument: object = None, interactive: bool = False) -> Popen:
         arguments_for_process = [program]
         arguments_for_process.extend(arguments_as_array)
         # "shell=True" is not allowed because it is not recommended and also something like
@@ -15,9 +15,9 @@ class ProgramRunnerPopen(ProgramRunnerBase):
         # would not be possible anymore because the ampersand will be treated as shell-command.
         try:
             if interactive:
-                result = Popen(arguments_for_process, cwd=working_directory
+                result = Popen(arguments_for_process, cwd=working_directory, stdout=PIPE, stderr=PIPE, shell=False, text=True, stdin=sys.stdin)  # pylint: disable=consider-using-with
             else:
-                result = Popen(arguments_for_process, cwd=working_directory, stdout=PIPE, stderr=PIPE, shell=False)  # pylint: disable=consider-using-with
+                result = Popen(arguments_for_process, cwd=working_directory, stdout=PIPE, stderr=PIPE, shell=False, text=True)  # pylint: disable=consider-using-with
         except FileNotFoundError as fileNotFoundError:
             raise FileNotFoundError(f"Starting '{program}' in '{working_directory}' resulted in a FileNotFoundError: '{fileNotFoundError.filename}'")
         return result
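Both `Popen` calls now pass `text=True`, so `process.stdout` and `process.stderr` yield `str` lines instead of `bytes`, and the interactive variant additionally forwards `sys.stdin` to the child process. A quick illustration of the `text=True` difference, independent of this package:

```python
import sys
from subprocess import Popen, PIPE

# Default (binary) pipes deliver bytes.
with Popen([sys.executable, "-c", "print('hello')"], stdout=PIPE) as p:
    assert isinstance(p.stdout.readline(), bytes)

# text=True delivers str with universal newline handling.
with Popen([sys.executable, "-c", "print('hello')"], stdout=PIPE, text=True) as p:
    assert isinstance(p.stdout.readline(), str)
```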
@@ -34,18 +34,18 @@ class ProgramRunnerPopen(ProgramRunnerBase):
         return result

     @GeneralUtilities.check_arguments
-    def run_program_argsasarray(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, custom_argument: object = None, interactive:bool=False) -> tuple[int, str, str, int]:
-        process: Popen = self.run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, custom_argument,interactive)
+    def run_program_argsasarray(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, custom_argument: object = None, interactive: bool = False) -> tuple[int, str, str, int]:
+        process: Popen = self.run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, custom_argument, interactive)
         return self.wait(process, custom_argument)

     @GeneralUtilities.check_arguments
-    def run_program(self, program: str, arguments: str = "", working_directory: str = None, custom_argument: object = None, interactive:bool=False) -> tuple[int, str, str, int]:
+    def run_program(self, program: str, arguments: str = "", working_directory: str = None, custom_argument: object = None, interactive: bool = False) -> tuple[int, str, str, int]:
         return self.run_program_argsasarray(program, GeneralUtilities.arguments_to_array(arguments), working_directory, custom_argument)

     @GeneralUtilities.check_arguments
-    def run_program_argsasarray_async(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, custom_argument: object = None, interactive:bool=False) -> int:
-        return self.run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, custom_argument,interactive).pid
+    def run_program_argsasarray_async(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, custom_argument: object = None, interactive: bool = False) -> int:
+        return self.run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, custom_argument, interactive).pid

     @GeneralUtilities.check_arguments
-    def run_program_async(self, program: str, arguments: str = "", working_directory: str = None, custom_argument: object = None, interactive:bool=False) -> int:
-        return self.run_program_argsasarray_async(program, GeneralUtilities.arguments_to_array(arguments), working_directory, custom_argument,interactive)
+    def run_program_async(self, program: str, arguments: str = "", working_directory: str = None, custom_argument: object = None, interactive: bool = False) -> int:
+        return self.run_program_argsasarray_async(program, GeneralUtilities.arguments_to_array(arguments), working_directory, custom_argument, interactive)
ScriptCollection/ScriptCollectionCore.py
CHANGED
@@ -1,19 +1,20 @@
-import
+import sys
 from datetime import timedelta, datetime
 import json
 import binascii
 import filecmp
 import hashlib
-
+import time
+from io import BytesIO
 import itertools
 import math
 import os
+from queue import Queue, Empty
+from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
-from random import randrange
 from subprocess import Popen
 import re
 import shutil
-import traceback
 import uuid
 import tempfile
 import io
@@ -29,7 +30,7 @@ from .ProgramRunnerBase import ProgramRunnerBase
 from .ProgramRunnerPopen import ProgramRunnerPopen
 from .ProgramRunnerEpew import ProgramRunnerEpew, CustomEpewArgument

-version = "3.5.13"
+version = "3.5.15"
 __version__ = version


@@ -40,11 +41,12 @@ class ScriptCollectionCore:
     mock_program_calls: bool = False
     # The purpose of this property is to use it when testing your code which uses scriptcollection for external program-calls.
     execute_program_really_if_no_mock_call_is_defined: bool = False
-    __mocked_program_calls: list =
+    __mocked_program_calls: list = None
     program_runner: ProgramRunnerBase = None

     def __init__(self):
         self.program_runner = ProgramRunnerPopen()
+        self.__mocked_program_calls = list[ScriptCollectionCore.__MockProgramCall]()

     @staticmethod
     @GeneralUtilities.check_arguments
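`__mocked_program_calls` is no longer populated at class level; each instance now creates its own list in `__init__`. This avoids the usual pitfall of a mutable class attribute being shared across instances, sketched here with generic classes (not the package's code):

```python
class Recorder:
    calls: list = []  # class-level mutable default: shared by every instance


class FixedRecorder:
    calls: list = None

    def __init__(self):
        self.calls = []  # per-instance list, like __mocked_program_calls in 3.5.15


a, b = Recorder(), Recorder()
a.calls.append("x")
print(b.calls)  # ['x'] -> state leaked between instances

c, d = FixedRecorder(), FixedRecorder()
c.calls.append("x")
print(d.calls)  # [] -> isolated
```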
@@ -59,8 +61,7 @@ class ScriptCollectionCore:
             errorsonly_argument = ""
         else:
             errorsonly_argument = " --errors-only"
-        (exit_code, stdout, stderr, _) = self.run_program("pylint", filename +
-                                                          errorsonly_argument, working_directory, throw_exception_if_exitcode_is_not_zero=False)
+        (exit_code, stdout, stderr, _) = self.run_program("pylint", filename + errorsonly_argument, working_directory, throw_exception_if_exitcode_is_not_zero=False)
         if (exit_code != 0):
             errors.append(f"Linting-issues of {file}:")
             errors.append(f"Pylint-exitcode: {exit_code}")
@@ -74,18 +75,15 @@ class ScriptCollectionCore:

     @GeneralUtilities.check_arguments
     def replace_version_in_dockerfile_file(self, dockerfile: str, new_version_value: str) -> None:
-        GeneralUtilities.write_text_to_file(dockerfile, re.sub(
-            "ARG Version=\"\\d+\\.\\d+\\.\\d+\"", f"ARG Version=\"{new_version_value}\"", GeneralUtilities.read_text_from_file(dockerfile)))
+        GeneralUtilities.write_text_to_file(dockerfile, re.sub("ARG Version=\"\\d+\\.\\d+\\.\\d+\"", f"ARG Version=\"{new_version_value}\"", GeneralUtilities.read_text_from_file(dockerfile)))

     @GeneralUtilities.check_arguments
     def replace_version_in_python_file(self, file: str, new_version_value: str):
-        GeneralUtilities.write_text_to_file(file, re.sub(
-            "version = \"\\d+\\.\\d+\\.\\d+\"", f"version = \"{new_version_value}\"", GeneralUtilities.read_text_from_file(file)))
+        GeneralUtilities.write_text_to_file(file, re.sub("version = \"\\d+\\.\\d+\\.\\d+\"", f"version = \"{new_version_value}\"", GeneralUtilities.read_text_from_file(file)))

     @GeneralUtilities.check_arguments
     def replace_version_in_ini_file(self, file: str, new_version_value: str):
-        GeneralUtilities.write_text_to_file(file, re.sub(
-            "version = \\d+\\.\\d+\\.\\d+", f"version = {new_version_value}", GeneralUtilities.read_text_from_file(file)))
+        GeneralUtilities.write_text_to_file(file, re.sub("version = \\d+\\.\\d+\\.\\d+", f"version = {new_version_value}", GeneralUtilities.read_text_from_file(file)))

     @GeneralUtilities.check_arguments
     def replace_version_in_nuspec_file(self, nuspec_file: str, new_version: str) -> None:
@@ -96,8 +94,7 @@ class ScriptCollectionCore:
         if pattern.match(new_version):
             GeneralUtilities.write_text_to_file(nuspec_file, re.sub(f"<version>{versionregex}<\\/version>", f"<version>{new_version}</version>", GeneralUtilities.read_text_from_file(nuspec_file)))
         else:
-            raise ValueError(
-                f"Version '{new_version}' does not match version-regex '{versiononlyregex}'")
+            raise ValueError(f"Version '{new_version}' does not match version-regex '{versiononlyregex}'")

     @GeneralUtilities.check_arguments
     def replace_version_in_csproj_file(self, csproj_file: str, current_version: str):
@@ -106,25 +103,20 @@ class ScriptCollectionCore:
         pattern = re.compile(versiononlyregex)
         if pattern.match(current_version):
             for tag in ["Version", "AssemblyVersion", "FileVersion"]:
-                GeneralUtilities.write_text_to_file(csproj_file, re.sub(
-                    f"<{tag}>{versionregex}(.\\d+)?<\\/{tag}>", f"<{tag}>{current_version}</{tag}>", GeneralUtilities.read_text_from_file(csproj_file)))
+                GeneralUtilities.write_text_to_file(csproj_file, re.sub(f"<{tag}>{versionregex}(.\\d+)?<\\/{tag}>", f"<{tag}>{current_version}</{tag}>", GeneralUtilities.read_text_from_file(csproj_file)))
         else:
-            raise ValueError(
-                f"Version '{current_version}' does not match version-regex '{versiononlyregex}'")
+            raise ValueError(f"Version '{current_version}' does not match version-regex '{versiononlyregex}'")

     @GeneralUtilities.check_arguments
     def push_nuget_build_artifact(self, nupkg_file: str, registry_address: str, api_key: str, verbosity: int = 1):
         nupkg_file_name = os.path.basename(nupkg_file)
         nupkg_file_folder = os.path.dirname(nupkg_file)
-        self.run_program(
-            "dotnet", f"nuget push {nupkg_file_name} --force-english-output --source {registry_address} --api-key {api_key}", nupkg_file_folder, verbosity)
+        self.run_program("dotnet", f"nuget push {nupkg_file_name} --force-english-output --source {registry_address} --api-key {api_key}", nupkg_file_folder, verbosity)

     @GeneralUtilities.check_arguments
     def dotnet_build(self, repository_folder: str, projectname: str, configuration: str):
-        self.run_program(
-
-        self.run_program(
-            "dotnet", f"build {projectname}/{projectname}.csproj -c {configuration}", repository_folder)
+        self.run_program("dotnet", f"clean -c {configuration}", repository_folder)
+        self.run_program("dotnet", f"build {projectname}/{projectname}.csproj -c {configuration}", repository_folder)

     @GeneralUtilities.check_arguments
     def find_file_by_extension(self, folder: str, extension: str):
@@ -162,8 +154,7 @@ class ScriptCollectionCore:
             subfolder_argument = ""
         else:
             subfolder_argument = f" -- {subfolder}"
-        log_result = self.run_program(
-            "git", f'log --pretty=%aN{space_character}%aE%n%cN{space_character}%cE HEAD{subfolder_argument}', repository_folder, verbosity=0)
+        log_result = self.run_program("git", f'log --pretty=%aN{space_character}%aE%n%cN{space_character}%cE HEAD{subfolder_argument}', repository_folder, verbosity=0)
         plain_content: list[str] = list(
             set([line for line in log_result[1].split("\n") if len(line) > 0]))
         result: list[tuple[str, str]] = []
@@ -179,8 +170,7 @@ class ScriptCollectionCore:
     def get_commit_ids_between_dates(self, repository_folder: str, since: datetime, until: datetime, ignore_commits_which_are_not_in_history_of_head: bool = True) -> None:
         since_as_string = self.__datetime_to_string_for_git(since)
         until_as_string = self.__datetime_to_string_for_git(until)
-        result = filter(lambda line: not GeneralUtilities.string_is_none_or_whitespace(line), self.run_program(
-            "git", f'log --since "{since_as_string}" --until "{until_as_string}" --pretty=format:"%H" --no-patch', repository_folder, throw_exception_if_exitcode_is_not_zero=True)[1].split("\n").replace("\r", ""))
+        result = filter(lambda line: not GeneralUtilities.string_is_none_or_whitespace(line), self.run_program("git", f'log --since "{since_as_string}" --until "{until_as_string}" --pretty=format:"%H" --no-patch', repository_folder, throw_exception_if_exitcode_is_not_zero=True)[1].split("\n").replace("\r", ""))
         if ignore_commits_which_are_not_in_history_of_head:
             result = [commit_id for commit_id in result if self.git_commit_is_ancestor(
                 repository_folder, commit_id)]
@@ -192,8 +182,7 @@ class ScriptCollectionCore:

     @GeneralUtilities.check_arguments
     def git_commit_is_ancestor(self, repository_folder: str, ancestor: str, descendant: str = "HEAD") -> bool:
-        exit_code = self.run_program_argsasarray(
-            "git", ["merge-base", "--is-ancestor", ancestor, descendant], repository_folder, throw_exception_if_exitcode_is_not_zero=False)[0]
+        exit_code = self.run_program_argsasarray("git", ["merge-base", "--is-ancestor", ancestor, descendant], repository_folder, throw_exception_if_exitcode_is_not_zero=False)[0]
         if exit_code == 0:
             return True
         elif exit_code == 1:
@@ -203,8 +192,7 @@ class ScriptCollectionCore:

     @GeneralUtilities.check_arguments
     def __git_changes_helper(self, repository_folder: str, arguments_as_array: list[str]) -> bool:
-        lines = GeneralUtilities.string_to_lines(self.run_program_argsasarray(
-            "git", arguments_as_array, repository_folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)[1], False)
+        lines = GeneralUtilities.string_to_lines(self.run_program_argsasarray("git", arguments_as_array, repository_folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)[1], False)
         for line in lines:
             if GeneralUtilities.string_has_content(line):
                 return True
@@ -245,8 +233,7 @@ class ScriptCollectionCore:

     @GeneralUtilities.check_arguments
     def git_get_commit_date(self, repository_folder: str, commit: str = "HEAD") -> datetime:
-        result: tuple[int, str, str, int] = self.run_program_argsasarray(
-            "git", ["show", "-s", "--format=%ci", commit], repository_folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
+        result: tuple[int, str, str, int] = self.run_program_argsasarray("git", ["show", "-s", "--format=%ci", commit], repository_folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
         date_as_string = result[1].replace('\n', '')
         result = datetime.strptime(date_as_string, '%Y-%m-%d %H:%M:%S %z')
         return result
@@ -359,8 +346,7 @@ class ScriptCollectionCore:
         self.run_program_argsasarray("git", ['checkout', '.'], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)

     @GeneralUtilities.check_arguments
-    def git_commit(self, directory: str, message: str, author_name: str = None, author_email: str = None, stage_all_changes: bool = True,
-                   no_changes_behavior: int = 0) -> str:
+    def git_commit(self, directory: str, message: str, author_name: str = None, author_email: str = None, stage_all_changes: bool = True, no_changes_behavior: int = 0) -> str:
         # no_changes_behavior=0 => No commit
         # no_changes_behavior=1 => Commit anyway
         # no_changes_behavior=2 => Exception
@@ -410,7 +396,7 @@ class ScriptCollectionCore:
     @GeneralUtilities.check_arguments
     def git_checkout(self, directory: str, branch: str) -> None:
         self.run_program_argsasarray("git", ["checkout", branch], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
-        self.run_program_argsasarray("git", ["submodule", "update", "--recursive"],
+        self.run_program_argsasarray("git", ["submodule", "update", "--recursive"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)

     @GeneralUtilities.check_arguments
     def git_merge_abort(self, directory: str) -> None:
@@ -450,7 +436,7 @@ class ScriptCollectionCore:
             self.git_fetch(target_directory)
         else:
             # clone
-            self.git_clone(target_repository, source_repository,
+            self.git_clone(target_repository, source_repository, include_submodules=True, mirror=True)

     def get_git_submodules(self, folder: str) -> list[str]:
         e = self.run_program("git", "submodule status", folder)
@@ -472,8 +458,7 @@ class ScriptCollectionCore:
             return True
         if (exit_code == 1):
             return False
-        raise ValueError(
-            f"Unable to calculate whether '{file_in_repository}' in repository '{repositorybasefolder}' is ignored due to git-exitcode {exit_code}.")
+        raise ValueError(f"Unable to calculate whether '{file_in_repository}' in repository '{repositorybasefolder}' is ignored due to git-exitcode {exit_code}.")

     @GeneralUtilities.check_arguments
     def git_discard_all_changes(self, repository: str) -> None:
@@ -553,8 +538,7 @@ class ScriptCollectionCore:
         for file_or_folder, item_type in items.items():
             truncated_file = file_or_folder[path_prefix:]
             if (filter_function is None or filter_function(folder, truncated_file)):
-                owner_and_permisssion = self.get_file_owner_and_file_permission(
-                    file_or_folder)
+                owner_and_permisssion = self.get_file_owner_and_file_permission(file_or_folder)
                 user = owner_and_permisssion[0]
                 permissions = owner_and_permisssion[1]
                 lines.append(f"{truncated_file};{item_type};{user};{permissions}")
@@ -579,8 +563,7 @@ class ScriptCollectionCore:
             foldername = os.path.basename(subfolder)
             if ".git" in foldername:
                 new_name = foldername.replace(".git", ".gitx")
-                subfolder2 = os.path.join(
-                    str(Path(subfolder).parent), new_name)
+                subfolder2 = os.path.join(str(Path(subfolder).parent), new_name)
                 os.rename(subfolder, subfolder2)
                 renamed_items[subfolder2] = subfolder
             else:
@@ -921,29 +904,6 @@ class ScriptCollectionCore:
                 line[0], line[1], line[2], line[3], line[4])
             GeneralUtilities.write_message_to_stdout(separator_line)

-    @GeneralUtilities.check_arguments
-    def SCUploadFileToFileHost(self, file: str, host: str) -> int:
-        try:
-            GeneralUtilities.write_message_to_stdout(
-                self.upload_file_to_file_host(file, host))
-            return 0
-        except Exception as exception:
-            GeneralUtilities.write_exception_to_stderr_with_traceback(exception, traceback)
-            return 1
-
-    @GeneralUtilities.check_arguments
-    def SCFileIsAvailableOnFileHost(self, file: str) -> int:
-        try:
-            if self.file_is_available_on_file_host(file):
-                GeneralUtilities.write_message_to_stdout(f"'{file}' is available")
-                return 0
-            else:
-                GeneralUtilities.write_message_to_stdout(f"'{file}' is not available")
-                return 1
-        except Exception as exception:
-            GeneralUtilities.write_exception_to_stderr_with_traceback(exception, traceback)
-            return 2
-
     @GeneralUtilities.check_arguments
     def SCCalculateBitcoinBlockHash(self, block_version_number: str, previousblockhash: str, transactionsmerkleroot: str, timestamp: str, target: str, nonce: str) -> str:
         # Example-values:
@@ -994,13 +954,11 @@ class ScriptCollectionCore:
                 with (open(full_path, "rb").read()) as text_io_wrapper:
                     content = text_io_wrapper
                     path_in_iso = '/' + files_directory + \
-                        self.__adjust_folder_name(
-                            full_path[len(folder)::1]).upper()
+                        self.__adjust_folder_name(full_path[len(folder)::1]).upper()
                     if path_in_iso not in created_directories:
                         iso.add_directory(path_in_iso)
                         created_directories.append(path_in_iso)
-                    iso.add_fp(BytesIO(content), len(content),
-                               path_in_iso + '/' + file.upper() + ';1')
+                    iso.add_fp(BytesIO(content), len(content), path_in_iso + '/' + file.upper() + ';1')
         iso.write(iso_file)
         iso.close()

@@ -1048,8 +1006,7 @@ class ScriptCollectionCore:
                 new_file_name = os.path.join(
                     os.path.dirname(file), new_file_name_without_path)
                 os.rename(file, new_file_name)
-                GeneralUtilities.append_line_to_file(namemappingfile, os.path.basename(
-                    file) + ";" + new_file_name_without_path + ";" + hash_value)
+                GeneralUtilities.append_line_to_file(namemappingfile, os.path.basename(file) + ";" + new_file_name_without_path + ";" + hash_value)
         else:
             raise ValueError(f"Directory not found: '{inputfolder}'")

@@ -1066,15 +1023,12 @@ class ScriptCollectionCore:
         for line in reversed(lines):
             if not GeneralUtilities.string_is_none_or_whitespace(line):
                 if "RunningHealthy (" in line:  # TODO use regex
-                    GeneralUtilities.write_message_to_stderr(
-                        f"Healthy running due to line '{line}' in file '{file}'.")
+                    GeneralUtilities.write_message_to_stderr(f"Healthy running due to line '{line}' in file '{file}'.")
                     return 0
                 else:
-                    GeneralUtilities.write_message_to_stderr(
-                        f"Not healthy running due to line '{line}' in file '{file}'.")
+                    GeneralUtilities.write_message_to_stderr(f"Not healthy running due to line '{line}' in file '{file}'.")
                     return 1
-        GeneralUtilities.write_message_to_stderr(
-            f"No valid line found for healthycheck in file '{file}'.")
+        GeneralUtilities.write_message_to_stderr(f"No valid line found for healthycheck in file '{file}'.")
         return 2

     @GeneralUtilities.check_arguments
@@ -1132,44 +1086,6 @@ class ScriptCollectionCore:
         tor_version = version_with_overhead.split("~")[0]
         return tor_version

-    @GeneralUtilities.check_arguments
-    def upload_file_to_file_host(self, file: str, host: str) -> int:
-        if (host is None):
-            return self.upload_file_to_random_filesharing_service(file)
-        elif host == "anonfiles.com":
-            return self.upload_file_to_anonfiles(file)
-        elif host == "bayfiles.com":
-            return self.upload_file_to_bayfiles(file)
-        GeneralUtilities.write_message_to_stderr("Unknown host: "+host)
-        return 1
-
-    @GeneralUtilities.check_arguments
-    def upload_file_to_random_filesharing_service(self, file: str) -> int:
-        host = randrange(2)
-        if host == 0:
-            return self.upload_file_to_anonfiles(file)
-        if host == 1:
-            return self.upload_file_to_bayfiles(file)
-        return 1
-
-    @GeneralUtilities.check_arguments
-    def upload_file_to_anonfiles(self, file) -> int:
-        return self.upload_file_by_using_simple_curl_request("https://api.anonfiles.com/upload", file)
-
-    @GeneralUtilities.check_arguments
-    def upload_file_to_bayfiles(self, file) -> int:
-        return self.upload_file_by_using_simple_curl_request("https://api.bayfiles.com/upload", file)
-
-    @GeneralUtilities.check_arguments
-    def upload_file_by_using_simple_curl_request(self, api_url: str, file: str) -> int:
-        # TODO implement
-        return 1
-
-    @GeneralUtilities.check_arguments
-    def file_is_available_on_file_host(self, file) -> int:
-        # TODO implement
-        return 1
-
     def run_testcases_for_python_project(self, repository_folder: str):
         self.run_program("coverage", "run -m pytest", repository_folder)
         self.run_program("coverage", "xml", repository_folder)
@@ -1180,13 +1096,12 @@ class ScriptCollectionCore:

     @GeneralUtilities.check_arguments
     def get_file_permission(self, file: str) -> str:
-        """This function returns an usual octet-triple, for example "
-        ls_output = self.
+        """This function returns an usual octet-triple, for example "700"."""
+        ls_output: str = self.run_ls_for_folder(file)
         return self.__get_file_permission_helper(ls_output)

     @GeneralUtilities.check_arguments
-    def __get_file_permission_helper(self,
-        permissions = ' '.join(ls_output.split()).split(' ')[0][1:]
+    def __get_file_permission_helper(self, permissions: str) -> str:
         return str(self.__to_octet(permissions[0:3]))+str(self.__to_octet(permissions[3:6]))+str(self.__to_octet(permissions[6:9]))

     @GeneralUtilities.check_arguments
@@ -1203,33 +1118,44 @@ class ScriptCollectionCore:
     @GeneralUtilities.check_arguments
     def get_file_owner(self, file: str) -> str:
         """This function returns the user and the group in the format "user:group"."""
-        ls_output = self.
+        ls_output: str = self.run_ls_for_folder(file)
         return self.__get_file_owner_helper(ls_output)

     @GeneralUtilities.check_arguments
     def __get_file_owner_helper(self, ls_output: str) -> str:
-
-
-            return f"{splitted[2]}:{splitted[3]}"
-        except Exception as exception:
-            raise ValueError(
-                f"ls-output '{ls_output}' not parsable") from exception
+        splitted = ls_output.split()
+        return f"{splitted[2]}:{splitted[3]}"

     @GeneralUtilities.check_arguments
     def get_file_owner_and_file_permission(self, file: str) -> str:
-        ls_output = self.
+        ls_output: str = self.run_ls_for_folder(file)
         return [self.__get_file_owner_helper(ls_output), self.__get_file_permission_helper(ls_output)]

     @GeneralUtilities.check_arguments
-    def
-
-        GeneralUtilities.assert_condition(os.path.isfile(
-
-
-        GeneralUtilities.assert_condition(
-            GeneralUtilities.
-
-
+    def run_ls_for_folder(self, file_or_folder: str) -> str:
+        file_or_folder = file_or_folder.replace("\\", "/")
+        GeneralUtilities.assert_condition(os.path.isfile(file_or_folder) or os.path.isdir(file_or_folder), f"Can not execute 'ls -ld' because '{file_or_folder}' does not exist.")
+        ls_result = self.run_program_argsasarray("ls", ["-ld", file_or_folder])
+        GeneralUtilities.assert_condition(ls_result[0] == 0, f"'ls -ld {file_or_folder}' resulted in exitcode {str(ls_result[0])}. StdErr: {ls_result[2]}")
+        GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(ls_result[1]), f"'ls -ld' of '{file_or_folder}' had an empty output. StdErr: '{ls_result[2]}'")
+        GeneralUtilities.write_message_to_stdout(ls_result[1])
+        output = ls_result[1]
+        result = output.replace("\n", "")
+        result = ' '.join(result.split())  # reduce multiple whitespaces to one
+        return result
+
+    @GeneralUtilities.check_arguments
+    def run_ls_for_folder_content(self, file_or_folder: str) -> list[str]:
+        file_or_folder = file_or_folder.replace("\\", "/")
+        GeneralUtilities.assert_condition(os.path.isfile(file_or_folder) or os.path.isdir(file_or_folder), f"Can not execute 'ls -la' because '{file_or_folder}' does not exist.")
+        ls_result = self.run_program_argsasarray("ls", ["-la", file_or_folder])
+        GeneralUtilities.assert_condition(ls_result[0] == 0, f"'ls -la {file_or_folder}' resulted in exitcode {str(ls_result[0])}. StdErr: {ls_result[2]}")
+        GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(ls_result[1]), f"'ls -la' of '{file_or_folder}' had an empty output. StdErr: '{ls_result[2]}'")
+        GeneralUtilities.write_message_to_stdout(ls_result[1])
+        output = ls_result[1]
+        result = output.split("\n")[3:]  # skip the lines with "Total", "." and ".."
+        result = [' '.join(line.split()) for line in result]  # reduce multiple whitespaces to one
+        return result

     @GeneralUtilities.check_arguments
     def set_permission(self, file_or_folder: str, permissions: str, recursive: bool = False) -> None:
@@ -1268,8 +1194,36 @@ class ScriptCollectionCore:
         popen: Popen = self.program_runner.run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, custom_argument, interactive)
         return popen

-
+    @staticmethod
+    def __enqueue_output(file, queue):
+        for line in iter(file.readline, ''):
+            queue.put(line)
+        file.close()
+
+    @staticmethod
+    def __read_popen_pipes(p: Popen):
+        with ThreadPoolExecutor(2) as pool:
+            q_stdout = Queue()
+            q_stderr = Queue()
+
+            pool.submit(ScriptCollectionCore.__enqueue_output, p.stdout, q_stdout)
+            pool.submit(ScriptCollectionCore.__enqueue_output, p.stderr, q_stderr)
+            while (p.poll() is None) or (not q_stdout.empty()) or (not q_stderr.empty()):
+                time.sleep(0.01)
+                out_line = None
+                err_line = None
+                try:
+                    out_line = q_stdout.get_nowait()
+                except Empty:
+                    pass
+                try:
+                    err_line = q_stderr.get_nowait()
+                except Empty:
+                    pass

+                yield (out_line, err_line)
+
+    # Return-values program_runner: Exitcode, StdOut, StdErr, Pid
     @GeneralUtilities.check_arguments
     def run_program_argsasarray(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, throw_exception_if_exitcode_is_not_zero: bool = True, custom_argument: object = None, interactive: bool = False) -> tuple[int, str, str, int]:
         # verbosity 1: No output will be logged.
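The two new private helpers replace the old `stream_process` polling loop (removed in the next hunk): `__enqueue_output` drains one pipe on a worker thread into a `Queue`, and `__read_popen_pipes` polls both queues until the process exits, yielding `(stdout_line, stderr_line)` pairs; the added code references https://stackoverflow.com/a/57084403/3905529. A standalone sketch of the same pattern, assuming a `text=True` process so `readline()` returns `str`:

```python
import sys
import time
from queue import Queue, Empty
from subprocess import Popen, PIPE
from concurrent.futures import ThreadPoolExecutor


def enqueue_output(stream, queue):
    # Worker thread: block on readline and hand every line to the queue.
    for line in iter(stream.readline, ''):
        queue.put(line)
    stream.close()


def read_pipes(process):
    # Poll both queues until the process has exited and both queues are drained.
    with ThreadPoolExecutor(2) as pool:
        q_out, q_err = Queue(), Queue()
        pool.submit(enqueue_output, process.stdout, q_out)
        pool.submit(enqueue_output, process.stderr, q_err)
        while process.poll() is None or not q_out.empty() or not q_err.empty():
            time.sleep(0.01)
            out_line = err_line = None
            try:
                out_line = q_out.get_nowait()
            except Empty:
                pass
            try:
                err_line = q_err.get_nowait()
            except Empty:
                pass
            yield out_line, err_line


cmd = [sys.executable, "-c", "import sys; print('out'); print('err', file=sys.stderr)"]
with Popen(cmd, stdout=PIPE, stderr=PIPE, text=True) as proc:
    for out, err in read_pipes(proc):
        if out:
            print("stdout:", out.rstrip())
        if err:
            print("stderr:", err.rstrip(), file=sys.stderr)
```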
@@ -1297,82 +1251,68 @@ class ScriptCollectionCore:

         if verbosity >= 3:
             GeneralUtilities.write_message_to_stdout(f"Run '{info_for_log}'.")
-        with self.__run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive) as process:
-            pid = process.pid
-
-            stdout_lines = list[str]()
-            stderr_lines = list[str]()

-
+        print_live_output = 1 < verbosity

-
-
-
+        exit_code: int = None
+        stdout: str = ""
+        stderr: str = ""
+        pid: int = None

-
-            # there are 2 issues in this part:
-            # 1.: ctrl+c
-            # 2.: sometimes this function does not terminate even if the started process exited
-            def stream_process(process) -> bool:
-                try:
-                    go: bool = process.poll() is None
-
-                    stdoutreader: BufferedReader = process.stdout
-                    if stdoutreader.readable():
-                        stdoutresultb: bytes = stdoutreader.read()
-                        stdoutresult = GeneralUtilities.bytes_to_string(
-                            stdoutresultb)
-                        stdoutlines = GeneralUtilities.string_to_lines(stdoutresult)
-                        for line in stdoutlines:
-                            line_stripped = line.replace("\r", "").strip()
-                            if len(line_stripped) > 0:
-                                line_str = line_stripped
-                                stdout_lines.append(line_str)
-                                if live_console_output_printing:
-                                    GeneralUtilities.write_message_to_stdout(line_str)
-                                if log_to_file:
-                                    GeneralUtilities.append_line_to_file(
-                                        log_file, line_str)
-
-                    stderrreader: BufferedReader = process.stderr
-                    if stderrreader.readable():
-                        stderrresultb: bytes = stderrreader.read()
-                        stderrresult = GeneralUtilities.bytes_to_string(stderrresultb)
-                        stderrlines = GeneralUtilities.string_to_lines(stderrresult)
-                        for line in stderrlines:
-                            line_stripped = line.replace("\r", "").strip()
-                            if len(line_stripped) > 0:
-                                line_str = line_stripped
-                                stderr_lines.append(line_str)
-                                if live_console_output_printing:
-                                    if print_errors_as_information:
-                                        GeneralUtilities.write_message_to_stdout(line_str)
-                                    else:
-                                        GeneralUtilities.write_message_to_stderr(line_str)
-                                if log_to_file:
-                                    GeneralUtilities.append_line_to_file(log_file, line_str)
-
-                    return go
-                except Exception:
-                    return False
-
-            while stream_process(process):
-                time.sleep(0.1)
-
-            exit_code = process.poll()
-            stdout = '\n'.join(stdout_lines)
-            stderr = '\n'.join(stderr_lines)
-        else:
-            stdout, stderr = process.communicate()
-            exit_code = process.wait()
-            stdout = GeneralUtilities.bytes_to_string(stdout).replace('\r', '')
-            stderr = GeneralUtilities.bytes_to_string(stderr).replace('\r', '')
-
-        if throw_exception_if_exitcode_is_not_zero and exit_code != 0:
-            raise ValueError(f"Program '{working_directory}>{program} {arguments_for_log_as_string}' resulted in exitcode {exit_code}. (StdOut: '{stdout}', StdErr: '{stderr}')")
+        with self.__run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive) as process:

-
-
+            if log_file is not None:
+                GeneralUtilities.ensure_file_exists(log_file)
+            pid = process.pid
+            for out_line_plain, err_line_plain in ScriptCollectionCore.__read_popen_pipes(process):  # see https://stackoverflow.com/a/57084403/3905529
+
+                if out_line_plain is not None:
+                    out_line: str = None
+                    if isinstance(out_line_plain, str):
+                        out_line = out_line_plain
+                    elif isinstance(out_line_plain, bytes):
+                        out_line = GeneralUtilities.bytes_to_string(out_line_plain)
+                    else:
+                        raise ValueError(f"Unknown type of output: {str(type(out_line_plain))}")
+
+                    if out_line is not None and GeneralUtilities.string_has_content(out_line):
+                        if out_line.endswith("\n"):
+                            out_line = out_line[:-1]
+                        if print_live_output:
+                            print(out_line, end='\n', file=sys.stdout, flush=True)
+                        if 0 < len(stdout):
+                            stdout = stdout+"\n"
+                        stdout = stdout+out_line
+                        if log_file is not None:
+                            GeneralUtilities.append_line_to_file(log_file, out_line)
+
+                if err_line_plain is not None:
+                    err_line: str = None
+                    if isinstance(err_line_plain, str):
+                        err_line = err_line_plain
+                    elif isinstance(err_line_plain, bytes):
+                        err_line = GeneralUtilities.bytes_to_string(err_line_plain)
+                    else:
+                        raise ValueError(f"Unknown type of output: {str(type(err_line_plain))}")
+                    if err_line is not None and GeneralUtilities.string_has_content(err_line):
+                        if err_line.endswith("\n"):
+                            err_line = err_line[:-1]
+                        if print_live_output:
+                            print(err_line, end='\n', file=sys.stderr, flush=True)
+                        if 0 < len(stderr):
+                            stderr = stderr+"\n"
+                        stderr = stderr+err_line
+                        if log_file is not None:
+                            GeneralUtilities.append_line_to_file(log_file, err_line)
+
+            exit_code = process.returncode
+
+            if throw_exception_if_exitcode_is_not_zero and exit_code != 0:
+                raise ValueError(f"Program '{working_directory}>{program} {arguments_for_log_as_string}' resulted in exitcode {exit_code}. (StdOut: '{stdout}', StdErr: '{stderr}')")
+
+            GeneralUtilities.assert_condition(exit_code is not None, f"Exitcode of program-run of '{info_for_log}' is None.")
+            result = (exit_code, stdout, stderr, pid)
+            return result
         except Exception as e:
             raise e

@@ -1415,8 +1355,7 @@ class ScriptCollectionCore:
     @GeneralUtilities.check_arguments
     def verify_no_pending_mock_program_calls(self):
         if (len(self.__mocked_program_calls) > 0):
-            raise AssertionError(
-                "The following mock-calls were not called:\n"+",\n ".join([self.__format_mock_program_call(r) for r in self.__mocked_program_calls]))
+            raise AssertionError("The following mock-calls were not called:\n"+",\n ".join([self.__format_mock_program_call(r) for r in self.__mocked_program_calls]))

     @GeneralUtilities.check_arguments
     def __format_mock_program_call(self, r) -> str:
@@ -1451,8 +1390,7 @@ class ScriptCollectionCore:
                 result = mock_call
                 break
         if result is None:
-            raise LookupError(
-                f"Tried to execute mock-call '{workingdirectory}>{program} {argument}' but no mock-call was defined for that execution")
+            raise LookupError(f"Tried to execute mock-call '{workingdirectory}>{program} {argument}' but no mock-call was defined for that execution")
         else:
             self.__mocked_program_calls.remove(result)
             return (result.exit_code, result.stdout, result.stderr, result.pid)
@@ -1501,8 +1439,7 @@ class ScriptCollectionCore:

     @GeneralUtilities.check_arguments
     def check_system_time_with_default_tolerance(self) -> None:
-        self.check_system_time(
-            self.__get_default_tolerance_for_system_time_equals_internet_time())
+        self.check_system_time(self.__get_default_tolerance_for_system_time_equals_internet_time())

     @GeneralUtilities.check_arguments
     def __get_default_tolerance_for_system_time_equals_internet_time(self) -> timedelta:
@@ -1529,13 +1466,11 @@ class ScriptCollectionCore:
         result = self.get_version_from_gitversion(repository_folder, "MajorMinorPatch")
         if self.git_repository_has_uncommitted_changes(repository_folder):
             if self.get_current_git_branch_has_tag(repository_folder):
-                id_of_latest_tag = self.git_get_commitid_of_tag(
-                    repository_folder, self.get_latest_git_tag(repository_folder))
+                id_of_latest_tag = self.git_get_commitid_of_tag(repository_folder, self.get_latest_git_tag(repository_folder))
                 current_commit = self.git_get_commit_id(repository_folder)
                 current_commit_is_on_latest_tag = id_of_latest_tag == current_commit
                 if current_commit_is_on_latest_tag:
-                    result = self.increment_version(
-                        result, False, False, True)
+                    result = self.increment_version(result, False, False, True)
         else:
             result = "0.1.0"
         return result
@@ -1573,8 +1508,7 @@ class ScriptCollectionCore:
         self.run_program("openssl", f'genrsa -out {filename}.key {rsa_key_length}', folder)
         self.run_program("openssl", f'req -new -subj /C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={domain}/OU={subj_ou} -x509 -key {filename}.key -out {filename}.unsigned.crt -days {days_until_expire}', folder)
         self.run_program("openssl", f'pkcs12 -export -out {filename}.selfsigned.pfx -password pass:{password} -inkey {filename}.key -in {filename}.unsigned.crt', folder)
-        GeneralUtilities.write_text_to_file(
-            os.path.join(folder, f"{filename}.password"), password)
+        GeneralUtilities.write_text_to_file(os.path.join(folder, f"{filename}.password"), password)
         GeneralUtilities.write_text_to_file(os.path.join(folder, f"{filename}.san.conf"), f"""[ req ]
 default_bits = {rsa_key_length}
 distinguished_name = req_distinguished_name
@@ -1600,8 +1534,7 @@ DNS = {domain}

     @GeneralUtilities.check_arguments
     def generate_certificate_sign_request(self, folder: str, domain: str, filename: str, subj_c: str, subj_st: str, subj_l: str, subj_o: str, subj_ou: str) -> None:
-        self.run_program(
-            "openssl", f'req -new -subj /C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={domain}/OU={subj_ou} -key {filename}.key -out {filename}.csr -config {filename}.san.conf', folder)
+        self.run_program("openssl", f'req -new -subj /C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={domain}/OU={subj_ou} -key {filename}.key -out {filename}.csr -config {filename}.san.conf', folder)

     @GeneralUtilities.check_arguments
     def sign_certificate(self, folder: str, ca_folder: str, ca_name: str, domain: str, filename: str, days_until_expire: int = None) -> None:
@@ -1629,11 +1562,13 @@ DNS = {domain}
         elif ">" in line:
             try:
                 # line is something like "cyclonedx-bom>=2.0.2" and the function must return with the updated version
-                # (something like "cyclonedx-bom>=
+                # (something like "cyclonedx-bom>=2.11.0" for example)
                 package = line.split(">")[0]
                 operator = ">=" if ">=" in line else ">"
                 response = requests.get(f'https://pypi.org/pypi/{package}/json', timeout=5)
                 latest_version = response.json()['info']['version']
+                # TODO update only minor- and patch-version
+                # TODO print info if there is a new major-version
                 return package+operator+latest_version
             except:
                 return line
@@ -1696,8 +1631,7 @@ DNS = {domain}
         GeneralUtilities.ensure_directory_exists(packagecontent_entireresult_folder)

         # create "debian-binary"-file
-        debianbinary_file = os.path.join(
-            packagecontent_entireresult_folder, "debian-binary")
+        debianbinary_file = os.path.join(packagecontent_entireresult_folder, "debian-binary")
         GeneralUtilities.ensure_file_exists(debianbinary_file)
         GeneralUtilities.write_text_to_file(debianbinary_file, "2.0\n")

@@ -1714,10 +1648,10 @@ DNS = {domain}
         link_file = f"/usr/bin/{toolname.lower()}"
         permission = str(permission_of_executable_file_as_octet_triple)
         GeneralUtilities.write_text_to_file(postinst_file, f"""#!/bin/sh
-
-
-
-
+ln -s {exe_file} {link_file}
+chmod {permission} {exe_file}
+chmod {permission} {link_file}
+""")

         # control
         control_file = os.path.join(packagecontent_control_folder, "control")
@@ -1733,16 +1667,14 @@ DNS = {domain}
         # copy binaries
         usr_bin_folder = os.path.join(packagecontent_data_folder, "usr/bin")
         GeneralUtilities.ensure_directory_exists(usr_bin_folder)
-        usr_bin_content_folder = os.path.join(
-            usr_bin_folder, tool_content_folder_name)
+        usr_bin_content_folder = os.path.join(usr_bin_folder, tool_content_folder_name)
         GeneralUtilities.copy_content_of_folder(bin_folder, usr_bin_content_folder)

         # create debfile
         deb_filename = f"{toolname}.deb"
         self.run_program_argsasarray("tar", ["czf", f"../{entireresult_content_folder_name}/control.tar.gz", "*"], packagecontent_control_folder, verbosity=verbosity)
         self.run_program_argsasarray("tar", ["czf", f"../{entireresult_content_folder_name}/data.tar.gz", "*"], packagecontent_data_folder, verbosity=verbosity)
-        self.run_program_argsasarray("ar", ["r", deb_filename, "debian-binary", "control.tar.gz",
-                                     "data.tar.gz"], packagecontent_entireresult_folder, verbosity=verbosity)
+        self.run_program_argsasarray("ar", ["r", deb_filename, "debian-binary", "control.tar.gz", "data.tar.gz"], packagecontent_entireresult_folder, verbosity=verbosity)
         result_file = os.path.join(packagecontent_entireresult_folder, deb_filename)
         shutil.copy(result_file, os.path.join(deb_output_folder, deb_filename))

@@ -1788,8 +1720,7 @@ DNS = {domain}
         proxies = None
         if GeneralUtilities.string_has_content(proxy):
            proxies = {"http": proxy}
-        response = requests.get('https://ipinfo.io',
-                                proxies=proxies, timeout=5)
+        response = requests.get('https://ipinfo.io', proxies=proxies, timeout=5)
         network_information_as_json_string = GeneralUtilities.bytes_to_string(
             response.content)
         return network_information_as_json_string
@@ -1802,10 +1733,133 @@ DNS = {domain}
|
|
|
1802
1733
|
else:
|
|
1803
1734
|
extension_to_compare = from_extension
|
|
1804
1735
|
for file in GeneralUtilities.get_direct_files_of_folder(folder):
|
|
1805
|
-
if (ignore_case and file.lower().endswith(f".{extension_to_compare}")
|
|
1806
|
-
or not ignore_case and file.endswith(f".{extension_to_compare}")):
|
|
1736
|
+
if (ignore_case and file.lower().endswith(f".{extension_to_compare}") or not ignore_case and file.endswith(f".{extension_to_compare}")):
|
|
1807
1737
|
p = Path(file)
|
|
1808
1738
|
p.rename(p.with_suffix('.'+to_extension))
|
|
1809
1739
|
if recursive:
|
|
1810
1740
|
for subfolder in GeneralUtilities.get_direct_folders_of_folder(folder):
|
|
1811
1741
|
self.change_file_extensions(subfolder, from_extension, to_extension, recursive, ignore_case)
|
|
1742
|
+
|
|
1743
|
+
@GeneralUtilities.check_arguments
|
|
1744
|
+
def __add_chapter(self, main_reference_file, reference_content_folder, number: int, chaptertitle: str, content: str = None):
|
|
1745
|
+
if content is None:
|
|
1746
|
+
content = "TXDX add content here"
|
|
1747
|
+
filename = str(number).zfill(2)+"_"+chaptertitle.replace(' ', '-')
|
|
1748
|
+
file = f"{reference_content_folder}/{filename}.md"
|
|
1749
|
+
full_title = f"{number}. {chaptertitle}"
|
|
1750
|
+
|
|
1751
|
+
GeneralUtilities.append_line_to_file(main_reference_file, f"- [{full_title}](./{filename}.md)")
|
|
1752
|
+
|
|
1753
|
+
GeneralUtilities.ensure_file_exists(file)
|
|
1754
|
+
GeneralUtilities.write_text_to_file(file, f"""# {full_title}
|
|
1755
|
+
|
|
1756
|
+
{content}
|
|
1757
|
+
""".replace("XDX", "ODO"))
|
|
1758
|
+
|
|
1759
|
+
@GeneralUtilities.check_arguments
|
|
1760
|
+
def generate_arc42_reference_template(self, repository: str, productname: str = None):
|
|
1761
|
+
productname: str
|
|
1762
|
+
if productname is None:
|
|
1763
|
+
productname = os.path.basename(repository)
|
|
1764
|
+
reference_root_folder = f"{repository}/Other/Resources/Reference"
|
|
1765
|
+
reference_content_folder = reference_root_folder + "/Technical"
|
|
1766
|
+
if os.path.isdir(reference_root_folder):
|
|
1767
|
+
raise ValueError(f"The folder '{reference_root_folder}' does already exist.")
|
|
1768
|
+
GeneralUtilities.ensure_directory_exists(reference_root_folder)
|
|
+        GeneralUtilities.ensure_directory_exists(reference_content_folder)
+        main_reference_file = f"{reference_root_folder}/Reference.md"
+        GeneralUtilities.ensure_file_exists(main_reference_file)
+        GeneralUtilities.write_text_to_file(main_reference_file, f"""# {productname}
+
+TXDX add minimal service-description here.
+
+## Technical documentation
+
+""".replace("XDX", "ODO"))
+        self.__add_chapter(main_reference_file, reference_content_folder, 1, 'Introduction and Goals', """## Overview
+
+TXDX
+
+# Quality goals
+
+TXDX
+
+# Stakeholder
+
+| Name | How to contact | Reason |
+| ---- | -------------- | ------ |""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 2, 'Constraints', """## Technical constraints
+
+| Constraint-identifier | Constraint | Reason |
+| --------------------- | ---------- | ------ |
+
+## Organizational constraints
+
+| Constraint-identifier | Constraint | Reason |
+| --------------------- | ---------- | ------ |""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 3, 'Context and Scope', """## Context
+
+TXDX
+
+## Scope
+
+TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 4, 'Solution Strategy', """TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 5, 'Building Block View', """TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 6, 'Runtime View', """TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 7, 'Deployment View', """## Infrastructure-overview
+
+TXDX
+
+## Infrastructure-requirements
+
+TXDX
+
+## Deployment-processes
+
+TXDX
+""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 8, 'Crosscutting Concepts', """TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 9, 'Architectural Decisions', """## Decision-board
+
+| Decision-identifier | Date | Decision | Reason and notes |
+| ------------------- | ---- | -------- | ---------------- |""")  # empty because there are no decisions yet
+        self.__add_chapter(main_reference_file, reference_content_folder, 10, 'Quality Requirements', """TXDX""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 11, 'Risks and Technical Debt', """## Risks
+
+Currently there are no known risks.
+
+## Technical debts
+
+Currently there are no technical debts.""")
+        self.__add_chapter(main_reference_file, reference_content_folder, 12, 'Glossary', """## Terms
+
+| Term | Meaning |
+| ---- | ------- |
+
+## Abbreviations
+
+| Abbreviation | Meaning |
+| ------------ | ------- |""")
+
+        GeneralUtilities.append_to_file(main_reference_file, """
+
+## Responsibilities
+
+| Responsibility | Name and contact-information |
+| --------------- | ---------------------------- |
+| Product-owner | TXDX |
+| Product-manager | TXDX |
+| Support | TXDX |
+
+## License & Pricing
+
+TXDX
+
+## External resources
+
+- [Repository](TXDX)
+- [Productive-System](TXDX)
+- [QualityCheck-system](TXDX)
+
+""")
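Note on the block above: it completes the arc42 reference-template generator introduced in this release (the TXDX placeholders appear to be rendered as TODO markers via the .replace("XDX", "ODO") trick). A minimal usage sketch, assuming the surrounding method is exposed as generate_arc42_reference_template(folder, productname); the import path, folder semantics, and product name below are placeholders, not taken from the diff:

    import os
    from ScriptCollection.ScriptCollectionCore import ScriptCollectionCore

    # Hypothetical call; the folder argument is assumed to be the directory in which
    # the Reference.md skeleton and the arc42 chapter files should be generated,
    # and "MyProduct" is a placeholder product name.
    ScriptCollectionCore().generate_arc42_reference_template(os.getcwd(), "MyProduct")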
@@ -241,11 +241,11 @@ class TasksForCommonProjectStructure:
             GeneralUtilities.ensure_directory_exists(web_folder)
             GeneralUtilities.copy_content_of_folder(web_relase_folder, web_folder)
         elif target == "windows":
-
+            windows_release_folder = os.path.join(src_folder, "build/windows/x64/runner/Release")
             windows_folder = os.path.join(artifacts_folder, "BuildResult_Windows")
             GeneralUtilities.ensure_directory_does_not_exist(windows_folder)
             GeneralUtilities.ensure_directory_exists(windows_folder)
-            GeneralUtilities.copy_content_of_folder(
+            GeneralUtilities.copy_content_of_folder(windows_release_folder, windows_folder)
         elif target == "ios":
             raise ValueError("building for ios is not implemented yet")
         elif target == "appbundle":

@@ -2238,16 +2238,16 @@ class TasksForCommonProjectStructure:
             raise ValueError("Can not download FFMPEG.")

     @GeneralUtilities.check_arguments
-    def ensure_plantuml_is_available(self,
-        self.ensure_file_from_github_assets_is_available(
+    def ensure_plantuml_is_available(self, target_folder: str) -> None:
+        self.ensure_file_from_github_assets_is_available(target_folder, "plantuml", "plantuml", "PlantUML", "plantuml.jar", lambda latest_version: "plantuml.jar")

     @GeneralUtilities.check_arguments
-    def ensure_androidappbundletool_is_available(self,
-        self.ensure_file_from_github_assets_is_available(
+    def ensure_androidappbundletool_is_available(self, target_folder: str) -> None:
+        self.ensure_file_from_github_assets_is_available(target_folder, "google", "bundletool", "AndroidAppBundleTool", "bundletool.jar", lambda latest_version: f"bundletool-all-{latest_version}.jar")

     @GeneralUtilities.check_arguments
-    def ensure_file_from_github_assets_is_available(self,
-        resource_folder = os.path.join(
+    def ensure_file_from_github_assets_is_available(self, target_folder: str, githubuser: str, githubprojectname: str, resource_name: str, local_filename: str, get_filename_on_github) -> None:
+        resource_folder = os.path.join(target_folder, "Other", "Resources", resource_name)
         internet_connection_is_available = GeneralUtilities.internet_connection_is_available()
         file = f"{resource_folder}/{local_filename}"
         file_exists = os.path.isfile(file)

@@ -2266,10 +2266,10 @@ class TasksForCommonProjectStructure:
             raise ValueError(f"Can not download {resource_name}.")

     @GeneralUtilities.check_arguments
-    def generate_svg_files_from_plantuml_files(self,
-        self.ensure_plantuml_is_available(
-        plant_uml_folder = os.path.join(
-        files_folder = os.path.join(
+    def generate_svg_files_from_plantuml_files(self, target_folder: str) -> None:
+        self.ensure_plantuml_is_available(target_folder)
+        plant_uml_folder = os.path.join(target_folder, "Other", "Resources", "PlantUML")
+        files_folder = os.path.join(target_folder, "Other/Reference")
         sc = ScriptCollectionCore()
         for file in GeneralUtilities.get_all_files_of_folder(files_folder):
             if file.endswith(".plantuml"):
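The two hunks above generalize the asset-download helpers: ensure_plantuml_is_available and ensure_androidappbundletool_is_available now take an explicit target folder and delegate to ensure_file_from_github_assets_is_available, which appears to place the requested GitHub release asset under <target_folder>/Other/Resources/<resource_name>/<local_filename> and to skip the download when the file already exists or no internet connection is available. A hedged usage sketch; the import path, constructor call, and folder value are assumptions, not taken from this diff:

    from ScriptCollection.TasksForCommonProjectStructure import TasksForCommonProjectStructure

    target_folder = "."  # placeholder; real callers would pass their repository folder
    tasks = TasksForCommonProjectStructure()

    # Fetch plantuml.jar into <target_folder>/Other/Resources/PlantUML/ if it is missing.
    tasks.ensure_plantuml_is_available(target_folder)

    # The same mechanism, parameterized directly: download the latest bundletool release
    # asset from the google/bundletool GitHub project and store it as
    # <target_folder>/Other/Resources/AndroidAppBundleTool/bundletool.jar.
    tasks.ensure_file_from_github_assets_is_available(
        target_folder, "google", "bundletool", "AndroidAppBundleTool", "bundletool.jar",
        lambda latest_version: f"bundletool-all-{latest_version}.jar")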
ScriptCollection-3.5.15.dist-info/RECORD
ADDED

@@ -0,0 +1,16 @@
+ScriptCollection/Executables.py,sha256=Ui9omPvmBjnsgFZXpeXMoZXmk17sjPyO1mS_7B7w8aY,19951
+ScriptCollection/GeneralUtilities.py,sha256=AElXv2NO30cTw-Qs3qVmO-YCOQ5FvBQM3RZMywuKQ_Y,35121
+ScriptCollection/ProcessesRunner.py,sha256=3mu4ZxzZleQo0Op6o9EYTCFiJfb6kx5ov2YfZfT89mU,1395
+ScriptCollection/ProgramRunnerBase.py,sha256=7QAjoqOz6XPmJH19F2k-Z1fFQB_uZnPFvn-T54IJcHQ,2324
+ScriptCollection/ProgramRunnerEpew.py,sha256=C2Rs3YWOWWWJct7XmKphp5CF1tf0j4Fp-ljV2drLTfs,6349
+ScriptCollection/ProgramRunnerPopen.py,sha256=G3LgQUVCfaq7XjBsGzalElH31Hbr0etttGR2_H87YzA,3512
+ScriptCollection/RPStream.py,sha256=NRRHL3YSP3D9MuAV2jB_--0KUKCsvJGxeKnxgrRZ9kY,1545
+ScriptCollection/ScriptCollectionCore.py,sha256=YjhExLE-EnEarXD72pToTIF-6UYMid3wXs31-M8_rSA,100035
+ScriptCollection/TasksForCommonProjectStructure.py,sha256=_JYOAmB0W5S_KKvyI3a0CbjWImt7lQHUraPQ0dEFUsk,184076
+ScriptCollection/UpdateCertificates.py,sha256=Eynbgu7k9jLxApP2D_8Il77B6BFjJap6K7oTeEAZYbk,7790
+ScriptCollection/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ScriptCollection-3.5.15.dist-info/METADATA,sha256=kOCRDbe9JAms1tzXKdJm8ZbYQOnGi4XJwnBXYH8uScs,7680
+ScriptCollection-3.5.15.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ScriptCollection-3.5.15.dist-info/entry_points.txt,sha256=yASwR6hWZ_b5d4W49YeX1htD8ngfWbwgjpfQiJdtUAU,2322
+ScriptCollection-3.5.15.dist-info/top_level.txt,sha256=hY2hOVH0V0Ce51WB76zKkIWTUNwMUdHo4XDkR2vYVwg,17
+ScriptCollection-3.5.15.dist-info/RECORD,,
{ScriptCollection-3.5.13.dist-info → ScriptCollection-3.5.15.dist-info}/entry_points.txt
CHANGED

@@ -11,6 +11,7 @@ sccreateisofilewithobfuscatedfiles = ScriptCollection.Executables:CreateISOFileW
 sccreatesimplemergewithoutrelease = ScriptCollection.Executables:CreateSimpleMergeWithoutRelease
 scextractpdfpages = ScriptCollection.Executables:ExtractPDFPages
 scfilenameobfuscator = ScriptCollection.Executables:FilenameObfuscator
+scgeneratearc42referencetemplate = ScriptCollection.Executables:GenerateARC42ReferenceTemplate
 scgeneratecertificate = ScriptCollection.Executables:GenerateCertificate
 scgeneratecertificateauthority = ScriptCollection.Executables:GenerateCertificateAuthority
 scgeneratecertificatesignrequest = ScriptCollection.Executables:GenerateCertificateSignRequest
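The added entry point registers a console script for the new template generator. A hedged sketch of invoking it from Python after installing the wheel; the --folder and --productname option names are assumptions based on the generator's command-line wrapper and are not shown in this hunk:

    import subprocess

    # Hypothetical invocation of the newly registered console script; values are placeholders.
    subprocess.run(
        ["scgeneratearc42referencetemplate", "--folder", ".", "--productname", "MyProduct"],
        check=True)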
ScriptCollection-3.5.13.dist-info/RECORD
DELETED

@@ -1,16 +0,0 @@
-ScriptCollection/Executables.py,sha256=msAlVLjgrxCxW-5Uu7Gk8NI1CNQLK2i6TjNR1fppaCY,19535
-ScriptCollection/GeneralUtilities.py,sha256=1Q5ML9RwwS5e1mZPuJps3Z3ceVCGF5taFhh0GEG6MlY,35167
-ScriptCollection/ProcessesRunner.py,sha256=3mu4ZxzZleQo0Op6o9EYTCFiJfb6kx5ov2YfZfT89mU,1395
-ScriptCollection/ProgramRunnerBase.py,sha256=7QAjoqOz6XPmJH19F2k-Z1fFQB_uZnPFvn-T54IJcHQ,2324
-ScriptCollection/ProgramRunnerEpew.py,sha256=C2Rs3YWOWWWJct7XmKphp5CF1tf0j4Fp-ljV2drLTfs,6349
-ScriptCollection/ProgramRunnerPopen.py,sha256=ECx35Yz-MKPeoRa_42Bsq8qmbHd13I40vXUJSpZPrI4,3475
-ScriptCollection/RPStream.py,sha256=NRRHL3YSP3D9MuAV2jB_--0KUKCsvJGxeKnxgrRZ9kY,1545
-ScriptCollection/ScriptCollectionCore.py,sha256=cWn10QGcBDP4C6u5TtzGYn6OiYV5OpY8BRkeNGQo00w,97421
-ScriptCollection/TasksForCommonProjectStructure.py,sha256=-QxOB01yFHrcPpTNnDA0GY0rv9McvQ95DZ3PNHRCrFE,184090
-ScriptCollection/UpdateCertificates.py,sha256=Eynbgu7k9jLxApP2D_8Il77B6BFjJap6K7oTeEAZYbk,7790
-ScriptCollection/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ScriptCollection-3.5.13.dist-info/METADATA,sha256=LxH_9vYMJRCIrEukh8lc02T6npy78tCjQTrpaOtYB1A,7680
-ScriptCollection-3.5.13.dist-info/WHEEL,sha256=ixB2d4u7mugx_bCBycvM9OzZ5yD7NmPXFRtKlORZS2Y,91
-ScriptCollection-3.5.13.dist-info/entry_points.txt,sha256=Jz1pyS3Q6lqpuOWVHogt00jGwrpcHYtPrHGY0_Ltjbo,2227
-ScriptCollection-3.5.13.dist-info/top_level.txt,sha256=hY2hOVH0V0Ce51WB76zKkIWTUNwMUdHo4XDkR2vYVwg,17
-ScriptCollection-3.5.13.dist-info/RECORD,,

{ScriptCollection-3.5.13.dist-info → ScriptCollection-3.5.15.dist-info}/top_level.txt
File without changes