ScriptCollection 3.5.21-py3-none-any.whl → 3.5.22-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
@@ -1,1870 +1,1870 @@
1
- import sys
2
- from datetime import timedelta, datetime
3
- import json
4
- import binascii
5
- import filecmp
6
- import hashlib
7
- import time
8
- from io import BytesIO
9
- import itertools
10
- import math
11
- import os
12
- from queue import Queue, Empty
13
- from concurrent.futures import ThreadPoolExecutor
14
- from pathlib import Path
15
- from subprocess import Popen
16
- import re
17
- import shutil
18
- import uuid
19
- import tempfile
20
- import io
21
- import requests
22
- import ntplib
23
- import yaml
24
- import qrcode
25
- import pycdlib
26
- import send2trash
27
- import PyPDF2
28
- from .GeneralUtilities import GeneralUtilities
29
- from .ProgramRunnerBase import ProgramRunnerBase
30
- from .ProgramRunnerPopen import ProgramRunnerPopen
31
- from .ProgramRunnerEpew import ProgramRunnerEpew, CustomEpewArgument
32
-
33
- version = "3.5.21"
34
- __version__ = version
35
-
36
-
37
- class ScriptCollectionCore:
38
-
39
- # The purpose of this property is to use it when testing your code which uses scriptcollection for external program-calls.
40
- # Do not change this value in production environments.
41
- mock_program_calls: bool = False
42
- # The purpose of this property is to use it when testing your code which uses scriptcollection for external program-calls.
43
- execute_program_really_if_no_mock_call_is_defined: bool = False
44
- __mocked_program_calls: list = None
45
- program_runner: ProgramRunnerBase = None
46
-
47
- def __init__(self):
48
- self.program_runner = ProgramRunnerPopen()
49
- self.__mocked_program_calls = list[ScriptCollectionCore.__MockProgramCall]()
50
-
51
- @staticmethod
52
- @GeneralUtilities.check_arguments
53
- def get_scriptcollection_version() -> str:
54
- return __version__
55
-
56
- @GeneralUtilities.check_arguments
57
- def python_file_has_errors(self, file: str, working_directory: str, treat_warnings_as_errors: bool = True) -> tuple[bool, list[str]]:
58
- errors = list()
59
- filename = os.path.relpath(file, working_directory)
60
- if treat_warnings_as_errors:
61
- errorsonly_argument = ""
62
- else:
63
- errorsonly_argument = " --errors-only"
64
- (exit_code, stdout, stderr, _) = self.run_program("pylint", filename + errorsonly_argument, working_directory, throw_exception_if_exitcode_is_not_zero=False)
65
- if (exit_code != 0):
66
- errors.append(f"Linting-issues of {file}:")
67
- errors.append(f"Pylint-exitcode: {exit_code}")
68
- for line in GeneralUtilities.string_to_lines(stdout):
69
- errors.append(line)
70
- for line in GeneralUtilities.string_to_lines(stderr):
71
- errors.append(line)
72
- return (True, errors)
73
-
74
- return (False, errors)
75
-
76
- @GeneralUtilities.check_arguments
77
- def replace_version_in_dockerfile_file(self, dockerfile: str, new_version_value: str) -> None:
78
- GeneralUtilities.write_text_to_file(dockerfile, re.sub("ARG Version=\"\\d+\\.\\d+\\.\\d+\"", f"ARG Version=\"{new_version_value}\"", GeneralUtilities.read_text_from_file(dockerfile)))
79
-
80
- @GeneralUtilities.check_arguments
81
- def replace_version_in_python_file(self, file: str, new_version_value: str):
82
- GeneralUtilities.write_text_to_file(file, re.sub("version = \"\\d+\\.\\d+\\.\\d+\"", f"version = \"{new_version_value}\"", GeneralUtilities.read_text_from_file(file)))
83
-
84
- @GeneralUtilities.check_arguments
85
- def replace_version_in_ini_file(self, file: str, new_version_value: str):
86
- GeneralUtilities.write_text_to_file(file, re.sub("version = \\d+\\.\\d+\\.\\d+", f"version = {new_version_value}", GeneralUtilities.read_text_from_file(file)))
87
-
88
- @GeneralUtilities.check_arguments
89
- def replace_version_in_nuspec_file(self, nuspec_file: str, new_version: str) -> None:
90
- # TODO use XSLT instead
91
- versionregex = "\\d+\\.\\d+\\.\\d+"
92
- versiononlyregex = f"^{versionregex}$"
93
- pattern = re.compile(versiononlyregex)
94
- if pattern.match(new_version):
95
- GeneralUtilities.write_text_to_file(nuspec_file, re.sub(f"<version>{versionregex}<\\/version>", f"<version>{new_version}</version>", GeneralUtilities.read_text_from_file(nuspec_file)))
96
- else:
97
- raise ValueError(f"Version '{new_version}' does not match version-regex '{versiononlyregex}'")
98
-
99
- @GeneralUtilities.check_arguments
100
- def replace_version_in_csproj_file(self, csproj_file: str, current_version: str):
101
- versionregex = "\\d+\\.\\d+\\.\\d+"
102
- versiononlyregex = f"^{versionregex}$"
103
- pattern = re.compile(versiononlyregex)
104
- if pattern.match(current_version):
105
- for tag in ["Version", "AssemblyVersion", "FileVersion"]:
106
- GeneralUtilities.write_text_to_file(csproj_file, re.sub(f"<{tag}>{versionregex}(.\\d+)?<\\/{tag}>", f"<{tag}>{current_version}</{tag}>", GeneralUtilities.read_text_from_file(csproj_file)))
107
- else:
108
- raise ValueError(f"Version '{current_version}' does not match version-regex '{versiononlyregex}'")
109
-
110
- @GeneralUtilities.check_arguments
111
- def push_nuget_build_artifact(self, nupkg_file: str, registry_address: str, api_key: str, verbosity: int = 1):
112
- nupkg_file_name = os.path.basename(nupkg_file)
113
- nupkg_file_folder = os.path.dirname(nupkg_file)
114
- self.run_program("dotnet", f"nuget push {nupkg_file_name} --force-english-output --source {registry_address} --api-key {api_key}", nupkg_file_folder, verbosity)
115
-
116
- @GeneralUtilities.check_arguments
117
- def dotnet_build(self, repository_folder: str, projectname: str, configuration: str):
118
- self.run_program("dotnet", f"clean -c {configuration}", repository_folder)
119
- self.run_program("dotnet", f"build {projectname}/{projectname}.csproj -c {configuration}", repository_folder)
120
-
121
- @GeneralUtilities.check_arguments
122
- def find_file_by_extension(self, folder: str, extension: str):
123
- result = [file for file in GeneralUtilities.get_direct_files_of_folder(folder) if file.endswith(f".{extension}")]
124
- result_length = len(result)
125
- if result_length == 0:
126
- raise FileNotFoundError(f"No file available in folder '{folder}' with extension '{extension}'.")
127
- if result_length == 1:
128
- return result[0]
129
- else:
130
- raise ValueError(f"Multiple values available in folder '{folder}' with extension '{extension}'.")
131
-
132
- @GeneralUtilities.check_arguments
133
- def commit_is_signed_by_key(self, repository_folder: str, revision_identifier: str, key: str) -> bool:
134
- result = self.run_program(
135
- "git", f"verify-commit {revision_identifier}", repository_folder, throw_exception_if_exitcode_is_not_zero=False)
136
- if (result[0] != 0):
137
- return False
138
- if (not GeneralUtilities.contains_line(result[1].splitlines(), f"gpg\\:\\ using\\ [A-Za-z0-9]+\\ key\\ [A-Za-z0-9]+{key}")):
139
- # TODO check whether this works on machines where gpg is installed in a language other than English
140
- return False
141
- if (not GeneralUtilities.contains_line(result[1].splitlines(), "gpg\\:\\ Good\\ signature\\ from")):
142
- # TODO check whether this works on machines where gpg is installed in a language other than English
143
- return False
144
- return True
145
-
146
- @GeneralUtilities.check_arguments
147
- def get_parent_commit_ids_of_commit(self, repository_folder: str, commit_id: str) -> list[str]:
148
- return self.run_program("git", f'log --pretty=%P -n 1 "{commit_id}"', repository_folder, throw_exception_if_exitcode_is_not_zero=True)[1].replace("\r", "").replace("\n", "").split(" ")
149
-
150
- @GeneralUtilities.check_arguments
151
- def get_all_authors_and_committers_of_repository(self, repository_folder: str, subfolder: str = None, verbosity: int = 1) -> list[tuple[str, str]]:
152
- space_character = "_"
153
- if subfolder is None:
154
- subfolder_argument = ""
155
- else:
156
- subfolder_argument = f" -- {subfolder}"
157
- log_result = self.run_program("git", f'log --pretty=%aN{space_character}%aE%n%cN{space_character}%cE HEAD{subfolder_argument}', repository_folder, verbosity=0)
158
- plain_content: list[str] = list(
159
- set([line for line in log_result[1].split("\n") if len(line) > 0]))
160
- result: list[tuple[str, str]] = []
161
- for item in plain_content:
162
- if len(re.findall(space_character, item)) == 1:
163
- splitted = item.split(space_character)
164
- result.append((splitted[0], splitted[1]))
165
- else:
166
- raise ValueError(f'Unexpected author: "{item}"')
167
- return result
168
-
169
- @GeneralUtilities.check_arguments
170
- def get_commit_ids_between_dates(self, repository_folder: str, since: datetime, until: datetime, ignore_commits_which_are_not_in_history_of_head: bool = True) -> list[str]:
171
- since_as_string = self.__datetime_to_string_for_git(since)
172
- until_as_string = self.__datetime_to_string_for_git(until)
173
- result = filter(lambda line: not GeneralUtilities.string_is_none_or_whitespace(line), self.run_program("git", f'log --since "{since_as_string}" --until "{until_as_string}" --pretty=format:"%H" --no-patch', repository_folder, throw_exception_if_exitcode_is_not_zero=True)[1].replace("\r", "").split("\n"))
174
- if ignore_commits_which_are_not_in_history_of_head:
175
- result = [commit_id for commit_id in result if self.git_commit_is_ancestor(
176
- repository_folder, commit_id)]
177
- return result
178
-
179
- @GeneralUtilities.check_arguments
180
- def __datetime_to_string_for_git(self, datetime_object: datetime) -> str:
181
- return datetime_object.strftime('%Y-%m-%d %H:%M:%S')
182
-
183
- @GeneralUtilities.check_arguments
184
- def git_commit_is_ancestor(self, repository_folder: str, ancestor: str, descendant: str = "HEAD") -> bool:
185
- exit_code = self.run_program_argsasarray("git", ["merge-base", "--is-ancestor", ancestor, descendant], repository_folder, throw_exception_if_exitcode_is_not_zero=False)[0]
186
- if exit_code == 0:
187
- return True
188
- elif exit_code == 1:
189
- return False
190
- else:
191
- raise ValueError(f"Can not calculate if {ancestor} is an ancestor of {descendant} in repository {repository_folder}.")
192
-
193
- @GeneralUtilities.check_arguments
194
- def __git_changes_helper(self, repository_folder: str, arguments_as_array: list[str]) -> bool:
195
- lines = GeneralUtilities.string_to_lines(self.run_program_argsasarray("git", arguments_as_array, repository_folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)[1], False)
196
- for line in lines:
197
- if GeneralUtilities.string_has_content(line):
198
- return True
199
- return False
200
-
201
- @GeneralUtilities.check_arguments
202
- def git_repository_has_new_untracked_files(self, repositoryFolder: str):
203
- return self.__git_changes_helper(repositoryFolder, ["ls-files", "--exclude-standard", "--others"])
204
-
205
- @GeneralUtilities.check_arguments
206
- def git_repository_has_unstaged_changes_of_tracked_files(self, repositoryFolder: str):
207
- return self.__git_changes_helper(repositoryFolder, ["diff"])
208
-
209
- @GeneralUtilities.check_arguments
210
- def git_repository_has_staged_changes(self, repositoryFolder: str):
211
- return self.__git_changes_helper(repositoryFolder, ["diff", "--cached"])
212
-
213
- @GeneralUtilities.check_arguments
214
- def git_repository_has_uncommitted_changes(self, repositoryFolder: str) -> bool:
215
- if (self.git_repository_has_unstaged_changes(repositoryFolder)):
216
- return True
217
- if (self.git_repository_has_staged_changes(repositoryFolder)):
218
- return True
219
- return False
220
-
221
- @GeneralUtilities.check_arguments
222
- def git_repository_has_unstaged_changes(self, repository_folder: str) -> bool:
223
- if (self.git_repository_has_unstaged_changes_of_tracked_files(repository_folder)):
224
- return True
225
- if (self.git_repository_has_new_untracked_files(repository_folder)):
226
- return True
227
- return False
228
-
229
- @GeneralUtilities.check_arguments
230
- def git_get_commit_id(self, repository_folder: str, commit: str = "HEAD") -> str:
231
- result: tuple[int, str, str, int] = self.run_program_argsasarray("git", ["rev-parse", "--verify", commit], repository_folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
232
- return result[1].replace('\n', '')
233
-
234
- @GeneralUtilities.check_arguments
235
- def git_get_commit_date(self, repository_folder: str, commit: str = "HEAD") -> datetime:
236
- result: tuple[int, str, str, int] = self.run_program_argsasarray("git", ["show", "-s", "--format=%ci", commit], repository_folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
237
- date_as_string = result[1].replace('\n', '')
238
- result = datetime.strptime(date_as_string, '%Y-%m-%d %H:%M:%S %z')
239
- return result
240
-
241
- @GeneralUtilities.check_arguments
242
- def git_fetch(self, folder: str, remotename: str = "--all") -> None:
243
- self.run_program_argsasarray("git", ["fetch", remotename, "--tags", "--prune"], folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
244
-
245
- @GeneralUtilities.check_arguments
246
- def git_fetch_in_bare_repository(self, folder: str, remotename, localbranch: str, remotebranch: str) -> None:
247
- self.run_program_argsasarray("git", ["fetch", remotename, f"{remotebranch}:{localbranch}"], folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
248
-
249
- @GeneralUtilities.check_arguments
250
- def git_remove_branch(self, folder: str, branchname: str) -> None:
251
- self.run_program("git", f"branch -D {branchname}", folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
252
-
253
- @GeneralUtilities.check_arguments
254
- def git_push(self, folder: str, remotename: str, localbranchname: str, remotebranchname: str, forcepush: bool = False, pushalltags: bool = True, verbosity: int = 0) -> str:
255
- argument = ["push", "--recurse-submodules=on-demand", remotename, f"{localbranchname}:{remotebranchname}"]
256
- if (forcepush):
257
- argument.append("--force")
258
- if (pushalltags):
259
- argument.append("--tags")
260
- result: tuple[int, str, str, int] = self.run_program_argsasarray("git", argument, folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=verbosity, print_errors_as_information=True)
261
- return result[1].replace('\r', '').replace('\n', '')
262
-
263
- @GeneralUtilities.check_arguments
264
- def git_pull(self, folder: str, remote: str, localbranchname: str, remotebranchname: str, force: bool = False) -> None:
265
- argument = f"pull {remote} {remotebranchname}:{localbranchname}"
266
- if force:
267
- argument = f"{argument} --force"
268
- self.run_program("git", argument, folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
269
-
270
- @GeneralUtilities.check_arguments
271
- def git_list_remote_branches(self, folder: str, remote: str, fetch: bool) -> list[str]:
272
- if fetch:
273
- self.git_fetch(folder, remote)
274
- run_program_result = self.run_program("git", f"branch -rl {remote}/*", folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
275
- output = GeneralUtilities.string_to_lines(run_program_result[1])
276
- result = list[str]()
277
- for item in output:
278
- striped_item = item.strip()
279
- if GeneralUtilities.string_has_content(striped_item):
280
- branch: str = None
281
- if " " in striped_item:
282
- branch = striped_item.split(" ")[0]
283
- else:
284
- branch = striped_item
285
- branchname = branch[len(remote)+1:]
286
- if branchname != "HEAD":
287
- result.append(branchname)
288
- return result
289
-
290
- @GeneralUtilities.check_arguments
291
- def git_clone(self, clone_target_folder: str, remote_repository_path: str, include_submodules: bool = True, mirror: bool = False) -> None:
292
- if (os.path.isdir(clone_target_folder)):
293
- pass # TODO throw error
294
- else:
295
- args = ["clone", remote_repository_path, clone_target_folder]
296
- if include_submodules:
297
- args.append("--recurse-submodules")
298
- args.append("--remote-submodules")
299
- if mirror:
300
- args.append("--mirror")
301
- self.run_program_argsasarray("git", args, os.getcwd(), throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
302
-
303
- @GeneralUtilities.check_arguments
304
- def git_get_all_remote_names(self, directory: str) -> list[str]:
305
- result = GeneralUtilities.string_to_lines(self.run_program_argsasarray("git", ["remote"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)[1], False)
306
- return result
307
-
308
- @GeneralUtilities.check_arguments
309
- def git_get_remote_url(self, directory: str, remote_name: str) -> str:
310
- result = GeneralUtilities.string_to_lines(self.run_program_argsasarray("git", ["remote", "get-url", remote_name], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)[1], False)
311
- return result[0].replace('\n', '')
312
-
313
- @GeneralUtilities.check_arguments
314
- def repository_has_remote_with_specific_name(self, directory: str, remote_name: str) -> bool:
315
- return remote_name in self.git_get_all_remote_names(directory)
316
-
317
- @GeneralUtilities.check_arguments
318
- def git_add_or_set_remote_address(self, directory: str, remote_name: str, remote_address: str) -> None:
319
- if (self.repository_has_remote_with_specific_name(directory, remote_name)):
320
- self.run_program_argsasarray("git", ['remote', 'set-url', 'remote_name', remote_address], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
321
- else:
322
- self.run_program_argsasarray("git", ['remote', 'add', remote_name, remote_address], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
323
-
324
- @GeneralUtilities.check_arguments
325
- def git_stage_all_changes(self, directory: str) -> None:
326
- self.run_program_argsasarray("git", ["add", "-A"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
327
-
328
- @GeneralUtilities.check_arguments
329
- def git_unstage_all_changes(self, directory: str) -> None:
330
- self.run_program_argsasarray("git", ["reset"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
331
-
332
- @GeneralUtilities.check_arguments
333
- def git_stage_file(self, directory: str, file: str) -> None:
334
- self.run_program_argsasarray("git", ['stage', file], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
335
-
336
- @GeneralUtilities.check_arguments
337
- def git_unstage_file(self, directory: str, file: str) -> None:
338
- self.run_program_argsasarray("git", ['reset', file], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
339
-
340
- @GeneralUtilities.check_arguments
341
- def git_discard_unstaged_changes_of_file(self, directory: str, file: str) -> None:
342
- """Caution: This method works really only for 'changed' files yet. So this method does not work properly for new or renamed files."""
343
- self.run_program_argsasarray("git", ['checkout', file], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
344
-
345
- @GeneralUtilities.check_arguments
346
- def git_discard_all_unstaged_changes(self, directory: str) -> None:
347
- """Caution: This function executes 'git clean -df'. This can delete files which maybe should not be deleted. Be aware of that."""
348
- self.run_program_argsasarray("git", ['clean', '-df'], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
349
- self.run_program_argsasarray("git", ['checkout', '.'], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
350
-
351
- @GeneralUtilities.check_arguments
352
- def git_commit(self, directory: str, message: str, author_name: str = None, author_email: str = None, stage_all_changes: bool = True, no_changes_behavior: int = 0) -> str:
353
- # no_changes_behavior=0 => No commit
354
- # no_changes_behavior=1 => Commit anyway
355
- # no_changes_behavior=2 => Exception
356
- author_name = GeneralUtilities.str_none_safe(author_name).strip()
357
- author_email = GeneralUtilities.str_none_safe(author_email).strip()
358
- argument = ['commit', '--quiet', '--allow-empty', '--message', message]
359
- if (GeneralUtilities.string_has_content(author_name)):
360
- argument.append(f'--author="{author_name} <{author_email}>"')
361
- git_repository_has_uncommitted_changes = self.git_repository_has_uncommitted_changes(directory)
362
-
363
- if git_repository_has_uncommitted_changes:
364
- do_commit = True
365
- if stage_all_changes:
366
- self.git_stage_all_changes(directory)
367
- else:
368
- if no_changes_behavior == 0:
369
- GeneralUtilities.write_message_to_stdout(f"Commit '{message}' will not be done because there are no changes to commit in repository '{directory}'")
370
- do_commit = False
371
- elif no_changes_behavior == 1:
372
- GeneralUtilities.write_message_to_stdout(f"There are no changes to commit in repository '{directory}'. Commit '{message}' will be done anyway.")
373
- do_commit = True
374
- elif no_changes_behavior == 2:
375
- raise RuntimeError(f"There are no changes to commit in repository '{directory}'. Commit '{message}' will not be done.")
376
- else:
377
- raise ValueError(f"Unknown value for no_changes_behavior: {GeneralUtilities.str_none_safe(no_changes_behavior)}")
378
-
379
- if do_commit:
380
- GeneralUtilities.write_message_to_stdout(f"Commit changes in '{directory}'")
381
- self.run_program_argsasarray("git", argument, directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
382
-
383
- return self.git_get_commit_id(directory)
384
-
385
- @GeneralUtilities.check_arguments
386
- def git_create_tag(self, directory: str, target_for_tag: str, tag: str, sign: bool = False, message: str = None) -> None:
387
- argument = ["tag", tag, target_for_tag]
388
- if sign:
389
- if message is None:
390
- message = f"Created {target_for_tag}"
391
- argument.extend(["-s", '-m', message])
392
- self.run_program_argsasarray(
393
- "git", argument, directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
394
-
395
- @GeneralUtilities.check_arguments
396
- def git_delete_tag(self, directory: str, tag: str) -> None:
397
- self.run_program_argsasarray("git", ["tag", "--delete", tag], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
398
-
399
- @GeneralUtilities.check_arguments
400
- def git_checkout(self, directory: str, branch: str) -> None:
401
- self.run_program_argsasarray("git", ["checkout", branch], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
402
- self.run_program_argsasarray("git", ["submodule", "update", "--recursive"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
403
-
404
- @GeneralUtilities.check_arguments
405
- def git_merge_abort(self, directory: str) -> None:
406
- self.run_program_argsasarray("git", ["merge", "--abort"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
407
-
408
- @GeneralUtilities.check_arguments
409
- def git_merge(self, directory: str, sourcebranch: str, targetbranch: str, fastforward: bool = True, commit: bool = True, commit_message: str = None) -> str:
410
- self.git_checkout(directory, targetbranch)
411
- args = ["merge"]
412
- if not commit:
413
- args.append("--no-commit")
414
- if not fastforward:
415
- args.append("--no-ff")
416
- if commit_message is not None:
417
- args.append("-m")
418
- args.append(commit_message)
419
- args.append(sourcebranch)
420
- self.run_program_argsasarray("git", args, directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
421
- self.run_program_argsasarray("git", ["submodule", "update"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
422
- return self.git_get_commit_id(directory)
423
-
424
- @GeneralUtilities.check_arguments
425
- def git_undo_all_changes(self, directory: str) -> None:
426
- """Caution: This function executes 'git clean -df'. This can delete files which maybe should not be deleted. Be aware of that."""
427
- self.git_unstage_all_changes(directory)
428
- self.git_discard_all_unstaged_changes(directory)
429
-
430
- @GeneralUtilities.check_arguments
431
- def git_fetch_or_clone_all_in_directory(self, source_directory: str, target_directory: str) -> None:
432
- for subfolder in GeneralUtilities.get_direct_folders_of_folder(source_directory):
433
- foldername = os.path.basename(subfolder)
434
- if self.is_git_repository(subfolder):
435
- source_repository = subfolder
436
- target_repository = os.path.join(target_directory, foldername)
437
- if os.path.isdir(target_repository):
438
- # fetch
439
- self.git_fetch(target_repository)
440
- else:
441
- # clone
442
- self.git_clone(target_repository, source_repository, include_submodules=True, mirror=True)
443
-
444
- def get_git_submodules(self, folder: str) -> list[str]:
445
- e = self.run_program("git", "submodule status", folder)
446
- result = []
447
- for submodule_line in GeneralUtilities.string_to_lines(e[1], False, True):
448
- result.append(submodule_line.split(' ')[1])
449
- return result
450
-
451
- @GeneralUtilities.check_arguments
452
- def is_git_repository(self, folder: str) -> bool:
453
- combined = os.path.join(folder, ".git")
454
- # TODO consider check for bare-repositories
455
- return os.path.isdir(combined) or os.path.isfile(combined)
456
-
457
- @GeneralUtilities.check_arguments
458
- def file_is_git_ignored(self, file_in_repository: str, repositorybasefolder: str) -> bool:
459
- exit_code = self.run_program_argsasarray("git", ['check-ignore', file_in_repository], repositorybasefolder, throw_exception_if_exitcode_is_not_zero=False, verbosity=0)[0]
460
- if (exit_code == 0):
461
- return True
462
- if (exit_code == 1):
463
- return False
464
- raise ValueError(f"Unable to calculate whether '{file_in_repository}' in repository '{repositorybasefolder}' is ignored due to git-exitcode {exit_code}.")
465
-
466
- @GeneralUtilities.check_arguments
467
- def git_discard_all_changes(self, repository: str) -> None:
468
- self.run_program_argsasarray("git", ["reset", "HEAD", "."], repository, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
469
- self.run_program_argsasarray("git", ["checkout", "."], repository, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
470
-
471
- @GeneralUtilities.check_arguments
472
- def git_get_current_branch_name(self, repository: str) -> str:
473
- result = self.run_program_argsasarray("git", ["rev-parse", "--abbrev-ref", "HEAD"], repository, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
474
- return result[1].replace("\r", "").replace("\n", "")
475
-
476
- @GeneralUtilities.check_arguments
477
- def git_get_commitid_of_tag(self, repository: str, tag: str) -> str:
478
- stdout = self.run_program_argsasarray("git", ["rev-list", "-n", "1", tag], repository, verbosity=0)
479
- result = stdout[1].replace("\r", "").replace("\n", "")
480
- return result
481
-
482
- @GeneralUtilities.check_arguments
483
- def git_get_tags(self, repository: str) -> list[str]:
484
- tags = [line.replace("\r", "") for line in self.run_program_argsasarray(
485
- "git", ["tag"], repository)[1].split("\n") if len(line) > 0]
486
- return tags
487
-
488
- @GeneralUtilities.check_arguments
489
- def git_move_tags_to_another_branch(self, repository: str, tag_source_branch: str, tag_target_branch: str, sign: bool = False, message: str = None) -> None:
490
- tags = self.git_get_tags(repository)
491
- tags_count = len(tags)
492
- counter = 0
493
- for tag in tags:
494
- counter = counter+1
495
- GeneralUtilities.write_message_to_stdout(f"Process tag {counter}/{tags_count}.")
496
- # tag is on source-branch
497
- if self.git_commit_is_ancestor(repository, tag, tag_source_branch):
498
- commit_id_old = self.git_get_commitid_of_tag(repository, tag)
499
- commit_date: datetime = self.git_get_commit_date(repository, commit_id_old)
500
- date_as_string = self.__datetime_to_string_for_git(commit_date)
501
- search_commit_result = self.run_program_argsasarray("git", ["log", f'--after="{date_as_string}"', f'--before="{date_as_string}"', "--pretty=format:%H", tag_target_branch], repository, throw_exception_if_exitcode_is_not_zero=False)
502
- if search_commit_result[0] != 0 or not GeneralUtilities.string_has_nonwhitespace_content(search_commit_result[1]):
503
- raise ValueError(f"Can not calculate corresponding commit for tag '{tag}'.")
504
- commit_id_new = search_commit_result[1]
505
- self.git_delete_tag(repository, tag)
506
- self.git_create_tag(repository, commit_id_new, tag, sign, message)
507
-
508
- @GeneralUtilities.check_arguments
509
- def get_current_git_branch_has_tag(self, repository_folder: str) -> bool:
510
- result = self.run_program_argsasarray("git", ["describe", "--tags", "--abbrev=0"], repository_folder, verbosity=0, throw_exception_if_exitcode_is_not_zero=False)
511
- return result[0] == 0
512
-
513
- @GeneralUtilities.check_arguments
514
- def get_latest_git_tag(self, repository_folder: str) -> str:
515
- result = self.run_program_argsasarray(
516
- "git", ["describe", "--tags", "--abbrev=0"], repository_folder, verbosity=0)
517
- result = result[1].replace("\r", "").replace("\n", "")
518
- return result
519
-
520
- @GeneralUtilities.check_arguments
521
- def get_staged_or_committed_git_ignored_files(self, repository_folder: str) -> list[str]:
522
- tresult = self.run_program_argsasarray("git", ["ls-files", "-i", "-c", "--exclude-standard"], repository_folder, verbosity=0)
523
- tresult = tresult[1].replace("\r", "")
524
- result = [line for line in tresult.split("\n") if len(line) > 0]
525
- return result
526
-
527
- @GeneralUtilities.check_arguments
528
- def git_repository_has_commits(self, repository_folder: str) -> bool:
529
- return self.run_program_argsasarray("git", ["rev-parse", "--verify", "HEAD"], repository_folder, throw_exception_if_exitcode_is_not_zero=False)[0] == 0
530
-
531
- @GeneralUtilities.check_arguments
532
- def export_filemetadata(self, folder: str, target_file: str, encoding: str = "utf-8", filter_function=None) -> None:
533
- folder = GeneralUtilities.resolve_relative_path_from_current_working_directory(folder)
534
- lines = list()
535
- path_prefix = len(folder)+1
536
- items = dict()
537
- for item in GeneralUtilities.get_all_folders_of_folder(folder):
538
- items[item] = "d"
539
- for item in GeneralUtilities.get_all_files_of_folder(folder):
540
- items[item] = "f"
541
- for file_or_folder, item_type in items.items():
542
- truncated_file = file_or_folder[path_prefix:]
543
- if (filter_function is None or filter_function(folder, truncated_file)):
544
- owner_and_permisssion = self.get_file_owner_and_file_permission(file_or_folder)
545
- user = owner_and_permisssion[0]
546
- permissions = owner_and_permisssion[1]
547
- lines.append(f"{truncated_file};{item_type};{user};{permissions}")
548
- lines = sorted(lines, key=str.casefold)
549
- with open(target_file, "w", encoding=encoding) as file_object:
550
- file_object.write("\n".join(lines))
551
-
552
- @GeneralUtilities.check_arguments
553
- def escape_git_repositories_in_folder(self, folder: str) -> dict[str, str]:
554
- return self.__escape_git_repositories_in_folder_internal(folder, dict[str, str]())
555
-
556
- @GeneralUtilities.check_arguments
557
- def __escape_git_repositories_in_folder_internal(self, folder: str, renamed_items: dict[str, str]) -> dict[str, str]:
558
- for file in GeneralUtilities.get_direct_files_of_folder(folder):
559
- filename = os.path.basename(file)
560
- if ".git" in filename:
561
- new_name = filename.replace(".git", ".gitx")
562
- target = os.path.join(folder, new_name)
563
- os.rename(file, target)
564
- renamed_items[target] = file
565
- for subfolder in GeneralUtilities.get_direct_folders_of_folder(folder):
566
- foldername = os.path.basename(subfolder)
567
- if ".git" in foldername:
568
- new_name = foldername.replace(".git", ".gitx")
569
- subfolder2 = os.path.join(str(Path(subfolder).parent), new_name)
570
- os.rename(subfolder, subfolder2)
571
- renamed_items[subfolder2] = subfolder
572
- else:
573
- subfolder2 = subfolder
574
- self.__escape_git_repositories_in_folder_internal(subfolder2, renamed_items)
575
- return renamed_items
576
-
577
- @GeneralUtilities.check_arguments
578
- def deescape_git_repositories_in_folder(self, renamed_items: dict[str, str]):
579
- for renamed_item, original_name in renamed_items.items():
580
- os.rename(renamed_item, original_name)
581
-
582
- @GeneralUtilities.check_arguments
583
- def __sort_fmd(self, line: str):
584
- splitted: list = line.split(";")
585
- filetype: str = splitted[1]
586
- if filetype == "d":
587
- return -1
588
- if filetype == "f":
589
- return 1
590
- return 0
591
-
592
- @GeneralUtilities.check_arguments
593
- def restore_filemetadata(self, folder: str, source_file: str, strict=False, encoding: str = "utf-8", create_folder_is_not_exist: bool = True) -> None:
594
- lines = GeneralUtilities.read_lines_from_file(source_file, encoding)
595
- lines.sort(key=self.__sort_fmd)
596
- for line in lines:
597
- splitted: list = line.split(";")
598
- full_path_of_file_or_folder: str = os.path.join(folder, splitted[0])
599
- filetype: str = splitted[1]
600
- user: str = splitted[2]
601
- permissions: str = splitted[3]
602
- if filetype == "d" and create_folder_is_not_exist and not os.path.isdir(full_path_of_file_or_folder):
603
- GeneralUtilities.ensure_directory_exists(full_path_of_file_or_folder)
604
- if (filetype == "f" and os.path.isfile(full_path_of_file_or_folder)) or (filetype == "d" and os.path.isdir(full_path_of_file_or_folder)):
605
- self.set_owner(full_path_of_file_or_folder, user, os.name != 'nt')
606
- self.set_permission(full_path_of_file_or_folder, permissions)
607
- else:
608
- if strict:
609
- if filetype == "f":
610
- filetype_full = "File"
611
- elif filetype == "d":
612
- filetype_full = "Directory"
613
- else:
614
- raise ValueError(f"Unknown filetype: {GeneralUtilities.str_none_safe(filetype)}")
615
- raise ValueError(f"{filetype_full} '{full_path_of_file_or_folder}' does not exist")
616
-
617
- @GeneralUtilities.check_arguments
618
- def __calculate_lengh_in_seconds(self, filename: str, folder: str) -> float:
619
- argument = ['-v', 'error', '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', filename]
620
- result = self.run_program_argsasarray("ffprobe", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
621
- return float(result[1].replace('\n', ''))
622
-
623
- @GeneralUtilities.check_arguments
624
- def __create_thumbnails(self, filename: str, fps: str, folder: str, tempname_for_thumbnails: str) -> list[str]:
625
- argument = ['-i', filename, '-r', str(fps), '-vf', 'scale=-1:120', '-vcodec', 'png', f'{tempname_for_thumbnails}-%002d.png']
626
- self.run_program_argsasarray("ffmpeg", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
627
- files = GeneralUtilities.get_direct_files_of_folder(folder)
628
- result: list[str] = []
629
- regex = "^"+re.escape(tempname_for_thumbnails)+"\\-\\d+\\.png$"
630
- regex_for_files = re.compile(regex)
631
- for file in files:
632
- filename = os.path.basename(file)
633
- if regex_for_files.match(filename):
634
- result.append(file)
635
- GeneralUtilities.assert_condition(0 < len(result), "No thumbnail-files found.")
636
- return result
637
-
638
- @GeneralUtilities.check_arguments
639
- def __create_thumbnail(self, outputfilename: str, folder: str, length_in_seconds: float, tempname_for_thumbnails: str, amount_of_images: int) -> None:
640
- duration = timedelta(seconds=length_in_seconds)
641
- info = GeneralUtilities.timedelta_to_simple_string(duration)
642
- rows: int = 5
643
- columns: int = math.ceil(amount_of_images/rows)
644
- argument = ['-title', f'"{outputfilename} ({info})"', '-tile', f'{rows}x{columns}', f'{tempname_for_thumbnails}*.png', f'{outputfilename}.png']
645
- self.run_program_argsasarray("montage", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
646
-
647
- @GeneralUtilities.check_arguments
648
- def __roundup(self, x: float, places: int) -> int:
649
- d = 10 ** places
650
- if x < 0:
651
- return math.floor(x * d) / d
652
- else:
653
- return math.ceil(x * d) / d
654
-
655
- @GeneralUtilities.check_arguments
656
- def generate_thumbnail(self, file: str, frames_per_second: str, tempname_for_thumbnails: str = None, hook=None) -> None:
657
- if tempname_for_thumbnails is None:
658
- tempname_for_thumbnails = "t_"+str(uuid.uuid4())
659
-
660
- file = GeneralUtilities.resolve_relative_path_from_current_working_directory(file)
661
- filename = os.path.basename(file)
662
- folder = os.path.dirname(file)
663
- filename_without_extension = Path(file).stem
664
- preview_files: list[str] = []
665
- try:
666
- length_in_seconds = self.__calculate_lengh_in_seconds(filename, folder)
667
- if (frames_per_second.endswith("fps")):
668
- # frames per second, example: frames_per_second="20fps" => 20 frames per second
669
- frames_per_second = self.__roundup(float(frames_per_second[:-3]), 2)
670
- frames_per_second_as_string = str(frames_per_second)
671
- amounf_of_previewframes = int(math.floor(length_in_seconds*frames_per_second))
672
- else:
673
- # concrete amount of frames, example: frames_per_second="16" => 16 frames for the entire video
674
- amounf_of_previewframes = int(float(frames_per_second))
675
- # self.roundup((amounf_of_previewframes-2)/length_in_seconds, 2)
676
- frames_per_second_as_string = f"{amounf_of_previewframes-2}/{length_in_seconds}"
677
- preview_files = self.__create_thumbnails(filename, frames_per_second_as_string, folder, tempname_for_thumbnails)
678
- if hook is not None:
679
- hook(file, preview_files)
680
- actual_amounf_of_previewframes = len(preview_files)
681
- self.__create_thumbnail(filename_without_extension, folder, length_in_seconds, tempname_for_thumbnails, actual_amounf_of_previewframes)
682
- finally:
683
- for thumbnail_to_delete in preview_files:
684
- os.remove(thumbnail_to_delete)
685
-
686
- @GeneralUtilities.check_arguments
687
- def extract_pdf_pages(self, file: str, from_page: int, to_page: int, outputfile: str) -> None:
688
- pdf_reader = PyPDF2.PdfReader(file)
689
- pdf_writer = PyPDF2.PdfWriter()
690
- start = from_page
691
- end = to_page
692
- while start <= end:
693
- pdf_writer.add_page(pdf_reader.pages[start-1])
694
- start += 1
695
- with open(outputfile, 'wb') as out:
696
- pdf_writer.write(out)
697
-
698
- @GeneralUtilities.check_arguments
699
- def merge_pdf_files(self, files: list[str], outputfile: str) -> None:
700
- # TODO add wildcard-option
701
- pdfFileMerger = PyPDF2.PdfFileMerger()
702
- for file in files:
703
- pdfFileMerger.append(file.strip())
704
- pdfFileMerger.write(outputfile)
705
- pdfFileMerger.close()
706
-
707
- @GeneralUtilities.check_arguments
708
- def pdf_to_image(self, file: str, outputfilename_without_extension: str) -> None:
709
- raise ValueError("Function currently not available")
710
- # PyMuPDF can be used for that but sometimes it throws
711
- # "ImportError: DLL load failed while importing _fitz: Das angegebene Modul wurde nicht gefunden."
712
-
713
- # doc = None # fitz.open(file)
714
- # for i, page in enumerate(doc):
715
- # pix = page.get_pixmap()
716
- # img = Image.frombytes("RGB", [pix.width, pix.height], pix.samples)
717
- # img.save(f"{outputfilename_without_extension}_{i}.png", "PNG")
718
-
719
- @GeneralUtilities.check_arguments
720
- def show_missing_files(self, folderA: str, folderB: str):
721
- for file in GeneralUtilities.get_missing_files(folderA, folderB):
722
- GeneralUtilities.write_message_to_stdout(file)
723
-
724
- @GeneralUtilities.check_arguments
725
- def SCCreateEmptyFileWithSpecificSize(self, name: str, size_string: str) -> int:
726
- if size_string.isdigit():
727
- size = int(size_string)
728
- else:
729
- if len(size_string) >= 3:
730
- if (size_string.endswith("kb")):
731
- size = int(size_string[:-2]) * pow(10, 3)
732
- elif (size_string.endswith("mb")):
733
- size = int(size_string[:-2]) * pow(10, 6)
734
- elif (size_string.endswith("gb")):
735
- size = int(size_string[:-2]) * pow(10, 9)
736
- elif (size_string.endswith("kib")):
737
- size = int(size_string[:-3]) * pow(2, 10)
738
- elif (size_string.endswith("mib")):
739
- size = int(size_string[:-3]) * pow(2, 20)
740
- elif (size_string.endswith("gib")):
741
- size = int(size_string[:-3]) * pow(2, 30)
742
- else:
743
- GeneralUtilities.write_message_to_stderr("Wrong format")
744
- return 1
745
- else:
746
- GeneralUtilities.write_message_to_stderr("Wrong format")
747
- return 1
748
- with open(name, "wb") as f:
749
- f.seek(size-1)
750
- f.write(b"\0")
751
- return 0
752
-
753
- @GeneralUtilities.check_arguments
754
- def SCCreateHashOfAllFiles(self, folder: str) -> None:
755
- for file in GeneralUtilities.absolute_file_paths(folder):
756
- with open(file+".sha256", "w+", encoding="utf-8") as f:
757
- f.write(GeneralUtilities.get_sha256_of_file(file))
758
-
759
- @GeneralUtilities.check_arguments
760
- def SCCreateSimpleMergeWithoutRelease(self, repository: str, sourcebranch: str, targetbranch: str, remotename: str, remove_source_branch: bool) -> None:
761
- commitid = self.git_merge(repository, sourcebranch, targetbranch, False, True)
762
- self.git_merge(repository, targetbranch, sourcebranch, True, True)
763
- created_version = self.get_semver_version_from_gitversion(repository)
764
- self.git_create_tag(repository, commitid, f"v{created_version}", True)
765
- self.git_push(repository, remotename, targetbranch, targetbranch, False, True)
766
- if (GeneralUtilities.string_has_nonwhitespace_content(remotename)):
767
- self.git_push(repository, remotename, sourcebranch, sourcebranch, False, True)
768
- if (remove_source_branch):
769
- self.git_remove_branch(repository, sourcebranch)
770
-
771
- @GeneralUtilities.check_arguments
772
- def sc_organize_lines_in_file(self, file: str, encoding: str, sort: bool = False, remove_duplicated_lines: bool = False, ignore_first_line: bool = False, remove_empty_lines: bool = True, ignored_start_character: list = list()) -> int:
773
- if os.path.isfile(file):
774
-
775
- # read file
776
- lines = GeneralUtilities.read_lines_from_file(file, encoding)
777
- if (len(lines) == 0):
778
- return 0
779
-
780
- # store first line if desired
781
-
782
- if (ignore_first_line):
783
- first_line = lines.pop(0)
784
-
785
- # remove empty lines if desired
786
- if remove_empty_lines:
787
- temp = lines
788
- lines = []
789
- for line in temp:
790
- if (not (GeneralUtilities.string_is_none_or_whitespace(line))):
791
- lines.append(line)
792
-
793
- # remove duplicated lines if desired
794
- if remove_duplicated_lines:
795
- lines = GeneralUtilities.remove_duplicates(lines)
796
-
797
- # sort lines if desired
798
- if sort:
799
- lines = sorted(lines, key=lambda singleline: self.__adapt_line_for_sorting(singleline, ignored_start_character))
800
-
801
- # reinsert first line
802
- if ignore_first_line:
803
- lines.insert(0, first_line)
804
-
805
- # write result to file
806
- GeneralUtilities.write_lines_to_file(file, lines, encoding)
807
-
808
- return 0
809
- else:
810
- GeneralUtilities.write_message_to_stdout(f"File '{file}' does not exist")
811
- return 1
812
-
813
- @GeneralUtilities.check_arguments
814
- def __adapt_line_for_sorting(self, line: str, ignored_start_characters: list):
815
- result = line.lower()
816
- while len(result) > 0 and result[0] in ignored_start_characters:
817
- result = result[1:]
818
- return result
819
-
820
- @GeneralUtilities.check_arguments
821
- def SCGenerateSnkFiles(self, outputfolder, keysize=4096, amountofkeys=10) -> int:
822
- GeneralUtilities.ensure_directory_exists(outputfolder)
823
- for _ in range(amountofkeys):
824
- file = os.path.join(outputfolder, str(uuid.uuid4())+".snk")
825
- argument = f"-k {keysize} {file}"
826
- self.run_program("sn", argument, outputfolder)
827
-
828
- @GeneralUtilities.check_arguments
829
- def __merge_files(self, sourcefile: str, targetfile: str) -> None:
830
- with open(sourcefile, "rb") as f:
831
- source_data = f.read()
832
- with open(targetfile, "ab") as fout:
833
- merge_separator = [0x0A]
834
- fout.write(bytes(merge_separator))
835
- fout.write(source_data)
836
-
837
- @GeneralUtilities.check_arguments
838
- def __process_file(self, file: str, substringInFilename: str, newSubstringInFilename: str, conflictResolveMode: str) -> None:
839
- new_filename = os.path.join(os.path.dirname(file), os.path.basename(file).replace(substringInFilename, newSubstringInFilename))
840
- if file != new_filename:
841
- if os.path.isfile(new_filename):
842
- if filecmp.cmp(file, new_filename):
843
- send2trash.send2trash(file)
844
- else:
845
- if conflictResolveMode == "ignore":
846
- pass
847
- elif conflictResolveMode == "preservenewest":
848
- if (os.path.getmtime(file) - os.path.getmtime(new_filename) > 0):
849
- send2trash.send2trash(file)
850
- else:
851
- send2trash.send2trash(new_filename)
852
- os.rename(file, new_filename)
853
- elif (conflictResolveMode == "merge"):
854
- self.__merge_files(file, new_filename)
855
- send2trash.send2trash(file)
856
- else:
857
- raise ValueError('Unknown conflict resolve mode')
858
- else:
859
- os.rename(file, new_filename)
860
-
861
- @GeneralUtilities.check_arguments
862
- def SCReplaceSubstringsInFilenames(self, folder: str, substringInFilename: str, newSubstringInFilename: str, conflictResolveMode: str) -> None:
863
- for file in GeneralUtilities.absolute_file_paths(folder):
864
- self.__process_file(file, substringInFilename, newSubstringInFilename, conflictResolveMode)
865
-
866
- @GeneralUtilities.check_arguments
867
- def __check_file(self, file: str, searchstring: str) -> None:
868
- bytes_ascii = bytes(searchstring, "ascii")
869
- # often called "unicode-encoding"
870
- bytes_utf16 = bytes(searchstring, "utf-16")
871
- bytes_utf8 = bytes(searchstring, "utf-8")
872
- with open(file, mode='rb') as file_object:
873
- content = file_object.read()
874
- if bytes_ascii in content:
875
- GeneralUtilities.write_message_to_stdout(file)
876
- elif bytes_utf16 in content:
877
- GeneralUtilities.write_message_to_stdout(file)
878
- elif bytes_utf8 in content:
879
- GeneralUtilities.write_message_to_stdout(file)
880
-
881
- @GeneralUtilities.check_arguments
882
- def SCSearchInFiles(self, folder: str, searchstring: str) -> None:
883
- for file in GeneralUtilities.absolute_file_paths(folder):
884
- self.__check_file(file, searchstring)
885
-
886
- @GeneralUtilities.check_arguments
887
- def __print_qr_code_by_csv_line(self, displayname: str, website: str, emailaddress: str, key: str, period: str) -> None:
888
- qrcode_content = f"otpauth://totp/{website}:{emailaddress}?secret={key}&issuer={displayname}&period={period}"
889
- GeneralUtilities.write_message_to_stdout(
890
- f"{displayname} ({emailaddress}):")
891
- GeneralUtilities.write_message_to_stdout(qrcode_content)
892
- qr = qrcode.QRCode()
893
- qr.add_data(qrcode_content)
894
- f = io.StringIO()
895
- qr.print_ascii(out=f)
896
- f.seek(0)
897
- GeneralUtilities.write_message_to_stdout(f.read())
898
-
899
- @GeneralUtilities.check_arguments
900
- def SCShow2FAAsQRCode(self, csvfile: str) -> None:
901
- separator_line = "--------------------------------------------------------"
902
- lines = GeneralUtilities.read_csv_file(csvfile, True)
903
- lines.sort(key=lambda items: ''.join(items).lower())
904
- for line in lines:
905
- GeneralUtilities.write_message_to_stdout(separator_line)
906
- self.__print_qr_code_by_csv_line(
907
- line[0], line[1], line[2], line[3], line[4])
908
- GeneralUtilities.write_message_to_stdout(separator_line)
909
-
910
- @GeneralUtilities.check_arguments
911
- def SCCalculateBitcoinBlockHash(self, block_version_number: str, previousblockhash: str, transactionsmerkleroot: str, timestamp: str, target: str, nonce: str) -> str:
912
- # Example-values:
913
- # block_version_number: "00000020"
914
- # previousblockhash: "66720b99e07d284bd4fe67ff8c49a5db1dd8514fcdab61000000000000000000"
915
- # transactionsmerkleroot: "7829844f4c3a41a537b3131ca992643eaa9d093b2383e4cdc060ad7dc5481187"
916
- # timestamp: "51eb505a"
917
- # target: "c1910018"
918
- # nonce: "de19b302"
919
- header = str(block_version_number + previousblockhash + transactionsmerkleroot + timestamp + target + nonce)
920
- return binascii.hexlify(hashlib.sha256(hashlib.sha256(binascii.unhexlify(header)).digest()).digest()[::-1]).decode('utf-8')
921
-
922
- @GeneralUtilities.check_arguments
923
- def SCChangeHashOfProgram(self, inputfile: str) -> None:
924
- valuetoappend = str(uuid.uuid4())
925
-
926
- outputfile = inputfile + '.modified'
927
-
928
- shutil.copy2(inputfile, outputfile)
929
- with open(outputfile, 'a', encoding="utf-8") as file:
930
- # TODO use rcedit for .exe-files instead of appending valuetoappend ( https://github.com/electron/rcedit/ )
931
- # background: you can retrieve the "original-filename" from the .exe-file like discussed here:
932
- # https://security.stackexchange.com/questions/210843/is-it-possible-to-change-original-filename-of-an-exe
933
- # so removing the original filename with rcedit is probably a better way to make it more difficult to detect the programname.
934
- # this would obviously also change the hashvalue of the program so appending a whitespace is not required anymore.
935
- file.write(valuetoappend)
936
-
937
- @GeneralUtilities.check_arguments
938
- def __adjust_folder_name(self, folder: str) -> str:
939
- result = os.path.dirname(folder).replace("\\", "/")
940
- if result == "/":
941
- return ""
942
- else:
943
- return result
944
-
945
- @GeneralUtilities.check_arguments
946
- def __create_iso(self, folder, iso_file) -> None:
947
- created_directories = []
948
- files_directory = "FILES"
949
- iso = pycdlib.PyCdlib()
950
- iso.new()
951
- files_directory = files_directory.upper()
952
- iso.add_directory("/" + files_directory)
953
- created_directories.append("/" + files_directory)
954
- for root, _, files in os.walk(folder):
955
- for file in files:
956
- full_path = os.path.join(root, file)
957
- with open(full_path, "rb") as file_object:
958
- content = file_object.read()
959
- path_in_iso = '/' + files_directory + \
960
- self.__adjust_folder_name(full_path[len(folder)::1]).upper()
961
- if path_in_iso not in created_directories:
962
- iso.add_directory(path_in_iso)
963
- created_directories.append(path_in_iso)
964
- iso.add_fp(BytesIO(content), len(content), path_in_iso + '/' + file.upper() + ';1')
965
- iso.write(iso_file)
966
- iso.close()
967
-
968
- @GeneralUtilities.check_arguments
969
- def SCCreateISOFileWithObfuscatedFiles(self, inputfolder: str, outputfile: str, printtableheadline, createisofile, extensions) -> None:
970
- if (os.path.isdir(inputfolder)):
971
- namemappingfile = "name_map.csv"
972
- files_directory = inputfolder
973
- files_directory_obf = f"{files_directory}_Obfuscated"
974
- self.SCObfuscateFilesFolder(
975
- inputfolder, printtableheadline, namemappingfile, extensions)
976
- os.rename(namemappingfile, os.path.join(
977
- files_directory_obf, namemappingfile))
978
- if createisofile:
979
- self.__create_iso(files_directory_obf, outputfile)
980
- shutil.rmtree(files_directory_obf)
981
- else:
982
- raise ValueError(f"Directory not found: '{inputfolder}'")
983
-
984
- @GeneralUtilities.check_arguments
985
- def SCFilenameObfuscator(self, inputfolder: str, printtableheadline, namemappingfile: str, extensions: str) -> None:
986
- obfuscate_all_files = extensions == "*"
987
- if (obfuscate_all_files):
988
- obfuscate_file_extensions = None
989
- else:
990
- obfuscate_file_extensions = extensions.split(",")
991
- if (os.path.isdir(inputfolder)):
992
- printtableheadline = GeneralUtilities.string_to_boolean(
993
- printtableheadline)
994
- files = []
995
- if not os.path.isfile(namemappingfile):
996
- with open(namemappingfile, "a", encoding="utf-8"):
997
- pass
998
- if printtableheadline:
999
- GeneralUtilities.append_line_to_file(
1000
- namemappingfile, "Original filename;new filename;SHA2-hash of file")
1001
- for file in GeneralUtilities.absolute_file_paths(inputfolder):
1002
- if os.path.isfile(os.path.join(inputfolder, file)):
1003
- if obfuscate_all_files or self.__extension_matchs(file, obfuscate_file_extensions):
1004
- files.append(file)
1005
- for file in files:
1006
- hash_value = GeneralUtilities.get_sha256_of_file(file)
1007
- extension = Path(file).suffix
1008
- new_file_name_without_path = str(uuid.uuid4())[0:8] + extension
1009
- new_file_name = os.path.join(
1010
- os.path.dirname(file), new_file_name_without_path)
1011
- os.rename(file, new_file_name)
1012
- GeneralUtilities.append_line_to_file(namemappingfile, os.path.basename(file) + ";" + new_file_name_without_path + ";" + hash_value)
1013
- else:
1014
- raise ValueError(f"Directory not found: '{inputfolder}'")
1015
-
1016
- @GeneralUtilities.check_arguments
1017
- def __extension_matchs(self, file: str, obfuscate_file_extensions) -> bool:
1018
- for extension in obfuscate_file_extensions:
1019
- if file.lower().endswith("."+extension.lower()):
1020
- return True
1021
- return False
1022
-
1023
- @GeneralUtilities.check_arguments
1024
- def SCHealthcheck(self, file: str) -> int:
1025
- lines = GeneralUtilities.read_lines_from_file(file)
1026
- for line in reversed(lines):
1027
- if not GeneralUtilities.string_is_none_or_whitespace(line):
1028
- if "RunningHealthy (" in line: # TODO use regex
1029
- GeneralUtilities.write_message_to_stderr(f"Healthy running due to line '{line}' in file '{file}'.")
1030
- return 0
1031
- else:
1032
- GeneralUtilities.write_message_to_stderr(f"Not healthy running due to line '{line}' in file '{file}'.")
1033
- return 1
1034
- GeneralUtilities.write_message_to_stderr(f"No valid line found for healthycheck in file '{file}'.")
1035
- return 2
1036
-
1037
- @GeneralUtilities.check_arguments
1038
- def SCObfuscateFilesFolder(self, inputfolder: str, printtableheadline, namemappingfile: str, extensions: str) -> None:
1039
- obfuscate_all_files = extensions == "*"
1040
- if (obfuscate_all_files):
1041
- obfuscate_file_extensions = None
1042
- else:
1043
- if "," in extensions:
1044
- obfuscate_file_extensions = extensions.split(",")
1045
- else:
1046
- obfuscate_file_extensions = [extensions]
1047
- newd = inputfolder+"_Obfuscated"
1048
- shutil.copytree(inputfolder, newd)
1049
- inputfolder = newd
1050
- if (os.path.isdir(inputfolder)):
1051
- for file in GeneralUtilities.absolute_file_paths(inputfolder):
1052
- if obfuscate_all_files or self.__extension_matchs(file, obfuscate_file_extensions):
1053
- self.SCChangeHashOfProgram(file)
1054
- os.remove(file)
1055
- os.rename(file + ".modified", file)
1056
- self.SCFilenameObfuscator(inputfolder, printtableheadline, namemappingfile, extensions)
1057
- else:
1058
- raise ValueError(f"Directory not found: '{inputfolder}'")
1059
-
1060
- @GeneralUtilities.check_arguments
1061
- def get_services_from_yaml_file(self, yaml_file: str) -> list[str]:
1062
- with open(yaml_file, encoding="utf-8") as stream:
1063
- loaded = yaml.safe_load(stream)
1064
- services = loaded["services"]
1065
- result = list(services.keys())
1066
- return result
1067
-
1068
- @GeneralUtilities.check_arguments
1069
- def kill_docker_container(self, container_name: str) -> None:
1070
- self.run_program("docker", f"container rm -f {container_name}")
1071
-
1072
- @GeneralUtilities.check_arguments
1073
- def get_docker_debian_version(self, image_tag: str) -> str:
1074
- result = ScriptCollectionCore().run_program_argsasarray(
1075
- "docker", ['run', f'debian:{image_tag}', 'bash', '-c', 'apt-get -y update && apt-get -y install lsb-release && lsb_release -cs'])
1076
- result_line = GeneralUtilities.string_to_lines(result[1])[-2]
1077
- return result_line
1078
-
1079
- @GeneralUtilities.check_arguments
1080
- def get_latest_tor_version_of_debian_repository(self, debian_version: str) -> str:
1081
- package_url: str = f"https://deb.torproject.org/torproject.org/dists/{debian_version}/main/binary-amd64/Packages"
1082
- r = requests.get(package_url, timeout=5)
1083
- if r.status_code != 200:
1084
- raise ValueError(f"Checking for latest tor package resulted in HTTP-response-code {r.status_code}.")
1085
- lines = GeneralUtilities.string_to_lines(GeneralUtilities.bytes_to_string(r.content))
1086
- version_line_prefix = "Version: "
1087
- version_content_line = [line for line in lines if line.startswith(version_line_prefix)][1]
1088
- version_with_overhead = version_content_line[len(version_line_prefix):]
1089
- tor_version = version_with_overhead.split("~")[0]
1090
- return tor_version
1091
-
1092
- def run_testcases_for_python_project(self, repository_folder: str):
1093
- self.run_program("coverage", "run -m pytest", repository_folder)
1094
- self.run_program("coverage", "xml", repository_folder)
1095
- GeneralUtilities.ensure_directory_exists(os.path.join(repository_folder, "Other/TestCoverage"))
1096
- coveragefile = os.path.join(repository_folder, "Other/TestCoverage/TestCoverage.xml")
1097
- GeneralUtilities.ensure_file_does_not_exist(coveragefile)
1098
- os.rename(os.path.join(repository_folder, "coverage.xml"), coveragefile)
1099
-
1100
- @GeneralUtilities.check_arguments
1101
- def get_file_permission(self, file: str) -> str:
1102
- """This function returns an usual octet-triple, for example "700"."""
1103
- ls_output: str = self.run_ls_for_folder(file)
1104
- return self.__get_file_permission_helper(ls_output)
1105
-
1106
- @GeneralUtilities.check_arguments
1107
- def __get_file_permission_helper(self, permissions: str) -> str:
1108
- return str(self.__to_octet(permissions[1:4]))+str(self.__to_octet(permissions[4:7]))+str(self.__to_octet(permissions[7:10]))  # indices 1..9 skip the leading file-type character of the "ls -ld"-output
1109
-
1110
- @GeneralUtilities.check_arguments
1111
- def __to_octet(self, string: str) -> int:
1112
- return int(self.__to_octet_helper(string[0])+self.__to_octet_helper(string[1])+self.__to_octet_helper(string[2]), 2)
1113
-
1114
- @GeneralUtilities.check_arguments
1115
- def __to_octet_helper(self, string: str) -> str:
1116
- if (string == "-"):
1117
- return "0"
1118
- else:
1119
- return "1"
1120
-
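# Worked example (editor's addition): the helpers above read the mode string of an "ls -ld"-line as
# three binary groups, so "rwxr-x---" maps to "750". On a Unix-like system with "ls" available:
sc = ScriptCollectionCore()
print(sc.get_file_permission("/etc/hostname"))  # prints e.g. "644" for a typical configuration file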
1121
- @GeneralUtilities.check_arguments
1122
- def get_file_owner(self, file: str) -> str:
1123
- """This function returns the user and the group in the format "user:group"."""
1124
- ls_output: str = self.run_ls_for_folder(file)
1125
- return self.__get_file_owner_helper(ls_output)
1126
-
1127
- @GeneralUtilities.check_arguments
1128
- def __get_file_owner_helper(self, ls_output: str) -> str:
1129
- splitted = ls_output.split()
1130
- return f"{splitted[2]}:{splitted[3]}"
1131
-
1132
- @GeneralUtilities.check_arguments
1133
- def get_file_owner_and_file_permission(self, file: str) -> list[str]:
1134
- ls_output: str = self.run_ls_for_folder(file)
1135
- return [self.__get_file_owner_helper(ls_output), self.__get_file_permission_helper(ls_output)]
1136
-
1137
- @GeneralUtilities.check_arguments
1138
- def run_ls_for_folder(self, file_or_folder: str) -> str:
1139
- file_or_folder = file_or_folder.replace("\\", "/")
1140
- GeneralUtilities.assert_condition(os.path.isfile(file_or_folder) or os.path.isdir(file_or_folder), f"Can not execute 'ls -ld' because '{file_or_folder}' does not exist.")
1141
- ls_result = self.run_program_argsasarray("ls", ["-ld", file_or_folder])
1142
- GeneralUtilities.assert_condition(ls_result[0] == 0, f"'ls -ld {file_or_folder}' resulted in exitcode {str(ls_result[0])}. StdErr: {ls_result[2]}")
1143
- GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(ls_result[1]), f"'ls -ld' of '{file_or_folder}' had an empty output. StdErr: '{ls_result[2]}'")
1144
- GeneralUtilities.write_message_to_stdout(ls_result[1])
1145
- output = ls_result[1]
1146
- result = output.replace("\n", "")
1147
- result = ' '.join(result.split()) # reduce multiple whitespaces to one
1148
- return result
1149
-
1150
- @GeneralUtilities.check_arguments
1151
- def run_ls_for_folder_content(self, file_or_folder: str) -> list[str]:
1152
- file_or_folder = file_or_folder.replace("\\", "/")
1153
- GeneralUtilities.assert_condition(os.path.isfile(file_or_folder) or os.path.isdir(file_or_folder), f"Can not execute 'ls -la' because '{file_or_folder}' does not exist.")
1154
- ls_result = self.run_program_argsasarray("ls", ["-la", file_or_folder])
1155
- GeneralUtilities.assert_condition(ls_result[0] == 0, f"'ls -la {file_or_folder}' resulted in exitcode {str(ls_result[0])}. StdErr: {ls_result[2]}")
1156
- GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(ls_result[1]), f"'ls -la' of '{file_or_folder}' had an empty output. StdErr: '{ls_result[2]}'")
1157
- GeneralUtilities.write_message_to_stdout(ls_result[1])
1158
- output = ls_result[1]
1159
- result = output.split("\n")[3:] # skip the lines with "Total", "." and ".."
1160
- result = [' '.join(line.split()) for line in result] # reduce multiple whitespaces to one
1161
- return result
1162
-
1163
- @GeneralUtilities.check_arguments
1164
- def set_permission(self, file_or_folder: str, permissions: str, recursive: bool = False) -> None:
1165
- """This function expects an usual octet-triple, for example "700"."""
1166
- args = []
1167
- if recursive:
1168
- args.append("--recursive")
1169
- args.append(permissions)
1170
- args.append(file_or_folder)
1171
- self.run_program_argsasarray("chmod", args)
1172
-
1173
- @GeneralUtilities.check_arguments
1174
- def set_owner(self, file_or_folder: str, owner: str, recursive: bool = False, follow_symlinks: bool = False) -> None:
1175
- """This function expects the user and the group in the format "user:group"."""
1176
- args = []
1177
- if recursive:
1178
- args.append("--recursive")
1179
- if not follow_symlinks:
1180
- args.append("--no-dereference")
1181
- args.append(owner)
1182
- args.append(file_or_folder)
1183
- self.run_program_argsasarray("chown", args)
1184
-
1185
- # <run programs>
1186
-
1187
- @GeneralUtilities.check_arguments
1188
- def __run_program_argsasarray_async_helper(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, custom_argument: object = None, interactive: bool = False) -> Popen:
1189
- # Verbosity:
1190
- # 0=Quiet (No output will be printed.)
1191
- # 1=Normal (If the exitcode of the executed program is not 0 then the StdErr will be printed.)
1192
- # 2=Full (Prints StdOut and StdErr of the executed program.)
1193
- # 3=Verbose (Same as "Full" but with some more information.)
1194
-
1195
- if isinstance(self.program_runner, ProgramRunnerEpew):
1196
- custom_argument = CustomEpewArgument(print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, verbosity, arguments_for_log)
1197
- popen: Popen = self.program_runner.run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, custom_argument, interactive)
1198
- return popen
1199
-
1200
- @staticmethod
1201
- def __enqueue_output(file, queue):
1202
- for line in iter(file.readline, ''):
1203
- queue.put(line)
1204
- file.close()
1205
-
1206
- @staticmethod
1207
- def __read_popen_pipes(p: Popen):
1208
- with ThreadPoolExecutor(2) as pool:
1209
- q_stdout = Queue()
1210
- q_stderr = Queue()
1211
-
1212
- pool.submit(ScriptCollectionCore.__enqueue_output, p.stdout, q_stdout)
1213
- pool.submit(ScriptCollectionCore.__enqueue_output, p.stderr, q_stderr)
1214
- while (p.poll() is None) or (not q_stdout.empty()) or (not q_stderr.empty()):
1215
- time.sleep(0.01)
1216
- out_line = None
1217
- err_line = None
1218
- try:
1219
- out_line = q_stdout.get_nowait()
1220
- except Empty:
1221
- pass
1222
- try:
1223
- err_line = q_stderr.get_nowait()
1224
- except Empty:
1225
- pass
1226
-
1227
- yield (out_line, err_line)
1228
-
1229
- # Return-values program_runner: Exitcode, StdOut, StdErr, Pid
1230
- @GeneralUtilities.check_arguments
1231
- def run_program_argsasarray(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, throw_exception_if_exitcode_is_not_zero: bool = True, custom_argument: object = None, interactive: bool = False) -> tuple[int, str, str, int]:
1232
- # verbosity 0: No output will be printed.
1233
- # verbosity 1: Default. No live output; a failing exitcode surfaces via the raised exception (or the returned tuple).
1234
- # verbosity 2: Prints StdOut and StdErr of the executed program in realtime.
1235
- # verbosity 3: Same as verbosity 2 but with some more overhead-information.
1236
- try:
1237
- arguments_as_str = ' '.join(arguments_as_array)
1238
- mock_loader_result = self.__try_load_mock(program, arguments_as_str, working_directory)
1239
- if mock_loader_result[0]:
1240
- return mock_loader_result[1]
1241
-
1242
- working_directory = self.__adapt_workingdirectory(working_directory)
1243
-
1244
- if arguments_for_log is None:
1245
- arguments_for_log = arguments_as_array
1246
-
1247
- arguments_for_log_as_string: str = ' '.join(arguments_for_log)
1248
- cmd = f'{working_directory}>{program} {arguments_for_log_as_string}'
1249
-
1250
- if GeneralUtilities.string_is_none_or_whitespace(title):
1251
- info_for_log = cmd
1252
- else:
1253
- info_for_log = title
1254
-
1255
- if verbosity >= 3:
1256
- GeneralUtilities.write_message_to_stdout(f"Run '{info_for_log}'.")
1257
-
1258
- print_live_output = 1 < verbosity
1259
-
1260
- exit_code: int = None
1261
- stdout: str = ""
1262
- stderr: str = ""
1263
- pid: int = None
1264
-
1265
- with self.__run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive) as process:
1266
-
1267
- if log_file is not None:
1268
- GeneralUtilities.ensure_file_exists(log_file)
1269
- pid = process.pid
1270
- for out_line_plain, err_line_plain in ScriptCollectionCore.__read_popen_pipes(process): # see https://stackoverflow.com/a/57084403/3905529
1271
-
1272
- if out_line_plain is not None:
1273
- out_line: str = None
1274
- if isinstance(out_line_plain, str):
1275
- out_line = out_line_plain
1276
- elif isinstance(out_line_plain, bytes):
1277
- out_line = GeneralUtilities.bytes_to_string(out_line_plain)
1278
- else:
1279
- raise ValueError(f"Unknown type of output: {str(type(out_line_plain))}")
1280
-
1281
- if out_line is not None and GeneralUtilities.string_has_content(out_line):
1282
- if out_line.endswith("\n"):
1283
- out_line = out_line[:-1]
1284
- if print_live_output:
1285
- print(out_line, end='\n', file=sys.stdout, flush=True)
1286
- if 0 < len(stdout):
1287
- stdout = stdout+"\n"
1288
- stdout = stdout+out_line
1289
- if log_file is not None:
1290
- GeneralUtilities.append_line_to_file(log_file, out_line)
1291
-
1292
- if err_line_plain is not None:
1293
- err_line: str = None
1294
- if isinstance(err_line_plain, str):
1295
- err_line = err_line_plain
1296
- elif isinstance(err_line_plain, bytes):
1297
- err_line = GeneralUtilities.bytes_to_string(err_line_plain)
1298
- else:
1299
- raise ValueError(f"Unknown type of output: {str(type(err_line_plain))}")
1300
- if err_line is not None and GeneralUtilities.string_has_content(err_line):
1301
- if err_line.endswith("\n"):
1302
- err_line = err_line[:-1]
1303
- if print_live_output:
1304
- print(err_line, end='\n', file=sys.stderr, flush=True)
1305
- if 0 < len(stderr):
1306
- stderr = stderr+"\n"
1307
- stderr = stderr+err_line
1308
- if log_file is not None:
1309
- GeneralUtilities.append_line_to_file(log_file, err_line)
1310
-
1311
- exit_code = process.returncode
1312
-
1313
- if throw_exception_if_exitcode_is_not_zero and exit_code != 0:
1314
- raise ValueError(f"Program '{working_directory}>{program} {arguments_for_log_as_string}' resulted in exitcode {exit_code}. (StdOut: '{stdout}', StdErr: '{stderr}')")
1315
-
1316
- GeneralUtilities.assert_condition(exit_code is not None, f"Exitcode of program-run of '{info_for_log}' is None.")
1317
- result = (exit_code, stdout, stderr, pid)
1318
- return result
1319
- except Exception as e:
1320
- raise e
1321
-
1322
- # Return-values program_runner: Exitcode, StdOut, StdErr, Pid
1323
- @GeneralUtilities.check_arguments
1324
- def run_program(self, program: str, arguments: str = "", working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, throw_exception_if_exitcode_is_not_zero: bool = True, custom_argument: object = None, interactive: bool = False) -> tuple[int, str, str, int]:
1325
- return self.run_program_argsasarray(program, GeneralUtilities.arguments_to_array(arguments), working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, throw_exception_if_exitcode_is_not_zero, custom_argument, interactive)
1326
-
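# Hedged example (editor's addition; assumes "git" is installed): run_program returns the tuple
# (exit_code, stdout, stderr, pid) and raises by default when the exit code is not zero.
sc = ScriptCollectionCore()
exit_code, stdout, stderr, pid = sc.run_program("git", "--version", os.getcwd())
print(f"git exited with {exit_code}: {stdout}")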
1327
- # Return-values program_runner: Pid
1328
- @GeneralUtilities.check_arguments
1329
- def run_program_argsasarray_async(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, custom_argument: object = None, interactive: bool = False) -> int:
1330
- mock_loader_result = self.__try_load_mock(program, ' '.join(arguments_as_array), working_directory)
1331
- if mock_loader_result[0]:
1332
- return mock_loader_result[1]
1333
- process: Popen = self.__run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive)
1334
- return process.pid
1335
-
1336
- # Return-values program_runner: Pid
1337
- @GeneralUtilities.check_arguments
1338
- def run_program_async(self, program: str, arguments: str = "", working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, custom_argument: object = None, interactive: bool = False) -> int:
1339
- return self.run_program_argsasarray_async(program, GeneralUtilities.arguments_to_array(arguments), working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive)
1340
-
1341
- @GeneralUtilities.check_arguments
1342
- def __try_load_mock(self, program: str, arguments: str, working_directory: str) -> tuple[bool, tuple[int, str, str, int]]:
1343
- if self.mock_program_calls:
1344
- try:
1345
- return [True, self.__get_mock_program_call(program, arguments, working_directory)]
1346
- except LookupError:
1347
- if not self.execute_program_really_if_no_mock_call_is_defined:
1348
- raise
1349
- return [False, None]
1350
-
1351
- @GeneralUtilities.check_arguments
1352
- def __adapt_workingdirectory(self, workingdirectory: str) -> str:
1353
- if workingdirectory is None:
1354
- return os.getcwd()
1355
- else:
1356
- return GeneralUtilities.resolve_relative_path_from_current_working_directory(workingdirectory)
1357
-
1358
- @GeneralUtilities.check_arguments
1359
- def verify_no_pending_mock_program_calls(self):
1360
- if (len(self.__mocked_program_calls) > 0):
1361
- raise AssertionError("The following mock-calls were not called:\n"+",\n ".join([self.__format_mock_program_call(r) for r in self.__mocked_program_calls]))
1362
-
1363
- @GeneralUtilities.check_arguments
1364
- def __format_mock_program_call(self, r) -> str:
1365
- r: ScriptCollectionCore.__MockProgramCall = r
1366
- return f"'{r.workingdirectory}>{r.program} {r.argument}' (" \
1367
- f"exitcode: {GeneralUtilities.str_none_safe(str(r.exit_code))}, " \
1368
- f"pid: {GeneralUtilities.str_none_safe(str(r.pid))}, "\
1369
- f"stdout: {GeneralUtilities.str_none_safe(str(r.stdout))}, " \
1370
- f"stderr: {GeneralUtilities.str_none_safe(str(r.stderr))})"
1371
-
1372
- @GeneralUtilities.check_arguments
1373
- def register_mock_program_call(self, program: str, argument: str, workingdirectory: str, result_exit_code: int, result_stdout: str, result_stderr: str, result_pid: int, amount_of_expected_calls=1):
1374
- "This function is for test-purposes only"
1375
- for _ in itertools.repeat(None, amount_of_expected_calls):
1376
- mock_call = ScriptCollectionCore.__MockProgramCall()
1377
- mock_call.program = program
1378
- mock_call.argument = argument
1379
- mock_call.workingdirectory = workingdirectory
1380
- mock_call.exit_code = result_exit_code
1381
- mock_call.stdout = result_stdout
1382
- mock_call.stderr = result_stderr
1383
- mock_call.pid = result_pid
1384
- self.__mocked_program_calls.append(mock_call)
1385
-
1386
- @GeneralUtilities.check_arguments
1387
- def __get_mock_program_call(self, program: str, argument: str, workingdirectory: str):
1388
- result: ScriptCollectionCore.__MockProgramCall = None
1389
- for mock_call in self.__mocked_program_calls:
1390
- if ((re.match(mock_call.program, program) is not None)
1391
- and (re.match(mock_call.argument, argument) is not None)
1392
- and (re.match(mock_call.workingdirectory, workingdirectory) is not None)):
1393
- result = mock_call
1394
- break
1395
- if result is None:
1396
- raise LookupError(f"Tried to execute mock-call '{workingdirectory}>{program} {argument}' but no mock-call was defined for that execution")
1397
- else:
1398
- self.__mocked_program_calls.remove(result)
1399
- return (result.exit_code, result.stdout, result.stderr, result.pid)
1400
-
1401
- @GeneralUtilities.check_arguments
1402
- class __MockProgramCall:
1403
- program: str
1404
- argument: str
1405
- workingdirectory: str
1406
- exit_code: int
1407
- stdout: str
1408
- stderr: str
1409
- pid: int
1410
-
1411
- # </run programs>
1412
-
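# Testing sketch (editor's addition, showing one plausible way to use the mock-API above): external
# program calls can be simulated in unit tests instead of being executed. The working directory and
# the mocked output are hypothetical values.
sc = ScriptCollectionCore()
sc.mock_program_calls = True
sc.register_mock_program_call("git", "status", ".*", 0, "clean", "", 12345)
exit_code, stdout, stderr, pid = sc.run_program("git", "status", "/some/repository")
assert (exit_code, stdout) == (0, "clean")
sc.verify_no_pending_mock_program_calls()  # raises if a registered mock-call was never consumed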
1413
- @GeneralUtilities.check_arguments
1414
- def extract_archive_with_7z(self, unzip_program_file: str, zipfile: str, password: str, output_directory: str) -> None:
1415
- password_set = password is not None
1416
- file_name = Path(zipfile).name
1417
- file_folder = os.path.dirname(zipfile)
1418
- argument = "x"
1419
- if password_set:
1420
- argument = f"{argument} -p\"{password}\""
1421
- argument = f"{argument} -o {output_directory}"
1422
- argument = f"{argument} {file_name}"
1423
- return self.run_program(unzip_program_file, argument, file_folder)
1424
-
1425
- @GeneralUtilities.check_arguments
1426
- def get_internet_time(self) -> datetime:
1427
- response = ntplib.NTPClient().request('pool.ntp.org')
1428
- return datetime.fromtimestamp(response.tx_time)
1429
-
1430
- @GeneralUtilities.check_arguments
1431
- def system_time_equals_internet_time(self, maximal_tolerance_difference: timedelta) -> bool:
1432
- return abs(datetime.now() - self.get_internet_time()) < maximal_tolerance_difference
1433
-
1434
- @GeneralUtilities.check_arguments
1435
- def system_time_equals_internet_time_with_default_tolerance(self) -> bool:
1436
- return self.system_time_equals_internet_time(self.__get_default_tolerance_for_system_time_equals_internet_time())
1437
-
1438
- @GeneralUtilities.check_arguments
1439
- def check_system_time(self, maximal_tolerance_difference: timedelta):
1440
- if not self.system_time_equals_internet_time(maximal_tolerance_difference):
1441
- raise ValueError("System time may be wrong")
1442
-
1443
- @GeneralUtilities.check_arguments
1444
- def check_system_time_with_default_tolerance(self) -> None:
1445
- self.check_system_time(self.__get_default_tolerance_for_system_time_equals_internet_time())
1446
-
1447
- @GeneralUtilities.check_arguments
1448
- def __get_default_tolerance_for_system_time_equals_internet_time(self) -> timedelta:
1449
- return timedelta(hours=0, minutes=0, seconds=3)
1450
-
1451
- @GeneralUtilities.check_arguments
1452
- def increment_version(self, input_version: str, increment_major: bool, increment_minor: bool, increment_patch: bool) -> str:
1453
- splitted = input_version.split(".")
1454
- GeneralUtilities.assert_condition(len(splitted) == 3, f"Version '{input_version}' does not have the 'major.minor.patch'-pattern.")
1455
- major = int(splitted[0])
1456
- minor = int(splitted[1])
1457
- patch = int(splitted[2])
1458
- if increment_major:
1459
- major = major+1
1460
- if increment_minor:
1461
- minor = minor+1
1462
- if increment_patch:
1463
- patch = patch+1
1464
- return f"{major}.{minor}.{patch}"
1465
-
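# Worked example (editor's addition): increment_version only bumps the requested parts of a
# "major.minor.patch"-string and leaves the other parts untouched.
sc = ScriptCollectionCore()
assert sc.increment_version("1.4.9", False, False, True) == "1.4.10"
assert sc.increment_version("1.4.9", True, False, False) == "2.4.9"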
1466
- @GeneralUtilities.check_arguments
1467
- def get_semver_version_from_gitversion(self, repository_folder: str) -> str:
1468
- if (self.git_repository_has_commits(repository_folder)):
1469
- result = self.get_version_from_gitversion(repository_folder, "MajorMinorPatch")
1470
- if self.git_repository_has_uncommitted_changes(repository_folder):
1471
- if self.get_current_git_branch_has_tag(repository_folder):
1472
- id_of_latest_tag = self.git_get_commitid_of_tag(repository_folder, self.get_latest_git_tag(repository_folder))
1473
- current_commit = self.git_get_commit_id(repository_folder)
1474
- current_commit_is_on_latest_tag = id_of_latest_tag == current_commit
1475
- if current_commit_is_on_latest_tag:
1476
- result = self.increment_version(result, False, False, True)
1477
- else:
1478
- result = "0.1.0"
1479
- return result
1480
-
1481
- @staticmethod
1482
- @GeneralUtilities.check_arguments
1483
- def is_patch_version(version_string: str) -> bool:
1484
- return not version_string.endswith(".0")
1485
-
1486
- @GeneralUtilities.check_arguments
1487
- def get_version_from_gitversion(self, folder: str, variable: str) -> str:
1488
- # called twice as workaround for issue 1877 in gitversion ( https://github.com/GitTools/GitVersion/issues/1877 )
1489
- result = self.run_program_argsasarray("gitversion", ["/showVariable", variable], folder, verbosity=0)
1490
- result = self.run_program_argsasarray("gitversion", ["/showVariable", variable], folder, verbosity=0)
1491
- result = GeneralUtilities.strip_new_line_character(result[1])
1492
-
1493
- return result
1494
-
1495
- @GeneralUtilities.check_arguments
1496
- def generate_certificate_authority(self, folder: str, name: str, subj_c: str, subj_st: str, subj_l: str, subj_o: str, subj_ou: str, days_until_expire: int = None, password: str = None) -> None:
1497
- if days_until_expire is None:
1498
- days_until_expire = 1825
1499
- if password is None:
1500
- password = GeneralUtilities.generate_password()
1501
- GeneralUtilities.ensure_directory_exists(folder)
1502
- self.run_program("openssl", f'req -new -newkey ec -pkeyopt ec_paramgen_curve:prime256v1 -days {days_until_expire} -nodes -x509 -subj /C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={name}/OU={subj_ou} -passout pass:{password} -keyout {name}.key -out {name}.crt', folder)
1503
-
1504
- @GeneralUtilities.check_arguments
1505
- def generate_certificate(self, folder: str, domain: str, filename: str, subj_c: str, subj_st: str, subj_l: str, subj_o: str, subj_ou: str, days_until_expire: int = None, password: str = None) -> None:
1506
- if days_until_expire is None:
1507
- days_until_expire = 397
1508
- if password is None:
1509
- password = GeneralUtilities.generate_password()
1510
- rsa_key_length = 4096
1511
- self.run_program("openssl", f'genrsa -out {filename}.key {rsa_key_length}', folder)
1512
- self.run_program("openssl", f'req -new -subj /C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={domain}/OU={subj_ou} -x509 -key {filename}.key -out {filename}.unsigned.crt -days {days_until_expire}', folder)
1513
- self.run_program("openssl", f'pkcs12 -export -out {filename}.selfsigned.pfx -password pass:{password} -inkey {filename}.key -in {filename}.unsigned.crt', folder)
1514
- GeneralUtilities.write_text_to_file(os.path.join(folder, f"{filename}.password"), password)
1515
- GeneralUtilities.write_text_to_file(os.path.join(folder, f"{filename}.san.conf"), f"""[ req ]
1516
- default_bits = {rsa_key_length}
1517
- distinguished_name = req_distinguished_name
1518
- req_extensions = v3_req
1519
- default_md = sha256
1520
- dirstring_type = nombstr
1521
- prompt = no
1522
-
1523
- [ req_distinguished_name ]
1524
- countryName = {subj_c}
1525
- stateOrProvinceName = {subj_st}
1526
- localityName = {subj_l}
1527
- organizationName = {subj_o}
1528
- organizationUnit = {subj_ou}
1529
- commonName = {domain}
1530
-
1531
- [v3_req]
1532
- subjectAltName = @subject_alt_name
1533
-
1534
- [ subject_alt_name ]
1535
- DNS = {domain}
1536
- """)
1537
-
1538
- @GeneralUtilities.check_arguments
1539
- def generate_certificate_sign_request(self, folder: str, domain: str, filename: str, subj_c: str, subj_st: str, subj_l: str, subj_o: str, subj_ou: str) -> None:
1540
- self.run_program("openssl", f'req -new -subj /C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={domain}/OU={subj_ou} -key {filename}.key -out {filename}.csr -config {filename}.san.conf', folder)
1541
-
1542
- @GeneralUtilities.check_arguments
1543
- def sign_certificate(self, folder: str, ca_folder: str, ca_name: str, domain: str, filename: str, days_until_expire: int = None) -> None:
1544
- if days_until_expire is None:
1545
- days_until_expire = 397
1546
- ca = os.path.join(ca_folder, ca_name)
1547
- password_file = os.path.join(folder, f"{filename}.password")
1548
- password = GeneralUtilities.read_text_from_file(password_file)
1549
- self.run_program("openssl", f'x509 -req -in {filename}.csr -CA {ca}.crt -CAkey {ca}.key -CAcreateserial -CAserial {ca}.srl -out {filename}.crt -days {days_until_expire} -sha256 -extensions v3_req -extfile {filename}.san.conf', folder)
1550
- self.run_program("openssl", f'pkcs12 -export -out {filename}.pfx -inkey {filename}.key -in {filename}.crt -password pass:{password}', folder)
1551
-
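# Illustrative end-to-end sketch (editor's addition; folder names and subject values are assumed
# example values, and the target folders are expected to exist where required): create a CA, create
# a certificate with its SAN-config, generate a CSR and let the CA sign it.
sc = ScriptCollectionCore()
sc.generate_certificate_authority("/tmp/demo-pki/ca", "DemoCA", "DE", "Berlin", "Berlin", "DemoOrg", "IT")
sc.generate_certificate("/tmp/demo-pki/certs", "example.com", "example", "DE", "Berlin", "Berlin", "DemoOrg", "IT")
sc.generate_certificate_sign_request("/tmp/demo-pki/certs", "example.com", "example", "DE", "Berlin", "Berlin", "DemoOrg", "IT")
sc.sign_certificate("/tmp/demo-pki/certs", "/tmp/demo-pki/ca", "DemoCA", "example.com", "example")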
1552
- @GeneralUtilities.check_arguments
1553
- def update_dependencies_of_python_in_requirementstxt_file(self, file: str, verbosity: int):
1554
- lines = GeneralUtilities.read_lines_from_file(file)
1555
- new_lines = []
1556
- for line in lines:
1557
- if GeneralUtilities.string_has_content(line):
1558
- new_lines.append(self.__get_updated_line_for_python_requirements(line.strip()))
1559
- GeneralUtilities.write_lines_to_file(file, new_lines)
1560
-
1561
- @GeneralUtilities.check_arguments
1562
- def __get_updated_line_for_python_requirements(self, line: str) -> str:
1563
- if "==" in line or "<" in line:
1564
- return line
1565
- elif ">" in line:
1566
- try:
1567
- # line is something like "cyclonedx-bom>=2.0.2" and the function must return with the updated version
1568
- # (something like "cyclonedx-bom>=2.11.0" for example)
1569
- package = line.split(">")[0]
1570
- operator = ">=" if ">=" in line else ">"
1571
- response = requests.get(f'https://pypi.org/pypi/{package}/json', timeout=5)
1572
- latest_version = response.json()['info']['version']
1573
- # TODO update only minor- and patch-version
1574
- # TODO print info if there is a new major-version
1575
- return package+operator+latest_version
1576
- except Exception:
1577
- return line
1578
- else:
1579
- raise ValueError(f'Unexpected line in requirements-file: "{line}"')
1580
-
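# Usage sketch (editor's addition; the file path is an example): rewrites every ">"- or ">="-pinned
# entry of a requirements-file to the latest version reported by pypi.org and leaves "=="- and
# "<"-pinned entries unchanged.
sc = ScriptCollectionCore()
sc.update_dependencies_of_python_in_requirementstxt_file("/tmp/myproject/requirements.txt", 1)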
1581
- @GeneralUtilities.check_arguments
1582
- def update_dependencies_of_python_in_setupcfg_file(self, setup_cfg_file: str, verbosity: int):
1583
- lines = GeneralUtilities.read_lines_from_file(setup_cfg_file)
1584
- new_lines = []
1585
- requirement_parsing_mode = False
1586
- for line in lines:
1587
- new_line = line
1588
- if (requirement_parsing_mode):
1589
- if ("<" in line or "=" in line or ">" in line):
1590
- updated_line = f" {self.__get_updated_line_for_python_requirements(line.strip())}"
1591
- new_line = updated_line
1592
- else:
1593
- requirement_parsing_mode = False
1594
- else:
1595
- if line.startswith("install_requires ="):
1596
- requirement_parsing_mode = True
1597
- new_lines.append(new_line)
1598
- GeneralUtilities.write_lines_to_file(setup_cfg_file, new_lines)
1599
-
1600
- @GeneralUtilities.check_arguments
1601
- def update_dependencies_of_dotnet_project(self, csproj_file: str, verbosity: int, ignored_dependencies: list[str]):
1602
- folder = os.path.dirname(csproj_file)
1603
- csproj_filename = os.path.basename(csproj_file)
1604
- GeneralUtilities.write_message_to_stderr(f"Check for updates in {csproj_filename}")
1605
- result = self.run_program("dotnet", f"list {csproj_filename} package --outdated", folder)
1606
- for line in result[1].replace("\r", "").split("\n"):
1607
- # Relevant output-lines are something like " > NJsonSchema 10.7.0 10.7.0 10.9.0"
1608
- if ">" in line:
1609
- package_name = line.replace(">", "").strip().split(" ")[0]
1610
- if not (package_name in ignored_dependencies):
1611
- GeneralUtilities.write_message_to_stderr(f"Update package {package_name}")
1612
- self.run_program("dotnet", f"add {csproj_filename} package {package_name}", folder)
1613
-
1614
- @GeneralUtilities.check_arguments
1615
- def create_deb_package(self, toolname: str, binary_folder: str, control_file_content: str, deb_output_folder: str, verbosity: int, permission_of_executable_file_as_octet_triple: int) -> None:
1616
-
1617
- # prepare
1618
- GeneralUtilities.ensure_directory_exists(deb_output_folder)
1619
- temp_folder = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
1620
- GeneralUtilities.ensure_directory_exists(temp_folder)
1621
- bin_folder = binary_folder
1622
- tool_content_folder_name = toolname+"Content"
1623
-
1624
- # create folder
1625
- GeneralUtilities.ensure_directory_exists(temp_folder)
1626
- control_content_folder_name = "controlcontent"
1627
- packagecontent_control_folder = os.path.join(temp_folder, control_content_folder_name)
1628
- GeneralUtilities.ensure_directory_exists(packagecontent_control_folder)
1629
- data_content_folder_name = "datacontent"
1630
- packagecontent_data_folder = os.path.join(temp_folder, data_content_folder_name)
1631
- GeneralUtilities.ensure_directory_exists(packagecontent_data_folder)
1632
- entireresult_content_folder_name = "entireresultcontent"
1633
- packagecontent_entireresult_folder = os.path.join(temp_folder, entireresult_content_folder_name)
1634
- GeneralUtilities.ensure_directory_exists(packagecontent_entireresult_folder)
1635
-
1636
- # create "debian-binary"-file
1637
- debianbinary_file = os.path.join(packagecontent_entireresult_folder, "debian-binary")
1638
- GeneralUtilities.ensure_file_exists(debianbinary_file)
1639
- GeneralUtilities.write_text_to_file(debianbinary_file, "2.0\n")
1640
-
1641
- # create control-content
1642
-
1643
- # conffiles
1644
- conffiles_file = os.path.join(packagecontent_control_folder, "conffiles")
1645
- GeneralUtilities.ensure_file_exists(conffiles_file)
1646
-
1647
- # postinst-script
1648
- postinst_file = os.path.join(packagecontent_control_folder, "postinst")
1649
- GeneralUtilities.ensure_file_exists(postinst_file)
1650
- exe_file = f"/usr/bin/{tool_content_folder_name}/{toolname}"
1651
- link_file = f"/usr/bin/{toolname.lower()}"
1652
- permission = str(permission_of_executable_file_as_octet_triple)
1653
- GeneralUtilities.write_text_to_file(postinst_file, f"""#!/bin/sh
1654
- ln -s {exe_file} {link_file}
1655
- chmod {permission} {exe_file}
1656
- chmod {permission} {link_file}
1657
- """)
1658
-
1659
- # control
1660
- control_file = os.path.join(packagecontent_control_folder, "control")
1661
- GeneralUtilities.ensure_file_exists(control_file)
1662
- GeneralUtilities.write_text_to_file(control_file, control_file_content)
1663
-
1664
- # md5sums
1665
- md5sums_file = os.path.join(packagecontent_control_folder, "md5sums")
1666
- GeneralUtilities.ensure_file_exists(md5sums_file)
1667
-
1668
- # create data-content
1669
-
1670
- # copy binaries
1671
- usr_bin_folder = os.path.join(packagecontent_data_folder, "usr/bin")
1672
- GeneralUtilities.ensure_directory_exists(usr_bin_folder)
1673
- usr_bin_content_folder = os.path.join(usr_bin_folder, tool_content_folder_name)
1674
- GeneralUtilities.copy_content_of_folder(bin_folder, usr_bin_content_folder)
1675
-
1676
- # create debfile
1677
- deb_filename = f"{toolname}.deb"
1678
- self.run_program_argsasarray("tar", ["czf", f"../{entireresult_content_folder_name}/control.tar.gz", "*"], packagecontent_control_folder, verbosity=verbosity)
1679
- self.run_program_argsasarray("tar", ["czf", f"../{entireresult_content_folder_name}/data.tar.gz", "*"], packagecontent_data_folder, verbosity=verbosity)
1680
- self.run_program_argsasarray("ar", ["r", deb_filename, "debian-binary", "control.tar.gz", "data.tar.gz"], packagecontent_entireresult_folder, verbosity=verbosity)
1681
- result_file = os.path.join(packagecontent_entireresult_folder, deb_filename)
1682
- shutil.copy(result_file, os.path.join(deb_output_folder, deb_filename))
1683
-
1684
- # cleanup
1685
- GeneralUtilities.ensure_directory_does_not_exist(temp_folder)
1686
-
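# Hedged usage sketch (editor's addition; tool name, folders and control-file content are example
# values): create_deb_package assembles the .deb from "debian-binary", control.tar.gz and data.tar.gz.
control_file_content = "Package: exampletool\nVersion: 1.0.0\nArchitecture: amd64\nMaintainer: Example <example@example.com>\nDescription: Example tool\n"
sc = ScriptCollectionCore()
sc.create_deb_package("ExampleTool", "/tmp/exampletool/bin", control_file_content, "/tmp/exampletool/output", 1, 755)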
1687
- @GeneralUtilities.check_arguments
1688
- def update_year_in_copyright_tags(self, file: str) -> None:
1689
- current_year = str(datetime.now().year)
1690
- lines = GeneralUtilities.read_lines_from_file(file)
1691
- lines_result = []
1692
- for line in lines:
1693
- if match := re.search("(.*<[Cc]opyright>.*)\\d\\d\\d\\d(.*<\\/[Cc]opyright>.*)", line):
1694
- part1 = match.group(1)
1695
- part2 = match.group(2)
1696
- adapted = part1+current_year+part2
1697
- else:
1698
- adapted = line
1699
- lines_result.append(adapted)
1700
- GeneralUtilities.write_lines_to_file(file, lines_result)
1701
-
1702
- @GeneralUtilities.check_arguments
1703
- def update_year_in_first_line_of_file(self, file: str) -> None:
1704
- current_year = str(datetime.now().year)
1705
- lines = GeneralUtilities.read_lines_from_file(file)
1706
- lines[0] = re.sub("\\d\\d\\d\\d", current_year, lines[0])
1707
- GeneralUtilities.write_lines_to_file(file, lines)
1708
-
1709
- @GeneralUtilities.check_arguments
1710
- def get_external_ip(self, proxy: str) -> str:
1711
- information = self.get_externalnetworkinformation_as_json_string(proxy)
1712
- parsed = json.loads(information)
1713
- return parsed["ip"]
1714
-
1715
- @GeneralUtilities.check_arguments
1716
- def get_country_of_external_ip(self, proxy: str) -> str:
1717
- information = self.get_externalnetworkinformation_as_json_string(proxy)
1718
- parsed = json.loads(information)
1719
- return parsed["country"]
1720
-
1721
- @GeneralUtilities.check_arguments
1722
- def get_externalnetworkinformation_as_json_string(self, proxy: str) -> str:
1723
- proxies = None
1724
- if GeneralUtilities.string_has_content(proxy):
1725
- proxies = {"http": proxy}
1726
- response = requests.get('https://ipinfo.io', proxies=proxies, timeout=5)
1727
- network_information_as_json_string = GeneralUtilities.bytes_to_string(
1728
- response.content)
1729
- return network_information_as_json_string
1730
-
1731
- @GeneralUtilities.check_arguments
1732
- def change_file_extensions(self, folder: str, from_extension: str, to_extension: str, recursive: bool, ignore_case: bool) -> None:
1733
- extension_to_compare: str = None
1734
- if ignore_case:
1735
- extension_to_compare = from_extension.lower()
1736
- else:
1737
- extension_to_compare = from_extension
1738
- for file in GeneralUtilities.get_direct_files_of_folder(folder):
1739
- if (ignore_case and file.lower().endswith(f".{extension_to_compare}") or not ignore_case and file.endswith(f".{extension_to_compare}")):
1740
- p = Path(file)
1741
- p.rename(p.with_suffix('.'+to_extension))
1742
- if recursive:
1743
- for subfolder in GeneralUtilities.get_direct_folders_of_folder(folder):
1744
- self.change_file_extensions(subfolder, from_extension, to_extension, recursive, ignore_case)
1745
-
1746
- @GeneralUtilities.check_arguments
1747
- def __add_chapter(self, main_reference_file, reference_content_folder, number: int, chaptertitle: str, content: str = None):
1748
- if content is None:
1749
- content = "TXDX add content here"
1750
- filename = str(number).zfill(2)+"_"+chaptertitle.replace(' ', '-')
1751
- file = f"{reference_content_folder}/{filename}.md"
1752
- full_title = f"{number}. {chaptertitle}"
1753
-
1754
- GeneralUtilities.append_line_to_file(main_reference_file, f"- [{full_title}](./{filename}.md)")
1755
-
1756
- GeneralUtilities.ensure_file_exists(file)
1757
- GeneralUtilities.write_text_to_file(file, f"""# {full_title}
1758
-
1759
- {content}
1760
- """.replace("XDX", "ODO"))
1761
-
1762
- @GeneralUtilities.check_arguments
1763
- def generate_arc42_reference_template(self, repository: str, productname: str = None, subfolder: str = None):
1764
- productname: str
1765
- if productname is None:
1766
- productname = os.path.basename(repository)
1767
- if subfolder is None:
1768
- subfolder = "Other/Resources/Reference"
1769
- reference_root_folder = f"{repository}/{subfolder}"
1770
- reference_content_folder = reference_root_folder + "/Technical"
1771
- if os.path.isdir(reference_root_folder):
1772
- raise ValueError(f"The folder '{reference_root_folder}' does already exist.")
1773
- GeneralUtilities.ensure_directory_exists(reference_root_folder)
1774
- GeneralUtilities.ensure_directory_exists(reference_content_folder)
1775
- main_reference_file = f"{reference_root_folder}/Reference.md"
1776
- GeneralUtilities.ensure_file_exists(main_reference_file)
1777
- GeneralUtilities.write_text_to_file(main_reference_file, f"""# {productname}
1778
-
1779
- TXDX add minimal service-description here.
1780
-
1781
- ## Technical documentation
1782
-
1783
- """.replace("XDX", "ODO"))
1784
- self.__add_chapter(main_reference_file, reference_content_folder, 1, 'Introduction and Goals', """## Overview
1785
-
1786
- TXDX
1787
-
1788
- ## Quality goals
1789
-
1790
- TXDX
1791
-
1792
- ## Stakeholder
1793
-
1794
- | Name | How to contact | Reason |
1795
- | ---- | -------------- | ------ |""")
1796
- self.__add_chapter(main_reference_file, reference_content_folder, 2, 'Constraints', """## Technical constraints
1797
-
1798
- | Constraint-identifier | Constraint | Reason |
1799
- | --------------------- | ---------- | ------ |
1800
-
1801
- ## Organizational constraints
1802
-
1803
- | Constraint-identifier | Constraint | Reason |
1804
- | --------------------- | ---------- | ------ |""")
1805
- self.__add_chapter(main_reference_file, reference_content_folder, 3, 'Context and Scope', """## Context
1806
-
1807
- TXDX
1808
-
1809
- ## Scope
1810
-
1811
- TXDX""")
1812
- self.__add_chapter(main_reference_file, reference_content_folder, 4, 'Solution Strategy', """TXDX""")
1813
- self.__add_chapter(main_reference_file, reference_content_folder, 5, 'Building Block View', """TXDX""")
1814
- self.__add_chapter(main_reference_file, reference_content_folder, 6, 'Runtime View', """TXDX""")
1815
- self.__add_chapter(main_reference_file, reference_content_folder, 7, 'Deployment View', """## Infrastructure-overview
1816
-
1817
- TXDX
1818
-
1819
- ## Infrastructure-requirements
1820
-
1821
- TXDX
1822
-
1823
- ## Deployment-processes
1824
-
1825
- TXDX
1826
- """)
1827
- self.__add_chapter(main_reference_file, reference_content_folder, 8, 'Crosscutting Concepts', """TXDX""")
1828
- self.__add_chapter(main_reference_file, reference_content_folder, 9, 'Architectural Decisions', """## Decision-board
1829
-
1830
- | Decision-identifier | Date | Decision | Reason and notes |
1831
- | ------------------- | ---- | -------- | ---------------- |""") # empty because there are no decisions yet
1832
- self.__add_chapter(main_reference_file, reference_content_folder, 10, 'Quality Requirements', """TXDX""")
1833
- self.__add_chapter(main_reference_file, reference_content_folder, 11, 'Risks and Technical Debt', """## Risks
1834
-
1835
- Currently there are no known risks.
1836
-
1837
- ## Technical debts
1838
-
1839
- Currently there are no technical debts.""")
1840
- self.__add_chapter(main_reference_file, reference_content_folder, 12, 'Glossary', """## Terms
1841
-
1842
- | Term | Meaning |
1843
- | ---- | ------- |
1844
-
1845
- ## Abbreviations
1846
-
1847
- | Abbreviation | Meaning |
1848
- | ------------ | ------- |""")
1849
-
1850
- GeneralUtilities.append_to_file(main_reference_file, """
1851
-
1852
- ## Responsibilities
1853
-
1854
- | Responsibility | Name and contact-information |
1855
- | --------------- | ---------------------------- |
1856
- | Product-owner | TXDX |
1857
- | Product-manager | TXDX |
1858
- | Support | TXDX |
1859
-
1860
- ## License & Pricing
1861
-
1862
- TXDX
1863
-
1864
- ## External resources
1865
-
1866
- - [Repository](TXDX)
1867
- - [Productive-System](TXDX)
1868
- - [QualityCheck-system](TXDX)
1869
-
1870
- """)
1
+ import sys
2
+ from datetime import timedelta, datetime
3
+ import json
4
+ import binascii
5
+ import filecmp
6
+ import hashlib
7
+ import time
8
+ from io import BytesIO
9
+ import itertools
10
+ import math
11
+ import os
12
+ from queue import Queue, Empty
13
+ from concurrent.futures import ThreadPoolExecutor
14
+ from pathlib import Path
15
+ from subprocess import Popen
16
+ import re
17
+ import shutil
18
+ import uuid
19
+ import tempfile
20
+ import io
21
+ import requests
22
+ import ntplib
23
+ import yaml
24
+ import qrcode
25
+ import pycdlib
26
+ import send2trash
27
+ import PyPDF2
28
+ from .GeneralUtilities import GeneralUtilities
29
+ from .ProgramRunnerBase import ProgramRunnerBase
30
+ from .ProgramRunnerPopen import ProgramRunnerPopen
31
+ from .ProgramRunnerEpew import ProgramRunnerEpew, CustomEpewArgument
32
+
33
+ version = "3.5.22"
34
+ __version__ = version
35
+
36
+
37
+ class ScriptCollectionCore:
38
+
39
+ # The purpose of this property is to use it when testing your code which uses scriptcollection for external program-calls.
40
+ # Do not change this value for productive environments.
41
+ mock_program_calls: bool = False
42
+ # The purpose of this property is to use it when testing your code which uses scriptcollection for external program-calls.
43
+ execute_program_really_if_no_mock_call_is_defined: bool = False
44
+ __mocked_program_calls: list = None
45
+ program_runner: ProgramRunnerBase = None
46
+
47
+ def __init__(self):
48
+ self.program_runner = ProgramRunnerPopen()
49
+ self.__mocked_program_calls = list[ScriptCollectionCore.__MockProgramCall]()
50
+
51
+ @staticmethod
52
+ @GeneralUtilities.check_arguments
53
+ def get_scriptcollection_version() -> str:
54
+ return __version__
55
+
56
+ @GeneralUtilities.check_arguments
57
+ def python_file_has_errors(self, file: str, working_directory: str, treat_warnings_as_errors: bool = True) -> tuple[bool, list[str]]:
58
+ errors = list()
59
+ filename = os.path.relpath(file, working_directory)
60
+ if treat_warnings_as_errors:
61
+ errorsonly_argument = ""
62
+ else:
63
+ errorsonly_argument = " --errors-only"
64
+ (exit_code, stdout, stderr, _) = self.run_program("pylint", filename + errorsonly_argument, working_directory, throw_exception_if_exitcode_is_not_zero=False)
65
+ if (exit_code != 0):
66
+ errors.append(f"Linting-issues of {file}:")
67
+ errors.append(f"Pylint-exitcode: {exit_code}")
68
+ for line in GeneralUtilities.string_to_lines(stdout):
69
+ errors.append(line)
70
+ for line in GeneralUtilities.string_to_lines(stderr):
71
+ errors.append(line)
72
+ return (True, errors)
73
+
74
+ return (False, errors)
75
+
76
+ @GeneralUtilities.check_arguments
77
+ def replace_version_in_dockerfile_file(self, dockerfile: str, new_version_value: str) -> None:
78
+ GeneralUtilities.write_text_to_file(dockerfile, re.sub("ARG Version=\"\\d+\\.\\d+\\.\\d+\"", f"ARG Version=\"{new_version_value}\"", GeneralUtilities.read_text_from_file(dockerfile)))
79
+
80
+ @GeneralUtilities.check_arguments
81
+ def replace_version_in_python_file(self, file: str, new_version_value: str):
82
+ GeneralUtilities.write_text_to_file(file, re.sub("version = \"\\d+\\.\\d+\\.\\d+\"", f"version = \"{new_version_value}\"", GeneralUtilities.read_text_from_file(file)))
83
+
84
+ @GeneralUtilities.check_arguments
85
+ def replace_version_in_ini_file(self, file: str, new_version_value: str):
86
+ GeneralUtilities.write_text_to_file(file, re.sub("version = \\d+\\.\\d+\\.\\d+", f"version = {new_version_value}", GeneralUtilities.read_text_from_file(file)))
87
+
88
+ @GeneralUtilities.check_arguments
89
+ def replace_version_in_nuspec_file(self, nuspec_file: str, new_version: str) -> None:
90
+ # TODO use XSLT instead
91
+ versionregex = "\\d+\\.\\d+\\.\\d+"
92
+ versiononlyregex = f"^{versionregex}$"
93
+ pattern = re.compile(versiononlyregex)
94
+ if pattern.match(new_version):
95
+ GeneralUtilities.write_text_to_file(nuspec_file, re.sub(f"<version>{versionregex}<\\/version>", f"<version>{new_version}</version>", GeneralUtilities.read_text_from_file(nuspec_file)))
96
+ else:
97
+ raise ValueError(f"Version '{new_version}' does not match version-regex '{versiononlyregex}'")
98
+
99
+ @GeneralUtilities.check_arguments
100
+ def replace_version_in_csproj_file(self, csproj_file: str, current_version: str):
101
+ versionregex = "\\d+\\.\\d+\\.\\d+"
102
+ versiononlyregex = f"^{versionregex}$"
103
+ pattern = re.compile(versiononlyregex)
104
+ if pattern.match(current_version):
105
+ for tag in ["Version", "AssemblyVersion", "FileVersion"]:
106
+ GeneralUtilities.write_text_to_file(csproj_file, re.sub(f"<{tag}>{versionregex}(.\\d+)?<\\/{tag}>", f"<{tag}>{current_version}</{tag}>", GeneralUtilities.read_text_from_file(csproj_file)))
107
+ else:
108
+ raise ValueError(f"Version '{current_version}' does not match version-regex '{versiononlyregex}'")
109
+
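# Illustrative sketch (editor's addition; the file path is an example): both the nuspec- and the
# csproj-update validate the given version against the "major.minor.patch"-pattern before rewriting
# the corresponding XML-tags in place.
sc = ScriptCollectionCore()
sc.replace_version_in_csproj_file("/tmp/MyProject/MyProject.csproj", "3.5.22")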
110
+ @GeneralUtilities.check_arguments
111
+ def push_nuget_build_artifact(self, nupkg_file: str, registry_address: str, api_key: str, verbosity: int = 1):
112
+ nupkg_file_name = os.path.basename(nupkg_file)
113
+ nupkg_file_folder = os.path.dirname(nupkg_file)
114
+ self.run_program("dotnet", f"nuget push {nupkg_file_name} --force-english-output --source {registry_address} --api-key {api_key}", nupkg_file_folder, verbosity)
115
+
116
+ @GeneralUtilities.check_arguments
117
+ def dotnet_build(self, repository_folder: str, projectname: str, configuration: str):
118
+ self.run_program("dotnet", f"clean -c {configuration}", repository_folder)
119
+ self.run_program("dotnet", f"build {projectname}/{projectname}.csproj -c {configuration}", repository_folder)
120
+
121
+ @GeneralUtilities.check_arguments
122
+ def find_file_by_extension(self, folder: str, extension: str):
123
+ result = [file for file in GeneralUtilities.get_direct_files_of_folder(folder) if file.endswith(f".{extension}")]
124
+ result_length = len(result)
125
+ if result_length == 0:
126
+ raise FileNotFoundError(f"No file available in folder '{folder}' with extension '{extension}'.")
127
+ if result_length == 1:
128
+ return result[0]
129
+ else:
130
+ raise ValueError(f"Multiple values available in folder '{folder}' with extension '{extension}'.")
131
+
132
+ @GeneralUtilities.check_arguments
133
+ def commit_is_signed_by_key(self, repository_folder: str, revision_identifier: str, key: str) -> bool:
134
+ result = self.run_program(
135
+ "git", f"verify-commit {revision_identifier}", repository_folder, throw_exception_if_exitcode_is_not_zero=False)
136
+ if (result[0] != 0):
137
+ return False
138
+ if (not GeneralUtilities.contains_line(result[1].splitlines(), f"gpg\\:\\ using\\ [A-Za-z0-9]+\\ key\\ [A-Za-z0-9]+{key}")):
139
+ # TODO check whether this works on machines where gpg is installed in another language than English
140
+ return False
141
+ if (not GeneralUtilities.contains_line(result[1].splitlines(), "gpg\\:\\ Good\\ signature\\ from")):
142
+ # TODO check whether this works on machines where gpg is installed in another language than English
143
+ return False
144
+ return True
145
+
146
+ @GeneralUtilities.check_arguments
147
+ def get_parent_commit_ids_of_commit(self, repository_folder: str, commit_id: str) -> list[str]:
148
+ return self.run_program("git", f'log --pretty=%P -n 1 "{commit_id}"', repository_folder, throw_exception_if_exitcode_is_not_zero=True)[1].replace("\r", "").replace("\n", "").split(" ")
149
+
150
+ @GeneralUtilities.check_arguments
151
+ def get_all_authors_and_committers_of_repository(self, repository_folder: str, subfolder: str = None, verbosity: int = 1) -> list[tuple[str, str]]:
152
+ space_character = "_"
153
+ if subfolder is None:
154
+ subfolder_argument = ""
155
+ else:
156
+ subfolder_argument = f" -- {subfolder}"
157
+ log_result = self.run_program("git", f'log --pretty=%aN{space_character}%aE%n%cN{space_character}%cE HEAD{subfolder_argument}', repository_folder, verbosity=0)
158
+ plain_content: list[str] = list(
159
+ set([line for line in log_result[1].split("\n") if len(line) > 0]))
160
+ result: list[tuple[str, str]] = []
161
+ for item in plain_content:
162
+ if len(re.findall(space_character, item)) == 1:
163
+ splitted = item.split(space_character)
164
+ result.append((splitted[0], splitted[1]))
165
+ else:
166
+ raise ValueError(f'Unexpected author: "{item}"')
167
+ return result
168
+
169
+ @GeneralUtilities.check_arguments
170
+ def get_commit_ids_between_dates(self, repository_folder: str, since: datetime, until: datetime, ignore_commits_which_are_not_in_history_of_head: bool = True) -> list[str]:
171
+ since_as_string = self.__datetime_to_string_for_git(since)
172
+ until_as_string = self.__datetime_to_string_for_git(until)
173
+ result = list(filter(lambda line: not GeneralUtilities.string_is_none_or_whitespace(line), self.run_program("git", f'log --since "{since_as_string}" --until "{until_as_string}" --pretty=format:"%H" --no-patch', repository_folder, throw_exception_if_exitcode_is_not_zero=True)[1].replace("\r", "").split("\n")))
174
+ if ignore_commits_which_are_not_in_history_of_head:
175
+ result = [commit_id for commit_id in result if self.git_commit_is_ancestor(
176
+ repository_folder, commit_id)]
177
+ return result
178
+
179
+ @GeneralUtilities.check_arguments
180
+ def __datetime_to_string_for_git(self, datetime_object: datetime) -> str:
181
+ return datetime_object.strftime('%Y-%m-%d %H:%M:%S')
182
+
183
+ @GeneralUtilities.check_arguments
184
+ def git_commit_is_ancestor(self, repository_folder: str, ancestor: str, descendant: str = "HEAD") -> bool:
185
+ exit_code = self.run_program_argsasarray("git", ["merge-base", "--is-ancestor", ancestor, descendant], repository_folder, throw_exception_if_exitcode_is_not_zero=False)[0]
186
+ if exit_code == 0:
187
+ return True
188
+ elif exit_code == 1:
189
+ return False
190
+ else:
191
+ raise ValueError(f"Can not calculate if {ancestor} is an ancestor of {descendant} in repository {repository_folder}.")
192
+
193
+ @GeneralUtilities.check_arguments
194
+ def __git_changes_helper(self, repository_folder: str, arguments_as_array: list[str]) -> bool:
195
+ lines = GeneralUtilities.string_to_lines(self.run_program_argsasarray("git", arguments_as_array, repository_folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)[1], False)
196
+ for line in lines:
197
+ if GeneralUtilities.string_has_content(line):
198
+ return True
199
+ return False
200
+
201
+ @GeneralUtilities.check_arguments
202
+ def git_repository_has_new_untracked_files(self, repositoryFolder: str):
203
+ return self.__git_changes_helper(repositoryFolder, ["ls-files", "--exclude-standard", "--others"])
204
+
205
+ @GeneralUtilities.check_arguments
206
+ def git_repository_has_unstaged_changes_of_tracked_files(self, repositoryFolder: str):
207
+ return self.__git_changes_helper(repositoryFolder, ["--no-pager", "diff"])
208
+
209
+ @GeneralUtilities.check_arguments
210
+ def git_repository_has_staged_changes(self, repositoryFolder: str):
211
+ return self.__git_changes_helper(repositoryFolder, ["--no-pager", "diff", "--cached"])
212
+
213
+ @GeneralUtilities.check_arguments
214
+ def git_repository_has_uncommitted_changes(self, repositoryFolder: str) -> bool:
215
+ if (self.git_repository_has_unstaged_changes(repositoryFolder)):
216
+ return True
217
+ if (self.git_repository_has_staged_changes(repositoryFolder)):
218
+ return True
219
+ return False
220
+
221
+ @GeneralUtilities.check_arguments
222
+ def git_repository_has_unstaged_changes(self, repository_folder: str) -> bool:
223
+ if (self.git_repository_has_unstaged_changes_of_tracked_files(repository_folder)):
224
+ return True
225
+ if (self.git_repository_has_new_untracked_files(repository_folder)):
226
+ return True
227
+ return False
228
+
229
+ @GeneralUtilities.check_arguments
230
+ def git_get_commit_id(self, repository_folder: str, commit: str = "HEAD") -> str:
231
+ result: tuple[int, str, str, int] = self.run_program_argsasarray("git", ["rev-parse", "--verify", commit], repository_folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
232
+ return result[1].replace('\n', '')
233
+
234
+ @GeneralUtilities.check_arguments
235
+ def git_get_commit_date(self, repository_folder: str, commit: str = "HEAD") -> datetime:
236
+ result: tuple[int, str, str, int] = self.run_program_argsasarray("git", ["show", "-s", "--format=%ci", commit], repository_folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
237
+ date_as_string = result[1].replace('\n', '')
238
+ result = datetime.strptime(date_as_string, '%Y-%m-%d %H:%M:%S %z')
239
+ return result
240
+
241
+ @GeneralUtilities.check_arguments
242
+ def git_fetch(self, folder: str, remotename: str = "--all") -> None:
243
+ self.run_program_argsasarray("git", ["fetch", remotename, "--tags", "--prune"], folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
244
+
245
+ @GeneralUtilities.check_arguments
246
+ def git_fetch_in_bare_repository(self, folder: str, remotename: str, localbranch: str, remotebranch: str) -> None:
247
+ self.run_program_argsasarray("git", ["fetch", remotename, f"{remotebranch}:{localbranch}"], folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
248
+
249
+ @GeneralUtilities.check_arguments
250
+ def git_remove_branch(self, folder: str, branchname: str) -> None:
251
+ self.run_program("git", f"branch -D {branchname}", folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
252
+
253
+ @GeneralUtilities.check_arguments
254
+ def git_push(self, folder: str, remotename: str, localbranchname: str, remotebranchname: str, forcepush: bool = False, pushalltags: bool = True, verbosity: int = 0) -> str:
255
+ argument = ["push", "--recurse-submodules=on-demand", remotename, f"{localbranchname}:{remotebranchname}"]
256
+ if (forcepush):
257
+ argument.append("--force")
258
+ if (pushalltags):
259
+ argument.append("--tags")
260
+ result: tuple[int, str, str, int] = self.run_program_argsasarray("git", argument, folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=verbosity, print_errors_as_information=True)
261
+ return result[1].replace('\r', '').replace('\n', '')
262
+
263
+ @GeneralUtilities.check_arguments
264
+ def git_pull(self, folder: str, remote: str, localbranchname: str, remotebranchname: str, force: bool = False) -> None:
265
+ argument = f"pull {remote} {remotebranchname}:{localbranchname}"
266
+ if force:
267
+ argument = f"{argument} --force"
268
+ self.run_program("git", argument, folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
269
+
270
+ @GeneralUtilities.check_arguments
271
+ def git_list_remote_branches(self, folder: str, remote: str, fetch: bool) -> list[str]:
272
+ if fetch:
273
+ self.git_fetch(folder, remote)
274
+ run_program_result = self.run_program("git", f"branch -rl {remote}/*", folder, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
275
+ output = GeneralUtilities.string_to_lines(run_program_result[1])
276
+ result = list[str]()
277
+ for item in output:
278
+ striped_item = item.strip()
279
+ if GeneralUtilities.string_has_content(striped_item):
280
+ branch: str = None
281
+ if " " in striped_item:
282
+ branch = striped_item.split(" ")[0]
283
+ else:
284
+ branch = striped_item
285
+ branchname = branch[len(remote)+1:]
286
+ if branchname != "HEAD":
287
+ result.append(branchname)
288
+ return result
289
+
290
+ @GeneralUtilities.check_arguments
291
+ def git_clone(self, clone_target_folder: str, remote_repository_path: str, include_submodules: bool = True, mirror: bool = False) -> None:
292
+ if (os.path.isdir(clone_target_folder)):
293
+ pass # TODO throw error
294
+ else:
295
+ args = ["clone", remote_repository_path, clone_target_folder]
296
+ if include_submodules:
297
+ args.append("--recurse-submodules")
298
+ args.append("--remote-submodules")
299
+ if mirror:
300
+ args.append("--mirror")
301
+ self.run_program_argsasarray("git", args, os.getcwd(), throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
302
+
303
+ @GeneralUtilities.check_arguments
304
+ def git_get_all_remote_names(self, directory: str) -> list[str]:
305
+ result = GeneralUtilities.string_to_lines(self.run_program_argsasarray("git", ["remote"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)[1], False)
306
+ return result
307
+
308
+ @GeneralUtilities.check_arguments
309
+ def git_get_remote_url(self, directory: str, remote_name: str) -> str:
310
+ result = GeneralUtilities.string_to_lines(self.run_program_argsasarray("git", ["remote", "get-url", remote_name], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)[1], False)
311
+ return result[0].replace('\n', '')
312
+
313
+ @GeneralUtilities.check_arguments
314
+ def repository_has_remote_with_specific_name(self, directory: str, remote_name: str) -> bool:
315
+ return remote_name in self.git_get_all_remote_names(directory)
316
+
317
+ @GeneralUtilities.check_arguments
318
+ def git_add_or_set_remote_address(self, directory: str, remote_name: str, remote_address: str) -> None:
319
+ if (self.repository_has_remote_with_specific_name(directory, remote_name)):
320
+ self.run_program_argsasarray("git", ['remote', 'set-url', 'remote_name', remote_address], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
321
+ else:
322
+ self.run_program_argsasarray("git", ['remote', 'add', remote_name, remote_address], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
323
+
324
+ @GeneralUtilities.check_arguments
325
+ def git_stage_all_changes(self, directory: str) -> None:
326
+ self.run_program_argsasarray("git", ["add", "-A"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
327
+
328
+ @GeneralUtilities.check_arguments
329
+ def git_unstage_all_changes(self, directory: str) -> None:
330
+ self.run_program_argsasarray("git", ["reset"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
331
+
332
+ @GeneralUtilities.check_arguments
333
+ def git_stage_file(self, directory: str, file: str) -> None:
334
+ self.run_program_argsasarray("git", ['stage', file], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
335
+
336
+ @GeneralUtilities.check_arguments
337
+ def git_unstage_file(self, directory: str, file: str) -> None:
338
+ self.run_program_argsasarray("git", ['reset', file], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
339
+
340
+ @GeneralUtilities.check_arguments
341
+ def git_discard_unstaged_changes_of_file(self, directory: str, file: str) -> None:
342
+ """Caution: This method works really only for 'changed' files yet. So this method does not work properly for new or renamed files."""
343
+ self.run_program_argsasarray("git", ['checkout', file], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
344
+
345
+ @GeneralUtilities.check_arguments
346
+ def git_discard_all_unstaged_changes(self, directory: str) -> None:
347
+ """Caution: This function executes 'git clean -df'. This can delete files which maybe should not be deleted. Be aware of that."""
348
+ self.run_program_argsasarray("git", ['clean', '-df'], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
349
+ self.run_program_argsasarray("git", ['checkout', '.'], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
350
+
351
+ @GeneralUtilities.check_arguments
352
+ def git_commit(self, directory: str, message: str, author_name: str = None, author_email: str = None, stage_all_changes: bool = True, no_changes_behavior: int = 0) -> str:
353
+ # no_changes_behavior=0 => No commit
354
+ # no_changes_behavior=1 => Commit anyway
355
+ # no_changes_behavior=2 => Exception
356
+ author_name = GeneralUtilities.str_none_safe(author_name).strip()
357
+ author_email = GeneralUtilities.str_none_safe(author_email).strip()
358
+ argument = ['commit', '--quiet', '--allow-empty', '--message', message]
359
+ if (GeneralUtilities.string_has_content(author_name)):
360
+ argument.append(f'--author="{author_name} <{author_email}>"')
361
+ git_repository_has_uncommitted_changes = self.git_repository_has_uncommitted_changes(directory)
362
+
363
+ if git_repository_has_uncommitted_changes:
364
+ do_commit = True
365
+ if stage_all_changes:
366
+ self.git_stage_all_changes(directory)
367
+ else:
368
+ if no_changes_behavior == 0:
369
+ GeneralUtilities.write_message_to_stdout(f"Commit '{message}' will not be done because there are no changes to commit in repository '{directory}'")
370
+ do_commit = False
371
+ elif no_changes_behavior == 1:
372
+ GeneralUtilities.write_message_to_stdout(f"There are no changes to commit in repository '{directory}'. Commit '{message}' will be done anyway.")
373
+ do_commit = True
374
+ elif no_changes_behavior == 2:
375
+ raise RuntimeError(f"There are no changes to commit in repository '{directory}'. Commit '{message}' will not be done.")
376
+ else:
377
+ raise ValueError(f"Unknown value for no_changes_behavior: {GeneralUtilities.str_none_safe(no_changes_behavior)}")
378
+
379
+ if do_commit:
380
+ GeneralUtilities.write_message_to_stdout(f"Commit changes in '{directory}'")
381
+ self.run_program_argsasarray("git", argument, directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
382
+
383
+ return self.git_get_commit_id(directory)
384
+
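+ # Editor's note: a minimal usage sketch for the no_changes_behavior-parameter handled above (illustrative only; the repository-path is hypothetical):
+ #   sc = ScriptCollectionCore()
+ #   commit_id = sc.git_commit("/tmp/demo-repo", "Update documentation", no_changes_behavior=1)  # 1 => commit even if there are no changes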
385
+ @GeneralUtilities.check_arguments
386
+ def git_create_tag(self, directory: str, target_for_tag: str, tag: str, sign: bool = False, message: str = None) -> None:
387
+ argument = ["tag", tag, target_for_tag]
388
+ if sign:
389
+ if message is None:
390
+ message = f"Created {target_for_tag}"
391
+ argument.extend(["-s", '-m', message])
392
+ self.run_program_argsasarray(
393
+ "git", argument, directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
394
+
395
+ @GeneralUtilities.check_arguments
396
+ def git_delete_tag(self, directory: str, tag: str) -> None:
397
+ self.run_program_argsasarray("git", ["tag", "--delete", tag], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
398
+
399
+ @GeneralUtilities.check_arguments
400
+ def git_checkout(self, directory: str, branch: str) -> None:
401
+ self.run_program_argsasarray("git", ["checkout", branch], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
402
+ self.run_program_argsasarray("git", ["submodule", "update", "--recursive"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
403
+
404
+ @GeneralUtilities.check_arguments
405
+ def git_merge_abort(self, directory: str) -> None:
406
+ self.run_program_argsasarray("git", ["merge", "--abort"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
407
+
408
+ @GeneralUtilities.check_arguments
409
+ def git_merge(self, directory: str, sourcebranch: str, targetbranch: str, fastforward: bool = True, commit: bool = True, commit_message: str = None) -> str:
410
+ self.git_checkout(directory, targetbranch)
411
+ args = ["merge"]
412
+ if not commit:
413
+ args.append("--no-commit")
414
+ if not fastforward:
415
+ args.append("--no-ff")
416
+ if commit_message is not None:
417
+ args.append("-m")
418
+ args.append(commit_message)
419
+ args.append(sourcebranch)
420
+ self.run_program_argsasarray("git", args, directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
421
+ self.run_program_argsasarray("git", ["submodule", "update"], directory, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
422
+ return self.git_get_commit_id(directory)
423
+
424
+ @GeneralUtilities.check_arguments
425
+ def git_undo_all_changes(self, directory: str) -> None:
426
+ """Caution: This function executes 'git clean -df'. This can delete files which maybe should not be deleted. Be aware of that."""
427
+ self.git_unstage_all_changes(directory)
428
+ self.git_discard_all_unstaged_changes(directory)
429
+
430
+ @GeneralUtilities.check_arguments
431
+ def git_fetch_or_clone_all_in_directory(self, source_directory: str, target_directory: str) -> None:
432
+ for subfolder in GeneralUtilities.get_direct_folders_of_folder(source_directory):
433
+ foldername = os.path.basename(subfolder)
434
+ if self.is_git_repository(subfolder):
435
+ source_repository = subfolder
436
+ target_repository = os.path.join(target_directory, foldername)
437
+ if os.path.isdir(target_repository):
438
+ # fetch
439
+ self.git_fetch(target_repository)
440
+ else:
441
+ # clone
442
+ self.git_clone(target_repository, source_repository, include_submodules=True, mirror=True)
443
+
444
+ def get_git_submodules(self, folder: str) -> list[str]:
445
+ e = self.run_program("git", "submodule status", folder)
446
+ result = []
447
+ for submodule_line in GeneralUtilities.string_to_lines(e[1], False, True):
448
+ result.append(submodule_line.split(' ')[1])
449
+ return result
450
+
451
+ @GeneralUtilities.check_arguments
452
+ def is_git_repository(self, folder: str) -> bool:
453
+ combined = os.path.join(folder, ".git")
454
+ # TODO consider check for bare-repositories
455
+ return os.path.isdir(combined) or os.path.isfile(combined)
456
+
457
+ @GeneralUtilities.check_arguments
458
+ def file_is_git_ignored(self, file_in_repository: str, repositorybasefolder: str) -> bool:
459
+ exit_code = self.run_program_argsasarray("git", ['check-ignore', file_in_repository], repositorybasefolder, throw_exception_if_exitcode_is_not_zero=False, verbosity=0)[0]
460
+ if (exit_code == 0):
461
+ return True
462
+ if (exit_code == 1):
463
+ return False
464
+ raise ValueError(f"Unable to calculate whether '{file_in_repository}' in repository '{repositorybasefolder}' is ignored due to git-exitcode {exit_code}.")
465
+
466
+ @GeneralUtilities.check_arguments
467
+ def git_discard_all_changes(self, repository: str) -> None:
468
+ self.run_program_argsasarray("git", ["reset", "HEAD", "."], repository, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
469
+ self.run_program_argsasarray("git", ["checkout", "."], repository, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
470
+
471
+ @GeneralUtilities.check_arguments
472
+ def git_get_current_branch_name(self, repository: str) -> str:
473
+ result = self.run_program_argsasarray("git", ["rev-parse", "--abbrev-ref", "HEAD"], repository, throw_exception_if_exitcode_is_not_zero=True, verbosity=0)
474
+ return result[1].replace("\r", "").replace("\n", "")
475
+
476
+ @GeneralUtilities.check_arguments
477
+ def git_get_commitid_of_tag(self, repository: str, tag: str) -> str:
478
+ stdout = self.run_program_argsasarray("git", ["rev-list", "-n", "1", tag], repository, verbosity=0)
479
+ result = stdout[1].replace("\r", "").replace("\n", "")
480
+ return result
481
+
482
+ @GeneralUtilities.check_arguments
483
+ def git_get_tags(self, repository: str) -> list[str]:
484
+ tags = [line.replace("\r", "") for line in self.run_program_argsasarray(
485
+ "git", ["tag"], repository)[1].split("\n") if len(line) > 0]
486
+ return tags
487
+
488
+ @GeneralUtilities.check_arguments
489
+ def git_move_tags_to_another_branch(self, repository: str, tag_source_branch: str, tag_target_branch: str, sign: bool = False, message: str = None) -> None:
490
+ tags = self.git_get_tags(repository)
491
+ tags_count = len(tags)
492
+ counter = 0
493
+ for tag in tags:
494
+ counter = counter+1
495
+ GeneralUtilities.write_message_to_stdout(f"Process tag {counter}/{tags_count}.")
496
+ # tag is on source-branch
497
+ if self.git_commit_is_ancestor(repository, tag, tag_source_branch):
498
+ commit_id_old = self.git_get_commitid_of_tag(repository, tag)
499
+ commit_date: datetime = self.git_get_commit_date(repository, commit_id_old)
500
+ date_as_string = self.__datetime_to_string_for_git(commit_date)
501
+ search_commit_result = self.run_program_argsasarray("git", ["log", f'--after="{date_as_string}"', f'--before="{date_as_string}"', "--pretty=format:%H", tag_target_branch], repository, throw_exception_if_exitcode_is_not_zero=False)
502
+ if search_commit_result[0] != 0 or not GeneralUtilities.string_has_nonwhitespace_content(search_commit_result[1]):
503
+ raise ValueError(f"Can not calculate corresponding commit for tag '{tag}'.")
504
+ commit_id_new = search_commit_result[1]
505
+ self.git_delete_tag(repository, tag)
506
+ self.git_create_tag(repository, commit_id_new, tag, sign, message)
507
+
508
+ @GeneralUtilities.check_arguments
509
+ def get_current_git_branch_has_tag(self, repository_folder: str) -> bool:
510
+ result = self.run_program_argsasarray("git", ["describe", "--tags", "--abbrev=0"], repository_folder, verbosity=0, throw_exception_if_exitcode_is_not_zero=False)
511
+ return result[0] == 0
512
+
513
+ @GeneralUtilities.check_arguments
514
+ def get_latest_git_tag(self, repository_folder: str) -> str:
515
+ result = self.run_program_argsasarray(
516
+ "git", ["describe", "--tags", "--abbrev=0"], repository_folder, verbosity=0)
517
+ result = result[1].replace("\r", "").replace("\n", "")
518
+ return result
519
+
520
+ @GeneralUtilities.check_arguments
521
+ def get_staged_or_committed_git_ignored_files(self, repository_folder: str) -> list[str]:
522
+ tresult = self.run_program_argsasarray("git", ["ls-files", "-i", "-c", "--exclude-standard"], repository_folder, verbosity=0)
523
+ tresult = tresult[1].replace("\r", "")
524
+ result = [line for line in tresult.split("\n") if len(line) > 0]
525
+ return result
526
+
527
+ @GeneralUtilities.check_arguments
528
+ def git_repository_has_commits(self, repository_folder: str) -> bool:
529
+ return self.run_program_argsasarray("git", ["rev-parse", "--verify", "HEAD"], repository_folder, throw_exception_if_exitcode_is_not_zero=False)[0] == 0
530
+
531
+ @GeneralUtilities.check_arguments
532
+ def export_filemetadata(self, folder: str, target_file: str, encoding: str = "utf-8", filter_function=None) -> None:
533
+ folder = GeneralUtilities.resolve_relative_path_from_current_working_directory(folder)
534
+ lines = list()
535
+ path_prefix = len(folder)+1
536
+ items = dict()
537
+ for item in GeneralUtilities.get_all_folders_of_folder(folder):
538
+ items[item] = "d"
539
+ for item in GeneralUtilities.get_all_files_of_folder(folder):
540
+ items[item] = "f"
541
+ for file_or_folder, item_type in items.items():
542
+ truncated_file = file_or_folder[path_prefix:]
543
+ if (filter_function is None or filter_function(folder, truncated_file)):
544
+ owner_and_permisssion = self.get_file_owner_and_file_permission(file_or_folder)
545
+ user = owner_and_permisssion[0]
546
+ permissions = owner_and_permisssion[1]
547
+ lines.append(f"{truncated_file};{item_type};{user};{permissions}")
548
+ lines = sorted(lines, key=str.casefold)
549
+ with open(target_file, "w", encoding=encoding) as file_object:
550
+ file_object.write("\n".join(lines))
551
+
552
+ @GeneralUtilities.check_arguments
553
+ def escape_git_repositories_in_folder(self, folder: str) -> dict[str, str]:
554
+ return self.__escape_git_repositories_in_folder_internal(folder, dict[str, str]())
555
+
556
+ @GeneralUtilities.check_arguments
557
+ def __escape_git_repositories_in_folder_internal(self, folder: str, renamed_items: dict[str, str]) -> dict[str, str]:
558
+ for file in GeneralUtilities.get_direct_files_of_folder(folder):
559
+ filename = os.path.basename(file)
560
+ if ".git" in filename:
561
+ new_name = filename.replace(".git", ".gitx")
562
+ target = os.path.join(folder, new_name)
563
+ os.rename(file, target)
564
+ renamed_items[target] = file
565
+ for subfolder in GeneralUtilities.get_direct_folders_of_folder(folder):
566
+ foldername = os.path.basename(subfolder)
567
+ if ".git" in foldername:
568
+ new_name = foldername.replace(".git", ".gitx")
569
+ subfolder2 = os.path.join(str(Path(subfolder).parent), new_name)
570
+ os.rename(subfolder, subfolder2)
571
+ renamed_items[subfolder2] = subfolder
572
+ else:
573
+ subfolder2 = subfolder
574
+ self.__escape_git_repositories_in_folder_internal(subfolder2, renamed_items)
575
+ return renamed_items
576
+
577
+ @GeneralUtilities.check_arguments
578
+ def deescape_git_repositories_in_folder(self, renamed_items: dict[str, str]):
579
+ for renamed_item, original_name in renamed_items.items():
580
+ os.rename(renamed_item, original_name)
581
+
582
+ @GeneralUtilities.check_arguments
583
+ def __sort_fmd(self, line: str):
584
+ splitted: list = line.split(";")
585
+ filetype: str = splitted[1]
586
+ if filetype == "d":
587
+ return -1
588
+ if filetype == "f":
589
+ return 1
590
+ return 0
591
+
592
+ @GeneralUtilities.check_arguments
593
+ def restore_filemetadata(self, folder: str, source_file: str, strict=False, encoding: str = "utf-8", create_folder_is_not_exist: bool = True) -> None:
594
+ lines = GeneralUtilities.read_lines_from_file(source_file, encoding)
595
+ lines.sort(key=self.__sort_fmd)
596
+ for line in lines:
597
+ splitted: list = line.split(";")
598
+ full_path_of_file_or_folder: str = os.path.join(folder, splitted[0])
599
+ filetype: str = splitted[1]
600
+ user: str = splitted[2]
601
+ permissions: str = splitted[3]
602
+ if filetype == "d" and create_folder_is_not_exist and not os.path.isdir(full_path_of_file_or_folder):
603
+ GeneralUtilities.ensure_directory_exists(full_path_of_file_or_folder)
604
+ if (filetype == "f" and os.path.isfile(full_path_of_file_or_folder)) or (filetype == "d" and os.path.isdir(full_path_of_file_or_folder)):
605
+ self.set_owner(full_path_of_file_or_folder, user, os.name != 'nt')
606
+ self.set_permission(full_path_of_file_or_folder, permissions)
607
+ else:
608
+ if strict:
609
+ if filetype == "f":
610
+ filetype_full = "File"
611
+ elif filetype == "d":
612
+ filetype_full = "Directory"
613
+ else:
614
+ raise ValueError(f"Unknown filetype: {GeneralUtilities.str_none_safe(filetype)}")
615
+ raise ValueError(f"{filetype_full} '{full_path_of_file_or_folder}' does not exist")
616
+
617
+ @GeneralUtilities.check_arguments
618
+ def __calculate_lengh_in_seconds(self, filename: str, folder: str) -> float:
619
+ argument = ['-v', 'error', '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', filename]
620
+ result = self.run_program_argsasarray("ffprobe", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
621
+ return float(result[1].replace('\n', ''))
622
+
623
+ @GeneralUtilities.check_arguments
624
+ def __create_thumbnails(self, filename: str, fps: str, folder: str, tempname_for_thumbnails: str) -> list[str]:
625
+ argument = ['-i', filename, '-r', str(fps), '-vf', 'scale=-1:120', '-vcodec', 'png', f'{tempname_for_thumbnails}-%002d.png']
626
+ self.run_program_argsasarray("ffmpeg", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
627
+ files = GeneralUtilities.get_direct_files_of_folder(folder)
628
+ result: list[str] = []
629
+ regex = "^"+re.escape(tempname_for_thumbnails)+"\\-\\d+\\.png$"
630
+ regex_for_files = re.compile(regex)
631
+ for file in files:
632
+ filename = os.path.basename(file)
633
+ if regex_for_files.match(filename):
634
+ result.append(file)
635
+ GeneralUtilities.assert_condition(0 < len(result), "No thumbnail-files found.")
636
+ return result
637
+
638
+ @GeneralUtilities.check_arguments
639
+ def __create_thumbnail(self, outputfilename: str, folder: str, length_in_seconds: float, tempname_for_thumbnails: str, amount_of_images: int) -> None:
640
+ duration = timedelta(seconds=length_in_seconds)
641
+ info = GeneralUtilities.timedelta_to_simple_string(duration)
642
+ rows: int = 5
643
+ columns: int = math.ceil(amount_of_images/rows)
644
+ argument = ['-title', f'"{outputfilename} ({info})"', '-tile', f'{rows}x{columns}', f'{tempname_for_thumbnails}*.png', f'{outputfilename}.png']
645
+ self.run_program_argsasarray("montage", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
646
+
647
+ @GeneralUtilities.check_arguments
648
+ def __roundup(self, x: float, places: int) -> float:
649
+ d = 10 ** places
650
+ if x < 0:
651
+ return math.floor(x * d) / d
652
+ else:
653
+ return math.ceil(x * d) / d
654
+
655
+ @GeneralUtilities.check_arguments
656
+ def generate_thumbnail(self, file: str, frames_per_second: str, tempname_for_thumbnails: str = None, hook=None) -> None:
657
+ if tempname_for_thumbnails is None:
658
+ tempname_for_thumbnails = "t_"+str(uuid.uuid4())
659
+
660
+ file = GeneralUtilities.resolve_relative_path_from_current_working_directory(file)
661
+ filename = os.path.basename(file)
662
+ folder = os.path.dirname(file)
663
+ filename_without_extension = Path(file).stem
664
+ preview_files: list[str] = []
665
+ try:
666
+ length_in_seconds = self.__calculate_lengh_in_seconds(filename, folder)
667
+ if (frames_per_second.endswith("fps")):
668
+ # frames per second, example: frames_per_second="20fps" => 20 frames per second
669
+ frames_per_second = self.__roundup(float(frames_per_second[:-3]), 2)
670
+ frames_per_second_as_string = str(frames_per_second)
671
+ amounf_of_previewframes = int(math.floor(length_in_seconds*frames_per_second))
672
+ else:
673
+ # concrete amount of frames, example: frames_per_second="16" => 16 frames for the entire video
674
+ amounf_of_previewframes = int(float(frames_per_second))
675
+ # self.roundup((amounf_of_previewframes-2)/length_in_seconds, 2)
676
+ frames_per_second_as_string = f"{amounf_of_previewframes-2}/{length_in_seconds}"
677
+ preview_files = self.__create_thumbnails(filename, frames_per_second_as_string, folder, tempname_for_thumbnails)
678
+ if hook is not None:
679
+ hook(file, preview_files)
680
+ actual_amounf_of_previewframes = len(preview_files)
681
+ self.__create_thumbnail(filename_without_extension, folder, length_in_seconds, tempname_for_thumbnails, actual_amounf_of_previewframes)
682
+ finally:
683
+ for thumbnail_to_delete in preview_files:
684
+ os.remove(thumbnail_to_delete)
685
+
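+ # Editor's note: illustrative calls for the two frames_per_second-formats handled above (the file-path is hypothetical; ffprobe, ffmpeg and montage must be available):
+ #   sc = ScriptCollectionCore()
+ #   sc.generate_thumbnail("/videos/demo.mp4", "2fps")  # 2 preview-frames per second of video
+ #   sc.generate_thumbnail("/videos/demo.mp4", "16")    # 16 preview-frames for the entire video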
686
+ @GeneralUtilities.check_arguments
687
+ def extract_pdf_pages(self, file: str, from_page: int, to_page: int, outputfile: str) -> None:
688
+ pdf_reader = PyPDF2.PdfReader(file)
689
+ pdf_writer = PyPDF2.PdfWriter()
690
+ start = from_page
691
+ end = to_page
692
+ while start <= end:
693
+ pdf_writer.add_page(pdf_reader.pages[start-1])
694
+ start += 1
695
+ with open(outputfile, 'wb') as out:
696
+ pdf_writer.write(out)
697
+
698
+ @GeneralUtilities.check_arguments
699
+ def merge_pdf_files(self, files: list[str], outputfile: str) -> None:
700
+ # TODO add wildcard-option
701
+ pdfFileMerger = PyPDF2.PdfFileMerger()
702
+ for file in files:
703
+ pdfFileMerger.append(file.strip())
704
+ pdfFileMerger.write(outputfile)
705
+ pdfFileMerger.close()
706
+
707
+ @GeneralUtilities.check_arguments
708
+ def pdf_to_image(self, file: str, outputfilename_without_extension: str) -> None:
709
+ raise ValueError("Function currently not available")
710
+ # PyMuPDF can be used for that but sometimes it throws
711
+ # "ImportError: DLL load failed while importing _fitz: Das angegebene Modul wurde nicht gefunden."
712
+
713
+ # doc = None # fitz.open(file)
714
+ # for i, page in enumerate(doc):
715
+ # pix = page.get_pixmap()
716
+ # img = Image.frombytes("RGB", [pix.width, pix.height], pix.samples)
717
+ # img.save(f"{outputfilename_without_extension}_{i}.png", "PNG")
718
+
719
+ @GeneralUtilities.check_arguments
720
+ def show_missing_files(self, folderA: str, folderB: str):
721
+ for file in GeneralUtilities.get_missing_files(folderA, folderB):
722
+ GeneralUtilities.write_message_to_stdout(file)
723
+
724
+ @GeneralUtilities.check_arguments
725
+ def SCCreateEmptyFileWithSpecificSize(self, name: str, size_string: str) -> int:
726
+ if size_string.isdigit():
727
+ size = int(size_string)
728
+ else:
729
+ if len(size_string) >= 3:
730
+ if (size_string.endswith("kb")):
731
+ size = int(size_string[:-2]) * pow(10, 3)
732
+ elif (size_string.endswith("mb")):
733
+ size = int(size_string[:-2]) * pow(10, 6)
734
+ elif (size_string.endswith("gb")):
735
+ size = int(size_string[:-2]) * pow(10, 9)
736
+ elif (size_string.endswith("kib")):
737
+ size = int(size_string[:-3]) * pow(2, 10)
738
+ elif (size_string.endswith("mib")):
739
+ size = int(size_string[:-3]) * pow(2, 20)
740
+ elif (size_string.endswith("gib")):
741
+ size = int(size_string[:-3]) * pow(2, 30)
742
+ else:
743
+ GeneralUtilities.write_message_to_stderr("Wrong format")
744
+ return 1
745
+ else:
746
+ GeneralUtilities.write_message_to_stderr("Wrong format")
747
+ return 1
748
+ with open(name, "wb") as f:
749
+ f.seek(size-1)
750
+ f.write(b"\0")
751
+ return 0
752
+
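+ # Editor's note: illustrative calls for the size-suffixes handled above (the file-names are hypothetical):
+ #   sc = ScriptCollectionCore()
+ #   sc.SCCreateEmptyFileWithSpecificSize("filler1.bin", "10mb")   # 10 * 10^6 bytes
+ #   sc.SCCreateEmptyFileWithSpecificSize("filler2.bin", "10mib")  # 10 * 2^20 bytes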
753
+ @GeneralUtilities.check_arguments
754
+ def SCCreateHashOfAllFiles(self, folder: str) -> None:
755
+ for file in GeneralUtilities.absolute_file_paths(folder):
756
+ with open(file+".sha256", "w+", encoding="utf-8") as f:
757
+ f.write(GeneralUtilities.get_sha256_of_file(file))
758
+
759
+ @GeneralUtilities.check_arguments
760
+ def SCCreateSimpleMergeWithoutRelease(self, repository: str, sourcebranch: str, targetbranch: str, remotename: str, remove_source_branch: bool) -> None:
761
+ commitid = self.git_merge(repository, sourcebranch, targetbranch, False, True)
762
+ self.git_merge(repository, targetbranch, sourcebranch, True, True)
763
+ created_version = self.get_semver_version_from_gitversion(repository)
764
+ self.git_create_tag(repository, commitid, f"v{created_version}", True)
765
+ self.git_push(repository, remotename, targetbranch, targetbranch, False, True)
766
+ if (GeneralUtilities.string_has_nonwhitespace_content(remotename)):
767
+ self.git_push(repository, remotename, sourcebranch, sourcebranch, False, True)
768
+ if (remove_source_branch):
769
+ self.git_remove_branch(repository, sourcebranch)
770
+
771
+ @GeneralUtilities.check_arguments
772
+ def sc_organize_lines_in_file(self, file: str, encoding: str, sort: bool = False, remove_duplicated_lines: bool = False, ignore_first_line: bool = False, remove_empty_lines: bool = True, ignored_start_character: list = list()) -> int:
773
+ if os.path.isfile(file):
774
+
775
+ # read file
776
+ lines = GeneralUtilities.read_lines_from_file(file, encoding)
777
+ if (len(lines) == 0):
778
+ return 0
779
+
780
+ # store first line if desired
781
+
782
+ if (ignore_first_line):
783
+ first_line = lines.pop(0)
784
+
785
+ # remove empty lines if desired
786
+ if remove_empty_lines:
787
+ temp = lines
788
+ lines = []
789
+ for line in temp:
790
+ if (not (GeneralUtilities.string_is_none_or_whitespace(line))):
791
+ lines.append(line)
792
+
793
+ # remove duplicated lines if desired
794
+ if remove_duplicated_lines:
795
+ lines = GeneralUtilities.remove_duplicates(lines)
796
+
797
+ # sort lines if desired
798
+ if sort:
799
+ lines = sorted(lines, key=lambda singleline: self.__adapt_line_for_sorting(singleline, ignored_start_character))
800
+
801
+ # reinsert first line
802
+ if ignore_first_line:
803
+ lines.insert(0, first_line)
804
+
805
+ # write result to file
806
+ GeneralUtilities.write_lines_to_file(file, lines, encoding)
807
+
808
+ return 0
809
+ else:
810
+ GeneralUtilities.write_message_to_stdout(f"File '{file}' does not exist")
811
+ return 1
812
+
813
+ @GeneralUtilities.check_arguments
814
+ def __adapt_line_for_sorting(self, line: str, ignored_start_characters: list):
815
+ result = line.lower()
816
+ while len(result) > 0 and result[0] in ignored_start_characters:
817
+ result = result[1:]
818
+ return result
819
+
820
+ @GeneralUtilities.check_arguments
821
+ def SCGenerateSnkFiles(self, outputfolder, keysize=4096, amountofkeys=10) -> int:
822
+ GeneralUtilities.ensure_directory_exists(outputfolder)
823
+ for _ in range(amountofkeys):
824
+ file = os.path.join(outputfolder, str(uuid.uuid4())+".snk")
825
+ argument = f"-k {keysize} {file}"
826
+ self.run_program("sn", argument, outputfolder)
827
+
828
+ @GeneralUtilities.check_arguments
829
+ def __merge_files(self, sourcefile: str, targetfile: str) -> None:
830
+ with open(sourcefile, "rb") as f:
831
+ source_data = f.read()
832
+ with open(targetfile, "ab") as fout:
833
+ merge_separator = [0x0A]
834
+ fout.write(bytes(merge_separator))
835
+ fout.write(source_data)
836
+
837
+ @GeneralUtilities.check_arguments
838
+ def __process_file(self, file: str, substringInFilename: str, newSubstringInFilename: str, conflictResolveMode: str) -> None:
839
+ new_filename = os.path.join(os.path.dirname(file), os.path.basename(file).replace(substringInFilename, newSubstringInFilename))
840
+ if file != new_filename:
841
+ if os.path.isfile(new_filename):
842
+ if filecmp.cmp(file, new_filename):
843
+ send2trash.send2trash(file)
844
+ else:
845
+ if conflictResolveMode == "ignore":
846
+ pass
847
+ elif conflictResolveMode == "preservenewest":
848
+ if (os.path.getmtime(file) - os.path.getmtime(new_filename) > 0):
849
+ send2trash.send2trash(file)
850
+ else:
851
+ send2trash.send2trash(new_filename)
852
+ os.rename(file, new_filename)
853
+ elif (conflictResolveMode == "merge"):
854
+ self.__merge_files(file, new_filename)
855
+ send2trash.send2trash(file)
856
+ else:
857
+ raise ValueError('Unknown conflict resolve mode')
858
+ else:
859
+ os.rename(file, new_filename)
860
+
861
+ @GeneralUtilities.check_arguments
862
+ def SCReplaceSubstringsInFilenames(self, folder: str, substringInFilename: str, newSubstringInFilename: str, conflictResolveMode: str) -> None:
863
+ for file in GeneralUtilities.absolute_file_paths(folder):
864
+ self.__process_file(file, substringInFilename, newSubstringInFilename, conflictResolveMode)
865
+
866
+ @GeneralUtilities.check_arguments
867
+ def __check_file(self, file: str, searchstring: str) -> None:
868
+ bytes_ascii = bytes(searchstring, "ascii")
869
+ # often called "unicode-encoding"
870
+ bytes_utf16 = bytes(searchstring, "utf-16")
871
+ bytes_utf8 = bytes(searchstring, "utf-8")
872
+ with open(file, mode='rb') as file_object:
873
+ content = file_object.read()
874
+ if bytes_ascii in content:
875
+ GeneralUtilities.write_message_to_stdout(file)
876
+ elif bytes_utf16 in content:
877
+ GeneralUtilities.write_message_to_stdout(file)
878
+ elif bytes_utf8 in content:
879
+ GeneralUtilities.write_message_to_stdout(file)
880
+
881
+ @GeneralUtilities.check_arguments
882
+ def SCSearchInFiles(self, folder: str, searchstring: str) -> None:
883
+ for file in GeneralUtilities.absolute_file_paths(folder):
884
+ self.__check_file(file, searchstring)
885
+
886
+ @GeneralUtilities.check_arguments
887
+ def __print_qr_code_by_csv_line(self, displayname: str, website: str, emailaddress: str, key: str, period: str) -> None:
888
+ qrcode_content = f"otpauth://totp/{website}:{emailaddress}?secret={key}&issuer={displayname}&period={period}"
889
+ GeneralUtilities.write_message_to_stdout(
890
+ f"{displayname} ({emailaddress}):")
891
+ GeneralUtilities.write_message_to_stdout(qrcode_content)
892
+ qr = qrcode.QRCode()
893
+ qr.add_data(qrcode_content)
894
+ f = io.StringIO()
895
+ qr.print_ascii(out=f)
896
+ f.seek(0)
897
+ GeneralUtilities.write_message_to_stdout(f.read())
898
+
899
+ @GeneralUtilities.check_arguments
900
+ def SCShow2FAAsQRCode(self, csvfile: str) -> None:
901
+ separator_line = "--------------------------------------------------------"
902
+ lines = GeneralUtilities.read_csv_file(csvfile, True)
903
+ lines.sort(key=lambda items: ''.join(items).lower())
904
+ for line in lines:
905
+ GeneralUtilities.write_message_to_stdout(separator_line)
906
+ self.__print_qr_code_by_csv_line(
907
+ line[0], line[1], line[2], line[3], line[4])
908
+ GeneralUtilities.write_message_to_stdout(separator_line)
909
+
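+ # Editor's note: illustrative call (the csv-file-path is hypothetical); as consumed above, each csv-line must provide displayname, website, emailaddress, key and period in exactly that column-order:
+ #   sc = ScriptCollectionCore()
+ #   sc.SCShow2FAAsQRCode("/secure/totp-accounts.csv")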
910
+ @GeneralUtilities.check_arguments
911
+ def SCCalculateBitcoinBlockHash(self, block_version_number: str, previousblockhash: str, transactionsmerkleroot: str, timestamp: str, target: str, nonce: str) -> str:
912
+ # Example-values:
913
+ # block_version_number: "00000020"
914
+ # previousblockhash: "66720b99e07d284bd4fe67ff8c49a5db1dd8514fcdab61000000000000000000"
915
+ # transactionsmerkleroot: "7829844f4c3a41a537b3131ca992643eaa9d093b2383e4cdc060ad7dc5481187"
916
+ # timestamp: "51eb505a"
917
+ # target: "c1910018"
918
+ # nonce: "de19b302"
919
+ header = str(block_version_number + previousblockhash + transactionsmerkleroot + timestamp + target + nonce)
920
+ return binascii.hexlify(hashlib.sha256(hashlib.sha256(binascii.unhexlify(header)).digest()).digest()[::-1]).decode('utf-8')
921
+
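+ # Editor's note: the function above concatenates the six hex-encoded little-endian header-fields, applies SHA256 twice to the decoded bytes and returns the byte-reversed digest as a hex-string.
+ # Illustrative call with the example-values from the comment above:
+ #   sc = ScriptCollectionCore()
+ #   block_hash = sc.SCCalculateBitcoinBlockHash("00000020", "66720b99e07d284bd4fe67ff8c49a5db1dd8514fcdab61000000000000000000", "7829844f4c3a41a537b3131ca992643eaa9d093b2383e4cdc060ad7dc5481187", "51eb505a", "c1910018", "de19b302")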
922
+ @GeneralUtilities.check_arguments
923
+ def SCChangeHashOfProgram(self, inputfile: str) -> None:
924
+ valuetoappend = str(uuid.uuid4())
925
+
926
+ outputfile = inputfile + '.modified'
927
+
928
+ shutil.copy2(inputfile, outputfile)
929
+ with open(outputfile, 'a', encoding="utf-8") as file:
930
+ # TODO use rcedit for .exe-files instead of appending valuetoappend ( https://github.com/electron/rcedit/ )
931
+ # background: you can retrieve the "original-filename" from the .exe-file like discussed here:
932
+ # https://security.stackexchange.com/questions/210843/is-it-possible-to-change-original-filename-of-an-exe
933
+ # so removing the original filename with rcedit is probably a better way to make it more difficult to detect the programname.
934
+ # this would obviously also change the hashvalue of the program so appending a whitespace is not required anymore.
935
+ file.write(valuetoappend)
936
+
937
+ @GeneralUtilities.check_arguments
938
+ def __adjust_folder_name(self, folder: str) -> str:
939
+ result = os.path.dirname(folder).replace("\\", "/")
940
+ if result == "/":
941
+ return ""
942
+ else:
943
+ return result
944
+
945
+ @GeneralUtilities.check_arguments
946
+ def __create_iso(self, folder, iso_file) -> None:
947
+ created_directories = []
948
+ files_directory = "FILES"
949
+ iso = pycdlib.PyCdlib()
950
+ iso.new()
951
+ files_directory = files_directory.upper()
952
+ iso.add_directory("/" + files_directory)
953
+ created_directories.append("/" + files_directory)
954
+ for root, _, files in os.walk(folder):
955
+ for file in files:
956
+ full_path = os.path.join(root, file)
957
+ with (open(full_path, "rb").read()) as text_io_wrapper:
958
+ content = text_io_wrapper
959
+ path_in_iso = '/' + files_directory + \
960
+ self.__adjust_folder_name(full_path[len(folder)::1]).upper()
961
+ if path_in_iso not in created_directories:
962
+ iso.add_directory(path_in_iso)
963
+ created_directories.append(path_in_iso)
964
+ iso.add_fp(BytesIO(content), len(content), path_in_iso + '/' + file.upper() + ';1')
965
+ iso.write(iso_file)
966
+ iso.close()
967
+
968
+ @GeneralUtilities.check_arguments
969
+ def SCCreateISOFileWithObfuscatedFiles(self, inputfolder: str, outputfile: str, printtableheadline, createisofile, extensions) -> None:
970
+ if (os.path.isdir(inputfolder)):
971
+ namemappingfile = "name_map.csv"
972
+ files_directory = inputfolder
973
+ files_directory_obf = f"{files_directory}_Obfuscated"
974
+ self.SCObfuscateFilesFolder(
975
+ inputfolder, printtableheadline, namemappingfile, extensions)
976
+ os.rename(namemappingfile, os.path.join(
977
+ files_directory_obf, namemappingfile))
978
+ if createisofile:
979
+ self.__create_iso(files_directory_obf, outputfile)
980
+ shutil.rmtree(files_directory_obf)
981
+ else:
982
+ raise ValueError(f"Directory not found: '{inputfolder}'")
983
+
984
+ @GeneralUtilities.check_arguments
985
+ def SCFilenameObfuscator(self, inputfolder: str, printtableheadline, namemappingfile: str, extensions: str) -> None:
986
+ obfuscate_all_files = extensions == "*"
987
+ if (obfuscate_all_files):
988
+ obfuscate_file_extensions = None
989
+ else:
990
+ obfuscate_file_extensions = extensions.split(",")
991
+ if (os.path.isdir(inputfolder)):
992
+ printtableheadline = GeneralUtilities.string_to_boolean(
993
+ printtableheadline)
994
+ files = []
995
+ if not os.path.isfile(namemappingfile):
996
+ with open(namemappingfile, "a", encoding="utf-8"):
997
+ pass
998
+ if printtableheadline:
999
+ GeneralUtilities.append_line_to_file(
1000
+ namemappingfile, "Original filename;new filename;SHA2-hash of file")
1001
+ for file in GeneralUtilities.absolute_file_paths(inputfolder):
1002
+ if os.path.isfile(os.path.join(inputfolder, file)):
1003
+ if obfuscate_all_files or self.__extension_matchs(file, obfuscate_file_extensions):
1004
+ files.append(file)
1005
+ for file in files:
1006
+ hash_value = GeneralUtilities.get_sha256_of_file(file)
1007
+ extension = Path(file).suffix
1008
+ new_file_name_without_path = str(uuid.uuid4())[0:8] + extension
1009
+ new_file_name = os.path.join(
1010
+ os.path.dirname(file), new_file_name_without_path)
1011
+ os.rename(file, new_file_name)
1012
+ GeneralUtilities.append_line_to_file(namemappingfile, os.path.basename(file) + ";" + new_file_name_without_path + ";" + hash_value)
1013
+ else:
1014
+ raise ValueError(f"Directory not found: '{inputfolder}'")
1015
+
1016
+ @GeneralUtilities.check_arguments
1017
+ def __extension_matchs(self, file: str, obfuscate_file_extensions) -> bool:
1018
+ for extension in obfuscate_file_extensions:
1019
+ if file.lower().endswith("."+extension.lower()):
1020
+ return True
1021
+ return False
1022
+
1023
+ @GeneralUtilities.check_arguments
1024
+ def SCHealthcheck(self, file: str) -> int:
1025
+ lines = GeneralUtilities.read_lines_from_file(file)
1026
+ for line in reversed(lines):
1027
+ if not GeneralUtilities.string_is_none_or_whitespace(line):
1028
+ if "RunningHealthy (" in line: # TODO use regex
1029
+ GeneralUtilities.write_message_to_stderr(f"Healthy running due to line '{line}' in file '{file}'.")
1030
+ return 0
1031
+ else:
1032
+ GeneralUtilities.write_message_to_stderr(f"Not healthy running due to line '{line}' in file '{file}'.")
1033
+ return 1
1034
+ GeneralUtilities.write_message_to_stderr(f"No valid line found for healthycheck in file '{file}'.")
1035
+ return 2
1036
+
1037
+ @GeneralUtilities.check_arguments
1038
+ def SCObfuscateFilesFolder(self, inputfolder: str, printtableheadline, namemappingfile: str, extensions: str) -> None:
1039
+ obfuscate_all_files = extensions == "*"
1040
+ if (obfuscate_all_files):
1041
+ obfuscate_file_extensions = None
1042
+ else:
1043
+ if "," in extensions:
1044
+ obfuscate_file_extensions = extensions.split(",")
1045
+ else:
1046
+ obfuscate_file_extensions = [extensions]
1047
+ newd = inputfolder+"_Obfuscated"
1048
+ shutil.copytree(inputfolder, newd)
1049
+ inputfolder = newd
1050
+ if (os.path.isdir(inputfolder)):
1051
+ for file in GeneralUtilities.absolute_file_paths(inputfolder):
1052
+ if obfuscate_all_files or self.__extension_matchs(file, obfuscate_file_extensions):
1053
+ self.SCChangeHashOfProgram(file)
1054
+ os.remove(file)
1055
+ os.rename(file + ".modified", file)
1056
+ self.SCFilenameObfuscator(inputfolder, printtableheadline, namemappingfile, extensions)
1057
+ else:
1058
+ raise ValueError(f"Directory not found: '{inputfolder}'")
1059
+
1060
+ @GeneralUtilities.check_arguments
1061
+ def get_services_from_yaml_file(self, yaml_file: str) -> list[str]:
1062
+ with open(yaml_file, encoding="utf-8") as stream:
1063
+ loaded = yaml.safe_load(stream)
1064
+ services = loaded["services"]
1065
+ result = list(services.keys())
1066
+ return result
1067
+
1068
+ @GeneralUtilities.check_arguments
1069
+ def kill_docker_container(self, container_name: str) -> None:
1070
+ self.run_program("docker", f"container rm -f {container_name}")
1071
+
1072
+ @GeneralUtilities.check_arguments
1073
+ def get_docker_debian_version(self, image_tag: str) -> str:
1074
+ result = ScriptCollectionCore().run_program_argsasarray(
1075
+ "docker", ['run', f'debian:{image_tag}', 'bash', '-c', 'apt-get -y update && apt-get -y install lsb-release && lsb_release -cs'])
1076
+ result_line = GeneralUtilities.string_to_lines(result[1])[-2]
1077
+ return result_line
1078
+
1079
+ @GeneralUtilities.check_arguments
1080
+ def get_latest_tor_version_of_debian_repository(self, debian_version: str) -> str:
1081
+ package_url: str = f"https://deb.torproject.org/torproject.org/dists/{debian_version}/main/binary-amd64/Packages"
1082
+ r = requests.get(package_url, timeout=5)
1083
+ if r.status_code != 200:
1084
+ raise ValueError(f"Checking for latest tor package resulted in HTTP-response-code {r.status_code}.")
1085
+ lines = GeneralUtilities.string_to_lines(GeneralUtilities.bytes_to_string(r.content))
1086
+ version_line_prefix = "Version: "
1087
+ version_content_line = [line for line in lines if line.startswith(version_line_prefix)][1]
1088
+ version_with_overhead = version_content_line[len(version_line_prefix):]
1089
+ tor_version = version_with_overhead.split("~")[0]
1090
+ return tor_version
1091
+
1092
+ def run_testcases_for_python_project(self, repository_folder: str):
1093
+ self.run_program("coverage", "run -m pytest", repository_folder)
1094
+ self.run_program("coverage", "xml", repository_folder)
1095
+ GeneralUtilities.ensure_directory_exists(os.path.join(repository_folder, "Other/TestCoverage"))
1096
+ coveragefile = os.path.join(repository_folder, "Other/TestCoverage/TestCoverage.xml")
1097
+ GeneralUtilities.ensure_file_does_not_exist(coveragefile)
1098
+ os.rename(os.path.join(repository_folder, "coverage.xml"), coveragefile)
1099
+
1100
+ @GeneralUtilities.check_arguments
1101
+ def get_file_permission(self, file: str) -> str:
1102
+ """This function returns an usual octet-triple, for example "700"."""
1103
+ ls_output: str = self.run_ls_for_folder(file)
1104
+ return self.__get_file_permission_helper(ls_output)
1105
+
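+ # Editor's note: worked example for the octet-calculation used below: for an ls-mode-string "drwxr-x---" the owner-bits rwx map to 7, the group-bits r-x to 5 and the other-bits --- to 0, so the result is "750".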
1106
+ @GeneralUtilities.check_arguments
1107
+ def __get_file_permission_helper(self, permissions: str) -> str:
1108
+ return str(self.__to_octet(permissions[1:4]))+str(self.__to_octet(permissions[4:7]))+str(self.__to_octet(permissions[7:10]))  # skip the leading file-type character of the ls-mode-string
1109
+
1110
+ @GeneralUtilities.check_arguments
1111
+ def __to_octet(self, string: str) -> int:
1112
+ return int(self.__to_octet_helper(string[0])+self.__to_octet_helper(string[1])+self.__to_octet_helper(string[2]), 2)
1113
+
1114
+ @GeneralUtilities.check_arguments
1115
+ def __to_octet_helper(self, string: str) -> str:
1116
+ if (string == "-"):
1117
+ return "0"
1118
+ else:
1119
+ return "1"
1120
+
1121
+ @GeneralUtilities.check_arguments
1122
+ def get_file_owner(self, file: str) -> str:
1123
+ """This function returns the user and the group in the format "user:group"."""
1124
+ ls_output: str = self.run_ls_for_folder(file)
1125
+ return self.__get_file_owner_helper(ls_output)
1126
+
1127
+ @GeneralUtilities.check_arguments
1128
+ def __get_file_owner_helper(self, ls_output: str) -> str:
1129
+ splitted = ls_output.split()
1130
+ return f"{splitted[2]}:{splitted[3]}"
1131
+
1132
+ @GeneralUtilities.check_arguments
1133
+ def get_file_owner_and_file_permission(self, file: str) -> str:
1134
+ ls_output: str = self.run_ls_for_folder(file)
1135
+ return [self.__get_file_owner_helper(ls_output), self.__get_file_permission_helper(ls_output)]
1136
+
1137
+ @GeneralUtilities.check_arguments
1138
+ def run_ls_for_folder(self, file_or_folder: str) -> str:
1139
+ file_or_folder = file_or_folder.replace("\\", "/")
1140
+ GeneralUtilities.assert_condition(os.path.isfile(file_or_folder) or os.path.isdir(file_or_folder), f"Can not execute 'ls -ld' because '{file_or_folder}' does not exist.")
1141
+ ls_result = self.run_program_argsasarray("ls", ["-ld", file_or_folder])
1142
+ GeneralUtilities.assert_condition(ls_result[0] == 0, f"'ls -ld {file_or_folder}' resulted in exitcode {str(ls_result[0])}. StdErr: {ls_result[2]}")
1143
+ GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(ls_result[1]), f"'ls -ld' of '{file_or_folder}' had an empty output. StdErr: '{ls_result[2]}'")
1144
+ GeneralUtilities.write_message_to_stdout(ls_result[1])
1145
+ output = ls_result[1]
1146
+ result = output.replace("\n", "")
1147
+ result = ' '.join(result.split()) # reduce multiple whitespaces to one
1148
+ return result
1149
+
1150
+ @GeneralUtilities.check_arguments
1151
+ def run_ls_for_folder_content(self, file_or_folder: str) -> list[str]:
1152
+ file_or_folder = file_or_folder.replace("\\", "/")
1153
+ GeneralUtilities.assert_condition(os.path.isfile(file_or_folder) or os.path.isdir(file_or_folder), f"Can not execute 'ls -la' because '{file_or_folder}' does not exist.")
1154
+ ls_result = self.run_program_argsasarray("ls", ["-la", file_or_folder])
1155
+ GeneralUtilities.assert_condition(ls_result[0] == 0, f"'ls -la {file_or_folder}' resulted in exitcode {str(ls_result[0])}. StdErr: {ls_result[2]}")
1156
+ GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(ls_result[1]), f"'ls -la' of '{file_or_folder}' had an empty output. StdErr: '{ls_result[2]}'")
1157
+ GeneralUtilities.write_message_to_stdout(ls_result[1])
1158
+ output = ls_result[1]
1159
+ result = output.split("\n")[3:] # skip the lines with "Total", "." and ".."
1160
+ result = [' '.join(line.split()) for line in result] # reduce multiple whitespaces to one
1161
+ return result
1162
+
1163
+ @GeneralUtilities.check_arguments
1164
+ def set_permission(self, file_or_folder: str, permissions: str, recursive: bool = False) -> None:
1165
+ """This function expects an usual octet-triple, for example "700"."""
1166
+ args = []
1167
+ if recursive:
1168
+ args.append("--recursive")
1169
+ args.append(permissions)
1170
+ args.append(file_or_folder)
1171
+ self.run_program_argsasarray("chmod", args)
1172
+
1173
+ @GeneralUtilities.check_arguments
1174
+ def set_owner(self, file_or_folder: str, owner: str, recursive: bool = False, follow_symlinks: bool = False) -> None:
1175
+ """This function expects the user and the group in the format "user:group"."""
1176
+ args = []
1177
+ if recursive:
1178
+ args.append("--recursive")
1179
+ if not follow_symlinks:  # chown follows symlinks by default; "--no-dereference" makes it operate on the link itself
1180
+ args.append("--no-dereference")
1181
+ args.append(owner)
1182
+ args.append(file_or_folder)
1183
+ self.run_program_argsasarray("chown", args)
1184
+
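+ # Editor's note: illustrative calls for the two functions above (path, user and group are hypothetical; chmod and chown must be available, i.e. a unixoid system is assumed):
+ #   sc = ScriptCollectionCore()
+ #   sc.set_permission("/srv/data", "750", recursive=True)
+ #   sc.set_owner("/srv/data", "www-data:www-data", recursive=True)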
1185
+ # <run programs>
1186
+
1187
+ @GeneralUtilities.check_arguments
1188
+ def __run_program_argsasarray_async_helper(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, custom_argument: object = None, interactive: bool = False) -> Popen:
1189
+ # Verbosity:
1190
+ # 0=Quiet (No output will be printed.)
1191
+ # 1=Normal (If the exitcode of the executed program is not 0 then the StdErr will be printed.)
1192
+ # 2=Full (Prints StdOut and StdErr of the executed program.)
1193
+ # 3=Verbose (Same as "Full" but with some more information.)
1194
+
1195
+ if isinstance(self.program_runner, ProgramRunnerEpew):
1196
+ custom_argument = CustomEpewArgument(print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, verbosity, arguments_for_log)
1197
+ popen: Popen = self.program_runner.run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, custom_argument, interactive)
1198
+ return popen
1199
+
1200
+ @staticmethod
1201
+ def __enqueue_output(file, queue):
1202
+ for line in iter(file.readline, ''):
1203
+ queue.put(line)
1204
+ file.close()
1205
+
1206
+ @staticmethod
1207
+ def __read_popen_pipes(p: Popen):
1208
+ with ThreadPoolExecutor(2) as pool:
1209
+ q_stdout = Queue()
1210
+ q_stderr = Queue()
1211
+
1212
+ pool.submit(ScriptCollectionCore.__enqueue_output, p.stdout, q_stdout)
1213
+ pool.submit(ScriptCollectionCore.__enqueue_output, p.stderr, q_stderr)
1214
+ while (p.poll() is None) or (not q_stdout.empty()) or (not q_stderr.empty()):
1215
+ time.sleep(0.01)
1216
+ out_line = None
1217
+ err_line = None
1218
+ try:
1219
+ out_line = q_stdout.get_nowait()
1220
+ except Empty:
1221
+ pass
1222
+ try:
1223
+ err_line = q_stderr.get_nowait()
1224
+ except Empty:
1225
+ pass
1226
+
1227
+ yield (out_line, err_line)
1228
+
1229
+ # Return-values program_runner: Exitcode, StdOut, StdErr, Pid
1230
+ @GeneralUtilities.check_arguments
1231
+ def run_program_argsasarray(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, throw_exception_if_exitcode_is_not_zero: bool = True, custom_argument: object = None, interactive: bool = False) -> tuple[int, str, str, int]:
1232
+ # verbosity 0: No output will be logged.
1233
+ # verbosity 1: If the exitcode of the executed program is not 0 then the StdErr will be logged. This is the default verbosity-level.
1234
+ # verbosity 2: Logs and prints StdOut and StdErr of the executed program in realtime.
1235
+ # verbosity 3: Same as verbosity-level 2 but with some more overhead-information.
1236
+ try:
1237
+ arguments_as_str = ' '.join(arguments_as_array)
1238
+ mock_loader_result = self.__try_load_mock(program, arguments_as_str, working_directory)
1239
+ if mock_loader_result[0]:
1240
+ return mock_loader_result[1]
1241
+
1242
+ working_directory = self.__adapt_workingdirectory(working_directory)
1243
+
1244
+ if arguments_for_log is None:
1245
+ arguments_for_log = arguments_as_array
1246
+
1247
+ arguments_for_log_as_string: str = ' '.join(arguments_for_log)
1248
+ cmd = f'{working_directory}>{program} {arguments_for_log_as_string}'
1249
+
1250
+ if GeneralUtilities.string_is_none_or_whitespace(title):
1251
+ info_for_log = cmd
1252
+ else:
1253
+ info_for_log = title
1254
+
1255
+ if verbosity >= 3:
1256
+ GeneralUtilities.write_message_to_stdout(f"Run '{info_for_log}'.")
1257
+
1258
+ print_live_output = 1 < verbosity
1259
+
1260
+ exit_code: int = None
1261
+ stdout: str = ""
1262
+ stderr: str = ""
1263
+ pid: int = None
1264
+
1265
+ with self.__run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive) as process:
1266
+
1267
+ if log_file is not None:
1268
+ GeneralUtilities.ensure_file_exists(log_file)
1269
+ pid = process.pid
1270
+ for out_line_plain, err_line_plain in ScriptCollectionCore.__read_popen_pipes(process): # see https://stackoverflow.com/a/57084403/3905529
1271
+
1272
+ if out_line_plain is not None:
1273
+ out_line: str = None
1274
+ if isinstance(out_line_plain, str):
1275
+ out_line = out_line_plain
1276
+ elif isinstance(out_line_plain, bytes):
1277
+ out_line = GeneralUtilities.bytes_to_string(out_line_plain)
1278
+ else:
1279
+ raise ValueError(f"Unknown type of output: {str(type(out_line_plain))}")
1280
+
1281
+ if out_line is not None and GeneralUtilities.string_has_content(out_line):
1282
+ if out_line.endswith("\n"):
1283
+ out_line = out_line[:-1]
1284
+ if print_live_output:
1285
+ print(out_line, end='\n', file=sys.stdout, flush=True)
1286
+ if 0 < len(stdout):
1287
+ stdout = stdout+"\n"
1288
+ stdout = stdout+out_line
1289
+ if log_file is not None:
1290
+ GeneralUtilities.append_line_to_file(log_file, out_line)
1291
+
1292
+ if err_line_plain is not None:
1293
+ err_line: str = None
1294
+ if isinstance(err_line_plain, str):
1295
+ err_line = err_line_plain
1296
+ elif isinstance(err_line_plain, bytes):
1297
+ err_line = GeneralUtilities.bytes_to_string(err_line_plain)
1298
+ else:
1299
+ raise ValueError(f"Unknown type of output: {str(type(err_line_plain))}")
1300
+ if err_line is not None and GeneralUtilities.string_has_content(err_line):
1301
+ if err_line.endswith("\n"):
1302
+ err_line = err_line[:-1]
1303
+ if print_live_output:
1304
+ print(err_line, end='\n', file=sys.stderr, flush=True)
1305
+ if 0 < len(stderr):
1306
+ stderr = stderr+"\n"
1307
+ stderr = stderr+err_line
1308
+ if log_file is not None:
1309
+ GeneralUtilities.append_line_to_file(log_file, err_line)
1310
+
1311
+ exit_code = process.returncode
1312
+
1313
+ if throw_exception_if_exitcode_is_not_zero and exit_code != 0:
1314
+ raise ValueError(f"Program '{working_directory}>{program} {arguments_for_log_as_string}' resulted in exitcode {exit_code}. (StdOut: '{stdout}', StdErr: '{stderr}')")
1315
+
1316
+ GeneralUtilities.assert_condition(exit_code is not None, f"Exitcode of program-run of '{info_for_log}' is None.")
1317
+ result = (exit_code, stdout, stderr, pid)
1318
+ return result
1319
+ except Exception as e:
1320
+ raise e
1321
+
1322
+ # Return-values program_runner: Exitcode, StdOut, StdErr, Pid
1323
+ @GeneralUtilities.check_arguments
1324
+ def run_program(self, program: str, arguments: str = "", working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, throw_exception_if_exitcode_is_not_zero: bool = True, custom_argument: object = None, interactive: bool = False) -> tuple[int, str, str, int]:
1325
+ return self.run_program_argsasarray(program, GeneralUtilities.arguments_to_array(arguments), working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, throw_exception_if_exitcode_is_not_zero, custom_argument, interactive)
1326
+
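+ # Editor's note: illustrative call (the working-directory is hypothetical); the returned tuple contains exitcode, stdout, stderr and pid as documented above:
+ #   sc = ScriptCollectionCore()
+ #   exit_code, stdout, stderr, pid = sc.run_program("git", "status", "/tmp/demo-repo", verbosity=2)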
1327
+ # Return-values program_runner: Pid
1328
+ @GeneralUtilities.check_arguments
1329
+ def run_program_argsasarray_async(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, custom_argument: object = None, interactive: bool = False) -> int:
1330
+ mock_loader_result = self.__try_load_mock(program, ' '.join(arguments_as_array), working_directory)
1331
+ if mock_loader_result[0]:
1332
+ return mock_loader_result[1]
1333
+ process: Popen = self.__run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive)
1334
+ return process.pid
1335
+
1336
+ # Return-values program_runner: Pid
1337
+ @GeneralUtilities.check_arguments
1338
+ def run_program_async(self, program: str, arguments: str = "", working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, custom_argument: object = None, interactive: bool = False) -> int:
1339
+ return self.run_program_argsasarray_async(program, GeneralUtilities.arguments_to_array(arguments), working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive)
1340
+
1341
+ @GeneralUtilities.check_arguments
1342
+ def __try_load_mock(self, program: str, arguments: str, working_directory: str) -> tuple[bool, tuple[int, str, str, int]]:
1343
+ if self.mock_program_calls:
1344
+ try:
1345
+ return [True, self.__get_mock_program_call(program, arguments, working_directory)]
1346
+ except LookupError:
1347
+ if not self.execute_program_really_if_no_mock_call_is_defined:
1348
+ raise
1349
+ return [False, None]
1350
+
1351
+ @GeneralUtilities.check_arguments
1352
+ def __adapt_workingdirectory(self, workingdirectory: str) -> str:
1353
+ if workingdirectory is None:
1354
+ return os.getcwd()
1355
+ else:
1356
+ return GeneralUtilities.resolve_relative_path_from_current_working_directory(workingdirectory)
1357
+
1358
+ @GeneralUtilities.check_arguments
1359
+ def verify_no_pending_mock_program_calls(self):
1360
+ if (len(self.__mocked_program_calls) > 0):
1361
+ raise AssertionError("The following mock-calls were not called:\n"+",\n ".join([self.__format_mock_program_call(r) for r in self.__mocked_program_calls]))
1362
+
1363
+ @GeneralUtilities.check_arguments
1364
+ def __format_mock_program_call(self, r) -> str:
1365
+ r: ScriptCollectionCore.__MockProgramCall = r
1366
+ return f"'{r.workingdirectory}>{r.program} {r.argument}' (" \
1367
+ f"exitcode: {GeneralUtilities.str_none_safe(str(r.exit_code))}, " \
1368
+ f"pid: {GeneralUtilities.str_none_safe(str(r.pid))}, "\
1369
+ f"stdout: {GeneralUtilities.str_none_safe(str(r.stdout))}, " \
1370
+ f"stderr: {GeneralUtilities.str_none_safe(str(r.stderr))})"
1371
+
1372
+ @GeneralUtilities.check_arguments
1373
+ def register_mock_program_call(self, program: str, argument: str, workingdirectory: str, result_exit_code: int, result_stdout: str, result_stderr: str, result_pid: int, amount_of_expected_calls=1):
1374
+ "This function is for test-purposes only"
1375
+ for _ in itertools.repeat(None, amount_of_expected_calls):
1376
+ mock_call = ScriptCollectionCore.__MockProgramCall()
1377
+ mock_call.program = program
1378
+ mock_call.argument = argument
1379
+ mock_call.workingdirectory = workingdirectory
1380
+ mock_call.exit_code = result_exit_code
1381
+ mock_call.stdout = result_stdout
1382
+ mock_call.stderr = result_stderr
1383
+ mock_call.pid = result_pid
1384
+ self.__mocked_program_calls.append(mock_call)
1385
+
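+ # Editor's note: illustrative test-usage of the mock-mechanism above; the registered program, argument and working-directory are matched as regular expressions:
+ #   sc = ScriptCollectionCore()
+ #   sc.mock_program_calls = True
+ #   sc.register_mock_program_call("git", "status", ".*", 0, "clean", "", 42)
+ #   # code under test: sc.run_program("git", "status", "/any/folder") now returns (0, "clean", "", 42)
+ #   sc.verify_no_pending_mock_program_calls()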
1386
+ @GeneralUtilities.check_arguments
1387
+ def __get_mock_program_call(self, program: str, argument: str, workingdirectory: str):
1388
+ result: ScriptCollectionCore.__MockProgramCall = None
1389
+ for mock_call in self.__mocked_program_calls:
1390
+ if ((re.match(mock_call.program, program) is not None)
1391
+ and (re.match(mock_call.argument, argument) is not None)
1392
+ and (re.match(mock_call.workingdirectory, workingdirectory) is not None)):
1393
+ result = mock_call
1394
+ break
1395
+ if result is None:
1396
+ raise LookupError(f"Tried to execute mock-call '{workingdirectory}>{program} {argument}' but no mock-call was defined for that execution")
1397
+ else:
1398
+ self.__mocked_program_calls.remove(result)
1399
+ return (result.exit_code, result.stdout, result.stderr, result.pid)
1400
+
1401
+ @GeneralUtilities.check_arguments
1402
+ class __MockProgramCall:
1403
+ program: str
1404
+ argument: str
1405
+ workingdirectory: str
1406
+ exit_code: int
1407
+ stdout: str
1408
+ stderr: str
1409
+ pid: int
1410
+
1411
+ # </run programs>
1412
+
1413
+ @GeneralUtilities.check_arguments
1414
+ def extract_archive_with_7z(self, unzip_program_file: str, zipfile: str, password: str, output_directory: str) -> None:
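+ # Note: builds a 7z "x" (extract with full paths) command; the -p and -o switches are attached directly to their values, as required by 7-Zip.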
1415
+ password_set = password is not None
1416
+ file_name = Path(zipfile).name
1417
+ file_folder = os.path.dirname(zipfile)
1418
+ argument = "x"
1419
+ if password_set:
1420
+ argument = f"{argument} -p\"{password}\""
1421
+ argument = f"{argument} -o {output_directory}"
1422
+ argument = f"{argument} {file_name}"
1423
+ return self.run_program(unzip_program_file, argument, file_folder)
1424
+
1425
+ @GeneralUtilities.check_arguments
1426
+ def get_internet_time(self) -> datetime:
1427
+ response = ntplib.NTPClient().request('pool.ntp.org')
1428
+ return datetime.fromtimestamp(response.tx_time)
1429
+
1430
+ @GeneralUtilities.check_arguments
1431
+ def system_time_equals_internet_time(self, maximal_tolerance_difference: timedelta) -> bool:
1432
+ return abs(datetime.now() - self.get_internet_time()) < maximal_tolerance_difference
1433
+
1434
+ @GeneralUtilities.check_arguments
1435
+ def system_time_equals_internet_time_with_default_tolerance(self) -> bool:
1436
+ return self.system_time_equals_internet_time(self.__get_default_tolerance_for_system_time_equals_internet_time())
1437
+
1438
+ @GeneralUtilities.check_arguments
1439
+ def check_system_time(self, maximal_tolerance_difference: timedelta):
1440
+ if not self.system_time_equals_internet_time(maximal_tolerance_difference):
1441
+ raise ValueError("System time may be wrong")
1442
+
1443
+ @GeneralUtilities.check_arguments
1444
+ def check_system_time_with_default_tolerance(self) -> None:
1445
+ self.check_system_time(self.__get_default_tolerance_for_system_time_equals_internet_time())
1446
+
1447
+ @GeneralUtilities.check_arguments
1448
+ def __get_default_tolerance_for_system_time_equals_internet_time(self) -> timedelta:
1449
+ return timedelta(hours=0, minutes=0, seconds=3)
1450
+
1451
+ @GeneralUtilities.check_arguments
1452
+ def increment_version(self, input_version: str, increment_major: bool, increment_minor: bool, increment_patch: bool) -> str:
1453
+ splitted = input_version.split(".")
1454
+ GeneralUtilities.assert_condition(len(splitted) == 3, f"Version '{input_version}' does not have the 'major.minor.patch'-pattern.")
1455
+ major = int(splitted[0])
1456
+ minor = int(splitted[1])
1457
+ patch = int(splitted[2])
1458
+ if increment_major:
1459
+ major = major+1
1460
+ if increment_minor:
1461
+ minor = minor+1
1462
+ if increment_patch:
1463
+ patch = patch+1
1464
+ return f"{major}.{minor}.{patch}"
1465
+
1466
+ @GeneralUtilities.check_arguments
1467
+ def get_semver_version_from_gitversion(self, repository_folder: str) -> str:
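+ # Note: derives the MajorMinorPatch version via GitVersion; if there are uncommitted changes and the current commit is exactly the latest tag, the patch version is incremented; repositories without commits yield "0.1.0".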
1468
+ if (self.git_repository_has_commits(repository_folder)):
1469
+ result = self.get_version_from_gitversion(repository_folder, "MajorMinorPatch")
1470
+ if self.git_repository_has_uncommitted_changes(repository_folder):
1471
+ if self.get_current_git_branch_has_tag(repository_folder):
1472
+ id_of_latest_tag = self.git_get_commitid_of_tag(repository_folder, self.get_latest_git_tag(repository_folder))
1473
+ current_commit = self.git_get_commit_id(repository_folder)
1474
+ current_commit_is_on_latest_tag = id_of_latest_tag == current_commit
1475
+ if current_commit_is_on_latest_tag:
1476
+ result = self.increment_version(result, False, False, True)
1477
+ else:
1478
+ result = "0.1.0"
1479
+ return result
1480
+
1481
+ @staticmethod
1482
+ @GeneralUtilities.check_arguments
1483
+ def is_patch_version(version_string: str) -> bool:
1484
+ return not version_string.endswith(".0")
1485
+
1486
+ @GeneralUtilities.check_arguments
1487
+ def get_version_from_gitversion(self, folder: str, variable: str) -> str:
1488
+ # called twice as workaround for issue 1877 in gitversion ( https://github.com/GitTools/GitVersion/issues/1877 )
1489
+ result = self.run_program_argsasarray("gitversion", ["/showVariable", variable], folder, verbosity=0)
1490
+ result = self.run_program_argsasarray("gitversion", ["/showVariable", variable], folder, verbosity=0)
1491
+ result = GeneralUtilities.strip_new_line_character(result[1])
1492
+
1493
+ return result
1494
+
1495
+ @GeneralUtilities.check_arguments
1496
+ def generate_certificate_authority(self, folder: str, name: str, subj_c: str, subj_st: str, subj_l: str, subj_o: str, subj_ou: str, days_until_expire: int = None, password: str = None) -> None:
1497
+ if days_until_expire is None:
1498
+ days_until_expire = 1825
1499
+ if password is None:
1500
+ password = GeneralUtilities.generate_password()
1501
+ GeneralUtilities.ensure_directory_exists(folder)
1502
+ self.run_program("openssl", f'req -new -newkey ec -pkeyopt ec_paramgen_curve:prime256v1 -days {days_until_expire} -nodes -x509 -subj /C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={name}/OU={subj_ou} -passout pass:{password} -keyout {name}.key -out {name}.crt', folder)
1503
+
1504
+ @GeneralUtilities.check_arguments
1505
+ def generate_certificate(self, folder: str, domain: str, filename: str, subj_c: str, subj_st: str, subj_l: str, subj_o: str, subj_ou: str, days_until_expire: int = None, password: str = None) -> None:
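+ # Note: generates an RSA key, a self-signed certificate and a PFX bundle protected by the (generated or given) password, stores the password in a .password file and writes a SAN config used later for certificate-sign-requests.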
1506
+ if days_until_expire is None:
1507
+ days_until_expire = 397
1508
+ if password is None:
1509
+ password = GeneralUtilities.generate_password()
1510
+ rsa_key_length = 4096
1511
+ self.run_program("openssl", f'genrsa -out {filename}.key {rsa_key_length}', folder)
1512
+ self.run_program("openssl", f'req -new -subj /C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={domain}/OU={subj_ou} -x509 -key {filename}.key -out {filename}.unsigned.crt -days {days_until_expire}', folder)
1513
+ self.run_program("openssl", f'pkcs12 -export -out {filename}.selfsigned.pfx -password pass:{password} -inkey {filename}.key -in {filename}.unsigned.crt', folder)
1514
+ GeneralUtilities.write_text_to_file(os.path.join(folder, f"{filename}.password"), password)
1515
+ GeneralUtilities.write_text_to_file(os.path.join(folder, f"{filename}.san.conf"), f"""[ req ]
1516
+ default_bits = {rsa_key_length}
1517
+ distinguished_name = req_distinguished_name
1518
+ req_extensions = v3_req
1519
+ default_md = sha256
1520
+ dirstring_type = nombstr
1521
+ prompt = no
1522
+
1523
+ [ req_distinguished_name ]
1524
+ countryName = {subj_c}
1525
+ stateOrProvinceName = {subj_st}
1526
+ localityName = {subj_l}
1527
+ organizationName = {subj_o}
1528
+ organizationalUnitName = {subj_ou}
1529
+ commonName = {domain}
1530
+
1531
+ [v3_req]
1532
+ subjectAltName = @subject_alt_name
1533
+
1534
+ [ subject_alt_name ]
1535
+ DNS = {domain}
1536
+ """)
1537
+
1538
+ @GeneralUtilities.check_arguments
1539
+ def generate_certificate_sign_request(self, folder: str, domain: str, filename: str, subj_c: str, subj_st: str, subj_l: str, subj_o: str, subj_ou: str) -> None:
1540
+ self.run_program("openssl", f'req -new -subj /C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={domain}/OU={subj_ou} -key {filename}.key -out {filename}.csr -config {filename}.san.conf', folder)
1541
+
1542
+ @GeneralUtilities.check_arguments
1543
+ def sign_certificate(self, folder: str, ca_folder: str, ca_name: str, domain: str, filename: str, days_until_expire: int = None) -> None:
1544
+ if days_until_expire is None:
1545
+ days_until_expire = 397
1546
+ ca = os.path.join(ca_folder, ca_name)
1547
+ password_file = os.path.join(folder, f"{filename}.password")
1548
+ password = GeneralUtilities.read_text_from_file(password_file)
1549
+ self.run_program("openssl", f'x509 -req -in {filename}.csr -CA {ca}.crt -CAkey {ca}.key -CAcreateserial -CAserial {ca}.srl -out {filename}.crt -days {days_until_expire} -sha256 -extensions v3_req -extfile {filename}.san.conf', folder)
1550
+ self.run_program("openssl", f'pkcs12 -export -out {filename}.pfx -inkey {filename}.key -in {filename}.crt -password pass:{password}', folder)
1551
+
1552
+ @GeneralUtilities.check_arguments
1553
+ def update_dependencies_of_python_in_requirementstxt_file(self, file: str, verbosity: int):
1554
+ lines = GeneralUtilities.read_lines_from_file(file)
1555
+ new_lines = []
1556
+ for line in lines:
1557
+ if GeneralUtilities.string_has_content(line):
1558
+ new_lines.append(self.__get_updated_line_for_python_requirements(line.strip()))
1559
+ GeneralUtilities.write_lines_to_file(file, new_lines)
1560
+
1561
+ @GeneralUtilities.check_arguments
1562
+ def __get_updated_line_for_python_requirements(self, line: str) -> str:
1563
+ if "==" in line or "<" in line:
1564
+ return line
1565
+ elif ">" in line:
1566
+ try:
1567
+ # line is something like "cyclonedx-bom>=2.0.2" and the function must return with the updated version
1568
+ # (something like "cyclonedx-bom>=2.11.0" for example)
1569
+ package = line.split(">")[0]
1570
+ operator = ">=" if ">=" in line else ">"
1571
+ response = requests.get(f'https://pypi.org/pypi/{package}/json', timeout=5)
1572
+ latest_version = response.json()['info']['version']
1573
+ # TODO update only minor- and patch-version
1574
+ # TODO print info if there is a new major-version
1575
+ return package+operator+latest_version
1576
+ except Exception:
1577
+ return line
1578
+ else:
1579
+ raise ValueError(f'Unexpected line in requirements-file: "{line}"')
1580
+
1581
+ @GeneralUtilities.check_arguments
1582
+ def update_dependencies_of_python_in_setupcfg_file(self, setup_cfg_file: str, verbosity: int):
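+ # Note: updates the requirement lines which follow "install_requires =" in the setup.cfg; the block ends at the first line without a version operator (<, = or >).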
1583
+ lines = GeneralUtilities.read_lines_from_file(setup_cfg_file)
1584
+ new_lines = []
1585
+ requirement_parsing_mode = False
1586
+ for line in lines:
1587
+ new_line = line
1588
+ if (requirement_parsing_mode):
1589
+ if ("<" in line or "=" in line or ">" in line):
1590
+ updated_line = f" {self.__get_updated_line_for_python_requirements(line.strip())}"
1591
+ new_line = updated_line
1592
+ else:
1593
+ requirement_parsing_mode = False
1594
+ else:
1595
+ if line.startswith("install_requires ="):
1596
+ requirement_parsing_mode = True
1597
+ new_lines.append(new_line)
1598
+ GeneralUtilities.write_lines_to_file(setup_cfg_file, new_lines)
1599
+
1600
+ @GeneralUtilities.check_arguments
1601
+ def update_dependencies_of_dotnet_project(self, csproj_file: str, verbosity: int, ignored_dependencies: list[str]):
1602
+ folder = os.path.dirname(csproj_file)
1603
+ csproj_filename = os.path.basename(csproj_file)
1604
+ GeneralUtilities.write_message_to_stderr(f"Check for updates in {csproj_filename}")
1605
+ result = self.run_program("dotnet", f"list {csproj_filename} package --outdated", folder)
1606
+ for line in result[1].replace("\r", "").split("\n"):
1607
+ # Relevant output-lines are something like " > NJsonSchema 10.7.0 10.7.0 10.9.0"
1608
+ if ">" in line:
1609
+ package_name = line.replace(">", "").strip().split(" ")[0]
1610
+ if not (package_name in ignored_dependencies):
1611
+ GeneralUtilities.write_message_to_stderr(f"Update package {package_name}")
1612
+ self.run_program("dotnet", f"add {csproj_filename} package {package_name}", folder)
1613
+
1614
+ @GeneralUtilities.check_arguments
1615
+ def create_deb_package(self, toolname: str, binary_folder: str, control_file_content: str, deb_output_folder: str, verbosity: int, permission_of_executable_file_as_octet_triple: int) -> None:
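+ # Note: assembles the .deb manually: "debian-binary", control.tar.gz and data.tar.gz are created in a temporary folder and combined with "ar" into the final archive.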
1616
+
1617
+ # prepare
1618
+ GeneralUtilities.ensure_directory_exists(deb_output_folder)
1619
+ temp_folder = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
1620
+ GeneralUtilities.ensure_directory_exists(temp_folder)
1621
+ bin_folder = binary_folder
1622
+ tool_content_folder_name = toolname+"Content"
1623
+
1624
+ # create folder
1625
+ GeneralUtilities.ensure_directory_exists(temp_folder)
1626
+ control_content_folder_name = "controlcontent"
1627
+ packagecontent_control_folder = os.path.join(temp_folder, control_content_folder_name)
1628
+ GeneralUtilities.ensure_directory_exists(packagecontent_control_folder)
1629
+ data_content_folder_name = "datacontent"
1630
+ packagecontent_data_folder = os.path.join(temp_folder, data_content_folder_name)
1631
+ GeneralUtilities.ensure_directory_exists(packagecontent_data_folder)
1632
+ entireresult_content_folder_name = "entireresultcontent"
1633
+ packagecontent_entireresult_folder = os.path.join(temp_folder, entireresult_content_folder_name)
1634
+ GeneralUtilities.ensure_directory_exists(packagecontent_entireresult_folder)
1635
+
1636
+ # create "debian-binary"-file
1637
+ debianbinary_file = os.path.join(packagecontent_entireresult_folder, "debian-binary")
1638
+ GeneralUtilities.ensure_file_exists(debianbinary_file)
1639
+ GeneralUtilities.write_text_to_file(debianbinary_file, "2.0\n")
1640
+
1641
+ # create control-content
1642
+
1643
+ # conffiles
1644
+ conffiles_file = os.path.join(packagecontent_control_folder, "conffiles")
1645
+ GeneralUtilities.ensure_file_exists(conffiles_file)
1646
+
1647
+ # postinst-script
1648
+ postinst_file = os.path.join(packagecontent_control_folder, "postinst")
1649
+ GeneralUtilities.ensure_file_exists(postinst_file)
1650
+ exe_file = f"/usr/bin/{tool_content_folder_name}/{toolname}"
1651
+ link_file = f"/usr/bin/{toolname.lower()}"
1652
+ permission = str(permission_of_executable_file_as_octet_triple)
1653
+ GeneralUtilities.write_text_to_file(postinst_file, f"""#!/bin/sh
1654
+ ln -s {exe_file} {link_file}
1655
+ chmod {permission} {exe_file}
1656
+ chmod {permission} {link_file}
1657
+ """)
1658
+
1659
+ # control
1660
+ control_file = os.path.join(packagecontent_control_folder, "control")
1661
+ GeneralUtilities.ensure_file_exists(control_file)
1662
+ GeneralUtilities.write_text_to_file(control_file, control_file_content)
1663
+
1664
+ # md5sums
1665
+ md5sums_file = os.path.join(packagecontent_control_folder, "md5sums")
1666
+ GeneralUtilities.ensure_file_exists(md5sums_file)
1667
+
1668
+ # create data-content
1669
+
1670
+ # copy binaries
1671
+ usr_bin_folder = os.path.join(packagecontent_data_folder, "usr/bin")
1672
+ GeneralUtilities.ensure_directory_exists(usr_bin_folder)
1673
+ usr_bin_content_folder = os.path.join(usr_bin_folder, tool_content_folder_name)
1674
+ GeneralUtilities.copy_content_of_folder(bin_folder, usr_bin_content_folder)
1675
+
1676
+ # create debfile
1677
+ deb_filename = f"{toolname}.deb"
1678
+ self.run_program_argsasarray("tar", ["czf", f"../{entireresult_content_folder_name}/control.tar.gz", "*"], packagecontent_control_folder, verbosity=verbosity)
1679
+ self.run_program_argsasarray("tar", ["czf", f"../{entireresult_content_folder_name}/data.tar.gz", "*"], packagecontent_data_folder, verbosity=verbosity)
1680
+ self.run_program_argsasarray("ar", ["r", deb_filename, "debian-binary", "control.tar.gz", "data.tar.gz"], packagecontent_entireresult_folder, verbosity=verbosity)
1681
+ result_file = os.path.join(packagecontent_entireresult_folder, deb_filename)
1682
+ shutil.copy(result_file, os.path.join(deb_output_folder, deb_filename))
1683
+
1684
+ # cleanup
1685
+ GeneralUtilities.ensure_directory_does_not_exist(temp_folder)
1686
+
1687
+ @GeneralUtilities.check_arguments
1688
+ def update_year_in_copyright_tags(self, file: str) -> None:
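+ # Note: replaces the four-digit year inside <Copyright>...</Copyright>-tags with the current year; all other lines are kept unchanged.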
1689
+ current_year = str(datetime.now().year)
1690
+ lines = GeneralUtilities.read_lines_from_file(file)
1691
+ lines_result = []
1692
+ for line in lines:
1693
+ if match := re.search("(.*<[Cc]opyright>.*)\\d\\d\\d\\d(.*<\\/[Cc]opyright>.*)", line):
1694
+ part1 = match.group(1)
1695
+ part2 = match.group(2)
1696
+ adapted = part1+current_year+part2
1697
+ else:
1698
+ adapted = line
1699
+ lines_result.append(adapted)
1700
+ GeneralUtilities.write_lines_to_file(file, lines_result)
1701
+
1702
+ @GeneralUtilities.check_arguments
1703
+ def update_year_in_first_line_of_file(self, file: str) -> None:
1704
+ current_year = str(datetime.now().year)
1705
+ lines = GeneralUtilities.read_lines_from_file(file)
1706
+ lines[0] = re.sub("\\d\\d\\d\\d", current_year, lines[0])
1707
+ GeneralUtilities.write_lines_to_file(file, lines)
1708
+
1709
+ @GeneralUtilities.check_arguments
1710
+ def get_external_ip(self, proxy: str) -> str:
1711
+ information = self.get_externalnetworkinformation_as_json_string(proxy)
1712
+ parsed = json.loads(information)
1713
+ return parsed["ip"]
1714
+
1715
+ @GeneralUtilities.check_arguments
1716
+ def get_country_of_external_ip(self, proxy: str) -> str:
1717
+ information = self.get_externalnetworkinformation_as_json_string(proxy)
1718
+ parsed = json.loads(information)
1719
+ return parsed["country"]
1720
+
1721
+ @GeneralUtilities.check_arguments
1722
+ def get_externalnetworkinformation_as_json_string(self, proxy: str) -> str:
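+ # Note: queries https://ipinfo.io, optionally through the given proxy, and returns the raw JSON response body as a string.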
1723
+ proxies = None
1724
+ if GeneralUtilities.string_has_content(proxy):
1725
+ proxies = {"http": proxy}
1726
+ response = requests.get('https://ipinfo.io', proxies=proxies, timeout=5)
1727
+ network_information_as_json_string = GeneralUtilities.bytes_to_string(
1728
+ response.content)
1729
+ return network_information_as_json_string
1730
+
1731
+ @GeneralUtilities.check_arguments
1732
+ def change_file_extensions(self, folder: str, from_extension: str, to_extension: str, recursive: bool, ignore_case: bool) -> None:
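+ # Note: renames every file in the folder whose extension matches from_extension (optionally case-insensitive) to to_extension; descends into subfolders when recursive is set.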
1733
+ extension_to_compare: str = None
1734
+ if ignore_case:
1735
+ extension_to_compare = from_extension.lower()
1736
+ else:
1737
+ extension_to_compare = from_extension
1738
+ for file in GeneralUtilities.get_direct_files_of_folder(folder):
1739
+ if (ignore_case and file.lower().endswith(f".{extension_to_compare}")) or (not ignore_case and file.endswith(f".{extension_to_compare}")):
1740
+ p = Path(file)
1741
+ p.rename(p.with_suffix('.'+to_extension))
1742
+ if recursive:
1743
+ for subfolder in GeneralUtilities.get_direct_folders_of_folder(folder):
1744
+ self.change_file_extensions(subfolder, from_extension, to_extension, recursive, ignore_case)
1745
+
1746
+ @GeneralUtilities.check_arguments
1747
+ def __add_chapter(self, main_reference_file, reference_content_folder, number: int, chaptertitle: str, content: str = None):
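+ # Note: writes the chapter file and links it from the main reference file; "TXDX" is turned into "TODO" by the "XDX"->"ODO" replacement, presumably to avoid a literal TODO-marker in this source file.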
1748
+ if content is None:
1749
+ content = "TXDX add content here"
1750
+ filename = str(number).zfill(2)+"_"+chaptertitle.replace(' ', '-')
1751
+ file = f"{reference_content_folder}/{filename}.md"
1752
+ full_title = f"{number}. {chaptertitle}"
1753
+
1754
+ GeneralUtilities.append_line_to_file(main_reference_file, f"- [{full_title}](./{filename}.md)")
1755
+
1756
+ GeneralUtilities.ensure_file_exists(file)
1757
+ GeneralUtilities.write_text_to_file(file, f"""# {full_title}
1758
+
1759
+ {content}
1760
+ """.replace("XDX", "ODO"))
1761
+
1762
+ @GeneralUtilities.check_arguments
1763
+ def generate_arc42_reference_template(self, repository: str, productname: str = None, subfolder: str = None):
1764
+ productname: str
1765
+ if productname is None:
1766
+ productname = os.path.basename(repository)
1767
+ if subfolder is None:
1768
+ subfolder = "Other/Resources/Reference"
1769
+ reference_root_folder = f"{repository}/{subfolder}"
1770
+ reference_content_folder = reference_root_folder + "/Technical"
1771
+ if os.path.isdir(reference_root_folder):
1772
+ raise ValueError(f"The folder '{reference_root_folder}' does already exist.")
1773
+ GeneralUtilities.ensure_directory_exists(reference_root_folder)
1774
+ GeneralUtilities.ensure_directory_exists(reference_content_folder)
1775
+ main_reference_file = f"{reference_root_folder}/Reference.md"
1776
+ GeneralUtilities.ensure_file_exists(main_reference_file)
1777
+ GeneralUtilities.write_text_to_file(main_reference_file, f"""# {productname}
1778
+
1779
+ TXDX add minimal service-description here.
1780
+
1781
+ ## Technical documentation
1782
+
1783
+ """.replace("XDX", "ODO"))
1784
+ self.__add_chapter(main_reference_file, reference_content_folder, 1, 'Introduction and Goals', """## Overview
1785
+
1786
+ TXDX
1787
+
1788
+ ## Quality goals
1789
+
1790
+ TXDX
1791
+
1792
+ ## Stakeholder
1793
+
1794
+ | Name | How to contact | Reason |
1795
+ | ---- | -------------- | ------ |""")
1796
+ self.__add_chapter(main_reference_file, reference_content_folder, 2, 'Constraints', """## Technical constraints
1797
+
1798
+ | Constraint-identifier | Constraint | Reason |
1799
+ | --------------------- | ---------- | ------ |
1800
+
1801
+ ## Organizational constraints
1802
+
1803
+ | Constraint-identifier | Constraint | Reason |
1804
+ | --------------------- | ---------- | ------ |""")
1805
+ self.__add_chapter(main_reference_file, reference_content_folder, 3, 'Context and Scope', """## Context
1806
+
1807
+ TXDX
1808
+
1809
+ ## Scope
1810
+
1811
+ TXDX""")
1812
+ self.__add_chapter(main_reference_file, reference_content_folder, 4, 'Solution Strategy', """TXDX""")
1813
+ self.__add_chapter(main_reference_file, reference_content_folder, 5, 'Building Block View', """TXDX""")
1814
+ self.__add_chapter(main_reference_file, reference_content_folder, 6, 'Runtime View', """TXDX""")
1815
+ self.__add_chapter(main_reference_file, reference_content_folder, 7, 'Deployment View', """## Infrastructure-overview
1816
+
1817
+ TXDX
1818
+
1819
+ ## Infrastructure-requirements
1820
+
1821
+ TXDX
1822
+
1823
+ ## Deployment-processes
1824
+
1825
+ TXDX
1826
+ """)
1827
+ self.__add_chapter(main_reference_file, reference_content_folder, 8, 'Crosscutting Concepts', """TXDX""")
1828
+ self.__add_chapter(main_reference_file, reference_content_folder, 9, 'Architectural Decisions', """## Decision-board
1829
+
1830
+ | Decision-identifier | Date | Decision | Reason and notes |
1831
+ | ------------------- | ---- | -------- | ---------------- |""") # empty because there are no decisions yet
1832
+ self.__add_chapter(main_reference_file, reference_content_folder, 10, 'Quality Requirements', """TXDX""")
1833
+ self.__add_chapter(main_reference_file, reference_content_folder, 11, 'Risks and Technical Debt', """## Risks
1834
+
1835
+ Currently there are no known risks.
1836
+
1837
+ ## Technical debts
1838
+
1839
+ Currently there are no technical debts.""")
1840
+ self.__add_chapter(main_reference_file, reference_content_folder, 12, 'Glossary', """## Terms
1841
+
1842
+ | Term | Meaning |
1843
+ | ---- | ------- |
1844
+
1845
+ ## Abbreviations
1846
+
1847
+ | Abbreviation | Meaning |
1848
+ | ------------ | ------- |""")
1849
+
1850
+ GeneralUtilities.append_to_file(main_reference_file, """
1851
+
1852
+ ## Responsibilities
1853
+
1854
+ | Responsibility | Name and contact-information |
1855
+ | --------------- | ---------------------------- |
1856
+ | Product-owner | TXDX |
1857
+ | Product-manager | TXDX |
1858
+ | Support | TXDX |
1859
+
1860
+ ## License & Pricing
1861
+
1862
+ TXDX
1863
+
1864
+ ## External resources
1865
+
1866
+ - [Repository](TXDX)
1867
+ - [Productive-System](TXDX)
1868
+ - [QualityCheck-system](TXDX)
1869
+
1870
+ """)