ScriptCollection 4.0.12-py3-none-any.whl → 4.0.13-py3-none-any.whl
This diff compares two publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registries.
- ScriptCollection/AnionBuildPlatform.py +2 -0
- ScriptCollection/ScriptCollectionCore.py +1 -1
- ScriptCollection/TFCPS/Docker/TFCPS_CodeUnitSpecific_Docker.py +88 -0
- ScriptCollection/TFCPS/Docker/__init__.py +0 -0
- ScriptCollection/TFCPS/DotNet/CertificateGeneratorInformationBase.py +8 -0
- ScriptCollection/TFCPS/DotNet/CertificateGeneratorInformationGenerate.py +6 -0
- ScriptCollection/TFCPS/DotNet/CertificateGeneratorInformationNoGenerate.py +7 -0
- ScriptCollection/TFCPS/DotNet/TFCPS_CodeUnitSpecific_DotNet.py +479 -0
- ScriptCollection/TFCPS/DotNet/__init__.py +0 -0
- ScriptCollection/TFCPS/Flutter/TFCPS_CodeUnitSpecific_Flutter.py +43 -0
- ScriptCollection/TFCPS/Flutter/__init__.py +0 -0
- ScriptCollection/TFCPS/NodeJS/TFCPS_CodeUnitSpecific_NodeJS.py +123 -0
- ScriptCollection/TFCPS/NodeJS/__init__.py +0 -0
- ScriptCollection/TFCPS/Python/TFCPS_CodeUnitSpecific_Python.py +114 -0
- ScriptCollection/TFCPS/Python/__init__.py +0 -0
- ScriptCollection/TFCPS/TFCPS_CodeUnitSpecific_Base.py +417 -0
- ScriptCollection/TFCPS/TFCPS_CodeUnit_BuildCodeUnit.py +120 -0
- ScriptCollection/TFCPS/TFCPS_CodeUnit_BuildCodeUnits.py +80 -0
- ScriptCollection/TFCPS/TFCPS_CreateRelease.py +97 -0
- ScriptCollection/TFCPS/TFCPS_Generic.py +43 -0
- ScriptCollection/TFCPS/TFCPS_MergeToMain.py +125 -0
- ScriptCollection/TFCPS/TFCPS_MergeToStable.py +361 -0
- ScriptCollection/TFCPS/TFCPS_Tools_Dependencies.py +16 -0
- ScriptCollection/TFCPS/TFCPS_Tools_General.py +1076 -0
- ScriptCollection/TFCPS/__init__.py +0 -0
- {scriptcollection-4.0.12.dist-info → scriptcollection-4.0.13.dist-info}/METADATA +1 -1
- scriptcollection-4.0.13.dist-info/RECORD +41 -0
- scriptcollection-4.0.12.dist-info/RECORD +0 -17
- {scriptcollection-4.0.12.dist-info → scriptcollection-4.0.13.dist-info}/WHEEL +0 -0
- {scriptcollection-4.0.12.dist-info → scriptcollection-4.0.13.dist-info}/entry_points.txt +0 -0
- {scriptcollection-4.0.12.dist-info → scriptcollection-4.0.13.dist-info}/top_level.txt +0 -0
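
The listing above shows that 4.0.13 introduces a new TFCPS package (codeunit-specific build support for Docker, DotNet, Flutter, NodeJS and Python, plus release/merge tooling and a large TFCPS_Tools_General helper). As a rough orientation, the sketch below shows how that helper might be instantiated; it is an assumption inferred from the wheel's file layout and from the constructor and get_codeunits signature visible in the diff below, not documented usage.

    # Hypothetical usage sketch (not part of the package's documentation).
    # Import paths are inferred from the file layout listed above.
    from ScriptCollection.ScriptCollectionCore import ScriptCollectionCore
    from ScriptCollection.TFCPS.TFCPS_Tools_General import TFCPS_Tools_General

    sc = ScriptCollectionCore()
    tools = TFCPS_Tools_General(sc)  # __init__ takes a ScriptCollectionCore instance (see diff below)
    # Returns the codeunit names of a repository, topologically sorted by their dependencies:
    codeunits = tools.get_codeunits("/path/to/repository", ignore_disabled_codeunits=True)
    print(codeunits)
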
ScriptCollection/TFCPS/TFCPS_Tools_General.py (new file)
@@ -0,0 +1,1076 @@
|
|
1
|
+
from datetime import datetime,timezone
|
2
|
+
from graphlib import TopologicalSorter
|
3
|
+
import os
|
4
|
+
from pathlib import Path
|
5
|
+
import shutil
|
6
|
+
import zipfile
|
7
|
+
import tarfile
|
8
|
+
import re
|
9
|
+
import sys
|
10
|
+
import traceback
|
11
|
+
import json
|
12
|
+
import tempfile
|
13
|
+
import uuid
|
14
|
+
import urllib.request
|
15
|
+
from packaging import version
|
16
|
+
import requests
|
17
|
+
from lxml import etree
|
18
|
+
from ..GeneralUtilities import GeneralUtilities
|
19
|
+
from ..ScriptCollectionCore import ScriptCollectionCore
|
20
|
+
from ..SCLog import LogLevel
|
21
|
+
from ..ImageUpdater import ImageUpdater, VersionEcholon
|
22
|
+
|
23
|
+
class TFCPS_Tools_General:
|
24
|
+
|
25
|
+
__sc:ScriptCollectionCore=ScriptCollectionCore()
|
26
|
+
|
27
|
+
def __init__(self,sc:ScriptCollectionCore):
|
28
|
+
self.__sc=sc
|
29
|
+
|
30
|
+
@GeneralUtilities.check_arguments
|
31
|
+
def codeunit_is_enabled(self, codeunit_file: str) -> bool:
|
32
|
+
root: etree._ElementTree = etree.parse(codeunit_file)
|
33
|
+
return GeneralUtilities.string_to_boolean(str(root.xpath('//cps:codeunit/@enabled', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0]))
|
34
|
+
|
35
|
+
@GeneralUtilities.check_arguments
|
36
|
+
def ensure_cyclonedxcli_is_available(self, target_folder: str,enforce_update:bool) -> None:
|
37
|
+
if shutil.which("cyclonedx-cli") is None:
|
38
|
+
local_filename = "cyclonedx-cli"
|
39
|
+
filename_on_github: str
|
40
|
+
if GeneralUtilities.current_system_is_windows():
|
41
|
+
filename_on_github = "cyclonedx-win-x64.exe"
|
42
|
+
local_filename = local_filename+".exe"
|
43
|
+
else:
|
44
|
+
filename_on_github = "cyclonedx-linux-x64"
|
45
|
+
self.ensure_file_from_github_assets_is_available_with_retry(target_folder, "CycloneDX", "cyclonedx-cli", "CycloneDXCLI", local_filename, lambda latest_version: filename_on_github,enforce_update=False)
|
46
|
+
@GeneralUtilities.check_arguments
|
47
|
+
def ensure_file_from_github_assets_is_available_with_retry(self, target_folder: str, githubuser: str, githubprojectname: str, resource_name: str, local_filename: str, get_filename_on_github, amount_of_attempts: int = 5,enforce_update:bool=False) -> None:
|
48
|
+
GeneralUtilities.retry_action(lambda: self.ensure_file_from_github_assets_is_available(target_folder, githubuser, githubprojectname, resource_name, local_filename, get_filename_on_github,enforce_update), amount_of_attempts)
|
49
|
+
|
50
|
+
@GeneralUtilities.check_arguments
|
51
|
+
def ensure_file_from_github_assets_is_available(self, target_folder: str, githubuser: str, githubprojectname: str, resource_name: str, local_filename: str, get_filename_on_github,enforce_update:bool) -> None:
|
52
|
+
resource_folder = os.path.join(target_folder, "Other", "Resources", resource_name)
|
53
|
+
internet_connection_is_available = GeneralUtilities.internet_connection_is_available()
|
54
|
+
file = f"{resource_folder}/{local_filename}"
|
55
|
+
file_exists = os.path.isfile(file)
|
56
|
+
if internet_connection_is_available: # Load/Update
|
57
|
+
try:
|
58
|
+
if enforce_update or not file_exists:
|
59
|
+
self.__sc.log.log(f"Download Asset \"{githubuser}/{githubprojectname}: {resource_name}\" from GitHub...", LogLevel.Debug)
|
60
|
+
GeneralUtilities.ensure_directory_does_not_exist(resource_folder)
|
61
|
+
GeneralUtilities.ensure_directory_exists(resource_folder)
|
62
|
+
headers = {'Cache-Control': 'no-cache', 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.5845.96 Safari/537.36'}
|
63
|
+
self.__add_github_api_key_if_available(headers)
|
64
|
+
url = f"https://api.github.com/repos/{githubuser}/{githubprojectname}/releases/latest"
|
65
|
+
self.__sc.log.log(f"Download \"{url}\"...", LogLevel.Debug)
|
66
|
+
response = requests.get(url, headers=headers, allow_redirects=True, timeout=(10, 10))
|
67
|
+
latest_version = response.json()["tag_name"]
|
68
|
+
filename_on_github = get_filename_on_github(latest_version)
|
69
|
+
link = f"https://github.com/{githubuser}/{githubprojectname}/releases/download/{latest_version}/{filename_on_github}"
|
70
|
+
with requests.get(link, headers=headers, stream=True, allow_redirects=True, timeout=(5, 300)) as r:
|
71
|
+
r.raise_for_status()
|
72
|
+
total_size = int(r.headers.get("Content-Length", 0))
|
73
|
+
downloaded = 0
|
74
|
+
with open(file, "wb") as f:
|
75
|
+
for chunk in r.iter_content(chunk_size=8192):
|
76
|
+
f.write(chunk)
|
77
|
+
show_progress: bool = False
|
78
|
+
if show_progress:
|
79
|
+
downloaded += len(chunk)
|
80
|
+
if total_size:
|
81
|
+
percent = downloaded / total_size * 100
|
82
|
+
sys.stdout.write(f"\rDownload: {percent:.2f}%")
|
83
|
+
sys.stdout.flush()
|
84
|
+
self.__sc.log.log(f"Downloaded \"{url}\".", LogLevel.Diagnostic)
|
85
|
+
except Exception as e:
|
86
|
+
if file_exists:
|
87
|
+
self.__sc.log.log_exception(f"Can not update {resource_name}", e,traceback,LogLevel.Warning)
|
88
|
+
else:
|
89
|
+
raise
|
90
|
+
else:
|
91
|
+
if file_exists:
|
92
|
+
self.__sc.log.log(f"Can not check for updates of {resource_name} due to missing internet-connection.", LogLevel.Warning)
|
93
|
+
else:
|
94
|
+
raise ValueError(f"Can not download {resource_name}.")
|
95
|
+
|
96
|
+
def __add_github_api_key_if_available(self, headers: dict):
|
97
|
+
token = os.getenv("GITHUB_TOKEN")
|
98
|
+
if token is not None:
|
99
|
+
headers["Authorization"] = f"Bearer {token}"
|
100
|
+
else:
|
101
|
+
user_folder = str(Path.home())
|
102
|
+
github_token_file: str = str(os.path.join(user_folder, ".github", "token.txt"))
|
103
|
+
if os.path.isfile(github_token_file):
|
104
|
+
token = GeneralUtilities.read_text_from_file(github_token_file)
|
105
|
+
headers["Authorization"] = f"Bearer {token}"
|
106
|
+
return headers
|
107
|
+
|
108
|
+
|
109
|
+
@GeneralUtilities.check_arguments
|
110
|
+
def is_codeunit_folder(self, codeunit_folder: str) -> bool:
|
111
|
+
repo_folder = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
|
112
|
+
if not self.__sc.is_git_repository(repo_folder):
|
113
|
+
return False
|
114
|
+
codeunit_name = os.path.basename(codeunit_folder)
|
115
|
+
codeunit_file: str = os.path.join(codeunit_folder, f"{codeunit_name}.codeunit.xml")
|
116
|
+
if not os.path.isfile(codeunit_file):
|
117
|
+
return False
|
118
|
+
return True
|
119
|
+
|
120
|
+
@GeneralUtilities.check_arguments
|
121
|
+
def assert_is_codeunit_folder(self, codeunit_folder: str) -> str:
|
122
|
+
repo_folder = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
|
123
|
+
if not self.__sc.is_git_repository(repo_folder):
|
124
|
+
raise ValueError(f"'{codeunit_folder}' can not be a valid codeunit-folder because '{repo_folder}' is not a git-repository.")
|
125
|
+
codeunit_name = os.path.basename(codeunit_folder)
|
126
|
+
codeunit_file: str = os.path.join(codeunit_folder, f"{codeunit_name}.codeunit.xml")
|
127
|
+
if not os.path.isfile(codeunit_file):
|
128
|
+
raise ValueError(f"'{codeunit_folder}' is no codeunit-folder because '{codeunit_file}' does not exist.")
|
129
|
+
|
130
|
+
@GeneralUtilities.check_arguments
|
131
|
+
def get_codeunits(self, repository_folder: str, ignore_disabled_codeunits: bool = True) -> list[str]:
|
132
|
+
codeunits_with_dependent_codeunits: dict[str, set[str]] = dict[str, set[str]]()
|
133
|
+
subfolders = GeneralUtilities.get_direct_folders_of_folder(repository_folder)
|
134
|
+
for subfolder in subfolders:
|
135
|
+
codeunit_name: str = os.path.basename(subfolder)
|
136
|
+
codeunit_file = os.path.join(subfolder, f"{codeunit_name}.codeunit.xml")
|
137
|
+
if os.path.exists(codeunit_file):
|
138
|
+
if ignore_disabled_codeunits and not self.codeunit_is_enabled(codeunit_file):
|
139
|
+
continue
|
140
|
+
codeunits_with_dependent_codeunits[codeunit_name] = self.get_dependent_code_units(codeunit_file)
|
141
|
+
sorted_codeunits = self._internal_get_sorted_codeunits_by_dict(codeunits_with_dependent_codeunits)
|
142
|
+
#TODO show warning somehow for enabled codeunits which depends on ignored codeunits
|
143
|
+
return sorted_codeunits
|
144
|
+
|
145
|
+
@GeneralUtilities.check_arguments
|
146
|
+
def repository_has_codeunits(self, repository: str, ignore_disabled_codeunits: bool = True) -> bool:
|
147
|
+
return 0<len(self.get_codeunits(repository, ignore_disabled_codeunits))
|
148
|
+
|
149
|
+
@GeneralUtilities.check_arguments
|
150
|
+
def get_dependent_code_units(self, codeunit_file: str) -> list[str]:
|
151
|
+
root: etree._ElementTree = etree.parse(codeunit_file)
|
152
|
+
result = set(root.xpath('//cps:dependentcodeunit/text()', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'}))
|
153
|
+
result = sorted(result)
|
154
|
+
return result
|
155
|
+
|
156
|
+
@GeneralUtilities.check_arguments
|
157
|
+
def _internal_get_sorted_codeunits_by_dict(self, codeunits: dict[str, set[str]]) -> list[str]:
|
158
|
+
sorted_codeunits = {
|
159
|
+
node: sorted(codeunits[node])
|
160
|
+
for node in sorted(codeunits)
|
161
|
+
}
|
162
|
+
|
163
|
+
ts = TopologicalSorter()
|
164
|
+
for node, deps in sorted_codeunits.items():
|
165
|
+
ts.add(node, *deps)
|
166
|
+
|
167
|
+
result_typed = list(ts.static_order())
|
168
|
+
result = [str(item) for item in result_typed]
|
169
|
+
return result
|
170
|
+
|
171
|
+
@GeneralUtilities.check_arguments
|
172
|
+
def get_unsupported_versions(self, repository_folder: str, moment: datetime) -> list[tuple[str, datetime, datetime]]:
|
173
|
+
self.__sc.assert_is_git_repository(repository_folder)
|
174
|
+
result: list[tuple[str, datetime, datetime]] = list[tuple[str, datetime, datetime]]()
|
175
|
+
for entry in self.get_versions(repository_folder):
|
176
|
+
if not (entry[1] <= moment and moment <= entry[2]):
|
177
|
+
result.append(entry)
|
178
|
+
return result
|
179
|
+
|
180
|
+
|
181
|
+
@GeneralUtilities.check_arguments
|
182
|
+
def get_versions(self, repository_folder: str) -> list[tuple[str, datetime, datetime]]:
|
183
|
+
self.__sc.assert_is_git_repository(repository_folder)
|
184
|
+
folder = os.path.join(repository_folder, "Other", "Resources", "Support")
|
185
|
+
file = os.path.join(folder, "InformationAboutSupportedVersions.csv")
|
186
|
+
result: list[(str, datetime, datetime)] = list[(str, datetime, datetime)]()
|
187
|
+
if not os.path.isfile(file):
|
188
|
+
return result
|
189
|
+
entries = GeneralUtilities.read_csv_file(file, True)
|
190
|
+
for entry in entries:
|
191
|
+
d1 = GeneralUtilities.string_to_datetime(entry[1])
|
192
|
+
if d1.tzinfo is None:
|
193
|
+
d1 = d1.replace(tzinfo=timezone.utc)
|
194
|
+
d2 = GeneralUtilities.string_to_datetime(entry[2])
|
195
|
+
if d2.tzinfo is None:
|
196
|
+
d2 = d2.replace(tzinfo=timezone.utc)
|
197
|
+
result.append((entry[0], d1, d2))
|
198
|
+
return result
|
199
|
+
|
200
|
+
@GeneralUtilities.check_arguments
|
201
|
+
def dependent_codeunit_exists(self, repository: str, codeunit: str) -> None:
|
202
|
+
codeunit_file = f"{repository}/{codeunit}/{codeunit}.codeunit.xml"
|
203
|
+
return os.path.isfile(codeunit_file)
|
204
|
+
|
205
|
+
@GeneralUtilities.check_arguments
|
206
|
+
def get_all_authors_and_committers_of_repository(self, repository_folder: str, subfolder: str = None) -> list[tuple[str, str]]:
|
207
|
+
self.__sc.is_git_or_bare_git_repository(repository_folder)
|
208
|
+
space_character = "_"
|
209
|
+
if subfolder is None:
|
210
|
+
subfolder_argument = GeneralUtilities.empty_string
|
211
|
+
else:
|
212
|
+
subfolder_argument = f" -- {subfolder}"
|
213
|
+
log_result = self.__sc.run_program("git", f'log --pretty=%aN{space_character}%aE%n%cN{space_character}%cE HEAD{subfolder_argument}', repository_folder)
|
214
|
+
plain_content: list[str] = list(
|
215
|
+
set([line for line in log_result[1].split("\n") if len(line) > 0]))
|
216
|
+
result: list[tuple[str, str]] = []
|
217
|
+
for item in plain_content:
|
218
|
+
if len(re.findall(space_character, item)) == 1:
|
219
|
+
splitted = item.split(space_character)
|
220
|
+
result.append((splitted[0], splitted[1]))
|
221
|
+
else:
|
222
|
+
raise ValueError(f'Unexpected author: "{item}"')
|
223
|
+
return result
|
224
|
+
|
225
|
+
@GeneralUtilities.check_arguments
|
226
|
+
def copy_artifacts_from_dependent_code_units(self, repo_folder: str, codeunit_name: str) -> None:
|
227
|
+
codeunit_file = os.path.join(repo_folder, codeunit_name, codeunit_name + ".codeunit.xml")
|
228
|
+
dependent_codeunits = self.get_dependent_code_units(codeunit_file)
|
229
|
+
if len(dependent_codeunits) > 0:
|
230
|
+
self.__sc.log.log(f"Get dependent artifacts for codeunit {codeunit_name}.")
|
231
|
+
dependent_codeunits_folder = os.path.join(repo_folder, codeunit_name, "Other", "Resources", "DependentCodeUnits")
|
232
|
+
GeneralUtilities.ensure_directory_does_not_exist(dependent_codeunits_folder)
|
233
|
+
for dependent_codeunit in dependent_codeunits:
|
234
|
+
target_folder = os.path.join(dependent_codeunits_folder, dependent_codeunit)
|
235
|
+
GeneralUtilities.ensure_directory_does_not_exist(target_folder)
|
236
|
+
other_folder = os.path.join(repo_folder, dependent_codeunit, "Other")
|
237
|
+
artifacts_folder = os.path.join(other_folder, "Artifacts")
|
238
|
+
shutil.copytree(artifacts_folder, target_folder)
|
239
|
+
|
240
|
+
|
241
|
+
@GeneralUtilities.check_arguments
|
242
|
+
def write_version_to_codeunit_file(self, codeunit_file: str, current_version: str) -> None:
|
243
|
+
versionregex = "\\d+\\.\\d+\\.\\d+"
|
244
|
+
versiononlyregex = f"^{versionregex}$"
|
245
|
+
pattern = re.compile(versiononlyregex)
|
246
|
+
if pattern.match(current_version):
|
247
|
+
GeneralUtilities.write_text_to_file(codeunit_file, re.sub(f"<cps:version>{versionregex}<\\/cps:version>", f"<cps:version>{current_version}</cps:version>", GeneralUtilities.read_text_from_file(codeunit_file)))
|
248
|
+
else:
|
249
|
+
raise ValueError(f"Version '{current_version}' does not match version-regex '{versiononlyregex}'.")
|
250
|
+
|
251
|
+
@GeneralUtilities.check_arguments
|
252
|
+
def set_default_constants(self, codeunit_folder: str) -> None:
|
253
|
+
self.assert_is_codeunit_folder(codeunit_folder)
|
254
|
+
self.set_constant_for_commitid(codeunit_folder)
|
255
|
+
self.set_constant_for_commitdate(codeunit_folder)
|
256
|
+
self.set_constant_for_codeunitname(codeunit_folder)
|
257
|
+
self.set_constant_for_codeunitversion(codeunit_folder)
|
258
|
+
self.set_constant_for_codeunitmajorversion(codeunit_folder)
|
259
|
+
self.set_constant_for_description(codeunit_folder)
|
260
|
+
|
261
|
+
@GeneralUtilities.check_arguments
|
262
|
+
def set_constant_for_commitid(self, codeunit_folder: str) -> None:
|
263
|
+
self.assert_is_codeunit_folder(codeunit_folder)
|
264
|
+
repository = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
|
265
|
+
commit_id = self.__sc.git_get_commit_id(repository)
|
266
|
+
self.set_constant(codeunit_folder, "CommitId", commit_id)
|
267
|
+
|
268
|
+
@GeneralUtilities.check_arguments
|
269
|
+
def set_constant_for_commitdate(self, codeunit_folder: str) -> None:
|
270
|
+
self.assert_is_codeunit_folder(codeunit_folder)
|
271
|
+
repository = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
|
272
|
+
commit_date: datetime = self.__sc.git_get_commit_date(repository)
|
273
|
+
self.set_constant(codeunit_folder, "CommitDate", GeneralUtilities.datetime_to_string(commit_date))
|
274
|
+
|
275
|
+
@GeneralUtilities.check_arguments
|
276
|
+
def set_constant_for_codeunitname(self, codeunit_folder: str) -> None:
|
277
|
+
self.assert_is_codeunit_folder(codeunit_folder)
|
278
|
+
codeunit_name: str = os.path.basename(codeunit_folder)
|
279
|
+
self.set_constant(codeunit_folder, "CodeUnitName", codeunit_name)
|
280
|
+
|
281
|
+
@GeneralUtilities.check_arguments
|
282
|
+
def set_constant_for_codeunitversion(self, codeunit_folder: str) -> None:
|
283
|
+
self.assert_is_codeunit_folder(codeunit_folder)
|
284
|
+
codeunit_version: str = self.get_version_of_codeunit(os.path.join(codeunit_folder,f"{os.path.basename(codeunit_folder)}.codeunit.xml"))
|
285
|
+
self.set_constant(codeunit_folder, "CodeUnitVersion", codeunit_version)
|
286
|
+
|
287
|
+
@GeneralUtilities.check_arguments
|
288
|
+
def set_constant_for_codeunitmajorversion(self, codeunit_folder: str) -> None:
|
289
|
+
self.assert_is_codeunit_folder(codeunit_folder)
|
290
|
+
major_version = int(self.get_version_of_codeunit(os.path.join(codeunit_folder,f"{os.path.basename(codeunit_folder)}.codeunit.xml")).split(".")[0])
|
291
|
+
self.set_constant(codeunit_folder, "CodeUnitMajorVersion", str(major_version))
|
292
|
+
|
293
|
+
|
294
|
+
@GeneralUtilities.check_arguments
|
295
|
+
def get_version_of_codeunit(self,codeunit_file:str) -> None:
|
296
|
+
codeunit_file_content:str=GeneralUtilities.read_text_from_file(codeunit_file)
|
297
|
+
return self.get_version_of_codeunit_filecontent(codeunit_file_content)
|
298
|
+
|
299
|
+
@GeneralUtilities.check_arguments
|
300
|
+
def get_version_of_codeunit_filecontent(self,file_content:str) -> None:
|
301
|
+
root: etree._ElementTree = etree.fromstring(file_content.encode("utf-8"))
|
302
|
+
result = str(root.xpath('//cps:version/text()', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0])
|
303
|
+
return result
|
304
|
+
|
305
|
+
@GeneralUtilities.check_arguments
|
306
|
+
def set_constant_for_description(self, codeunit_folder: str) -> None:
|
307
|
+
self.assert_is_codeunit_folder(codeunit_folder)
|
308
|
+
codeunit_file:str=os.path.join(codeunit_folder,f"{os.path.basename(codeunit_folder)}.codeunit.xml")
|
309
|
+
codeunit_description: str = self.get_codeunit_description(codeunit_file)
|
310
|
+
self.set_constant(codeunit_folder, "CodeUnitDescription", codeunit_description)
|
311
|
+
|
312
|
+
@GeneralUtilities.check_arguments
|
313
|
+
def get_codeunit_description(self,codeunit_file:str) -> bool:
|
314
|
+
root: etree._ElementTree = etree.parse(codeunit_file)
|
315
|
+
return str(root.xpath('//cps:properties/@description', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0])
|
316
|
+
|
317
|
+
@GeneralUtilities.check_arguments
|
318
|
+
def set_constant(self, codeunit_folder: str, constantname: str, constant_value: str, documentationsummary: str = None, constants_valuefile: str = None) -> None:
|
319
|
+
self.assert_is_codeunit_folder(codeunit_folder)
|
320
|
+
if documentationsummary is None:
|
321
|
+
documentationsummary = GeneralUtilities.empty_string
|
322
|
+
constants_folder = os.path.join(codeunit_folder, "Other", "Resources", "Constants")
|
323
|
+
GeneralUtilities.ensure_directory_exists(constants_folder)
|
324
|
+
constants_metafile = os.path.join(constants_folder, f"{constantname}.constant.xml")
|
325
|
+
if constants_valuefile is None:
|
326
|
+
constants_valuefile_folder = constants_folder
|
327
|
+
constants_valuefile_name = f"{constantname}.value.txt"
|
328
|
+
constants_valuefiler_reference = f"./{constants_valuefile_name}"
|
329
|
+
else:
|
330
|
+
constants_valuefile_folder = os.path.dirname(constants_valuefile)
|
331
|
+
constants_valuefile_name = os.path.basename(constants_valuefile)
|
332
|
+
constants_valuefiler_reference = os.path.join(constants_valuefile_folder, constants_valuefile_name)
|
333
|
+
|
334
|
+
# TODO implement usage of self.reference_latest_version_of_xsd_when_generating_xml
|
335
|
+
GeneralUtilities.write_text_to_file(constants_metafile, f"""<?xml version="1.0" encoding="UTF-8" ?>
|
336
|
+
<cps:constant xmlns:cps="https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure" constantspecificationversion="1.1.0"
|
337
|
+
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/raw/main/Conventions/RepositoryStructure/CommonProjectStructure/constant.xsd">
|
338
|
+
<cps:name>{constantname}</cps:name>
|
339
|
+
<cps:documentationsummary>{documentationsummary}</cps:documentationsummary>
|
340
|
+
<cps:path>{constants_valuefiler_reference}</cps:path>
|
341
|
+
</cps:constant>""")
|
342
|
+
# TODO validate generated xml against xsd
|
343
|
+
GeneralUtilities.write_text_to_file(os.path.join(constants_valuefile_folder, constants_valuefile_name), constant_value)
|
344
|
+
|
345
|
+
@GeneralUtilities.check_arguments
|
346
|
+
def get_constant_value(self, source_codeunit_folder: str, constant_name: str) -> str:
|
347
|
+
self.assert_is_codeunit_folder(source_codeunit_folder)
|
348
|
+
value_file_relative = self.__get_constant_helper(source_codeunit_folder, constant_name, "path")
|
349
|
+
value_file = GeneralUtilities.resolve_relative_path(value_file_relative, os.path.join(source_codeunit_folder, "Other", "Resources", "Constants"))
|
350
|
+
return GeneralUtilities.read_text_from_file(value_file)
|
351
|
+
|
352
|
+
@GeneralUtilities.check_arguments
|
353
|
+
def get_constant_documentation(self, source_codeunit_folder: str, constant_name: str) -> str:
|
354
|
+
self.assert_is_codeunit_folder(source_codeunit_folder)
|
355
|
+
return self.__get_constant_helper(source_codeunit_folder, constant_name, "documentationsummary")
|
356
|
+
|
357
|
+
@GeneralUtilities.check_arguments
|
358
|
+
def __get_constant_helper(self, source_codeunit_folder: str, constant_name: str, propertyname: str) -> str:
|
359
|
+
self.assert_is_codeunit_folder(source_codeunit_folder)
|
360
|
+
root: etree._ElementTree = etree.parse(os.path.join(source_codeunit_folder, "Other", "Resources", "Constants", f"{constant_name}.constant.xml"))
|
361
|
+
results = root.xpath(f'//cps:{propertyname}/text()', namespaces={
|
362
|
+
'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'
|
363
|
+
})
|
364
|
+
length = len(results)
|
365
|
+
if (length == 0):
|
366
|
+
return ""
|
367
|
+
elif length == 1:
|
368
|
+
return results[0]
|
369
|
+
else:
|
370
|
+
raise ValueError("Too many results found.")
|
371
|
+
|
372
|
+
@GeneralUtilities.check_arguments
|
373
|
+
def copy_licence_file(self, codeunit_folder: str) -> None:
|
374
|
+
folder_of_current_file = os.path.join(codeunit_folder,"Other")
|
375
|
+
license_file = GeneralUtilities.resolve_relative_path("../../License.txt", folder_of_current_file)
|
376
|
+
target_folder = GeneralUtilities.resolve_relative_path("Artifacts/License", folder_of_current_file)
|
377
|
+
GeneralUtilities.ensure_directory_exists(target_folder)
|
378
|
+
shutil.copy(license_file, target_folder)
|
379
|
+
|
380
|
+
@GeneralUtilities.check_arguments
|
381
|
+
def generate_diff_report(self, repository_folder: str, codeunit_name: str, current_version: str) -> None:
|
382
|
+
self.__sc.assert_is_git_repository(repository_folder)
|
383
|
+
codeunit_folder = os.path.join(repository_folder, codeunit_name)
|
384
|
+
target_folder = GeneralUtilities.resolve_relative_path("Other/Artifacts/DiffReport", codeunit_folder)
|
385
|
+
GeneralUtilities.ensure_directory_does_not_exist(target_folder)
|
386
|
+
GeneralUtilities.ensure_directory_exists(target_folder)
|
387
|
+
target_file_light = os.path.join(target_folder, "DiffReport.html").replace("\\", "/")
|
388
|
+
target_file_dark = os.path.join(target_folder, "DiffReportDark.html").replace("\\", "/")
|
389
|
+
src = "4b825dc642cb6eb9a060e54bf8d69288fbee4904" # hash/id of empty git-tree
|
390
|
+
src_prefix = "Begin"
|
391
|
+
if self.__sc.get_current_git_branch_has_tag(repository_folder):
|
392
|
+
latest_tag = self.__sc.get_latest_git_tag(repository_folder)
|
393
|
+
src = self.__sc.git_get_commitid_of_tag(repository_folder, latest_tag)
|
394
|
+
src_prefix = latest_tag
|
395
|
+
dst = "HEAD"
|
396
|
+
dst_prefix = f"v{current_version}"
|
397
|
+
|
398
|
+
temp_file = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
|
399
|
+
try:
|
400
|
+
GeneralUtilities.ensure_file_does_not_exist(temp_file)
|
401
|
+
GeneralUtilities.write_text_to_file(temp_file, self.__sc.run_program("git", f'--no-pager diff --src-prefix={src_prefix}/ --dst-prefix={dst_prefix}/ {src} {dst} -- {codeunit_name}', repository_folder)[1])
|
402
|
+
styles:dict[str,str]={
|
403
|
+
"default":target_file_light,
|
404
|
+
"github-dark":target_file_dark
|
405
|
+
}
|
406
|
+
for style,target_file in styles.items():
|
407
|
+
self.__sc.run_program_argsasarray("pygmentize", ['-l', 'diff', '-f', 'html', '-O', 'full', '-o', target_file, '-P', f'style={style}', temp_file], repository_folder)
|
408
|
+
finally:
|
409
|
+
GeneralUtilities.ensure_file_does_not_exist(temp_file)
|
410
|
+
|
411
|
+
@GeneralUtilities.check_arguments
|
412
|
+
def get_version_of_project(self,repositoryfolder:str) -> str:
|
413
|
+
self.__sc.assert_is_git_repository(repositoryfolder)
|
414
|
+
return self.__sc.get_semver_version_from_gitversion(repositoryfolder)
|
415
|
+
|
416
|
+
@GeneralUtilities.check_arguments
|
417
|
+
def create_changelog_entry(self, repositoryfolder: str, message: str, commit: bool, force: bool):
|
418
|
+
self.__sc.assert_is_git_repository(repositoryfolder)
|
419
|
+
random_file = os.path.join(repositoryfolder, str(uuid.uuid4()))
|
420
|
+
if force and not self.__sc.git_repository_has_uncommitted_changes(repositoryfolder):
|
421
|
+
GeneralUtilities.ensure_file_exists(random_file)
|
422
|
+
current_version = self.get_version_of_project(repositoryfolder)
|
423
|
+
changelog_file = os.path.join(repositoryfolder, "Other", "Resources", "Changelog", f"v{current_version}.md")
|
424
|
+
if os.path.isfile(changelog_file):
|
425
|
+
self.__sc.log.log(f"Changelog-file '{changelog_file}' already exists.")
|
426
|
+
else:
|
427
|
+
GeneralUtilities.ensure_file_exists(changelog_file)
|
428
|
+
GeneralUtilities.write_text_to_file(changelog_file, f"""# Release notes
|
429
|
+
|
430
|
+
## Changes
|
431
|
+
|
432
|
+
- {message}
|
433
|
+
""")
|
434
|
+
GeneralUtilities.ensure_file_does_not_exist(random_file)
|
435
|
+
if commit:
|
436
|
+
self.__sc.git_commit(repositoryfolder, f"Added changelog-file for v{current_version}.")
|
437
|
+
|
438
|
+
@GeneralUtilities.check_arguments
|
439
|
+
def merge_sbom_file_from_dependent_codeunit_into_this(self,codeunit_folder: str, codeunitname:str,dependent_codeunit_name: str,use_cache:bool) -> None:
|
440
|
+
repository_folder = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
|
441
|
+
dependent_codeunit_folder = os.path.join(repository_folder, dependent_codeunit_name).replace("\\", "/")
|
442
|
+
codeunit_file:str=os.path.join(codeunit_folder,f"{codeunitname}.codeunit.xml")
|
443
|
+
dependent_codeunit_file:str=os.path.join(dependent_codeunit_folder,f"{dependent_codeunit_name}.codeunit.xml")
|
444
|
+
sbom_file = f"{repository_folder}/{codeunitname}/Other/Artifacts/BOM/{codeunitname}.{self.get_version_of_codeunit(codeunit_file)}.sbom.xml"
|
445
|
+
dependent_sbom_file = f"{repository_folder}/{dependent_codeunit_name}/Other/Artifacts/BOM/{dependent_codeunit_name}.{self.get_version_of_codeunit(dependent_codeunit_file)}.sbom.xml"
|
446
|
+
self.merge_sbom_file(repository_folder, dependent_sbom_file, sbom_file,use_cache)
|
447
|
+
|
448
|
+
@GeneralUtilities.check_arguments
|
449
|
+
def merge_sbom_file(self, repository_folder: str, source_sbom_file_relative: str, target_sbom_file_relative: str,use_cache:bool) -> None:
|
450
|
+
GeneralUtilities.assert_file_exists(os.path.join(repository_folder, source_sbom_file_relative))
|
451
|
+
GeneralUtilities.assert_file_exists(os.path.join(repository_folder, target_sbom_file_relative))
|
452
|
+
target_original_sbom_file_relative = os.path.dirname(target_sbom_file_relative)+"/"+os.path.basename(target_sbom_file_relative)+".original.xml"
|
453
|
+
os.rename(os.path.join(repository_folder, target_sbom_file_relative), os.path.join(repository_folder, target_original_sbom_file_relative))
|
454
|
+
|
455
|
+
self.ensure_cyclonedxcli_is_available(repository_folder,not use_cache)
|
456
|
+
cyclonedx_exe ="cyclonedx-cli"# os.path.join(repository_folder, "Other/Resources/CycloneDXCLI/cyclonedx-cli")
|
457
|
+
#if GeneralUtilities.current_system_is_windows():
|
458
|
+
# cyclonedx_exe = cyclonedx_exe+".exe"
|
459
|
+
self.__sc.run_program(cyclonedx_exe, f"merge --input-files {source_sbom_file_relative} {target_original_sbom_file_relative} --output-file {target_sbom_file_relative}", repository_folder)
|
460
|
+
GeneralUtilities.ensure_file_does_not_exist(os.path.join(repository_folder, target_original_sbom_file_relative))
|
461
|
+
self.__sc.format_xml_file(os.path.join(repository_folder, target_sbom_file_relative))
|
462
|
+
|
463
|
+
@GeneralUtilities.check_arguments
|
464
|
+
def codeunit_has_testable_sourcecode(self,codeunit_file:str) -> bool:
|
465
|
+
self.assert_is_codeunit_folder(os.path.dirname(codeunit_file))
|
466
|
+
root: etree._ElementTree = etree.parse(codeunit_file)
|
467
|
+
return GeneralUtilities.string_to_boolean(str(root.xpath('//cps:properties/@codeunithastestablesourcecode', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0]))
|
468
|
+
|
469
|
+
@GeneralUtilities.check_arguments
|
470
|
+
def codeunit_has_updatable_dependencies(self,codeunit_file:str) -> bool:
|
471
|
+
self.assert_is_codeunit_folder(os.path.dirname(codeunit_file))
|
472
|
+
root: etree._ElementTree = etree.parse(codeunit_file)
|
473
|
+
return GeneralUtilities.string_to_boolean(str(root.xpath('//cps:properties/@codeunithasupdatabledependencies', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0]))
|
474
|
+
|
475
|
+
@GeneralUtilities.check_arguments
|
476
|
+
def get_codeunit_owner_emailaddress(self,codeunit_file:str) -> None:
|
477
|
+
self.assert_is_codeunit_folder(os.path.dirname(codeunit_file))
|
478
|
+
namespaces = {'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure', 'xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
|
479
|
+
root: etree._ElementTree = etree.parse(codeunit_file)
|
480
|
+
result = root.xpath('//cps:codeunit/cps:codeunitowneremailaddress/text()', namespaces=namespaces)[0]
|
481
|
+
return result
|
482
|
+
|
483
|
+
@GeneralUtilities.check_arguments
|
484
|
+
def get_codeunit_owner_name(self,codeunit_file:str) -> None:
|
485
|
+
self.assert_is_codeunit_folder(os.path.dirname(codeunit_file))
|
486
|
+
namespaces = {'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure', 'xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
|
487
|
+
root: etree._ElementTree = etree.parse(codeunit_file)
|
488
|
+
result = root.xpath('//cps:codeunit/cps:codeunitownername/text()', namespaces=namespaces)[0]
|
489
|
+
return result
|
490
|
+
|
491
|
+
@GeneralUtilities.check_arguments
|
492
|
+
def generate_svg_files_from_plantuml_files_for_repository(self, repository_folder: str,use_cache:bool) -> None:
|
493
|
+
self.__sc.log.log("Generate svg-files from plantuml-files...")
|
494
|
+
self.__sc.assert_is_git_repository(repository_folder)
|
495
|
+
self.ensure_plantuml_is_available(repository_folder,not use_cache)
|
496
|
+
plant_uml_folder = os.path.join(repository_folder, "Other", "Resources", "PlantUML")
|
497
|
+
target_folder = os.path.join(repository_folder, "Other", "Reference")
|
498
|
+
self.__generate_svg_files_from_plantuml(target_folder, plant_uml_folder)
|
499
|
+
|
500
|
+
@GeneralUtilities.check_arguments
|
501
|
+
def generate_svg_files_from_plantuml_files_for_codeunit(self, codeunit_folder: str,use_cache:bool) -> None:
|
502
|
+
self.assert_is_codeunit_folder(codeunit_folder)
|
503
|
+
repository_folder = os.path.dirname(codeunit_folder)
|
504
|
+
self.ensure_plantuml_is_available(repository_folder,not use_cache)
|
505
|
+
plant_uml_folder = os.path.join(repository_folder, "Other", "Resources", "PlantUML")
|
506
|
+
target_folder = os.path.join(codeunit_folder, "Other", "Reference")
|
507
|
+
self.__generate_svg_files_from_plantuml(target_folder, plant_uml_folder)
|
508
|
+
|
509
|
+
@GeneralUtilities.check_arguments
|
510
|
+
def ensure_plantuml_is_available(self, target_folder: str,enforce_update:bool) -> None:
|
511
|
+
self.ensure_file_from_github_assets_is_available_with_retry(target_folder, "plantuml", "plantuml", "PlantUML", "plantuml.jar", lambda latest_version: "plantuml.jar",enforce_update=enforce_update)
|
512
|
+
|
513
|
+
@GeneralUtilities.check_arguments
|
514
|
+
def __generate_svg_files_from_plantuml(self, diagrams_files_folder: str, plant_uml_folder: str) -> None:
|
515
|
+
for file in GeneralUtilities.get_all_files_of_folder(diagrams_files_folder):
|
516
|
+
if file.endswith(".plantuml"):
|
517
|
+
output_filename = self.get_output_filename_for_plantuml_filename(file)
|
518
|
+
argument = ['-jar', f'{plant_uml_folder}/plantuml.jar', '-tsvg', os.path.basename(file)]
|
519
|
+
folder = os.path.dirname(file)
|
520
|
+
self.__sc.run_program_argsasarray("java", argument, folder)
|
521
|
+
result_file = folder+"/" + output_filename
|
522
|
+
GeneralUtilities.assert_file_exists(result_file)
|
523
|
+
self.__sc.format_xml_file(result_file)
|
524
|
+
|
525
|
+
@GeneralUtilities.check_arguments
|
526
|
+
def get_output_filename_for_plantuml_filename(self, plantuml_file: str) -> str:
|
527
|
+
for line in GeneralUtilities.read_lines_from_file(plantuml_file):
|
528
|
+
prefix = "@startuml "
|
529
|
+
if line.startswith(prefix):
|
530
|
+
title = line[len(prefix):]
|
531
|
+
return title+".svg"
|
532
|
+
return Path(plantuml_file).stem+".svg"
|
533
|
+
|
534
|
+
@GeneralUtilities.check_arguments
|
535
|
+
def generate_codeunits_overview_diagram(self, repository_folder: str) -> None:
|
536
|
+
self.__sc.log.log("Generate Codeunits-overview-diagram...")
|
537
|
+
self.__sc.assert_is_git_repository(repository_folder)
|
538
|
+
project_name: str = os.path.basename(repository_folder)
|
539
|
+
target_folder = os.path.join(repository_folder, "Other", "Reference", "Technical", "Diagrams")
|
540
|
+
GeneralUtilities.ensure_directory_exists(target_folder)
|
541
|
+
target_file = os.path.join(target_folder, "CodeUnits-Overview.plantuml")
|
542
|
+
lines = ["@startuml CodeUnits-Overview"]
|
543
|
+
lines.append(f"title CodeUnits of {project_name}")
|
544
|
+
|
545
|
+
codeunits = self.get_codeunits(repository_folder)
|
546
|
+
for codeunitname in codeunits:
|
547
|
+
codeunit_file: str = os.path.join(repository_folder, codeunitname, f"{codeunitname}.codeunit.xml")
|
548
|
+
|
549
|
+
description = self.get_codeunit_description(codeunit_file)
|
550
|
+
|
551
|
+
lines.append(GeneralUtilities.empty_string)
|
552
|
+
lines.append(f"[{codeunitname}]")
|
553
|
+
lines.append(f"note as {codeunitname}Note")
|
554
|
+
lines.append(f" {description}")
|
555
|
+
lines.append(f"end note")
|
556
|
+
lines.append(f"{codeunitname} .. {codeunitname}Note")
|
557
|
+
|
558
|
+
lines.append(GeneralUtilities.empty_string)
|
559
|
+
for codeunitname in codeunits:
|
560
|
+
codeunit_file: str = os.path.join(repository_folder, codeunitname, f"{codeunitname}.codeunit.xml")
|
561
|
+
dependent_codeunits = self.get_dependent_code_units(codeunit_file)
|
562
|
+
for dependent_codeunit in dependent_codeunits:
|
563
|
+
lines.append(f"{codeunitname} --> {dependent_codeunit}")
|
564
|
+
|
565
|
+
lines.append(GeneralUtilities.empty_string)
|
566
|
+
lines.append("@enduml")
|
567
|
+
|
568
|
+
GeneralUtilities.write_lines_to_file(target_file, lines)
|
569
|
+
|
570
|
+
@GeneralUtilities.check_arguments
|
571
|
+
def generate_tasksfile_from_workspace_file(self, repository_folder: str, append_cli_args_at_end: bool = False) -> None:
|
572
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
573
|
+
if self.__sc.program_runner.will_be_executed_locally(): # works only locally, but much more performant than always running an external program
|
574
|
+
self.__sc.log.log("Generate taskfile from code-workspace-file...")
|
575
|
+
self.__sc.assert_is_git_repository(repository_folder)
|
576
|
+
workspace_file: str = self.__sc.find_file_by_extension(repository_folder, "code-workspace")
|
577
|
+
task_file: str = repository_folder + "/Taskfile.yml"
|
578
|
+
lines: list[str] = ["version: '3'", GeneralUtilities.empty_string, "tasks:", GeneralUtilities.empty_string]
|
579
|
+
workspace_file_content: str = self.__sc.get_file_content(workspace_file)
|
580
|
+
jsoncontent = json.loads(workspace_file_content)
|
581
|
+
tasks = jsoncontent["tasks"]["tasks"]
|
582
|
+
tasks.sort(key=lambda x: x["label"].split("/")[-1], reverse=False) # sort by the label of the task
|
583
|
+
for task in tasks:
|
584
|
+
if task["type"] == "shell":
|
585
|
+
|
586
|
+
description: str = task["label"]
|
587
|
+
name: str = GeneralUtilities.to_pascal_case(description)
|
588
|
+
command = task["command"]
|
589
|
+
relative_script_file = task["command"]
|
590
|
+
|
591
|
+
relative_script_file = "."
|
592
|
+
cwd: str = None
|
593
|
+
if "options" in task:
|
594
|
+
options = task["options"]
|
595
|
+
if "cwd" in options:
|
596
|
+
cwd = options["cwd"]
|
597
|
+
cwd = cwd.replace("${workspaceFolder}", ".")
|
598
|
+
cwd = cwd.replace("\\", "\\\\").replace('"', '\\"') # escape backslashes and double quotes for YAML
|
599
|
+
relative_script_file = cwd
|
600
|
+
if len(relative_script_file) == 0:
|
601
|
+
relative_script_file = "."
|
602
|
+
|
603
|
+
command_with_args = command
|
604
|
+
if "args" in task:
|
605
|
+
args = task["args"]
|
606
|
+
if len(args) > 1:
|
607
|
+
command_with_args = f"{command_with_args} {' '.join(args)}"
|
608
|
+
|
609
|
+
if "description" in task:
|
610
|
+
additional_description = task["description"]
|
611
|
+
description = f"{description} ({additional_description})"
|
612
|
+
|
613
|
+
if append_cli_args_at_end:
|
614
|
+
command_with_args = f"{command_with_args} {{{{.CLI_ARGS}}}}"
|
615
|
+
|
616
|
+
description_literal = description.replace("\\", "\\\\").replace('"', '\\"') # escape backslashes and double quotes for YAML
|
617
|
+
command_with_args = command_with_args.replace("\\", "\\\\").replace('"', '\\"') # escape backslashes and double quotes for YAML
|
618
|
+
|
619
|
+
lines.append(f" {name}:")
|
620
|
+
lines.append(f' desc: "{description_literal}"')
|
621
|
+
lines.append(' silent: true')
|
622
|
+
if cwd is not None:
|
623
|
+
lines.append(f' dir: "{cwd}"')
|
624
|
+
lines.append(" cmds:")
|
625
|
+
lines.append(f' - "{command_with_args}"')
|
626
|
+
lines.append(' aliases:')
|
627
|
+
lines.append(f' - {name.lower()}')
|
628
|
+
if "aliases" in task:
|
629
|
+
aliases = task["aliases"]
|
630
|
+
for alias in aliases:
|
631
|
+
lines.append(f' - {alias}')
|
632
|
+
lines.append(GeneralUtilities.empty_string)
|
633
|
+
|
634
|
+
self.__sc.set_file_content(task_file, "\n".join(lines))
|
635
|
+
else:
|
636
|
+
self.__sc.run_program("scgeneratetasksfilefromworkspacefile", f"--repositoryfolder {repository_folder}")
|
637
|
+
|
638
|
+
@GeneralUtilities.check_arguments
|
639
|
+
def ensure_androidappbundletool_is_available(self, target_folder: str,enforce_update:bool) -> None:
|
640
|
+
self.ensure_file_from_github_assets_is_available_with_retry(target_folder, "google", "bundletool", "AndroidAppBundleTool", "bundletool.jar", lambda latest_version: f"bundletool-all-{latest_version}.jar",enforce_update=enforce_update)
|
641
|
+
|
642
|
+
@GeneralUtilities.check_arguments
|
643
|
+
def ensure_mediamtx_is_available(self, target_folder: str,enforce_update:bool) -> None:
|
644
|
+
def download_and_extract(osname: str, osname_in_github_asset: str, extension: str):
|
645
|
+
resource_name: str = f"MediaMTX_{osname}"
|
646
|
+
zip_filename: str = f"{resource_name}.{extension}"
|
647
|
+
resource_folder: str = os.path.join(target_folder, "Other", "Resources", resource_name)
|
648
|
+
target_folder_extracted = os.path.join(resource_folder, "MediaMTX")
|
649
|
+
update:bool=not os.path.isdir(target_folder_extracted) or GeneralUtilities.folder_is_empty(target_folder_extracted) or enforce_update
|
650
|
+
if update:
|
651
|
+
self.ensure_file_from_github_assets_is_available_with_retry(target_folder, "bluenviron", "mediamtx", resource_name, zip_filename, lambda latest_version: f"mediamtx_{latest_version}_{osname_in_github_asset}_amd64.{extension}",enforce_update=enforce_update)
|
652
|
+
local_zip_file: str = os.path.join(resource_folder, f"{resource_name}.{extension}")
|
653
|
+
GeneralUtilities.ensure_folder_exists_and_is_empty(target_folder_extracted)
|
654
|
+
if extension == "zip":
|
655
|
+
with zipfile.ZipFile(local_zip_file, 'r') as zip_ref:
|
656
|
+
zip_ref.extractall(target_folder_extracted)
|
657
|
+
elif extension == "tar.gz":
|
658
|
+
with tarfile.open(local_zip_file, "r:gz") as tar:
|
659
|
+
tar.extractall(path=target_folder_extracted)
|
660
|
+
else:
|
661
|
+
raise ValueError(f"Unknown extension: \"{extension}\"")
|
662
|
+
GeneralUtilities.ensure_file_does_not_exist(local_zip_file)
|
663
|
+
|
664
|
+
download_and_extract("Windows", "windows", "zip")
|
665
|
+
download_and_extract("Linux", "linux", "tar.gz")
|
666
|
+
download_and_extract("MacOS", "darwin", "tar.gz")
|
667
|
+
|
668
|
+
@GeneralUtilities.check_arguments
|
669
|
+
def clone_repository_as_resource(self, local_repository_folder: str, remote_repository_link: str, resource_name: str, repository_subname: str = None,use_cache:bool=True) -> None:
|
670
|
+
self.__sc.log.log(f'Clone resource {resource_name}...')
|
671
|
+
resrepo_commit_id_folder: str = os.path.join(local_repository_folder, "Other", "Resources", f"{resource_name}Version")
|
672
|
+
resrepo_commit_id_file: str = os.path.join(resrepo_commit_id_folder, f"{resource_name}Version.txt")
|
673
|
+
latest_version: str = GeneralUtilities.read_text_from_file(resrepo_commit_id_file)
|
674
|
+
resrepo_data_folder: str = os.path.join(local_repository_folder, "Other", "Resources", resource_name).replace("\\", "/")
|
675
|
+
current_version: str = None
|
676
|
+
resrepo_data_version: str = os.path.join(resrepo_data_folder, f"{resource_name}Version.txt")
|
677
|
+
if os.path.isdir(resrepo_data_folder):
|
678
|
+
if os.path.isfile(resrepo_data_version):
|
679
|
+
current_version = GeneralUtilities.read_text_from_file(resrepo_data_version)
|
680
|
+
if (current_version is None) or (current_version != latest_version):
|
681
|
+
target_folder: str = resrepo_data_folder
|
682
|
+
if repository_subname is not None:
|
683
|
+
target_folder = f"{resrepo_data_folder}/{repository_subname}"
|
684
|
+
|
685
|
+
update:bool=GeneralUtilities.folder_is_empty(target_folder) or not use_cache
|
686
|
+
if update:
|
687
|
+
self.__sc.run_program(f"Clone {remote_repository_link} as resource", LogLevel.Information)
|
688
|
+
GeneralUtilities.ensure_folder_exists_and_is_empty(target_folder)
|
689
|
+
self.__sc.run_program("git", f"clone --recurse-submodules {remote_repository_link} {target_folder}")
|
690
|
+
self.__sc.run_program("git", f"checkout {latest_version}", target_folder)
|
691
|
+
GeneralUtilities.write_text_to_file(resrepo_data_version, latest_version)
|
692
|
+
|
693
|
+
git_folders: list[str] = []
|
694
|
+
git_files: list[str] = []
|
695
|
+
for dirpath, dirnames, filenames in os.walk(target_folder):
|
696
|
+
for dirname in dirnames:
|
697
|
+
if dirname == ".git":
|
698
|
+
full_path = os.path.join(dirpath, dirname)
|
699
|
+
git_folders.append(full_path)
|
700
|
+
for filename in filenames:
|
701
|
+
if filename == ".git":
|
702
|
+
full_path = os.path.join(dirpath, filename)
|
703
|
+
git_files.append(full_path)
|
704
|
+
for git_folder in git_folders:
|
705
|
+
if os.path.isdir(git_folder):
|
706
|
+
GeneralUtilities.ensure_directory_does_not_exist(git_folder)
|
707
|
+
for git_file in git_files:
|
708
|
+
if os.path.isdir(git_file):
|
709
|
+
GeneralUtilities.ensure_file_does_not_exist(git_file)
|
710
|
+
|
711
|
+
@GeneralUtilities.check_arguments
|
712
|
+
def ensure_certificate_authority_for_development_purposes_is_generated(self, product_folder: str):
|
713
|
+
product_name: str = os.path.basename(product_folder)
|
714
|
+
now = GeneralUtilities.get_now()
|
715
|
+
ca_name = f"{product_name}CA_{now.year:04}{now.month:02}{now.day:02}{now.hour:02}{now.min:02}{now.second:02}"
|
716
|
+
ca_folder = os.path.join(product_folder, "Other", "Resources", "CA")
|
717
|
+
generate_certificate = True
|
718
|
+
if os.path.isdir(ca_folder):
|
719
|
+
ca_files = [file for file in GeneralUtilities.get_direct_files_of_folder(ca_folder) if file.endswith(".crt")]
|
720
|
+
if len(ca_files) > 0:
|
721
|
+
ca_file = ca_files[-1] # pylint:disable=unused-variable
|
722
|
+
certificate_is_valid = True # TODO check if certificate is really valid
|
723
|
+
generate_certificate = not certificate_is_valid
|
724
|
+
if generate_certificate:
|
725
|
+
self.__sc.generate_certificate_authority(ca_folder, ca_name, "DE", "SubjST", "SubjL", "SubjO", "SubjOU")
|
726
|
+
# TODO add switch to auto-install the script if desired
|
727
|
+
# for windows: powershell Import-Certificate -FilePath ConSurvCA_20241121000236.crt -CertStoreLocation 'Cert:\CurrentUser\Root'
|
728
|
+
# for linux: (TODO)
|
729
|
+
|
730
|
+
@GeneralUtilities.check_arguments
|
731
|
+
def generate_certificate_for_development_purposes_for_product(self, repository_folder: str):
|
732
|
+
self.__sc.assert_is_git_repository(repository_folder)
|
733
|
+
product_name = os.path.basename(repository_folder)
|
734
|
+
ca_folder: str = os.path.join(repository_folder, "Other", "Resources", "CA")
|
735
|
+
self.__generate_certificate_for_development_purposes(product_name, os.path.join(repository_folder, "Other", "Resources"), ca_folder, None)
|
736
|
+
|
737
|
+
@GeneralUtilities.check_arguments
|
738
|
+
def __generate_certificate_for_development_purposes(self, service_name: str, resources_folder: str, ca_folder: str, domain: str = None):
|
739
|
+
if domain is None:
|
740
|
+
domain = f"{service_name}.test.local"
|
741
|
+
domain = domain.lower()
|
742
|
+
resource_name: str = "DevelopmentCertificate"
|
743
|
+
certificate_folder: str = os.path.join(resources_folder, resource_name)
|
744
|
+
|
745
|
+
resource_content_filename: str = service_name+resource_name
|
746
|
+
certificate_file = os.path.join(certificate_folder, f"{domain}.crt")
|
747
|
+
unsignedcertificate_file = os.path.join(certificate_folder, f"{domain}.unsigned.crt")
|
748
|
+
certificate_exists = os.path.exists(certificate_file)
|
749
|
+
if certificate_exists:
|
750
|
+
certificate_expired = GeneralUtilities.certificate_is_expired(certificate_file)
|
751
|
+
generate_new_certificate = certificate_expired
|
752
|
+
else:
|
753
|
+
generate_new_certificate = True
|
754
|
+
if generate_new_certificate:
|
755
|
+
GeneralUtilities.ensure_directory_does_not_exist(certificate_folder)
|
756
|
+
GeneralUtilities.ensure_directory_exists(certificate_folder)
|
757
|
+
self.__sc.log.log("Generate TLS-certificate for development-purposes...")
|
758
|
+
self.__sc.generate_certificate(certificate_folder, domain, resource_content_filename, "DE", "SubjST", "SubjL", "SubjO", "SubjOU")
|
759
|
+
self.__sc.generate_certificate_sign_request(certificate_folder, domain, resource_content_filename, "DE", "SubjST", "SubjL", "SubjO", "SubjOU")
|
760
|
+
ca_name = os.path.basename(self.__sc.find_last_file_by_extension(ca_folder, "crt"))[:-4]
|
761
|
+
self.__sc.sign_certificate(certificate_folder, ca_folder, ca_name, domain, resource_content_filename)
|
762
|
+
GeneralUtilities.ensure_file_does_not_exist(unsignedcertificate_file)
|
763
|
+
self.__sc.log.log("Finished generating TLS-certificate for development-purposes...",LogLevel.Debug)
|
764
|
+
|
765
|
+
|
766
|
+
@GeneralUtilities.check_arguments
|
767
|
+
def do_npm_install(self, package_json_folder: str, npm_force: bool,use_cache:bool) -> None:
|
768
|
+
target_folder:str=os.path.join(package_json_folder,"node_modules")
|
769
|
+
update:bool=GeneralUtilities.folder_is_empty(target_folder) or GeneralUtilities.folder_is_empty(target_folder) or not use_cache
|
770
|
+
if update:
|
771
|
+
self.__sc.log.log("Do npm-install...")
|
772
|
+
argument1 = "install"
|
773
|
+
if npm_force:
|
774
|
+
argument1 = f"{argument1} --force"
|
775
|
+
self.__sc.run_with_epew("npm", argument1, package_json_folder)
|
776
|
+
|
777
|
+
argument2 = "install --package-lock-only"
|
778
|
+
if npm_force:
|
779
|
+
argument2 = f"{argument2} --force"
|
780
|
+
self.__sc.run_with_epew("npm", argument2, package_json_folder)
|
781
|
+
|
782
|
+
argument3 = "clean-install"
|
783
|
+
if npm_force:
|
784
|
+
argument3 = f"{argument3} --force"
|
785
|
+
self.__sc.run_with_epew("npm", argument3, package_json_folder)
|
786
|
+
|
787
|
+
@staticmethod
|
788
|
+
@GeneralUtilities.check_arguments
|
789
|
+
def sort_reference_folder(folder1: str, folder2: str) -> int:
|
790
|
+
"""Returns a value greater than 0 if and only if folder1 has a base-folder-name with a with a higher version than the base-folder-name of folder2.
|
791
|
+
Returns a value lower than 0 if and only if folder1 has a base-folder-name with a with a lower version than the base-folder-name of folder2.
|
792
|
+
Returns 0 if both values are equal."""
|
793
|
+
if (folder1 == folder2):
|
794
|
+
return 0
|
795
|
+
|
796
|
+
version_identifier_1 = os.path.basename(folder1)
|
797
|
+
if version_identifier_1 == "Latest":
|
798
|
+
return -1
|
799
|
+
version_identifier_1 = version_identifier_1[1:]
|
800
|
+
|
801
|
+
version_identifier_2 = os.path.basename(folder2)
|
802
|
+
if version_identifier_2 == "Latest":
|
803
|
+
return 1
|
804
|
+
version_identifier_2 = version_identifier_2[1:]
|
805
|
+
|
806
|
+
if version.parse(version_identifier_1) < version.parse(version_identifier_2):
|
807
|
+
return -1
|
808
|
+
elif version.parse(version_identifier_1) > version.parse(version_identifier_2):
|
809
|
+
return 1
|
810
|
+
else:
|
811
|
+
return 0
|
812
|
+
|
813
|
+
@GeneralUtilities.check_arguments
|
814
|
+
def t4_transform(self, codeunit_folder: str, ignore_git_ignored_files: bool ,use_cache:bool):
|
815
|
+
self.__ensure_grylibrary_is_available(codeunit_folder,use_cache)
|
816
|
+
repository_folder: str = os.path.dirname(codeunit_folder)
|
817
|
+
codeunitname: str = os.path.basename(codeunit_folder)
|
818
|
+
codeunit_folder = os.path.join(repository_folder, codeunitname)
|
819
|
+
for search_result in Path(codeunit_folder).glob('**/*.tt'):
|
820
|
+
tt_file = str(search_result)
|
821
|
+
relative_path_to_tt_file_from_repository = str(Path(tt_file).relative_to(repository_folder))
|
822
|
+
if (not ignore_git_ignored_files) or (ignore_git_ignored_files and not self.__sc.file_is_git_ignored(relative_path_to_tt_file_from_repository, repository_folder)):
|
823
|
+
relative_path_to_tt_file_from_codeunit_file = str(Path(tt_file).relative_to(codeunit_folder))
|
824
|
+
argument = [f"--parameter=repositoryFolder={repository_folder}", f"--parameter=codeUnitName={codeunitname}", relative_path_to_tt_file_from_codeunit_file]
|
825
|
+
self.__sc.run_program_argsasarray("t4", argument, codeunit_folder)
|
826
|
+
|
827
|
+
@GeneralUtilities.check_arguments
|
828
|
+
def __ensure_grylibrary_is_available(self, codeunit_folder: str,use_cache:bool) -> None:
|
829
|
+
grylibrary_folder = os.path.join(codeunit_folder, "Other", "Resources", "GRYLibrary")
|
830
|
+
grylibrary_dll_file = os.path.join(grylibrary_folder, "BuildResult_DotNet_win-x64", "GRYLibrary.dll")
|
831
|
+
internet_connection_is_available = GeneralUtilities.internet_connection_is_available()
|
832
|
+
grylibrary_dll_file_exists = os.path.isfile(grylibrary_dll_file)
|
833
|
+
update:bool=(not grylibrary_dll_file_exists) or (not use_cache)
|
834
|
+
if update:
|
835
|
+
if internet_connection_is_available: # Load/Update GRYLibrary
|
836
|
+
self.__sc.log("Download GRYLibrary...",LogLevel.Debug)
|
837
|
+
grylibrary_latest_codeunit_file = "https://raw.githubusercontent.com/anionDev/GRYLibrary/stable/GRYLibrary/GRYLibrary.codeunit.xml"
|
838
|
+
with urllib.request.urlopen(grylibrary_latest_codeunit_file) as url_result:
|
839
|
+
grylibrary_latest_version = self.get_version_of_codeunit_filecontent(url_result.read().decode("utf-8"))
|
840
|
+
if grylibrary_dll_file_exists:
|
841
|
+
grylibrary_existing_codeunit_file = os.path.join(grylibrary_folder, "SourceCode", "GRYLibrary.codeunit.xml")
|
842
|
+
grylibrary_existing_codeunit_version = self.get_version_of_codeunit(grylibrary_existing_codeunit_file)
|
843
|
+
if grylibrary_existing_codeunit_version != grylibrary_latest_version:
|
844
|
+
GeneralUtilities.ensure_directory_does_not_exist(grylibrary_folder)
|
845
|
+
if not os.path.isfile(grylibrary_dll_file):
|
846
|
+
GeneralUtilities.ensure_directory_does_not_exist(grylibrary_folder)
|
847
|
+
GeneralUtilities.ensure_directory_exists(grylibrary_folder)
|
848
|
+
archive_name = f"GRYLibrary.v{grylibrary_latest_version}.Productive.Artifacts.zip"
|
849
|
+
archive_download_link = f"https://github.com/anionDev/GRYLibrary/releases/download/v{grylibrary_latest_version}/{archive_name}"
|
850
|
+
archive_file = os.path.join(grylibrary_folder, archive_name)
|
851
|
+
urllib.request.urlretrieve(archive_download_link, archive_file)
|
852
|
+
with zipfile.ZipFile(archive_file, 'r') as zip_ref:
|
853
|
+
zip_ref.extractall(grylibrary_folder)
|
854
|
+
GeneralUtilities.ensure_file_does_not_exist(archive_file)
|
855
|
+
else:
|
856
|
+
if grylibrary_dll_file_exists:
|
857
|
+
self.__sc.log.log("Can not check for updates of GRYLibrary due to missing internet-connection.")
|
858
|
+
else:
|
859
|
+
raise ValueError("Can not download GRYLibrary.")
|
860
|
+
|
861
|
+
+    @GeneralUtilities.check_arguments
+    def ensure_ffmpeg_is_available(self, codeunit_folder: str, use_cache: bool) -> None:
+        self.assert_is_codeunit_folder(codeunit_folder)
+        ffmpeg_folder = os.path.join(codeunit_folder, "Other", "Resources", "FFMPEG")
+        internet_connection_is_available = GeneralUtilities.internet_connection_is_available()
+        exe_file = f"{ffmpeg_folder}/ffmpeg.exe"
+        exe_file_exists = os.path.isfile(exe_file)
+        update: bool = (not exe_file_exists) or (not use_cache)
+        if update:
+            if internet_connection_is_available:  # Load/Update
+                GeneralUtilities.ensure_directory_does_not_exist(ffmpeg_folder)
+                GeneralUtilities.ensure_directory_exists(ffmpeg_folder)
+                ffmpeg_temp_folder = ffmpeg_folder + "Temp"
+                GeneralUtilities.ensure_directory_does_not_exist(ffmpeg_temp_folder)
+                GeneralUtilities.ensure_directory_exists(ffmpeg_temp_folder)
+                zip_file_on_disk = os.path.join(ffmpeg_temp_folder, "ffmpeg.zip")
+                original_zip_filename = "ffmpeg-master-latest-win64-gpl-shared"
+                zip_link = f"https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/{original_zip_filename}.zip"
+                urllib.request.urlretrieve(zip_link, zip_file_on_disk)
+                shutil.unpack_archive(zip_file_on_disk, ffmpeg_temp_folder)
+                bin_folder_source = os.path.join(ffmpeg_temp_folder, "ffmpeg-master-latest-win64-gpl-shared/bin")
+                bin_folder_target = ffmpeg_folder
+                GeneralUtilities.copy_content_of_folder(bin_folder_source, bin_folder_target)
+                GeneralUtilities.ensure_directory_does_not_exist(ffmpeg_temp_folder)
+            else:
+                if exe_file_exists:
+                    self.__sc.log("Cannot check for updates of FFMPEG due to missing internet connection.")
+                else:
+                    raise ValueError("Cannot download FFMPEG.")
+
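Aside: the ensure_*-methods above share a download-and-cache pattern: skip the download when the resource already exists and the cache may be used, refresh it when an internet connection is available, and fail only if the resource is both missing and unreachable. A minimal standalone sketch of that pattern, using only the standard library and placeholder folder/URL names (not part of this package):

import os
import shutil
import tempfile
import urllib.request

def ensure_tool_is_available(resource_folder: str, archive_url: str, marker_file: str, use_cache: bool) -> None:
    """Download and unpack a tool archive into resource_folder unless a cached copy exists."""
    marker = os.path.join(resource_folder, marker_file)
    if os.path.isfile(marker) and use_cache:
        return  # cached copy is considered good enough
    with tempfile.TemporaryDirectory() as temp_folder:
        archive_file = os.path.join(temp_folder, "tool.zip")
        urllib.request.urlretrieve(archive_url, archive_file)  # raises when offline
        shutil.rmtree(resource_folder, ignore_errors=True)     # replace any stale copy
        os.makedirs(resource_folder, exist_ok=True)
        shutil.unpack_archive(archive_file, resource_folder)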
+    @GeneralUtilities.check_arguments
+    def set_constants_for_certificate_private_information(self, codeunit_folder: str) -> None:
+        """Expects a certificate-resource and generates a constant for its sensitive information in hex-format."""
+        self.assert_is_codeunit_folder(codeunit_folder)
+        repo_name: str = os.path.basename(GeneralUtilities.resolve_relative_path("..", codeunit_folder))
+        resource_name: str = "DevelopmentCertificate"
+        filename: str = repo_name + "DevelopmentCertificate"
+        self.generate_constant_from_resource_by_filename(codeunit_folder, resource_name, f"{filename}.pfx", "PFX")
+        self.generate_constant_from_resource_by_filename(codeunit_folder, resource_name, f"{filename}.password", "Password")
+
+    @GeneralUtilities.check_arguments
+    def generate_constant_from_resource_by_filename(self, codeunit_folder: str, resource_name: str, filename: str, constant_name: str) -> None:
+        self.assert_is_codeunit_folder(codeunit_folder)
+        certificate_resource_folder = GeneralUtilities.resolve_relative_path(f"Other/Resources/{resource_name}", codeunit_folder)
+        resource_file = os.path.join(certificate_resource_folder, filename)
+        resource_file_content = GeneralUtilities.read_binary_from_file(resource_file)
+        resource_file_as_hex = resource_file_content.hex()
+        self.set_constant(codeunit_folder, f"{resource_name}{constant_name}Hex", resource_file_as_hex)
+
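For illustration, the hex-encoding step used above can be reproduced with plain Python; the file name below is a hypothetical placeholder and the stored constant is replaced by a simple print:

from pathlib import Path

pfx_bytes = Path("MyRepoDevelopmentCertificate.pfx").read_bytes()  # hypothetical resource file
pfx_hex = pfx_bytes.hex()       # same textual representation the method stores as a constant
print(pfx_hex[:32], "...")      # consumers can restore the bytes via bytes.fromhex(pfx_hex)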
+    @GeneralUtilities.check_arguments
+    def get_resource_from_global_resource(self, codeunit_folder: str, resource_name: str):
+        repository_folder: str = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
+        source_folder: str = os.path.join(repository_folder, "Other", "Resources", resource_name)
+        target_folder: str = os.path.join(codeunit_folder, "Other", "Resources", resource_name)
+        GeneralUtilities.ensure_folder_exists_and_is_empty(target_folder)
+        GeneralUtilities.copy_content_of_folder(source_folder, target_folder)
+
+
+    @GeneralUtilities.check_arguments
+    def merge_packages(self, coverage_file: str, package_name: str) -> None:
+        tree = etree.parse(coverage_file)
+        root = tree.getroot()
+        packages = root.findall("./packages/package")
+        all_classes = []
+        for pkg in packages:
+            pkg_name: str = pkg.get("name")
+            if pkg_name == package_name or pkg_name.startswith(f"{package_name}."):
+                classes = pkg.find("classes")
+                if classes is not None:
+                    all_classes.extend(classes.findall("class"))
+        new_package = etree.Element("package", name=package_name)
+        new_classes = etree.SubElement(new_package, "classes")
+        for cls in all_classes:
+            new_classes.append(cls)
+        packages_node = root.find("./packages")
+        packages_node.clear()
+        packages_node.append(new_package)
+        tree.write(coverage_file, pretty_print=True, xml_declaration=True, encoding="UTF-8")
+        self.__calculate_entire_line_rate(coverage_file)
+
+
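The merge can be exercised without a real coverage report. The following standalone sketch builds a tiny Cobertura-like document in memory and collapses all packages whose name starts with "App" into one package, mirroring the logic above (the package and class names are invented; element and attribute names follow the Cobertura format):

from lxml import etree

xml = b"""<coverage><packages>
  <package name="App.Core"><classes><class name="A"/></classes></package>
  <package name="App.Web"><classes><class name="B"/></classes></package>
</packages></coverage>"""
root = etree.fromstring(xml)
merged = etree.Element("package", name="App")
merged_classes = etree.SubElement(merged, "classes")
for pkg in root.findall("./packages/package"):
    classes = pkg.find("classes")
    if classes is not None:
        merged_classes.extend(classes.findall("class"))  # move classes under the merged package
packages_node = root.find("./packages")
packages_node.clear()
packages_node.append(merged)
print(etree.tostring(root, pretty_print=True).decode())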
+    @GeneralUtilities.check_arguments
+    def __calculate_entire_line_rate(self, coverage_file: str) -> None:
+        tree = etree.parse(coverage_file)
+        root = tree.getroot()
+        package = root.find("./packages/package")
+        if package is None:
+            raise RuntimeError("No <package> element found.")
+
+        line_elements = package.findall(".//line")
+
+        amount_of_lines = 0
+        amount_of_hit_lines = 0
+
+        for line in line_elements:
+            amount_of_lines += 1
+            hits = int(line.get("hits", "0"))
+            if hits > 0:
+                amount_of_hit_lines += 1
+        line_rate = amount_of_hit_lines / amount_of_lines if amount_of_lines > 0 else 0.0
+        package.set("line-rate", str(line_rate))
+        tree.write(coverage_file, pretty_print=True, xml_declaration=True, encoding="UTF-8")
+
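The recomputed line-rate is simply the number of lines with at least one hit divided by the total number of <line> elements. A small worked example, independent of any file on disk (the hit counts are made up):

hits_per_line = [3, 0, 1, 0, 0, 7]            # hypothetical "hits" attribute values
total = len(hits_per_line)                     # 6 lines in total
hit = sum(1 for h in hits_per_line if h > 0)   # 3 lines were executed
line_rate = hit / total if total > 0 else 0.0
print(line_rate)                               # 0.5 -> written back as line-rate="0.5"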
+    @GeneralUtilities.check_arguments
+    def generate_api_client_from_dependent_codeunit_for_angular(self, codeunit_folder: str, name_of_api_providing_codeunit: str, generated_program_part_name: str, language: str, use_cache: bool) -> None:
+        target_subfolder_in_codeunit = f"src/app/generated/{generated_program_part_name}"
+        self.ensure_openapigenerator_is_available(codeunit_folder, use_cache)
+        openapigenerator_jar_file = os.path.join(codeunit_folder, "Other", "Resources", "OpenAPIGenerator", "open-api-generator.jar")
+        openapi_spec_file = os.path.join(codeunit_folder, "Other", "Resources", "DependentCodeUnits", name_of_api_providing_codeunit, "APISpecification", f"{name_of_api_providing_codeunit}.latest.api.json")
+        target_folder = os.path.join(codeunit_folder, target_subfolder_in_codeunit)
+        GeneralUtilities.ensure_folder_exists_and_is_empty(target_folder)
+        self.__sc.run_program("java", f'-jar {openapigenerator_jar_file} generate -i {openapi_spec_file} -g {language} -o {target_folder} --global-property supportingFiles --global-property models --global-property apis', codeunit_folder)
+
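The generator call above boils down to a plain java -jar invocation of the OpenAPI Generator CLI. Outside of ScriptCollection it could look like the following sketch; the spec file, output folder and the typescript-angular generator name are examples, while the flags are the ones used above:

import subprocess

subprocess.run(
    [
        "java", "-jar", "open-api-generator.jar", "generate",
        "-i", "MyApi.latest.api.json",       # hypothetical OpenAPI specification
        "-g", "typescript-angular",          # target generator ("language")
        "-o", "src/app/generated/MyApi",     # output folder for the generated client
        "--global-property", "supportingFiles",
        "--global-property", "models",
        "--global-property", "apis",
    ],
    check=True,
)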
+    @GeneralUtilities.check_arguments
+    def replace_version_in_packagejson_file(self, packagejson_file: str, codeunit_version: str) -> None:
+        encoding = "utf-8"
+        with open(packagejson_file, encoding=encoding) as f:
+            data = json.load(f)
+        data['version'] = codeunit_version
+        with open(packagejson_file, 'w', encoding=encoding) as f:
+            json.dump(data, f, indent=2)
+
+    @GeneralUtilities.check_arguments
+    def ensure_openapigenerator_is_available(self, codeunit_folder: str, use_cache: bool) -> None:
+        self.assert_is_codeunit_folder(codeunit_folder)
+        openapigenerator_folder = os.path.join(codeunit_folder, "Other", "Resources", "OpenAPIGenerator")
+        internet_connection_is_available = GeneralUtilities.internet_connection_is_available()
+        filename = "open-api-generator.jar"
+        jar_file = f"{openapigenerator_folder}/{filename}"
+        jar_file_exists = os.path.isfile(jar_file)
+        update: bool = (not jar_file_exists) or (not use_cache)
+        if update:
+            if internet_connection_is_available:  # Load/Update
+                self.__sc.log("Download OpenAPIGeneratorCLI...", LogLevel.Debug)
+                version_file = os.path.join(codeunit_folder, "Other", "Resources", "Dependencies", "OpenAPIGenerator", "Version.txt")
+                used_version = GeneralUtilities.read_text_from_file(version_file)
+                download_link = f"https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/{used_version}/openapi-generator-cli-{used_version}.jar"
+                GeneralUtilities.ensure_directory_does_not_exist(openapigenerator_folder)
+                GeneralUtilities.ensure_directory_exists(openapigenerator_folder)
+                urllib.request.urlretrieve(download_link, jar_file)
+            else:
+                if jar_file_exists:
+                    self.__sc.log("Cannot check for updates of OpenAPIGenerator due to missing internet connection.")
+                else:
+                    raise ValueError("Cannot download OpenAPIGenerator.")
+
+    @GeneralUtilities.check_arguments
+    def standardized_tasks_update_version_in_docker_examples(self, codeunit_folder: str, codeunit_version: str) -> None:
+        codeunit_name = os.path.basename(codeunit_folder)
+        codeunit_name_lower = codeunit_name.lower()
+        examples_folder = GeneralUtilities.resolve_relative_path("Other/Reference/ReferenceContent/Examples", codeunit_folder)
+        for example_folder in GeneralUtilities.get_direct_folders_of_folder(examples_folder):
+            docker_compose_file = os.path.join(example_folder, "docker-compose.yml")
+            if os.path.isfile(docker_compose_file):
+                filecontent = GeneralUtilities.read_text_from_file(docker_compose_file)
+                replaced = re.sub(f'image:\\s+{codeunit_name_lower}:\\d+\\.\\d+\\.\\d+', f"image: {codeunit_name_lower}:{codeunit_version}", filecontent)
+                GeneralUtilities.write_text_to_file(docker_compose_file, replaced)
+
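The version bump in the Docker examples is a plain regular-expression replacement of the image tag. A standalone sketch with an inline docker-compose snippet (the image name and versions are invented):

import re

compose = "services:\n  app:\n    image: mycodeunit:1.2.3\n"
codeunit_name_lower = "mycodeunit"
new_version = "1.2.4"
replaced = re.sub(
    rf"image:\s+{codeunit_name_lower}:\d+\.\d+\.\d+",
    f"image: {codeunit_name_lower}:{new_version}",
    compose,
)
print(replaced)  # ... image: mycodeunit:1.2.4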
+    @GeneralUtilities.check_arguments
+    def set_version_of_openapigenerator(self, codeunit_folder: str, used_version: str = None) -> None:
+        target_folder: str = os.path.join(codeunit_folder, "Other", "Resources", "Dependencies", "OpenAPIGenerator")
+        version_file = os.path.join(target_folder, "Version.txt")
+        GeneralUtilities.ensure_directory_exists(target_folder)
+        GeneralUtilities.ensure_file_exists(version_file)
+        GeneralUtilities.write_text_to_file(version_file, used_version)
+
+    @GeneralUtilities.check_arguments
+    def get_latest_version_of_openapigenerator(self) -> str:
+        headers = {'Cache-Control': 'no-cache'}
+        self.__add_github_api_key_if_available(headers)
+        response = requests.get("https://api.github.com/repos/OpenAPITools/openapi-generator/releases/latest", headers=headers, timeout=(10, 10))
+        latest_version = response.json()["tag_name"]
+        return latest_version
+
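The lookup relies on GitHub's REST API: the /releases/latest endpoint returns a single JSON object whose tag_name field carries the newest non-prerelease tag. A standalone sketch; the GITHUB_TOKEN environment variable is only a common convention for supplying a token here, not something this package prescribes:

import os
import requests

headers = {"Cache-Control": "no-cache"}
token = os.environ.get("GITHUB_TOKEN")
if token:
    headers["Authorization"] = f"Bearer {token}"  # authenticated requests get a higher rate limit
response = requests.get(
    "https://api.github.com/repos/OpenAPITools/openapi-generator/releases/latest",
    headers=headers,
    timeout=(10, 10),
)
response.raise_for_status()
print(response.json()["tag_name"])  # e.g. a tag like "v7.x.y"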
+    @GeneralUtilities.check_arguments
+    def update_images_in_example(self, codeunit_folder: str):
+        iu = ImageUpdater()
+        iu.add_default_mapper()
+        dockercomposefile: str = os.path.join(codeunit_folder, "Other", "Reference", "ReferenceContent", "Examples", "MinimalDockerComposeFile", "docker-compose.yml")
+        excluded = ["opendms"]
+        iu.update_all_services_in_docker_compose_file(dockercomposefile, VersionEcholon.LatestPatchOrLatestMinor, excluded)
+        iu.check_for_newest_version(dockercomposefile, excluded)
+
+    @GeneralUtilities.check_arguments
+    def push_wheel_build_artifact(self, push_build_artifacts_file, codeunitname, repository: str, apikey: str, gpg_identity: str, repository_folder_name: str, verbosity: LogLevel) -> None:
+        folder_of_this_file = os.path.dirname(push_build_artifacts_file)
+        repository_folder = GeneralUtilities.resolve_relative_path(f"..{os.path.sep}../Submodules{os.path.sep}{repository_folder_name}", folder_of_this_file)
+        wheel_file = self.get_wheel_file(repository_folder, codeunitname)
+        self.__standardized_tasks_push_wheel_file_to_registry(wheel_file, apikey, repository, gpg_identity, verbosity)
+
+    @GeneralUtilities.check_arguments
+    def get_wheel_file(self, repository_folder: str, codeunit_name: str) -> str:
+        self.__sc.assert_is_git_repository(repository_folder)
+        return self.__sc.find_file_by_extension(os.path.join(repository_folder, codeunit_name, "Other", "Artifacts", "BuildResult_Wheel"), "whl")
+
+    @GeneralUtilities.check_arguments
+    def __standardized_tasks_push_wheel_file_to_registry(self, wheel_file: str, api_key: str, repository: str, gpg_identity: str, verbosity: LogLevel) -> None:
+        # repository-value when PyPi should be used: "pypi"
+        # gpg_identity-value when wheel-file should not be signed: None
+        folder = os.path.dirname(wheel_file)
+        filename = os.path.basename(wheel_file)
+
+        if gpg_identity is None:
+            gpg_identity_argument = GeneralUtilities.empty_string
+        else:
+            gpg_identity_argument = GeneralUtilities.empty_string  # f" --sign --identity {gpg_identity}"
+            # disabled due to https://blog.pypi.org/posts/2023-05-23-removing-pgp/
+
+        if int(LogLevel.Information) < int(verbosity):
+            verbose_argument = " --verbose"
+        else:
+            verbose_argument = GeneralUtilities.empty_string
+
+        twine_argument = f"upload{gpg_identity_argument} --repository {repository} --non-interactive {filename} --disable-progress-bar"
+        twine_argument = f"{twine_argument} --username __token__ --password {api_key}{verbose_argument}"
+        self.__sc.run_program("twine", twine_argument, folder, throw_exception_if_exitcode_is_not_zero=True)
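For reference, the assembled twine call corresponds to a standalone invocation like the sketch below; the wheel filename, working directory and token value are placeholders, while --non-interactive, --disable-progress-bar and the __token__ username are the options used above:

import subprocess

subprocess.run(
    [
        "twine", "upload",
        "--repository", "pypi",
        "--non-interactive",
        "--disable-progress-bar",
        "--username", "__token__",
        "--password", "pypi-XXXXXXXX",                 # API-token placeholder
        "scriptcollection-4.0.13-py3-none-any.whl",    # example wheel filename
    ],
    check=True,
    cwd="Other/Artifacts/BuildResult_Wheel",           # hypothetical folder containing the wheel
)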