ScriptCollection 3.5.164__py3-none-any.whl → 4.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ScriptCollection/CertificateUpdater.py +1 -1
- ScriptCollection/Executables.py +69 -21
- ScriptCollection/GeneralUtilities.py +22 -7
- ScriptCollection/ProgramRunnerBase.py +0 -5
- ScriptCollection/ProgramRunnerEpew.py +27 -5
- ScriptCollection/ProgramRunnerMock.py +2 -0
- ScriptCollection/SCLog.py +5 -5
- ScriptCollection/ScriptCollectionCore.py +106 -120
- {scriptcollection-3.5.164.dist-info → scriptcollection-4.0.11.dist-info}/METADATA +1 -1
- scriptcollection-4.0.11.dist-info/RECORD +17 -0
- ScriptCollection/TasksForCommonProjectStructure.py +0 -3626
- scriptcollection-3.5.164.dist-info/RECORD +0 -17
- {scriptcollection-3.5.164.dist-info → scriptcollection-4.0.11.dist-info}/WHEEL +0 -0
- {scriptcollection-3.5.164.dist-info → scriptcollection-4.0.11.dist-info}/entry_points.txt +0 -0
- {scriptcollection-3.5.164.dist-info → scriptcollection-4.0.11.dist-info}/top_level.txt +0 -0
ScriptCollection/TasksForCommonProjectStructure.py
@@ -1,3626 +0,0 @@
|
|
1
|
-
from datetime import datetime, timedelta, timezone
|
2
|
-
from graphlib import TopologicalSorter
|
3
|
-
import os
|
4
|
-
from pathlib import Path
|
5
|
-
from functools import cmp_to_key
|
6
|
-
import shutil
|
7
|
-
import math
|
8
|
-
import tarfile
|
9
|
-
import re
|
10
|
-
import urllib.request
|
11
|
-
import zipfile
|
12
|
-
import json
|
13
|
-
import configparser
|
14
|
-
import tempfile
|
15
|
-
import uuid
|
16
|
-
import yaml
|
17
|
-
import requests
|
18
|
-
from packaging import version
|
19
|
-
import xmlschema
|
20
|
-
from OpenSSL import crypto
|
21
|
-
from lxml import etree
|
22
|
-
from .GeneralUtilities import GeneralUtilities
|
23
|
-
from .ScriptCollectionCore import ScriptCollectionCore
|
24
|
-
from .SCLog import SCLog, LogLevel
|
25
|
-
from .ProgramRunnerEpew import ProgramRunnerEpew
|
26
|
-
from .ImageUpdater import ImageUpdater, VersionEcholon
|
27
|
-
|
28
|
-
|
29
|
-
class CreateReleaseConfiguration():
|
30
|
-
projectname: str
|
31
|
-
remotename: str
|
32
|
-
artifacts_folder: str
|
33
|
-
push_artifacts_scripts_folder: str
|
34
|
-
verbosity: int
|
35
|
-
reference_repository_remote_name: str = None
|
36
|
-
reference_repository_branch_name: str = "main"
|
37
|
-
build_repository_branch: str = "main"
|
38
|
-
public_repository_url: str
|
39
|
-
additional_arguments_file: str = None
|
40
|
-
repository_folder_name: str = None
|
41
|
-
repository_folder: str = None
|
42
|
-
__sc: ScriptCollectionCore = None
|
43
|
-
|
44
|
-
def __init__(self, projectname: str, remotename: str, build_artifacts_target_folder: str, push_artifacts_scripts_folder: str, verbosity: int, repository_folder: str, additional_arguments_file: str, repository_folder_name: str):
|
45
|
-
self.__sc = ScriptCollectionCore()
|
46
|
-
self.projectname = projectname
|
47
|
-
self.remotename = remotename
|
48
|
-
self.artifacts_folder = build_artifacts_target_folder
|
49
|
-
self.push_artifacts_scripts_folder = push_artifacts_scripts_folder
|
50
|
-
self.verbosity = verbosity
|
51
|
-
if self.remotename is None:
|
52
|
-
self.public_repository_url = None
|
53
|
-
else:
|
54
|
-
self.public_repository_url = self.__sc.git_get_remote_url(repository_folder, remotename)
|
55
|
-
self.reference_repository_remote_name = self.remotename
|
56
|
-
self.additional_arguments_file = additional_arguments_file
|
57
|
-
self.repository_folder = repository_folder
|
58
|
-
self.repository_folder_name = repository_folder_name
|
59
|
-
|
60
|
-
|
61
|
-
class CreateReleaseInformationForProjectInCommonProjectFormat:
|
62
|
-
projectname: str
|
63
|
-
repository: str
|
64
|
-
artifacts_folder: str
|
65
|
-
verbosity: int = 1
|
66
|
-
reference_repository: str = None
|
67
|
-
public_repository_url: str = None
|
68
|
-
target_branch_name: str = None
|
69
|
-
push_artifacts_scripts_folder: str = None
|
70
|
-
target_environmenttype_for_qualitycheck: str = "QualityCheck"
|
71
|
-
target_environmenttype_for_productive: str = "Productive"
|
72
|
-
additional_arguments_file: str = None
|
73
|
-
export_target: str = None
|
74
|
-
|
75
|
-
def __init__(self, repository: str, artifacts_folder: str, projectname: str, public_repository_url: str, target_branch_name: str, additional_arguments_file: str, export_target: str, push_artifacts_scripts_folder: str):
|
76
|
-
self.repository = repository
|
77
|
-
self.public_repository_url = public_repository_url
|
78
|
-
self.target_branch_name = target_branch_name
|
79
|
-
self.artifacts_folder = artifacts_folder
|
80
|
-
self.additional_arguments_file = additional_arguments_file
|
81
|
-
self.export_target = export_target
|
82
|
-
self.push_artifacts_scripts_folder = push_artifacts_scripts_folder
|
83
|
-
if projectname is None:
|
84
|
-
projectname = os.path.basename(self.repository)
|
85
|
-
else:
|
86
|
-
self.projectname = projectname
|
87
|
-
self.reference_repository = f"{repository}Reference"
|
88
|
-
|
89
|
-
|
90
|
-
class MergeToStableBranchInformationForProjectInCommonProjectFormat:
|
91
|
-
repository: str
|
92
|
-
sourcebranch: str = "main"
|
93
|
-
targetbranch: str = "stable"
|
94
|
-
sign_git_tags: bool = True
|
95
|
-
target_environmenttype_for_qualitycheck: str = "QualityCheck"
|
96
|
-
target_environmenttype_for_productive: str = "Productive"
|
97
|
-
additional_arguments_file: str = None
|
98
|
-
export_target: str = None
|
99
|
-
|
100
|
-
push_source_branch: bool = False
|
101
|
-
push_source_branch_remote_name: str = None
|
102
|
-
push_target_branch: bool = False
|
103
|
-
push_target_branch_remote_name: str = None
|
104
|
-
|
105
|
-
verbosity: int = 1
|
106
|
-
|
107
|
-
def __init__(self, repository: str, additional_arguments_file: str, export_target: str):
|
108
|
-
self.repository = repository
|
109
|
-
self.additional_arguments_file = additional_arguments_file
|
110
|
-
self.export_target = export_target
|
111
|
-
|
112
|
-
|
113
|
-
class TasksForCommonProjectStructure:
|
114
|
-
__sc: ScriptCollectionCore = None
|
115
|
-
reference_latest_version_of_xsd_when_generating_xml: bool = True
|
116
|
-
validate_developers_of_repository: bool = True
|
117
|
-
dotnet_runsettings_file = "runsettings.xml"
|
118
|
-
|
119
|
-
def __init__(self, sc: ScriptCollectionCore = None):
|
120
|
-
if sc is None:
|
121
|
-
log: SCLog = SCLog()
|
122
|
-
log.loglevel = LogLevel.Information
|
123
|
-
sc = ScriptCollectionCore()
|
124
|
-
sc.log = log
|
125
|
-
self.__sc = sc
|
126
|
-
|
127
|
-
@GeneralUtilities.check_arguments
|
128
|
-
def is_codeunit_folder(self, codeunit_folder: str) -> bool:
|
129
|
-
repo_folder = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
|
130
|
-
if not self.__sc.is_git_repository(repo_folder):
|
131
|
-
return False
|
132
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
133
|
-
codeunit_file: str = os.path.join(codeunit_folder, f"{codeunit_name}.codeunit.xml")
|
134
|
-
if not os.path.isfile(codeunit_file):
|
135
|
-
return False
|
136
|
-
return True
|
137
|
-
|
138
|
-
@GeneralUtilities.check_arguments
|
139
|
-
def assert_is_codeunit_folder(self, codeunit_folder: str) -> str:
|
140
|
-
repo_folder = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
|
141
|
-
if not self.__sc.is_git_repository(repo_folder):
|
142
|
-
raise ValueError(f"'{codeunit_folder}' can not be a valid codeunit-folder because '{repo_folder}' is not a git-repository.")
|
143
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
144
|
-
codeunit_file: str = os.path.join(codeunit_folder, f"{codeunit_name}.codeunit.xml")
|
145
|
-
if not os.path.isfile(codeunit_file):
|
146
|
-
raise ValueError(f"'{codeunit_folder}' is no codeunit-folder because '{codeunit_file}' does not exist.")
|
147
|
-
|
148
|
-
@staticmethod
|
149
|
-
@GeneralUtilities.check_arguments
|
150
|
-
def get_development_environment_name() -> str:
|
151
|
-
return "Development"
|
152
|
-
|
153
|
-
@staticmethod
|
154
|
-
@GeneralUtilities.check_arguments
|
155
|
-
def get_qualitycheck_environment_name() -> str:
|
156
|
-
return "QualityCheck"
|
157
|
-
|
158
|
-
@staticmethod
|
159
|
-
@GeneralUtilities.check_arguments
|
160
|
-
def get_productive_environment_name() -> str:
|
161
|
-
return "Productive"
|
162
|
-
|
163
|
-
@GeneralUtilities.check_arguments
|
164
|
-
def get_build_folder(self, repository_folder: str, codeunit_name: str) -> str:
|
165
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
166
|
-
return os.path.join(repository_folder, codeunit_name, "Other", "Build")
|
167
|
-
|
168
|
-
@GeneralUtilities.check_arguments
|
169
|
-
def get_artifacts_folder(self, repository_folder: str, codeunit_name: str) -> str:
|
170
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
171
|
-
return os.path.join(repository_folder, codeunit_name, "Other", "Artifacts")
|
172
|
-
|
173
|
-
@GeneralUtilities.check_arguments
|
174
|
-
def get_wheel_file(self, repository_folder: str, codeunit_name: str) -> str:
|
175
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
176
|
-
return self.__sc.find_file_by_extension(os.path.join(self.get_artifacts_folder(repository_folder, codeunit_name), "BuildResult_Wheel"), "whl")
|
177
|
-
|
178
|
-
@GeneralUtilities.check_arguments
|
179
|
-
def get_testcoverage_threshold_from_codeunit_file(self, codeunit_file: str):
|
180
|
-
root: etree._ElementTree = etree.parse(codeunit_file)
|
181
|
-
return float(str(root.xpath('//cps:properties/cps:testsettings/@minimalcodecoverageinpercent', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0]))
|
182
|
-
|
183
|
-
@GeneralUtilities.check_arguments
|
184
|
-
def codeunit_has_testable_sourcecode(self, codeunit_file: str) -> bool:
|
185
|
-
root: etree._ElementTree = etree.parse(codeunit_file)
|
186
|
-
return GeneralUtilities.string_to_boolean(str(root.xpath('//cps:properties/@codeunithastestablesourcecode', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0]))
|
187
|
-
|
188
|
-
@GeneralUtilities.check_arguments
|
189
|
-
def codeunit_throws_exception_if_codeunitfile_is_not_validatable(self, codeunit_file: str) -> bool:
|
190
|
-
root: etree._ElementTree = etree.parse(codeunit_file)
|
191
|
-
return GeneralUtilities.string_to_boolean(str(root.xpath('//cps:properties/@throwexceptionifcodeunitfilecannotbevalidated', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0]))
|
192
|
-
|
193
|
-
@GeneralUtilities.check_arguments
|
194
|
-
def codeunit_has_updatable_dependencies(self, codeunit_file: str) -> bool:
|
195
|
-
root: etree._ElementTree = etree.parse(codeunit_file)
|
196
|
-
return GeneralUtilities.string_to_boolean(str(root.xpath('//cps:properties/@codeunithasupdatabledependencies', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0]))
|
197
|
-
|
198
|
-
@GeneralUtilities.check_arguments
|
199
|
-
def get_codeunit_description(self, codeunit_file: str) -> bool:
|
200
|
-
root: etree._ElementTree = etree.parse(codeunit_file)
|
201
|
-
return str(root.xpath('//cps:properties/@description', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0])
|
202
|
-
|
203
|
-
@GeneralUtilities.check_arguments
|
204
|
-
def check_testcoverage(self, testcoverage_file_in_cobertura_format: str, repository_folder: str, codeunitname: str) -> None:
|
205
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
206
|
-
GeneralUtilities.write_message_to_stdout("Check testcoverage..")
|
207
|
-
root: etree._ElementTree = etree.parse(testcoverage_file_in_cobertura_format)
|
208
|
-
if len(root.xpath('//coverage/packages/package')) != 1:
|
209
|
-
raise ValueError(f"'{testcoverage_file_in_cobertura_format}' must contain exactly 1 package.")
|
210
|
-
if root.xpath('//coverage/packages/package[1]/@name')[0] != codeunitname:
|
211
|
-
raise ValueError(f"The package name of the tested package in '{testcoverage_file_in_cobertura_format}' must be '{codeunitname}'.")
|
212
|
-
coverage_in_percent = round(float(str(root.xpath('//coverage/packages/package[1]/@line-rate')[0]))*100, 2)
|
213
|
-
technicalminimalrequiredtestcoverageinpercent = 0
|
214
|
-
if not technicalminimalrequiredtestcoverageinpercent < coverage_in_percent:
|
215
|
-
raise ValueError(f"The test-coverage of package '{codeunitname}' must be greater than {technicalminimalrequiredtestcoverageinpercent}%.")
|
216
|
-
codeunit_file = os.path.join(repository_folder, codeunitname, f"{codeunitname}.codeunit.xml")
|
217
|
-
minimalrequiredtestcoverageinpercent = self.get_testcoverage_threshold_from_codeunit_file(codeunit_file)
|
218
|
-
if (coverage_in_percent < minimalrequiredtestcoverageinpercent):
|
219
|
-
raise ValueError(f"The testcoverage for codeunit {codeunitname} must be {minimalrequiredtestcoverageinpercent}% or more but is {coverage_in_percent}%.")
|
220
|
-
|
221
|
-
@GeneralUtilities.check_arguments
|
222
|
-
def replace_version_in_python_file(self, file: str, new_version_value: str) -> None:
|
223
|
-
GeneralUtilities.write_text_to_file(file, re.sub("version = \"\\d+\\.\\d+\\.\\d+\"", f"version = \"{new_version_value}\"", GeneralUtilities.read_text_from_file(file)))
|
224
|
-
|
225
|
-
@GeneralUtilities.check_arguments
|
226
|
-
def standardized_tasks_run_testcases_for_python_codeunit(self, run_testcases_file: str, generate_badges: bool, verbosity: int, targetenvironmenttype: str, commandline_arguments: list[str]) -> None:
|
227
|
-
codeunitname: str = Path(os.path.dirname(run_testcases_file)).parent.parent.name
|
228
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
229
|
-
repository_folder: str = str(Path(os.path.dirname(run_testcases_file)).parent.parent.parent.absolute())
|
230
|
-
codeunit_folder = os.path.join(repository_folder, codeunitname)
|
231
|
-
self.__sc.run_program("coverage", f"run -m pytest -s ./{codeunitname}Tests", codeunit_folder, verbosity=verbosity)
|
232
|
-
self.__sc.run_program("coverage", "xml", codeunit_folder, verbosity=verbosity)
|
233
|
-
coveragefolder = os.path.join(repository_folder, codeunitname, "Other/Artifacts/TestCoverage")
|
234
|
-
GeneralUtilities.ensure_directory_exists(coveragefolder)
|
235
|
-
coveragefile = os.path.join(coveragefolder, "TestCoverage.xml")
|
236
|
-
GeneralUtilities.ensure_file_does_not_exist(coveragefile)
|
237
|
-
os.rename(os.path.join(repository_folder, codeunitname, "coverage.xml"), coveragefile)
|
238
|
-
self.run_testcases_common_post_task(repository_folder, codeunitname, verbosity, generate_badges, targetenvironmenttype, commandline_arguments)
|
239
|
-
|
240
|
-
@GeneralUtilities.check_arguments
|
241
|
-
def copy_source_files_to_output_directory(self, buildscript_file: str) -> None:
|
242
|
-
GeneralUtilities.write_message_to_stdout("Copy sourcecode...")
|
243
|
-
folder = os.path.dirname(os.path.realpath(buildscript_file))
|
244
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("../..", folder)
|
245
|
-
result = self.__sc.run_program_argsasarray("git", ["ls-tree", "-r", "HEAD", "--name-only"], codeunit_folder)
|
246
|
-
files = [f for f in result[1].split('\n') if len(f) > 0]
|
247
|
-
for file in files:
|
248
|
-
full_source_file = os.path.join(codeunit_folder, file)
|
249
|
-
if os.path.isfile(full_source_file):
|
250
|
-
# Reson of isdir-check:
|
251
|
-
# Prevent trying to copy files which are not exist.
|
252
|
-
# Otherwise exceptions occurr because uncommitted deletions of files will result in an error here.
|
253
|
-
target_file = os.path.join(codeunit_folder, "Other", "Artifacts", "SourceCode", file)
|
254
|
-
target_folder = os.path.dirname(target_file)
|
255
|
-
GeneralUtilities.ensure_directory_exists(target_folder)
|
256
|
-
shutil.copyfile(full_source_file, target_file)
|
257
|
-
|
258
|
-
@GeneralUtilities.check_arguments
|
259
|
-
def standardized_tasks_build_for_dart_project_in_common_project_structure(self, build_script_file: str, verbosity: int, targets: list[str], args: list[str], package_name: str = None):
|
260
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("../../..", build_script_file)
|
261
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
262
|
-
src_folder: str = None
|
263
|
-
if package_name is None:
|
264
|
-
src_folder = codeunit_folder
|
265
|
-
else:
|
266
|
-
src_folder = GeneralUtilities.resolve_relative_path(package_name, codeunit_folder) # TODO replace packagename
|
267
|
-
artifacts_folder = os.path.join(codeunit_folder, "Other", "Artifacts")
|
268
|
-
verbosity = self.get_verbosity_from_commandline_arguments(args, verbosity)
|
269
|
-
target_names: dict[str, str] = {
|
270
|
-
"web": "WebApplication",
|
271
|
-
"windows": "Windows",
|
272
|
-
"ios": "IOS",
|
273
|
-
"appbundle": "Android",
|
274
|
-
}
|
275
|
-
for target in targets:
|
276
|
-
GeneralUtilities.write_message_to_stdout(f"Build flutter-codeunit {codeunit_name} for target {target_names[target]}...")
|
277
|
-
self.run_with_epew("flutter", f"build {target}", src_folder, verbosity)
|
278
|
-
if target == "web":
|
279
|
-
web_relase_folder = os.path.join(src_folder, "build/web")
|
280
|
-
web_folder = os.path.join(artifacts_folder, "BuildResult_WebApplication")
|
281
|
-
GeneralUtilities.ensure_directory_does_not_exist(web_folder)
|
282
|
-
GeneralUtilities.ensure_directory_exists(web_folder)
|
283
|
-
GeneralUtilities.copy_content_of_folder(web_relase_folder, web_folder)
|
284
|
-
elif target == "windows":
|
285
|
-
windows_release_folder = os.path.join(src_folder, "build/windows/x64/runner/Release")
|
286
|
-
windows_folder = os.path.join(artifacts_folder, "BuildResult_Windows")
|
287
|
-
GeneralUtilities.ensure_directory_does_not_exist(windows_folder)
|
288
|
-
GeneralUtilities.ensure_directory_exists(windows_folder)
|
289
|
-
GeneralUtilities.copy_content_of_folder(windows_release_folder, windows_folder)
|
290
|
-
elif target == "ios":
|
291
|
-
raise ValueError("building for ios is not implemented yet")
|
292
|
-
elif target == "appbundle":
|
293
|
-
aab_folder = os.path.join(artifacts_folder, "BuildResult_AAB")
|
294
|
-
GeneralUtilities.ensure_directory_does_not_exist(aab_folder)
|
295
|
-
GeneralUtilities.ensure_directory_exists(aab_folder)
|
296
|
-
aab_relase_folder = os.path.join(src_folder, "build/app/outputs/bundle/release")
|
297
|
-
aab_file_original = self.__sc.find_file_by_extension(aab_relase_folder, "aab")
|
298
|
-
aab_file = os.path.join(aab_folder, f"{codeunit_name}.aab")
|
299
|
-
shutil.copyfile(aab_file_original, aab_file)
|
300
|
-
bundletool = os.path.join(codeunit_folder, "Other/Resources/AndroidAppBundleTool/bundletool.jar")
|
301
|
-
apk_folder = os.path.join(artifacts_folder, "BuildResult_APK")
|
302
|
-
GeneralUtilities.ensure_directory_does_not_exist(apk_folder)
|
303
|
-
GeneralUtilities.ensure_directory_exists(apk_folder)
|
304
|
-
apks_file = f"{apk_folder}/{codeunit_name}.apks"
|
305
|
-
self.__sc.run_program("java", f"-jar {bundletool} build-apks --bundle={aab_file} --output={apks_file} --mode=universal", aab_relase_folder, verbosity)
|
306
|
-
with zipfile.ZipFile(apks_file, "r") as zip_ref:
|
307
|
-
zip_ref.extract("universal.apk", apk_folder)
|
308
|
-
GeneralUtilities.ensure_file_does_not_exist(apks_file)
|
309
|
-
os.rename(f"{apk_folder}/universal.apk", f"{apk_folder}/{codeunit_name}.apk")
|
310
|
-
else:
|
311
|
-
raise ValueError(f"Not supported target: {target}")
|
312
|
-
self.copy_source_files_to_output_directory(build_script_file)
|
313
|
-
|
314
|
-
@GeneralUtilities.check_arguments
|
315
|
-
def standardized_tasks_build_for_python_codeunit(self, buildscript_file: str, verbosity: int, targetenvironmenttype: str, commandline_arguments: list[str]) -> None:
|
316
|
-
codeunitname: str = Path(os.path.dirname(buildscript_file)).parent.parent.name
|
317
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
318
|
-
codeunit_folder = str(Path(os.path.dirname(buildscript_file)).parent.parent.absolute())
|
319
|
-
repository_folder: str = str(Path(os.path.dirname(buildscript_file)).parent.parent.parent.absolute())
|
320
|
-
target_directory = GeneralUtilities.resolve_relative_path("../Artifacts/BuildResult_Wheel", os.path.join(self.get_artifacts_folder(repository_folder, codeunitname)))
|
321
|
-
GeneralUtilities.ensure_directory_exists(target_directory)
|
322
|
-
self.__sc.run_program("python", f"-m build --wheel --outdir {target_directory}", codeunit_folder, verbosity=verbosity)
|
323
|
-
self.generate_bom_for_python_project(verbosity, codeunit_folder, codeunitname, commandline_arguments)
|
324
|
-
self.copy_source_files_to_output_directory(buildscript_file)
|
325
|
-
|
326
|
-
@GeneralUtilities.check_arguments
|
327
|
-
def generate_bom_for_python_project(self, verbosity: int, codeunit_folder: str, codeunitname: str, commandline_arguments: list[str]) -> None:
|
328
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
329
|
-
repository_folder = os.path.dirname(codeunit_folder)
|
330
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
331
|
-
codeunitversion = self.get_version_of_codeunit_folder(codeunit_folder)
|
332
|
-
bom_folder = "Other/Artifacts/BOM"
|
333
|
-
bom_folder_full = os.path.join(codeunit_folder, bom_folder)
|
334
|
-
GeneralUtilities.ensure_directory_exists(bom_folder_full)
|
335
|
-
if not os.path.isfile(os.path.join(codeunit_folder, "requirements.txt")):
|
336
|
-
raise ValueError(f"Codeunit {codeunitname} does not have a 'requirements.txt'-file.")
|
337
|
-
# TODO check that all values from setup.cfg are contained in requirements.txt
|
338
|
-
result = self.__sc.run_program("cyclonedx-py", "requirements", codeunit_folder, verbosity=verbosity)
|
339
|
-
bom_file_relative_json = f"{bom_folder}/{codeunitname}.{codeunitversion}.bom.json"
|
340
|
-
bom_file_relative_xml = f"{bom_folder}/{codeunitname}.{codeunitversion}.bom.xml"
|
341
|
-
bom_file_json = os.path.join(codeunit_folder, bom_file_relative_json)
|
342
|
-
bom_file_xml = os.path.join(codeunit_folder, bom_file_relative_xml)
|
343
|
-
|
344
|
-
GeneralUtilities.ensure_file_exists(bom_file_json)
|
345
|
-
GeneralUtilities.write_text_to_file(bom_file_json, result[1])
|
346
|
-
self.ensure_cyclonedxcli_is_available(repository_folder)
|
347
|
-
cyclonedx_exe = os.path.join(repository_folder, "Other/Resources/CycloneDXCLI/cyclonedx-cli")
|
348
|
-
if GeneralUtilities.current_system_is_windows():
|
349
|
-
cyclonedx_exe = cyclonedx_exe+".exe"
|
350
|
-
self.__sc.run_program(cyclonedx_exe, f"convert --input-file ./{codeunitname}/{bom_file_relative_json} --input-format json --output-file ./{codeunitname}/{bom_file_relative_xml} --output-format xml", repository_folder)
|
351
|
-
self.__sc.format_xml_file(bom_file_xml)
|
352
|
-
GeneralUtilities.ensure_file_does_not_exist(bom_file_json)
|
353
|
-
|
354
|
-
@GeneralUtilities.check_arguments
|
355
|
-
def standardized_tasks_push_wheel_file_to_registry(self, wheel_file: str, api_key: str, repository: str, gpg_identity: str, verbosity: int) -> None:
|
356
|
-
# repository-value when PyPi should be used: "pypi"
|
357
|
-
# gpg_identity-value when wheel-file should not be signed: None
|
358
|
-
folder = os.path.dirname(wheel_file)
|
359
|
-
filename = os.path.basename(wheel_file)
|
360
|
-
|
361
|
-
if gpg_identity is None:
|
362
|
-
gpg_identity_argument = GeneralUtilities.empty_string
|
363
|
-
else:
|
364
|
-
gpg_identity_argument = GeneralUtilities.empty_string # f" --sign --identity {gpg_identity}"
|
365
|
-
# disabled due to https://blog.pypi.org/posts/2023-05-23-removing-pgp/
|
366
|
-
|
367
|
-
if verbosity > 2:
|
368
|
-
verbose_argument = " --verbose"
|
369
|
-
else:
|
370
|
-
verbose_argument = GeneralUtilities.empty_string
|
371
|
-
|
372
|
-
twine_argument = f"upload{gpg_identity_argument} --repository {repository} --non-interactive {filename} --disable-progress-bar"
|
373
|
-
twine_argument = f"{twine_argument} --username __token__ --password {api_key}{verbose_argument}"
|
374
|
-
self.__sc.run_program("twine", twine_argument, folder, verbosity=verbosity, throw_exception_if_exitcode_is_not_zero=True)
|
375
|
-
|
376
|
-
@GeneralUtilities.check_arguments
|
377
|
-
def push_wheel_build_artifact(self, push_build_artifacts_file, product_name, codeunitname, repository: str, apikey: str, gpg_identity: str, verbosity: int, commandline_arguments: list[str], repository_folder_name: str) -> None:
|
378
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
379
|
-
folder_of_this_file = os.path.dirname(push_build_artifacts_file)
|
380
|
-
repository_folder = GeneralUtilities.resolve_relative_path(f"..{os.path.sep}../Submodules{os.path.sep}{repository_folder_name}", folder_of_this_file)
|
381
|
-
wheel_file = self.get_wheel_file(repository_folder, codeunitname)
|
382
|
-
self.standardized_tasks_push_wheel_file_to_registry(wheel_file, apikey, repository, gpg_identity, verbosity)
|
383
|
-
|
384
|
-
@GeneralUtilities.check_arguments
|
385
|
-
def get_version_of_codeunit_file_content(self, codeunit_file_content: str) -> str:
|
386
|
-
root: etree._ElementTree = etree.fromstring(codeunit_file_content.encode("utf-8"))
|
387
|
-
result = str(root.xpath('//cps:version/text()', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0])
|
388
|
-
return result
|
389
|
-
|
390
|
-
@GeneralUtilities.check_arguments
|
391
|
-
def get_version_of_codeunit(self, codeunit_file: str) -> None:
|
392
|
-
return self.get_version_of_codeunit_file_content(GeneralUtilities.read_text_from_file(codeunit_file))
|
393
|
-
|
394
|
-
@GeneralUtilities.check_arguments
|
395
|
-
def get_version_of_codeunit_folder(self, codeunit_folder: str) -> None:
|
396
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
397
|
-
codeunit_file = os.path.join(codeunit_folder, f"{os.path.basename(codeunit_folder)}.codeunit.xml")
|
398
|
-
return self.get_version_of_codeunit(codeunit_file)
|
399
|
-
|
400
|
-
@staticmethod
|
401
|
-
@GeneralUtilities.check_arguments
|
402
|
-
def get_string_value_from_commandline_arguments(commandline_arguments: list[str], property_name: str, default_value: str) -> str:
|
403
|
-
result = TasksForCommonProjectStructure.get_property_from_commandline_arguments(commandline_arguments, property_name)
|
404
|
-
if result is None:
|
405
|
-
return default_value
|
406
|
-
else:
|
407
|
-
return result
|
408
|
-
|
409
|
-
@staticmethod
|
410
|
-
@GeneralUtilities.check_arguments
|
411
|
-
def get_is_pre_merge_value_from_commandline_arguments(commandline_arguments: list[str], default_value: bool) -> bool:
|
412
|
-
result = TasksForCommonProjectStructure.get_property_from_commandline_arguments(commandline_arguments, "is_pre_merge")
|
413
|
-
if result is None:
|
414
|
-
return default_value
|
415
|
-
else:
|
416
|
-
return GeneralUtilities.string_to_boolean(result)
|
417
|
-
|
418
|
-
@staticmethod
|
419
|
-
@GeneralUtilities.check_arguments
|
420
|
-
def get_assume_dependent_codeunits_are_already_built_from_commandline_arguments(commandline_arguments: list[str], default_value: bool) -> bool:
|
421
|
-
result = TasksForCommonProjectStructure.get_property_from_commandline_arguments(commandline_arguments, "assume_dependent_codeunits_are_already_built")
|
422
|
-
if result is None:
|
423
|
-
return default_value
|
424
|
-
else:
|
425
|
-
return GeneralUtilities.string_to_boolean(result)
|
426
|
-
|
427
|
-
@staticmethod
|
428
|
-
@GeneralUtilities.check_arguments
|
429
|
-
def get_verbosity_from_commandline_arguments(commandline_arguments: list[str], default_value: int) -> int:
|
430
|
-
result = TasksForCommonProjectStructure.get_property_from_commandline_arguments(commandline_arguments, "verbosity")
|
431
|
-
if result is None:
|
432
|
-
return default_value
|
433
|
-
else:
|
434
|
-
return int(result)
|
435
|
-
|
436
|
-
@staticmethod
|
437
|
-
@GeneralUtilities.check_arguments
|
438
|
-
def get_targetenvironmenttype_from_commandline_arguments(commandline_arguments: list[str], default_value: str) -> str:
|
439
|
-
result = TasksForCommonProjectStructure.get_property_from_commandline_arguments(commandline_arguments, "targetenvironmenttype")
|
440
|
-
if result is None:
|
441
|
-
return default_value
|
442
|
-
else:
|
443
|
-
return result
|
444
|
-
|
445
|
-
@staticmethod
|
446
|
-
@GeneralUtilities.check_arguments
|
447
|
-
def get_additionalargumentsfile_from_commandline_arguments(commandline_arguments: list[str], default_value: str) -> str:
|
448
|
-
result = TasksForCommonProjectStructure.get_property_from_commandline_arguments(commandline_arguments, "additionalargumentsfile")
|
449
|
-
if result is None:
|
450
|
-
return default_value
|
451
|
-
else:
|
452
|
-
return result
|
453
|
-
|
454
|
-
@staticmethod
|
455
|
-
@GeneralUtilities.check_arguments
|
456
|
-
def get_filestosign_from_commandline_arguments(commandline_arguments: list[str], default_value: dict[str, str]) -> dict[str, str]:
|
457
|
-
result_plain = TasksForCommonProjectStructure.get_property_from_commandline_arguments(commandline_arguments, "sign")
|
458
|
-
if result_plain is None:
|
459
|
-
return default_value
|
460
|
-
else:
|
461
|
-
result: dict[str, str] = dict[str, str]()
|
462
|
-
files_tuples = GeneralUtilities.to_list(result_plain, ";")
|
463
|
-
for files_tuple in files_tuples:
|
464
|
-
splitted = files_tuple.split("=")
|
465
|
-
result[splitted[0]] = splitted[1]
|
466
|
-
return result
|
467
|
-
|
468
|
-
@staticmethod
|
469
|
-
@GeneralUtilities.check_arguments
|
470
|
-
def get_property_from_commandline_arguments(commandline_arguments: list[str], property_name: str) -> str:
|
471
|
-
result: str = None
|
472
|
-
count = len(commandline_arguments)
|
473
|
-
loop_index = -1
|
474
|
-
for commandline_argument in commandline_arguments:
|
475
|
-
loop_index = loop_index+1
|
476
|
-
if loop_index < count-1:
|
477
|
-
prefix = f"--overwrite_{property_name}"
|
478
|
-
if commandline_argument == prefix:
|
479
|
-
result = commandline_arguments[loop_index+1]
|
480
|
-
return result
|
481
|
-
return result
|
482
|
-
|
483
|
-
@GeneralUtilities.check_arguments
|
484
|
-
def update_version_of_codeunit(self, common_tasks_file: str, current_version: str) -> None:
|
485
|
-
codeunit_name: str = os.path.basename(GeneralUtilities.resolve_relative_path("..", os.path.dirname(common_tasks_file)))
|
486
|
-
codeunit_file: str = os.path.join(GeneralUtilities.resolve_relative_path("..", os.path.dirname(common_tasks_file)), f"{codeunit_name}.codeunit.xml")
|
487
|
-
self.write_version_to_codeunit_file(codeunit_file, current_version)
|
488
|
-
|
489
|
-
@GeneralUtilities.check_arguments
|
490
|
-
def t4_transform(self, commontasks_script_file_of_current_file: str, verbosity: int, ignore_git_ignored_files: bool = True):
|
491
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("../..", commontasks_script_file_of_current_file)
|
492
|
-
self.__ensure_grylibrary_is_available(codeunit_folder)
|
493
|
-
repository_folder: str = os.path.dirname(codeunit_folder)
|
494
|
-
codeunitname: str = os.path.basename(codeunit_folder)
|
495
|
-
codeunit_folder = os.path.join(repository_folder, codeunitname)
|
496
|
-
for search_result in Path(codeunit_folder).glob('**/*.tt'):
|
497
|
-
tt_file = str(search_result)
|
498
|
-
relative_path_to_tt_file_from_repository = str(Path(tt_file).relative_to(repository_folder))
|
499
|
-
if (not ignore_git_ignored_files) or (ignore_git_ignored_files and not self.__sc.file_is_git_ignored(relative_path_to_tt_file_from_repository, repository_folder)):
|
500
|
-
relative_path_to_tt_file_from_codeunit_file = str(Path(tt_file).relative_to(codeunit_folder))
|
501
|
-
argument = [f"--parameter=repositoryFolder={repository_folder}", f"--parameter=codeUnitName={codeunitname}", relative_path_to_tt_file_from_codeunit_file]
|
502
|
-
self.__sc.run_program_argsasarray("t4", argument, codeunit_folder, verbosity=verbosity)
|
503
|
-
|
504
|
-
@GeneralUtilities.check_arguments
|
505
|
-
def get_resource_from_global_resource(self, codeunit_folder: str, resource_name: str):
|
506
|
-
repository_folder: str = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
|
507
|
-
source_folder: str = os.path.join(repository_folder, "Other", "Resources", resource_name)
|
508
|
-
target_folder: str = os.path.join(codeunit_folder, "Other", "Resources", resource_name)
|
509
|
-
GeneralUtilities.ensure_folder_exists_and_is_empty(target_folder)
|
510
|
-
GeneralUtilities.copy_content_of_folder(source_folder, target_folder)
|
511
|
-
|
512
|
-
@GeneralUtilities.check_arguments
|
513
|
-
def standardized_tasks_generate_reference_by_docfx(self, generate_reference_script_file: str, verbosity: int, targetenvironmenttype: str, commandline_arguments: list[str]) -> None:
|
514
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
515
|
-
folder_of_current_file = os.path.dirname(generate_reference_script_file)
|
516
|
-
generated_reference_folder = GeneralUtilities.resolve_relative_path("../Artifacts/Reference", folder_of_current_file)
|
517
|
-
GeneralUtilities.ensure_directory_does_not_exist(generated_reference_folder)
|
518
|
-
GeneralUtilities.ensure_directory_exists(generated_reference_folder)
|
519
|
-
obj_folder = os.path.join(folder_of_current_file, "obj")
|
520
|
-
GeneralUtilities.ensure_directory_does_not_exist(obj_folder)
|
521
|
-
GeneralUtilities.ensure_directory_exists(obj_folder)
|
522
|
-
self.__sc.run_program("docfx", "docfx.json", folder_of_current_file, verbosity=verbosity)
|
523
|
-
# TODO generate also a darkmode-variant (darkFX for example, see https://dotnet.github.io/docfx/extensions/templates.html )
|
524
|
-
GeneralUtilities.ensure_directory_does_not_exist(obj_folder)
|
525
|
-
|
526
|
-
def standardized_task_verify_standard_format_csproj_files(self, codeunit_folder: str) -> bool:
|
527
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
528
|
-
repository_folder = os.path.dirname(codeunit_folder)
|
529
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
530
|
-
codeunit_folder = os.path.join(repository_folder, codeunit_name)
|
531
|
-
codeunit_version = self.get_version_of_codeunit_folder(codeunit_folder)
|
532
|
-
|
533
|
-
csproj_project_name = codeunit_name
|
534
|
-
csproj_file = os.path.join(codeunit_folder, csproj_project_name, csproj_project_name+".csproj")
|
535
|
-
result1: tuple[bool, str, list[str]] = self.__standardized_task_verify_standard_format_for_project_csproj_file(csproj_file, codeunit_folder, codeunit_name, codeunit_version)
|
536
|
-
if not result1[0]:
|
537
|
-
hints: str = "\n".join(result1[2])
|
538
|
-
raise ValueError(f"'{csproj_file}' with content '{GeneralUtilities.read_text_from_file(csproj_file)}' does not match the standardized .csproj-file-format which is defined by the regex '{result1[1]}'.\n{hints}")
|
539
|
-
|
540
|
-
test_csproj_project_name = csproj_project_name+"Tests"
|
541
|
-
test_csproj_file = os.path.join(codeunit_folder, test_csproj_project_name, test_csproj_project_name+".csproj")
|
542
|
-
result2: tuple[bool, str, list[str]] = self.__standardized_task_verify_standard_format_for_test_csproj_file(test_csproj_file, codeunit_name, codeunit_version)
|
543
|
-
if not result2[0]:
|
544
|
-
hints: str = "\n".join(result2[2])
|
545
|
-
raise ValueError(f"'{test_csproj_file}' with content '{GeneralUtilities.read_text_from_file(test_csproj_file)}' does not match the standardized .csproj-file-format which is defined by the regex '{result2[1]}'.\n{hints}")
|
546
|
-
|
547
|
-
def __standardized_task_verify_standard_format_for_project_csproj_file(self, csproj_file: str, codeunit_folder: str, codeunit_name: str, codeunit_version: str) -> tuple[bool, str, str]:
|
548
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
549
|
-
codeunit_name_regex = re.escape(codeunit_name)
|
550
|
-
codeunit_file = os.path.join(codeunit_folder, f"{codeunit_name}.codeunit.xml")
|
551
|
-
codeunit_description = self.get_codeunit_description(codeunit_file)
|
552
|
-
codeunit_version_regex = re.escape(codeunit_version)
|
553
|
-
codeunit_description_regex = re.escape(codeunit_description)
|
554
|
-
regex = f"""^<Project Sdk=\\"Microsoft\\.NET\\.Sdk\\">
|
555
|
-
<PropertyGroup>
|
556
|
-
<TargetFramework>([^<]+)<\\/TargetFramework>
|
557
|
-
<Authors>([^<]+)<\\/Authors>
|
558
|
-
<Version>{codeunit_version_regex}<\\/Version>
|
559
|
-
<AssemblyVersion>{codeunit_version_regex}<\\/AssemblyVersion>
|
560
|
-
<FileVersion>{codeunit_version_regex}<\\/FileVersion>
|
561
|
-
<SelfContained>false<\\/SelfContained>
|
562
|
-
<IsPackable>false<\\/IsPackable>
|
563
|
-
<PreserveCompilationContext>false<\\/PreserveCompilationContext>
|
564
|
-
<GenerateRuntimeConfigurationFiles>true<\\/GenerateRuntimeConfigurationFiles>
|
565
|
-
<Copyright>([^<]+)<\\/Copyright>
|
566
|
-
<Description>{codeunit_description_regex}<\\/Description>
|
567
|
-
<PackageProjectUrl>https:\\/\\/([^<]+)<\\/PackageProjectUrl>
|
568
|
-
<RepositoryUrl>https:\\/\\/([^<]+)\\.git<\\/RepositoryUrl>
|
569
|
-
<RootNamespace>([^<]+)\\.Core<\\/RootNamespace>
|
570
|
-
<ProduceReferenceAssembly>false<\\/ProduceReferenceAssembly>
|
571
|
-
<Nullable>(disable|enable|warnings|annotations)<\\/Nullable>
|
572
|
-
<Configurations>Development;QualityCheck;Productive<\\/Configurations>
|
573
|
-
<IsTestProject>false<\\/IsTestProject>
|
574
|
-
<LangVersion>([^<]+)<\\/LangVersion>
|
575
|
-
<PackageRequireLicenseAcceptance>true<\\/PackageRequireLicenseAcceptance>
|
576
|
-
<GenerateSerializationAssemblies>Off<\\/GenerateSerializationAssemblies>
|
577
|
-
<AppendTargetFrameworkToOutputPath>false<\\/AppendTargetFrameworkToOutputPath>
|
578
|
-
<OutputPath>\\.\\.\\\\Other\\\\Artifacts\\\\BuildResult_DotNet_win\\-x64<\\/OutputPath>
|
579
|
-
<PlatformTarget>([^<]+)<\\/PlatformTarget>
|
580
|
-
<WarningLevel>\\d<\\/WarningLevel>
|
581
|
-
<Prefer32Bit>false<\\/Prefer32Bit>
|
582
|
-
<SignAssembly>True<\\/SignAssembly>
|
583
|
-
<AssemblyOriginatorKeyFile>\\.\\.\\\\\\.\\.\\\\Other\\\\Resources\\\\PublicKeys\\\\StronglyNamedKey\\\\([^<]+)PublicKey\\.snk<\\/AssemblyOriginatorKeyFile>
|
584
|
-
<DelaySign>True<\\/DelaySign>
|
585
|
-
<NoWarn>([^<]+)<\\/NoWarn>
|
586
|
-
<WarningsAsErrors>([^<]+)<\\/WarningsAsErrors>
|
587
|
-
<ErrorLog>\\.\\.\\\\Other\\\\Resources\\\\CodeAnalysisResult\\\\{codeunit_name_regex}\\.sarif<\\/ErrorLog>
|
588
|
-
<OutputType>([^<]+)<\\/OutputType>
|
589
|
-
<DocumentationFile>\\.\\.\\\\Other\\\\Artifacts\\\\MetaInformation\\\\{codeunit_name_regex}\\.xml<\\/DocumentationFile>(\\n|.)*
|
590
|
-
<\\/PropertyGroup>
|
591
|
-
<PropertyGroup Condition=\\\"'\\$\\(Configuration\\)'=='Development'\\\">
|
592
|
-
<DebugType>full<\\/DebugType>
|
593
|
-
<DebugSymbols>true<\\/DebugSymbols>
|
594
|
-
<Optimize>false<\\/Optimize>
|
595
|
-
<DefineConstants>TRACE;DEBUG;Development<\\/DefineConstants>
|
596
|
-
<ErrorReport>prompt<\\/ErrorReport>
|
597
|
-
<\\/PropertyGroup>
|
598
|
-
<PropertyGroup Condition=\\\"'\\$\\(Configuration\\)'=='QualityCheck'\\\">
|
599
|
-
<DebugType>portable<\\/DebugType>
|
600
|
-
<DebugSymbols>true<\\/DebugSymbols>
|
601
|
-
<Optimize>false<\\/Optimize>
|
602
|
-
<DefineConstants>TRACE;QualityCheck<\\/DefineConstants>
|
603
|
-
<ErrorReport>none<\\/ErrorReport>
|
604
|
-
<\\/PropertyGroup>
|
605
|
-
<PropertyGroup Condition=\\\"'\\$\\(Configuration\\)'=='Productive'\\\">
|
606
|
-
<DebugType>none<\\/DebugType>
|
607
|
-
<DebugSymbols>false<\\/DebugSymbols>
|
608
|
-
<Optimize>true<\\/Optimize>
|
609
|
-
<DefineConstants>Productive<\\/DefineConstants>
|
610
|
-
<ErrorReport>none<\\/ErrorReport>
|
611
|
-
<\\/PropertyGroup>(\\n|.)*
|
612
|
-
<\\/Project>$"""
|
613
|
-
result = self.__standardized_task_verify_standard_format_for_csproj_files(regex, csproj_file)
|
614
|
-
return (result[0], regex, result[1])
|
615
|
-
|
616
|
-
def __standardized_task_verify_standard_format_for_test_csproj_file(self, csproj_file: str, codeunit_name: str, codeunit_version: str) -> tuple[bool, str, str]:
|
617
|
-
codeunit_name_regex = re.escape(codeunit_name)
|
618
|
-
codeunit_version_regex = re.escape(codeunit_version)
|
619
|
-
regex = f"""^<Project Sdk=\\"Microsoft\\.NET\\.Sdk\\">
|
620
|
-
<PropertyGroup>
|
621
|
-
<TargetFramework>([^<]+)<\\/TargetFramework>
|
622
|
-
<Authors>([^<]+)<\\/Authors>
|
623
|
-
<Version>{codeunit_version_regex}<\\/Version>
|
624
|
-
<AssemblyVersion>{codeunit_version_regex}<\\/AssemblyVersion>
|
625
|
-
<FileVersion>{codeunit_version_regex}<\\/FileVersion>
|
626
|
-
<SelfContained>false<\\/SelfContained>
|
627
|
-
<IsPackable>false<\\/IsPackable>
|
628
|
-
<PreserveCompilationContext>false<\\/PreserveCompilationContext>
|
629
|
-
<GenerateRuntimeConfigurationFiles>true<\\/GenerateRuntimeConfigurationFiles>
|
630
|
-
<Copyright>([^<]+)<\\/Copyright>
|
631
|
-
<Description>{codeunit_name_regex}Tests is the test-project for {codeunit_name_regex}\\.<\\/Description>
|
632
|
-
<PackageProjectUrl>https:\\/\\/([^<]+)<\\/PackageProjectUrl>
|
633
|
-
<RepositoryUrl>https:\\/\\/([^<]+)\\.git</RepositoryUrl>
|
634
|
-
<RootNamespace>([^<]+)\\.Tests<\\/RootNamespace>
|
635
|
-
<ProduceReferenceAssembly>false<\\/ProduceReferenceAssembly>
|
636
|
-
<Nullable>(disable|enable|warnings|annotations)<\\/Nullable>
|
637
|
-
<Configurations>Development;QualityCheck;Productive<\\/Configurations>
|
638
|
-
<IsTestProject>true<\\/IsTestProject>
|
639
|
-
<LangVersion>([^<]+)<\\/LangVersion>
|
640
|
-
<PackageRequireLicenseAcceptance>true<\\/PackageRequireLicenseAcceptance>
|
641
|
-
<GenerateSerializationAssemblies>Off<\\/GenerateSerializationAssemblies>
|
642
|
-
<AppendTargetFrameworkToOutputPath>false<\\/AppendTargetFrameworkToOutputPath>
|
643
|
-
<OutputPath>\\.\\.\\\\Other\\\\Artifacts\\\\BuildResultTests_DotNet_win\\-x64<\\/OutputPath>
|
644
|
-
<PlatformTarget>([^<]+)<\\/PlatformTarget>
|
645
|
-
<WarningLevel>\\d<\\/WarningLevel>
|
646
|
-
<Prefer32Bit>false<\\/Prefer32Bit>
|
647
|
-
<SignAssembly>true<\\/SignAssembly>
|
648
|
-
<AssemblyOriginatorKeyFile>\\.\\.\\\\\\.\\.\\\\Other\\\\Resources\\\\PublicKeys\\\\StronglyNamedKey\\\\([^<]+)PublicKey\\.snk<\\/AssemblyOriginatorKeyFile>
|
649
|
-
<DelaySign>true<\\/DelaySign>
|
650
|
-
<NoWarn>([^<]+)<\\/NoWarn>
|
651
|
-
<WarningsAsErrors>([^<]+)<\\/WarningsAsErrors>
|
652
|
-
<ErrorLog>\\.\\.\\\\Other\\\\Resources\\\\CodeAnalysisResult\\\\{codeunit_name_regex}Tests\\.sarif<\\/ErrorLog>
|
653
|
-
<OutputType>Library<\\/OutputType>(\\n|.)*
|
654
|
-
<\\/PropertyGroup>
|
655
|
-
<PropertyGroup Condition=\\\"'\\$\\(Configuration\\)'=='Development'\\\">
|
656
|
-
<DebugType>full<\\/DebugType>
|
657
|
-
<DebugSymbols>true<\\/DebugSymbols>
|
658
|
-
<Optimize>false<\\/Optimize>
|
659
|
-
<DefineConstants>TRACE;DEBUG;Development<\\/DefineConstants>
|
660
|
-
<ErrorReport>prompt<\\/ErrorReport>
|
661
|
-
<\\/PropertyGroup>
|
662
|
-
<PropertyGroup Condition=\\\"'\\$\\(Configuration\\)'=='QualityCheck'\\\">
|
663
|
-
<DebugType>portable<\\/DebugType>
|
664
|
-
<DebugSymbols>true<\\/DebugSymbols>
|
665
|
-
<Optimize>false<\\/Optimize>
|
666
|
-
<DefineConstants>TRACE;QualityCheck<\\/DefineConstants>
|
667
|
-
<ErrorReport>none<\\/ErrorReport>
|
668
|
-
<\\/PropertyGroup>
|
669
|
-
<PropertyGroup Condition=\\\"'\\$\\(Configuration\\)'=='Productive'\\\">
|
670
|
-
<DebugType>none<\\/DebugType>
|
671
|
-
<DebugSymbols>false<\\/DebugSymbols>
|
672
|
-
<Optimize>true<\\/Optimize>
|
673
|
-
<DefineConstants>Productive<\\/DefineConstants>
|
674
|
-
<ErrorReport>none<\\/ErrorReport>
|
675
|
-
<\\/PropertyGroup>(\\n|.)*
|
676
|
-
<\\/Project>$"""
|
677
|
-
result = self.__standardized_task_verify_standard_format_for_csproj_files(regex, csproj_file)
|
678
|
-
return (result[0], regex, result[1])
|
679
|
-
|
680
|
-
def __standardized_task_verify_standard_format_for_csproj_files(self, regex: str, csproj_file: str) -> tuple[bool, list[str]]:
|
681
|
-
filename = os.path.basename(csproj_file)
|
682
|
-
GeneralUtilities.write_message_to_stdout(f"Check {filename}...")
|
683
|
-
file_content = GeneralUtilities.read_text_from_file(csproj_file)
|
684
|
-
regex = regex.replace("\r", GeneralUtilities.empty_string).replace("\n", "\\n")
|
685
|
-
file_content = file_content.replace("\r", GeneralUtilities.empty_string)
|
686
|
-
match = re.match(regex, file_content)
|
687
|
-
result = match is not None
|
688
|
-
hints = None
|
689
|
-
if not result:
|
690
|
-
hints = self.get_hints_for_csproj()
|
691
|
-
return (result, hints)
|
692
|
-
|
693
|
-
@GeneralUtilities.check_arguments
|
694
|
-
def get_hints_for_csproj(self) -> list[str]:
|
695
|
-
result: list[str] = []
|
696
|
-
with open("string.txt", "r", encoding="utf-8") as f:
|
697
|
-
strings = [line.rstrip("\n") for line in f]
|
698
|
-
|
699
|
-
with open("regex.txt", "r", encoding="utf-8") as f:
|
700
|
-
regexes = [line.rstrip("\n") for line in f]
|
701
|
-
|
702
|
-
amount_of_lines = len(regexes)
|
703
|
-
if len(strings) < amount_of_lines:
|
704
|
-
result.append("csproj-file has less lines than the regex requires.")
|
705
|
-
return result
|
706
|
-
for i in range(amount_of_lines - 1):
|
707
|
-
s = strings[i]
|
708
|
-
r = regexes[i]
|
709
|
-
if not re.match(r, s):
|
710
|
-
result.append(f"Line {i+1} does not match: Regex='{r}' String='{s}'")
|
711
|
-
return result
|
712
|
-
|
713
|
-
@GeneralUtilities.check_arguments
|
714
|
-
def __standardized_tasks_build_for_dotnet_build(self, csproj_file: str, originaloutputfolder: str, files_to_sign: dict[str, str], commitid: str, verbosity: int, runtimes: list[str], target_environmenttype: str, target_environmenttype_mapping: dict[str, str], copy_license_file_to_target_folder: bool, repository_folder: str, codeunit_name: str, commandline_arguments: list[str]) -> None:
|
715
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
716
|
-
csproj_filename = os.path.basename(csproj_file)
|
717
|
-
GeneralUtilities.write_message_to_stdout(f"Build {csproj_filename}...")
|
718
|
-
dotnet_build_configuration: str = target_environmenttype_mapping[target_environmenttype]
|
719
|
-
verbosity = self.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
720
|
-
codeunit_folder = os.path.join(repository_folder, codeunit_name)
|
721
|
-
csproj_file_folder = os.path.dirname(csproj_file)
|
722
|
-
csproj_file_name = os.path.basename(csproj_file)
|
723
|
-
csproj_file_name_without_extension = csproj_file_name.split(".")[0]
|
724
|
-
sarif_folder = os.path.join(codeunit_folder, "Other", "Resources", "CodeAnalysisResult")
|
725
|
-
GeneralUtilities.ensure_directory_exists(sarif_folder)
|
726
|
-
gitkeep_file = os.path.join(sarif_folder, ".gitkeep")
|
727
|
-
GeneralUtilities.ensure_file_exists(gitkeep_file)
|
728
|
-
for runtime in runtimes:
|
729
|
-
outputfolder = originaloutputfolder+runtime
|
730
|
-
GeneralUtilities.ensure_directory_does_not_exist(os.path.join(csproj_file_folder, "obj"))
|
731
|
-
GeneralUtilities.ensure_directory_does_not_exist(outputfolder)
|
732
|
-
self.__sc.run_program("dotnet", "clean", csproj_file_folder, verbosity=verbosity)
|
733
|
-
GeneralUtilities.ensure_directory_exists(outputfolder)
|
734
|
-
self.__sc.run_program("dotnet", "restore", codeunit_folder, verbosity=verbosity)
|
735
|
-
self.__sc.run_program_argsasarray("dotnet", ["build", csproj_file_name, "-c", dotnet_build_configuration, "-o", outputfolder, "--runtime", runtime], csproj_file_folder, verbosity=verbosity)
|
736
|
-
if copy_license_file_to_target_folder:
|
737
|
-
license_file = os.path.join(repository_folder, "License.txt")
|
738
|
-
target = os.path.join(outputfolder, f"{codeunit_name}.License.txt")
|
739
|
-
shutil.copyfile(license_file, target)
|
740
|
-
if 0 < len(files_to_sign):
|
741
|
-
for key, value in files_to_sign.items():
|
742
|
-
dll_file = key
|
743
|
-
snk_file = value
|
744
|
-
dll_file_full = os.path.join(outputfolder, dll_file)
|
745
|
-
if os.path.isfile(dll_file_full):
|
746
|
-
GeneralUtilities.assert_condition(self.__sc.run_program("sn", f"-vf {dll_file}", outputfolder, throw_exception_if_exitcode_is_not_zero=False)[0] == 1, f"Pre-verifying of {dll_file} failed.")
|
747
|
-
self.__sc.run_program_argsasarray("sn", ["-R", dll_file, snk_file], outputfolder)
|
748
|
-
GeneralUtilities.assert_condition(self.__sc.run_program("sn", f"-vf {dll_file}", outputfolder, throw_exception_if_exitcode_is_not_zero=False)[0] == 0, f"Verifying of {dll_file} failed.")
|
749
|
-
sarif_filename = f"{csproj_file_name_without_extension}.sarif"
|
750
|
-
sarif_source_file = os.path.join(sarif_folder, sarif_filename)
|
751
|
-
if os.path.exists(sarif_source_file):
|
752
|
-
sarif_folder_target = os.path.join(codeunit_folder, "Other", "Artifacts", "CodeAnalysisResult")
|
753
|
-
GeneralUtilities.ensure_directory_exists(sarif_folder_target)
|
754
|
-
sarif_target_file = os.path.join(sarif_folder_target, sarif_filename)
|
755
|
-
GeneralUtilities.ensure_file_does_not_exist(sarif_target_file)
|
756
|
-
shutil.copyfile(sarif_source_file, sarif_target_file)
|
757
|
-
|
758
|
-
@GeneralUtilities.check_arguments
|
759
|
-
def standardized_tasks_build_for_dotnet_project(self, buildscript_file: str, default_target_environmenttype: str, target_environmenttype_mapping: dict[str, str], runtimes: list[str], verbosity: int, commandline_arguments: list[str]) -> None:
|
760
|
-
# hint: arguments can be overwritten by commandline_arguments
|
761
|
-
# this function builds an exe
|
762
|
-
target_environmenttype = self.get_targetenvironmenttype_from_commandline_arguments(commandline_arguments, default_target_environmenttype)
|
763
|
-
self.__standardized_tasks_build_for_dotnet_project(
|
764
|
-
buildscript_file, target_environmenttype_mapping, default_target_environmenttype, verbosity, target_environmenttype, runtimes, True, commandline_arguments)
|
765
|
-
|
766
|
-
@GeneralUtilities.check_arguments
|
767
|
-
def standardized_tasks_build_for_dotnet_library_project(self, buildscript_file: str, default_target_environmenttype: str, target_environmenttype_mapping: dict[str, str], runtimes: list[str], verbosity: int, commandline_arguments: list[str]) -> None:
|
768
|
-
# hint: arguments can be overwritten by commandline_arguments
|
769
|
-
# this function builds a dll and converts it to a nupkg-file
|
770
|
-
|
771
|
-
target_environmenttype = self.get_targetenvironmenttype_from_commandline_arguments(commandline_arguments, default_target_environmenttype)
|
772
|
-
self.__standardized_tasks_build_for_dotnet_project(buildscript_file, target_environmenttype_mapping, default_target_environmenttype, verbosity, target_environmenttype, runtimes, True, commandline_arguments)
|
773
|
-
self.__standardized_tasks_build_nupkg_for_dotnet_create_package(buildscript_file, verbosity, commandline_arguments)
|
774
|
-
|
775
|
-
@GeneralUtilities.check_arguments
|
776
|
-
def get_default_target_environmenttype_mapping(self) -> dict[str, str]:
|
777
|
-
return {
|
778
|
-
TasksForCommonProjectStructure.get_development_environment_name(): TasksForCommonProjectStructure.get_development_environment_name(),
|
779
|
-
TasksForCommonProjectStructure.get_qualitycheck_environment_name(): TasksForCommonProjectStructure.get_qualitycheck_environment_name(),
|
780
|
-
TasksForCommonProjectStructure.get_productive_environment_name(): TasksForCommonProjectStructure.get_productive_environment_name()
|
781
|
-
}
|
782
|
-
|
783
|
-
@GeneralUtilities.check_arguments
|
784
|
-
def __standardized_tasks_build_for_dotnet_project(self, buildscript_file: str, target_environmenttype_mapping: dict[str, str], default_target_environment_type: str, verbosity: int, target_environment_type: str, runtimes: list[str], copy_license_file_to_target_folder: bool, commandline_arguments: list[str]) -> None:
|
785
|
-
codeunitname: str = os.path.basename(str(Path(os.path.dirname(buildscript_file)).parent.parent.absolute()))
|
786
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
787
|
-
files_to_sign: dict[str, str] = TasksForCommonProjectStructure.get_filestosign_from_commandline_arguments(commandline_arguments, dict())
|
788
|
-
repository_folder: str = str(Path(os.path.dirname(buildscript_file)).parent.parent.parent.absolute())
|
789
|
-
commitid = self.__sc.git_get_commit_id(repository_folder)
|
790
|
-
outputfolder = GeneralUtilities.resolve_relative_path("../Artifacts", os.path.dirname(buildscript_file))
|
791
|
-
codeunit_folder = os.path.join(repository_folder, codeunitname)
|
792
|
-
csproj_file = os.path.join(codeunit_folder, codeunitname, codeunitname + ".csproj")
|
793
|
-
csproj_test_file = os.path.join(codeunit_folder, codeunitname+"Tests", codeunitname+"Tests.csproj")
|
794
|
-
self.__standardized_tasks_build_for_dotnet_build(csproj_file, os.path.join(outputfolder, "BuildResult_DotNet_"), files_to_sign, commitid, verbosity, runtimes, target_environment_type, target_environmenttype_mapping, copy_license_file_to_target_folder, repository_folder, codeunitname, commandline_arguments)
|
795
|
-
self.__standardized_tasks_build_for_dotnet_build(csproj_test_file, os.path.join(outputfolder, "BuildResultTests_DotNet_"), files_to_sign, commitid, verbosity, runtimes, target_environment_type, target_environmenttype_mapping, copy_license_file_to_target_folder, repository_folder, codeunitname, commandline_arguments)
|
796
|
-
self.generate_sbom_for_dotnet_project(codeunit_folder, verbosity, commandline_arguments)
|
797
|
-
self.copy_source_files_to_output_directory(buildscript_file)
|
798
|
-
|
799
|
-
@GeneralUtilities.check_arguments
|
800
|
-
def __standardized_tasks_build_nupkg_for_dotnet_create_package(self, buildscript_file: str, verbosity: int, commandline_arguments: list[str]) -> None:
|
801
|
-
codeunitname: str = os.path.basename(str(Path(os.path.dirname(buildscript_file)).parent.parent.absolute()))
|
802
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
803
|
-
repository_folder: str = str(Path(os.path.dirname(buildscript_file)).parent.parent.parent.absolute())
|
804
|
-
build_folder = os.path.join(repository_folder, codeunitname, "Other", "Build")
|
805
|
-
outputfolder = GeneralUtilities.resolve_relative_path("../Artifacts/BuildResult_NuGet", os.path.dirname(buildscript_file))
|
806
|
-
root: etree._ElementTree = etree.parse(os.path.join(build_folder, f"{codeunitname}.nuspec"))
|
807
|
-
current_version = root.xpath("//*[name() = 'package']/*[name() = 'metadata']/*[name() = 'version']/text()")[0]
|
808
|
-
nupkg_filename = f"{codeunitname}.{current_version}.nupkg"
|
809
|
-
nupkg_file = f"{build_folder}/{nupkg_filename}"
|
810
|
-
GeneralUtilities.ensure_file_does_not_exist(nupkg_file)
|
811
|
-
commit_id = self.__sc.git_get_commit_id(repository_folder)
|
812
|
-
self.__sc.run_program("nuget", f"pack {codeunitname}.nuspec -Properties \"commitid={commit_id}\"", build_folder, verbosity=verbosity)
|
813
|
-
GeneralUtilities.ensure_directory_does_not_exist(outputfolder)
|
814
|
-
GeneralUtilities.ensure_directory_exists(outputfolder)
|
815
|
-
os.rename(nupkg_file, f"{outputfolder}/{nupkg_filename}")
|
816
|
-
|
817
|
-
@GeneralUtilities.check_arguments
|
818
|
-
def generate_sbom_for_dotnet_project(self, codeunit_folder: str, verbosity: int, commandline_arguments: list[str]) -> None:
|
819
|
-
GeneralUtilities.write_message_to_stdout("Generate SBOM...")
|
820
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
821
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
822
|
-
bomfile_folder = "Other\\Artifacts\\BOM"
|
823
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
824
|
-
self.__sc.run_program_argsasarray("dotnet", ["CycloneDX", f"{codeunit_name}\\{codeunit_name}.csproj", "-o", bomfile_folder, "--disable-github-licenses"], codeunit_folder, verbosity=verbosity)
|
825
|
-
codeunitversion = self.get_version_of_codeunit(os.path.join(codeunit_folder, f"{codeunit_name}.codeunit.xml"))
|
826
|
-
target = f"{codeunit_folder}\\{bomfile_folder}\\{codeunit_name}.{codeunitversion}.sbom.xml"
|
827
|
-
GeneralUtilities.ensure_file_does_not_exist(target)
|
828
|
-
os.rename(f"{codeunit_folder}\\{bomfile_folder}\\bom.xml", target)
|
829
|
-
self.__sc.format_xml_file(target)
|
830
|
-
|
831
|
-
@GeneralUtilities.check_arguments
|
832
|
-
def standardized_tasks_run_linting_for_flutter_project_in_common_project_structure(self, script_file: str, default_verbosity: int, args: list[str]):
|
833
|
-
pass # TODO
|
834
|
-
|
835
|
-
@GeneralUtilities.check_arguments
|
836
|
-
def standardized_tasks_linting_for_python_codeunit(self, linting_script_file: str, verbosity: int, targetenvironmenttype: str, commandline_arguments: list[str]) -> None:
|
837
|
-
codeunitname: str = Path(os.path.dirname(linting_script_file)).parent.parent.name
|
838
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
839
|
-
repository_folder: str = str(Path(os.path.dirname(linting_script_file)).parent.parent.parent.absolute())
|
840
|
-
errors_found = False
|
841
|
-
GeneralUtilities.write_message_to_stdout(f"Check for linting-issues in codeunit {codeunitname}.")
|
842
|
-
src_folder = os.path.join(repository_folder, codeunitname, codeunitname)
|
843
|
-
tests_folder = src_folder+"Tests"
|
844
|
-
# TODO check if there are errors in sarif-file
|
845
|
-
for file in GeneralUtilities.get_all_files_of_folder(src_folder)+GeneralUtilities.get_all_files_of_folder(tests_folder):
|
846
|
-
relative_file_path_in_repository = os.path.relpath(file, repository_folder)
|
847
|
-
if file.endswith(".py") and os.path.getsize(file) > 0 and not self.__sc.file_is_git_ignored(relative_file_path_in_repository, repository_folder):
|
848
|
-
GeneralUtilities.write_message_to_stdout(f"Check for linting-issues in {os.path.relpath(file, os.path.join(repository_folder, codeunitname))}.")
|
849
|
-
linting_result = self.__sc.python_file_has_errors(file, repository_folder)
|
850
|
-
if (linting_result[0]):
|
851
|
-
errors_found = True
|
852
|
-
for error in linting_result[1]:
|
853
|
-
GeneralUtilities.write_message_to_stderr(error)
|
854
|
-
if errors_found:
|
855
|
-
raise ValueError("Linting-issues occurred.")
|
856
|
-
else:
|
857
|
-
GeneralUtilities.write_message_to_stdout("No linting-issues found.")
|
858
|
-
|
859
|
-
@GeneralUtilities.check_arguments
|
860
|
-
def standardized_tasks_generate_coverage_report(self, repository_folder: str, codeunitname: str, verbosity: int, generate_badges: bool, targetenvironmenttype: str, commandline_arguments: list[str], add_testcoverage_history_entry: bool = None) -> None:
|
861
|
-
"""This function expects that the file '<repositorybasefolder>/<codeunitname>/Other/Artifacts/TestCoverage/TestCoverage.xml'
|
862
|
-
which contains a test-coverage-report in the cobertura-format exists.
|
863
|
-
This script expects that the testcoverage-reportfolder is '<repositorybasefolder>/<codeunitname>/Other/Artifacts/TestCoverageReport'.
|
864
|
-
This script expects that test-coverage-badges should be added to '<repositorybasefolder>/<codeunitname>/Other/Resources/Badges'."""
|
865
|
-
GeneralUtilities.write_message_to_stdout("Generate testcoverage report..")
|
866
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
867
|
-
codeunit_version = self.get_version_of_codeunit(os.path.join(repository_folder, codeunitname, f"{codeunitname}.codeunit.xml"))
|
868
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
869
|
-
if verbosity == 0:
|
870
|
-
verbose_argument_for_reportgenerator = "Off"
|
871
|
-
elif verbosity == 1:
|
872
|
-
verbose_argument_for_reportgenerator = "Error"
|
873
|
-
elif verbosity == 2:
|
874
|
-
verbose_argument_for_reportgenerator = "Info"
|
875
|
-
elif verbosity == 3:
|
876
|
-
verbose_argument_for_reportgenerator = "Verbose"
|
877
|
-
else:
|
878
|
-
raise ValueError(f"Unknown value for verbosity: {GeneralUtilities.str_none_safe(verbosity)}")
|
879
|
-
|
880
|
-
# Generating report
|
881
|
-
GeneralUtilities.ensure_directory_does_not_exist(os.path.join(repository_folder, codeunitname, f"{codeunitname}/Other/Artifacts/TestCoverageReport"))
|
882
|
-
GeneralUtilities.ensure_directory_exists(os.path.join(repository_folder, codeunitname, "Other/Artifacts/TestCoverageReport"))
|
883
|
-
|
884
|
-
if add_testcoverage_history_entry is None:
|
885
|
-
add_testcoverage_history_entry = self.get_is_pre_merge_value_from_commandline_arguments(commandline_arguments, add_testcoverage_history_entry)
|
886
|
-
|
887
|
-
history_folder = f"{codeunitname}/Other/Resources/TestCoverageHistory"
|
888
|
-
history_folder_full = os.path.join(repository_folder, history_folder)
|
889
|
-
GeneralUtilities.ensure_directory_exists(history_folder_full)
|
890
|
-
history_argument = f" -historydir:{history_folder}"
|
891
|
-
argument = f"-reports:{codeunitname}/Other/Artifacts/TestCoverage/TestCoverage.xml -targetdir:{codeunitname}/Other/Artifacts/TestCoverageReport --verbosity:{verbose_argument_for_reportgenerator}{history_argument} -title:{codeunitname} -tag:v{codeunit_version}"
|
892
|
-
self.__sc.run_program("reportgenerator", argument, repository_folder, verbosity=verbosity)
|
893
|
-
if not add_testcoverage_history_entry:
|
894
|
-
os.remove(GeneralUtilities.get_direct_files_of_folder(history_folder_full)[-1])
|
895
|
-
|
896
|
-
# Generating badges
|
897
|
-
if generate_badges:
|
898
|
-
testcoverageubfolger = "Other/Resources/TestCoverageBadges"
|
899
|
-
fulltestcoverageubfolger = os.path.join(repository_folder, codeunitname, testcoverageubfolger)
|
900
|
-
GeneralUtilities.ensure_directory_does_not_exist(fulltestcoverageubfolger)
|
901
|
-
GeneralUtilities.ensure_directory_exists(fulltestcoverageubfolger)
|
902
|
-
self.__sc.run_program("reportgenerator", f"-reports:Other/Artifacts/TestCoverage/TestCoverage.xml -targetdir:{testcoverageubfolger} -reporttypes:Badges --verbosity:{verbose_argument_for_reportgenerator}", os.path.join(repository_folder, codeunitname), verbosity=verbosity)
|
903
|
-
|
904
|
-
@GeneralUtilities.check_arguments
|
905
|
-
def standardized_tasks_run_testcases_for_dotnet_project(self, runtestcases_file: str, targetenvironmenttype: str, verbosity: int, generate_badges: bool, target_environmenttype_mapping: dict[str, str], commandline_arguments: list[str]) -> None:
|
906
|
-
GeneralUtilities.write_message_to_stdout("Run testcases...")
|
907
|
-
dotnet_build_configuration: str = target_environmenttype_mapping[targetenvironmenttype]
|
908
|
-
codeunit_name: str = os.path.basename(str(Path(os.path.dirname(runtestcases_file)).parent.parent.absolute()))
|
909
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
910
|
-
repository_folder: str = str(Path(os.path.dirname(runtestcases_file)).parent.parent.parent.absolute()).replace("\\", "/")
|
911
|
-
coverage_file_folder = os.path.join(repository_folder, codeunit_name, "Other/Artifacts/TestCoverage")
|
912
|
-
temp_folder = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
|
913
|
-
GeneralUtilities.ensure_directory_exists(temp_folder)
|
914
|
-
runsettings_file = self.dotnet_runsettings_file
|
915
|
-
codeunit_folder = f"{repository_folder}/{codeunit_name}"
|
916
|
-
arg = f"test . -c {dotnet_build_configuration} -o {temp_folder}"
|
917
|
-
if os.path.isfile(os.path.join(codeunit_folder, runsettings_file)):
|
918
|
-
arg = f"{arg} --settings {runsettings_file}"
|
919
|
-
arg = f"{arg} /p:CollectCoverage=true /p:CoverletOutput=../Other/Artifacts/TestCoverage/Testcoverage /p:CoverletOutputFormat=cobertura"
|
920
|
-
self.__sc.run_program("dotnet", arg, codeunit_folder, verbosity=verbosity, print_live_output=True)
|
921
|
-
target_file = os.path.join(coverage_file_folder, "TestCoverage.xml")
|
922
|
-
GeneralUtilities.ensure_file_does_not_exist(target_file)
|
923
|
-
os.rename(os.path.join(coverage_file_folder, "Testcoverage.cobertura.xml"), target_file)
|
924
|
-
self.__remove_unrelated_package_from_testcoverage_file(target_file, codeunit_name)
|
925
|
-
root: etree._ElementTree = etree.parse(target_file)
|
926
|
-
source_base_path_in_coverage_file: str = root.xpath("//coverage/sources/source/text()")[0].replace("\\", "/")
|
927
|
-
content = GeneralUtilities.read_text_from_file(target_file)
|
928
|
-
GeneralUtilities.assert_condition(source_base_path_in_coverage_file.startswith(repository_folder) or repository_folder.startswith(source_base_path_in_coverage_file), f"Unexpected path for coverage. Sourcepath: \"{source_base_path_in_coverage_file}\"; repository: \"{repository_folder}\"")
|
929
|
-
content = re.sub('\\\\', '/', content)
|
930
|
-
content = re.sub("filename=\"([^\"]+)\"", lambda match: self.__standardized_tasks_run_testcases_for_dotnet_project_helper(source_base_path_in_coverage_file, codeunit_folder, match), content)
|
931
|
-
GeneralUtilities.write_text_to_file(target_file, content)
|
932
|
-
self.run_testcases_common_post_task(repository_folder, codeunit_name, verbosity, generate_badges, targetenvironmenttype, commandline_arguments)
|
933
|
-
artifacts_folder = os.path.join(repository_folder, codeunit_name, "Other", "Artifacts")
|
934
|
-
for subfolder in GeneralUtilities.get_direct_folders_of_folder(artifacts_folder):
|
935
|
-
if os.path.basename(subfolder).startswith("BuildResultTests_DotNet_"):
|
936
|
-
GeneralUtilities.ensure_directory_does_not_exist(subfolder)
|
937
|
-
|
938
|
-
@GeneralUtilities.check_arguments
|
939
|
-
def run_testcases_common_post_task(self, repository_folder: str, codeunit_name: str, verbosity: int, generate_badges: bool, targetenvironmenttype: str, commandline_arguments: list[str]) -> None:
|
940
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
941
|
-
coverage_file_folder = os.path.join(repository_folder, codeunit_name, "Other/Artifacts/TestCoverage")
|
942
|
-
coveragefiletarget = os.path.join(coverage_file_folder, "TestCoverage.xml")
|
943
|
-
self.update_path_of_source_in_testcoverage_file(repository_folder, codeunit_name)
|
944
|
-
self.standardized_tasks_generate_coverage_report(repository_folder, codeunit_name, verbosity, generate_badges, targetenvironmenttype, commandline_arguments)
|
945
|
-
self.check_testcoverage(coveragefiletarget, repository_folder, codeunit_name)
|
946
|
-
|
947
|
-
@GeneralUtilities.check_arguments
|
948
|
-
def update_path_of_source_in_testcoverage_file(self, repository_folder: str, codeunitname: str) -> None:
|
949
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
950
|
-
GeneralUtilities.write_message_to_stdout("Update paths of source files in testcoverage files..")
|
951
|
-
folder = f"{repository_folder}/{codeunitname}/Other/Artifacts/TestCoverage"
|
952
|
-
filename = "TestCoverage.xml"
|
953
|
-
full_file = os.path.join(folder, filename)
|
954
|
-
GeneralUtilities.write_text_to_file(full_file, re.sub("<source>.+<\\/source>", f"<source><!--[repository]/-->./{codeunitname}/</source>", GeneralUtilities.read_text_from_file(full_file)))
|
955
|
-
self.__remove_not_existing_files_from_testcoverage_file(full_file, repository_folder, codeunitname)
|
956
|
-
|
957
|
-
@GeneralUtilities.check_arguments
|
958
|
-
def __standardized_tasks_run_testcases_for_dotnet_project_helper(self, source: str, codeunit_folder: str, match: re.Match) -> str:
|
959
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
960
|
-
filename = match.group(1)
|
961
|
-
file = os.path.join(source, filename)
|
962
|
-
# GeneralUtilities.assert_condition(os.path.isfile(file),f"File \"{file}\" does not exist.")
|
963
|
-
GeneralUtilities.assert_condition(file.startswith(codeunit_folder), f"Unexpected path for coverage-file. File: \"{file}\"; codeunitfolder: \"{codeunit_folder}\"")
|
964
|
-
filename_relative = f".{file[len(codeunit_folder):]}"
|
965
|
-
return f'filename="{filename_relative}"'
|
966
|
-
|
967
|
-
@GeneralUtilities.check_arguments
|
968
|
-
def __remove_not_existing_files_from_testcoverage_file(self, testcoveragefile: str, repository_folder: str, codeunit_name: str) -> None:
|
969
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
970
|
-
root: etree._ElementTree = etree.parse(testcoveragefile)
|
971
|
-
codeunit_folder = os.path.join(repository_folder, codeunit_name)
|
972
|
-
xpath = f"//coverage/packages/package[@name='{codeunit_name}']/classes/class"
|
973
|
-
coverage_report_classes = root.xpath(xpath)
|
974
|
-
found_existing_files = False
|
975
|
-
for coverage_report_class in coverage_report_classes:
|
976
|
-
filename = coverage_report_class.attrib['filename']
|
977
|
-
file = os.path.join(codeunit_folder, filename)
|
978
|
-
if os.path.isfile(file):
|
979
|
-
found_existing_files = True
|
980
|
-
else:
|
981
|
-
coverage_report_class.getparent().remove(coverage_report_class)
|
982
|
-
GeneralUtilities.assert_condition(found_existing_files, f"No existing files in testcoverage-report-file \"{testcoveragefile}\".")
|
983
|
-
result = etree.tostring(root).decode("utf-8")
|
984
|
-
GeneralUtilities.write_text_to_file(testcoveragefile, result)
|
985
|
-
|
986
|
-
@GeneralUtilities.check_arguments
|
987
|
-
def __remove_unrelated_package_from_testcoverage_file(self, file: str, codeunit_name: str) -> None:
|
988
|
-
root: etree._ElementTree = etree.parse(file)
|
989
|
-
packages = root.xpath('//coverage/packages/package')
|
990
|
-
for package in packages:
|
991
|
-
if package.attrib['name'] != codeunit_name:
|
992
|
-
package.getparent().remove(package)
|
993
|
-
result = etree.tostring(root).decode("utf-8")
|
994
|
-
GeneralUtilities.write_text_to_file(file, result)
|
995
|
-
|
996
|
-
@GeneralUtilities.check_arguments
|
997
|
-
def write_version_to_codeunit_file(self, codeunit_file: str, current_version: str) -> None:
|
998
|
-
versionregex = "\\d+\\.\\d+\\.\\d+"
|
999
|
-
versiononlyregex = f"^{versionregex}$"
|
1000
|
-
pattern = re.compile(versiononlyregex)
|
1001
|
-
if pattern.match(current_version):
|
1002
|
-
GeneralUtilities.write_text_to_file(codeunit_file, re.sub(f"<cps:version>{versionregex}<\\/cps:version>", f"<cps:version>{current_version}</cps:version>", GeneralUtilities.read_text_from_file(codeunit_file)))
|
1003
|
-
else:
|
1004
|
-
raise ValueError(f"Version '{current_version}' does not match version-regex '{versiononlyregex}'.")
|
1005
|
-
|
1006
|
-
@GeneralUtilities.check_arguments
|
1007
|
-
def standardized_tasks_linting_for_dotnet_project(self, linting_script_file: str, verbosity: int, targetenvironmenttype: str, commandline_arguments: list[str]) -> None:
|
1008
|
-
GeneralUtilities.write_message_to_stdout("Run linting...")
|
1009
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
1010
|
-
# TODO check if there are errors in sarif-file
|
1011
|
-
|
1012
|
-
@GeneralUtilities.check_arguments
|
1013
|
-
def __export_codeunit_reference_content_to_reference_repository(self, project_version_identifier: str, replace_existing_content: bool, target_folder_for_reference_repository: str, repository: str, codeunitname: str, projectname: str, codeunit_version: str, public_repository_url: str, branch: str) -> None:
|
1014
|
-
codeunit_folder = os.path.join(repository, codeunitname)
|
1015
|
-
codeunit_file = os.path.join(codeunit_folder, f"{codeunitname}.codeunit.xml")
|
1016
|
-
codeunit_has_testcases = self.codeunit_has_testable_sourcecode(codeunit_file)
|
1017
|
-
target_folder = os.path.join(target_folder_for_reference_repository, project_version_identifier, codeunitname)
|
1018
|
-
if os.path.isdir(target_folder) and not replace_existing_content:
|
1019
|
-
raise ValueError(f"Folder '{target_folder}' already exists.")
|
1020
|
-
GeneralUtilities.ensure_directory_does_not_exist(target_folder)
|
1021
|
-
GeneralUtilities.ensure_directory_exists(target_folder)
|
1022
|
-
codeunit_version_identifier = "Latest" if project_version_identifier == "Latest" else "v"+codeunit_version
|
1023
|
-
page_title = f"{codeunitname} {codeunit_version_identifier} codeunit-reference"
|
1024
|
-
diff_report = f"{repository}/{codeunitname}/Other/Artifacts/DiffReport/DiffReport.html"
|
1025
|
-
diff_target_folder = os.path.join(target_folder, "DiffReport")
|
1026
|
-
GeneralUtilities.ensure_directory_exists(diff_target_folder)
|
1027
|
-
diff_target_file = os.path.join(diff_target_folder, "DiffReport.html")
|
1028
|
-
title = (f'Reference of codeunit {codeunitname} {codeunit_version_identifier} (contained in project <a href="{public_repository_url}">{projectname}</a> {project_version_identifier})')
|
1029
|
-
if public_repository_url is None:
|
1030
|
-
repo_url_html = GeneralUtilities.empty_string
|
1031
|
-
else:
|
1032
|
-
repo_url_html = f'<a href="{public_repository_url}/tree/{branch}/{codeunitname}">Source-code</a>'
|
1033
|
-
if codeunit_has_testcases:
|
1034
|
-
coverage_report_link = '<a href="./TestCoverageReport/index.html">Test-coverage-report</a><br>'
|
1035
|
-
else:
|
1036
|
-
coverage_report_link = GeneralUtilities.empty_string
|
1037
|
-
index_file_for_reference = os.path.join(target_folder, "index.html")
|
1038
|
-
|
1039
|
-
design_file = None
|
1040
|
-
design = "ModestDark"
|
1041
|
-
if design == "ModestDark":
|
1042
|
-
design_file = GeneralUtilities.get_modest_dark_url()
|
1043
|
-
# TODO make designs from customizable sources available under a customizable name and move this to a class-property because this is duplicated code.
|
1044
|
-
if design_file is None:
|
1045
|
-
design_html = GeneralUtilities.empty_string
|
1046
|
-
else:
|
1047
|
-
design_html = f'<link type="text/css" rel="stylesheet" href="{design_file}" />'
|
1048
|
-
|
1049
|
-
index_file_content = f"""<!DOCTYPE html>
|
1050
|
-
<html lang="en">
|
1051
|
-
|
1052
|
-
<head>
|
1053
|
-
<meta charset="UTF-8">
|
1054
|
-
<title>{page_title}</title>
|
1055
|
-
{design_html}
|
1056
|
-
</head>
|
1057
|
-
|
1058
|
-
<body>
|
1059
|
-
<h1>{title}</h1>
|
1060
|
-
<hr/>
|
1061
|
-
Available reference-content for {codeunitname}:<br>
|
1062
|
-
{repo_url_html}<br>
|
1063
|
-
<!--TODO add artefacts-link: <a href="./x">Artefacts</a><br>-->
|
1064
|
-
<a href="./Reference/index.html">Reference</a><br>
|
1065
|
-
<a href="./DiffReport/DiffReport.html">Diff-report</a><br>
|
1066
|
-
{coverage_report_link}
|
1067
|
-
</body>
|
1068
|
-
|
1069
|
-
</html>
|
1070
|
-
"""
|
1071
|
-
|
1072
|
-
GeneralUtilities.ensure_file_exists(index_file_for_reference)
|
1073
|
-
GeneralUtilities.write_text_to_file(index_file_for_reference, index_file_content)
|
1074
|
-
other_folder_in_repository = os.path.join(repository, codeunitname, "Other")
|
1075
|
-
source_generatedreference = os.path.join(other_folder_in_repository, "Artifacts", "Reference")
|
1076
|
-
target_generatedreference = os.path.join(target_folder, "Reference")
|
1077
|
-
shutil.copytree(source_generatedreference, target_generatedreference)
|
1078
|
-
|
1079
|
-
shutil.copyfile(diff_report, diff_target_file)
|
1080
|
-
|
1081
|
-
if codeunit_has_testcases:
|
1082
|
-
source_testcoveragereport = os.path.join(other_folder_in_repository, "Artifacts", "TestCoverageReport")
|
1083
|
-
if os.path.isdir(source_testcoveragereport):  # check because it is not a mandatory artifact; if the artifact is not available, the user already gets a warning.
|
1084
|
-
target_testcoveragereport = os.path.join(target_folder, "TestCoverageReport")
|
1085
|
-
shutil.copytree(source_testcoveragereport, target_testcoveragereport)
|
1086
|
-
|
1087
|
-
@GeneralUtilities.check_arguments
|
1088
|
-
def __standardized_tasks_release_artifact(self, information: CreateReleaseInformationForProjectInCommonProjectFormat) -> None:
|
1089
|
-
GeneralUtilities.write_message_to_stdout("Release artifacts...")
|
1090
|
-
project_version = self.__sc.get_semver_version_from_gitversion(information.repository)
|
1091
|
-
target_folder_base = os.path.join(information.artifacts_folder, information.projectname, project_version)
|
1092
|
-
GeneralUtilities.ensure_directory_exists(target_folder_base)
|
1093
|
-
|
1094
|
-
self.build_codeunits(information.repository, information.verbosity, information.target_environmenttype_for_productive, information.additional_arguments_file, False, information.export_target, [], True, "Generate artifacts") # Generate artifacts after the merge (because constants like the commit-id of the new version are only available now)
|
1095
|
-
|
1096
|
-
reference_folder = os.path.join(information.reference_repository, "ReferenceContent")
|
1097
|
-
|
1098
|
-
for codeunitname in self.get_codeunits(information.repository):
|
1099
|
-
# Push artifacts to registry
|
1100
|
-
if information.verbosity > 2:
|
1101
|
-
GeneralUtilities.write_message_to_stdout(f"Push artifacts of {codeunitname}...")
|
1102
|
-
scriptfilename = f"PushArtifacts.{codeunitname}.py"
|
1103
|
-
push_artifact_to_registry_script = os.path.join(information.push_artifacts_scripts_folder, scriptfilename)
|
1104
|
-
if os.path.isfile(push_artifact_to_registry_script):
|
1105
|
-
GeneralUtilities.write_message_to_stdout(f"Push artifacts of codeunit {codeunitname}...")
|
1106
|
-
self.__sc.run_program("python", push_artifact_to_registry_script, information.push_artifacts_scripts_folder, verbosity=information.verbosity, throw_exception_if_exitcode_is_not_zero=True)
|
1107
|
-
|
1108
|
-
# Copy reference of codeunit to reference-repository
|
1109
|
-
codeunit_version = self.get_version_of_codeunit_folder(os.path.join(information.repository, codeunitname))
|
1110
|
-
self.__export_codeunit_reference_content_to_reference_repository(f"v{project_version}", False, reference_folder, information.repository, codeunitname, information.projectname, codeunit_version, information.public_repository_url, f"v{project_version}")
|
1111
|
-
self.__export_codeunit_reference_content_to_reference_repository("Latest", True, reference_folder, information.repository, codeunitname, information.projectname, codeunit_version, information.public_repository_url, information.target_branch_name)
|
1112
|
-
|
1113
|
-
# Generate reference
|
1114
|
-
self.__generate_entire_reference(information.projectname, project_version, reference_folder)
|
1115
|
-
|
1116
|
-
@staticmethod
|
1117
|
-
@GeneralUtilities.check_arguments
|
1118
|
-
def _internal_sort_reference_folder(folder1: str, folder2: str) -> int:
|
1119
|
-
"""Returns a value greater than 0 if and only if folder1 has a base-folder-name with a with a higher version than the base-folder-name of folder2.
|
1120
|
-
Returns a value lower than 0 if and only if folder1 has a base-folder-name with a lower version than the base-folder-name of folder2.
|
1121
|
-
Returns 0 if both values are equal."""
|
1122
|
-
if (folder1 == folder2):
|
1123
|
-
return 0
|
1124
|
-
|
1125
|
-
version_identifier_1 = os.path.basename(folder1)
|
1126
|
-
if version_identifier_1 == "Latest":
|
1127
|
-
return -1
|
1128
|
-
version_identifier_1 = version_identifier_1[1:]
|
1129
|
-
|
1130
|
-
version_identifier_2 = os.path.basename(folder2)
|
1131
|
-
if version_identifier_2 == "Latest":
|
1132
|
-
return 1
|
1133
|
-
version_identifier_2 = version_identifier_2[1:]
|
1134
|
-
|
1135
|
-
if version.parse(version_identifier_1) < version.parse(version_identifier_2):
|
1136
|
-
return -1
|
1137
|
-
elif version.parse(version_identifier_1) > version.parse(version_identifier_2):
|
1138
|
-
return 1
|
1139
|
-
else:
|
1140
|
-
return 0
|
1141
|
-
|
1142
|
-
@GeneralUtilities.check_arguments
|
1143
|
-
def __generate_entire_reference(self, projectname: str, project_version: str, reference_folder: str) -> None:
|
1144
|
-
all_available_version_identifier_folders_of_reference: list[str] = list(folder for folder in GeneralUtilities.get_direct_folders_of_folder(reference_folder))
|
1145
|
-
all_available_version_identifier_folders_of_reference = sorted(all_available_version_identifier_folders_of_reference, key=cmp_to_key(TasksForCommonProjectStructure._internal_sort_reference_folder))
|
1146
|
-
reference_versions_html_lines = []
|
1147
|
-
reference_versions_html_lines.append(' <hr/>')
|
1148
|
-
for all_available_version_identifier_folder_of_reference in all_available_version_identifier_folders_of_reference:
|
1149
|
-
version_identifier_of_project = os.path.basename(all_available_version_identifier_folder_of_reference)
|
1150
|
-
if version_identifier_of_project == "Latest":
|
1151
|
-
latest_version_hint = f" (v{project_version})"
|
1152
|
-
else:
|
1153
|
-
latest_version_hint = GeneralUtilities.empty_string
|
1154
|
-
reference_versions_html_lines.append(f' <h2>{version_identifier_of_project}{latest_version_hint}</h2>')
|
1155
|
-
reference_versions_html_lines.append(" Contained codeunits:<br/>")
|
1156
|
-
reference_versions_html_lines.append(" <ul>")
|
1157
|
-
for codeunit_reference_folder in list(folder for folder in GeneralUtilities.get_direct_folders_of_folder(all_available_version_identifier_folder_of_reference)):
|
1158
|
-
reference_versions_html_lines.append(f' <li><a href="./{version_identifier_of_project}/{os.path.basename(codeunit_reference_folder)}/index.html">' +
|
1159
|
-
f'{os.path.basename(codeunit_reference_folder)} {version_identifier_of_project}</a></li>')
|
1160
|
-
reference_versions_html_lines.append(" </ul>")
|
1161
|
-
reference_versions_html_lines.append(' <hr/>')
|
1162
|
-
if version_identifier_of_project == "Latest":
|
1163
|
-
latest_version_hint = " <h2>History</h2>"
|
1164
|
-
|
1165
|
-
design_file = None
|
1166
|
-
design = "ModestDark"
|
1167
|
-
if design == "ModestDark":
|
1168
|
-
design_file = GeneralUtilities.get_modest_dark_url()
|
1169
|
-
# TODO make designs from customizable sources available under a customizable name and move this to a class-property because this is duplicated code.
|
1170
|
-
if design_file is None:
|
1171
|
-
design_html = GeneralUtilities.empty_string
|
1172
|
-
else:
|
1173
|
-
design_html = f'<link type="text/css" rel="stylesheet" href="{design_file}" />'
|
1174
|
-
|
1175
|
-
reference_versions_links_file_content = " \n".join(reference_versions_html_lines)
|
1176
|
-
title = f"{projectname}-reference"
|
1177
|
-
reference_index_file = os.path.join(reference_folder, "index.html")
|
1178
|
-
reference_index_file_content = f"""<!DOCTYPE html>
|
1179
|
-
<html lang="en">
|
1180
|
-
|
1181
|
-
<head>
|
1182
|
-
<meta charset="UTF-8">
|
1183
|
-
<title>{title}</title>
|
1184
|
-
{design_html}
|
1185
|
-
</head>
|
1186
|
-
|
1187
|
-
<body>
|
1188
|
-
<h1>{title}</h1>
|
1189
|
-
{reference_versions_links_file_content}
|
1190
|
-
</body>
|
1191
|
-
|
1192
|
-
</html>
|
1193
|
-
""" # see https://getbootstrap.com/docs/5.1/getting-started/introduction/
|
1194
|
-
GeneralUtilities.write_text_to_file(reference_index_file, reference_index_file_content)
|
1195
|
-
|
1196
|
-
@GeneralUtilities.check_arguments
|
1197
|
-
def push_nuget_build_artifact(self, push_script_file: str, codeunitname: str, registry_address: str, repository_folder_name: str, api_key: str):
|
1198
|
-
# when pushing to the "default public" nuget-server, use registry_address: "nuget.org"
|
1199
|
-
build_artifact_folder = GeneralUtilities.resolve_relative_path(f"../../Submodules/{repository_folder_name}/{codeunitname}/Other/Artifacts/BuildResult_NuGet", os.path.dirname(push_script_file))
|
1200
|
-
self.__sc.push_nuget_build_artifact(self.__sc.find_file_by_extension(build_artifact_folder, "nupkg"), registry_address, api_key)
|
1201
|
-
|
1202
|
-
@GeneralUtilities.check_arguments
|
1203
|
-
def assert_no_uncommitted_changes(self, repository_folder: str):
|
1204
|
-
if self.__sc.git_repository_has_uncommitted_changes(repository_folder):
|
1205
|
-
raise ValueError(f"Repository '{repository_folder}' has uncommitted changes.")
|
1206
|
-
|
1207
|
-
@GeneralUtilities.check_arguments
|
1208
|
-
def ensure_certificate_authority_for_development_purposes_is_generated(self, product_folder: str):
|
1209
|
-
product_name: str = os.path.basename(product_folder)
|
1210
|
-
now = GeneralUtilities.get_now()
|
1211
|
-
ca_name = f"{product_name}CA_{now.year:04}{now.month:02}{now.day:02}{now.hour:02}{now.minute:02}{now.second:02}"
|
1212
|
-
ca_folder = os.path.join(product_folder, "Other", "Resources", "CA")
|
1213
|
-
generate_certificate = True
|
1214
|
-
if os.path.isdir(ca_folder):
|
1215
|
-
ca_files = [file for file in GeneralUtilities.get_direct_files_of_folder(ca_folder) if file.endswith(".crt")]
|
1216
|
-
if len(ca_files) > 0:
|
1217
|
-
ca_file = ca_files[-1] # pylint:disable=unused-variable
|
1218
|
-
certificate_is_valid = True # TODO check if certificate is really valid
|
1219
|
-
generate_certificate = not certificate_is_valid
|
1220
|
-
if generate_certificate:
|
1221
|
-
self.__sc.generate_certificate_authority(ca_folder, ca_name, "DE", "SubjST", "SubjL", "SubjO", "SubjOU")
|
1222
|
-
# TODO add switch to auto-install the script if desired
|
1223
|
-
# for windows: powershell Import-Certificate -FilePath ConSurvCA_20241121000236.crt -CertStoreLocation 'Cert:\CurrentUser\Root'
|
1224
|
-
# for linux: (TODO)
|
1225
|
-
|
1226
|
-
@GeneralUtilities.check_arguments
|
1227
|
-
def generate_certificate_for_development_purposes_for_product(self, repository_folder: str):
|
1228
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
1229
|
-
product_name = os.path.basename(repository_folder)
|
1230
|
-
ca_folder: str = os.path.join(repository_folder, "Other", "Resources", "CA")
|
1231
|
-
self.__generate_certificate_for_development_purposes(product_name, os.path.join(repository_folder, "Other", "Resources"), ca_folder, None)
|
1232
|
-
|
1233
|
-
@GeneralUtilities.check_arguments
|
1234
|
-
def generate_certificate_for_development_purposes_for_external_service(self, service_folder: str, domain: str = None):
|
1235
|
-
testservice_name = os.path.basename(service_folder)
|
1236
|
-
ca_folder: str = None # TODO
|
1237
|
-
self.__generate_certificate_for_development_purposes(testservice_name, os.path.join(service_folder, "Resources"), ca_folder, domain)
|
1238
|
-
|
1239
|
-
@GeneralUtilities.check_arguments
|
1240
|
-
def generate_certificate_for_development_purposes_for_codeunit(self, codeunit_folder: str, domain: str = None):
|
1241
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
1242
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
1243
|
-
self.ensure_product_resource_is_imported(codeunit_folder, "CA")
|
1244
|
-
ca_folder: str = os.path.join(codeunit_folder, "Other", "Resources", "CA")
|
1245
|
-
self.__generate_certificate_for_development_purposes(codeunit_name, os.path.join(codeunit_folder, "Other", "Resources"), ca_folder, domain)
|
1246
|
-
|
1247
|
-
@GeneralUtilities.check_arguments
|
1248
|
-
def __generate_certificate_for_development_purposes(self, service_name: str, resources_folder: str, ca_folder: str, domain: str = None):
|
1249
|
-
if domain is None:
|
1250
|
-
domain = f"{service_name}.test.local"
|
1251
|
-
domain = domain.lower()
|
1252
|
-
resource_name: str = "DevelopmentCertificate"
|
1253
|
-
certificate_folder: str = os.path.join(resources_folder, resource_name)
|
1254
|
-
|
1255
|
-
resource_content_filename: str = service_name+resource_name
|
1256
|
-
certificate_file = os.path.join(certificate_folder, f"{domain}.crt")
|
1257
|
-
unsignedcertificate_file = os.path.join(certificate_folder, f"{domain}.unsigned.crt")
|
1258
|
-
certificate_exists = os.path.exists(certificate_file)
|
1259
|
-
if certificate_exists:
|
1260
|
-
certificate_expired = GeneralUtilities.certificate_is_expired(certificate_file)
|
1261
|
-
generate_new_certificate = certificate_expired
|
1262
|
-
else:
|
1263
|
-
generate_new_certificate = True
|
1264
|
-
if generate_new_certificate:
|
1265
|
-
GeneralUtilities.ensure_directory_does_not_exist(certificate_folder)
|
1266
|
-
GeneralUtilities.ensure_directory_exists(certificate_folder)
|
1267
|
-
GeneralUtilities.write_message_to_stdout("Generate TLS-certificate for development-purposes.")
|
1268
|
-
self.__sc.generate_certificate(certificate_folder, domain, resource_content_filename, "DE", "SubjST", "SubjL", "SubjO", "SubjOU")
|
1269
|
-
self.__sc.generate_certificate_sign_request(certificate_folder, domain, resource_content_filename, "DE", "SubjST", "SubjL", "SubjO", "SubjOU")
|
1270
|
-
ca_name = os.path.basename(self.__sc.find_last_file_by_extension(ca_folder, "crt"))[:-4]
|
1271
|
-
self.__sc.sign_certificate(certificate_folder, ca_folder, ca_name, domain, resource_content_filename)
|
1272
|
-
GeneralUtilities.ensure_file_does_not_exist(unsignedcertificate_file)
|
1273
|
-
|
1274
|
-
@GeneralUtilities.check_arguments
|
1275
|
-
def copy_product_resource_to_codeunit_resource_folder(self, codeunit_folder: str, resourcename: str) -> None:
|
1276
|
-
repository_folder = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
|
1277
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
1278
|
-
src_folder = GeneralUtilities.resolve_relative_path(f"Other/Resources/{resourcename}", repository_folder)
|
1279
|
-
GeneralUtilities.assert_condition(os.path.isdir(src_folder), f"Required product-resource {resourcename} does not exist. Expected folder: {src_folder}")
|
1280
|
-
trg_folder = GeneralUtilities.resolve_relative_path(f"Other/Resources/{resourcename}", codeunit_folder)
|
1281
|
-
GeneralUtilities.ensure_directory_does_not_exist(trg_folder)
|
1282
|
-
GeneralUtilities.ensure_directory_exists(trg_folder)
|
1283
|
-
GeneralUtilities.copy_content_of_folder(src_folder, trg_folder)
|
1284
|
-
|
1285
|
-
@GeneralUtilities.check_arguments
|
1286
|
-
def ensure_product_resource_is_imported(self, codeunit_folder: str, product_resource_name: str) -> None:
|
1287
|
-
product_folder = os.path.dirname(codeunit_folder)
|
1288
|
-
source_folder = os.path.join(product_folder, "Other", "Resources", product_resource_name)
|
1289
|
-
target_folder = os.path.join(codeunit_folder, "Other", "Resources", product_resource_name)
|
1290
|
-
GeneralUtilities.ensure_directory_does_not_exist(target_folder)
|
1291
|
-
GeneralUtilities.ensure_directory_exists(target_folder)
|
1292
|
-
GeneralUtilities.copy_content_of_folder(source_folder, target_folder)
|
1293
|
-
|
1294
|
-
@GeneralUtilities.check_arguments
|
1295
|
-
def get_codeunits(self, repository_folder: str, ignore_disabled_codeunits: bool = True) -> list[str]:
|
1296
|
-
codeunits_with_dependent_codeunits: dict[str, set[str]] = dict[str, set[str]]()
|
1297
|
-
subfolders = GeneralUtilities.get_direct_folders_of_folder(repository_folder)
|
1298
|
-
for subfolder in subfolders:
|
1299
|
-
codeunit_name: str = os.path.basename(subfolder)
|
1300
|
-
codeunit_file = os.path.join(subfolder, f"{codeunit_name}.codeunit.xml")
|
1301
|
-
if os.path.exists(codeunit_file):
|
1302
|
-
if ignore_disabled_codeunits and not self.codeunit_is_enabled(codeunit_file):
|
1303
|
-
continue
|
1304
|
-
codeunits_with_dependent_codeunits[codeunit_name] = self.get_dependent_code_units(codeunit_file)
|
1305
|
-
sorted_codeunits = self._internal_get_sorted_codeunits_by_dict(codeunits_with_dependent_codeunits)
|
1306
|
-
return sorted_codeunits
|
1307
|
-
|
1308
|
-
@GeneralUtilities.check_arguments
|
1309
|
-
def codeunit_is_enabled(self, codeunit_file: str) -> bool:
|
1310
|
-
root: etree._ElementTree = etree.parse(codeunit_file)
|
1311
|
-
return GeneralUtilities.string_to_boolean(str(root.xpath('//cps:codeunit/@enabled', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'})[0]))
|
1312
|
-
|
1313
|
-
@GeneralUtilities.check_arguments
|
1314
|
-
def merge_to_main_branch(self, repository_folder: str, source_branch: str = "other/next-release", target_branch: str = "main", verbosity: int = 1, additional_arguments_file: str = None, fast_forward_source_branch: bool = False) -> None:
|
1315
|
-
# This is an automation for automatic merges. Usually this merge would be done by a pull request in a source-code-version-control-platform
|
1316
|
-
# (like GitHub, GitLab or Azure DevOps)
|
1317
|
-
GeneralUtilities.write_message_to_stdout(f"Merge to main-branch...")
|
1318
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
1319
|
-
self.assert_no_uncommitted_changes(repository_folder)
|
1320
|
-
|
1321
|
-
src_branch_commit_id = self.__sc.git_get_commit_id(repository_folder, source_branch)
|
1322
|
-
if (src_branch_commit_id == self.__sc.git_get_commit_id(repository_folder, target_branch)):
|
1323
|
-
raise ValueError(f"Can not merge because the source-branch and the target-branch are on the same commit (commit-id: {src_branch_commit_id})")
|
1324
|
-
|
1325
|
-
self.__sc.git_checkout(repository_folder, source_branch)
|
1326
|
-
self.build_codeunits(repository_folder, verbosity, TasksForCommonProjectStructure.get_qualitycheck_environment_name(), additional_arguments_file, True, None, [], True, "Check if product is buildable")
|
1327
|
-
self.__sc.git_merge(repository_folder, source_branch, target_branch, False, False, None, False, False)
|
1328
|
-
self.__sc.git_commit(repository_folder, f'Merge branch {source_branch} into {target_branch}', stage_all_changes=True, no_changes_behavior=1)
|
1329
|
-
self.__sc.git_checkout(repository_folder, target_branch)
|
1330
|
-
if fast_forward_source_branch:
|
1331
|
-
self.__sc.git_merge(repository_folder, target_branch, source_branch, True, True)
|
1332
|
-
|
1333
|
-
@GeneralUtilities.check_arguments
|
1334
|
-
def merge_to_stable_branch(self, create_release_file: str, createRelease_configuration: CreateReleaseConfiguration):
|
1335
|
-
|
1336
|
-
GeneralUtilities.write_message_to_stdout(f"Create release for project {createRelease_configuration.projectname}.")
|
1337
|
-
GeneralUtilities.write_message_to_stdout(f"Merge to stable-branch...")
|
1338
|
-
self.__sc.assert_is_git_repository(createRelease_configuration.repository_folder)
|
1339
|
-
folder_of_create_release_file_file = os.path.abspath(os.path.dirname(create_release_file))
|
1340
|
-
|
1341
|
-
build_repository_folder = GeneralUtilities.resolve_relative_path(f"..{os.path.sep}..", folder_of_create_release_file_file)
|
1342
|
-
self.assert_no_uncommitted_changes(build_repository_folder)
|
1343
|
-
|
1344
|
-
repository_folder = GeneralUtilities.resolve_relative_path(f"Submodules{os.path.sep}{createRelease_configuration.repository_folder_name}", build_repository_folder)
|
1345
|
-
mergeInformation = MergeToStableBranchInformationForProjectInCommonProjectFormat(repository_folder, createRelease_configuration.additional_arguments_file, createRelease_configuration.artifacts_folder)
|
1346
|
-
createReleaseInformation = CreateReleaseInformationForProjectInCommonProjectFormat(repository_folder, createRelease_configuration.artifacts_folder, createRelease_configuration.projectname, createRelease_configuration.public_repository_url, mergeInformation.targetbranch, mergeInformation.additional_arguments_file, mergeInformation.export_target, createRelease_configuration.push_artifacts_scripts_folder)
|
1347
|
-
createReleaseInformation.verbosity = createRelease_configuration.verbosity
|
1348
|
-
|
1349
|
-
self.__sc.git_checkout(build_repository_folder, createRelease_configuration.build_repository_branch)
|
1350
|
-
self.__sc.git_checkout(createReleaseInformation.reference_repository, createRelease_configuration.reference_repository_branch_name)
|
1351
|
-
|
1352
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
1353
|
-
self.__sc.assert_is_git_repository(createReleaseInformation.reference_repository)
|
1354
|
-
|
1355
|
-
# TODO check if repository_folder-merge-source-branch and repository_folder-merge-target-branch have different commits
|
1356
|
-
self.assert_no_uncommitted_changes(repository_folder)
|
1357
|
-
mergeInformation.verbosity = createRelease_configuration.verbosity
|
1358
|
-
mergeInformation.push_target_branch = createRelease_configuration.remotename is not None
|
1359
|
-
mergeInformation.push_target_branch_remote_name = createRelease_configuration.remotename
|
1360
|
-
mergeInformation.push_source_branch = createRelease_configuration.remotename is not None
|
1361
|
-
mergeInformation.push_source_branch_remote_name = createRelease_configuration.remotename
|
1362
|
-
new_project_version = self.__standardized_tasks_merge_to_stable_branch(mergeInformation)
|
1363
|
-
|
1364
|
-
self.__standardized_tasks_release_artifact(createReleaseInformation)
|
1365
|
-
|
1366
|
-
GeneralUtilities.assert_condition(createRelease_configuration.reference_repository_remote_name is not None, "Remote for reference-repository not set.")
|
1367
|
-
self.__sc.git_commit(createReleaseInformation.reference_repository, f"Added reference of {createRelease_configuration.projectname} v{new_project_version}")
|
1368
|
-
self.__sc.git_push_with_retry(createReleaseInformation.reference_repository, createRelease_configuration.reference_repository_remote_name, createRelease_configuration.reference_repository_branch_name, createRelease_configuration.reference_repository_branch_name, verbosity=createRelease_configuration.verbosity)
|
1369
|
-
self.__sc.git_commit(build_repository_folder, f"Added {createRelease_configuration.projectname} release v{new_project_version}")
|
1370
|
-
GeneralUtilities.write_message_to_stdout(f"Finished release for project {createRelease_configuration.projectname} v{new_project_version} successfully.")
|
1371
|
-
return new_project_version
|
1372
|
-
|
1373
|
-
@GeneralUtilities.check_arguments
|
1374
|
-
def create_release_starter_for_repository_in_standardized_format(self, create_release_file: str, logfile: str, verbosity: int, addLogOverhead: bool, commandline_arguments: list[str]):
|
1375
|
-
# hint: arguments can be overwritten by commandline_arguments
|
1376
|
-
folder_of_this_file = os.path.dirname(create_release_file)
|
1377
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
1378
|
-
result = self.__sc.run_program("python", f"CreateRelease.py --overwrite_verbosity {str(verbosity)}", folder_of_this_file, verbosity=verbosity, log_file=logfile, addLogOverhead=addLogOverhead, print_live_output=True, throw_exception_if_exitcode_is_not_zero=False)
|
1379
|
-
if result[0] != 0:
|
1380
|
-
raise ValueError(f"CreateRelease.py resulted in exitcode {result[0]}.")
|
1381
|
-
|
1382
|
-
@GeneralUtilities.check_arguments
|
1383
|
-
def __standardized_tasks_merge_to_stable_branch(self, information: MergeToStableBranchInformationForProjectInCommonProjectFormat) -> str:
|
1384
|
-
src_branch_commit_id = self.__sc.git_get_commit_id(information.repository, information.sourcebranch)
|
1385
|
-
if (src_branch_commit_id == self.__sc.git_get_commit_id(information.repository, information.targetbranch)):
|
1386
|
-
raise ValueError(f"Can not merge because the source-branch and the target-branch are on the same commit (commit-id: {src_branch_commit_id})")
|
1387
|
-
|
1388
|
-
self.assert_no_uncommitted_changes(information.repository)
|
1389
|
-
self.__sc.git_checkout(information.repository, information.sourcebranch)
|
1390
|
-
self.__sc.run_program("git", "clean -dfx", information.repository, verbosity=information.verbosity, throw_exception_if_exitcode_is_not_zero=True)
|
1391
|
-
project_version = self.__sc.get_semver_version_from_gitversion(information.repository)
|
1392
|
-
|
1393
|
-
self.build_codeunits(information.repository, information.verbosity, information.target_environmenttype_for_qualitycheck, information.additional_arguments_file, False, information.export_target, [], True, "Productive build") # verify that codeunits are buildable with the productive config before the merge
|
1394
|
-
|
1395
|
-
self.assert_no_uncommitted_changes(information.repository)
|
1396
|
-
|
1397
|
-
commit_id = self.__sc.git_merge(information.repository, information.sourcebranch, information.targetbranch, True, True)
|
1398
|
-
self.__sc.git_create_tag(information.repository, commit_id, f"v{project_version}", information.sign_git_tags)
|
1399
|
-
|
1400
|
-
if information.push_source_branch:
|
1401
|
-
GeneralUtilities.write_message_to_stdout("Push source-branch...")
|
1402
|
-
self.__sc.git_push_with_retry(information.repository, information.push_source_branch_remote_name, information.sourcebranch, information.sourcebranch, pushalltags=True, verbosity=information.verbosity)
|
1403
|
-
|
1404
|
-
if information.push_target_branch:
|
1405
|
-
GeneralUtilities.write_message_to_stdout("Push target-branch...")
|
1406
|
-
self.__sc.git_push_with_retry(information.repository, information.push_target_branch_remote_name, information.targetbranch, information.targetbranch, pushalltags=True, verbosity=information.verbosity)
|
1407
|
-
|
1408
|
-
return project_version
|
1409
|
-
|
1410
|
-
@GeneralUtilities.check_arguments
|
1411
|
-
def standardized_tasks_build_for_docker_project(self, build_script_file: str, target_environment_type: str, verbosity: int, commandline_arguments: list[str], custom_arguments: dict[str, str] = None) -> None:
|
1412
|
-
self.standardized_tasks_build_for_docker_project_with_additional_build_arguments(build_script_file, target_environment_type, verbosity, commandline_arguments, custom_arguments)
|
1413
|
-
self.generate_sbom_for_docker_image(build_script_file, verbosity, commandline_arguments)
|
1414
|
-
|
1415
|
-
@GeneralUtilities.check_arguments
|
1416
|
-
def merge_sbom_file_from_dependent_codeunit_into_this(self, build_script_file: str, dependent_codeunit_name: str) -> None:
|
1417
|
-
codeunitname: str = Path(os.path.dirname(build_script_file)).parent.parent.name
|
1418
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("../..", str(os.path.dirname(build_script_file)))
|
1419
|
-
repository_folder = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
|
1420
|
-
dependent_codeunit_folder = os.path.join(repository_folder, dependent_codeunit_name).replace("\\", "/")
|
1421
|
-
t = TasksForCommonProjectStructure()
|
1422
|
-
sbom_file = f"{repository_folder}/{codeunitname}/Other/Artifacts/BOM/{codeunitname}.{t.get_version_of_codeunit_folder(codeunit_folder)}.sbom.xml"
|
1423
|
-
dependent_sbom_file = f"{repository_folder}/{dependent_codeunit_name}/Other/Artifacts/BOM/{dependent_codeunit_name}.{t.get_version_of_codeunit_folder(dependent_codeunit_folder)}.sbom.xml"
|
1424
|
-
self.merge_sbom_file(repository_folder, dependent_sbom_file, sbom_file)
|
1425
|
-
|
1426
|
-
@GeneralUtilities.check_arguments
|
1427
|
-
def merge_sbom_file(self, repository_folder: str, source_sbom_file_relative: str, target_sbom_file_relative: str) -> None:
|
1428
|
-
GeneralUtilities.assert_file_exists(os.path.join(repository_folder, source_sbom_file_relative))
|
1429
|
-
GeneralUtilities.assert_file_exists(os.path.join(repository_folder, target_sbom_file_relative))
|
1430
|
-
target_original_sbom_file_relative = os.path.dirname(target_sbom_file_relative)+"/"+os.path.basename(target_sbom_file_relative)+".original.xml"
|
1431
|
-
os.rename(os.path.join(repository_folder, target_sbom_file_relative), os.path.join(repository_folder, target_original_sbom_file_relative))
|
1432
|
-
|
1433
|
-
self.ensure_cyclonedxcli_is_available(repository_folder)
|
1434
|
-
cyclonedx_exe = os.path.join(repository_folder, "Other/Resources/CycloneDXCLI/cyclonedx-cli")
|
1435
|
-
if GeneralUtilities.current_system_is_windows():
|
1436
|
-
cyclonedx_exe = cyclonedx_exe+".exe"
|
1437
|
-
self.__sc.run_program(cyclonedx_exe, f"merge --input-files {source_sbom_file_relative} {target_original_sbom_file_relative} --output-file {target_sbom_file_relative}", repository_folder)
|
1438
|
-
GeneralUtilities.ensure_file_does_not_exist(os.path.join(repository_folder, target_original_sbom_file_relative))
|
1439
|
-
self.__sc.format_xml_file(os.path.join(repository_folder, target_sbom_file_relative))
|
1440
|
-
|
1441
|
-
@GeneralUtilities.check_arguments
|
1442
|
-
def standardized_tasks_build_for_docker_project_with_additional_build_arguments(self, build_script_file: str, target_environment_type: str, verbosity: int, commandline_arguments: list[str], custom_arguments: dict[str, str]) -> None:
|
1443
|
-
use_cache: bool = False
|
1444
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
1445
|
-
codeunitname: str = Path(os.path.dirname(build_script_file)).parent.parent.name
|
1446
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("../..", str(os.path.dirname(build_script_file)))
|
1447
|
-
codeunitname_lower = codeunitname.lower()
|
1448
|
-
codeunit_file = os.path.join(codeunit_folder, f"{codeunitname}.codeunit.xml")
|
1449
|
-
codeunitversion = self.get_version_of_codeunit(codeunit_file)
|
1450
|
-
args = ["image", "build", "--pull", "--force-rm", "--progress=plain", "--build-arg", f"TargetEnvironmentType={target_environment_type}", "--build-arg", f"CodeUnitName={codeunitname}", "--build-arg", f"CodeUnitVersion={codeunitversion}", "--build-arg", f"CodeUnitOwnerName={self.get_codeunit_owner_name(codeunit_file)}", "--build-arg", f"CodeUnitOwnerEMailAddress={self.get_codeunit_owner_emailaddress(codeunit_file)}"]
|
1451
|
-
if custom_arguments is not None:
|
1452
|
-
for custom_argument_key, custom_argument_value in custom_arguments.items():
|
1453
|
-
args.append("--build-arg")
|
1454
|
-
args.append(f"{custom_argument_key}={custom_argument_value}")
|
1455
|
-
args = args+["--tag", f"{codeunitname_lower}:latest", "--tag", f"{codeunitname_lower}:{codeunitversion}", "--file", f"{codeunitname}/Dockerfile"]
|
1456
|
-
if not use_cache:
|
1457
|
-
args.append("--no-cache")
|
1458
|
-
args.append(".")
|
1459
|
-
codeunit_content_folder = codeunit_folder
|
1460
|
-
self.__sc.run_program_argsasarray("docker", args, codeunit_content_folder, verbosity=verbosity, print_errors_as_information=True)
|
1461
|
-
artifacts_folder = GeneralUtilities.resolve_relative_path("Other/Artifacts", codeunit_folder)
|
1462
|
-
app_artifacts_folder = os.path.join(artifacts_folder, "BuildResult_OCIImage")
|
1463
|
-
GeneralUtilities.ensure_directory_does_not_exist(app_artifacts_folder)
|
1464
|
-
GeneralUtilities.ensure_directory_exists(app_artifacts_folder)
|
1465
|
-
self.__sc.run_program_argsasarray("docker", ["save", "--output", f"{codeunitname}_v{codeunitversion}.tar", f"{codeunitname_lower}:{codeunitversion}"], app_artifacts_folder, verbosity=verbosity, print_errors_as_information=True)
|
1466
|
-
self.copy_source_files_to_output_directory(build_script_file)
|
1467
|
-
|
1468
|
-
@GeneralUtilities.check_arguments
|
1469
|
-
def generate_sbom_for_docker_image(self, build_script_file: str, verbosity: int, commandline_arguments: list[str]) -> None:
|
1470
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
1471
|
-
codeunitname: str = Path(os.path.dirname(build_script_file)).parent.parent.name
|
1472
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("../..", str(os.path.dirname(build_script_file)))
|
1473
|
-
artifacts_folder = GeneralUtilities.resolve_relative_path("Other/Artifacts", codeunit_folder)
|
1474
|
-
codeunitname_lower = codeunitname.lower()
|
1475
|
-
sbom_folder = os.path.join(artifacts_folder, "BOM")
|
1476
|
-
codeunitversion = self.get_version_of_codeunit(os.path.join(codeunit_folder, f"{codeunitname}.codeunit.xml"))
|
1477
|
-
GeneralUtilities.ensure_directory_exists(sbom_folder)
|
1478
|
-
self.__sc.run_program_argsasarray("docker", ["sbom", "--format", "cyclonedx", f"{codeunitname_lower}:{codeunitversion}", "--output", f"{codeunitname}.{codeunitversion}.sbom.xml"], sbom_folder, verbosity=verbosity, print_errors_as_information=True)
|
1479
|
-
self.__sc.format_xml_file(sbom_folder+f"/{codeunitname}.{codeunitversion}.sbom.xml")
|
1480
|
-
|
1481
|
-
@GeneralUtilities.check_arguments
|
1482
|
-
def push_docker_build_artifact(self, push_artifacts_file: str, registry: str, verbosity: int, push_readme: bool, commandline_arguments: list[str], repository_folder_name: str, remote_image_name: str = None) -> None:
|
1483
|
-
folder_of_this_file = os.path.dirname(push_artifacts_file)
|
1484
|
-
filename = os.path.basename(push_artifacts_file)
|
1485
|
-
codeunitname_regex: str = "([a-zA-Z0-9]+)"
|
1486
|
-
filename_regex: str = f"PushArtifacts\\.{codeunitname_regex}\\.py"
|
1487
|
-
if match := re.search(filename_regex, filename, re.IGNORECASE):
|
1488
|
-
codeunitname = match.group(1)
|
1489
|
-
else:
|
1490
|
-
raise ValueError(f"Expected push-artifacts-file to match the regex \"{filename_regex}\" where \"{codeunitname_regex}\" represents the codeunit-name.")
|
1491
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
1492
|
-
repository_folder = GeneralUtilities.resolve_relative_path(f"..{os.path.sep}..{os.path.sep}Submodules{os.path.sep}{repository_folder_name}", folder_of_this_file)
|
1493
|
-
codeunit_folder = os.path.join(repository_folder, codeunitname)
|
1494
|
-
artifacts_folder = self.get_artifacts_folder(repository_folder, codeunitname)
|
1495
|
-
applicationimage_folder = os.path.join(artifacts_folder, "BuildResult_OCIImage")
|
1496
|
-
image_file = self.__sc.find_file_by_extension(applicationimage_folder, "tar")
|
1497
|
-
image_filename = os.path.basename(image_file)
|
1498
|
-
codeunit_version = self.get_version_of_codeunit(os.path.join(codeunit_folder, f"{codeunitname}.codeunit.xml"))
|
1499
|
-
if remote_image_name is None:
|
1500
|
-
remote_image_name = codeunitname
|
1501
|
-
remote_image_name = remote_image_name.lower()
|
1502
|
-
local_image_name = codeunitname.lower()
|
1503
|
-
remote_repo = f"{registry}/{remote_image_name}"
|
1504
|
-
remote_image_latest = f"{remote_repo}:latest"
|
1505
|
-
remote_image_version = f"{remote_repo}:{codeunit_version}"
|
1506
|
-
GeneralUtilities.write_message_to_stdout("Load image...")
|
1507
|
-
self.__sc.run_program("docker", f"load --input {image_filename}", applicationimage_folder, verbosity=verbosity)
|
1508
|
-
GeneralUtilities.write_message_to_stdout("Tag image...")
|
1509
|
-
self.__sc.run_program_with_retry("docker", f"tag {local_image_name}:{codeunit_version} {remote_image_latest}", verbosity=verbosity)
|
1510
|
-
self.__sc.run_program_with_retry("docker", f"tag {local_image_name}:{codeunit_version} {remote_image_version}", verbosity=verbosity)
|
1511
|
-
GeneralUtilities.write_message_to_stdout("Push image...")
|
1512
|
-
self.__sc.run_program_with_retry("docker", f"push {remote_image_latest}", verbosity=verbosity)
|
1513
|
-
self.__sc.run_program_with_retry("docker", f"push {remote_image_version}", verbosity=verbosity)
|
1514
|
-
if push_readme:
|
1515
|
-
self.__sc.run_program_with_retry("docker-pushrm", f"{remote_repo}", codeunit_folder, verbosity=verbosity)
|
1516
|
-
|
1517
|
-
@GeneralUtilities.check_arguments
|
1518
|
-
def get_dependent_code_units(self, codeunit_file: str) -> list[str]:
|
1519
|
-
root: etree._ElementTree = etree.parse(codeunit_file)
|
1520
|
-
result = set(root.xpath('//cps:dependentcodeunit/text()', namespaces={'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'}))
|
1521
|
-
result = sorted(result)
|
1522
|
-
return result
|
1523
|
-
|
1524
|
-
@GeneralUtilities.check_arguments
|
1525
|
-
def dependent_codeunit_exists(self, repository: str, codeunit: str) -> None:
|
1526
|
-
codeunit_file = f"{repository}/{codeunit}/{codeunit}.codeunit.xml"
|
1527
|
-
return os.path.isfile(codeunit_file)
|
1528
|
-
|
1529
|
-
@GeneralUtilities.check_arguments
|
1530
|
-
def standardized_tasks_linting_for_docker_project(self, linting_script_file: str, verbosity: int, targetenvironmenttype: str, commandline_arguments: list[str]) -> None:
|
1531
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
1532
|
-
# TODO check if there are errors in sarif-file
|
1533
|
-
|
1534
|
-
    @GeneralUtilities.check_arguments
    def copy_licence_file(self, common_tasks_scripts_file: str) -> None:
        folder_of_current_file = os.path.dirname(common_tasks_scripts_file)
        license_file = GeneralUtilities.resolve_relative_path("../../License.txt", folder_of_current_file)
        target_folder = GeneralUtilities.resolve_relative_path("Artifacts/License", folder_of_current_file)
        GeneralUtilities.ensure_directory_exists(target_folder)
        shutil.copy(license_file, target_folder)

    @GeneralUtilities.check_arguments
    def take_readmefile_from_main_readmefile_of_repository(self, common_tasks_scripts_file: str) -> None:
        folder_of_current_file = os.path.dirname(common_tasks_scripts_file)
        source_file = GeneralUtilities.resolve_relative_path("../../ReadMe.md", folder_of_current_file)
        target_file = GeneralUtilities.resolve_relative_path("../ReadMe.md", folder_of_current_file)
        GeneralUtilities.ensure_file_does_not_exist(target_file)
        shutil.copyfile(source_file, target_file)

@GeneralUtilities.check_arguments
|
1551
|
-
def standardized_tasks_do_common_tasks(self, common_tasks_scripts_file: str, codeunit_version: str, verbosity: int, targetenvironmenttype: str, clear_artifacts_folder: bool, additional_arguments_file: str, assume_dependent_codeunits_are_already_built: bool, commandline_arguments: list[str]) -> None:
|
1552
|
-
additional_arguments_file = self.get_additionalargumentsfile_from_commandline_arguments(commandline_arguments, additional_arguments_file)
|
1553
|
-
target_environmenttype = self.get_targetenvironmenttype_from_commandline_arguments(commandline_arguments, targetenvironmenttype) # pylint: disable=unused-variable
|
1554
|
-
# assume_dependent_codeunits_are_already_built = self.get_assume_dependent_codeunits_are_already_built_from_commandline_arguments(commandline_arguments, assume_dependent_codeunits_are_already_built)
|
1555
|
-
if commandline_arguments is None:
|
1556
|
-
raise ValueError('The "commandline_arguments"-parameter is not defined.')
|
1557
|
-
if len(commandline_arguments) == 0:
|
1558
|
-
raise ValueError('An empty array as argument for the "commandline_arguments"-parameter is not valid.')
|
1559
|
-
commandline_arguments = commandline_arguments[1:]
|
1560
|
-
repository_folder: str = str(Path(os.path.dirname(common_tasks_scripts_file)).parent.parent.absolute())
|
1561
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
1562
|
-
codeunit_name: str = str(os.path.basename(Path(os.path.dirname(common_tasks_scripts_file)).parent.absolute()))
|
1563
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
1564
|
-
project_version = self.get_version_of_project(repository_folder)
|
1565
|
-
codeunit_folder = os.path.join(repository_folder, codeunit_name)
|
1566
|
-
|
1567
|
-
# check codeunit-conformity
|
1568
|
-
# TODO check if foldername=="<codeunitname>[.codeunit.xml]" == <codeunitname> in file
|
1569
|
-
supported_codeunitspecificationversion = "2.9.4" # should always be the latest version of the ProjectTemplates-repository
|
1570
|
-
codeunit_file = os.path.join(codeunit_folder, f"{codeunit_name}.codeunit.xml")
|
1571
|
-
if not os.path.isfile(codeunit_file):
|
1572
|
-
raise ValueError(f'Codeunitfile "{codeunit_file}" does not exist.')
|
1573
|
-
# TODO implement usage of self.reference_latest_version_of_xsd_when_generating_xml
|
1574
|
-
namespaces = {'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure', 'xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
|
1575
|
-
root: etree._ElementTree = etree.parse(codeunit_file)
|
1576
|
-
|
1577
|
-
# check codeunit-spcecification-version
|
1578
|
-
try:
|
1579
|
-
codeunit_file_version = root.xpath('//cps:codeunit/@codeunitspecificationversion', namespaces=namespaces)[0]
|
1580
|
-
if codeunit_file_version != supported_codeunitspecificationversion:
|
1581
|
-
raise ValueError(f"ScriptCollection only supports processing codeunits with codeunit-specification-version={supported_codeunitspecificationversion}.")
|
1582
|
-
schemaLocation = root.xpath('//cps:codeunit/@xsi:schemaLocation', namespaces=namespaces)[0]
|
1583
|
-
xmlschema.validate(codeunit_file, schemaLocation)
|
1584
|
-
# TODO check if the properties codeunithastestablesourcecode, codeunithasupdatabledependencies, throwexceptionifcodeunitfilecannotbevalidated, developmentState and description exist and the values are valid
|
1585
|
-
except Exception as exception:
|
1586
|
-
if self.codeunit_throws_exception_if_codeunitfile_is_not_validatable(codeunit_file):
|
1587
|
-
raise exception
|
1588
|
-
else:
|
1589
|
-
GeneralUtilities.write_message_to_stderr(f'Warning: Codeunitfile "{codeunit_file}" can not be validated due to the following exception:')
|
1590
|
-
GeneralUtilities.write_exception_to_stderr(exception)
|
1591
|
-
|
1592
|
-
# check codeunit-name
|
1593
|
-
codeunit_name_in_codeunit_file = root.xpath('//cps:codeunit/cps:name/text()', namespaces=namespaces)[0]
|
1594
|
-
if codeunit_name != codeunit_name_in_codeunit_file:
|
1595
|
-
raise ValueError(f"The folder-name ('{codeunit_name}') is not equal to the codeunit-name ('{codeunit_name_in_codeunit_file}').")
|
1596
|
-
|
1597
|
-
# check owner-name
|
1598
|
-
codeunit_ownername_in_codeunit_file = self. get_codeunit_owner_name(codeunit_file)
|
1599
|
-
GeneralUtilities.assert_condition(GeneralUtilities.string_has_content(codeunit_ownername_in_codeunit_file), "No valid name for codeunitowner given.")
|
1600
|
-
|
1601
|
-
# check owner-emailaddress
|
1602
|
-
codeunit_owneremailaddress_in_codeunit_file = self.get_codeunit_owner_emailaddress(codeunit_file)
|
1603
|
-
GeneralUtilities.assert_condition(GeneralUtilities.string_has_content(codeunit_owneremailaddress_in_codeunit_file), "No valid email-address for codeunitowner given.")
|
1604
|
-
|
1605
|
-
# check development-state
|
1606
|
-
developmentstate = root.xpath('//cps:properties/@developmentstate', namespaces=namespaces)[0]
|
1607
|
-
developmentstate_active = "Active development"
|
1608
|
-
developmentstate_maintenance = "Maintenance-updates only"
|
1609
|
-
developmentstate_inactive = "Inactive"
|
1610
|
-
GeneralUtilities.assert_condition(developmentstate in (developmentstate_active, developmentstate_maintenance, developmentstate_inactive), f"Invalid development-state. Must be '{developmentstate_active}' or '{developmentstate_maintenance}' or '{developmentstate_inactive}' but was '{developmentstate}'.")
|
1611
|
-
|
1612
|
-
# check for mandatory files
|
1613
|
-
files = ["Other/Build/Build.py", "Other/QualityCheck/Linting.py", "Other/Reference/GenerateReference.py"]
|
1614
|
-
if self.codeunit_has_testable_sourcecode(codeunit_file):
|
1615
|
-
# TODO check if the testsettings-section appears in the codeunit-file
|
1616
|
-
files.append("Other/QualityCheck/RunTestcases.py")
|
1617
|
-
if self.codeunit_has_updatable_dependencies(codeunit_file):
|
1618
|
-
# TODO check if the updatesettings-section appears in the codeunit-file
|
1619
|
-
files.append("Other/UpdateDependencies.py")
|
1620
|
-
for file in files:
|
1621
|
-
combined_file = os.path.join(codeunit_folder, file)
|
1622
|
-
if not os.path.isfile(combined_file):
|
1623
|
-
raise ValueError(f'The mandatory file "{file}" does not exist in the codeunit-folder.')
|
1624
|
-
|
1625
|
-
if os.path.isfile(os.path.join(codeunit_folder, "Other", "requirements.txt")):
|
1626
|
-
self.install_requirementstxt_for_codeunit(codeunit_folder, verbosity)
|
1627
|
-
|
1628
|
-
# check developer
|
1629
|
-
if self.validate_developers_of_repository:
|
1630
|
-
expected_authors: list[tuple[str, str]] = []
|
1631
|
-
expected_authors_in_xml = root.xpath('//cps:codeunit/cps:developerteam/cps:developer', namespaces=namespaces)
|
1632
|
-
for expected_author in expected_authors_in_xml:
|
1633
|
-
author_name = expected_author.xpath('./cps:developername/text()', namespaces=namespaces)[0]
|
1634
|
-
author_emailaddress = expected_author.xpath('./cps:developeremailaddress/text()', namespaces=namespaces)[0]
|
1635
|
-
expected_authors.append((author_name, author_emailaddress))
|
1636
|
-
actual_authors: list[tuple[str, str]] = self.__sc.get_all_authors_and_committers_of_repository(repository_folder, codeunit_name, verbosity)
|
1637
|
-
# TODO refactor this check to only check commits which are behind this but which are not already on main
|
1638
|
-
# TODO verify also if the commit is signed by a valid key of the author
|
1639
|
-
for actual_author in actual_authors:
|
1640
|
-
if not (actual_author) in expected_authors:
|
1641
|
-
actual_author_formatted = f"{actual_author[0]} <{actual_author[1]}>"
|
1642
|
-
raise ValueError(f'Author/Comitter "{actual_author_formatted}" is not in the codeunit-developer-team. If {actual_author} is a authorized developer for this codeunit you should consider defining this in the codeunit-file or adapting the name using a .mailmap-file (see https://git-scm.com/docs/gitmailmap). The developer-team-check can also be disabled using the property validate_developers_of_repository.')
|
1643
|
-
|
1644
|
-
dependent_codeunits = self.get_dependent_code_units(codeunit_file)
|
1645
|
-
for dependent_codeunit in dependent_codeunits:
|
1646
|
-
if not self.dependent_codeunit_exists(repository_folder, dependent_codeunit):
|
1647
|
-
raise ValueError(f"Codeunit {codeunit_name} does have dependent codeunit {dependent_codeunit} which does not exist.")
|
1648
|
-
|
1649
|
-
# TODO implement cycle-check for dependent codeunits
|
1650
|
-
|
1651
|
-
# clear previously builded artifacts if desired:
|
1652
|
-
if clear_artifacts_folder:
|
1653
|
-
artifacts_folder = os.path.join(codeunit_folder, "Other", "Artifacts")
|
1654
|
-
GeneralUtilities.ensure_directory_does_not_exist(artifacts_folder)
|
1655
|
-
|
1656
|
-
# get artifacts from dependent codeunits
|
1657
|
-
# if assume_dependent_codeunits_are_already_built:
|
1658
|
-
# self.build_dependent_code_units(repository_folder, codeunit_name, verbosity, target_environmenttype, additional_arguments_file, commandline_arguments)
|
1659
|
-
self.copy_artifacts_from_dependent_code_units(repository_folder, codeunit_name)
|
1660
|
-
|
1661
|
-
# update codeunit-version
|
1662
|
-
self.update_version_of_codeunit(common_tasks_scripts_file, codeunit_version)
|
1663
|
-
|
1664
|
-
# set project version
|
1665
|
-
package_json_file = os.path.join(repository_folder, "package.json") # TDOO move this to a general project-specific (and codeunit-independent-script)
|
1666
|
-
if os.path.isfile(package_json_file):
|
1667
|
-
package_json_data: str = None
|
1668
|
-
with open(package_json_file, "r", encoding="utf-8") as f1:
|
1669
|
-
package_json_data = json.load(f1)
|
1670
|
-
package_json_data["version"] = project_version
|
1671
|
-
with open(package_json_file, "w", encoding="utf-8") as f2:
|
1672
|
-
json.dump(package_json_data, f2, indent=2)
|
1673
|
-
GeneralUtilities.write_text_to_file(package_json_file, GeneralUtilities.read_text_from_file(package_json_file).replace("\r", ""))
|
1674
|
-
|
1675
|
-
# set default constants
|
1676
|
-
self.set_default_constants(os.path.join(codeunit_folder))
|
1677
|
-
|
1678
|
-
# Copy changelog-file
|
1679
|
-
changelog_folder = os.path.join(repository_folder, "Other", "Resources", "Changelog")
|
1680
|
-
changelog_file = os.path.join(changelog_folder, f"v{project_version}.md")
|
1681
|
-
target_folder = os.path.join(codeunit_folder, "Other", "Artifacts", "Changelog")
|
1682
|
-
GeneralUtilities.ensure_directory_exists(target_folder)
|
1683
|
-
shutil.copy(changelog_file, target_folder)
|
1684
|
-
|
1685
|
-
# Hints-file
|
1686
|
-
hints_file = os.path.join(codeunit_folder, "Other", "Reference", "ReferenceContent", "Hints.md")
|
1687
|
-
if not os.path.isfile(hints_file):
|
1688
|
-
raise ValueError(f"Hints-file '{hints_file}' does not exist.")
|
1689
|
-
|
1690
|
-
# Copy license-file
|
1691
|
-
self.copy_licence_file(common_tasks_scripts_file)
|
1692
|
-
|
1693
|
-
# Generate diff-report
|
1694
|
-
self.generate_diff_report(repository_folder, codeunit_name, codeunit_version)
|
1695
|
-
|
1696
|
-
    @GeneralUtilities.check_arguments
    def __suport_information_exists(self, repository_folder: str, version_of_product: str) -> bool:
        self.__sc.assert_is_git_repository(repository_folder)
        folder = os.path.join(repository_folder, "Other", "Resources", "Support")
        file = os.path.join(folder, "InformationAboutSupportedVersions.csv")
        if not os.path.isfile(file):
            return False
        entries = GeneralUtilities.read_csv_file(file, True)
        for entry in entries:
            if entry[0] == version_of_product:
                return True
        return False

    @GeneralUtilities.check_arguments
    def get_versions(self, repository_folder: str) -> list[tuple[str, datetime, datetime]]:
        self.__sc.assert_is_git_repository(repository_folder)
        folder = os.path.join(repository_folder, "Other", "Resources", "Support")
        file = os.path.join(folder, "InformationAboutSupportedVersions.csv")
        result: list[(str, datetime, datetime)] = list[(str, datetime, datetime)]()
        if not os.path.isfile(file):
            return result
        entries = GeneralUtilities.read_csv_file(file, True)
        for entry in entries:
            d1 = GeneralUtilities.string_to_datetime(entry[1])
            if d1.tzinfo is None:
                d1 = d1.replace(tzinfo=timezone.utc)
            d2 = GeneralUtilities.string_to_datetime(entry[2])
            if d2.tzinfo is None:
                d2 = d2.replace(tzinfo=timezone.utc)
            result.append((entry[0], d1, d2))
        return result

    @GeneralUtilities.check_arguments
    def get_supported_versions(self, repository_folder: str, moment: datetime) -> list[tuple[str, datetime, datetime]]:
        self.__sc.assert_is_git_repository(repository_folder)
        result: list[tuple[str, datetime, datetime]] = list[tuple[str, datetime, datetime]]()
        for entry in self.get_versions(repository_folder):
            if entry[1] <= moment and moment <= entry[2]:
                result.append(entry)
        return result

    @GeneralUtilities.check_arguments
    def get_unsupported_versions(self, repository_folder: str, moment: datetime) -> list[tuple[str, datetime, datetime]]:
        self.__sc.assert_is_git_repository(repository_folder)
        result: list[tuple[str, datetime, datetime]] = list[tuple[str, datetime, datetime]]()
        for entry in self.get_versions(repository_folder):
            if not (entry[1] <= moment and moment <= entry[2]):
                result.append(entry)
        return result

    @GeneralUtilities.check_arguments
    def mark_current_version_as_supported(self, repository_folder: str, version_of_product: str, supported_from: datetime, supported_until: datetime):
        self.__sc.assert_is_git_repository(repository_folder)
        if self.__suport_information_exists(repository_folder, version_of_product):
            raise ValueError(f"Version-support for v{version_of_product} already defined.")
        folder = os.path.join(repository_folder, "Other", "Resources", "Support")
        GeneralUtilities.ensure_directory_exists(folder)
        file = os.path.join(folder, "InformationAboutSupportedVersions.csv")
        if not os.path.isfile(file):
            GeneralUtilities.ensure_file_exists(file)
            GeneralUtilities.append_line_to_file(file, "Version;SupportBegin;SupportEnd")
        GeneralUtilities.append_line_to_file(file, f"{version_of_product};{GeneralUtilities.datetime_to_string(supported_from)};{GeneralUtilities.datetime_to_string(supported_until)}")

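The support data read and written by the methods above is a semicolon-separated CSV with the header "Version;SupportBegin;SupportEnd". A hedged sketch of turning one row into the (version, supported_from, supported_until) tuples used above; the sample values and the ISO date format are assumptions, GeneralUtilities.string_to_datetime defines the real format:

from datetime import datetime, timezone

# Example row (invented): "1.2.0;2024-01-01 00:00:00;2025-01-01 00:00:00"
def parse_support_row(row: list[str]) -> tuple[str, datetime, datetime]:
    # Mirrors the tuple layout used by get_versions above.
    begin = datetime.fromisoformat(row[1]).replace(tzinfo=timezone.utc)
    end = datetime.fromisoformat(row[2]).replace(tzinfo=timezone.utc)
    return (row[0], begin, end)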
    @GeneralUtilities.check_arguments
    def get_codeunit_owner_name(self, codeunit_file: str) -> None:
        namespaces = {'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure', 'xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
        root: etree._ElementTree = etree.parse(codeunit_file)
        result = root.xpath('//cps:codeunit/cps:codeunitownername/text()', namespaces=namespaces)[0]
        return result

    @GeneralUtilities.check_arguments
    def get_codeunit_owner_emailaddress(self, codeunit_file: str) -> None:
        namespaces = {'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure', 'xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
        root: etree._ElementTree = etree.parse(codeunit_file)
        result = root.xpath('//cps:codeunit/cps:codeunitowneremailaddress/text()', namespaces=namespaces)[0]
        return result

    @GeneralUtilities.check_arguments
    def generate_diff_report(self, repository_folder: str, codeunit_name: str, current_version: str) -> None:
        self.__sc.assert_is_git_repository(repository_folder)
        codeunit_folder = os.path.join(repository_folder, codeunit_name)
        target_folder = GeneralUtilities.resolve_relative_path("Other/Artifacts/DiffReport", codeunit_folder)
        GeneralUtilities.ensure_directory_does_not_exist(target_folder)
        GeneralUtilities.ensure_directory_exists(target_folder)
        target_file_light = os.path.join(target_folder, "DiffReport.html").replace("\\", "/")
        target_file_dark = os.path.join(target_folder, "DiffReportDark.html").replace("\\", "/")
        src = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"  # hash/id of empty git-tree
        src_prefix = "Begin"
        if self.__sc.get_current_git_branch_has_tag(repository_folder):
            latest_tag = self.__sc.get_latest_git_tag(repository_folder)
            src = self.__sc.git_get_commitid_of_tag(repository_folder, latest_tag)
            src_prefix = latest_tag
        dst = "HEAD"
        dst_prefix = f"v{current_version}"

        temp_file = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
        try:
            GeneralUtilities.ensure_file_does_not_exist(temp_file)
            GeneralUtilities.write_text_to_file(temp_file, self.__sc.run_program("git", f'--no-pager diff --src-prefix={src_prefix}/ --dst-prefix={dst_prefix}/ {src} {dst} -- {codeunit_name}', repository_folder)[1])
            self.__sc.run_program_argsasarray("pygmentize", ['-l', 'diff', '-f', 'html', '-O', 'full', '-o', target_file_light, '-P', 'style=default', temp_file], repository_folder)
            self.__sc.run_program_argsasarray("pygmentize", ['-l', 'diff', '-f', 'html', '-O', 'full', '-o', target_file_dark, '-P', 'style=github-dark', temp_file], repository_folder)
        finally:
            GeneralUtilities.ensure_file_does_not_exist(temp_file)

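generate_diff_report drives git and pygmentize through ScriptCollectionCore; the following is a stand-alone sketch of the same pipeline, assuming both tools are on PATH (repository path, codeunit name and output path are placeholders):

import subprocess
import tempfile

def render_diff_report(repository_folder: str, codeunit_name: str, out_html: str) -> None:
    empty_tree = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"  # hash of the empty git tree
    diff = subprocess.run(
        ["git", "--no-pager", "diff", empty_tree, "HEAD", "--", codeunit_name],
        cwd=repository_folder, capture_output=True, text=True, check=True).stdout
    with tempfile.NamedTemporaryFile("w", suffix=".diff", delete=False) as tmp:
        tmp.write(diff)
    # pygmentize renders the captured diff as a self-contained HTML page
    subprocess.run(["pygmentize", "-l", "diff", "-f", "html", "-O", "full", "-o", out_html, tmp.name], check=True)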
    @GeneralUtilities.check_arguments
    def get_version_of_project(self, repository_folder: str) -> str:
        self.__sc.assert_is_git_repository(repository_folder)
        return self.__sc.get_semver_version_from_gitversion(repository_folder)

    @GeneralUtilities.check_arguments
    def replace_common_variables_in_nuspec_file(self, codeunit_folder: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        codeunit_name = os.path.basename(codeunit_folder)
        codeunit_version = self.get_version_of_codeunit_folder(codeunit_folder)
        nuspec_file = os.path.join(codeunit_folder, "Other", "Build", f"{codeunit_name}.nuspec")
        self.__sc.replace_version_in_nuspec_file(nuspec_file, codeunit_version)

    @GeneralUtilities.check_arguments
    def standardized_tasks_build_for_angular_codeunit(self, build_script_file: str, build_environment_target_type: str, verbosity: int, commandline_arguments: list[str]) -> None:
        build_script_folder = os.path.dirname(build_script_file)
        codeunit_folder = GeneralUtilities.resolve_relative_path("../..", build_script_folder)
        GeneralUtilities.ensure_directory_does_not_exist(f"{codeunit_folder}/.angular")
        self.standardized_tasks_build_for_node_codeunit(build_script_file, build_environment_target_type, verbosity, commandline_arguments)

    @GeneralUtilities.check_arguments
    def standardized_tasks_build_for_node_codeunit(self, build_script_file: str, build_environment_target_type: str, verbosity: int, commandline_arguments: list[str]) -> None:
        verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
        build_script_folder = os.path.dirname(build_script_file)
        codeunit_folder = GeneralUtilities.resolve_relative_path("../..", build_script_folder)
        self.run_with_epew("npm", f"run build-{build_environment_target_type}", codeunit_folder, verbosity=verbosity)
        self.standardized_tasks_build_bom_for_node_project(codeunit_folder, verbosity, commandline_arguments)
        self.copy_source_files_to_output_directory(build_script_file)

    @GeneralUtilities.check_arguments
    def standardized_tasks_build_bom_for_node_project(self, codeunit_folder: str, verbosity: int, commandline_arguments: list[str]) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
        relative_path_to_bom_file = f"Other/Artifacts/BOM/{os.path.basename(codeunit_folder)}.{self.get_version_of_codeunit_folder(codeunit_folder)}.sbom.xml"
        self.run_with_epew("cyclonedx-npm", f"--output-format xml --output-file {relative_path_to_bom_file}", codeunit_folder, verbosity=verbosity)
        self.__sc.format_xml_file(codeunit_folder+"/"+relative_path_to_bom_file)

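The SBOM step above wraps the cyclonedx-npm CLI; a minimal equivalent invocation is sketched below (the folder and the version in the output path are invented placeholders):

import subprocess

codeunit_folder = "./MyCodeUnit"
bom_file = "Other/Artifacts/BOM/MyCodeUnit.1.0.0.sbom.xml"
# Produce a CycloneDX SBOM in XML format for the npm project in codeunit_folder
subprocess.run(["cyclonedx-npm", "--output-format", "xml", "--output-file", bom_file],
               cwd=codeunit_folder, check=True)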
    @GeneralUtilities.check_arguments
    def standardized_tasks_linting_for_angular_codeunit(self, linting_script_file: str, verbosity: int, build_environment_target_type: str, commandline_arguments: list[str]) -> None:
        self.standardized_tasks_linting_for_node_codeunit(linting_script_file, verbosity, build_environment_target_type, commandline_arguments)

    @GeneralUtilities.check_arguments
    def standardized_tasks_linting_for_node_codeunit(self, linting_script_file: str, verbosity: int, build_environment_target_type: str, commandline_arguments: list[str]) -> None:
        verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
        build_script_folder = os.path.dirname(linting_script_file)
        codeunit_folder = GeneralUtilities.resolve_relative_path("../..", build_script_folder)
        self.run_with_epew("ng", "lint", codeunit_folder, verbosity=verbosity)

    @GeneralUtilities.check_arguments
    def standardized_tasks_run_testcases_for_flutter_project_in_common_project_structure(self, script_file: str, verbosity: int, args: list[str], package_name: str, build_environment_target_type: str, generate_badges: bool):
        codeunit_folder = GeneralUtilities.resolve_relative_path("../../..", script_file)
        repository_folder = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
        codeunit_name = os.path.basename(codeunit_folder)
        src_folder = GeneralUtilities.resolve_relative_path(package_name, codeunit_folder)
        verbosity = self.get_verbosity_from_commandline_arguments(args, verbosity)
        self.run_with_epew("flutter", "test --coverage", src_folder, verbosity)
        test_coverage_folder_relative = "Other/Artifacts/TestCoverage"
        test_coverage_folder = GeneralUtilities.resolve_relative_path(test_coverage_folder_relative, codeunit_folder)
        GeneralUtilities.ensure_directory_exists(test_coverage_folder)
        coverage_file_relative = f"{test_coverage_folder_relative}/TestCoverage.xml"
        coverage_file = GeneralUtilities.resolve_relative_path(coverage_file_relative, codeunit_folder)
        self.run_with_epew("lcov_cobertura", f"coverage/lcov.info --base-dir . --excludes test --output ../{coverage_file_relative} --demangle", src_folder, verbosity)

        # format correctly
        content = GeneralUtilities.read_text_from_file(coverage_file)
        content = re.sub('<![^<]+>', '', content)
        content = re.sub('\\\\', '/', content)
        content = re.sub('\\ name=\\"lib\\"', '', content)
        content = re.sub('\\ filename=\\"lib/', f' filename="{package_name}/lib/', content)
        GeneralUtilities.write_text_to_file(coverage_file, content)
        self.__testcoverage_for_flutter_project_merge_packages(coverage_file)
        self.__testcoverage_for_flutter_project_calculate_line_rate(coverage_file)

        self.run_testcases_common_post_task(repository_folder, codeunit_name, verbosity, generate_badges, build_environment_target_type, args)

    def __testcoverage_for_flutter_project_merge_packages(self, coverage_file: str):
        tree = etree.parse(coverage_file)
        root = tree.getroot()

        packages = root.findall("./packages/package")

        all_classes = []
        for pkg in packages:
            classes = pkg.find("classes")
            if classes is not None:
                all_classes.extend(classes.findall("class"))
        new_package = etree.Element("package", name="Malno")
        new_classes = etree.SubElement(new_package, "classes")
        for cls in all_classes:
            new_classes.append(cls)
        packages_node = root.find("./packages")
        packages_node.clear()
        packages_node.append(new_package)
        tree.write(coverage_file, pretty_print=True, xml_declaration=True, encoding="UTF-8")

    def __testcoverage_for_flutter_project_calculate_line_rate(self, coverage_file: str):
        tree = etree.parse(coverage_file)
        root = tree.getroot()
        package = root.find("./packages/package")
        if package is None:
            raise RuntimeError("No <package>-Element found")

        line_elements = package.findall(".//line")

        amount_of_lines = 0
        amount_of_hited_lines = 0

        for line in line_elements:
            amount_of_lines += 1
            hits = int(line.get("hits", "0"))
            if hits > 0:
                amount_of_hited_lines += 1
        line_rate = amount_of_hited_lines / amount_of_lines if amount_of_lines > 0 else 0.0
        package.set("line-rate", str(line_rate))
        tree.write(coverage_file, pretty_print=True, xml_declaration=True, encoding="UTF-8")

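The line-rate written by __testcoverage_for_flutter_project_calculate_line_rate is simply the share of <line> elements with hits > 0; a toy example with an invented cobertura fragment:

from lxml import etree

xml = """<coverage><packages><package name="demo"><classes><class>
<lines><line number="1" hits="3"/><line number="2" hits="0"/><line number="3" hits="1"/></lines>
</class></classes></package></packages></coverage>"""
package = etree.fromstring(xml).find("./packages/package")
lines = package.findall(".//line")
covered = sum(1 for line in lines if int(line.get("hits", "0")) > 0)
print(covered / len(lines))  # 2 of 3 lines hit -> 0.666...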
@GeneralUtilities.check_arguments
|
1917
|
-
def standardized_tasks_run_testcases_for_angular_codeunit(self, runtestcases_script_file: str, build_environment_target_type: str, generate_badges: bool, verbosity: int, commandline_arguments: list[str]) -> None:
|
1918
|
-
# prepare
|
1919
|
-
codeunit_name: str = os.path.basename(str(Path(os.path.dirname(runtestcases_script_file)).parent.parent.absolute()))
|
1920
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
1921
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("../..", os.path.dirname(runtestcases_script_file))
|
1922
|
-
repository_folder = os.path.dirname(codeunit_folder)
|
1923
|
-
|
1924
|
-
# run testcases
|
1925
|
-
self.standardized_tasks_run_testcases_for_node_codeunit(runtestcases_script_file, build_environment_target_type, generate_badges, verbosity, commandline_arguments)
|
1926
|
-
|
1927
|
-
# rename file
|
1928
|
-
coverage_folder = os.path.join(codeunit_folder, "Other", "Artifacts", "TestCoverage")
|
1929
|
-
target_file = os.path.join(coverage_folder, "TestCoverage.xml")
|
1930
|
-
GeneralUtilities.ensure_file_does_not_exist(target_file)
|
1931
|
-
os.rename(os.path.join(coverage_folder, "cobertura-coverage.xml"), target_file)
|
1932
|
-
self.__rename_packagename_in_coverage_file(target_file, codeunit_name)
|
1933
|
-
|
1934
|
-
# adapt backslashs to slashs
|
1935
|
-
content = GeneralUtilities.read_text_from_file(target_file)
|
1936
|
-
content = re.sub('\\\\', '/', content)
|
1937
|
-
GeneralUtilities.write_text_to_file(target_file, content)
|
1938
|
-
|
1939
|
-
# aggregate packages in testcoverage-file
|
1940
|
-
roottree: etree._ElementTree = etree.parse(target_file)
|
1941
|
-
existing_classes = list(roottree.xpath('//coverage/packages/package/classes/class'))
|
1942
|
-
|
1943
|
-
old_packages_list = roottree.xpath('//coverage/packages/package')
|
1944
|
-
for package in old_packages_list:
|
1945
|
-
package.getparent().remove(package)
|
1946
|
-
|
1947
|
-
root = roottree.getroot()
|
1948
|
-
packages_element = root.find("packages")
|
1949
|
-
package_element = etree.SubElement(packages_element, "package")
|
1950
|
-
package_element.attrib['name'] = codeunit_name
|
1951
|
-
package_element.attrib['lines-valid'] = root.attrib["lines-valid"]
|
1952
|
-
package_element.attrib['lines-covered'] = root.attrib["lines-covered"]
|
1953
|
-
package_element.attrib['line-rate'] = root.attrib["line-rate"]
|
1954
|
-
package_element.attrib['branches-valid'] = root.attrib["branches-valid"]
|
1955
|
-
package_element.attrib['branches-covered'] = root.attrib["branches-covered"]
|
1956
|
-
package_element.attrib['branch-rate'] = root.attrib["branch-rate"]
|
1957
|
-
package_element.attrib['timestamp'] = root.attrib["timestamp"]
|
1958
|
-
package_element.attrib['complexity'] = root.attrib["complexity"]
|
1959
|
-
|
1960
|
-
classes_element = etree.SubElement(package_element, "classes")
|
1961
|
-
|
1962
|
-
for existing_class in existing_classes:
|
1963
|
-
classes_element.append(existing_class)
|
1964
|
-
|
1965
|
-
result = etree.tostring(roottree, pretty_print=True).decode("utf-8")
|
1966
|
-
GeneralUtilities.write_text_to_file(target_file, result)
|
1967
|
-
|
1968
|
-
# post tasks
|
1969
|
-
self.run_testcases_common_post_task(repository_folder, codeunit_name, verbosity, generate_badges, build_environment_target_type, commandline_arguments)
|
1970
|
-
|
1971
|
-
    @GeneralUtilities.check_arguments
    def standardized_tasks_run_testcases_for_node_codeunit(self, runtestcases_script_file: str, build_environment_target_type: str, generate_badges: bool, verbosity: int, commandline_arguments: list[str]) -> None:
        verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
        codeunit_folder = GeneralUtilities.resolve_relative_path("../..", os.path.dirname(runtestcases_script_file))
        self.run_with_epew("npm", f"run test-{build_environment_target_type}", codeunit_folder, verbosity=verbosity)

    @GeneralUtilities.check_arguments
    def __rename_packagename_in_coverage_file(self, file: str, codeunit_name: str) -> None:
        root: etree._ElementTree = etree.parse(file)
        packages = root.xpath('//coverage/packages/package')
        for package in packages:
            package.attrib['name'] = codeunit_name
        result = etree.tostring(root).decode("utf-8")
        GeneralUtilities.write_text_to_file(file, result)

    @GeneralUtilities.check_arguments
    def do_npm_install(self, package_json_folder: str, force: bool, verbosity: int = 1) -> None:
        argument1 = "install"
        if force:
            argument1 = f"{argument1} --force"
        self.run_with_epew("npm", argument1, package_json_folder, verbosity=verbosity)

        argument2 = "install --package-lock-only"
        if force:
            argument2 = f"{argument2} --force"
        self.run_with_epew("npm", argument2, package_json_folder, verbosity=verbosity)

        argument3 = "clean-install"
        if force:
            argument3 = f"{argument3} --force"
        self.run_with_epew("npm", argument3, package_json_folder, verbosity=verbosity)

    @GeneralUtilities.check_arguments
    def run_with_epew(self, program: str, argument: str = "", working_directory: str = None, verbosity: int = 1, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, throw_exception_if_exitcode_is_not_zero: bool = True, custom_argument: object = None, interactive: bool = False) -> tuple[int, str, str, int]:
        sc: ScriptCollectionCore = ScriptCollectionCore()
        sc.program_runner = ProgramRunnerEpew()
        return sc.run_program(program, argument, working_directory, verbosity, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, throw_exception_if_exitcode_is_not_zero, custom_argument, interactive)

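do_npm_install issues three npm commands in sequence (each with --force appended when requested); a plain-subprocess sketch without the epew wrapper, using a placeholder folder:

import subprocess

package_json_folder = "./MyCodeUnit"
for args in (["npm", "install"],
             ["npm", "install", "--package-lock-only"],
             ["npm", "clean-install"]):
    # Each step runs in the folder that contains package.json
    subprocess.run(args, cwd=package_json_folder, check=True)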
    @GeneralUtilities.check_arguments
    def set_default_constants(self, codeunit_folder: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        self.set_constant_for_commitid(codeunit_folder)
        self.set_constant_for_commitdate(codeunit_folder)
        self.set_constant_for_codeunitname(codeunit_folder)
        self.set_constant_for_codeunitversion(codeunit_folder)
        self.set_constant_for_codeunitmajorversion(codeunit_folder)
        self.set_constant_for_description(codeunit_folder)

    @GeneralUtilities.check_arguments
    def set_constant_for_commitid(self, codeunit_folder: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        repository = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
        commit_id = self.__sc.git_get_commit_id(repository)
        self.set_constant(codeunit_folder, "CommitId", commit_id)

    @GeneralUtilities.check_arguments
    def set_constant_for_commitdate(self, codeunit_folder: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        repository = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
        commit_date: datetime = self.__sc.git_get_commit_date(repository)
        self.set_constant(codeunit_folder, "CommitDate", GeneralUtilities.datetime_to_string(commit_date))

    @GeneralUtilities.check_arguments
    def set_constant_for_codeunitname(self, codeunit_folder: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        codeunit_name: str = os.path.basename(codeunit_folder)
        self.set_constant(codeunit_folder, "CodeUnitName", codeunit_name)

    @GeneralUtilities.check_arguments
    def set_constant_for_codeunitversion(self, codeunit_folder: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        codeunit_version: str = self.get_version_of_codeunit_folder(codeunit_folder)
        self.set_constant(codeunit_folder, "CodeUnitVersion", codeunit_version)

    @GeneralUtilities.check_arguments
    def set_constant_for_codeunitmajorversion(self, codeunit_folder: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        codeunit_version: str = self.get_version_of_codeunit_folder(codeunit_folder)
        major_version = int(codeunit_version.split(".")[0])
        self.set_constant(codeunit_folder, "CodeUnitMajorVersion", str(major_version))

    @GeneralUtilities.check_arguments
    def set_constant_for_description(self, codeunit_folder: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        codeunit_name: str = os.path.basename(codeunit_folder)
        codeunit_description: str = self.get_codeunit_description(f"{codeunit_folder}/{codeunit_name}.codeunit.xml")
        self.set_constant(codeunit_folder, "CodeUnitDescription", codeunit_description)

    @GeneralUtilities.check_arguments
    def set_constant(self, codeunit_folder: str, constantname: str, constant_value: str, documentationsummary: str = None, constants_valuefile: str = None) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        if documentationsummary is None:
            documentationsummary = GeneralUtilities.empty_string
        constants_folder = os.path.join(codeunit_folder, "Other", "Resources", "Constants")
        GeneralUtilities.ensure_directory_exists(constants_folder)
        constants_metafile = os.path.join(constants_folder, f"{constantname}.constant.xml")
        if constants_valuefile is None:
            constants_valuefile_folder = constants_folder
            constants_valuefile_name = f"{constantname}.value.txt"
            constants_valuefiler_reference = f"./{constants_valuefile_name}"
        else:
            constants_valuefile_folder = os.path.dirname(constants_valuefile)
            constants_valuefile_name = os.path.basename(constants_valuefile)
            constants_valuefiler_reference = os.path.join(constants_valuefile_folder, constants_valuefile_name)

        # TODO implement usage of self.reference_latest_version_of_xsd_when_generating_xml
        GeneralUtilities.write_text_to_file(constants_metafile, f"""<?xml version="1.0" encoding="UTF-8" ?>
<cps:constant xmlns:cps="https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure" constantspecificationversion="1.1.0"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/raw/main/Conventions/RepositoryStructure/CommonProjectStructure/constant.xsd">
    <cps:name>{constantname}</cps:name>
    <cps:documentationsummary>{documentationsummary}</cps:documentationsummary>
    <cps:path>{constants_valuefiler_reference}</cps:path>
</cps:constant>""")
        # TODO validate generated xml against xsd
        GeneralUtilities.write_text_to_file(os.path.join(constants_valuefile_folder, constants_valuefile_name), constant_value)

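set_constant materialises every constant as a metadata/value file pair under Other/Resources/Constants. For an assumed constant named CommitId the generated files would look roughly like this (values are placeholders):

Other/Resources/Constants/CommitId.constant.xml:
    <?xml version="1.0" encoding="UTF-8" ?>
    <cps:constant xmlns:cps="https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure" constantspecificationversion="1.1.0"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/raw/main/Conventions/RepositoryStructure/CommonProjectStructure/constant.xsd">
        <cps:name>CommitId</cps:name>
        <cps:documentationsummary></cps:documentationsummary>
        <cps:path>./CommitId.value.txt</cps:path>
    </cps:constant>

Other/Resources/Constants/CommitId.value.txt:
    (the raw constant value, e.g. a commit hash)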
    @GeneralUtilities.check_arguments
    def get_constant_value(self, source_codeunit_folder: str, constant_name: str) -> str:
        self.assert_is_codeunit_folder(source_codeunit_folder)
        value_file_relative = self.__get_constant_helper(source_codeunit_folder, constant_name, "path")
        value_file = GeneralUtilities.resolve_relative_path(value_file_relative, os.path.join(source_codeunit_folder, "Other", "Resources", "Constants"))
        return GeneralUtilities.read_text_from_file(value_file)

    @GeneralUtilities.check_arguments
    def get_constant_documentation(self, source_codeunit_folder: str, constant_name: str) -> str:
        self.assert_is_codeunit_folder(source_codeunit_folder)
        return self.__get_constant_helper(source_codeunit_folder, constant_name, "documentationsummary")

    @GeneralUtilities.check_arguments
    def __get_constant_helper(self, source_codeunit_folder: str, constant_name: str, propertyname: str) -> str:
        self.assert_is_codeunit_folder(source_codeunit_folder)
        root: etree._ElementTree = etree.parse(os.path.join(source_codeunit_folder, "Other", "Resources", "Constants", f"{constant_name}.constant.xml"))
        results = root.xpath(f'//cps:{propertyname}/text()', namespaces={
            'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure'
        })
        length = len(results)
        if (length == 0):
            return ""
        elif length == 1:
            return results[0]
        else:
            raise ValueError("Too many results found.")

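Reading a constant back is then a one-liner; a hypothetical usage, assuming a default-constructible TasksForCommonProjectStructure instance and an example repository layout:

# Hypothetical usage sketch; the folder path and constant name are invented.
tasks = TasksForCommonProjectStructure()
commit_id = tasks.get_constant_value("./MyRepo/MyCodeUnit", "CommitId")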
@GeneralUtilities.check_arguments
|
2115
|
-
def copy_development_certificate_to_default_development_directory(self, codeunit_folder: str, build_environment: str, domain: str = None, certificate_resource_name: str = "DevelopmentCertificate") -> None:
|
2116
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2117
|
-
if build_environment != "Productive":
|
2118
|
-
codeunit_name: str = os.path.basename(codeunit_folder)
|
2119
|
-
if domain is None:
|
2120
|
-
domain = f"{codeunit_name}.test.local".lower()
|
2121
|
-
|
2122
|
-
src_folder = os.path.join(codeunit_folder, "Other", "Resources", certificate_resource_name)
|
2123
|
-
src_file_pfx = os.path.join(src_folder, f"{codeunit_name}{certificate_resource_name}.pfx")
|
2124
|
-
src_file_psw = os.path.join(src_folder, f"{codeunit_name}{certificate_resource_name}.password")
|
2125
|
-
|
2126
|
-
trg_folder = os.path.join(codeunit_folder, "Other", "Workspace", "Configuration", "Certificates")
|
2127
|
-
trg_file_pfx = os.path.join(trg_folder, f"{domain}.pfx")
|
2128
|
-
trg_file_psw = os.path.join(trg_folder, f"{domain}.password")
|
2129
|
-
|
2130
|
-
GeneralUtilities.assert_file_exists(src_file_pfx)
|
2131
|
-
GeneralUtilities.assert_file_exists(src_file_psw)
|
2132
|
-
GeneralUtilities.ensure_file_does_not_exist(trg_file_pfx)
|
2133
|
-
GeneralUtilities.ensure_file_does_not_exist(trg_file_psw)
|
2134
|
-
|
2135
|
-
GeneralUtilities.ensure_directory_exists(trg_folder)
|
2136
|
-
|
2137
|
-
GeneralUtilities.ensure_directory_exists(trg_folder)
|
2138
|
-
shutil.copyfile(src_file_pfx, trg_file_pfx)
|
2139
|
-
shutil.copyfile(src_file_psw, trg_file_psw)
|
2140
|
-
|
2141
|
-
@GeneralUtilities.check_arguments
|
2142
|
-
def set_constants_for_certificate_public_information(self, codeunit_folder: str, source_constant_name: str = "DevelopmentCertificate", domain: str = None) -> None:
|
2143
|
-
"""Expects a certificate-resource and generates a constant for its public information"""
|
2144
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2145
|
-
certificate_file = os.path.join(codeunit_folder, "Other", "Resources", source_constant_name, f"{source_constant_name}.crt")
|
2146
|
-
with open(certificate_file, encoding="utf-8") as text_wrapper:
|
2147
|
-
certificate = crypto.load_certificate(crypto.FILETYPE_PEM, text_wrapper.read())
|
2148
|
-
certificate_publickey = crypto.dump_publickey(crypto.FILETYPE_PEM, certificate.get_pubkey()).decode("utf-8")
|
2149
|
-
self.set_constant(codeunit_folder, source_constant_name+"PublicKey", certificate_publickey)
|
2150
|
-
|
2151
|
-
@GeneralUtilities.check_arguments
|
2152
|
-
def set_constants_for_certificate_private_information(self, codeunit_folder: str) -> None:
|
2153
|
-
"""Expects a certificate-resource and generates a constant for its sensitive information in hex-format"""
|
2154
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2155
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
2156
|
-
resource_name: str = "DevelopmentCertificate"
|
2157
|
-
filename: str = codeunit_name+"DevelopmentCertificate"
|
2158
|
-
self.generate_constant_from_resource_by_filename(codeunit_folder, resource_name, f"{filename}.pfx", "PFX")
|
2159
|
-
self.generate_constant_from_resource_by_filename(codeunit_folder, resource_name, f"{filename}.password", "Password")
|
2160
|
-
|
2161
|
-
@GeneralUtilities.check_arguments
|
2162
|
-
def generate_constant_from_resource_by_filename(self, codeunit_folder: str, resource_name: str, filename: str, constant_name: str) -> None:
|
2163
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2164
|
-
certificate_resource_folder = GeneralUtilities.resolve_relative_path(f"Other/Resources/{resource_name}", codeunit_folder)
|
2165
|
-
resource_file = os.path.join(certificate_resource_folder, filename)
|
2166
|
-
resource_file_content = GeneralUtilities.read_binary_from_file(resource_file)
|
2167
|
-
resource_file_as_hex = resource_file_content.hex()
|
2168
|
-
self.set_constant(codeunit_folder, f"{resource_name}{constant_name}Hex", resource_file_as_hex)
|
2169
|
-
|
2170
|
-
@GeneralUtilities.check_arguments
|
2171
|
-
def generate_constant_from_resource_by_extension(self, codeunit_folder: str, resource_name: str, extension: str, constant_name: str) -> None:
|
2172
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2173
|
-
certificate_resource_folder = GeneralUtilities.resolve_relative_path(f"Other/Resources/{resource_name}", codeunit_folder)
|
2174
|
-
resource_file = self.__sc.find_file_by_extension(certificate_resource_folder, extension)
|
2175
|
-
resource_file_content = GeneralUtilities.read_binary_from_file(resource_file)
|
2176
|
-
resource_file_as_hex = resource_file_content.hex()
|
2177
|
-
self.set_constant(codeunit_folder, f"{resource_name}{constant_name}Hex", resource_file_as_hex)
|
2178
|
-
|
2179
|
-
    @GeneralUtilities.check_arguments
    def copy_constant_from_dependent_codeunit(self, codeunit_folder: str, constant_name: str, source_codeunit_name: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        source_codeunit_folder: str = GeneralUtilities.resolve_relative_path(f"../{source_codeunit_name}", codeunit_folder)
        value = self.get_constant_value(source_codeunit_folder, constant_name)
        documentation = self.get_constant_documentation(source_codeunit_folder, constant_name)
        self.set_constant(codeunit_folder, constant_name, value, documentation)

    @GeneralUtilities.check_arguments
    def copy_resources_from_dependent_codeunit(self, codeunit_folder: str, resource_name: str, source_codeunit_name: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        source_folder: str = GeneralUtilities.resolve_relative_path(f"../{source_codeunit_name}/Other/Resources/{resource_name}", codeunit_folder)
        target_folder: str = GeneralUtilities.resolve_relative_path(f"Other/Resources/{resource_name}", codeunit_folder)
        GeneralUtilities.ensure_directory_does_not_exist(target_folder)
        shutil.copytree(source_folder, target_folder)

    @GeneralUtilities.check_arguments
    def copy_resources_from_global_project_resources(self, codeunit_folder: str, resource_name: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        source_folder: str = GeneralUtilities.resolve_relative_path(f"../Other/Resources/{resource_name}", codeunit_folder)
        target_folder: str = GeneralUtilities.resolve_relative_path(f"Other/Resources/{resource_name}", codeunit_folder)
        GeneralUtilities.ensure_directory_does_not_exist(target_folder)
        shutil.copytree(source_folder, target_folder)

@GeneralUtilities.check_arguments
|
2204
|
-
def generate_openapi_file(self, buildscript_file: str, runtime: str, verbosity: int, commandline_arguments: list[str], swagger_document_name: str = "APISpecification") -> None:
|
2205
|
-
GeneralUtilities.write_message_to_stdout("Generate OpenAPI-specification-file...")
|
2206
|
-
codeunitname = os.path.basename(str(Path(os.path.dirname(buildscript_file)).parent.parent.absolute()))
|
2207
|
-
repository_folder = str(Path(os.path.dirname(buildscript_file)).parent.parent.parent.absolute())
|
2208
|
-
codeunit_folder = os.path.join(repository_folder, codeunitname)
|
2209
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2210
|
-
artifacts_folder = os.path.join(codeunit_folder, "Other", "Artifacts")
|
2211
|
-
GeneralUtilities.ensure_directory_exists(os.path.join(artifacts_folder, "APISpecification"))
|
2212
|
-
verbosity = self.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
2213
|
-
codeunit_version = self.get_version_of_codeunit_folder(codeunit_folder)
|
2214
|
-
|
2215
|
-
versioned_api_spec_file = f"APISpecification/{codeunitname}.v{codeunit_version}.api.json"
|
2216
|
-
self.__sc.run_program("swagger", f"tofile --output {versioned_api_spec_file} BuildResult_DotNet_{runtime}/{codeunitname}.dll {swagger_document_name}", artifacts_folder, verbosity=verbosity)
|
2217
|
-
api_file: str = os.path.join(artifacts_folder, versioned_api_spec_file)
|
2218
|
-
shutil.copyfile(api_file, os.path.join(artifacts_folder, f"APISpecification/{codeunitname}.latest.api.json"))
|
2219
|
-
|
2220
|
-
resources_folder = os.path.join(codeunit_folder, "Other", "Resources")
|
2221
|
-
GeneralUtilities.ensure_directory_exists(resources_folder)
|
2222
|
-
resources_apispec_folder = os.path.join(resources_folder, "APISpecification")
|
2223
|
-
GeneralUtilities.ensure_directory_exists(resources_apispec_folder)
|
2224
|
-
resource_target_file = os.path.join(resources_apispec_folder, f"{codeunitname}.api.json")
|
2225
|
-
GeneralUtilities.ensure_file_does_not_exist(resource_target_file)
|
2226
|
-
shutil.copyfile(api_file, resource_target_file)
|
2227
|
-
|
2228
|
-
with open(api_file, encoding="utf-8") as api_file_content:
|
2229
|
-
reloaded_json = json.load(api_file_content)
|
2230
|
-
|
2231
|
-
yamlfile1: str = str(os.path.join(artifacts_folder, f"APISpecification/{codeunitname}.v{codeunit_version}.api.yaml"))
|
2232
|
-
GeneralUtilities.ensure_file_does_not_exist(yamlfile1)
|
2233
|
-
GeneralUtilities.ensure_file_exists(yamlfile1)
|
2234
|
-
with open(yamlfile1, "w+", encoding="utf-8") as yamlfile:
|
2235
|
-
yaml.dump(reloaded_json, yamlfile, allow_unicode=True)
|
2236
|
-
|
2237
|
-
yamlfile2: str = str(os.path.join(artifacts_folder, f"APISpecification/{codeunitname}.latest.api.yaml"))
|
2238
|
-
GeneralUtilities.ensure_file_does_not_exist(yamlfile2)
|
2239
|
-
shutil.copyfile(yamlfile1, yamlfile2)
|
2240
|
-
|
2241
|
-
yamlfile3: str = str(os.path.join(resources_apispec_folder, f"{codeunitname}.api.yaml"))
|
2242
|
-
GeneralUtilities.ensure_file_does_not_exist(yamlfile3)
|
2243
|
-
shutil.copyfile(yamlfile1, yamlfile3)
|
2244
|
-
|
2245
|
-
    @GeneralUtilities.check_arguments
    def get_latest_version_of_openapigenerator(self) -> None:
        github_api_releases_link = "https://api.github.com/repos/OpenAPITools/openapi-generator/releases"
        with urllib.request.urlopen(github_api_releases_link) as release_information_url:
            latest_release_infos = json.load(release_information_url)[0]
            latest_version = latest_release_infos["tag_name"][1:]
            return latest_version

    @GeneralUtilities.check_arguments
    def set_version_of_openapigenerator_by_update_dependencies_file(self, update_dependencies_script_file: str, used_version: str = None) -> None:
        codeunit_folder: str = GeneralUtilities.resolve_relative_path("../..", update_dependencies_script_file)
        self.set_version_of_openapigenerator(codeunit_folder, used_version)

    @GeneralUtilities.check_arguments
    def set_version_of_openapigenerator(self, codeunit_folder: str, used_version: str = None) -> None:
        target_folder: str = os.path.join(codeunit_folder, "Other", "Resources", "Dependencies", "OpenAPIGenerator")
        version_file = os.path.join(target_folder, "Version.txt")
        if used_version is None:
            used_version = self.get_latest_version_of_openapigenerator()
        GeneralUtilities.ensure_directory_exists(target_folder)
        GeneralUtilities.ensure_file_exists(version_file)
        GeneralUtilities.write_text_to_file(version_file, used_version)

    @GeneralUtilities.check_arguments
    def ensure_openapigenerator_is_available(self, codeunit_folder: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        openapigenerator_folder = os.path.join(codeunit_folder, "Other", "Resources", "OpenAPIGenerator")
        internet_connection_is_available = GeneralUtilities.internet_connection_is_available()
        filename = "open-api-generator.jar"
        jar_file = f"{openapigenerator_folder}/{filename}"
        jar_file_exists = os.path.isfile(jar_file)
        if internet_connection_is_available:  # Load/Update
            version_file = os.path.join(codeunit_folder, "Other", "Resources", "Dependencies", "OpenAPIGenerator", "Version.txt")
            used_version = GeneralUtilities.read_text_from_file(version_file)
            download_link = f"https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/{used_version}/openapi-generator-cli-{used_version}.jar"
            GeneralUtilities.ensure_directory_does_not_exist(openapigenerator_folder)
            GeneralUtilities.ensure_directory_exists(openapigenerator_folder)
            urllib.request.urlretrieve(download_link, jar_file)
        else:
            if jar_file_exists:
                GeneralUtilities.write_message_to_stdout("Warning: Can not check for updates of OpenAPIGenerator due to missing internet-connection.")
            else:
                raise ValueError("Can not download OpenAPIGenerator.")

    @GeneralUtilities.check_arguments
    def generate_api_client_from_dependent_codeunit_in_angular(self, file: str, name_of_api_providing_codeunit: str, generated_program_part_name: str) -> None:
        codeunit_folder = GeneralUtilities.resolve_relative_path("../..", file)
        target_subfolder_in_codeunit = f"src/app/generated/{generated_program_part_name}"
        language = "typescript-angular"
        self.ensure_openapigenerator_is_available(codeunit_folder)
        openapigenerator_jar_file = os.path.join(codeunit_folder, "Other", "Resources", "OpenAPIGenerator", "open-api-generator.jar")
        openapi_spec_file = os.path.join(codeunit_folder, "Other", "Resources", "DependentCodeUnits", name_of_api_providing_codeunit, "APISpecification", f"{name_of_api_providing_codeunit}.latest.api.json")
        target_folder = os.path.join(codeunit_folder, target_subfolder_in_codeunit)
        GeneralUtilities.ensure_folder_exists_and_is_empty(target_folder)
        self.__sc.run_program("java", f'-jar {openapigenerator_jar_file} generate -i {openapi_spec_file} -g {language} -o {target_folder} --global-property supportingFiles --global-property models --global-property apis', codeunit_folder)

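The java call above is the standard openapi-generator-cli "generate" command; a stand-alone sketch with invented paths and spec/output names:

import subprocess

# Generate a typescript-angular client from an OpenAPI spec; all paths below are placeholders.
subprocess.run(["java", "-jar", "Other/Resources/OpenAPIGenerator/open-api-generator.jar",
                "generate", "-i", "Backend.latest.api.json", "-g", "typescript-angular",
                "-o", "src/app/generated/backend-client",
                "--global-property", "supportingFiles", "--global-property", "models", "--global-property", "apis"],
               check=True)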
@GeneralUtilities.check_arguments
|
2302
|
-
def generate_api_client_from_dependent_codeunit_in_dotnet(self, file: str, name_of_api_providing_codeunit: str, base_namespace: str) -> None:
|
2303
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("../..", file)
|
2304
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
2305
|
-
client_subpath = f"{codeunit_name}/APIClients/{name_of_api_providing_codeunit}"
|
2306
|
-
namespace = f"{base_namespace}.APIClients.{name_of_api_providing_codeunit}"
|
2307
|
-
target_subfolder_in_codeunit = client_subpath
|
2308
|
-
language = "csharp"
|
2309
|
-
additional_properties = f"--additional-properties packageName={namespace}"
|
2310
|
-
|
2311
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("../..", file)
|
2312
|
-
self.ensure_openapigenerator_is_available(codeunit_folder)
|
2313
|
-
openapigenerator_jar_file = os.path.join(codeunit_folder, "Other", "Resources", "OpenAPIGenerator", "open-api-generator.jar")
|
2314
|
-
openapi_spec_file = os.path.join(codeunit_folder, "Other", "Resources", "DependentCodeUnits", name_of_api_providing_codeunit, "APISpecification", f"{name_of_api_providing_codeunit}.latest.api.json")
|
2315
|
-
target_folder = os.path.join(codeunit_folder, target_subfolder_in_codeunit)
|
2316
|
-
GeneralUtilities.ensure_directory_exists(target_folder)
|
2317
|
-
self.__sc.run_program("java", f'-jar {openapigenerator_jar_file} generate -i {openapi_spec_file} -g {language} -o {target_folder} --global-property supportingFiles --global-property models --global-property apis {additional_properties}', codeunit_folder)
|
2318
|
-
|
2319
|
-
# move docs to correct folder
|
2320
|
-
target_folder_docs = os.path.join(target_folder, "docs")
|
2321
|
-
target_folder_docs_correct = os.path.join(codeunit_folder, "Other", "Reference", "ReferenceContent", f"{name_of_api_providing_codeunit}-API")
|
2322
|
-
GeneralUtilities.ensure_directory_does_not_exist(target_folder_docs_correct)
|
2323
|
-
GeneralUtilities.ensure_directory_exists(target_folder_docs_correct)
|
2324
|
-
GeneralUtilities.move_content_of_folder(target_folder_docs, target_folder_docs_correct)
|
2325
|
-
GeneralUtilities.ensure_directory_does_not_exist(target_folder_docs)
|
2326
|
-
|
2327
|
-
code_folders = GeneralUtilities.get_direct_folders_of_folder(os.path.join(target_folder, "src"))
|
2328
|
-
|
2329
|
-
# remove test-folder
|
2330
|
-
tests_folder = [x for x in code_folders if x.endswith(".Test")][0]
|
2331
|
-
GeneralUtilities.ensure_directory_does_not_exist(tests_folder)
|
2332
|
-
|
2333
|
-
# move source to correct folder
|
2334
|
-
src_folder = [x for x in code_folders if not x.endswith(".Test")][0]
|
2335
|
-
target_folder_src = GeneralUtilities.resolve_relative_path("../..", src_folder)
|
2336
|
-
|
2337
|
-
for targetfile in GeneralUtilities.get_direct_files_of_folder(target_folder_src):
|
2338
|
-
GeneralUtilities.ensure_file_does_not_exist(targetfile)
|
2339
|
-
for folder in GeneralUtilities.get_direct_folders_of_folder(target_folder_src):
|
2340
|
-
f = folder.replace("\\", "/")
|
2341
|
-
if not f.endswith("/.openapi-generator") and not f.endswith("/src"):
|
2342
|
-
GeneralUtilities.ensure_directory_does_not_exist(f)
|
2343
|
-
GeneralUtilities.ensure_directory_exists(target_folder_src)
|
2344
|
-
GeneralUtilities.move_content_of_folder(src_folder, target_folder_src)
|
2345
|
-
GeneralUtilities.ensure_directory_does_not_exist(src_folder)
|
2346
|
-
for targetfile in GeneralUtilities.get_direct_files_of_folder(target_folder_src):
|
2347
|
-
GeneralUtilities.ensure_file_does_not_exist(targetfile)
|
2348
|
-
|
2349
|
-
@GeneralUtilities.check_arguments
|
2350
|
-
def replace_version_in_packagejson_file(self, packagejson_file: str, codeunit_version: str) -> None:
|
2351
|
-
encoding = "utf-8"
|
2352
|
-
with open(packagejson_file, encoding=encoding) as f:
|
2353
|
-
data = json.load(f)
|
2354
|
-
data['version'] = codeunit_version
|
2355
|
-
with open(packagejson_file, 'w', encoding=encoding) as f:
|
2356
|
-
json.dump(data, f, indent=2)
|
2357
|
-
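# Illustrative sketch (not part of the original file): the method above rewrites only the
# "version" field and keeps every other property of package.json untouched, e.g.:
import json
sample = '{"name": "my-app", "version": "1.0.0", "private": true}'
data = json.loads(sample)
data["version"] = "1.1.0"
print(json.dumps(data, indent=2))  # same document, now with "version": "1.1.0"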
|
2358
|
-
@GeneralUtilities.check_arguments
|
2359
|
-
def build_dependent_code_units(self, repo_folder: str, codeunit_name: str, verbosity: int, target_environmenttype: str, additional_arguments_file: str, commandlinearguments: list[str]) -> None:
|
2360
|
-
verbosity = self.get_verbosity_from_commandline_arguments(commandlinearguments, verbosity)
|
2361
|
-
codeunit_file = os.path.join(repo_folder, codeunit_name, codeunit_name + ".codeunit.xml")
|
2362
|
-
dependent_codeunits = self.get_dependent_code_units(codeunit_file)
|
2363
|
-
dependent_codeunits_folder = os.path.join(repo_folder, codeunit_name, "Other", "Resources", "DependentCodeUnits")
|
2364
|
-
GeneralUtilities.ensure_directory_does_not_exist(dependent_codeunits_folder)
|
2365
|
-
if 0 < len(dependent_codeunits):
|
2366
|
-
GeneralUtilities.write_message_to_stdout(f"Start building dependent codeunits for codeunit {codeunit_name}.")
|
2367
|
-
for dependent_codeunit in dependent_codeunits:
|
2368
|
-
self.__build_codeunit(os.path.join(repo_folder, dependent_codeunit), verbosity, target_environmenttype, additional_arguments_file, False, False, commandlinearguments)
|
2369
|
-
if 0 < len(dependent_codeunits):
|
2370
|
-
GeneralUtilities.write_message_to_stdout(f"Finished building dependent codeunits for codeunit {codeunit_name}.")
|
2371
|
-
|
2372
|
-
@GeneralUtilities.check_arguments
|
2373
|
-
def copy_artifacts_from_dependent_code_units(self, repo_folder: str, codeunit_name: str) -> None:
|
2374
|
-
codeunit_file = os.path.join(repo_folder, codeunit_name, codeunit_name + ".codeunit.xml")
|
2375
|
-
dependent_codeunits = self.get_dependent_code_units(codeunit_file)
|
2376
|
-
if len(dependent_codeunits) > 0:
|
2377
|
-
GeneralUtilities.write_message_to_stdout(f"Get dependent artifacts for codeunit {codeunit_name}.")
|
2378
|
-
dependent_codeunits_folder = os.path.join(repo_folder, codeunit_name, "Other", "Resources", "DependentCodeUnits")
|
2379
|
-
GeneralUtilities.ensure_directory_does_not_exist(dependent_codeunits_folder)
|
2380
|
-
for dependent_codeunit in dependent_codeunits:
|
2381
|
-
target_folder = os.path.join(dependent_codeunits_folder, dependent_codeunit)
|
2382
|
-
GeneralUtilities.ensure_directory_does_not_exist(target_folder)
|
2383
|
-
other_folder = os.path.join(repo_folder, dependent_codeunit, "Other")
|
2384
|
-
artifacts_folder = os.path.join(other_folder, "Artifacts")
|
2385
|
-
shutil.copytree(artifacts_folder, target_folder)
|
2386
|
-
|
2387
|
-
@GeneralUtilities.check_arguments
|
2388
|
-
def add_github_release(self, productname: str, projectversion: str, build_artifacts_folder: str, github_username: str, repository_folder: str, commandline_arguments: list[str], additional_attached_files: list[str]) -> None:
|
2389
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
2390
|
-
GeneralUtilities.write_message_to_stdout(f"Create GitHub-release for {productname}...")
|
2391
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, 1)
|
2392
|
-
github_repo = f"{github_username}/{productname}"
|
2393
|
-
artifact_files = []
|
2394
|
-
codeunits = self.get_codeunits(repository_folder)
|
2395
|
-
for codeunit in codeunits:
|
2396
|
-
artifact_files.append(self.__sc.find_file_by_extension(f"{build_artifacts_folder}\\{productname}\\{projectversion}\\{codeunit}", "Productive.Artifacts.zip"))
|
2397
|
-
if additional_attached_files is not None:
|
2398
|
-
for additional_attached_file in additional_attached_files:
|
2399
|
-
artifact_files.append(additional_attached_file)
|
2400
|
-
changelog_file = os.path.join(repository_folder, "Other", "Resources", "Changelog", f"v{projectversion}.md")
|
2401
|
-
self.__sc.run_program_argsasarray("gh", ["release", "create", f"v{projectversion}", "--repo", github_repo, "--notes-file", changelog_file, "--title", f"Release v{projectversion}"]+artifact_files, verbosity=verbosity)
|
2402
|
-
|
2403
|
-
@GeneralUtilities.check_arguments
|
2404
|
-
def get_dependencies_which_are_ignored_from_updates(self, codeunit_folder: str, print_warnings_for_ignored_dependencies: bool) -> list[str]:
|
2405
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2406
|
-
namespaces = {'cps': 'https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure', 'xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
|
2407
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
2408
|
-
codeunit_file = os.path.join(codeunit_folder, f"{codeunit_name}.codeunit.xml")
|
2409
|
-
root: etree._ElementTree = etree.parse(codeunit_file)
|
2410
|
-
ignoreddependencies = root.xpath('//cps:codeunit/cps:properties/cps:updatesettings/cps:ignoreddependencies/cps:ignoreddependency', namespaces=namespaces)
|
2411
|
-
result = [x.text.replace("\\n", GeneralUtilities.empty_string).replace("\\r", GeneralUtilities.empty_string).replace("\n", GeneralUtilities.empty_string).replace("\r", GeneralUtilities.empty_string).strip() for x in ignoreddependencies]
|
2412
|
-
if print_warnings_for_ignored_dependencies and len(result) > 0:
|
2413
|
-
GeneralUtilities.write_message_to_stderr(f"Warning: Codeunit {codeunit_name} contains the following dependencies which will are ignoed for automatic updates: "+', '.join(result))
|
2414
|
-
return result
|
2415
|
-
|
2416
|
-
@GeneralUtilities.check_arguments
|
2417
|
-
def update_dependencies_of_typical_flutter_codeunit(self, update_script_file: str, verbosity: int, cmd_args: list[str]) -> None:
|
2418
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("..", os.path.dirname(update_script_file))
|
2419
|
-
ignored_dependencies = self.get_dependencies_which_are_ignored_from_updates(codeunit_folder, True)
|
2420
|
-
# TODO implement
|
2421
|
-
|
2422
|
-
@GeneralUtilities.check_arguments
|
2423
|
-
def update_dependencies_of_typical_python_repository_requirements(self, repository_folder: str, verbosity: int, cmd_args: list[str]) -> None:
|
2424
|
-
verbosity = self.get_verbosity_from_commandline_arguments(cmd_args, verbosity)
|
2425
|
-
|
2426
|
-
development_requirements_file = os.path.join(repository_folder, "Other", "requirements.txt")
|
2427
|
-
if (os.path.isfile(development_requirements_file)):
|
2428
|
-
self.__sc.update_dependencies_of_python_in_requirementstxt_file(development_requirements_file, [], verbosity)
|
2429
|
-
|
2430
|
-
@GeneralUtilities.check_arguments
|
2431
|
-
def update_dependencies_of_typical_python_codeunit(self, update_script_file: str, verbosity: int, cmd_args: list[str]) -> None:
|
2432
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("..", os.path.dirname(update_script_file))
|
2433
|
-
ignored_dependencies = self.get_dependencies_which_are_ignored_from_updates(codeunit_folder, True)
|
2434
|
-
# TODO consider ignored_dependencies
|
2435
|
-
verbosity = self.get_verbosity_from_commandline_arguments(cmd_args, verbosity)
|
2436
|
-
|
2437
|
-
setup_cfg = os.path.join(codeunit_folder, "setup.cfg")
|
2438
|
-
if (os.path.isfile(setup_cfg)):
|
2439
|
-
self.__sc.update_dependencies_of_python_in_setupcfg_file(setup_cfg, ignored_dependencies, verbosity)
|
2440
|
-
|
2441
|
-
development_requirements_file = os.path.join(codeunit_folder, "requirements.txt") # required for codeunits which contain python-code which need third-party dependencies
|
2442
|
-
if (os.path.isfile(development_requirements_file)):
|
2443
|
-
self.__sc.update_dependencies_of_python_in_requirementstxt_file(development_requirements_file, ignored_dependencies, verbosity)
|
2444
|
-
|
2445
|
-
development_requirements_file2 = os.path.join(codeunit_folder, "Other", "requirements.txt") # required for codeunits which contain python-scripts which need third-party dependencies
|
2446
|
-
if (os.path.isfile(development_requirements_file2)):
|
2447
|
-
self.__sc.update_dependencies_of_python_in_requirementstxt_file(development_requirements_file2, ignored_dependencies, verbosity)
|
2448
|
-
|
2449
|
-
@GeneralUtilities.check_arguments
|
2450
|
-
def update_dependencies_of_typical_dotnet_codeunit(self, update_script_file: str, verbosity: int, cmd_args: list[str]) -> None:
|
2451
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("..", os.path.dirname(update_script_file))
|
2452
|
-
ignored_dependencies = self.get_dependencies_which_are_ignored_from_updates(codeunit_folder, True)
|
2453
|
-
verbosity = self.get_verbosity_from_commandline_arguments(cmd_args, verbosity)
|
2454
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
2455
|
-
|
2456
|
-
build_folder = os.path.join(codeunit_folder, "Other", "Build")
|
2457
|
-
self.__sc.run_program("python", "Build.py", build_folder, verbosity)
|
2458
|
-
|
2459
|
-
test_csproj_file = os.path.join(codeunit_folder, f"{codeunit_name}Tests", f"{codeunit_name}Tests.csproj")
|
2460
|
-
self.__sc.update_dependencies_of_dotnet_project(test_csproj_file, verbosity, ignored_dependencies)
|
2461
|
-
csproj_file = os.path.join(codeunit_folder, codeunit_name, f"{codeunit_name}.csproj")
|
2462
|
-
self.__sc.update_dependencies_of_dotnet_project(csproj_file, verbosity, ignored_dependencies)
|
2463
|
-
|
2464
|
-
@GeneralUtilities.check_arguments
|
2465
|
-
def update_dependencies_of_package_json(self, folder: str, verbosity: int, cmd_args: list[str]) -> None:
|
2466
|
-
if self.is_codeunit_folder(folder):
|
2467
|
-
ignored_dependencies = self.get_dependencies_which_are_ignored_from_updates(folder, True)
|
2468
|
-
else:
|
2469
|
-
ignored_dependencies = []
|
2470
|
-
# TODO consider ignored_dependencies
|
2471
|
-
result = self.run_with_epew("npm", "outdated", folder, verbosity, throw_exception_if_exitcode_is_not_zero=False)
|
2472
|
-
if result[0] == 0:
|
2473
|
-
return # all dependencies up to date
|
2474
|
-
elif result[0] == 1:
|
2475
|
-
package_json_content = None
|
2476
|
-
package_json_file = f"{folder}/package.json"
|
2477
|
-
with open(package_json_file, "r", encoding="utf-8") as package_json_file_object:
|
2478
|
-
package_json_content = json.load(package_json_file_object)
|
2479
|
-
lines = GeneralUtilities.string_to_lines(result[1])[1:][:-1]
|
2480
|
-
for line in lines:
|
2481
|
-
normalized_line_splitted = ' '.join(line.split()).split(" ")
|
2482
|
-
package = normalized_line_splitted[0]
|
2483
|
-
latest_version = normalized_line_splitted[3]
|
2484
|
-
if package in package_json_content["dependencies"]:
|
2485
|
-
package_json_content["dependencies"][package] = latest_version
|
2486
|
-
if package in package_json_content["devDependencies"]:
|
2487
|
-
package_json_content["devDependencies"][package] = latest_version
|
2488
|
-
with open(package_json_file, "w", encoding="utf-8") as package_json_file_object:
|
2489
|
-
json.dump(package_json_content, package_json_file_object, indent=4)
|
2490
|
-
self.do_npm_install(folder, True, verbosity)
|
2491
|
-
else:
|
2492
|
-
GeneralUtilities.write_message_to_stderr("Update dependencies resulted in an error.")
|
2493
|
-
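# Illustrative sketch (assumes the usual tabular "npm outdated" layout) of how the loop above
# maps one report line to a package name and its latest version:
sample_line = "left-pad    1.2.0   1.2.0   1.3.0   node_modules/left-pad   my-app"
columns = ' '.join(sample_line.split()).split(" ")  # collapse repeated whitespace, then split
package, latest_version = columns[0], columns[3]    # -> ("left-pad", "1.3.0")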
|
2494
|
-
@GeneralUtilities.check_arguments
|
2495
|
-
def generate_tasksfile_from_workspace_file(self, repository_folder: str, append_cli_args_at_end: bool = False) -> None:
|
2496
|
-
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
2497
|
-
if self.__sc.program_runner.will_be_executed_locally(): # works only locally, but much more performant than always running an external program
|
2498
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
2499
|
-
workspace_file: str = self.__sc.find_file_by_extension(repository_folder, "code-workspace")
|
2500
|
-
task_file: str = repository_folder + "/Taskfile.yml"
|
2501
|
-
lines: list[str] = ["version: '3'", GeneralUtilities.empty_string, "tasks:", GeneralUtilities.empty_string]
|
2502
|
-
workspace_file_content: str = self.__sc.get_file_content(workspace_file)
|
2503
|
-
jsoncontent = json.loads(workspace_file_content)
|
2504
|
-
tasks = jsoncontent["tasks"]["tasks"]
|
2505
|
-
tasks.sort(key=lambda x: x["label"].split("/")[-1], reverse=False) # sort by the label of the task
|
2506
|
-
for task in tasks:
|
2507
|
-
if task["type"] == "shell":
|
2508
|
-
|
2509
|
-
description: str = task["label"]
|
2510
|
-
name: str = GeneralUtilities.to_pascal_case(description)
|
2511
|
-
command = task["command"]
|
2512
|
-
relative_script_file = task["command"]
|
2513
|
-
|
2514
|
-
relative_script_file = "."
|
2515
|
-
cwd: str = None
|
2516
|
-
if "options" in task:
|
2517
|
-
options = task["options"]
|
2518
|
-
if "cwd" in options:
|
2519
|
-
cwd = options["cwd"]
|
2520
|
-
cwd = cwd.replace("${workspaceFolder}", ".")
|
2521
|
-
cwd = cwd.replace("\\", "\\\\").replace('"', '\\"') # escape backslashes and double quotes for YAML
|
2522
|
-
relative_script_file = cwd
|
2523
|
-
if len(relative_script_file) == 0:
|
2524
|
-
relative_script_file = "."
|
2525
|
-
|
2526
|
-
command_with_args = command
|
2527
|
-
if "args" in task:
|
2528
|
-
args = task["args"]
|
2529
|
-
if len(args) > 1:
|
2530
|
-
command_with_args = f"{command_with_args} {' '.join(args)}"
|
2531
|
-
|
2532
|
-
if "description" in task:
|
2533
|
-
additional_description = task["description"]
|
2534
|
-
description = f"{description} ({additional_description})"
|
2535
|
-
|
2536
|
-
if append_cli_args_at_end:
|
2537
|
-
command_with_args = f"{command_with_args} {{{{.CLI_ARGS}}}}"
|
2538
|
-
|
2539
|
-
description_literal = description.replace("\\", "\\\\").replace('"', '\\"') # escape backslashes and double quotes for YAML
|
2540
|
-
command_with_args = command_with_args.replace("\\", "\\\\").replace('"', '\\"') # escape backslashes and double quotes for YAML
|
2541
|
-
|
2542
|
-
lines.append(f" {name}:")
|
2543
|
-
lines.append(f' desc: "{description_literal}"')
|
2544
|
-
lines.append(' silent: true')
|
2545
|
-
if cwd is not None:
|
2546
|
-
lines.append(f' dir: "{cwd}"')
|
2547
|
-
lines.append(" cmds:")
|
2548
|
-
lines.append(f' - "{command_with_args}"')
|
2549
|
-
lines.append(' aliases:')
|
2550
|
-
lines.append(f' - {name.lower()}')
|
2551
|
-
if "aliases" in task:
|
2552
|
-
aliases = task["aliases"]
|
2553
|
-
for alias in aliases:
|
2554
|
-
lines.append(f' - {alias}')
|
2555
|
-
lines.append(GeneralUtilities.empty_string)
|
2556
|
-
|
2557
|
-
self.__sc.set_file_content(task_file, "\n".join(lines))
|
2558
|
-
else:
|
2559
|
-
self.__sc.run_program("scgeneratetasksfilefromworkspacefile", f"--repositoryfolder {repository_folder}")
|
2560
|
-
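# Illustrative sketch (hypothetical task labelled "Build/Build Codeunit"; all names are
# assumptions) of the Taskfile.yml fragment the loop above emits for one shell task when
# append_cli_args_at_end is set:
#   BuildBuildCodeunit:
#     desc: "Build/Build Codeunit"
#     silent: true
#     dir: "./MyCodeunit/Other/Build"
#     cmds:
#       - "python Build.py {{.CLI_ARGS}}"
#     aliases:
#       - buildbuildcodeunit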
|
2561
|
-
@GeneralUtilities.check_arguments
|
2562
|
-
def start_local_test_service(self, file: str):
|
2563
|
-
example_folder = os.path.dirname(file)
|
2564
|
-
docker_compose_file = os.path.join(example_folder, "docker-compose.yml")
|
2565
|
-
for service in self.__sc.get_services_from_yaml_file(docker_compose_file):
|
2566
|
-
self.__sc.kill_docker_container(service)
|
2567
|
-
example_name = os.path.basename(example_folder)
|
2568
|
-
title = f"Test{example_name}"
|
2569
|
-
self.__sc.run_program("docker", f"compose -p {title.lower()} up --detach", example_folder, title=title)
|
2570
|
-
|
2571
|
-
@GeneralUtilities.check_arguments
|
2572
|
-
def stop_local_test_service(self, file: str):
|
2573
|
-
example_folder = os.path.dirname(file)
|
2574
|
-
example_name = os.path.basename(example_folder)
|
2575
|
-
title = f"Test{example_name}"
|
2576
|
-
self.__sc.run_program("docker", f"compose -p {title.lower()} down", example_folder, title=title)
|
2577
|
-
|
2578
|
-
@GeneralUtilities.check_arguments
|
2579
|
-
def standardized_tasks_update_version_in_docker_examples(self, file, codeunit_version) -> None:
|
2580
|
-
folder_of_current_file = os.path.dirname(file)
|
2581
|
-
codeunit_folder = GeneralUtilities.resolve_relative_path("..", folder_of_current_file)
|
2582
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
2583
|
-
codeunit_name_lower = codeunit_name.lower()
|
2584
|
-
examples_folder = GeneralUtilities.resolve_relative_path("Other/Reference/ReferenceContent/Examples", codeunit_folder)
|
2585
|
-
for example_folder in GeneralUtilities.get_direct_folders_of_folder(examples_folder):
|
2586
|
-
docker_compose_file = os.path.join(example_folder, "docker-compose.yml")
|
2587
|
-
if os.path.isfile(docker_compose_file):
|
2588
|
-
filecontent = GeneralUtilities.read_text_from_file(docker_compose_file)
|
2589
|
-
replaced = re.sub(f'image:\\s+{codeunit_name_lower}:\\d+\\.\\d+\\.\\d+', f"image: {codeunit_name_lower}:{codeunit_version}", filecontent)
|
2590
|
-
GeneralUtilities.write_text_to_file(docker_compose_file, replaced)
|
2591
|
-
|
2592
|
-
@GeneralUtilities.check_arguments
|
2593
|
-
def start_dockerfile_example(self, current_file: str, verbosity: int, remove_old_container: bool, remove_volumes_folder: bool, commandline_arguments: list[str], env_file: str) -> None:
|
2594
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
2595
|
-
folder = os.path.dirname(current_file)
|
2596
|
-
example_name = os.path.basename(folder)
|
2597
|
-
oci_image_artifacts_folder = GeneralUtilities.resolve_relative_path("../../../../Artifacts/BuildResult_OCIImage", folder)
|
2598
|
-
image_filename = os.path.basename(self.__sc.find_file_by_extension(oci_image_artifacts_folder, "tar"))
|
2599
|
-
codeunit_name = os.path.basename(GeneralUtilities.resolve_relative_path("../../../../..", folder))
|
2600
|
-
if remove_old_container:
|
2601
|
-
docker_compose_file = f"{folder}/docker-compose.yml"
|
2602
|
-
container_names = []
|
2603
|
-
lines = GeneralUtilities.read_lines_from_file(docker_compose_file)
|
2604
|
-
for line in lines:
|
2605
|
-
if match := re.search("container_name:\\s*'?([^']+)'?", line):
|
2606
|
-
container_names.append(match.group(1))
|
2607
|
-
GeneralUtilities.write_message_to_stdout(f"Ensure container of {docker_compose_file} do not exist...")
|
2608
|
-
for container_name in container_names:
|
2609
|
-
GeneralUtilities.write_message_to_stdout(f"Ensure container {container_name} does not exist...")
|
2610
|
-
self.__sc.run_program("docker", f"container rm -f {container_name}", oci_image_artifacts_folder, verbosity=0, throw_exception_if_exitcode_is_not_zero=False)
|
2611
|
-
if remove_volumes_folder:
|
2612
|
-
volumes_folder = os.path.join(folder, "Volumes")
|
2613
|
-
GeneralUtilities.write_message_to_stdout(f"Ensure volumes-folder '{volumes_folder}' does not exist...")
|
2614
|
-
GeneralUtilities.ensure_directory_does_not_exist(volumes_folder)
|
2615
|
-
GeneralUtilities.ensure_directory_exists(volumes_folder)
|
2616
|
-
GeneralUtilities.write_message_to_stdout("Load docker-image...")
|
2617
|
-
self.__sc.run_program("docker", f"load -i {image_filename}", oci_image_artifacts_folder, verbosity=verbosity)
|
2618
|
-
docker_project_name = f"{codeunit_name}_{example_name}".lower()
|
2619
|
-
GeneralUtilities.write_message_to_stdout("Start docker-container...")
|
2620
|
-
argument = f"compose --project-name {docker_project_name}"
|
2621
|
-
if env_file is not None:
|
2622
|
-
argument = f"{argument} --env-file {env_file}"
|
2623
|
-
argument = f"{argument} up --detach"
|
2624
|
-
self.__sc.run_program("docker", argument, folder, verbosity=verbosity)
|
2625
|
-
|
2626
|
-
@GeneralUtilities.check_arguments
|
2627
|
-
def ensure_env_file_is_generated(self, current_file: str, env_file_name: str, env_values: dict[str, str]):
|
2628
|
-
folder = os.path.dirname(current_file)
|
2629
|
-
env_file = os.path.join(folder, env_file_name)
|
2630
|
-
if not os.path.isfile(env_file):
|
2631
|
-
lines = []
|
2632
|
-
for key, value in env_values.items():
|
2633
|
-
lines.append(f"{key}={value}")
|
2634
|
-
GeneralUtilities.write_lines_to_file(env_file, lines)
|
2635
|
-
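# Illustrative sketch: a dict of values becomes plain KEY=value lines; an already existing
# env-file is left untouched, so locally adjusted values survive repeated runs.
env_values = {"HTTP_PORT": "8080", "LOG_LEVEL": "Information"}
lines = [f"{key}={value}" for key, value in env_values.items()]
# -> ["HTTP_PORT=8080", "LOG_LEVEL=Information"]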
|
2636
|
-
@GeneralUtilities.check_arguments
|
2637
|
-
def stop_dockerfile_example(self, current_file: str, verbosity: int, remove_old_container: bool, remove_volumes_folder: bool, commandline_arguments: list[str]) -> None:
|
2638
|
-
verbosity = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)
|
2639
|
-
folder = os.path.dirname(current_file)
|
2640
|
-
example_name = os.path.basename(folder)
|
2641
|
-
codeunit_name = os.path.basename(GeneralUtilities.resolve_relative_path("../../../../..", folder))
|
2642
|
-
docker_project_name = f"{codeunit_name}_{example_name}".lower()
|
2643
|
-
GeneralUtilities.write_message_to_stdout("Stop docker-container...")
|
2644
|
-
self.__sc.run_program("docker", f"compose --project-name {docker_project_name} down", folder, verbosity=verbosity)
|
2645
|
-
|
2646
|
-
@GeneralUtilities.check_arguments
|
2647
|
-
def create_artifact_for_development_certificate(self, codeunit_folder: str):
|
2648
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2649
|
-
ce_source_folder = GeneralUtilities.resolve_relative_path("Other/Resources/DevelopmentCertificate", codeunit_folder)
|
2650
|
-
ca_source_folder = GeneralUtilities.resolve_relative_path("Other/Resources/CA", codeunit_folder)
|
2651
|
-
ce_target_folder = GeneralUtilities.resolve_relative_path("Other/Artifacts/DevelopmentCertificate", codeunit_folder)
|
2652
|
-
ca_target_folder = GeneralUtilities.resolve_relative_path("Other/Artifacts/CA", codeunit_folder)
|
2653
|
-
|
2654
|
-
GeneralUtilities.ensure_directory_does_not_exist(ce_target_folder)
|
2655
|
-
GeneralUtilities.ensure_directory_exists(ce_target_folder)
|
2656
|
-
GeneralUtilities.copy_content_of_folder(ce_source_folder, ce_target_folder)
|
2657
|
-
GeneralUtilities.ensure_directory_does_not_exist(ca_target_folder)
|
2658
|
-
GeneralUtilities.ensure_directory_exists(ca_target_folder)
|
2659
|
-
GeneralUtilities.copy_content_of_folder(ca_source_folder, ca_target_folder)
|
2660
|
-
|
2661
|
-
@GeneralUtilities.check_arguments
|
2662
|
-
def _internal_get_sorted_codeunits_by_dict(self, codeunits: dict[str, set[str]]) -> list[str]:
|
2663
|
-
sorted_codeunits = {
|
2664
|
-
node: sorted(codeunits[node])
|
2665
|
-
for node in sorted(codeunits)
|
2666
|
-
}
|
2667
|
-
|
2668
|
-
ts = TopologicalSorter()
|
2669
|
-
for node, deps in sorted_codeunits.items():
|
2670
|
-
ts.add(node, *deps)
|
2671
|
-
|
2672
|
-
result_typed = list(ts.static_order())
|
2673
|
-
result = [str(item) for item in result_typed]
|
2674
|
-
return result
|
2675
|
-
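# Illustrative sketch (hypothetical codeunit names) of the ordering above: every codeunit is
# emitted after the codeunits it depends on, which yields a valid build order.
from graphlib import TopologicalSorter
sorter = TopologicalSorter({"App": {"Library"}, "Library": set(), "Tests": {"App"}})
print(list(sorter.static_order()))  # -> ['Library', 'App', 'Tests']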
|
2676
|
-
@GeneralUtilities.check_arguments
|
2677
|
-
def get_project_name(self, repository_folder: str) -> str:
|
2678
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
2679
|
-
for file in GeneralUtilities.get_direct_files_of_folder(repository_folder):
|
2680
|
-
if file.endswith(".code-workspace"):
|
2681
|
-
return Path(file).stem
|
2682
|
-
raise ValueError(f'Project-name can not be calculated for repository "{repository_folder}"')
|
2683
|
-
|
2684
|
-
def __check_target_environmenttype(self, target_environmenttype: str):
|
2685
|
-
allowed_values = list(self.get_default_target_environmenttype_mapping().values())
|
2686
|
-
if not (target_environmenttype in allowed_values):
|
2687
|
-
raise ValueError(f"Invalid target-environmenttype: '{target_environmenttype}'")
|
2688
|
-
|
2689
|
-
@GeneralUtilities.check_arguments
|
2690
|
-
def build_codeunit(self, codeunit_folder: str, verbosity: int = 1, target_environmenttype: str = "QualityCheck", additional_arguments_file: str = None, is_pre_merge: bool = False, export_target_directory: str = None, assume_dependent_codeunits_are_already_built: bool = False, commandlinearguments: list[str] = []) -> None:
|
2691
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2692
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
2693
|
-
repository_folder = os.path.dirname(codeunit_folder)
|
2694
|
-
self.build_specific_codeunits(repository_folder, [codeunit_name], verbosity, target_environmenttype, additional_arguments_file, is_pre_merge, export_target_directory, assume_dependent_codeunits_are_already_built, commandlinearguments, False)
|
2695
|
-
|
2696
|
-
@GeneralUtilities.check_arguments
|
2697
|
-
def build_codeunitsC(self, repository_folder: str, image: str, verbosity: int = 1, target_environmenttype: str = "QualityCheck", additional_arguments_file: str = None, commandlinearguments: list[str] = []) -> None:
|
2698
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
2699
|
-
if target_environmenttype == "Development":
|
2700
|
-
raise ValueError(f"build_codeunitsC is not available for target_environmenttype {target_environmenttype}.")
|
2701
|
-
# TODO handle additional_arguments_file
|
2702
|
-
# TODO add option to allow building different codeunits in same project with different images due to their demands
|
2703
|
-
# TODO check if image provides all demands of codeunit
|
2704
|
-
self.__sc.run_program("docker", f"run --volume {repository_folder}:/Workspace/Repository " + f"-e repositoryfolder=/Workspace/Repository -e verbosity={verbosity} -e targetenvironment={target_environmenttype} {image}", repository_folder)
|
2705
|
-
|
2706
|
-
@GeneralUtilities.check_arguments
|
2707
|
-
def build_codeunits(self, repository_folder: str, verbosity: int = 1, target_environmenttype: str = "QualityCheck", additional_arguments_file: str = None, is_pre_merge: bool = False, export_target_directory: str = None, commandline_arguments: list[str] = [], do_git_clean_when_no_changes: bool = False, note: str = None) -> None:
|
2708
|
-
self.__check_target_environmenttype(target_environmenttype)
|
2709
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
2710
|
-
repository_folder = GeneralUtilities.resolve_relative_path_from_current_working_directory(repository_folder)
|
2711
|
-
codeunits = self.get_codeunits(repository_folder, False)
|
2712
|
-
project_version = self.get_version_of_project(repository_folder)
|
2713
|
-
|
2714
|
-
now = GeneralUtilities.get_now()
|
2715
|
-
|
2716
|
-
project_resources_folder = os.path.join(repository_folder, "Other", "Scripts")
|
2717
|
-
PrepareBuildCodeunits_script_name = "PrepareBuildCodeunits.py"
|
2718
|
-
prepare_build_codeunits_scripts = os.path.join(project_resources_folder, PrepareBuildCodeunits_script_name)
|
2719
|
-
|
2720
|
-
if do_git_clean_when_no_changes and not self.__sc.git_repository_has_uncommitted_changes(repository_folder):
|
2721
|
-
self.__sc.run_program("git", "clean -dfx", repository_folder)
|
2722
|
-
if os.path.isfile(prepare_build_codeunits_scripts):
|
2723
|
-
GeneralUtilities.write_message_to_stdout(f'Run "{PrepareBuildCodeunits_script_name}"')
|
2724
|
-
result = self.__sc.run_program("python", f"{PrepareBuildCodeunits_script_name}", project_resources_folder, throw_exception_if_exitcode_is_not_zero=False, print_live_output=True)
|
2725
|
-
if result[0] != 0:
|
2726
|
-
raise ValueError(f"PrepareBuildCodeunits.py resulted in exitcode {result[0]}.")
|
2727
|
-
|
2728
|
-
self.__do_repository_checks(repository_folder, project_version)
|
2729
|
-
if not self.__suport_information_exists(repository_folder, project_version):
|
2730
|
-
support_time = timedelta(days=365*2+30*3+1) # TODO make this configurable
|
2731
|
-
until = now + support_time
|
2732
|
-
until_day = datetime(until.year, until.month, until.day, 0, 0, 0)
|
2733
|
-
from_day = datetime(now.year, now.month, now.day, 0, 0, 0)
|
2734
|
-
self.mark_current_version_as_supported(repository_folder, project_version, from_day, until_day)
|
2735
|
-
self.build_specific_codeunits(repository_folder, codeunits, verbosity, target_environmenttype, additional_arguments_file, is_pre_merge, export_target_directory, False, commandline_arguments, do_git_clean_when_no_changes, note)
|
2736
|
-
self.__save_lines_of_code(repository_folder, project_version)
|
2737
|
-
|
2738
|
-
@GeneralUtilities.check_arguments
|
2739
|
-
def __save_lines_of_code(self, repository_folder: str, project_version: str) -> None:
|
2740
|
-
loc = self.__sc.get_lines_of_code_with_default_excluded_patterns(repository_folder)
|
2741
|
-
loc_metric_folder = os.path.join(repository_folder, "Other", "Metrics")
|
2742
|
-
GeneralUtilities.ensure_directory_exists(loc_metric_folder)
|
2743
|
-
loc_metric_file = os.path.join(loc_metric_folder, "LinesOfCode.csv")
|
2744
|
-
GeneralUtilities.ensure_file_exists(loc_metric_file)
|
2745
|
-
old_lines = GeneralUtilities.read_lines_from_file(loc_metric_file)
|
2746
|
-
new_lines = []
|
2747
|
-
for line in old_lines:
|
2748
|
-
if not line.startswith(f"v{project_version};"):
|
2749
|
-
new_lines.append(line)
|
2750
|
-
new_lines.append(f"v{project_version};{loc}")
|
2751
|
-
GeneralUtilities.write_lines_to_file(loc_metric_file, new_lines)
|
2752
|
-
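# Illustrative sketch of the bookkeeping above: LinesOfCode.csv holds one "v<version>;<loc>"
# row per project version, and rebuilding a version replaces its existing row.
old_lines = ["v1.0.0;12345", "v1.1.0;12789"]
project_version, loc = "1.1.0", 13001
new_lines = [line for line in old_lines if not line.startswith(f"v{project_version};")]
new_lines.append(f"v{project_version};{loc}")
# -> ["v1.0.0;12345", "v1.1.0;13001"]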
|
2753
|
-
@GeneralUtilities.check_arguments
|
2754
|
-
def build_specific_codeunits(self, repository_folder: str, codeunits: list[str], verbosity: int = 1, target_environmenttype: str = "QualityCheck", additional_arguments_file: str = None, is_pre_merge: bool = False, export_target_directory: str = None, assume_dependent_codeunits_are_already_built: bool = True, commandline_arguments: list[str] = [], do_git_clean_when_no_changes: bool = False, note: str = None, check_for_new_files: bool = True) -> None:
|
2755
|
-
now_begin: datetime = GeneralUtilities.get_now()
|
2756
|
-
codeunits_list = "{"+", ".join(codeunits)+"}"
|
2757
|
-
if verbosity > 2:
|
2758
|
-
GeneralUtilities.write_message_to_stdout(f"Start building codeunits {codeunits_list} in repository '{repository_folder}'...")
|
2759
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
2760
|
-
self.__check_target_environmenttype(target_environmenttype)
|
2761
|
-
repository_folder = GeneralUtilities.resolve_relative_path_from_current_working_directory(repository_folder)
|
2762
|
-
repository_name = os.path.basename(repository_folder)
|
2763
|
-
contains_uncommitted_changes_at_begin = self.__sc.git_repository_has_uncommitted_changes(repository_folder)
|
2764
|
-
if contains_uncommitted_changes_at_begin:
|
2765
|
-
if is_pre_merge:
|
2766
|
-
raise ValueError(f'Repository "{repository_folder}" has uncommitted changes.')
|
2767
|
-
codeunit_subfolders = [os.path.join(repository_folder, codeunit) for codeunit in codeunits]
|
2768
|
-
codeunits_with_dependent_codeunits: dict[str, set[str]] = dict[str, set[str]]()
|
2769
|
-
|
2770
|
-
for subfolder in codeunit_subfolders:
|
2771
|
-
codeunit_name: str = os.path.basename(subfolder)
|
2772
|
-
codeunit_file = os.path.join(subfolder, f"{codeunit_name}.codeunit.xml")
|
2773
|
-
GeneralUtilities.assert_condition(os.path.exists(codeunit_file), f"Codeunit-file '{codeunit_file}' does not exist.")
|
2774
|
-
codeunits_with_dependent_codeunits[codeunit_name] = self.get_dependent_code_units(codeunit_file)
|
2775
|
-
sorted_codeunits = self.get_codeunits(repository_folder)
|
2776
|
-
sorted_codeunits = [codeunit for codeunit in sorted_codeunits if codeunit in codeunits]
|
2777
|
-
project_version = self.get_version_of_project(repository_folder)
|
2778
|
-
|
2779
|
-
message = f"Build codeunits in product {repository_name}... (Started: {GeneralUtilities.datetime_to_string_for_logfile_entry(now_begin)})"
|
2780
|
-
if note is not None:
|
2781
|
-
message = f"{message} ({note})"
|
2782
|
-
GeneralUtilities.write_message_to_stdout(message)
|
2783
|
-
|
2784
|
-
if len(sorted_codeunits) == 0:
|
2785
|
-
raise ValueError(f'No codeunit found in subfolders of "{repository_folder}".')
|
2786
|
-
else:
|
2787
|
-
if verbosity > 1:
|
2788
|
-
GeneralUtilities.write_message_to_stdout(f"Attempt to build codeunits ({codeunits_list}) for project version {project_version} in the following order:")
|
2789
|
-
i = 0
|
2790
|
-
for codeunit in sorted_codeunits:
|
2791
|
-
i = i+1
|
2792
|
-
GeneralUtilities.write_message_to_stdout(f"{i}.: {codeunit}")
|
2793
|
-
for codeunit in sorted_codeunits:
|
2794
|
-
GeneralUtilities.write_message_to_stdout(GeneralUtilities.get_line())
|
2795
|
-
self.__build_codeunit(os.path.join(repository_folder, codeunit), verbosity, target_environmenttype, additional_arguments_file, is_pre_merge, assume_dependent_codeunits_are_already_built, commandline_arguments)
|
2796
|
-
GeneralUtilities.write_message_to_stdout(GeneralUtilities.get_line())
|
2797
|
-
contains_uncommitted_changes_at_end = self.__sc.git_repository_has_uncommitted_changes(repository_folder)
|
2798
|
-
if contains_uncommitted_changes_at_end and (not is_pre_merge) and check_for_new_files:
|
2799
|
-
if contains_uncommitted_changes_at_begin:
|
2800
|
-
GeneralUtilities.write_message_to_stdout(f'There are still uncommitted changes in the repository "{repository_folder}".')
|
2801
|
-
else:
|
2802
|
-
message = f'Due to the build-process the repository "{repository_folder}" has new uncommitted changes.'
|
2803
|
-
if target_environmenttype == "Development":
|
2804
|
-
GeneralUtilities.write_message_to_stderr(f"Warning: {message}")
|
2805
|
-
else:
|
2806
|
-
raise ValueError(message)
|
2807
|
-
|
2808
|
-
if export_target_directory is not None:
|
2809
|
-
project_name = self.get_project_name(repository_folder)
|
2810
|
-
for codeunit in sorted_codeunits:
|
2811
|
-
codeunit_version = self.get_version_of_codeunit_folder(os.path.join(repository_folder, codeunit))
|
2812
|
-
artifacts_folder = os.path.join(repository_folder, codeunit, "Other", "Artifacts")
|
2813
|
-
target_folder = os.path.join(export_target_directory, project_name, project_version, codeunit)
|
2814
|
-
GeneralUtilities.ensure_directory_does_not_exist(target_folder)
|
2815
|
-
GeneralUtilities.ensure_directory_exists(target_folder)
|
2816
|
-
filename_without_extension = f"{codeunit}.v{codeunit_version}.{target_environmenttype}.Artifacts"
|
2817
|
-
shutil.make_archive(filename_without_extension, 'zip', artifacts_folder)
|
2818
|
-
archive_file = os.path.join(os.getcwd(), f"{filename_without_extension}.zip")
|
2819
|
-
shutil.move(archive_file, target_folder)
|
2820
|
-
|
2821
|
-
now_end: datetime = GeneralUtilities.get_now()
|
2822
|
-
message2 = f"Finished build codeunits in product {repository_name}. (Finished: {GeneralUtilities.datetime_to_string_for_logfile_entry(now_end)})"
|
2823
|
-
if note is not None:
|
2824
|
-
message2 = f"{message2} ({note})"
|
2825
|
-
GeneralUtilities.write_message_to_stdout(message2)
|
2826
|
-
|
2827
|
-
@GeneralUtilities.check_arguments
|
2828
|
-
def __do_repository_checks(self, repository_folder: str, project_version: str) -> None: # TODO move this to a general project-specific (and codeunit-independent) script
|
2829
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
2830
|
-
self.__check_if_changelog_exists(repository_folder, project_version)
|
2831
|
-
self.__check_whether_security_txt_exists(repository_folder)
|
2832
|
-
self.__check_whether_general_reference_exists(repository_folder)
|
2833
|
-
self.__check_whether_workspace_file_exists(repository_folder)
|
2834
|
-
self.__check_for_staged_or_committed_ignored_files(repository_folder)
|
2835
|
-
|
2836
|
-
@GeneralUtilities.check_arguments
|
2837
|
-
def __check_whether_general_reference_exists(self, repository_folder: str) -> None:
|
2838
|
-
GeneralUtilities.assert_file_exists(os.path.join(repository_folder, "Other", "Reference", "Reference.md"))
|
2839
|
-
|
2840
|
-
@GeneralUtilities.check_arguments
|
2841
|
-
def __check_if_changelog_exists(self, repository_folder: str, project_version: str) -> None:
|
2842
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
2843
|
-
changelog_folder = os.path.join(repository_folder, "Other", "Resources", "Changelog")
|
2844
|
-
changelog_file = os.path.join(changelog_folder, f"v{project_version}.md")
|
2845
|
-
if not os.path.isfile(changelog_file):
|
2846
|
-
raise ValueError(f"Changelog-file '{changelog_file}' does not exist. Try creating it using 'sccreatechangelogentry' for example.")
|
2847
|
-
|
2848
|
-
@GeneralUtilities.check_arguments
|
2849
|
-
def __check_whether_security_txt_exists(self, repository_folder: str) -> None:
|
2850
|
-
security_txt_file_relative = ".well-known/security.txt"
|
2851
|
-
security_txt_file = GeneralUtilities.resolve_relative_path(security_txt_file_relative, repository_folder)
|
2852
|
-
if not os.path.isfile(security_txt_file):
|
2853
|
-
raise ValueError(f"The repository does not contain a '{security_txt_file_relative}'-file. See https://securitytxt.org/ for more information.")
|
2854
|
-
# TODO throw error if the date set in the file is expired
|
2855
|
-
# TODO write a warning if the date set in the file expires soon
|
2856
|
-
|
2857
|
-
@GeneralUtilities.check_arguments
|
2858
|
-
def __check_for_staged_or_committed_ignored_files(self, repository_folder: str) -> None:
|
2859
|
-
for file in self.__sc.get_staged_or_committed_git_ignored_files(repository_folder):
|
2860
|
-
GeneralUtilities.write_message_to_stderr(f'Warning: Repository contains staged or committed file "{file}" which is git-ignored.')
|
2861
|
-
|
2862
|
-
@GeneralUtilities.check_arguments
|
2863
|
-
def __check_whether_workspace_file_exists(self, repository_folder: str) -> None:
|
2864
|
-
count = 0
|
2865
|
-
for file in GeneralUtilities.get_direct_files_of_folder(repository_folder):
|
2866
|
-
if file.endswith(".code-workspace"):
|
2867
|
-
count = count + 1
|
2868
|
-
if count != 1:
|
2869
|
-
raise ValueError('The repository must contain exactly one ".code-workspace"-file on the top-level.')
|
2870
|
-
|
2871
|
-
@GeneralUtilities.check_arguments
|
2872
|
-
def update_dependency_in_resources_folder(self, update_dependencies_file, dependency_name: str, latest_version_function: str) -> None:
|
2873
|
-
dependency_folder = GeneralUtilities.resolve_relative_path(f"../Resources/Dependencies/{dependency_name}", update_dependencies_file)
|
2874
|
-
version_file = os.path.join(dependency_folder, "Version.txt")
|
2875
|
-
version_file_exists = os.path.isfile(version_file)
|
2876
|
-
write_to_file = False
|
2877
|
-
if version_file_exists:
|
2878
|
-
current_version = GeneralUtilities.read_text_from_file(version_file)
|
2879
|
-
if current_version != latest_version_function:
|
2880
|
-
write_to_file = True
|
2881
|
-
else:
|
2882
|
-
GeneralUtilities.ensure_directory_exists(dependency_folder)
|
2883
|
-
GeneralUtilities.ensure_file_exists(version_file)
|
2884
|
-
write_to_file = True
|
2885
|
-
if write_to_file:
|
2886
|
-
GeneralUtilities.write_text_to_file(version_file, latest_version_function)
|
2887
|
-
|
2888
|
-
@GeneralUtilities.check_arguments
|
2889
|
-
def __ensure_grylibrary_is_available(self, codeunit_folder: str) -> None:
|
2890
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2891
|
-
grylibrary_folder = os.path.join(codeunit_folder, "Other", "Resources", "GRYLibrary")
|
2892
|
-
grylibrary_dll_file = os.path.join(grylibrary_folder, "BuildResult_DotNet_win-x64", "GRYLibrary.dll")
|
2893
|
-
internet_connection_is_available = GeneralUtilities.internet_connection_is_available()
|
2894
|
-
grylibrary_dll_file_exists = os.path.isfile(grylibrary_dll_file)
|
2895
|
-
if internet_connection_is_available: # Load/Update GRYLibrary
|
2896
|
-
grylibrary_latest_codeunit_file = "https://raw.githubusercontent.com/anionDev/GRYLibrary/stable/GRYLibrary/GRYLibrary.codeunit.xml"
|
2897
|
-
with urllib.request.urlopen(grylibrary_latest_codeunit_file) as url_result:
|
2898
|
-
grylibrary_latest_version = self.get_version_of_codeunit_file_content(url_result.read().decode("utf-8"))
|
2899
|
-
if grylibrary_dll_file_exists:
|
2900
|
-
grylibrary_existing_codeunit_file = os.path.join(grylibrary_folder, "SourceCode", "GRYLibrary.codeunit.xml")
|
2901
|
-
grylibrary_existing_codeunit_version = self.get_version_of_codeunit(grylibrary_existing_codeunit_file)
|
2902
|
-
if grylibrary_existing_codeunit_version != grylibrary_latest_version:
|
2903
|
-
GeneralUtilities.ensure_directory_does_not_exist(grylibrary_folder)
|
2904
|
-
if not os.path.isfile(grylibrary_dll_file):
|
2905
|
-
GeneralUtilities.ensure_directory_does_not_exist(grylibrary_folder)
|
2906
|
-
GeneralUtilities.ensure_directory_exists(grylibrary_folder)
|
2907
|
-
archive_name = f"GRYLibrary.v{grylibrary_latest_version}.Productive.Artifacts.zip"
|
2908
|
-
archive_download_link = f"https://github.com/anionDev/GRYLibrary/releases/download/v{grylibrary_latest_version}/{archive_name}"
|
2909
|
-
archive_file = os.path.join(grylibrary_folder, archive_name)
|
2910
|
-
urllib.request.urlretrieve(archive_download_link, archive_file)
|
2911
|
-
with zipfile.ZipFile(archive_file, 'r') as zip_ref:
|
2912
|
-
zip_ref.extractall(grylibrary_folder)
|
2913
|
-
GeneralUtilities.ensure_file_does_not_exist(archive_file)
|
2914
|
-
else:
|
2915
|
-
if grylibrary_dll_file_exists:
|
2916
|
-
GeneralUtilities.write_message_to_stdout("Warning: Can not check for updates of GRYLibrary due to missing internet-connection.")
|
2917
|
-
else:
|
2918
|
-
raise ValueError("Can not download GRYLibrary.")
|
2919
|
-
|
2920
|
-
@GeneralUtilities.check_arguments
|
2921
|
-
def ensure_ffmpeg_is_available(self, codeunit_folder: str) -> None:
|
2922
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
2923
|
-
ffmpeg_folder = os.path.join(codeunit_folder, "Other", "Resources", "FFMPEG")
|
2924
|
-
internet_connection_is_available = GeneralUtilities.internet_connection_is_available()
|
2925
|
-
exe_file = f"{ffmpeg_folder}/ffmpeg.exe"
|
2926
|
-
exe_file_exists = os.path.isfile(exe_file)
|
2927
|
-
if internet_connection_is_available: # Load/Update
|
2928
|
-
GeneralUtilities.ensure_directory_does_not_exist(ffmpeg_folder)
|
2929
|
-
GeneralUtilities.ensure_directory_exists(ffmpeg_folder)
|
2930
|
-
ffmpeg_temp_folder = ffmpeg_folder+"Temp"
|
2931
|
-
GeneralUtilities.ensure_directory_does_not_exist(ffmpeg_temp_folder)
|
2932
|
-
GeneralUtilities.ensure_directory_exists(ffmpeg_temp_folder)
|
2933
|
-
zip_file_on_disk = os.path.join(ffmpeg_temp_folder, "ffmpeg.zip")
|
2934
|
-
original_zip_filename = "ffmpeg-master-latest-win64-gpl-shared"
|
2935
|
-
zip_link = f"https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/{original_zip_filename}.zip"
|
2936
|
-
urllib.request.urlretrieve(zip_link, zip_file_on_disk)
|
2937
|
-
shutil.unpack_archive(zip_file_on_disk, ffmpeg_temp_folder)
|
2938
|
-
bin_folder_source = os.path.join(ffmpeg_temp_folder, "ffmpeg-master-latest-win64-gpl-shared/bin")
|
2939
|
-
bin_folder_target = ffmpeg_folder
|
2940
|
-
GeneralUtilities.copy_content_of_folder(bin_folder_source, bin_folder_target)
|
2941
|
-
GeneralUtilities.ensure_directory_does_not_exist(ffmpeg_temp_folder)
|
2942
|
-
else:
|
2943
|
-
if exe_file_exists:
|
2944
|
-
GeneralUtilities.write_message_to_stdout("Warning: Can not check for updates of FFMPEG due to missing internet-connection.")
|
2945
|
-
else:
|
2946
|
-
raise ValueError("Can not download FFMPEG.")
|
2947
|
-
|
2948
|
-
@GeneralUtilities.check_arguments
|
2949
|
-
def ensure_plantuml_is_available(self, target_folder: str) -> None:
|
2950
|
-
self.ensure_file_from_github_assets_is_available_with_retry(target_folder, "plantuml", "plantuml", "PlantUML", "plantuml.jar", lambda latest_version: "plantuml.jar")
|
2951
|
-
|
2952
|
-
@GeneralUtilities.check_arguments
|
2953
|
-
def ensure_androidappbundletool_is_available(self, target_folder: str) -> None:
|
2954
|
-
self.ensure_file_from_github_assets_is_available_with_retry(target_folder, "google", "bundletool", "AndroidAppBundleTool", "bundletool.jar", lambda latest_version: f"bundletool-all-{latest_version}.jar")
|
2955
|
-
|
2956
|
-
@GeneralUtilities.check_arguments
|
2957
|
-
def ensure_mediamtx_is_available(self, target_folder: str) -> None:
|
2958
|
-
def download_and_extract(osname: str, osname_in_github_asset: str, extension: str):
|
2959
|
-
resource_name: str = f"MediaMTX_{osname}"
|
2960
|
-
zip_filename: str = f"{resource_name}.{extension}"
|
2961
|
-
self.ensure_file_from_github_assets_is_available_with_retry(target_folder, "bluenviron", "mediamtx", resource_name, zip_filename, lambda latest_version: f"mediamtx_{latest_version}_{osname_in_github_asset}_amd64.{extension}")
|
2962
|
-
resource_folder: str = os.path.join(target_folder, "Other", "Resources", resource_name)
|
2963
|
-
target_folder_extracted = os.path.join(resource_folder, "MediaMTX")
|
2964
|
-
local_zip_file: str = os.path.join(resource_folder, f"{resource_name}.{extension}")
|
2965
|
-
GeneralUtilities.ensure_folder_exists_and_is_empty(target_folder_extracted)
|
2966
|
-
if extension == "zip":
|
2967
|
-
with zipfile.ZipFile(local_zip_file, 'r') as zip_ref:
|
2968
|
-
zip_ref.extractall(target_folder_extracted)
|
2969
|
-
elif extension == "tar.gz":
|
2970
|
-
with tarfile.open(local_zip_file, "r:gz") as tar:
|
2971
|
-
tar.extractall(path=target_folder_extracted)
|
2972
|
-
else:
|
2973
|
-
raise ValueError(f"Unknown extension: \"{extension}\"")
|
2974
|
-
GeneralUtilities.ensure_file_does_not_exist(local_zip_file)
|
2975
|
-
|
2976
|
-
download_and_extract("Windows", "windows", "zip")
|
2977
|
-
download_and_extract("Linux", "linux", "tar.gz")
|
2978
|
-
download_and_extract("MacOS", "darwin", "tar.gz")
|
2979
|
-
|
2980
|
-
@GeneralUtilities.check_arguments
|
2981
|
-
def ensure_cyclonedxcli_is_available(self, target_folder: str) -> None:
|
2982
|
-
local_filename = "cyclonedx-cli"
|
2983
|
-
filename_on_github: str
|
2984
|
-
if GeneralUtilities.current_system_is_windows():
|
2985
|
-
filename_on_github = "cyclonedx-win-x64.exe"
|
2986
|
-
local_filename = local_filename+".exe"
|
2987
|
-
else:
|
2988
|
-
filename_on_github = "cyclonedx-linux-x64"
|
2989
|
-
self.ensure_file_from_github_assets_is_available_with_retry(target_folder, "CycloneDX", "cyclonedx-cli", "CycloneDXCLI", local_filename, lambda latest_version: filename_on_github)
|
2990
|
-
|
2991
|
-
@GeneralUtilities.check_arguments
|
2992
|
-
def ensure_file_from_github_assets_is_available_with_retry(self, target_folder: str, githubuser: str, githubprojectname: str, resource_name: str, local_filename: str, get_filename_on_github, amount_of_attempts: int = 5) -> None:
|
2993
|
-
GeneralUtilities.retry_action(lambda: self.ensure_file_from_github_assets_is_available(target_folder, githubuser, githubprojectname, resource_name, local_filename, get_filename_on_github), amount_of_attempts)
|
2994
|
-
|
2995
|
-
@GeneralUtilities.check_arguments
|
2996
|
-
def ensure_file_from_github_assets_is_available(self, target_folder: str, githubuser: str, githubprojectname: str, resource_name: str, local_filename: str, get_filename_on_github) -> None:
|
2997
|
-
resource_folder = os.path.join(target_folder, "Other", "Resources", resource_name)
|
2998
|
-
internet_connection_is_available = GeneralUtilities.internet_connection_is_available()
|
2999
|
-
file = f"{resource_folder}/{local_filename}"
|
3000
|
-
file_exists = os.path.isfile(file)
|
3001
|
-
if internet_connection_is_available: # Load/Update
|
3002
|
-
GeneralUtilities.ensure_directory_does_not_exist(resource_folder)
|
3003
|
-
GeneralUtilities.ensure_directory_exists(resource_folder)
|
3004
|
-
headers = {'Cache-Control': 'no-cache'}
|
3005
|
-
response = requests.get(f"https://api.github.com/repos/{githubuser}/{githubprojectname}/releases/latest", timeout=10, headers=headers)
|
3006
|
-
latest_version = response.json()["tag_name"]
|
3007
|
-
filename_on_github = get_filename_on_github(latest_version)
|
3008
|
-
link = f"https://github.com/{githubuser}/{githubprojectname}/releases/download/{latest_version}/{filename_on_github}"
|
3009
|
-
urllib.request.urlretrieve(link, file)
|
3010
|
-
else:
|
3011
|
-
if file_exists:
|
3012
|
-
GeneralUtilities.write_message_to_stdout(f"Warning: Can not check for updates of {resource_name} due to missing internet-connection.")
|
3013
|
-
else:
|
3014
|
-
raise ValueError(f"Can not download {resource_name}.")
|
3015
|
-
|
3016
|
-
@GeneralUtilities.check_arguments
|
3017
|
-
def generate_svg_files_from_plantuml_files_for_repository(self, repository_folder: str) -> None:
|
3018
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
3019
|
-
self.ensure_plantuml_is_available(repository_folder)
|
3020
|
-
plant_uml_folder = os.path.join(repository_folder, "Other", "Resources", "PlantUML")
|
3021
|
-
target_folder = os.path.join(repository_folder, "Other", "Reference")
|
3022
|
-
self.__generate_svg_files_from_plantuml(target_folder, plant_uml_folder)
|
3023
|
-
|
3024
|
-
@GeneralUtilities.check_arguments
|
3025
|
-
def generate_svg_files_from_plantuml_files_for_codeunit(self, codeunit_folder: str) -> None:
|
3026
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
3027
|
-
repository_folder = os.path.dirname(codeunit_folder)
|
3028
|
-
self.ensure_plantuml_is_available(repository_folder)
|
3029
|
-
plant_uml_folder = os.path.join(repository_folder, "Other", "Resources", "PlantUML")
|
3030
|
-
target_folder = os.path.join(codeunit_folder, "Other", "Reference")
|
3031
|
-
self.__generate_svg_files_from_plantuml(target_folder, plant_uml_folder)
|
3032
|
-
|
3033
|
-
@GeneralUtilities.check_arguments
|
3034
|
-
def __generate_svg_files_from_plantuml(self, diagrams_files_folder: str, plant_uml_folder: str) -> None:
|
3035
|
-
for file in GeneralUtilities.get_all_files_of_folder(diagrams_files_folder):
|
3036
|
-
if file.endswith(".plantuml"):
|
3037
|
-
output_filename = self.get_output_filename_for_plantuml_filename(file)
|
3038
|
-
argument = ['-jar', f'{plant_uml_folder}/plantuml.jar', '-tsvg', os.path.basename(file)]
|
3039
|
-
folder = os.path.dirname(file)
|
3040
|
-
self.__sc.run_program_argsasarray("java", argument, folder, verbosity=0)
|
3041
|
-
result_file = folder+"/" + output_filename
|
3042
|
-
GeneralUtilities.assert_file_exists(result_file)
|
3043
|
-
self.__sc.format_xml_file(result_file)
|
3044
|
-
|
3045
|
-
@GeneralUtilities.check_arguments
|
3046
|
-
def get_output_filename_for_plantuml_filename(self, plantuml_file: str) -> str:
|
3047
|
-
for line in GeneralUtilities.read_lines_from_file(plantuml_file):
|
3048
|
-
prefix = "@startuml "
|
3049
|
-
if line.startswith(prefix):
|
3050
|
-
title = line[len(prefix):]
|
3051
|
-
return title+".svg"
|
3052
|
-
return Path(plantuml_file).stem+".svg"
|
3053
|
-
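# Illustrative sketch of the name resolution above: the SVG name is taken from the
# "@startuml <title>" line when a title is present, otherwise from the file name.
line = "@startuml CodeUnits-Overview"
prefix = "@startuml "
output_filename = line[len(prefix):] + ".svg"  # -> "CodeUnits-Overview.svg"
# A .plantuml file without a title line, e.g. Overview.plantuml, would resolve to "Overview.svg".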
|
3054
|
-
@GeneralUtilities.check_arguments
|
3055
|
-
def generate_codeunits_overview_diagram(self, repository_folder: str) -> None:
|
3056
|
-
self.__sc.assert_is_git_repository(repository_folder)
|
3057
|
-
project_name: str = os.path.basename(repository_folder)
|
3058
|
-
target_folder = os.path.join(repository_folder, "Other", "Reference", "Technical", "Diagrams")
|
3059
|
-
GeneralUtilities.ensure_directory_exists(target_folder)
|
3060
|
-
target_file = os.path.join(target_folder, "CodeUnits-Overview.plantuml")
|
3061
|
-
lines = ["@startuml CodeUnits-Overview"]
|
3062
|
-
lines.append(f"title CodeUnits of {project_name}")
|
3063
|
-
|
3064
|
-
codeunits = self.get_codeunits(repository_folder)
|
3065
|
-
for codeunitname in codeunits:
|
3066
|
-
codeunit_file: str = os.path.join(repository_folder, codeunitname, f"{codeunitname}.codeunit.xml")
|
3067
|
-
|
3068
|
-
description = self.get_codeunit_description(codeunit_file)
|
3069
|
-
|
3070
|
-
lines.append(GeneralUtilities.empty_string)
|
3071
|
-
lines.append(f"[{codeunitname}]")
|
3072
|
-
lines.append(f"note as {codeunitname}Note")
|
3073
|
-
lines.append(f" {description}")
|
3074
|
-
lines.append(f"end note")
|
3075
|
-
lines.append(f"{codeunitname} .. {codeunitname}Note")
|
3076
|
-
|
3077
|
-
lines.append(GeneralUtilities.empty_string)
|
3078
|
-
for codeunitname in codeunits:
|
3079
|
-
codeunit_file: str = os.path.join(repository_folder, codeunitname, f"{codeunitname}.codeunit.xml")
|
3080
|
-
dependent_codeunits = self.get_dependent_code_units(codeunit_file)
|
3081
|
-
for dependent_codeunit in dependent_codeunits:
|
3082
|
-
lines.append(f"{codeunitname} --> {dependent_codeunit}")
|
3083
|
-
|
3084
|
-
lines.append(GeneralUtilities.empty_string)
|
3085
|
-
lines.append("@enduml")
|
3086
|
-
|
3087
|
-
GeneralUtilities.write_lines_to_file(target_file, lines)
|
3088
|
-
|
3089
|
-
@GeneralUtilities.check_arguments
|
3090
|
-
def load_deb_control_file_content(self, file: str, codeunitname: str, codeunitversion: str, installedsize: int, maintainername: str, maintaineremail: str, description: str,) -> str:
|
3091
|
-
content = GeneralUtilities.read_text_from_file(file)
|
3092
|
-
content = GeneralUtilities.replace_variable_in_string(content, "codeunitname", codeunitname)
|
3093
|
-
content = GeneralUtilities.replace_variable_in_string(content, "codeunitversion", codeunitversion)
|
3094
|
-
content = GeneralUtilities.replace_variable_in_string(content, "installedsize", str(installedsize))
|
3095
|
-
content = GeneralUtilities.replace_variable_in_string(content, "maintainername", maintainername)
|
3096
|
-
content = GeneralUtilities.replace_variable_in_string(content, "maintaineremail", maintaineremail)
|
3097
|
-
content = GeneralUtilities.replace_variable_in_string(content, "description", description)
|
3098
|
-
return content
|
3099
|
-
|
3100
|
-
@GeneralUtilities.check_arguments
|
3101
|
-
def calculate_deb_package_size(self, binary_folder: str) -> int:
|
3102
|
-
size_in_bytes = 0
|
3103
|
-
for file in GeneralUtilities.get_all_files_of_folder(binary_folder):
|
3104
|
-
size_in_bytes = size_in_bytes+os.path.getsize(file)
|
3105
|
-
result = math.ceil(size_in_bytes/1024)
|
3106
|
-
return result
|
3107
|
-
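# Illustrative sketch: the deb Installed-Size value counts kibibytes, so the summed file
# sizes are divided by 1024 and rounded up, e.g. three files of 1500 bytes each:
import math
size_in_bytes = 1500 + 1500 + 1500
installed_size_kib = math.ceil(size_in_bytes / 1024)  # -> 5 (4500 bytes rounded up to 5 KiB)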
|
3108
|
-
@GeneralUtilities.check_arguments
|
3109
|
-
def create_deb_package_for_artifact(self, codeunit_folder: str, maintainername: str, maintaineremail: str, description: str, verbosity: int, cmd_arguments: list[str]) -> None:
|
3110
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
3111
|
-
verbosity = self.get_verbosity_from_commandline_arguments(cmd_arguments, verbosity)
|
3112
|
-
codeunit_name = os.path.basename(codeunit_folder)
|
3113
|
-
binary_folder = GeneralUtilities.resolve_relative_path("Other/Artifacts/BuildResult_DotNet_linux-x64", codeunit_folder)
|
3114
|
-
deb_output_folder = GeneralUtilities.resolve_relative_path("Other/Artifacts/BuildResult_Deb", codeunit_folder)
|
3115
|
-
control_file = GeneralUtilities.resolve_relative_path("Other/Build/DebControlFile.txt", codeunit_folder)
|
3116
|
-
installedsize = self.calculate_deb_package_size(binary_folder)
|
3117
|
-
control_file_content = self.load_deb_control_file_content(control_file, codeunit_name, self.get_version_of_codeunit_folder(codeunit_folder), installedsize, maintainername, maintaineremail, description)
|
3118
|
-
self.__sc.create_deb_package(codeunit_name, binary_folder, control_file_content, deb_output_folder, verbosity, 555)
|
3119
|
-
|
3120
|
-
@GeneralUtilities.check_arguments
|
3121
|
-
def create_zip_file_for_artifact(self, codeunit_folder: str, artifact_source_name: str, name_of_new_artifact: str, verbosity: int, cmd_arguments: list[str]) -> None:
|
3122
|
-
self.assert_is_codeunit_folder(codeunit_folder)
|
3123
|
-
verbosity = self.get_verbosity_from_commandline_arguments(cmd_arguments, verbosity)
|
3124
|
-
src_artifact_folder = GeneralUtilities.resolve_relative_path(f"Other/Artifacts/{artifact_source_name}", codeunit_folder)
|
3125
|
-
shutil.make_archive(name_of_new_artifact, 'zip', src_artifact_folder)
|
3126
|
-
archive_file = os.path.join(os.getcwd(), f"{name_of_new_artifact}.zip")
|
3127
|
-
target_folder = GeneralUtilities.resolve_relative_path(f"Other/Artifacts/{name_of_new_artifact}", codeunit_folder)
|
3128
|
-
GeneralUtilities.ensure_folder_exists_and_is_empty(target_folder)
|
3129
|
-
shutil.move(archive_file, target_folder)
|
3130
|
-
|
3131
|
-
    def generate_winget_zip_manifest(self, codeunit_folder: str, artifact_name_of_zip: str):
        self.assert_is_codeunit_folder(codeunit_folder)
        codeunit_version = self.get_version_of_codeunit_folder(codeunit_folder)
        build_folder = os.path.join(codeunit_folder, "Other", "Build")
        artifacts_folder = os.path.join(codeunit_folder, "Other", "Artifacts", artifact_name_of_zip)
        manifest_folder = os.path.join(codeunit_folder, "Other", "Artifacts", "WinGet-Manifest")
        GeneralUtilities.assert_folder_exists(artifacts_folder)
        artifacts_file = self.__sc.find_file_by_extension(artifacts_folder, "zip")
        winget_template_file = os.path.join(build_folder, "WinGet-Template.yaml")
        winget_manifest_file = os.path.join(manifest_folder, "WinGet-Manifest.yaml")
        GeneralUtilities.assert_file_exists(winget_template_file)
        GeneralUtilities.ensure_directory_exists(manifest_folder)
        GeneralUtilities.ensure_file_exists(winget_manifest_file)
        manifest_content = GeneralUtilities.read_text_from_file(winget_template_file)
        manifest_content = GeneralUtilities.replace_variable_in_string(manifest_content, "version", codeunit_version)
        manifest_content = GeneralUtilities.replace_variable_in_string(manifest_content, "sha256_hashvalue", GeneralUtilities.get_sha256_of_file(artifacts_file))
        GeneralUtilities.write_text_to_file(winget_manifest_file, manifest_content)

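The manifest step above fills in a SHA-256 checksum of the zipped artifact. GeneralUtilities.get_sha256_of_file is a ScriptCollection helper; the same value can be computed with hashlib alone, as in this sketch (function name and chunk size are illustrative):

import hashlib

def sha256_of_file(path: str) -> str:
    # Stream the file in chunks so large artifacts do not have to fit into memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            digest.update(chunk)
    return digest.hexdigest()
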
    @GeneralUtilities.check_arguments
    def update_year_in_license_file_in_common_scripts_file(self, common_tasks_scripts_file: str) -> None:
        self.update_year_in_license_file(GeneralUtilities.resolve_relative_path("../../..", common_tasks_scripts_file))

    @GeneralUtilities.check_arguments
    def update_year_in_license_file(self, repository_folder: str) -> None:
        self.__sc.update_year_in_first_line_of_file(os.path.join(repository_folder, "License.txt"))

    @GeneralUtilities.check_arguments
    def update_year_for_dotnet_codeunit_in_common_scripts_file(self, common_tasks_scripts_file: str) -> None:
        self.update_year_for_dotnet_codeunit(GeneralUtilities.resolve_relative_path("../..", common_tasks_scripts_file))

    @GeneralUtilities.check_arguments
    def update_year_for_dotnet_codeunit(self, codeunit_folder: str) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        codeunit_name = os.path.basename(codeunit_folder)
        csproj_file = os.path.join(codeunit_folder, codeunit_name, f"{codeunit_name}.csproj")
        self.__sc.update_year_in_copyright_tags(csproj_file)
        csprojtests_file = os.path.join(codeunit_folder, f"{codeunit_name}Tests", f"{codeunit_name}Tests.csproj")
        self.__sc.update_year_in_copyright_tags(csprojtests_file)
        nuspec_file = os.path.join(codeunit_folder, "Other", "Build", f"{codeunit_name}.nuspec")
        if os.path.isfile(nuspec_file):
            self.__sc.update_year_in_copyright_tags(nuspec_file)

    @GeneralUtilities.check_arguments
    def repository_has_codeunits(self, repository: str, ignore_disabled_codeunits: bool = True) -> bool:
        return len(self.get_codeunits(repository, ignore_disabled_codeunits)) > 0

    @GeneralUtilities.check_arguments
    def verify_artifact_exists(self, codeunit_folder: str, artifact_name_regexes: dict[str, bool]) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        codeunit_name: str = os.path.basename(codeunit_folder)
        artifacts_folder = os.path.join(codeunit_folder, "Other/Artifacts")
        existing_artifacts = [os.path.basename(x) for x in GeneralUtilities.get_direct_folders_of_folder(artifacts_folder)]
        for artifact_name_regex, required in artifact_name_regexes.items():
            artifact_exists = False
            for existing_artifact in existing_artifacts:
                pattern = re.compile(artifact_name_regex)
                if pattern.match(existing_artifact):
                    artifact_exists = True
            if not artifact_exists:
                message = f"Codeunit {codeunit_name} does not contain an artifact which matches the name '{artifact_name_regex}'."
                if required:
                    raise ValueError(message)
                else:
                    GeneralUtilities.write_message_to_stderr(f"Warning: {message}")

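A condensed, stdlib-only sketch of the required/optional artifact check above; it assumes the same convention that each artifact is a direct subfolder of the artifacts folder and that the dict maps a name regex to whether that artifact is mandatory (names here are illustrative):

import os
import re

def verify_artifacts(artifacts_folder: str, artifact_name_regexes: dict[str, bool]) -> None:
    # Required patterns raise, optional ones only warn when no matching subfolder exists.
    existing = [entry.name for entry in os.scandir(artifacts_folder) if entry.is_dir()]
    for name_regex, required in artifact_name_regexes.items():
        pattern = re.compile(name_regex)
        if not any(pattern.match(name) for name in existing):
            message = f"No artifact matches '{name_regex}'."
            if required:
                raise ValueError(message)
            print(f"Warning: {message}")
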
    @GeneralUtilities.check_arguments
    def __build_codeunit(self, codeunit_folder: str, verbosity: int = 1, target_environmenttype: str = "QualityCheck", additional_arguments_file: str = None, is_pre_merge: bool = False, assume_dependent_codeunits_are_already_built: bool = False, commandline_arguments: list[str] = []) -> None:
        self.assert_is_codeunit_folder(codeunit_folder)
        now = GeneralUtilities.get_now()
        codeunit_folder = GeneralUtilities.resolve_relative_path_from_current_working_directory(codeunit_folder)
        repository_folder = GeneralUtilities.resolve_relative_path("..", codeunit_folder)
        codeunit_name: str = os.path.basename(codeunit_folder)
        if verbosity > 2:
            GeneralUtilities.write_message_to_stdout(f"Start building codeunit {codeunit_name}")
        codeunit_file = os.path.join(codeunit_folder, f"{codeunit_name}.codeunit.xml")

        if (not os.path.isfile(codeunit_file)):
            raise ValueError(f'"{codeunit_folder}" is no codeunit-folder.')

        if not self.codeunit_is_enabled(codeunit_file):
            GeneralUtilities.write_message_to_stdout(f"Warning: Codeunit {codeunit_name} is disabled.")
            return

        GeneralUtilities.write_message_to_stdout(f"Start building codeunit {codeunit_name}.")
        GeneralUtilities.write_message_to_stdout(f"Build-environmenttype: {target_environmenttype}")
        if not self.__sc.git_repository_has_uncommitted_changes(repository_folder):
            self.__sc.run_program("git", "clean -dfx", codeunit_folder)

        verbosity_for_executed_programs = self.get_verbosity_from_commandline_arguments(commandline_arguments, verbosity)

        other_folder = os.path.join(codeunit_folder, "Other")
        build_folder = os.path.join(other_folder, "Build")
        quality_folder = os.path.join(other_folder, "QualityCheck")
        reference_folder = os.path.join(other_folder, "Reference")
        additional_arguments_c: str = GeneralUtilities.empty_string
        additional_arguments_b: str = GeneralUtilities.empty_string
        additional_arguments_r: str = GeneralUtilities.empty_string
        additional_arguments_l: str = GeneralUtilities.empty_string
        additional_arguments_g: str = GeneralUtilities.empty_string
        additional_arguments_f: str = GeneralUtilities.empty_string
        general_argument = f' --overwrite_verbosity {str(verbosity)} --overwrite_targetenvironmenttype {target_environmenttype}'

        c_additionalargumentsfile_argument = GeneralUtilities.empty_string

        if is_pre_merge:
            general_argument = general_argument+" --overwrite_is_pre_merge true"
            GeneralUtilities.write_message_to_stdout("This is a pre-merge-build")

        if assume_dependent_codeunits_are_already_built:
            c_additionalargumentsfile_argument = c_additionalargumentsfile_argument+" --overwrite_assume_dependent_codeunits_are_already_built true"
            diagnostic = False
            if diagnostic:
                GeneralUtilities.write_message_to_stdout("Assume dependent codeunits are already built")

        if additional_arguments_file is not None:
            config = configparser.ConfigParser()
            config.read(additional_arguments_file)
            section_name = f"{codeunit_name}_Configuration"
            if config.has_option(section_name, "ArgumentsForCommonTasks"):
                additional_arguments_c = " " + config.get(section_name, "ArgumentsForCommonTasks")
            if config.has_option(section_name, "ArgumentsForBuild"):
                additional_arguments_b = " " + config.get(section_name, "ArgumentsForBuild")
            if config.has_option(section_name, "ArgumentsForRunTestcases"):
                additional_arguments_r = " " + config.get(section_name, "ArgumentsForRunTestcases")
            if config.has_option(section_name, "ArgumentsForLinting"):
                additional_arguments_l = " " + config.get(section_name, "ArgumentsForLinting")
            if config.has_option(section_name, "ArgumentsForGenerateReference"):
                additional_arguments_g = " " + config.get(section_name, "ArgumentsForGenerateReference")
            if config.has_option(section_name, "ArgumentsForOnFinish"):
                additional_arguments_f = " " + config.get(section_name, "ArgumentsForOnFinish")
            c_additionalargumentsfile_argument = f' --overwrite_additionalargumentsfile "{additional_arguments_file}"'

        GeneralUtilities.write_message_to_stdout('Run "CommonTasks.py"...')
        self.__sc.run_program("python", f"CommonTasks.py{additional_arguments_c}{general_argument}{c_additionalargumentsfile_argument}", other_folder, verbosity=verbosity_for_executed_programs, throw_exception_if_exitcode_is_not_zero=True, print_live_output=2 < verbosity)
        self.verify_artifact_exists(codeunit_folder, dict[str, bool]({"Changelog": False, "License": True, "DiffReport": True}))

        GeneralUtilities.write_message_to_stdout('Run "Build.py"...')
        self.__sc.run_program("python", f"Build.py{additional_arguments_b}{general_argument}", build_folder, verbosity=verbosity_for_executed_programs, throw_exception_if_exitcode_is_not_zero=True, print_live_output=2 < verbosity)

        artifacts = {"BuildResult_.+": True, "BOM": False, "SourceCode": True}
        if self.codeunit_has_testable_sourcecode(codeunit_file):
            artifacts["CodeAnalysisResult"] = False
        self.verify_artifact_exists(codeunit_folder, dict[str, bool](artifacts))

        codeunit_has_testable_sourcecode = self.codeunit_has_testable_sourcecode(codeunit_file)
        if codeunit_has_testable_sourcecode:
            GeneralUtilities.write_message_to_stdout('Run "RunTestcases.py"...')
            self.__sc.run_program("python", f"RunTestcases.py{additional_arguments_r}{general_argument}", quality_folder, verbosity=verbosity_for_executed_programs, throw_exception_if_exitcode_is_not_zero=True, print_live_output=2 < verbosity)
            self.verify_artifact_exists(codeunit_folder, dict[str, bool]({"TestCoverage": True, "TestCoverageReport": False}))

        GeneralUtilities.write_message_to_stdout('Run "Linting.py"...')
        self.__sc.run_program("python", f"Linting.py{additional_arguments_l}{general_argument}", quality_folder, verbosity=verbosity_for_executed_programs, throw_exception_if_exitcode_is_not_zero=True, print_live_output=2 < verbosity)
        self.verify_artifact_exists(codeunit_folder, dict[str, bool]())

        GeneralUtilities.write_message_to_stdout('Run "GenerateReference.py"...')
        self.__sc.run_program("python", f"GenerateReference.py{additional_arguments_g}{general_argument}", reference_folder, verbosity=verbosity_for_executed_programs, throw_exception_if_exitcode_is_not_zero=True, print_live_output=2 < verbosity)
        self.verify_artifact_exists(codeunit_folder, dict[str, bool]({"Reference": True}))

        if os.path.isfile(os.path.join(other_folder, "OnBuildingFinished.py")):
            GeneralUtilities.write_message_to_stdout('Run "OnBuildingFinished.py"...')
            self.__sc.run_program("python", f"OnBuildingFinished.py{additional_arguments_f}{general_argument}", other_folder, verbosity=verbosity_for_executed_programs, throw_exception_if_exitcode_is_not_zero=True, print_live_output=2 < verbosity)

        artifacts_folder = os.path.join(codeunit_folder, "Other", "Artifacts")
        artifactsinformation_file = os.path.join(artifacts_folder, f"{codeunit_name}.artifactsinformation.xml")
        codeunit_version = self.get_version_of_codeunit(codeunit_file)
        GeneralUtilities.ensure_file_exists(artifactsinformation_file)
        artifacts_list = []
        for artifact_folder in GeneralUtilities.get_direct_folders_of_folder(artifacts_folder):
            artifact_name = os.path.basename(artifact_folder)
            artifacts_list.append(f"        <cps:artifact>{artifact_name}</cps:artifact>")
        artifacts = '\n'.join(artifacts_list)
        moment = GeneralUtilities.datetime_to_string(now)
        # TODO implement usage of self.reference_latest_version_of_xsd_when_generating_xml
        GeneralUtilities.write_text_to_file(artifactsinformation_file, f"""<?xml version="1.0" encoding="UTF-8" ?>
<cps:artifactsinformation xmlns:cps="https://projects.aniondev.de/PublicProjects/Common/ProjectTemplates/-/tree/main/Conventions/RepositoryStructure/CommonProjectStructure" artifactsinformationspecificationversion="1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://raw.githubusercontent.com/anionDev/ProjectTemplates/main/Templates/Conventions/RepositoryStructure/CommonProjectStructure/artifactsinformation.xsd">
    <cps:name>{codeunit_name}</cps:name>
    <cps:version>{codeunit_version}</cps:version>
    <cps:timestamp>{moment}</cps:timestamp>
    <cps:targetenvironmenttype>{target_environmenttype}</cps:targetenvironmenttype>
    <cps:artifacts>
{artifacts}
    </cps:artifacts>
</cps:artifactsinformation>""")
        # TODO validate artifactsinformation_file against xsd
        GeneralUtilities.write_message_to_stdout(f"Finished building codeunit {codeunit_name} without errors.")

    def __ensure_changelog_file_is_added(self, repository_folder: str, version_of_project: str):
        changelog_file = os.path.join(repository_folder, "Other", "Resources", "Changelog", f"v{version_of_project}.md")
        if not os.path.isfile(changelog_file):
            GeneralUtilities.ensure_file_exists(changelog_file)
            GeneralUtilities.write_text_to_file(changelog_file, """# Release notes

## Changes

- Updated dependencies.
""")

    @GeneralUtilities.check_arguments
    def generic_update_dependencies(self, repository_folder: str, verbosity: int = 1):
        # Prepare
        GeneralUtilities.write_message_to_stdout("Update dependencies...")
        self.__sc.assert_is_git_repository(repository_folder)
        codeunits = self.get_codeunits(repository_folder)
        update_dependencies_script_filename = "UpdateDependencies.py"
        target_environmenttype = "QualityCheck"
        project_name: str = os.path.basename(repository_folder)
        GeneralUtilities.assert_condition(not self.__sc.git_repository_has_uncommitted_changes(repository_folder), "There are uncommitted changes in the repository.")
        self.build_codeunits(repository_folder, target_environmenttype=target_environmenttype, do_git_clean_when_no_changes=True, note="Prepare dependency-update")  # Required because updating dependencies is not always possible for not-yet-built codeunits (depends on the programming language or package manager)

        # update dependencies of resources
        global_scripts_folder = os.path.join(repository_folder, "Other", "Scripts")
        if os.path.isfile(os.path.join(global_scripts_folder, update_dependencies_script_filename)):
            self.__sc.run_program("python", update_dependencies_script_filename, global_scripts_folder, print_live_output=True)
            version_of_project = self.get_version_of_project(repository_folder)
            self.__ensure_changelog_file_is_added(repository_folder, version_of_project)
            GeneralUtilities.write_message_to_stdout(f"Updated global dependencies of {project_name}.")
            self.build_codeunits(repository_folder, verbosity, "QualityCheck", None, False, None, [], False, "Build codeunits due to updated product-wide dependencies")

        # update dependencies of codeunits
        for codeunit in codeunits:
            codeunit_file = os.path.join(repository_folder, codeunit, f"{codeunit}.codeunit.xml")
            codeunit_has_updatable_dependencies = self.codeunit_has_updatable_dependencies(codeunit_file)
            codeunit_folder: str = os.path.join(repository_folder, codeunit)
            self.build_codeunit(codeunit_folder, verbosity, "QualityCheck", None, False, None, False, [])
            if codeunit_has_updatable_dependencies:
                codeunit_folder = os.path.join(repository_folder, codeunit)
                update_dependencies_script_folder = os.path.join(codeunit_folder, "Other")
                GeneralUtilities.ensure_directory_exists(os.path.join(update_dependencies_script_folder, "Resources", "CodeAnalysisResult"))
                self.__sc.run_program("python", update_dependencies_script_filename, update_dependencies_script_folder, verbosity, print_live_output=True)
                if self.__sc.git_repository_has_uncommitted_changes(repository_folder):
                    version_of_project = self.get_version_of_project(repository_folder)
                    self.__ensure_changelog_file_is_added(repository_folder, version_of_project)
                    GeneralUtilities.write_message_to_stdout(f"Updated dependencies in codeunit {codeunit}.")

        self.build_codeunits(repository_folder, verbosity, "QualityCheck", None, False, None, [], False, "Build all codeunits due to updated dependencies")
        self.__sc.git_commit(repository_folder, "Updated dependencies")

    class GenericPrepareNewReleaseArguments:
        current_file: str
        product_name: str
        commandline_arguments: list[str]

        def __init__(self, current_file: str, product_name: str, commandline_arguments: list[str]):
            self.current_file = current_file
            self.product_name = product_name
            self.commandline_arguments = commandline_arguments

    @GeneralUtilities.check_arguments
    def generic_prepare_new_release(self, generic_prepare_new_release_arguments: GenericPrepareNewReleaseArguments):
        GeneralUtilities.write_message_to_stdout(f"Prepare release for {generic_prepare_new_release_arguments.product_name}.")

        # constants
        folder_of_this_file = os.path.dirname(generic_prepare_new_release_arguments.current_file)
        build_repository_folder = GeneralUtilities.resolve_relative_path("../..", folder_of_this_file)
        self.__sc.assert_is_git_repository(build_repository_folder)

        repository_folder = GeneralUtilities.resolve_relative_path(f"../../Submodules/{generic_prepare_new_release_arguments.product_name}", folder_of_this_file)
        self.__sc.assert_is_git_repository(repository_folder)
        reference_folder = repository_folder+"Reference"
        self.__sc.assert_is_git_repository(reference_folder)
        verbosity: int = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(generic_prepare_new_release_arguments.commandline_arguments, 1)

        merge_source_branch = "other/next-release"  # maybe this should be configurable
        main_branch = "main"  # maybe this should be configurable

        # prepare
        self.assert_no_uncommitted_changes(repository_folder)
        self.assert_no_uncommitted_changes(reference_folder)
        self.assert_no_uncommitted_changes(build_repository_folder)
        self.__sc.git_checkout(build_repository_folder, "main", True)
        self.__sc.git_checkout(repository_folder, merge_source_branch, True)
        self.__sc.git_checkout(reference_folder, "main", True)
        self.assert_no_uncommitted_changes(repository_folder)
        self.assert_no_uncommitted_changes(reference_folder)
        self.__sc.git_commit(build_repository_folder, "Updated submodules")

        if "--dependencyupdate" in generic_prepare_new_release_arguments.commandline_arguments:
            GeneralUtilities.write_message_to_stdout("Debug: Update dependencies...")
            self.generic_update_dependencies(repository_folder)
            self.assert_no_uncommitted_changes(repository_folder)
        else:
            GeneralUtilities.write_message_to_stdout("Debug: Dependency-update skipped.")

        GeneralUtilities.write_message_to_stdout("Check reference-repository...")
        now = GeneralUtilities.get_now()
        for unsupported_version in self.get_unsupported_versions(repository_folder, now):
            outdated_reference_folder = f"{reference_folder}/ReferenceContent/v{unsupported_version[0]}"
            GeneralUtilities.ensure_directory_does_not_exist(outdated_reference_folder)
        self.__sc.git_commit(reference_folder, "Removed reference of outdated versions.")

        merge_source_branch_commit_id = self.__sc.git_get_commit_id(repository_folder, merge_source_branch)
        main_branch_commit_id = self.__sc.git_get_commit_id(repository_folder, main_branch)
        if merge_source_branch_commit_id == main_branch_commit_id:
            GeneralUtilities.write_message_to_stdout("Release will not be prepared because there are no changes which can be released.")
        else:
            self.merge_to_main_branch(repository_folder, merge_source_branch, verbosity=verbosity, fast_forward_source_branch=True)
            self.__sc.git_commit(build_repository_folder, "Updated submodule due to merge to main-branch.")
        GeneralUtilities.write_message_to_stdout(f"Finished prepare release for {generic_prepare_new_release_arguments.product_name}.")

    class GenericCreateReleaseArguments():
        current_file: str
        product_name: str
        common_remote_name: str
        artifacts_target_folder: str
        commandline_arguments: list[str]

        def __init__(self, current_file: str, product_name: str, common_remote_name: str, artifacts_target_folder: str, commandline_arguments: list[str]):
            self.current_file = current_file
            self.product_name = product_name
            self.common_remote_name = common_remote_name
            self.artifacts_target_folder = artifacts_target_folder
            self.commandline_arguments = commandline_arguments

    @GeneralUtilities.check_arguments
    def generic_create_release(self, generic_create_release_arguments: GenericCreateReleaseArguments) -> tuple[bool, str]:
        GeneralUtilities.write_message_to_stdout(f"Create release for {generic_create_release_arguments.product_name}.")
        folder_of_this_file = os.path.dirname(generic_create_release_arguments.current_file)
        build_repository_folder = GeneralUtilities.resolve_relative_path("../..", folder_of_this_file)
        repository_folder_name = generic_create_release_arguments.product_name
        repository_folder = GeneralUtilities.resolve_relative_path(f"../../Submodules/{generic_create_release_arguments.product_name}", folder_of_this_file)
        self.__sc.assert_is_git_repository(repository_folder)

        merge_source_branch = "main"  # TODO make this configurable
        main_branch = "stable"  # TODO make this configurable

        additional_arguments_file = os.path.join(folder_of_this_file, "AdditionalArguments.configuration")
        verbosity: int = TasksForCommonProjectStructure.get_verbosity_from_commandline_arguments(generic_create_release_arguments.commandline_arguments, 1)
        createReleaseConfiguration: CreateReleaseConfiguration = CreateReleaseConfiguration(generic_create_release_arguments.product_name, generic_create_release_arguments.common_remote_name, generic_create_release_arguments.artifacts_target_folder, folder_of_this_file, verbosity, repository_folder, additional_arguments_file, repository_folder_name)

        merge_source_branch_commit_id = self.__sc.git_get_commit_id(repository_folder, merge_source_branch)
        main_branch_commit_id = self.__sc.git_get_commit_id(repository_folder, main_branch)
        if merge_source_branch_commit_id == main_branch_commit_id:
            GeneralUtilities.write_message_to_stdout("Release will not be done because there are no changes which can be released.")
            return False, None
        else:
            self.__sc.git_checkout(repository_folder, merge_source_branch)
            reference_repo: str = os.path.join(build_repository_folder, "Submodules", f"{generic_create_release_arguments.product_name}Reference")
            self.__sc.git_commit(reference_repo, "Updated reference")
            self.__sc.git_push_with_retry(reference_repo, generic_create_release_arguments.common_remote_name, "main", "main")
            self.__sc.git_commit(build_repository_folder, "Updated submodule")

            # create release
            new_version = self.merge_to_stable_branch(generic_create_release_arguments.current_file, createReleaseConfiguration)
            GeneralUtilities.write_message_to_stdout(f"Finished create release for {generic_create_release_arguments.product_name}.")
            return True, new_version

    class UpdateHTTPDocumentationArguments:
        current_file: str
        product_name: str
        common_remote_name: str
        new_project_version: str
        reference_repository_name: str
        commandline_arguments: list[str]
        main_branch_name: str

        def __init__(self, current_file: str, product_name: str, common_remote_name: str, new_project_version: str, reference_repository_name: str, commandline_arguments: list[str]):
            self.current_file = current_file
            self.product_name = product_name
            self.common_remote_name = common_remote_name
            self.new_project_version = new_project_version
            self.reference_repository_name = reference_repository_name
            self.commandline_arguments = commandline_arguments
            self.main_branch_name = "main"

    @GeneralUtilities.check_arguments
    def create_changelog_entry(self, repositoryfolder: str, message: str, commit: bool, force: bool):
        self.__sc.assert_is_git_repository(repositoryfolder)
        random_file = os.path.join(repositoryfolder, str(uuid.uuid4()))
        if force and not self.__sc.git_repository_has_uncommitted_changes(repositoryfolder):
            GeneralUtilities.ensure_file_exists(random_file)
        current_version = self.get_version_of_project(repositoryfolder)
        changelog_file = os.path.join(repositoryfolder, "Other", "Resources", "Changelog", f"v{current_version}.md")
        if os.path.isfile(changelog_file):
            GeneralUtilities.write_message_to_stdout(f"Changelog-file '{changelog_file}' already exists.")
        else:
            GeneralUtilities.ensure_file_exists(changelog_file)
            GeneralUtilities.write_text_to_file(changelog_file, f"""# Release notes

## Changes

- {message}
""")
        GeneralUtilities.ensure_file_does_not_exist(random_file)
        if commit:
            self.__sc.git_commit(repositoryfolder, f"Added changelog-file for v{current_version}.")

    @GeneralUtilities.check_arguments
    def update_http_documentation(self, update_http_documentation_arguments: UpdateHTTPDocumentationArguments):
        GeneralUtilities.write_message_to_stdout(f"Update HTTP-documentation for {update_http_documentation_arguments.product_name}...")
        folder_of_this_file = str(os.path.dirname(update_http_documentation_arguments.current_file))

        ref_repo = GeneralUtilities.resolve_relative_path(f"../../Submodules/{update_http_documentation_arguments.reference_repository_name}", folder_of_this_file)
        self.__sc.assert_is_git_repository(ref_repo)
        self.__sc.git_checkout(ref_repo, update_http_documentation_arguments.main_branch_name)

        # update reference
        target = os.path.join(ref_repo, "Reference", update_http_documentation_arguments.product_name)
        GeneralUtilities.ensure_directory_does_not_exist(target)
        shutil.copytree(GeneralUtilities.resolve_relative_path(f"../../Submodules/{update_http_documentation_arguments.product_name}Reference/ReferenceContent", folder_of_this_file), target)
        self.__sc.git_commit(ref_repo, f"Added reference of {update_http_documentation_arguments.product_name} v{update_http_documentation_arguments.new_project_version}")

        # Sync reference-repository
        self.__sc.git_fetch(ref_repo, update_http_documentation_arguments.common_remote_name)
        self.__sc.git_merge(ref_repo, update_http_documentation_arguments.common_remote_name+"/"+update_http_documentation_arguments.main_branch_name, update_http_documentation_arguments.main_branch_name)
        self.__sc.git_checkout(ref_repo, update_http_documentation_arguments.main_branch_name)
        self.__sc.git_push_with_retry(ref_repo, update_http_documentation_arguments.common_remote_name, update_http_documentation_arguments.main_branch_name, update_http_documentation_arguments.main_branch_name)
        self.__sc.git_commit(GeneralUtilities.resolve_relative_path("../..", folder_of_this_file), f"Updated content of {update_http_documentation_arguments.product_name} v{update_http_documentation_arguments.new_project_version} in {update_http_documentation_arguments.reference_repository_name}-submodule")

    @GeneralUtilities.check_arguments
    def install_requirementstxt_for_codeunit(self, codeunit_folder: str, verbosity: int):
        self.__sc.install_requirementstxt_file(codeunit_folder+"/Other/requirements.txt", verbosity)

    @GeneralUtilities.check_arguments
    def install_requirementstxt_for_repository(self, repository_folder: str, verbosity: int):
        self.__sc.install_requirementstxt_file(repository_folder+"/Other/requirements.txt", verbosity)

    @GeneralUtilities.check_arguments
    def update_submodule(self, repository_folder: str, submodule_name: str, local_branch: str = "main", remote_branch: str = "main", remote: str = "origin"):
        submodule_folder = GeneralUtilities.resolve_relative_path("Other/Resources/Submodules/"+submodule_name, repository_folder)
        self.__sc.git_fetch(submodule_folder, remote)
        self.__sc.git_checkout(submodule_folder, local_branch)
        self.__sc.git_pull(submodule_folder, remote, local_branch, remote_branch, True)
        current_version = self.__sc.get_semver_version_from_gitversion(repository_folder)
        changelog_file = os.path.join(repository_folder, "Other", "Resources", "Changelog", f"v{current_version}.md")
        if (not os.path.isfile(changelog_file)):
            GeneralUtilities.ensure_file_exists(changelog_file)
            GeneralUtilities.write_text_to_file(changelog_file, """# Release notes

## Changes

- Updated geo-ip-database.
""")

    @GeneralUtilities.check_arguments
    def update_images_in_example(self, codeunit_folder: str):
        iu = ImageUpdater()
        iu.add_default_mapper()
        dockercomposefile: str = f"{codeunit_folder}\\Other\\Reference\\ReferenceContent\\Examples\\MinimalDockerComposeFile\\docker-compose.yml"
        excluded = ["opendms"]
        iu.update_all_services_in_docker_compose_file(dockercomposefile, VersionEcholon.LatestPatchOrLatestMinor, excluded)
        iu.check_for_newest_version(dockercomposefile, excluded)

    @GeneralUtilities.check_arguments
    def clone_repository_as_resource(self, local_repository_folder: str, remote_repository_link: str, resource_name: str, repository_subname: str = None) -> None:
        GeneralUtilities.write_message_to_stdout(f'Clone resource {resource_name}...')
        resrepo_commit_id_folder: str = os.path.join(local_repository_folder, "Other", "Resources", f"{resource_name}Version")
        resrepo_commit_id_file: str = os.path.join(resrepo_commit_id_folder, f"{resource_name}Version.txt")
        latest_version: str = GeneralUtilities.read_text_from_file(resrepo_commit_id_file)
        resrepo_data_folder: str = os.path.join(local_repository_folder, "Other", "Resources", resource_name).replace("\\", "/")
        current_version: str = None
        resrepo_data_version: str = os.path.join(resrepo_data_folder, f"{resource_name}Version.txt")
        if os.path.isdir(resrepo_data_folder):
            if os.path.isfile(resrepo_data_version):
                current_version = GeneralUtilities.read_text_from_file(resrepo_data_version)
        if (current_version is None) or (current_version != latest_version):
            target_folder: str = resrepo_data_folder
            if repository_subname is not None:
                target_folder = f"{resrepo_data_folder}/{repository_subname}"
            GeneralUtilities.ensure_folder_exists_and_is_empty(target_folder)
            self.__sc.run_program("git", f"clone --recurse-submodules {remote_repository_link} {target_folder}")
            self.__sc.run_program("git", f"checkout {latest_version}", target_folder)
            GeneralUtilities.write_text_to_file(resrepo_data_version, latest_version)

            git_folders: list[str] = []
            git_files: list[str] = []
            for dirpath, dirnames, filenames in os.walk(target_folder):
                for dirname in dirnames:
                    if dirname == ".git":
                        full_path = os.path.join(dirpath, dirname)
                        git_folders.append(full_path)
                for filename in filenames:
                    if filename == ".git":
                        full_path = os.path.join(dirpath, filename)
                        git_files.append(full_path)
            for git_folder in git_folders:
                if os.path.isdir(git_folder):
                    GeneralUtilities.ensure_directory_does_not_exist(git_folder)
            for git_file in git_files:
                if os.path.isfile(git_file):
                    GeneralUtilities.ensure_file_does_not_exist(git_file)

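Turning a cloned repository into a plain resource folder means dropping every .git directory and every .git file (submodules and worktrees use a .git file). A stdlib-only sketch of that cleanup, with the function name and folder illustrative:

import os
import shutil

def strip_git_metadata(folder: str) -> None:
    # Walk bottom-up so removing a .git directory does not disturb the iteration.
    for dirpath, dirnames, filenames in os.walk(folder, topdown=False):
        for dirname in dirnames:
            if dirname == ".git":
                shutil.rmtree(os.path.join(dirpath, dirname))
        for filename in filenames:
            if filename == ".git":
                os.remove(os.path.join(dirpath, filename))
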
    def set_latest_version_for_clone_repository_as_resource(self, resourcename: str, github_link: str, branch: str = "main"):
        current_file = str(Path(__file__).absolute())
        repository_folder = GeneralUtilities.resolve_relative_path("../../..", current_file)

        resrepo_commit_id_folder: str = os.path.join(repository_folder, "Other", "Resources", f"{resourcename}Version")
        resrepo_commit_id_file: str = os.path.join(resrepo_commit_id_folder, f"{resourcename}Version.txt")
        current_version: str = GeneralUtilities.read_text_from_file(resrepo_commit_id_file)

        stdOut = [l.split("\t") for l in GeneralUtilities.string_to_lines(self.__sc.run_program("git", f"ls-remote {github_link}")[1])]
        stdOut = [l for l in stdOut if l[1] == f"refs/heads/{branch}"]
        GeneralUtilities.assert_condition(len(stdOut) == 1)
        latest_version: str = stdOut[0][0]
        if current_version != latest_version:
            GeneralUtilities.write_text_to_file(resrepo_commit_id_file, latest_version)
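The commit id recorded as the resource's "latest version" comes from git ls-remote, whose output consists of tab-separated "<sha>\t<ref>" lines. A minimal sketch of the same lookup using subprocess directly instead of the ScriptCollection program runner (function name, URL handling, and branch default are illustrative):

import subprocess

def latest_commit_of_branch(repository_url: str, branch: str = "main") -> str:
    # Ask the remote only for the requested branch ref and return its commit sha.
    output = subprocess.run(
        ["git", "ls-remote", repository_url, f"refs/heads/{branch}"],
        check=True, capture_output=True, text=True).stdout
    lines = [line.split("\t") for line in output.splitlines() if line.strip()]
    assert len(lines) == 1, f"Expected exactly one match for refs/heads/{branch}"
    return lines[0][0]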