qubership-pipelines-common-library 0.2.6__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. qubership_pipelines_common_library/v1/execution/exec_command.py +52 -1
  2. qubership_pipelines_common_library/v1/execution/exec_info.py +4 -0
  3. qubership_pipelines_common_library/v1/github_client.py +9 -0
  4. qubership_pipelines_common_library/v1/gitlab_client.py +170 -10
  5. qubership_pipelines_common_library/v1/utils/utils_file.py +17 -0
  6. qubership_pipelines_common_library/v2/__init__.py +0 -0
  7. qubership_pipelines_common_library/v2/extensions/pipeline_data_importer.py +24 -0
  8. qubership_pipelines_common_library/v2/github/__init__.py +0 -0
  9. qubership_pipelines_common_library/v2/github/github_client.py +5 -0
  10. qubership_pipelines_common_library/v2/github/github_pipeline_data_importer.py +21 -0
  11. qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py +175 -0
  12. qubership_pipelines_common_library/v2/github/safe_github_client.py +24 -0
  13. qubership_pipelines_common_library/v2/gitlab/__init__.py +0 -0
  14. qubership_pipelines_common_library/v2/gitlab/custom_extensions.py +101 -0
  15. qubership_pipelines_common_library/v2/gitlab/gitlab_client.py +36 -0
  16. qubership_pipelines_common_library/v2/gitlab/gitlab_pipeline_data_importer.py +26 -0
  17. qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py +195 -0
  18. qubership_pipelines_common_library/v2/gitlab/safe_gitlab_client.py +32 -0
  19. qubership_pipelines_common_library/v2/podman/__init__.py +0 -0
  20. qubership_pipelines_common_library/v2/podman/podman_command.md +172 -0
  21. qubership_pipelines_common_library/v2/podman/podman_command.py +311 -0
  22. qubership_pipelines_common_library/v2/sops/sops_client.py +116 -0
  23. qubership_pipelines_common_library/v2/utils/crypto_utils.py +48 -0
  24. qubership_pipelines_common_library/v2/utils/extension_utils.py +22 -0
  25. qubership_pipelines_common_library/v2/utils/retry_decorator.py +93 -0
  26. {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.0.dist-info}/METADATA +1 -1
  27. qubership_pipelines_common_library-2.0.0.dist-info/RECORD +52 -0
  28. qubership_pipelines_common_library-0.2.6.dist-info/RECORD +0 -32
  29. {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.0.dist-info}/WHEEL +0 -0
  30. {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,311 @@
+ import os, subprocess, time, uuid
+
+ from pathlib import Path
+ from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommand
+ from qubership_pipelines_common_library.v1.utils.utils_string import UtilsString
+
+
+ class PodmanRunImage(ExecutionCommand):
+     """
+     Executes a container using "podman run" command.
+
+     This command supports running containers with configurable execution parameters,
+     environment variable management, file mounting, and output extraction.
+
+     Input Parameters Structure (this structure is expected inside "input_params.params" block):
+     ```
+     {
+         "image": "docker.io/library/hello-world:latest", # REQUIRED: Container image to run
+         "command": "python -m pipelines_declarative_executor run --pipeline_dir=\"/WORK/EXEC_DIR\"", # OPTIONAL: Command to execute in container
+         "execution_config": { # ALL OF THESE ARE OPTIONAL
+             "working_dir": "/some/dir/inside/container", # Working directory inside container
+             "timeout": "600", # Maximum execution time in seconds
+             "operations_timeout": "15", # Timeout for operations like file copying
+             "remove_container": True, # Whether to remove container after execution
+             "save_stdout_to_logs": True, # Save container stdout to execution logs
+             "save_stdout_to_files": True, # Save container stdout to output files
+             "save_stdout_to_params": False, # Save container stdout to output parameters
+             "expected_return_codes": "0,125", # Comma-separated list of acceptable exit codes
+             "additional_run_flags": "--cgroups=disabled", # Optional string of flags that will be added to "podman run" command
+         },
+         "before_script": {
+             "mounts": { # Filesystem mounts, "host_path: container_path"
+                 "output_files": "/WORK",
+                 "prepared_data": "/CONFIGS"
+             },
+             "env_vars": {
+                 "explicit": { # Direct environment variable assignment
+                     "PIPELINES_DECLARATIVE_EXECUTOR_ENCRYPT_OUTPUT_SECURE_PARAMS": False
+                 },
+                 "env_files": [ # Environment files on host to load and pass into container
+                     "../CONFIGS/sample.env"
+                 ],
+                 "pass_via_file": { # Sensitive vars passed via temp file
+                     "SOMETHING_VERY_SECURE": "PASSWORD"
+                 },
+                 "host_prefixes": [ # Host environment variable prefixes to pass through. Can use "*" to pass everything from host.
+                     "SOME_PREFIX_*"
+                 ]
+             }
+         },
+         "after_script": {
+             "copy_files_to_host": { # Copy files from container to host after execution, "host_path: container_path"
+                 "output_files/report.json": "/WORK/EXEC_DIR/pipeline_state/pipeline_ui_view.json",
+                 "output_files/pipeline_state": "/WORK/EXEC_DIR/pipeline_state",
+             },
+             "extract_params_from_files": { # OPTIONAL: Extract parameters from container files. Supports JSON, YAML, and ENV files
+                 "SOME_FILE_IN_CONTAINER": "SECTION_NAME_IN_PARAMS_WHERE_IT_WILL_BE_STORED",
+             }
+         }
+     }
+     ```
+
+     Output Parameters:
+         - params.execution_time: Total execution time in seconds
+         - params.return_code: Container exit code
+         - params.stdout: Container stdout (if save_stdout_to_params enabled)
+         - params.stderr: Container stderr (if save_stdout_to_params enabled)
+         - params.extracted_output.*: Extracted parameters from files (if extract_params_from_files configured)
+
+     Notes:
+         - The command automatically handles container lifecycle including start, execution, and cleanup
+         - All host-paths (including mount paths) are resolved relative to context directory.
+     """
+
+     def _validate(self):
+         names = [
+             "paths.input.params",
+             "paths.output.params",
+             "paths.output.files",
+             "params.image",
+         ]
+         if not self.context.validate(names):
+             return False
+
+         # Check if podman is available
+         try:
+             subprocess.run(["podman", "--version"], capture_output=True, check=True)
+         except (subprocess.CalledProcessError, FileNotFoundError):
+             self.context.logger.error("Podman is not available on this system. Please install podman to use this command.")
+             return False
+
+         # Setup defaults & convert values
+         self.image = self.context.input_param_get("params.image")
+         self.command = self.context.input_param_get("params.command")
+
+         # execution_config
+         self.working_dir = self.context.input_param_get("params.execution_config.working_dir")
+         self.timeout = float(self.context.input_param_get("params.execution_config.timeout", 60))
+         self.operations_timeout = float(self.context.input_param_get("params.execution_config.operations_timeout", 15))
+         self.remove_container = UtilsString.convert_to_bool(self.context.input_param_get("params.execution_config.remove_container", True))
+         self.save_stdout_to_logs = UtilsString.convert_to_bool(self.context.input_param_get("params.execution_config.save_stdout_to_logs", True))
+         self.save_stdout_to_files = UtilsString.convert_to_bool(self.context.input_param_get("params.execution_config.save_stdout_to_files", True))
+         self.save_stdout_to_params = UtilsString.convert_to_bool(self.context.input_param_get("params.execution_config.save_stdout_to_params", False))
+         self.expected_return_codes = [int(num) for num in self.context.input_param_get("params.execution_config.expected_return_codes", "0").split(',')]
+         self.additional_run_flags = self.context.input_param_get("params.execution_config.additional_run_flags")
+
+         # before_script
+         self.mounts_config = self.context.input_param_get("params.before_script.mounts", {})
+         self.env_vars_config = self.context.input_param_get("params.before_script.env_vars", {})
+
+         # after_script
+         self.copy_files_config = self.context.input_param_get("params.after_script.copy_files_to_host", {})
+         self.extract_params_config = self.context.input_param_get("params.after_script.extract_params_from_files", {})
+
+         # Get base paths
+         self.context_dir_path = Path(os.path.dirname(self.context.context_path))
+         self.input_params_path = Path(self.context.input_param_get("paths.input.params"))
+         self.output_params_path = Path(self.context.input_param_get("paths.output.params"))
+         self.output_files_path = Path(self.context.input_param_get("paths.output.files"))
+         self.container_name = f"podman_{str(uuid.uuid4())}"
+         return True
+
+     def _run_sp_command(self, command, timeout=None):
+         return subprocess.run(command, capture_output=True, text=True,
+                               timeout=timeout if timeout else self.timeout,
+                               cwd=self.context_dir_path)
+
+     def _build_podman_command(self) -> list[str]:
+         cmd = ["podman", "run", "--name", self.container_name]
+
+         if self.additional_run_flags:
+             import shlex
+             cmd.extend(shlex.split(self.additional_run_flags))
+
+         if self.working_dir:
+             cmd.extend(["--workdir", self.working_dir])
+
+         if self.env_vars_config:
+             cmd.extend(self._build_command_env_var_args())
+
+         for host_path, container_path in self.mounts_config.items():
+             cmd.extend(["--mount", f"type=bind,source={host_path},target={container_path}"])
+
+         cmd.append(self.image)
+
+         if self.command:
+             import shlex
+             cmd.extend(shlex.split(self.command))
+
+         return cmd
+
+     def _build_command_env_var_args(self) -> list[str]:
+         args = []
+         for key, value in self.env_vars_config.get("explicit", {}).items():
+             args.extend(["--env", f"{key}={value}"])
+
+         for prefix in self.env_vars_config.get("host_prefixes", []):
+             args.extend(["--env", f"{prefix}"])
+
+         for env_file in self.env_vars_config.get("env_files", []):
+             args.extend(["--env-file", f"{env_file}"])
+
+         if self.env_vars_config.get("pass_via_file"):
+             env_file_path = self.context_dir_path.joinpath("temp").joinpath("temp.env")
+             env_file_path.parent.mkdir(parents=True, exist_ok=True)
+             with open(env_file_path, 'w') as f:
+                 for key, value in self.env_vars_config["pass_via_file"].items():
+                     f.write(f"{key}={value}\n")
+             args.extend(["--env-file", str(env_file_path)])
+
+         return args
+
+     def _copy_files_from_container(self):
+         for host_path, container_path in self.copy_files_config.items():
+             full_host_path = self.context_dir_path.joinpath(host_path)
+             full_host_path.parent.mkdir(parents=True, exist_ok=True)
+
+             copy_command = ["podman", "cp", f"{self.container_name}:{container_path}", str(full_host_path)]
+             try:
+                 copy_result = self._run_sp_command(copy_command, self.operations_timeout)
+                 if copy_result.returncode != 0:
+                     self.context.logger.warning(f"Failed to copy {container_path} to {host_path}: {copy_result.stderr}")
+                 else:
+                     self.context.logger.debug(f"Copied {container_path} to {host_path}")
+             except subprocess.TimeoutExpired:
+                 self.context.logger.warning(f"Copy command timed out after {self.operations_timeout} seconds")
+
+     def _extract_params_from_container(self):
+         import tempfile
+         with tempfile.TemporaryDirectory() as temp_dir:
+             for container_file_path, output_key_base in self.extract_params_config.items():
+                 try:
+                     temp_file_path = Path(temp_dir) / Path(container_file_path).name
+                     copy_command = ["podman", "cp", f"{self.container_name}:{container_file_path}", str(temp_file_path)]
+                     copy_result = self._run_sp_command(copy_command, self.operations_timeout)
+                     if copy_result.returncode != 0:
+                         self.context.logger.warning(f"Failed to copy file {container_file_path} for params-extraction: {copy_result.stderr}")
+                         continue
+                     if not temp_file_path.exists():
+                         self.context.logger.warning(f"File {container_file_path} for params-extraction not found after copy")
+                         continue
+                     if file_content := self._parse_custom_file_params(temp_file_path):
+                         base_key = output_key_base if output_key_base else container_file_path.replace('/','_').replace('.', '_')
+                         self.context.output_param_set(f"params.extracted_output.{base_key}", file_content)
+                 except Exception as e:
+                     self.context.logger.warning(f"Failed to extract params from file {container_file_path}: {e}")
+
+     def _parse_custom_file_params(self, file_path: Path):
+         try:
+             try:
+                 import json
+                 with open(file_path, 'r', encoding='utf-8') as f:
+                     return json.load(f)
+             except:
+                 pass
+
+             try:
+                 import yaml
+                 with open(file_path, 'r', encoding='utf-8') as f:
+                     return yaml.safe_load(f)
+             except:
+                 pass
+
+             try:
+                 key_values = {}
+                 with open(file_path, 'r', encoding='utf-8') as f:
+                     for line in f:
+                         line = line.strip()
+                         if line and not line.startswith('#') and '=' in line:
+                             key, value = line.split('=', 1)
+                             key_values[key.strip()] = value.strip()
+                 return key_values if key_values else None
+             except:
+                 pass
+
+             with open(file_path, 'r', encoding='utf-8') as f:
+                 return f.read().strip()
+
+         except Exception as e:
+             self.context.logger.warning(f"Failed to parse custom-params file {file_path}: {e}")
+             return None
+
+     def _write_stdout_files(self, stdout: str, stderr: str):
+         (self.output_files_path / "container_stdout.txt").write_text(stdout, encoding='utf-8')
+         (self.output_files_path / "container_stderr.txt").write_text(stderr, encoding='utf-8')
+
+     def _process_output(self, output: subprocess.CompletedProcess):
+         self.context.output_param_set("params.execution_time", f"{self.execution_time:0.3f}s")
+         self.context.output_param_set("params.return_code", output.returncode)
+
+         if output.stdout and isinstance(output.stdout, bytes):
+             output.stdout = output.stdout.decode('utf-8', errors='replace')
+         if output.stderr and isinstance(output.stderr, bytes):
+             output.stderr = output.stderr.decode('utf-8', errors='replace')
+
+         if self.save_stdout_to_logs:
+             if output.stdout:
+                 self.context.logger.debug(f"Container stdout:\n{output.stdout}")
+             if output.stderr:
+                 self.context.logger.debug(f"Container stderr:\n{output.stderr}")
+
+         if self.save_stdout_to_files:
+             self._write_stdout_files(output.stdout, output.stderr)
+
+         if self.save_stdout_to_params:
+             self.context.output_param_set("params.stdout", output.stdout)
+             self.context.output_param_set("params.stderr", output.stderr)
+
+         if self.extract_params_config:
+             self._extract_params_from_container()
+
+         if self.copy_files_config:
+             self._copy_files_from_container()
+
+         if output.returncode not in self.expected_return_codes:
+             raise PodmanException(output.stderr)
+
+     def _execute(self):
+         self.context.logger.info(f"Running podman image \"{self.image}\"...")
+         start = time.perf_counter()
+         try:
+             output = self._run_sp_command(self._build_podman_command())
+             self.execution_time = time.perf_counter() - start
+             self.context.logger.info(
+                 f"Container finished with code: {output.returncode}"
+                 f"\nExecution time: {self.execution_time:0.3f}s"
+             )
+             self._process_output(output)
+
+         except subprocess.TimeoutExpired:
+             self.context.logger.error(f"Container execution timed out after {self.timeout} seconds")
+             raise
+
+         except PodmanException:
+             self.context.logger.error(f"Container exited with unexpected exitcode")
+             raise
+
+         except Exception as e:
+             self.context.logger.error(f"Container execution failed: {e}")
+             raise
+
+         finally:
+             if self.remove_container:
+                 remove_output = subprocess.run(["podman", "rm", "-f", self.container_name], capture_output=True)
+                 if remove_output.returncode != 0:
+                     self.context.logger.warning(f"Failed to remove container {self.container_name}:\n{remove_output.stdout}\n{remove_output.stderr}")
+             self.context.output_params_save()
+
+
+ class PodmanException(Exception):
+     pass
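
For orientation, a minimal `input_params.params` block for the new PodmanRunImage command could look like the sketch below. It only uses keys documented in the class docstring above; the image, command, and paths are placeholder values, not defaults shipped with the package.

```
params = {
    "image": "docker.io/library/alpine:latest",   # required
    "command": "echo hello",                      # optional command to run in the container
    "execution_config": {
        "timeout": "120",                         # seconds
        "expected_return_codes": "0",
    },
    "before_script": {
        "mounts": {"output_files": "/WORK"},      # host_path: container_path
    },
    "after_script": {
        "copy_files_to_host": {"output_files/out.txt": "/WORK/out.txt"},
    },
}
```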
@@ -0,0 +1,116 @@
+ import logging
+ import os
+ import shutil
+ import subprocess
+ import uuid
+ import yaml
+
+ from pathlib import Path
+
+
+ class SopsClient:
+
+     def __init__(self, sops_artifact_configs_folder_path: Path):
+         self.sops_artifact_configs_folder_path = sops_artifact_configs_folder_path
+         self.sops_executable = Path(os.environ.get("SOPS_EXECUTABLE", "/usr/local/bin/sops"))
+         self.logger = logging.getLogger()
+
+     def encrypt_content_by_path(
+             self, age_public_key: str, source_file_path_to_encrypt: Path, target_file_path: Path = None):
+         """
+         Encrypts a file and saves the result to the given path
+         Args:
+             age_public_key: age public key
+             source_file_path_to_encrypt: file to encrypt
+             target_file_path: file to save the encryption result to. If None, `source_file_path_to_encrypt` is used
+         """
+         if not self.sops_executable.exists():
+             self.logger.error(f"Sops executable doesn't exist. Can't encrypt file {source_file_path_to_encrypt}")
+             return
+
+         sops_config_path = self._get_prepared_sops_config_path(age_public_key)
+         encrypted_file_path = target_file_path
+         if not encrypted_file_path:
+             encrypted_file_path = source_file_path_to_encrypt
+
+         args = (self.sops_executable, "--config", sops_config_path, "encrypt", source_file_path_to_encrypt)
+         sops_encrypt_result = subprocess.run(args, capture_output=True, text=True)
+         if sops_encrypt_result.stderr:
+             self.logger.error(f"Error during encryption of {source_file_path_to_encrypt}. "
+                               f"Saving empty content into {encrypted_file_path}. "
+                               f"Error: {sops_encrypt_result.stderr}")
+
+             with open(encrypted_file_path, 'w') as encrypted_file:
+                 encrypted_file.write("")
+             self._remove_sops_config(sops_config_path.parent)
+             return
+
+         with open(encrypted_file_path, 'w') as encrypted_file:
+             encrypted_file.write(sops_encrypt_result.stdout)
+         self.logger.debug(f"Content {source_file_path_to_encrypt} was encrypted by sops. "
+                           f"Result saved into {encrypted_file_path}")
+         self._remove_sops_config(sops_config_path.parent)
+
+     def get_decrypted_content_by_path(self, age_private_key: str, source_file_path_to_decrypt: Path) -> str:
+         """
+         Decrypts the file at the given path
+         Args:
+             age_private_key: age private key
+             source_file_path_to_decrypt: file path to decrypt
+
+         Returns:
+             decrypted file content, or an empty string if an error occurs
+         """
+         if not self.sops_executable.exists():
+             self.logger.error(f"Sops executable doesn't exist. Can't decrypt file {source_file_path_to_decrypt}")
+             return ""
+         if not age_private_key:
+             self.logger.warning("sops_private_key is not defined, skipping decryption")
+             return ""
+         environment_variables = os.environ.copy()
+         environment_variables["SOPS_AGE_KEY"] = age_private_key.strip()
+         args = (self.sops_executable, "-d", source_file_path_to_decrypt)
+         sops_decrypt_result = subprocess.run(args, env=environment_variables, capture_output=True, text=True)
+         if sops_decrypt_result.stderr:
+             self.logger.error(f"Error during decryption of {source_file_path_to_decrypt}. Error: {sops_decrypt_result.stderr}")
+             return ""
+         self.logger.debug(f"Content {source_file_path_to_decrypt} was decrypted by sops")
+         return sops_decrypt_result.stdout
+
+     def _get_prepared_sops_config_path(self, age_public_key) -> Path:
+         """
+         Generates a `.sops.yaml` file for the given age public key.
+         Creates a `uuid.uuid4()` folder under `self.sops_artifact_configs_folder_path` to make the `.sops.yaml`
+         unique for this particular encryption
+         Args:
+             age_public_key: age public key
+
+         Returns:
+             path to the generated `.sops.yaml`
+         """
+         self.logger.debug(f"Preparing sops config for encryption")
+         sops_config_content = {
+             "creation_rules": [
+                 {
+                     "age": age_public_key
+                 }
+             ]
+         }
+         sops_config_path = self.sops_artifact_configs_folder_path.joinpath(str(uuid.uuid4()), ".sops.yaml")
+         sops_config_path.parent.mkdir(parents=True, exist_ok=True)
+         with open(sops_config_path, mode="w") as file:
+             yaml.dump(sops_config_content, file)
+         return sops_config_path
+
+     def _remove_sops_config(self, sops_config_folder: Path):
+         """
+         Removes the folder with the generated sops config
+         Args:
+             sops_config_folder: path to the folder with the sops config
+         """
+         self.logger.debug(f"Removing sops config")
+         if sops_config_folder.exists() and sops_config_folder.is_dir():
+             shutil.rmtree(sops_config_folder)
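
A rough usage sketch for the new SopsClient, assuming a sops binary is available at `SOPS_EXECUTABLE` (or `/usr/local/bin/sops`) and that the age key strings are supplied by the caller; the key values and paths below are placeholders:

```
from pathlib import Path
from qubership_pipelines_common_library.v2.sops.sops_client import SopsClient

client = SopsClient(sops_artifact_configs_folder_path=Path("/tmp/sops-configs"))
client.encrypt_content_by_path(
    age_public_key="age1...",                      # placeholder public key
    source_file_path_to_encrypt=Path("secrets.yaml"),
    target_file_path=Path("secrets.enc.yaml"),
)
plaintext = client.get_decrypted_content_by_path(
    age_private_key="AGE-SECRET-KEY-1...",         # placeholder private key
    source_file_path_to_decrypt=Path("secrets.enc.yaml"),
)
```

On encryption failure the client writes an empty target file and logs the error; on decryption failure it returns an empty string.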
@@ -0,0 +1,48 @@
+ class CryptoUtils:
+     EXCLUDE_KEYS = {"kind", "apiVersion"}
+
+     @staticmethod
+     def is_base64(s) -> bool:
+         try:
+             if isinstance(s, str):
+                 s_bytes = s.encode('utf-8')
+             else:
+                 s_bytes = s
+             import base64
+             return base64.b64encode(base64.b64decode(s_bytes)) == s_bytes
+         except Exception:
+             return False
+
+     @staticmethod
+     def get_base64_encrypted_str(s) -> str:
+         if isinstance(s, str):
+             s_bytes = s.encode('utf-8')
+         else:
+             s_bytes = s
+         import base64
+         return base64.b64encode(s_bytes).decode('utf-8')
+
+     @staticmethod
+     def mask_values(data, path=None):
+         if path is None:
+             path = []
+
+         if isinstance(data, dict):
+             return {
+                 key: (CryptoUtils.mask_values(value, path + [key])
+                       if not (len(path) == 0 and key in CryptoUtils.EXCLUDE_KEYS)
+                       else value)
+                 for key, value in data.items()
+             }
+         elif isinstance(data, list):
+             return [CryptoUtils.mask_values(item, path) for item in data]
+         else:
+             return "[MASKED]"
+
+     @staticmethod
+     def get_parameters_for_print(content: dict, need_mask: bool):
+         import yaml
+         if need_mask:
+             return yaml.dump(CryptoUtils.mask_values(content), default_flow_style=False)
+         else:
+             return yaml.dump(content, default_flow_style=False)
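
To illustrate the masking behaviour of CryptoUtils: top-level `kind` and `apiVersion` keys are preserved, while every other leaf value is replaced with `"[MASKED]"` before printing. The sample data below is illustrative only:

```
from qubership_pipelines_common_library.v2.utils.crypto_utils import CryptoUtils

data = {"kind": "Secret", "apiVersion": "v1", "credentials": {"user": "admin", "token": "abc"}}
print(CryptoUtils.get_parameters_for_print(data, need_mask=True))
# "kind" and "apiVersion" stay readable; "user" and "token" are printed as "[MASKED]"
```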
@@ -0,0 +1,22 @@
+ import importlib
+ from typing import Type, Any, Optional
+
+
+ class ExtensionLoader:
+     """Utility methods to create instance of a class by its classpath and validate its expected base class"""
+
+     @staticmethod
+     def load_class(class_path: str) -> Type[Any]:
+         try:
+             module_path, class_name = class_path.rsplit('.', 1)
+             module = importlib.import_module(module_path)
+             return getattr(module, class_name)
+         except (ImportError, AttributeError, ValueError) as e:
+             raise ImportError(f"Failed to load class {class_path}: {e}")
+
+     @staticmethod
+     def create_instance(class_path: str, expected_base_class: Optional[Type] = None, **kwargs) -> Any:
+         klass = ExtensionLoader.load_class(class_path)
+         if expected_base_class and not issubclass(klass, expected_base_class):
+             raise TypeError(f"Class {class_path} must inherit from {expected_base_class.__name__}")
+         return klass(**kwargs)
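
A small sketch of how ExtensionLoader can be used; the stdlib class here is only illustrative, in this library the classpath would typically point at an extension such as a pipeline data importer:

```
from collections import OrderedDict
from qubership_pipelines_common_library.v2.utils.extension_utils import ExtensionLoader

# Load a class by its dotted path and instantiate it, enforcing a base class.
instance = ExtensionLoader.create_instance(
    "collections.OrderedDict", expected_base_class=dict, a=1, b=2)
assert isinstance(instance, OrderedDict)
```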
@@ -0,0 +1,93 @@
+ import logging
+ import sys
+ import time
+ from functools import wraps
+ from typing import Callable
+
+
+ class RetryDecorator:
+
+     def __init__(self, condition_func, retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
+         self.condition_func = condition_func
+         self.retry_timeout_seconds = retry_timeout_seconds
+         self.retry_wait_seconds = retry_wait_seconds
+         self.logger = logging.getLogger()
+
+     def __call__(self, func: Callable) -> Callable:
+         @wraps(func)
+         def wrapper(*args, **kwargs):
+             self._update_retry_timeout_seconds_from_kwargs(kwargs, func)
+             self._update_retry_wait_seconds_from_kwargs(kwargs, func)
+
+             count_seconds = 0
+             retries = 0
+             last_log_time = time.perf_counter()
+             last_result = None
+             estimated_max_attempts = self.retry_timeout_seconds // self.retry_wait_seconds
+
+             while count_seconds < self.retry_timeout_seconds and not self.condition_func(last_result):
+                 try:
+                     last_result = func(*args, **kwargs)
+                     if self.condition_func(last_result):
+                         self.logger.debug(f"Function {func.__name__} successfully executed after {retries} attempts in {count_seconds}s")
+                         return last_result
+                     retries += 1
+
+                     now = time.perf_counter()
+                     if now - last_log_time >= 10:
+                         self._sleep_with_warning_log(
+                             f"Made [{retries} of {estimated_max_attempts}] retries with {self.retry_wait_seconds} seconds between attempts. Trying to execute func: {self.condition_func.__name__}. {count_seconds} of {self.retry_timeout_seconds} seconds left")
+                         last_log_time = now
+                     else:
+                         time.sleep(self.retry_wait_seconds)
+
+                 except Exception as e:
+                     retries += 1
+                     now = time.perf_counter()
+                     if now - last_log_time >= 10:
+                         self._process_exception_during_func_execution(e, count_seconds, func.__name__, retries, estimated_max_attempts)
+                         last_log_time = now
+                     else:
+                         time.sleep(self.retry_wait_seconds)
+
+                 finally:
+                     count_seconds += self.retry_wait_seconds
+
+             if self.condition_func(last_result):
+                 self.logger.debug(f"Function {func.__name__} successfully executed after {retries} attempts in {count_seconds}s")
+                 return last_result
+
+             self._exit_with_error_message(func.__name__)
+         return wrapper
+
+     def _sleep_with_warning_log(self, message):
+         self.logger.warning(message)
+         time.sleep(self.retry_wait_seconds)
+
+     def _update_retry_timeout_seconds_from_kwargs(self, kwargs, func):
+         kwargs_retry_timeout_seconds = kwargs.get('retry_timeout_seconds')
+         if kwargs_retry_timeout_seconds:
+             self.retry_timeout_seconds = kwargs_retry_timeout_seconds
+         else:
+             self.logger.debug(
+                 f"`retry_timeout_seconds` is not found in func {func.__name__} arguments. Using default value = {self.retry_timeout_seconds}")
+
+     def _update_retry_wait_seconds_from_kwargs(self, kwargs, func):
+         kwargs_retry_wait_seconds = kwargs.get('retry_wait_seconds')
+         if kwargs_retry_wait_seconds:
+             self.retry_wait_seconds = kwargs_retry_wait_seconds
+         else:
+             self.logger.debug(
+                 f"`retry_wait_seconds` is not found in func {func.__name__} arguments. Using default value = {self.retry_wait_seconds}")
+
+     def _process_exception_during_func_execution(self, exception, count_seconds, func_name, retries, estimated_max_attempts):
+         if count_seconds < self.retry_timeout_seconds:
+             self._sleep_with_warning_log(
+                 f"Made [{retries} of {estimated_max_attempts}] retries. Exception happened during function {func_name} execution, waiting {count_seconds} of {self.retry_timeout_seconds}. Exception: {exception}")
+         else:
+             self._exit_with_error_message(func_name)
+
+     def _exit_with_error_message(self, func_name):
+         self.logger.error(f"Can't execute function {func_name} in {self.retry_timeout_seconds} seconds")
+         sys.exit(1)
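
A usage sketch for RetryDecorator: the wrapped function is re-invoked until `condition_func` accepts its result or the timeout elapses, after which the process exits via `sys.exit(1)`. The timeout and wait values below are illustrative:

```
import random
from qubership_pipelines_common_library.v2.utils.retry_decorator import RetryDecorator

@RetryDecorator(condition_func=lambda result: result is not None,
                retry_timeout_seconds=30, retry_wait_seconds=2)
def poll_status():
    # Stand-in for a call that only sometimes returns a usable result.
    return "done" if random.random() > 0.8 else None

status = poll_status()
```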
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: qubership-pipelines-common-library
- Version: 0.2.6
+ Version: 2.0.0
  Summary: Qubership Pipelines common library
  License: Apache-2.0
  License-File: LICENSE
@@ -0,0 +1,52 @@
+ qubership_pipelines_common_library/__init__.py,sha256=91r6ljRCMIXiH1mE5cME45OstbTMJTicNEbTpGgJjQY,703
+ qubership_pipelines_common_library/v1/__init__.py,sha256=QczIlSYNOtXMuMWSznhV_BkXMM5KLn1wOogtlT2kcy0,598
+ qubership_pipelines_common_library/v1/artifactory_client.py,sha256=Gwf21BXUYNpKT_Y_wMyM07WlpDNTIBSUkSIsJlWfURg,4105
+ qubership_pipelines_common_library/v1/execution/__init__.py,sha256=QczIlSYNOtXMuMWSznhV_BkXMM5KLn1wOogtlT2kcy0,598
+ qubership_pipelines_common_library/v1/execution/exec_command.py,sha256=dezqVgaPG1rstl9U8MveqK-6ex-4waJ9Acfm0XBbBsA,5403
+ qubership_pipelines_common_library/v1/execution/exec_context.py,sha256=R9Kmb4t3QRXCJTMhC3qcPtxtyvCrIV037Ix9P_VD5YI,6055
+ qubership_pipelines_common_library/v1/execution/exec_context_file.py,sha256=kbuL9mA21qhaueVe6SWvI3OM49Ekrm8v1lj1FFspBq4,7397
+ qubership_pipelines_common_library/v1/execution/exec_info.py,sha256=c1ksds6c-NcegSH9SPknzFxd1iw8xT8qS2_aZaVZ7F0,4280
+ qubership_pipelines_common_library/v1/execution/exec_logger.py,sha256=rtSCLo3mqtwIc2S_tBs0uizehdthBGfygB1Vpwa-sRA,3102
+ qubership_pipelines_common_library/v1/git_client.py,sha256=uop4dREW0HoaAbGHSzp3P4vk1Hk-VrPK5RhAP3Hj51o,6100
+ qubership_pipelines_common_library/v1/github_client.py,sha256=okKc48tIB4G95gLYe6DgIyTYKK4joqrEQ-2-i0Yxywg,15058
+ qubership_pipelines_common_library/v1/gitlab_client.py,sha256=ZhjaMT9JMfE5Mc0XSYvXHA_vE2imE7sXzbsse-4oBJ0,15736
+ qubership_pipelines_common_library/v1/jenkins_client.py,sha256=VsD4KQNmLTeFvyVnY0m1xPv3s5bb-sNbgO6SwTJ2FfY,8597
+ qubership_pipelines_common_library/v1/kube_client.py,sha256=rbdc0Q2r6AhJ49FKr-15_1r9Uit4_6U68rWwGYDjdWc,12715
+ qubership_pipelines_common_library/v1/log_client.py,sha256=DTJ8aI_37l570RyolDC2cHaOkkccZWi7cFE6qYUuQeo,1514
+ qubership_pipelines_common_library/v1/maven_client.py,sha256=DbyPp6lh17op04GGeq2jIbk-SyVzCCHRcr2ox-eUv54,15054
+ qubership_pipelines_common_library/v1/minio_client.py,sha256=4KlkCJvtgGKQOujChxRtKrpoZVukooMLfj5D8C9CKC4,4343
+ qubership_pipelines_common_library/v1/utils/__init__.py,sha256=QczIlSYNOtXMuMWSznhV_BkXMM5KLn1wOogtlT2kcy0,598
+ qubership_pipelines_common_library/v1/utils/rest.py,sha256=MaCS6L6Khs_HaWoi3WNj9Go33d9zEVErLP5T8iVRyHA,3068
+ qubership_pipelines_common_library/v1/utils/utils.py,sha256=5PhXyFC1Zfuz0KDrWC9QgacTLVVk8zu0-6wxYS0bmzE,1865
+ qubership_pipelines_common_library/v1/utils/utils_aws.py,sha256=BPPnHBzPPXPqFijtAiw16sTPu1tFZjS95GkSMX_HdjA,808
+ qubership_pipelines_common_library/v1/utils/utils_cli.py,sha256=3b4SbAKUearf2d_W6S8kuMNaB_Up1Qiblm8Nc5DHEqE,3199
+ qubership_pipelines_common_library/v1/utils/utils_context.py,sha256=IlMFXGxS8zJw33Gu3SbOUcj88wquIkobBlWkdFbR7MA,3767
+ qubership_pipelines_common_library/v1/utils/utils_dictionary.py,sha256=6wGAoBmLzPGGqdtkoqU9RtMBYuOO-UkZsZDh7GzubjA,1365
+ qubership_pipelines_common_library/v1/utils/utils_file.py,sha256=E4RhpeCRhUt-EQ_6pUz-QEKkH8JidJWwYxZmrAvpHBk,2905
+ qubership_pipelines_common_library/v1/utils/utils_json.py,sha256=QczIlSYNOtXMuMWSznhV_BkXMM5KLn1wOogtlT2kcy0,598
+ qubership_pipelines_common_library/v1/utils/utils_string.py,sha256=Phx5ZXPRjhjg9AaSPx6WLX9zQvwJH1txslfnG3jJ43w,993
+ qubership_pipelines_common_library/v1/webex_client.py,sha256=JU_0NgLu_p6zgaUi-ixgZeFMlJaTAvXwrU1oA607Bv0,2997
+ qubership_pipelines_common_library/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ qubership_pipelines_common_library/v2/extensions/pipeline_data_importer.py,sha256=3I6hvqZy19kZsjhhqaluBFoaGBeDeF0DVOmXjYwibAA,784
+ qubership_pipelines_common_library/v2/github/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ qubership_pipelines_common_library/v2/github/github_client.py,sha256=8ZLFnJDXveoxS4-agiYTqv0sZFDT7FCvMRfIln51QnQ,142
+ qubership_pipelines_common_library/v2/github/github_pipeline_data_importer.py,sha256=BXq549psTvErvUYOrx38QkpglgZg-TH10TfGk0PcQxE,1035
+ qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py,sha256=calEWrmFAbEk6XS_-RDghqR4KwnTMLA_eecgKyDDois,11245
+ qubership_pipelines_common_library/v2/github/safe_github_client.py,sha256=tS6Huv5K_lkv4mKAY3Uw0PoG21kJkrXQ9e1NdGLvpws,1295
+ qubership_pipelines_common_library/v2/gitlab/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ qubership_pipelines_common_library/v2/gitlab/custom_extensions.py,sha256=cQX5H2VJJ7yqVAR9XsrXvDD6oiuHelAuu_QznU2s6UM,5628
+ qubership_pipelines_common_library/v2/gitlab/gitlab_client.py,sha256=OdAZOZbLwpfxLqvqDMT3uGdNxD1SfRKAWyemIr7lVss,1707
+ qubership_pipelines_common_library/v2/gitlab/gitlab_pipeline_data_importer.py,sha256=eWB92mGQl5L2XQpUnzpAPTVFm-p8k6H6LMF6T4VkR7o,1413
+ qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py,sha256=zMe1w4P1BlRcnCaKusA0Dv-muhYhP16Q5v5iH8z0hC0,11833
+ qubership_pipelines_common_library/v2/gitlab/safe_gitlab_client.py,sha256=Ptlmgw5OCO0S24B-fAVxSdBO1yxx0LadVTLZXyhOwuI,1813
+ qubership_pipelines_common_library/v2/podman/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ qubership_pipelines_common_library/v2/podman/podman_command.md,sha256=D4eA8RsVmRq5o4NIxpMLV7QcRkBR_dCf3A5cVsh0w3M,6490
+ qubership_pipelines_common_library/v2/podman/podman_command.py,sha256=kz5lGPzgih6PZNtHjr1fnzs0KYthY_WLgduBJAI2Kzo,15512
+ qubership_pipelines_common_library/v2/sops/sops_client.py,sha256=CzeNTlMb3UPTmzT0Lgooqj34qyUrLTDLuUp8o2Leua4,4948
+ qubership_pipelines_common_library/v2/utils/crypto_utils.py,sha256=zZ32IJY7WKzJEJNyZQVCPdWC4uujo6goR0MyzBAhn78,1504
+ qubership_pipelines_common_library/v2/utils/extension_utils.py,sha256=-OyT6xrIg-PdHHy2Y712rbOAB6Q7WXTqGwP7oVne4k4,965
+ qubership_pipelines_common_library/v2/utils/retry_decorator.py,sha256=Q0gXvOijOCF62z37qoRpYc3w0QPVkqk1Wg8PnW8musY,4455
+ qubership_pipelines_common_library-2.0.0.dist-info/METADATA,sha256=Jb38VttMg0aGH1QEoEKH0V_zC_yLg7yZknlqI2lKNnw,3017
+ qubership_pipelines_common_library-2.0.0.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ qubership_pipelines_common_library-2.0.0.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+ qubership_pipelines_common_library-2.0.0.dist-info/RECORD,,