qubership_pipelines_common_library-2.0.0-py3-none-any.whl → qubership_pipelines_common_library-2.0.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. qubership_pipelines_common_library/__init__.py +1 -1
  2. qubership_pipelines_common_library/v1/artifactory_client.py +1 -1
  3. qubership_pipelines_common_library/v1/execution/exec_command.py +11 -1
  4. qubership_pipelines_common_library/v1/execution/exec_context.py +6 -6
  5. qubership_pipelines_common_library/v1/execution/exec_context_file.py +1 -1
  6. qubership_pipelines_common_library/v1/execution/exec_logger.py +7 -5
  7. qubership_pipelines_common_library/v1/github_client.py +1 -1
  8. qubership_pipelines_common_library/v1/gitlab_client.py +11 -7
  9. qubership_pipelines_common_library/v1/jenkins_client.py +55 -18
  10. qubership_pipelines_common_library/v1/maven_client.py +2 -2
  11. qubership_pipelines_common_library/v1/minio_client.py +1 -1
  12. qubership_pipelines_common_library/v1/utils/rest.py +1 -1
  13. qubership_pipelines_common_library/v1/utils/utils.py +1 -1
  14. qubership_pipelines_common_library/v1/utils/utils_cli.py +53 -8
  15. qubership_pipelines_common_library/v1/utils/utils_dictionary.py +1 -1
  16. qubership_pipelines_common_library/v1/utils/utils_logging.py +52 -0
  17. qubership_pipelines_common_library/v2/artifacts_finder/__init__.py +0 -0
  18. qubership_pipelines_common_library/v2/artifacts_finder/artifact_finder.py +56 -0
  19. qubership_pipelines_common_library/v2/artifacts_finder/auth/__init__.py +0 -0
  20. qubership_pipelines_common_library/v2/artifacts_finder/auth/aws_credentials.py +106 -0
  21. qubership_pipelines_common_library/v2/artifacts_finder/auth/azure_credentials.py +72 -0
  22. qubership_pipelines_common_library/v2/artifacts_finder/auth/gcp_credentials.py +88 -0
  23. qubership_pipelines_common_library/v2/artifacts_finder/model/__init__.py +0 -0
  24. qubership_pipelines_common_library/v2/artifacts_finder/model/artifact.py +20 -0
  25. qubership_pipelines_common_library/v2/artifacts_finder/model/artifact_provider.py +35 -0
  26. qubership_pipelines_common_library/v2/artifacts_finder/model/credentials.py +16 -0
  27. qubership_pipelines_common_library/v2/artifacts_finder/model/credentials_provider.py +16 -0
  28. qubership_pipelines_common_library/v2/artifacts_finder/providers/__init__.py +0 -0
  29. qubership_pipelines_common_library/v2/artifacts_finder/providers/artifactory.py +52 -0
  30. qubership_pipelines_common_library/v2/artifacts_finder/providers/aws_code_artifact.py +79 -0
  31. qubership_pipelines_common_library/v2/artifacts_finder/providers/azure_artifacts.py +98 -0
  32. qubership_pipelines_common_library/v2/artifacts_finder/providers/gcp_artifact_registry.py +50 -0
  33. qubership_pipelines_common_library/v2/artifacts_finder/providers/nexus.py +41 -0
  34. qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py +3 -3
  35. qubership_pipelines_common_library/v2/gitlab/custom_extensions.py +1 -1
  36. qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py +4 -4
  37. qubership_pipelines_common_library/v2/jenkins/__init__.py +0 -0
  38. qubership_pipelines_common_library/v2/jenkins/custom_extensions.py +63 -0
  39. qubership_pipelines_common_library/v2/jenkins/jenkins_client.py +5 -0
  40. qubership_pipelines_common_library/v2/jenkins/jenkins_pipeline_data_importer.py +31 -0
  41. qubership_pipelines_common_library/v2/jenkins/jenkins_run_pipeline_command.py +165 -0
  42. qubership_pipelines_common_library/v2/jenkins/safe_jenkins_client.py +14 -0
  43. qubership_pipelines_common_library/v2/podman/podman_command.md +7 -1
  44. qubership_pipelines_common_library/v2/podman/podman_command.py +4 -4
  45. qubership_pipelines_common_library/v2/sops/sops_client.py +2 -2
  46. qubership_pipelines_common_library/v2/utils/retry_decorator.py +5 -5
  47. {qubership_pipelines_common_library-2.0.0.dist-info → qubership_pipelines_common_library-2.0.2.dist-info}/METADATA +5 -3
  48. qubership_pipelines_common_library-2.0.2.dist-info/RECORD +76 -0
  49. qubership_pipelines_common_library-2.0.0.dist-info/RECORD +0 -52
  50. {qubership_pipelines_common_library-2.0.0.dist-info → qubership_pipelines_common_library-2.0.2.dist-info}/WHEEL +0 -0
  51. {qubership_pipelines_common_library-2.0.0.dist-info → qubership_pipelines_common_library-2.0.2.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,41 @@
+ from pathlib import Path
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact import Artifact
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact_provider import ArtifactProvider
+
+
+ class NexusProvider(ArtifactProvider):
+
+     def __init__(self, registry_url: str, username: str = None, password: str = None, **kwargs):
+         """
+         Initializes this client to work with **Sonatype Nexus Repository** for maven artifacts.
+         Requires `username` and its `password` or `token`.
+         """
+         super().__init__(**kwargs)
+         self.registry_url = registry_url
+         if password:
+             from requests.auth import HTTPBasicAuth
+             self._session.auth = HTTPBasicAuth(username, password)
+
+     def download_artifact(self, resource_url: str, local_path: str | Path, **kwargs) -> None:
+         return self.generic_download(resource_url=resource_url, local_path=local_path)
+
+     def search_artifacts(self, artifact: Artifact, **kwargs) -> list[str]:
+         search_params = {
+             "maven.extension": artifact.extension,
+             "maven.artifactId": artifact.artifact_id,
+             **({"maven.groupId": artifact.group_id} if artifact.group_id else {}),
+         }
+         if artifact.is_snapshot():
+             search_params["maven.baseVersion"] = artifact.version
+         else:
+             search_params["version"] = artifact.version
+
+         response = self._session.get(url=f"{self.registry_url}/service/rest/v1/search/assets",
+                                      params=search_params,
+                                      timeout=self.timeout)
+         if response.status_code != 200:
+             raise Exception(f"Could not find '{artifact.artifact_id}' - search request returned {response.status_code}!")
+         return [result["downloadUrl"] for result in response.json()["items"]]
+
+     def get_provider_name(self) -> str:
+         return "nexus"
@@ -102,9 +102,9 @@ class GithubRunPipeline(ExecutionCommand):
          self.pipeline_branch = self.context.input_param_get("params.pipeline_branch")
          self.pipeline_params = self.context.input_param_get("params.pipeline_params", {})
          if not self.pipeline_params:
-             self.context.logger.info(f"Pipeline parameters were not specified. This means that pipeline will be started with its default values")
+             self.context.logger.info("Pipeline parameters were not specified. This means that pipeline will be started with its default values")
          if not isinstance(self.pipeline_params, dict):
-             self.context.logger.error(f"Pipeline parameters were not loaded correctly. Probably mistake in the params definition")
+             self.context.logger.error("Pipeline parameters were not loaded correctly. Probably mistake in the params definition")
              return False
          self.import_artifacts = UtilsString.convert_to_bool(self.context.input_param_get("params.import_artifacts", False))
          self.success_statuses = [x.strip() for x in self.context.input_param_get("params.success_statuses", ExecutionInfo.STATUS_SUCCESS).split(",")]
@@ -165,7 +165,7 @@ class GithubRunPipeline(ExecutionCommand):
              self._exit(False, f"Status: {execution.get_status()}")

      def _save_execution_info(self, execution: ExecutionInfo):
-         self.context.logger.info(f"Writing GitHub workflow execution status")
+         self.context.logger.info("Writing GitHub workflow execution status")
          self.context.output_param_set("params.build.url", execution.get_url())
          self.context.output_param_set("params.build.id", execution.get_id())
          self.context.output_param_set("params.build.status", execution.get_status())
@@ -54,7 +54,7 @@ class GitlabModulesOpsPipelineDataImporter(PipelineDataImporter):
              self.context.logger.debug(f"Contents of folder {local_dirpath} (after zip.extractall): {os.listdir(local_dirpath)}")
              self._import_downloaded_data(local_dirpath / self.IMPORTED_CONTEXT_FILE)
          else:
-             self.context.logger.warning(f"No jobs found")
+             self.context.logger.warning("No jobs found")

          self.context.output_params.load(self.context.context.get("paths.output.params"))
          self.context.output_params_secure.load(self.context.context.get("paths.output.params_secure"))
@@ -110,9 +110,9 @@ class GitlabRunPipeline(ExecutionCommand):

          self.pipeline_params = self.context.input_param_get("params.pipeline_params", {})
          if not self.pipeline_params:
-             self.context.logger.info(f"Pipeline parameters were not specified. This means that pipeline will be started with its default values")
+             self.context.logger.info("Pipeline parameters were not specified. This means that pipeline will be started with its default values")
          if not isinstance(self.pipeline_params, dict):
-             self.context.logger.error(f"Pipeline parameters were not loaded correctly. Probably mistake in the params definition")
+             self.context.logger.error("Pipeline parameters were not loaded correctly. Probably mistake in the params definition")
              return False
          self.import_artifacts = UtilsString.convert_to_bool(self.context.input_param_get("params.import_artifacts", False))
          self.success_statuses = [x.strip() for x in self.context.input_param_get("params.success_statuses", ExecutionInfo.STATUS_SUCCESS).split(",")]
@@ -129,7 +129,7 @@ class GitlabRunPipeline(ExecutionCommand):
              retry_timeout_seconds=self.retry_timeout_seconds,
              retry_wait_seconds=self.retry_wait_seconds
          )
-         self.context.logger.info(f"Successfully initialized GitLab client")
+         self.context.logger.info("Successfully initialized GitLab client")

          if not self.pipeline_branch:
              self.pipeline_branch = self.gl_client.get_default_branch(project_id=self.pipeline_path)
@@ -185,7 +185,7 @@ class GitlabRunPipeline(ExecutionCommand):
              self._exit(False, f"Status: {execution.get_status()}")

      def _save_execution_info(self, execution: ExecutionInfo):
-         self.context.logger.info(f"Writing GitLab pipeline execution status")
+         self.context.logger.info("Writing GitLab pipeline execution status")
          self.context.output_param_set("params.build.url", execution.get_url())
          self.context.output_param_set("params.build.id", execution.get_id())
          self.context.output_param_set("params.build.status", execution.get_status())
@@ -0,0 +1,63 @@
+ from pathlib import Path
+
+ from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommandExtension
+ from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+ from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
+
+
+ class JenkinsOutputParamsPipelineDataImporter(PipelineDataImporter):
+     """
+     Jenkins Output Params Importer:
+     imports data from contracted Declarative Pipelines
+     extracts output files and params of targeted pipeline into 'output' folder of this command
+     """
+     def import_pipeline_data(self, execution: ExecutionInfo) -> None:
+         self.context.logger.info("Processing jenkins job artifacts")
+         artifact_paths = self.command.jenkins_client.get_pipeline_execution_artifacts(execution)
+         if artifact_paths and len(artifact_paths):
+             for artifact_path in artifact_paths:
+                 if artifact_path == "output/params.yaml":
+                     self.context.logger.info(f"Artifact with name '{artifact_path}' will be processed as output params")
+                     file_path = self.context.input_param_get("paths.output.params")
+                     self.command.jenkins_client.save_pipeline_execution_artifact_to_file(execution, artifact_path, file_path)
+                     self.context.output_params.load(file_path)
+                 elif artifact_path == "output/params_secure.yaml":
+                     self.context.logger.info(f"Artifact with name '{artifact_path}' will be processed as output secure params")
+                     file_path = self.context.input_param_get("paths.output.params_secure")
+                     self.command.jenkins_client.save_pipeline_execution_artifact_to_file(execution, artifact_path, file_path)
+                     self.context.output_params_secure.load(file_path)
+                 else:
+                     self.context.logger.info(f"Artifact with name '{artifact_path}' will be saved as output file")
+                     file_path = Path(self.context.input_param_get("paths.output.files")).joinpath(artifact_path)
+                     self.command.jenkins_client.save_pipeline_execution_artifact_to_file(execution, artifact_path, file_path)
+         else:
+             self.context.logger.info("No artifacts found in the job")
+
+
+ class JenkinsSaveInjectedEnvVars(ExecutionCommandExtension):
+     """
+     Post-execution extension, saving injected environment variables from the build
+     """
+
+     INJECTED_ENV_VARS_URL = "injectedEnvVars/api/json"
+
+     def execute(self):
+         import os, requests
+         from requests.auth import HTTPBasicAuth
+
+         self.context.logger.info("Trying to get and save injected vars from build")
+         build_url = self.command.execution_info.get_url()
+         if build_url:
+             injected_api_url = build_url + self.INJECTED_ENV_VARS_URL
+             response = requests.get(injected_api_url,
+                                     auth=HTTPBasicAuth(self.context.input_param_get("systems.jenkins.username"),
+                                                        self.context.input_param_get("systems.jenkins.password")),
+                                     verify=True if os.getenv('PYTHONHTTPSVERIFY', '1') == '0' else False)
+
+             if response.status_code == 200:
+                 self.context.output_param_set("params.build.injected_vars", response.json().get("envMap", {}))
+                 self.context.output_params_save()
+             else:
+                 self.context.logger.warning(f"Can't get injected variables for url {injected_api_url} with response code {response.status_code}")
+         else:
+             self.context.logger.warning("Can't get build url for injectedEnvVars")
@@ -0,0 +1,5 @@
+ from qubership_pipelines_common_library.v1.jenkins_client import JenkinsClient as JenkinsClientV1
+
+
+ class JenkinsClient(JenkinsClientV1):
+     pass
@@ -0,0 +1,31 @@
+ import zipfile
+ from pathlib import Path
+
+ from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+ from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
+ from qubership_pipelines_common_library.v2.jenkins.jenkins_client import JenkinsClient
+
+
+ class DefaultJenkinsPipelineDataImporter(PipelineDataImporter):
+     """
+     Default Jenkins implementation:
+     downloads all available workflow run artifacts as one archive,
+     extracts them into context-defined 'paths.output.files' path
+     """
+     def import_pipeline_data(self, execution: ExecutionInfo) -> None:
+         self.context.logger.info("DefaultJenkinsPipelineDataImporter - importing pipeline data...")
+         artifact_paths = self.command.jenkins_client.get_pipeline_execution_artifacts(execution)
+         if artifact_paths:
+             self.context.logger.info(f"Job produced {len(artifact_paths)} artifact(s)")
+             self.command.jenkins_client.save_pipeline_execution_artifact_to_file(
+                 execution,
+                 JenkinsClient.BUILD_ARTIFACTS_ZIP_PATH,
+                 self.context.path_temp / "archive.zip")
+         else:
+             self.context.logger.info("No artifacts found, skipping pipeline import.")
+
+         output_path = Path(self.context.input_param_get("paths.output.files"))
+         output_path.mkdir(parents=True, exist_ok=True)
+         for file_path in Path(self.context.path_temp).iterdir():
+             with zipfile.ZipFile(file_path) as zf:
+                 zf.extractall(output_path)
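
This default importer is the extension point mentioned in the `JenkinsRunPipeline` docstring below. A minimal custom importer could follow the same shape; the sketch below only overrides `import_pipeline_data()`, relying on `self.command` and `self.context` being populated by the framework (via `with_command`, as seen in `JenkinsRunPipeline._execute`). The class itself is hypothetical.

```python
# Hypothetical custom importer sketch, mirroring the importers above.
from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter


class LoggingOnlyImporter(PipelineDataImporter):
    """Example importer that lists artifacts without downloading them."""

    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
        # get_pipeline_execution_artifacts() returns artifact paths, as above
        artifact_paths = self.command.jenkins_client.get_pipeline_execution_artifacts(execution)
        for artifact_path in artifact_paths or []:
            self.context.logger.info(f"Artifact available: {artifact_path}")
```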
@@ -0,0 +1,165 @@
+ from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommand
+ from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+ from qubership_pipelines_common_library.v1.utils.utils_string import UtilsString
+ from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
+ from qubership_pipelines_common_library.v2.jenkins.jenkins_pipeline_data_importer import DefaultJenkinsPipelineDataImporter
+ from qubership_pipelines_common_library.v2.jenkins.safe_jenkins_client import SafeJenkinsClient
+
+
+ class JenkinsRunPipeline(ExecutionCommand):
+     """
+     Runs Jenkins Pipeline and optionally imports artifacts.
+
+     This command runs Jenkins Pipeline, monitors its execution, and provides
+     options for importing resulting artifacts and custom data processing through extensible
+     importers.
+
+     Input Parameters Structure (this structure is expected inside "input_params.params" block):
+     ```
+     {
+         "pipeline_path": "TENANT-NAME/path/to/job",  # REQUIRED: Full pipeline path (e.g. "TENANT/folder/job")
+         "pipeline_params": {       # OPTIONAL: Input parameters to pass to the pipeline
+             "KEY1": "VALUE1",      # Side-note: if you want to run your parametrized job with default parameters,
+             "KEY2": "VALUE2"       # you still need to pass some fake params (they will be ignored by Jenkins), e.g. "__fake_key":"fake_value",
+         },                         # Otherwise, if this dict is empty - endpoint for non-parametrized jobs will be triggered
+         "import_artifacts": true,  # OPTIONAL: Whether to import pipeline artifacts (default: true)
+         "use_existing_pipeline": 123456789,  # OPTIONAL: Use existing pipeline ID instead of starting new one (debug feature)
+         "timeout_seconds": 1800,   # OPTIONAL: Maximum wait time for pipeline completion in seconds (default: 1800, 0 for async execution)
+         "wait_seconds": 1,         # OPTIONAL: Wait interval between status checks in seconds (default: 1)
+         "retry_timeout_seconds": 180,  # OPTIONAL: Timeout for Jenkins client initialization and pipeline start retries in seconds (default: 180)
+         "retry_wait_seconds": 1,   # OPTIONAL: Wait interval between retries in seconds (default: 1)
+         "success_statuses": "SUCCESS,UNSTABLE"  # OPTIONAL: Comma-separated list of acceptable completion statuses (default: SUCCESS)
+     }
+     ```
+
+     Systems Configuration (expected in "systems.jenkins" block):
+     ```
+     {
+         "url": "<jenkins_url>",        # REQUIRED: Jenkins instance URL
+         "username": "<jenkins_user>",  # REQUIRED: Jenkins user
+         "password": "<jenkins_token>"  # REQUIRED: Jenkins password or token with job-triggering permissions
+     }
+     ```
+
+     Output Parameters:
+     - params.build.url: URL to view the pipeline run in Jenkins
+     - params.build.id: ID of the executed pipeline
+     - params.build.status: Final status of the pipeline execution
+     - params.build.date: Build start time in ISO format
+     - params.build.duration: Total execution duration in human-readable format
+     - params.build.name: Name of the pipeline execution
+
+     Extension Points:
+     - Custom pipeline data importers can be implemented by extending PipelineDataImporter interface
+     - PipelineDataImporter is passed into constructor of command via "pipeline_data_importer" arg
+
+     Notes:
+     - Setting timeout_seconds to 0 enables asynchronous execution (pipeline starts but command doesn't wait for completion, and won't fetch build id)
+     """
+
+     # default timeout values
+     WAIT_TIMEOUT = 1800
+     WAIT_SECONDS = 1
+     RETRY_TIMEOUT_SECONDS = 180
+     RETRY_WAIT_SECONDS = 1
+
+     PARAM_NAME_IS_DRY_RUN = "IS_DRY_RUN"
+
+     def __init__(self, *args, pipeline_data_importer: PipelineDataImporter = None, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.pipeline_data_importer = pipeline_data_importer or DefaultJenkinsPipelineDataImporter()
+         if pipeline_data_importer and not isinstance(pipeline_data_importer, PipelineDataImporter):
+             raise TypeError(f"Class {type(pipeline_data_importer)} must inherit from PipelineDataImporter")
+
+     def _validate(self):
+         names = [
+             "paths.input.params",
+             "paths.output.params",
+             "paths.output.files",
+             "systems.jenkins.url",
+             "systems.jenkins.username",
+             "systems.jenkins.password",
+             "params.pipeline_path",
+         ]
+         if not self.context.validate(names):
+             return False
+
+         self.timeout_seconds = max(0, int(self.context.input_param_get("params.timeout_seconds", self.WAIT_TIMEOUT)))
+         self.wait_seconds = max(1, int(self.context.input_param_get("params.wait_seconds", self.WAIT_SECONDS)))
+
+         self.retry_timeout_seconds = int(self.context.input_param_get("params.retry_timeout_seconds", self.RETRY_TIMEOUT_SECONDS))
+         self.retry_wait_seconds = int(self.context.input_param_get("params.retry_wait_seconds", self.RETRY_WAIT_SECONDS))
+
+         if self.timeout_seconds == 0:
+             self.context.logger.info(f"Timeout is set to: {self.timeout_seconds}. This means that the pipeline will be started asynchronously")
+
+         self.pipeline_path = self.context.input_param_get("params.pipeline_path").strip("/")
+         self.pipeline_params = self.context.input_param_get("params.pipeline_params", {})
+         if not self.pipeline_params:
+             self.context.logger.info("Pipeline parameters were not specified. This means that pipeline will be started with its default values")
+         if not isinstance(self.pipeline_params, dict):
+             self.context.logger.error("Pipeline parameters were not loaded correctly. Probably mistake in the params definition")
+             return False
+
+         self.success_statuses = [x.strip() for x in self.context.input_param_get("params.success_statuses", ExecutionInfo.STATUS_SUCCESS).split(",")]
+         if UtilsString.convert_to_bool(self.context.input_param_get("params.is_dry_run", False)):
+             self.pipeline_params[self.PARAM_NAME_IS_DRY_RUN] = True
+         self.import_artifacts = UtilsString.convert_to_bool(self.context.input_param_get("params.import_artifacts", True))
+         self.use_existing_pipeline = self.context.input_param_get("params.use_existing_pipeline")
+         return True
+
+     def _execute(self):
+         self.context.logger.info("Running jenkins-run-pipeline...")
+         self.jenkins_client = SafeJenkinsClient.create_jenkins_client(
+             self.context.input_param_get("systems.jenkins.url"),
+             self.context.input_param_get("systems.jenkins.username"),
+             self.context.input_param_get("systems.jenkins.password"),
+             retry_timeout_seconds=self.retry_timeout_seconds,
+             retry_wait_seconds=self.retry_wait_seconds
+         )
+         self.context.logger.info("Successfully initialized Jenkins client")
+
+         if self.use_existing_pipeline:  # work with existing job
+             self.context.logger.info(f"Using existing job {self.pipeline_path} - {self.use_existing_pipeline}")
+             execution = (ExecutionInfo().with_params(self.pipeline_params)
+                          .with_name(self.pipeline_path).with_id(int(self.use_existing_pipeline))
+                          .with_status(ExecutionInfo.STATUS_UNKNOWN))
+             execution.start()
+         else:
+             execution = self.jenkins_client.run_pipeline(
+                 self.pipeline_path, self.pipeline_params,
+                 timeout_seconds=self.timeout_seconds,
+                 wait_seconds=self.wait_seconds
+             )
+
+         self.execution_info = execution
+         if execution.get_status() != ExecutionInfo.STATUS_IN_PROGRESS:
+             self._exit(False, f"Pipeline was not started. Status {execution.get_status()}")
+         elif self.timeout_seconds < 1:
+             self.context.logger.info("Pipeline was started in asynchronous mode. Pipeline status and artifacts will not be processed")
+             return
+
+         self.context.logger.info(f"Pipeline successfully started. Waiting {self.timeout_seconds} seconds for execution to complete")
+         execution = self.jenkins_client.wait_pipeline_execution(execution, self.timeout_seconds, self.wait_seconds)
+         self.context.logger.info(f"Pipeline status: {execution.get_status()}\nPipeline available at {execution.get_url()}")
+
+         if self.import_artifacts and self.pipeline_data_importer and execution.get_status() in ExecutionInfo.STATUSES_COMPLETE:
+             try:
+                 self.pipeline_data_importer.with_command(self)
+                 self.pipeline_data_importer.import_pipeline_data(execution)
+             except Exception as e:
+                 self.context.logger.error(f"Exception during pipeline_data_importer execution: {e}")
+
+         self._save_execution_info(execution)
+         if execution.get_status() not in self.success_statuses:
+             self._exit(False, f"Status: {execution.get_status()}")
+
+     def _save_execution_info(self, execution: ExecutionInfo):
+         self.context.logger.info("Writing jenkins job execution status")
+         self.context.output_param_set("params.build.url", execution.get_url())
+         self.context.output_param_set("params.build.id", execution.get_id())
+         self.context.output_param_set("params.build.status", execution.get_status())
+         self.context.output_param_set("params.build.date", execution.get_time_start().isoformat())
+         self.context.output_param_set("params.build.duration", execution.get_duration_str())
+         self.context.output_param_set("params.build.name", execution.get_name())
+         self.context.output_params_save()
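
As a sketch of the contract documented in this docstring, the expected input can be written as a plain dict; all values here are placeholders, and only the keys come from the docstring's "params" / "systems.jenkins" blocks.

```python
# Input-parameter sketch for JenkinsRunPipeline, per the docstring above.
input_params = {
    "params": {
        "pipeline_path": "TENANT/folder/job",
        "pipeline_params": {"DEPLOY_ENV": "dev"},  # placeholder job parameters
        "import_artifacts": True,
        "timeout_seconds": 1800,
        "wait_seconds": 1,
        "success_statuses": "SUCCESS,UNSTABLE",
    },
    "systems": {
        "jenkins": {
            "url": "https://jenkins.example.com",  # placeholder host
            "username": "ci-user",
            "password": "ci-token",
        },
    },
}
```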
@@ -0,0 +1,14 @@
+ from qubership_pipelines_common_library.v2.jenkins.jenkins_client import JenkinsClient
+ from qubership_pipelines_common_library.v2.utils.retry_decorator import RetryDecorator
+
+
+ class SafeJenkinsClient(JenkinsClient):
+
+     def __init__(self, host: str, user: str, password: str):
+         super().__init__(host, user, password)
+
+     @classmethod
+     @RetryDecorator(condition_func=lambda result: result is not None)
+     def create_jenkins_client(cls, host: str, user: str, password: str,
+                               retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
+         return cls(host, user, password)
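
For context, this retrying factory is what `JenkinsRunPipeline._execute` calls above. A minimal invocation sketch, with placeholder host and credentials: `RetryDecorator` re-invokes the construction (retrying on exceptions as well) until a non-None client is returned or the `retry_timeout_seconds` window, polled every `retry_wait_seconds`, is exhausted.

```python
# Invocation sketch matching the call in jenkins_run_pipeline_command above.
from qubership_pipelines_common_library.v2.jenkins.safe_jenkins_client import SafeJenkinsClient

client = SafeJenkinsClient.create_jenkins_client(
    "https://jenkins.example.com", "ci-user", "ci-token",  # placeholders
    retry_timeout_seconds=180, retry_wait_seconds=1,
)
```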
@@ -135,6 +135,7 @@ To install and use `podman run` in your Dockerimage (`python:3.11-slim` was used
  ### GitHub

  1. `apt-get install podman nftables fuse-overlayfs`
+
  2. ```bash
     RUN cat <<EOF > /etc/containers/storage.conf
     [storage]
@@ -145,8 +146,10 @@ To install and use `podman run` in your Dockerimage (`python:3.11-slim` was used
     mount_program = "/usr/bin/fuse-overlayfs"
     EOF
     ```
+
  3. In your workflow file, you need to pass the `--privileged` option
- ```
+
+ ```yaml
  jobs:
    execute-pipeline:
      runs-on: ubuntu-latest
@@ -154,11 +157,13 @@ To install and use `podman run` in your Dockerimage (`python:3.11-slim` was used
        image: ghcr.io/netcracker/qubership-pipelines-declarative-executor:dev_podman_engine
        options: --privileged
  ```
+
  4. You need to run the `PodmanRunImage` command with additional flags: `"additional_run_flags": "--cgroups=disabled"`

  ### GitLab

  1. `apt-get install podman nftables slirp4netns fuse-overlayfs`
+
  2. ```bash
     RUN cat <<EOF > /etc/containers/storage.conf
     [storage]
@@ -169,4 +174,5 @@ To install and use `podman run` in your Dockerimage (`python:3.11-slim` was used
     mount_program = "/usr/bin/fuse-overlayfs"
     EOF
     ```
+
  3. You need to run the `PodmanRunImage` command with additional flags: `"additional_run_flags": "--cgroups=disabled --network slirp4netns"`
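
As a sketch, the GitLab flags from step 3 can be supplied as an input-params fragment; only the `"additional_run_flags"` key and its value come from the documentation above, while the surrounding structure mirrors the library's other commands and is assumed.

```python
# Hypothetical params fragment for PodmanRunImage on GitLab runners.
podman_params = {
    "params": {
        "additional_run_flags": "--cgroups=disabled --network slirp4netns",
    },
}
```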
@@ -211,14 +211,14 @@ class PodmanRunImage(ExecutionCommand):
              import json
              with open(file_path, 'r', encoding='utf-8') as f:
                  return json.load(f)
-         except:
+         except Exception:
              pass

          try:
              import yaml
              with open(file_path, 'r', encoding='utf-8') as f:
                  return yaml.safe_load(f)
-         except:
+         except Exception:
              pass

          try:
@@ -230,7 +230,7 @@ class PodmanRunImage(ExecutionCommand):
                  key, value = line.split('=', 1)
                  key_values[key.strip()] = value.strip()
              return key_values if key_values else None
-         except:
+         except Exception:
              pass

          with open(file_path, 'r', encoding='utf-8') as f:
@@ -292,7 +292,7 @@ class PodmanRunImage(ExecutionCommand):
                  raise

          except PodmanException:
-             self.context.logger.error(f"Container exited with unexpected exitcode")
+             self.context.logger.error("Container exited with unexpected exitcode")
              raise

          except Exception as e:
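
The two hunks above narrow bare `except:` clauses in a fallback loader that tries JSON, then YAML, then KEY=VALUE pairs. A condensed, self-contained sketch of that pattern follows; it mirrors the structure visible in the diff rather than the library's exact implementation.

```python
# Fallback-parsing sketch: JSON -> YAML -> KEY=VALUE, as in the diff above.
import json


def load_config(file_path: str):
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except Exception:
        pass

    try:
        import yaml  # lazy import, as in the original
        with open(file_path, 'r', encoding='utf-8') as f:
            return yaml.safe_load(f)
    except Exception:
        pass

    # Last resort: parse KEY=VALUE lines into a dict
    key_values = {}
    with open(file_path, 'r', encoding='utf-8') as f:
        for line in f:
            if '=' in line:
                key, value = line.split('=', 1)
                key_values[key.strip()] = value.strip()
    return key_values or None
```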
@@ -88,7 +88,7 @@ class SopsClient:
          Returns:
              path to generated `.sops.yaml`
          """
-         self.logger.debug(f"Preparing sops config for encryption")
+         self.logger.debug("Preparing sops config for encryption")
          sops_config_content = {
              "creation_rules": [
                  {
@@ -111,6 +111,6 @@ class SopsClient:
          Returns:

          """
-         self.logger.debug(f"Removing sops config")
+         self.logger.debug("Removing sops config")
          if sops_config_folder.exists() and sops_config_folder.is_dir():
              shutil.rmtree(sops_config_folder)
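
The method above emits a `.sops.yaml` with a `creation_rules` list. A minimal sketch of writing such a file is shown below; the rule contents (an age recipient) are placeholders, since the diff only shows that a `creation_rules` block is generated.

```python
# Sketch: writing a minimal .sops.yaml with a creation_rules block.
import yaml
from pathlib import Path

sops_config_content = {
    "creation_rules": [
        {"age": "age1example..."},  # hypothetical recipient key
    ],
}
Path(".sops.yaml").write_text(yaml.safe_dump(sops_config_content))
```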
@@ -25,7 +25,7 @@ class RetryDecorator:
              last_result = None
              estimated_max_attempts = self.retry_timeout_seconds // self.retry_wait_seconds

-
+
              while count_seconds < self.retry_timeout_seconds and not self.condition_func(last_result):
                  try:
                      last_result = func(*args, **kwargs)
@@ -41,7 +41,7 @@ class RetryDecorator:
                          last_log_time = now
                      else:
                          time.sleep(self.retry_wait_seconds)
-
+
                  except Exception as e:
                      retries += 1
                      now = time.perf_counter()
@@ -50,14 +50,14 @@ class RetryDecorator:
                          last_log_time = now
                      else:
                          time.sleep(self.retry_wait_seconds)
-
+
                  finally:
                      count_seconds += self.retry_wait_seconds

              if self.condition_func(last_result):
                  self.logger.debug(f"Function {func.__name__} successfully executed after {retries} attempts in {count_seconds}s")
                  return last_result
-
+
              self._exit_with_error_message(func.__name__)
          return wrapper

@@ -80,7 +80,7 @@ class RetryDecorator:
          else:
              self.logger.debug(
                  f"`retry_wait_seconds` is not found in func {func.__name__} arguments. Using default value = {self.retry_wait_seconds}")
-
+
      def _process_exception_during_func_execution(self, exception, count_seconds, func_name, retries, estimated_max_attempts):
          if count_seconds < self.retry_timeout_seconds:
              self._sleep_with_warning_log(
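
A usage sketch for the decorator, following the `SafeJenkinsClient` example above: retry until the wrapped function returns a non-None result. Per the hunk above, the decorator looks for `retry_timeout_seconds`/`retry_wait_seconds` among the decorated function's arguments, so they are declared as parameters here; the `flaky_fetch` function itself is hypothetical.

```python
# Hypothetical RetryDecorator usage; condition_func matches SafeJenkinsClient's.
import random

from qubership_pipelines_common_library.v2.utils.retry_decorator import RetryDecorator


@RetryDecorator(condition_func=lambda result: result is not None)
def flaky_fetch(retry_timeout_seconds: int = 10, retry_wait_seconds: int = 1):
    # Returns None about half the time; the decorator keeps retrying until it doesn't.
    return "payload" if random.random() > 0.5 else None
```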
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: qubership-pipelines-common-library
- Version: 2.0.0
+ Version: 2.0.2
  Summary: Qubership Pipelines common library
  License: Apache-2.0
  License-File: LICENSE
@@ -24,6 +24,7 @@ Requires-Dist: python-gitlab (>=4.13.0,<5.0.0)
  Requires-Dist: python-jenkins (>=1.8.2,<2.0.0)
  Requires-Dist: pyyaml (>=6.0.2,<7.0.0)
  Requires-Dist: requests (>=2.32.3,<3.0.0)
+ Requires-Dist: rich (>=14.2.0,<15.0.0)
  Requires-Dist: urllib3 (>=2.2.3,<3.0.0)
  Requires-Dist: webexpythonsdk (==2.0.1)
  Description-Content-Type: text/markdown
@@ -37,7 +38,7 @@ Description-Content-Type: text/markdown

  Open-source python library of clients used by Qubership pipelines/modules.

- Library provides easy-to-use clients and wrappers for common devops services (e.g. Jenkins, MiniO, GitLab Pipelines)
+ Library provides easy-to-use clients and wrappers for common DevOps services (e.g. Jenkins, MiniO, GitLab Pipelines)

  ## Sample implementation

@@ -49,7 +50,7 @@ It includes reference python implementation along with the [Development Guide](h

  Library is presented as a set of clients with predefined operations

- Auto-generated reference (via mkdocs) is available on [this repo's GitHub Pages](https://netcracker.github.io/qubership-pipelines-common-python-library)
+ Auto-generated reference (via mkdocs) is available on [this repo's GitHub Pages](https://netcracker.github.io/qubership-pipelines-common-python-library/mkdocs)

  ## Installation

@@ -61,6 +62,7 @@ Auto-generated reference (via mkdocs) is available on [this repo's GitHub Pages]
  ```

  - Or you can install it via `pip`:
+
  ```bash
  pip install qubership-pipelines-common-library
  ```