qubership-pipelines-common-library 0.2.6__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qubership_pipelines_common_library/__init__.py +1 -1
- qubership_pipelines_common_library/v1/artifactory_client.py +1 -1
- qubership_pipelines_common_library/v1/execution/exec_command.py +63 -2
- qubership_pipelines_common_library/v1/execution/exec_context.py +6 -6
- qubership_pipelines_common_library/v1/execution/exec_context_file.py +1 -1
- qubership_pipelines_common_library/v1/execution/exec_info.py +4 -0
- qubership_pipelines_common_library/v1/execution/exec_logger.py +7 -5
- qubership_pipelines_common_library/v1/github_client.py +10 -1
- qubership_pipelines_common_library/v1/gitlab_client.py +175 -11
- qubership_pipelines_common_library/v1/jenkins_client.py +55 -18
- qubership_pipelines_common_library/v1/maven_client.py +2 -2
- qubership_pipelines_common_library/v1/minio_client.py +1 -1
- qubership_pipelines_common_library/v1/utils/rest.py +1 -1
- qubership_pipelines_common_library/v1/utils/utils.py +1 -1
- qubership_pipelines_common_library/v1/utils/utils_cli.py +43 -9
- qubership_pipelines_common_library/v1/utils/utils_dictionary.py +1 -1
- qubership_pipelines_common_library/v1/utils/utils_file.py +17 -0
- qubership_pipelines_common_library/v1/utils/utils_logging.py +53 -0
- qubership_pipelines_common_library/v2/__init__.py +0 -0
- qubership_pipelines_common_library/v2/artifacts_finder/__init__.py +0 -0
- qubership_pipelines_common_library/v2/artifacts_finder/artifact_finder.py +56 -0
- qubership_pipelines_common_library/v2/artifacts_finder/auth/__init__.py +0 -0
- qubership_pipelines_common_library/v2/artifacts_finder/auth/aws_credentials.py +106 -0
- qubership_pipelines_common_library/v2/artifacts_finder/auth/azure_credentials.py +72 -0
- qubership_pipelines_common_library/v2/artifacts_finder/auth/gcp_credentials.py +88 -0
- qubership_pipelines_common_library/v2/artifacts_finder/model/__init__.py +0 -0
- qubership_pipelines_common_library/v2/artifacts_finder/model/artifact.py +20 -0
- qubership_pipelines_common_library/v2/artifacts_finder/model/artifact_provider.py +35 -0
- qubership_pipelines_common_library/v2/artifacts_finder/model/credentials.py +16 -0
- qubership_pipelines_common_library/v2/artifacts_finder/model/credentials_provider.py +16 -0
- qubership_pipelines_common_library/v2/artifacts_finder/providers/__init__.py +0 -0
- qubership_pipelines_common_library/v2/artifacts_finder/providers/artifactory.py +52 -0
- qubership_pipelines_common_library/v2/artifacts_finder/providers/aws_code_artifact.py +79 -0
- qubership_pipelines_common_library/v2/artifacts_finder/providers/azure_artifacts.py +98 -0
- qubership_pipelines_common_library/v2/artifacts_finder/providers/gcp_artifact_registry.py +50 -0
- qubership_pipelines_common_library/v2/artifacts_finder/providers/nexus.py +41 -0
- qubership_pipelines_common_library/v2/extensions/pipeline_data_importer.py +24 -0
- qubership_pipelines_common_library/v2/github/__init__.py +0 -0
- qubership_pipelines_common_library/v2/github/github_client.py +5 -0
- qubership_pipelines_common_library/v2/github/github_pipeline_data_importer.py +21 -0
- qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py +175 -0
- qubership_pipelines_common_library/v2/github/safe_github_client.py +24 -0
- qubership_pipelines_common_library/v2/gitlab/__init__.py +0 -0
- qubership_pipelines_common_library/v2/gitlab/custom_extensions.py +101 -0
- qubership_pipelines_common_library/v2/gitlab/gitlab_client.py +36 -0
- qubership_pipelines_common_library/v2/gitlab/gitlab_pipeline_data_importer.py +26 -0
- qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py +195 -0
- qubership_pipelines_common_library/v2/gitlab/safe_gitlab_client.py +32 -0
- qubership_pipelines_common_library/v2/jenkins/__init__.py +0 -0
- qubership_pipelines_common_library/v2/jenkins/custom_extensions.py +63 -0
- qubership_pipelines_common_library/v2/jenkins/jenkins_client.py +5 -0
- qubership_pipelines_common_library/v2/jenkins/jenkins_pipeline_data_importer.py +31 -0
- qubership_pipelines_common_library/v2/jenkins/jenkins_run_pipeline_command.py +165 -0
- qubership_pipelines_common_library/v2/jenkins/safe_jenkins_client.py +14 -0
- qubership_pipelines_common_library/v2/podman/__init__.py +0 -0
- qubership_pipelines_common_library/v2/podman/podman_command.md +178 -0
- qubership_pipelines_common_library/v2/podman/podman_command.py +311 -0
- qubership_pipelines_common_library/v2/sops/sops_client.py +116 -0
- qubership_pipelines_common_library/v2/utils/crypto_utils.py +48 -0
- qubership_pipelines_common_library/v2/utils/extension_utils.py +22 -0
- qubership_pipelines_common_library/v2/utils/retry_decorator.py +93 -0
- {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/METADATA +5 -3
- qubership_pipelines_common_library-2.0.1.dist-info/RECORD +76 -0
- qubership_pipelines_common_library-0.2.6.dist-info/RECORD +0 -32
- {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/WHEEL +0 -0
- {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/licenses/LICENSE +0 -0
**qubership_pipelines_common_library/v2/gitlab/custom_extensions.py** (new file, +101 lines):

```python
from pathlib import Path

from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommandExtension
from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter


class GitlabDOBPParamsPreExt(ExecutionCommandExtension):
    """
    Pre-execution extension, enriching 'pipeline_params' with values from environment variables
    """
    def execute(self):
        self.context.logger.info("Adding DOBP-specific params to pipeline_params...")

        # Add upstream-related params:
        import os
        if project_url := os.getenv('PROJECT_URL'):
            from urllib.parse import urlparse
            parsed_project_url = urlparse(project_url)
            self.command.pipeline_params.setdefault('DOBP_UPSTREAM_SERVER_URL', f"{parsed_project_url.scheme}://{parsed_project_url.netloc}")
            self.command.pipeline_params.setdefault('DOBP_UPSTREAM_PROJECT_PATH', parsed_project_url.path.strip('/'))

        if pipeline_id := os.getenv('PIPELINE_ID'):
            self.command.pipeline_params.setdefault('DOBP_UPSTREAM_PIPELINE_ID', pipeline_id)

        # Add retry params:
        if retry_downstream_pipeline_id := os.getenv('DOBP_RETRY_DOWNSTREAM_PIPELINE_ID'):
            self.command.pipeline_params.setdefault('DOBP_RETRY_PIPELINE_ID', retry_downstream_pipeline_id)


class GitlabModulesOpsPipelineDataImporter(PipelineDataImporter):
    """
    GitLab Modules Ops implementation:
    imports data from contracted GitLab Declarative Pipelines,
    extracts output files and params of the targeted pipeline into the 'output' folder of this command
    """

    IMPORTED_CONTEXT_FILE = 'pipeline/output/context.yaml'

    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
        import os, zipfile
        self.context.logger.info("GitlabModulesOpsPipelineDataImporter - importing pipeline data...")
        project_id = execution.get_name()
        pipeline_id = execution.get_id()

        if job := self.command.gl_client.get_latest_job(project_id, pipeline_id):
            self.context.logger.info(f"Latest job: {job.id}")
            local_dirpath = self.context.path_temp
            self.context.logger.debug(f"Contents of folder {local_dirpath}: {os.listdir(local_dirpath)}")
            if artifacts_file := self.command.gl_client.download_job_artifacts(job.pipeline.get('project_id'), job.id, local_dirpath):
                with zipfile.ZipFile(artifacts_file) as zf:
                    self.context.logger.debug(f"Zip contents: {zf.namelist()}")
                    zf.extractall(local_dirpath)
                self.context.logger.debug(f"Contents of folder {local_dirpath} (after zip.extractall): {os.listdir(local_dirpath)}")
                self._import_downloaded_data(local_dirpath / self.IMPORTED_CONTEXT_FILE)
        else:
            self.context.logger.warning("No jobs found")

        self.context.output_params.load(self.context.context.get("paths.output.params"))
        self.context.output_params_secure.load(self.context.context.get("paths.output.params_secure"))

    def _import_downloaded_data(self, src_context_filepath: Path):
        import shutil
        from qubership_pipelines_common_library.v1.utils.utils_file import UtilsFile
        from qubership_pipelines_common_library.v1.utils.utils_dictionary import UtilsDictionary

        if src_context_filepath.is_file():
            self.context.logger.info(f"Importing from context file {src_context_filepath}")
            src_context = UtilsFile.read_yaml(src_context_filepath)
            src_base_dirpath = src_context_filepath.parent

            def get_path_from_src_context(param, default_value=None):
                if param_value := UtilsDictionary.get_by_path(src_context, param, default_value):
                    return Path(src_base_dirpath, param_value)
                return None

            for src in ('paths.output.params', 'paths.output.params_secure',):
                src_filepath = get_path_from_src_context(src)
                if src_filepath and src_filepath.is_file():
                    dst_file = self.context.context.get(src)
                    self.context.logger.info(f"Copying file {src_filepath} -> {dst_file}")
                    UtilsFile.create_parent_dirs(dst_file)
                    shutil.copyfile(src_filepath, dst_file)

            src_files_dirpath = get_path_from_src_context('paths.output.files')
            if src_files_dirpath and src_files_dirpath.is_dir():
                dst_files_dir = self.context.context.get('paths.output.files')
                self.context.logger.info(f"Copying dir {src_files_dirpath} -> {dst_files_dir}")
                shutil.copytree(src_files_dirpath, dst_files_dir, dirs_exist_ok=True)

            src_logs_dirpath = get_path_from_src_context('paths.logs', 'logs')
            for _ext in ('json', 'yaml',):
                src_exec_report_filepath = src_logs_dirpath / f"execution_report.{_ext}"
                if src_exec_report_filepath.is_file():
                    dst_exec_report_filepath = self.context.path_logs / f"nested_pipeline_report.{_ext}"
                    UtilsFile.create_parent_dirs(dst_exec_report_filepath)
                    self.context.logger.info(f"Copying file {src_exec_report_filepath} -> {dst_exec_report_filepath}")
                    shutil.copyfile(src_exec_report_filepath, dst_exec_report_filepath)

        else:
            self.context.logger.warning(f"Imported context file does not exist: {src_context_filepath}")
```
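
GitlabModulesOpsPipelineDataImporter encodes a contract with the downstream pipeline: the downloaded artifacts archive must contain `pipeline/output/context.yaml`, whose `paths.*` entries are resolved relative to that file's own directory. A minimal sketch of such a context file, assuming dot-paths map to nested YAML keys via `UtilsDictionary.get_by_path` (the exact nesting convention is not shown in this diff, and the relative paths are placeholders):

```yaml
# Hypothetical pipeline/output/context.yaml inside the artifacts archive.
# All paths are resolved relative to this file's parent directory.
paths:
  output:
    params: output/params.yaml            # copied to this command's 'paths.output.params'
    params_secure: output/params_secure.yaml
    files: output/files                   # directory, copied into 'paths.output.files'
  logs: logs                              # execution_report.{json,yaml} is saved as nested_pipeline_report.{json,yaml}
```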
**qubership_pipelines_common_library/v2/gitlab/gitlab_client.py** (new file, +36 lines):

```python
import os, logging

from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v1.gitlab_client import GitlabClient as GitlabClientV1


class GitlabClient(GitlabClientV1):

    def trigger_pipeline(self, project_id: str, ref: str, trigger_token: str = None, variables: dict = None, use_ci_job_token: bool = False):
        """"""
        if variables is None:
            variables = {}
        if use_ci_job_token:
            trigger_token = os.getenv('CI_JOB_TOKEN')
        trigger_data = {k: self._cast_to_string(v) for k, v in variables.items()}
        project = self.gl.projects.get(project_id, lazy=True)
        pipeline = project.trigger_pipeline(ref, trigger_token, trigger_data)
        logging.info(f"Pipeline successfully started (via TRIGGER) at {pipeline.web_url}")
        return ExecutionInfo().with_name(project_id).with_id(pipeline.get_id()) \
            .with_url(pipeline.web_url).with_params(trigger_data) \
            .start()

    def create_pipeline(self, project_id: str, ref: str, variables: dict = None):
        """"""
        if variables is None:
            variables = {}
        create_data = {
            'ref': ref,
            'variables': [{'key': k, 'value': self._cast_to_string(v)} for k, v in variables.items()],
        }
        project = self.gl.projects.get(project_id, lazy=True)
        pipeline = project.pipelines.create(create_data)
        logging.info(f"Pipeline successfully started (via CREATE) at {pipeline.web_url}")
        return ExecutionInfo().with_name(project_id).with_id(pipeline.get_id()) \
            .with_url(pipeline.web_url).with_params(create_data) \
            .start()
```
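
A usage sketch for the two entry points, assuming the v1 constructor signature `GitlabClient(host=..., username=..., password=...)` that SafeGitlabClient (later in this diff) forwards to; tokens and project paths are placeholders:

```python
from qubership_pipelines_common_library.v2.gitlab.gitlab_client import GitlabClient

gl = GitlabClient(host="https://gitlab.com", username="", password="<access_token>")

# Create API: runs a pipeline as the authenticated user.
execution = gl.create_pipeline("group/subgroup/repo", ref="main",
                               variables={"KEY1": "VALUE1"})

# Trigger API: inside a GitLab CI job, CI_JOB_TOKEN can stand in for a trigger token.
execution = gl.trigger_pipeline("group/subgroup/repo", ref="main",
                                variables={"KEY1": "VALUE1"}, use_ci_job_token=True)
print(execution.get_id(), execution.get_url(), execution.get_status())
```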
**qubership_pipelines_common_library/v2/gitlab/gitlab_pipeline_data_importer.py** (new file, +26 lines):

```python
import zipfile
from pathlib import Path

from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter


class DefaultGitlabPipelineDataImporter(PipelineDataImporter):
    """
    Default GitLab implementation:
    downloads all available workflow run artifacts,
    extracts them into the context-defined 'paths.output.files' path
    """
    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
        self.context.logger.info("DefaultGitlabPipelineDataImporter - importing pipeline data...")
        project_id = execution.get_name()
        pipeline_id = execution.get_id()
        if job := self.command.gl_client.get_latest_job(project_id, pipeline_id):
            if artifacts_file := self.command.gl_client.download_job_artifacts(job.pipeline.get('project_id'), job.id, self.context.path_temp):
                output_path = Path(self.context.input_param_get("paths.output.files"))
                output_path.mkdir(parents=True, exist_ok=True)
                with zipfile.ZipFile(artifacts_file) as zf:
                    self.context.logger.debug(f"Zip contents: {zf.namelist()}")
                    zf.extractall(output_path)
        else:
            self.context.logger.warning(f"Job not found! project_id: {project_id}, pipeline_id: {pipeline_id}")
```
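
Custom importers only need to implement `import_pipeline_data`; the framework injects the running command (and through it the context) via `with_command`, as seen in GitlabRunPipeline below. A minimal sketch of an alternative importer that records metadata instead of extracting artifacts (class name and behavior are illustrative, not part of the library):

```python
from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter


class MetadataOnlyGitlabPipelineDataImporter(PipelineDataImporter):
    """Hypothetical importer: stores pipeline metadata instead of downloading artifacts."""

    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
        self.context.logger.info(f"Skipping artifact download for pipeline {execution.get_id()}")
        self.context.output_param_set("params.build.artifact_import", "skipped")
        self.context.output_params_save()
```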
**qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py** (new file, +195 lines):

````python
from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommand
from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v1.utils.utils_string import UtilsString
from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
from qubership_pipelines_common_library.v2.gitlab.gitlab_pipeline_data_importer import DefaultGitlabPipelineDataImporter
from qubership_pipelines_common_library.v2.gitlab.safe_gitlab_client import SafeGitlabClient


class GitlabRunPipeline(ExecutionCommand):
    """
    Runs a GitLab pipeline via the Trigger or Create API and optionally imports artifacts.

    This command runs a GitLab pipeline, monitors its execution, and provides options
    for importing resulting artifacts and custom data processing through extensible importers.

    Input Parameters Structure (expected inside the "input_params.params" block):
    ```
    {
        "pipeline_path": "path/to/gitlab_project", # REQUIRED: Full pipeline path (e.g. "group/subgroup/repo")
        "pipeline_branch": "main",                 # OPTIONAL: Branch to run pipeline from (default: repo's default branch)
        "trigger_type": "CREATE_PIPELINE",         # OPTIONAL: Which API will be used to trigger the pipeline (CREATE_PIPELINE or TRIGGER_PIPELINE)
        "pipeline_params": {                       # OPTIONAL: Input parameters to pass to the pipeline
            "KEY1": "VALUE1",
            "KEY2": "VALUE2"
        },
        "import_artifacts": false,                 # OPTIONAL: Whether to import pipeline artifacts (default: false)
        "use_existing_pipeline": 123456789,        # OPTIONAL: Use an existing pipeline ID (or 'latest') instead of starting a new one (debug feature)
        "timeout_seconds": 1800,                   # OPTIONAL: Maximum wait time for pipeline completion in seconds (default: 1800, 0 for async execution)
        "wait_seconds": 1,                         # OPTIONAL: Wait interval between status checks in seconds (default: 1)
        "retry_timeout_seconds": 180,              # OPTIONAL: Timeout for GitLab client initialization and pipeline start retries in seconds (default: 180)
        "retry_wait_seconds": 1,                   # OPTIONAL: Wait interval between retries in seconds (default: 1)
        "success_statuses": "SUCCESS,UNSTABLE"     # OPTIONAL: Comma-separated list of acceptable completion statuses (default: SUCCESS)
    }
    ```

    Systems Configuration (expected in the "systems.gitlab" block):
    ```
    {
        "url": "https://gitlab.example.com",       # OPTIONAL: GitLab URL for self-hosted instances (default: https://gitlab.com)
        "password": "<gitlab_token>",              # REQUIRED: GitLab access token with CI/CD permissions
        "trigger_token": "<gitlab_trigger_token>"  # OPTIONAL: Special token issued for triggering pipelines. If not provided, CI_JOB_TOKEN is used
    }
    ```

    Output Parameters:
    - params.build.url: URL to view the pipeline run in GitLab
    - params.build.id: ID of the executed pipeline
    - params.build.status: Final status of the pipeline execution
    - params.build.date: Pipeline start time in ISO format
    - params.build.duration: Total execution duration in human-readable format
    - params.build.name: Name of the pipeline execution

    Extension Points:
    - Custom pipeline data importers can be implemented by extending the PipelineDataImporter interface
    - A PipelineDataImporter is passed into the command's constructor via the "pipeline_data_importer" arg

    Notes:
    - Setting timeout_seconds to 0 enables asynchronous execution (the pipeline starts but the command doesn't wait for completion)
    - For self-hosted GitLab instances, configure "systems.gitlab.url"
    - Custom data importers receive the command context and can implement advanced processing logic
    """

    # default timeout values
    WAIT_TIMEOUT = 1800
    WAIT_SECONDS = 1
    RETRY_TIMEOUT_SECONDS = 180
    RETRY_WAIT_SECONDS = 1

    TRIGGER_TYPE_TRIGGER_PIPELINE = 'TRIGGER_PIPELINE'
    TRIGGER_TYPE_CREATE_PIPELINE = 'CREATE_PIPELINE'
    TRIGGER_TYPES = (TRIGGER_TYPE_TRIGGER_PIPELINE, TRIGGER_TYPE_CREATE_PIPELINE,)

    def __init__(self, *args, pipeline_data_importer: PipelineDataImporter = None, **kwargs):
        super().__init__(*args, **kwargs)
        self.pipeline_data_importer = pipeline_data_importer or DefaultGitlabPipelineDataImporter()
        if pipeline_data_importer and not isinstance(pipeline_data_importer, PipelineDataImporter):
            raise TypeError(f"Class {type(pipeline_data_importer)} must inherit from PipelineDataImporter")

    def _validate(self):
        names = [
            "paths.input.params",
            "paths.output.params",
            "paths.output.files",
            "systems.gitlab.password",
            "params.pipeline_path",
        ]
        if not self.context.validate(names):
            return False

        self.timeout_seconds = max(0, int(self.context.input_param_get("params.timeout_seconds", self.WAIT_TIMEOUT)))
        self.wait_seconds = max(1, int(self.context.input_param_get("params.wait_seconds", self.WAIT_SECONDS)))

        self.retry_timeout_seconds = int(self.context.input_param_get("params.retry_timeout_seconds", self.RETRY_TIMEOUT_SECONDS))
        self.retry_wait_seconds = int(self.context.input_param_get("params.retry_wait_seconds", self.RETRY_WAIT_SECONDS))

        if self.timeout_seconds == 0:
            self.context.logger.info(f"Timeout is set to: {self.timeout_seconds}. This means that the pipeline will be started asynchronously")

        self.gitlab_url = self.context.input_param_get("systems.gitlab.url", "https://gitlab.com")
        self.pipeline_path = self.context.input_param_get("params.pipeline_path").strip("/")
        self.pipeline_branch = self.context.input_param_get("params.pipeline_branch")

        self.trigger_type = self.context.input_param_get("params.trigger_type", self.TRIGGER_TYPE_CREATE_PIPELINE)
        if self.trigger_type not in self.TRIGGER_TYPES:
            self.context.logger.error(f"Unsupported trigger_type: {self.trigger_type}")
            return False
        if self.trigger_type == self.TRIGGER_TYPE_TRIGGER_PIPELINE:
            self.trigger_token = self.context.input_param_get("systems.gitlab.trigger_token")

        self.pipeline_params = self.context.input_param_get("params.pipeline_params", {})
        if not self.pipeline_params:
            self.context.logger.info("Pipeline parameters were not specified. This means that the pipeline will be started with its default values")
        if not isinstance(self.pipeline_params, dict):
            self.context.logger.error("Pipeline parameters were not loaded correctly. There is probably a mistake in the params definition")
            return False
        self.import_artifacts = UtilsString.convert_to_bool(self.context.input_param_get("params.import_artifacts", False))
        self.success_statuses = [x.strip() for x in self.context.input_param_get("params.success_statuses", ExecutionInfo.STATUS_SUCCESS).split(",")]
        self.use_existing_pipeline = self.context.input_param_get("params.use_existing_pipeline")
        return True

    def _execute(self):
        self.context.logger.info("Running gitlab-run-pipeline...")

        self.gl_client = SafeGitlabClient.create_gitlab_client(
            host=self.gitlab_url,
            username="",
            password=self.context.input_param_get("systems.gitlab.password"),
            retry_timeout_seconds=self.retry_timeout_seconds,
            retry_wait_seconds=self.retry_wait_seconds
        )
        self.context.logger.info("Successfully initialized GitLab client")

        if not self.pipeline_branch:
            self.pipeline_branch = self.gl_client.get_default_branch(project_id=self.pipeline_path)

        if self.use_existing_pipeline:  # work with an existing pipeline run
            if self.use_existing_pipeline == 'latest':
                pipeline_id = self.gl_client.get_latest_pipeline_id(project_id=self.pipeline_path, ref=self.pipeline_branch)
            else:
                pipeline_id = self.use_existing_pipeline
            self.context.logger.info(f"Using existing pipeline: {pipeline_id}")
            execution = ExecutionInfo().with_name(self.pipeline_path).with_id(pipeline_id).with_status(ExecutionInfo.STATUS_UNKNOWN)
            execution.start()
        else:
            if self.trigger_type == self.TRIGGER_TYPE_CREATE_PIPELINE:
                execution = self.gl_client.create_pipeline(
                    project_id=self.pipeline_path,
                    ref=self.pipeline_branch,
                    variables=self.pipeline_params,
                    retry_timeout_seconds=self.retry_timeout_seconds,
                    retry_wait_seconds=self.retry_wait_seconds
                )
            elif self.trigger_type == self.TRIGGER_TYPE_TRIGGER_PIPELINE:
                execution = self.gl_client.trigger_pipeline(
                    project_id=self.pipeline_path,
                    ref=self.pipeline_branch,
                    trigger_token=self.trigger_token,
                    variables=self.pipeline_params,
                    use_ci_job_token=(self.trigger_token is None),
                    retry_timeout_seconds=self.retry_timeout_seconds,
                    retry_wait_seconds=self.retry_wait_seconds
                )

        if execution.get_status() != ExecutionInfo.STATUS_IN_PROGRESS:
            self._exit(False, f"Pipeline was not started. Status {execution.get_status()}")
        elif self.timeout_seconds < 1:
            self.context.logger.info("Pipeline was started in asynchronous mode. Pipeline status and artifacts will not be processed")
            return

        self.context.logger.info(f"Pipeline successfully started. Waiting {self.timeout_seconds} seconds for execution to complete")
        execution = self.gl_client.wait_pipeline_execution(execution=execution, timeout_seconds=self.timeout_seconds,
                                                           wait_seconds=self.wait_seconds)
        self.context.logger.info(f"Pipeline status: {execution.get_status()}")

        if self.import_artifacts and self.pipeline_data_importer and execution.get_status() in ExecutionInfo.STATUSES_COMPLETE:
            try:
                self.pipeline_data_importer.with_command(self)
                self.pipeline_data_importer.import_pipeline_data(execution)
            except Exception as e:
                self.context.logger.error(f"Exception during pipeline_data_importer execution: {e}")

        self._save_execution_info(execution)
        if execution.get_status() not in self.success_statuses:
            self._exit(False, f"Status: {execution.get_status()}")

    def _save_execution_info(self, execution: ExecutionInfo):
        self.context.logger.info("Writing GitLab pipeline execution status")
        self.context.output_param_set("params.build.url", execution.get_url())
        self.context.output_param_set("params.build.id", execution.get_id())
        self.context.output_param_set("params.build.status", execution.get_status())
        self.context.output_param_set("params.build.date", execution.get_time_start().isoformat())
        self.context.output_param_set("params.build.duration", execution.get_duration_str())
        self.context.output_param_set("params.build.name", execution.get_name())
        self.context.output_params_save()
````
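
Putting the documented structure together, a hypothetical input context for this command might look like the following (the file layout and top-level key names are assumptions inferred from the `params.*` / `systems.*` paths the command reads; values are placeholders):

```yaml
params:
  pipeline_path: group/subgroup/repo
  pipeline_branch: main
  trigger_type: CREATE_PIPELINE
  pipeline_params:
    KEY1: VALUE1
  import_artifacts: true
  timeout_seconds: 1800
  success_statuses: "SUCCESS,UNSTABLE"
systems:
  gitlab:
    url: https://gitlab.example.com
    password: <gitlab_token>
```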
**qubership_pipelines_common_library/v2/gitlab/safe_gitlab_client.py** (new file, +32 lines):

```python
from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v2.gitlab.gitlab_client import GitlabClient
from qubership_pipelines_common_library.v2.utils.retry_decorator import RetryDecorator


class SafeGitlabClient(GitlabClient):

    def __init__(self, host: str, username: str, password: str):
        super().__init__(host=host, username=username, password=password)

    @classmethod
    @RetryDecorator(condition_func=lambda result: result is not None)
    def create_gitlab_client(cls, host: str, username: str, password: str,
                             retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
        return cls(host, username, password)

    @RetryDecorator(
        condition_func=lambda result: result is not None and result.get_status() not in [
            ExecutionInfo.STATUS_NOT_STARTED, ExecutionInfo.STATUS_UNKNOWN]
    )
    def trigger_pipeline(self, project_id: str, ref: str, trigger_token: str = None, variables: dict = None,
                         use_ci_job_token: bool = False, retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
        return super().trigger_pipeline(project_id=project_id, ref=ref, trigger_token=trigger_token,
                                        variables=variables, use_ci_job_token=use_ci_job_token)

    @RetryDecorator(
        condition_func=lambda result: result is not None and result.get_status() not in [
            ExecutionInfo.STATUS_NOT_STARTED, ExecutionInfo.STATUS_UNKNOWN]
    )
    def create_pipeline(self, project_id: str, ref: str, variables: dict = None,
                        retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
        return super().create_pipeline(project_id=project_id, ref=ref, variables=variables)
```
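
The RetryDecorator implementation lives in v2/utils/retry_decorator.py and is not shown in this diff; from the call sites above, its apparent semantics are to re-invoke the wrapped callable until `condition_func(result)` holds or `retry_timeout_seconds` elapses, sleeping `retry_wait_seconds` between attempts. A minimal sketch of that behavior (names and details are assumptions, not the library's actual code):

```python
import functools
import time


class RetryDecorator:
    """Hypothetical stand-in: retries until condition_func(result) is truthy or the timeout elapses."""

    def __init__(self, condition_func):
        self.condition_func = condition_func

    def __call__(self, func):
        @functools.wraps(func)
        def wrapper(*args, retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1, **kwargs):
            deadline = time.monotonic() + retry_timeout_seconds
            result = None
            while True:
                try:
                    result = func(*args, **kwargs)
                    if self.condition_func(result):
                        return result
                except Exception:
                    if time.monotonic() >= deadline:
                        raise  # give up: re-raise the last failure
                if time.monotonic() >= deadline:
                    return result  # last unsatisfactory result after timeout (assumed behavior)
                time.sleep(retry_wait_seconds)
        return wrapper
```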
**qubership_pipelines_common_library/v2/jenkins/custom_extensions.py** (new file, +63 lines):

```python
from pathlib import Path

from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommandExtension
from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter


class JenkinsOutputParamsPipelineDataImporter(PipelineDataImporter):
    """
    Jenkins Output Params Importer:
    imports data from contracted Declarative Pipelines,
    extracts output files and params of the targeted pipeline into the 'output' folder of this command
    """
    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
        self.context.logger.info("Processing jenkins job artifacts")
        artifact_paths = self.command.jenkins_client.get_pipeline_execution_artifacts(execution)
        if artifact_paths:
            for artifact_path in artifact_paths:
                if artifact_path == "output/params.yaml":
                    self.context.logger.info(f"Artifact with name '{artifact_path}' will be processed as output params")
                    file_path = self.context.input_param_get("paths.output.params")
                    self.command.jenkins_client.save_pipeline_execution_artifact_to_file(execution, artifact_path, file_path)
                    self.context.output_params.load(file_path)
                elif artifact_path == "output/params_secure.yaml":
                    self.context.logger.info(f"Artifact with name '{artifact_path}' will be processed as output secure params")
                    file_path = self.context.input_param_get("paths.output.params_secure")
                    self.command.jenkins_client.save_pipeline_execution_artifact_to_file(execution, artifact_path, file_path)
                    self.context.output_params_secure.load(file_path)
                else:
                    self.context.logger.info(f"Artifact with name '{artifact_path}' will be saved as output file")
                    file_path = Path(self.context.input_param_get("paths.output.files")).joinpath(artifact_path)
                    self.command.jenkins_client.save_pipeline_execution_artifact_to_file(execution, artifact_path, file_path)
        else:
            self.context.logger.info("No artifacts found in the job")


class JenkinsSaveInjectedEnvVars(ExecutionCommandExtension):
    """
    Post-execution extension, saving injected environment variables from the build
    """

    INJECTED_ENV_VARS_URL = "injectedEnvVars/api/json"

    def execute(self):
        import os, requests
        from requests.auth import HTTPBasicAuth

        self.context.logger.info("Trying to get and save injected vars from build")
        build_url = self.command.execution_info.get_url()
        if build_url:
            injected_api_url = build_url + self.INJECTED_ENV_VARS_URL
            response = requests.get(injected_api_url,
                                    auth=HTTPBasicAuth(self.context.input_param_get("systems.jenkins.username"),
                                                       self.context.input_param_get("systems.jenkins.password")),
                                    verify=os.getenv('PYTHONHTTPSVERIFY', '1') != '0')

            if response.status_code == 200:
                self.context.output_param_set("params.build.injected_vars", response.json().get("envMap", {}))
                self.context.output_params_save()
            else:
                self.context.logger.warning(f"Can't get injected variables for url {injected_api_url} with response code {response.status_code}")
        else:
            self.context.logger.warning("Can't get build url for injectedEnvVars")
```
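
The importer's contract is purely name-based: the Jenkins job must archive its output parameters as artifacts named exactly `output/params.yaml` and `output/params_secure.yaml`; every other artifact lands under `paths.output.files`. A hypothetical `output/params.yaml` (the key layout is a placeholder; the importer only requires YAML that `output_params.load` accepts):

```yaml
# Hypothetical artifact archived by the contracted Jenkins job; key names are illustrative only.
params:
  build:
    artifact_version: "1.2.3"
    deployed_env: dev
```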
**qubership_pipelines_common_library/v2/jenkins/jenkins_pipeline_data_importer.py** (new file, +31 lines):

```python
import zipfile
from pathlib import Path

from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
from qubership_pipelines_common_library.v2.jenkins.jenkins_client import JenkinsClient


class DefaultJenkinsPipelineDataImporter(PipelineDataImporter):
    """
    Default Jenkins implementation:
    downloads all available workflow run artifacts as one archive,
    extracts them into the context-defined 'paths.output.files' path
    """
    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
        self.context.logger.info("DefaultJenkinsPipelineDataImporter - importing pipeline data...")
        artifact_paths = self.command.jenkins_client.get_pipeline_execution_artifacts(execution)
        if artifact_paths:
            self.context.logger.info(f"Job produced {len(artifact_paths)} artifact(s)")
            self.command.jenkins_client.save_pipeline_execution_artifact_to_file(
                execution,
                JenkinsClient.BUILD_ARTIFACTS_ZIP_PATH,
                self.context.path_temp / "archive.zip")
        else:
            self.context.logger.info("No artifacts found, skipping pipeline import.")

        output_path = Path(self.context.input_param_get("paths.output.files"))
        output_path.mkdir(parents=True, exist_ok=True)
        for file_path in Path(self.context.path_temp).iterdir():
            with zipfile.ZipFile(file_path) as zf:
                zf.extractall(output_path)
```