qubership-pipelines-common-library 0.2.6__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qubership_pipelines_common_library/v1/execution/exec_command.py +52 -1
- qubership_pipelines_common_library/v1/execution/exec_info.py +4 -0
- qubership_pipelines_common_library/v1/github_client.py +9 -0
- qubership_pipelines_common_library/v1/gitlab_client.py +170 -10
- qubership_pipelines_common_library/v1/utils/utils_file.py +17 -0
- qubership_pipelines_common_library/v2/__init__.py +0 -0
- qubership_pipelines_common_library/v2/extensions/pipeline_data_importer.py +24 -0
- qubership_pipelines_common_library/v2/github/__init__.py +0 -0
- qubership_pipelines_common_library/v2/github/github_client.py +5 -0
- qubership_pipelines_common_library/v2/github/github_pipeline_data_importer.py +21 -0
- qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py +175 -0
- qubership_pipelines_common_library/v2/github/safe_github_client.py +24 -0
- qubership_pipelines_common_library/v2/gitlab/__init__.py +0 -0
- qubership_pipelines_common_library/v2/gitlab/custom_extensions.py +101 -0
- qubership_pipelines_common_library/v2/gitlab/gitlab_client.py +36 -0
- qubership_pipelines_common_library/v2/gitlab/gitlab_pipeline_data_importer.py +26 -0
- qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py +195 -0
- qubership_pipelines_common_library/v2/gitlab/safe_gitlab_client.py +32 -0
- qubership_pipelines_common_library/v2/podman/__init__.py +0 -0
- qubership_pipelines_common_library/v2/podman/podman_command.md +172 -0
- qubership_pipelines_common_library/v2/podman/podman_command.py +311 -0
- qubership_pipelines_common_library/v2/sops/sops_client.py +116 -0
- qubership_pipelines_common_library/v2/utils/crypto_utils.py +48 -0
- qubership_pipelines_common_library/v2/utils/extension_utils.py +22 -0
- qubership_pipelines_common_library/v2/utils/retry_decorator.py +93 -0
- {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.0.dist-info}/METADATA +1 -1
- qubership_pipelines_common_library-2.0.0.dist-info/RECORD +52 -0
- qubership_pipelines_common_library-0.2.6.dist-info/RECORD +0 -32
- {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.0.dist-info}/WHEEL +0 -0
- {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.0.dist-info}/licenses/LICENSE +0 -0
+++ qubership_pipelines_common_library/v2/gitlab/custom_extensions.py
@@ -0,0 +1,101 @@
+from pathlib import Path
+
+from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommandExtension
+from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
+
+
+class GitlabDOBPParamsPreExt(ExecutionCommandExtension):
+    """
+    Pre-execution extension, enriching 'pipeline_params' with values from environment variables
+    """
+    def execute(self):
+        self.context.logger.info("Adding DOBP-specific params to pipeline_params...")
+
+        # Add upstream-related params:
+        import os
+        if project_url := os.getenv('PROJECT_URL'):
+            from urllib.parse import urlparse
+            parsed_project_url = urlparse(project_url)
+            self.command.pipeline_params.setdefault('DOBP_UPSTREAM_SERVER_URL', f"{parsed_project_url.scheme}://{parsed_project_url.netloc}")
+            self.command.pipeline_params.setdefault('DOBP_UPSTREAM_PROJECT_PATH', parsed_project_url.path.strip('/'))
+
+        if pipeline_id := os.getenv('PIPELINE_ID'):
+            self.command.pipeline_params.setdefault('DOBP_UPSTREAM_PIPELINE_ID', pipeline_id)
+
+        # Add retry params:
+        if retry_downstream_pipeline_id := os.getenv('DOBP_RETRY_DOWNSTREAM_PIPELINE_ID'):
+            self.command.pipeline_params.setdefault('DOBP_RETRY_PIPELINE_ID', retry_downstream_pipeline_id)
+
+
+class GitlabModulesOpsPipelineDataImporter(PipelineDataImporter):
+    """
+    GitLab Modules Ops implementation:
+    imports data from contracted GitLab Declarative Pipelines,
+    extracts output files and params of the targeted pipeline into the 'output' folder of this command
+    """
+
+    IMPORTED_CONTEXT_FILE = 'pipeline/output/context.yaml'
+
+    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
+        import os, zipfile
+        self.context.logger.info("GitlabModulesOpsPipelineDataImporter - importing pipeline data...")
+        project_id = execution.get_name()
+        pipeline_id = execution.get_id()
+
+        if job := self.command.gl_client.get_latest_job(project_id, pipeline_id):
+            self.context.logger.info(f"Latest job: {job.id}")
+            local_dirpath = self.context.path_temp
+            self.context.logger.debug(f"Contents of folder {local_dirpath}: {os.listdir(local_dirpath)}")
+            if artifacts_file := self.command.gl_client.download_job_artifacts(job.pipeline.get('project_id'), job.id, local_dirpath):
+                with zipfile.ZipFile(artifacts_file) as zf:
+                    self.context.logger.debug(f"Zip contents: {zf.namelist()}")
+                    zf.extractall(local_dirpath)
+                self.context.logger.debug(f"Contents of folder {local_dirpath} (after zip.extractall): {os.listdir(local_dirpath)}")
+                self._import_downloaded_data(local_dirpath / self.IMPORTED_CONTEXT_FILE)
+        else:
+            self.context.logger.warning("No jobs found")
+
+        self.context.output_params.load(self.context.context.get("paths.output.params"))
+        self.context.output_params_secure.load(self.context.context.get("paths.output.params_secure"))
+
+    def _import_downloaded_data(self, src_context_filepath: Path):
+        import shutil
+        from qubership_pipelines_common_library.v1.utils.utils_file import UtilsFile
+        from qubership_pipelines_common_library.v1.utils.utils_dictionary import UtilsDictionary
+
+        if src_context_filepath.is_file():
+            self.context.logger.info(f"Importing from context file {src_context_filepath}")
+            src_context = UtilsFile.read_yaml(src_context_filepath)
+            src_base_dirpath = src_context_filepath.parent
+
+            def get_path_from_src_context(param, default_value=None):
+                if param_value := UtilsDictionary.get_by_path(src_context, param, default_value):
+                    return Path(src_base_dirpath, param_value)
+                return None
+
+            for src in ('paths.output.params', 'paths.output.params_secure',):
+                src_filepath = get_path_from_src_context(src)
+                if src_filepath and src_filepath.is_file():
+                    dst_file = self.context.context.get(src)
+                    self.context.logger.info(f"Copying file {src_filepath} -> {dst_file}")
+                    UtilsFile.create_parent_dirs(dst_file)
+                    shutil.copyfile(src_filepath, dst_file)
+
+            src_files_dirpath = get_path_from_src_context('paths.output.files')
+            if src_files_dirpath and src_files_dirpath.is_dir():
+                dst_files_dir = self.context.context.get('paths.output.files')
+                self.context.logger.info(f"Copying dir {src_files_dirpath} -> {dst_files_dir}")
+                shutil.copytree(src_files_dirpath, dst_files_dir, dirs_exist_ok=True)
+
+            src_logs_dirpath = get_path_from_src_context('paths.logs', 'logs')
+            for _ext in ('json', 'yaml',):
+                src_exec_report_filepath = src_logs_dirpath / f"execution_report.{_ext}"
+                if src_exec_report_filepath.is_file():
+                    dst_exec_report_filepath = self.context.path_logs / f"nested_pipeline_report.{_ext}"
+                    UtilsFile.create_parent_dirs(dst_exec_report_filepath)
+                    self.context.logger.info(f"Copying file {src_exec_report_filepath} -> {dst_exec_report_filepath}")
+                    shutil.copyfile(src_exec_report_filepath, dst_exec_report_filepath)
+
+        else:
+            self.context.logger.warning(f"Imported context file does not exist: {src_context_filepath}")
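Editor's note: the pre-execution extension above relies on `setdefault`, so explicitly supplied pipeline params always win over values derived from the environment. A minimal standalone sketch of that enrichment pattern, using hypothetical env values and only the standard library:

```python
import os
from urllib.parse import urlparse

# Hypothetical CI environment, as GitLab would expose it
os.environ.setdefault('PROJECT_URL', 'https://gitlab.example.com/group/subgroup/repo')
os.environ.setdefault('PIPELINE_ID', '123456789')

pipeline_params = {'DOBP_UPSTREAM_PIPELINE_ID': 'explicit-override'}

if project_url := os.getenv('PROJECT_URL'):
    parsed = urlparse(project_url)
    # setdefault only fills values the caller did not provide explicitly
    pipeline_params.setdefault('DOBP_UPSTREAM_SERVER_URL', f"{parsed.scheme}://{parsed.netloc}")
    pipeline_params.setdefault('DOBP_UPSTREAM_PROJECT_PATH', parsed.path.strip('/'))

if pipeline_id := os.getenv('PIPELINE_ID'):
    pipeline_params.setdefault('DOBP_UPSTREAM_PIPELINE_ID', pipeline_id)

print(pipeline_params)
# {'DOBP_UPSTREAM_PIPELINE_ID': 'explicit-override',
#  'DOBP_UPSTREAM_SERVER_URL': 'https://gitlab.example.com',
#  'DOBP_UPSTREAM_PROJECT_PATH': 'group/subgroup/repo'}
```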
+++ qubership_pipelines_common_library/v2/gitlab/gitlab_client.py
@@ -0,0 +1,36 @@
+import os, logging
+
+from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+from qubership_pipelines_common_library.v1.gitlab_client import GitlabClient as GitlabClientV1
+
+
+class GitlabClient(GitlabClientV1):
+
+    def trigger_pipeline(self, project_id: str, ref: str, trigger_token: str = None, variables: dict = None, use_ci_job_token: bool = False):
+        """"""
+        if variables is None:
+            variables = {}
+        if use_ci_job_token:
+            trigger_token = os.getenv('CI_JOB_TOKEN')
+        trigger_data = {k: self._cast_to_string(v) for k, v in variables.items()}
+        project = self.gl.projects.get(project_id, lazy=True)
+        pipeline = project.trigger_pipeline(ref, trigger_token, trigger_data)
+        logging.info(f"Pipeline successfully started (via TRIGGER) at {pipeline.web_url}")
+        return ExecutionInfo().with_name(project_id).with_id(pipeline.get_id()) \
+            .with_url(pipeline.web_url).with_params(trigger_data) \
+            .start()
+
+    def create_pipeline(self, project_id: str, ref: str, variables: dict = None):
+        """"""
+        if variables is None:
+            variables = {}
+        create_data = {
+            'ref': ref,
+            'variables': [{'key': k, 'value': self._cast_to_string(v)} for k, v in variables.items()],
+        }
+        project = self.gl.projects.get(project_id, lazy=True)
+        pipeline = project.pipelines.create(create_data)
+        logging.info(f"Pipeline successfully started (via CREATE) at {pipeline.web_url}")
+        return ExecutionInfo().with_name(project_id).with_id(pipeline.get_id()) \
+            .with_url(pipeline.web_url).with_params(create_data) \
+            .start()
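Editor's note: the two v2 client methods map the same `variables` dict onto the two payload shapes visible above: the trigger endpoint takes a flat key/value mapping, while the pipeline-create endpoint takes a list of `{'key': ..., 'value': ...}` entries. A minimal sketch of that transformation, with plain `str` standing in for the library's `_cast_to_string`:

```python
variables = {'KEY1': 'VALUE1', 'RETRIES': 3}

# Trigger API payload: a flat mapping of variable names to string values
trigger_data = {k: str(v) for k, v in variables.items()}
# -> {'KEY1': 'VALUE1', 'RETRIES': '3'}

# Create API payload: ref plus a list of key/value objects
create_data = {
    'ref': 'main',
    'variables': [{'key': k, 'value': str(v)} for k, v in variables.items()],
}
# -> {'ref': 'main', 'variables': [{'key': 'KEY1', 'value': 'VALUE1'},
#                                  {'key': 'RETRIES', 'value': '3'}]}
```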
+++ qubership_pipelines_common_library/v2/gitlab/gitlab_pipeline_data_importer.py
@@ -0,0 +1,26 @@
+import zipfile
+from pathlib import Path
+
+from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
+
+
+class DefaultGitlabPipelineDataImporter(PipelineDataImporter):
+    """
+    Default GitLab implementation:
+    downloads all available workflow run artifacts,
+    extracts them into the context-defined 'paths.output.files' path
+    """
+    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
+        self.context.logger.info("DefaultGitlabPipelineDataImporter - importing pipeline data...")
+        project_id = execution.get_name()
+        pipeline_id = execution.get_id()
+        if job := self.command.gl_client.get_latest_job(project_id, pipeline_id):
+            if artifacts_file := self.command.gl_client.download_job_artifacts(job.pipeline.get('project_id'), job.id, self.context.path_temp):
+                output_path = Path(self.context.input_param_get("paths.output.files"))
+                output_path.mkdir(parents=True, exist_ok=True)
+                with zipfile.ZipFile(artifacts_file) as zf:
+                    self.context.logger.debug(f"Zip contents: {zf.namelist()}")
+                    zf.extractall(output_path)
+        else:
+            self.context.logger.warning(f"Job not found! project_id: {project_id}, pipeline_id: {pipeline_id}")
+++ qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py
@@ -0,0 +1,195 @@
+from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommand
+from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+from qubership_pipelines_common_library.v1.utils.utils_string import UtilsString
+from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
+from qubership_pipelines_common_library.v2.gitlab.gitlab_pipeline_data_importer import DefaultGitlabPipelineDataImporter
+from qubership_pipelines_common_library.v2.gitlab.safe_gitlab_client import SafeGitlabClient
+
+
+class GitlabRunPipeline(ExecutionCommand):
+    """
+    Runs a GitLab pipeline via the Trigger or Create API and optionally imports artifacts.
+
+    This command runs a GitLab pipeline, monitors its execution, and provides
+    options for importing resulting artifacts and custom data processing through extensible
+    importers.
+
+    Input Parameters Structure (this structure is expected inside the "input_params.params" block):
+    ```
+    {
+        "pipeline_path": "path/to/gitlab_project",  # REQUIRED: Full pipeline path (e.g. "group/subgroup/repo")
+        "pipeline_branch": "main",                  # OPTIONAL: Branch to run the pipeline from (default: repo's default branch)
+        "trigger_type": "CREATE_PIPELINE",          # OPTIONAL: Which API will be used to trigger the pipeline (CREATE_PIPELINE or TRIGGER_PIPELINE)
+        "pipeline_params": {                        # OPTIONAL: Input parameters to pass to the pipeline
+            "KEY1": "VALUE1",
+            "KEY2": "VALUE2"
+        },
+        "import_artifacts": false,                  # OPTIONAL: Whether to import pipeline artifacts (default: false)
+        "use_existing_pipeline": 123456789,         # OPTIONAL: Use an existing pipeline ID (or 'latest') instead of starting a new one (debug feature)
+        "timeout_seconds": 1800,                    # OPTIONAL: Maximum wait time for pipeline completion in seconds (default: 1800, 0 for async execution)
+        "wait_seconds": 1,                          # OPTIONAL: Wait interval between status checks in seconds (default: 1)
+        "retry_timeout_seconds": 180,               # OPTIONAL: Timeout for GitLab client initialization and pipeline start retries in seconds (default: 180)
+        "retry_wait_seconds": 1,                    # OPTIONAL: Wait interval between retries in seconds (default: 1)
+        "success_statuses": "SUCCESS,UNSTABLE"      # OPTIONAL: Comma-separated list of acceptable completion statuses (default: SUCCESS)
+    }
+    ```
+
+    Systems Configuration (expected in the "systems.gitlab" block):
+    ```
+    {
+        "url": "https://gitlab.com",               # OPTIONAL: GitLab URL for self-hosted instances (default: https://gitlab.com)
+        "password": "<gitlab_token>",              # REQUIRED: GitLab access token with CI/CD permissions
+        "trigger_token": "<gitlab_trigger_token>"  # OPTIONAL: Special token issued for triggering pipelines. If not provided, CI_JOB_TOKEN will be used
+    }
+    ```
+
+    Output Parameters:
+    - params.build.url: URL to view the pipeline run in GitLab
+    - params.build.id: ID of the executed pipeline
+    - params.build.status: Final status of the pipeline execution
+    - params.build.date: Pipeline start time in ISO format
+    - params.build.duration: Total execution duration in human-readable format
+    - params.build.name: Name of the pipeline execution
+
+    Extension Points:
+    - Custom pipeline data importers can be implemented by extending the PipelineDataImporter interface
+    - A PipelineDataImporter is passed into the command's constructor via the "pipeline_data_importer" arg
+
+    Notes:
+    - Setting timeout_seconds to 0 enables asynchronous execution (the pipeline starts but the command doesn't wait for completion)
+    - For self-hosted GitLab instances, configure "systems.gitlab.url"
+    - Custom data importers receive the command context and can implement advanced processing logic
+    """
+
+    # default timeout values
+    WAIT_TIMEOUT = 1800
+    WAIT_SECONDS = 1
+    RETRY_TIMEOUT_SECONDS = 180
+    RETRY_WAIT_SECONDS = 1
+
+    TRIGGER_TYPE_TRIGGER_PIPELINE = 'TRIGGER_PIPELINE'
+    TRIGGER_TYPE_CREATE_PIPELINE = 'CREATE_PIPELINE'
+    TRIGGER_TYPES = (TRIGGER_TYPE_TRIGGER_PIPELINE, TRIGGER_TYPE_CREATE_PIPELINE,)
+
+    def __init__(self, *args, pipeline_data_importer: PipelineDataImporter = None, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.pipeline_data_importer = pipeline_data_importer or DefaultGitlabPipelineDataImporter()
+        if pipeline_data_importer and not isinstance(pipeline_data_importer, PipelineDataImporter):
+            raise TypeError(f"Class {type(pipeline_data_importer)} must inherit from PipelineDataImporter")
+
+    def _validate(self):
+        names = [
+            "paths.input.params",
+            "paths.output.params",
+            "paths.output.files",
+            "systems.gitlab.password",
+            "params.pipeline_path",
+        ]
+        if not self.context.validate(names):
+            return False
+
+        self.timeout_seconds = max(0, int(self.context.input_param_get("params.timeout_seconds", self.WAIT_TIMEOUT)))
+        self.wait_seconds = max(1, int(self.context.input_param_get("params.wait_seconds", self.WAIT_SECONDS)))
+
+        self.retry_timeout_seconds = int(self.context.input_param_get("params.retry_timeout_seconds", self.RETRY_TIMEOUT_SECONDS))
+        self.retry_wait_seconds = int(self.context.input_param_get("params.retry_wait_seconds", self.RETRY_WAIT_SECONDS))
+
+        if self.timeout_seconds == 0:
+            self.context.logger.info(f"Timeout is set to: {self.timeout_seconds}. This means the pipeline will be started asynchronously")
+
+        self.gitlab_url = self.context.input_param_get("systems.gitlab.url", "https://gitlab.com")
+        self.pipeline_path = self.context.input_param_get("params.pipeline_path").strip("/")
+        self.pipeline_branch = self.context.input_param_get("params.pipeline_branch")
+
+        self.trigger_type = self.context.input_param_get("params.trigger_type", self.TRIGGER_TYPE_CREATE_PIPELINE)
+        if self.trigger_type not in self.TRIGGER_TYPES:
+            self.context.logger.error(f"Unsupported trigger_type: {self.trigger_type}")
+            return False
+        if self.trigger_type == self.TRIGGER_TYPE_TRIGGER_PIPELINE:
+            self.trigger_token = self.context.input_param_get("systems.gitlab.trigger_token")
+
+        self.pipeline_params = self.context.input_param_get("params.pipeline_params", {})
+        if not self.pipeline_params:
+            self.context.logger.info("Pipeline parameters were not specified. This means the pipeline will be started with its default values")
+        if not isinstance(self.pipeline_params, dict):
+            self.context.logger.error("Pipeline parameters were not loaded correctly. There is probably a mistake in the params definition")
+            return False
+        self.import_artifacts = UtilsString.convert_to_bool(self.context.input_param_get("params.import_artifacts", False))
+        self.success_statuses = [x.strip() for x in self.context.input_param_get("params.success_statuses", ExecutionInfo.STATUS_SUCCESS).split(",")]
+        self.use_existing_pipeline = self.context.input_param_get("params.use_existing_pipeline")
+        return True
+
+    def _execute(self):
+        self.context.logger.info("Running gitlab-run-pipeline...")
+
+        self.gl_client = SafeGitlabClient.create_gitlab_client(
+            host=self.gitlab_url,
+            username="",
+            password=self.context.input_param_get("systems.gitlab.password"),
+            retry_timeout_seconds=self.retry_timeout_seconds,
+            retry_wait_seconds=self.retry_wait_seconds
+        )
+        self.context.logger.info("Successfully initialized GitLab client")
+
+        if not self.pipeline_branch:
+            self.pipeline_branch = self.gl_client.get_default_branch(project_id=self.pipeline_path)
+
+        if self.use_existing_pipeline:  # work with an existing pipeline run
+            if self.use_existing_pipeline == 'latest':
+                pipeline_id = self.gl_client.get_latest_pipeline_id(project_id=self.pipeline_path, ref=self.pipeline_branch)
+            else:
+                pipeline_id = self.use_existing_pipeline
+            self.context.logger.info(f"Using existing pipeline: {pipeline_id}")
+            execution = ExecutionInfo().with_name(self.pipeline_path).with_id(pipeline_id).with_status(ExecutionInfo.STATUS_UNKNOWN)
+            execution.start()
+        else:
+            if self.trigger_type == self.TRIGGER_TYPE_CREATE_PIPELINE:
+                execution = self.gl_client.create_pipeline(
+                    project_id=self.pipeline_path,
+                    ref=self.pipeline_branch,
+                    variables=self.pipeline_params,
+                    retry_timeout_seconds=self.retry_timeout_seconds,
+                    retry_wait_seconds=self.retry_wait_seconds
+                )
+            elif self.trigger_type == self.TRIGGER_TYPE_TRIGGER_PIPELINE:
+                execution = self.gl_client.trigger_pipeline(
+                    project_id=self.pipeline_path,
+                    ref=self.pipeline_branch,
+                    trigger_token=self.trigger_token,
+                    variables=self.pipeline_params,
+                    use_ci_job_token=(self.trigger_token is None),
+                    retry_timeout_seconds=self.retry_timeout_seconds,
+                    retry_wait_seconds=self.retry_wait_seconds
+                )
+
+        if execution.get_status() != ExecutionInfo.STATUS_IN_PROGRESS:
+            self._exit(False, f"Pipeline was not started. Status: {execution.get_status()}")
+        elif self.timeout_seconds < 1:
+            self.context.logger.info("Pipeline was started in asynchronous mode. Pipeline status and artifacts will not be processed")
+            return
+
+        self.context.logger.info(f"Pipeline successfully started. Waiting {self.timeout_seconds} seconds for execution to complete")
+        execution = self.gl_client.wait_pipeline_execution(execution=execution, timeout_seconds=self.timeout_seconds,
+                                                           wait_seconds=self.wait_seconds)
+        self.context.logger.info(f"Pipeline status: {execution.get_status()}")
+
+        if self.import_artifacts and self.pipeline_data_importer and execution.get_status() in ExecutionInfo.STATUSES_COMPLETE:
+            try:
+                self.pipeline_data_importer.with_command(self)
+                self.pipeline_data_importer.import_pipeline_data(execution)
+            except Exception as e:
+                self.context.logger.error(f"Exception during pipeline_data_importer execution: {e}")
+
+        self._save_execution_info(execution)
+        if execution.get_status() not in self.success_statuses:
+            self._exit(False, f"Status: {execution.get_status()}")
+
+    def _save_execution_info(self, execution: ExecutionInfo):
+        self.context.logger.info("Writing GitLab pipeline execution status")
+        self.context.output_param_set("params.build.url", execution.get_url())
+        self.context.output_param_set("params.build.id", execution.get_id())
+        self.context.output_param_set("params.build.status", execution.get_status())
+        self.context.output_param_set("params.build.date", execution.get_time_start().isoformat())
+        self.context.output_param_set("params.build.duration", execution.get_duration_str())
+        self.context.output_param_set("params.build.name", execution.get_name())
+        self.context.output_params_save()
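Editor's note: as the Extension Points section above states, a custom importer only needs to subclass `PipelineDataImporter` and implement `import_pipeline_data`; the command binds itself via `with_command` before calling it. A hedged sketch of such a subclass — the attribute access mirrors the importers shown in this diff, and anything beyond that interface is an assumption:

```python
from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter


class LogOnlyPipelineDataImporter(PipelineDataImporter):
    """Illustrative importer: records pipeline coordinates instead of downloading artifacts."""

    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
        # self.context and self.command are bound by the command via with_command(self)
        self.context.logger.info(
            f"Pipeline {execution.get_id()} in project {execution.get_name()} "
            f"finished with status {execution.get_status()}")


# Hypothetical wiring (remaining constructor args follow ExecutionCommand):
# command = GitlabRunPipeline(pipeline_data_importer=LogOnlyPipelineDataImporter())
```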
+++ qubership_pipelines_common_library/v2/gitlab/safe_gitlab_client.py
@@ -0,0 +1,32 @@
+from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+from qubership_pipelines_common_library.v2.gitlab.gitlab_client import GitlabClient
+from qubership_pipelines_common_library.v2.utils.retry_decorator import RetryDecorator
+
+
+class SafeGitlabClient(GitlabClient):
+
+    def __init__(self, host: str, username: str, password: str):
+        super().__init__(host=host, username=username, password=password)
+
+    @classmethod
+    @RetryDecorator(condition_func=lambda result: result is not None)
+    def create_gitlab_client(cls, host: str, username: str, password: str,
+                             retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
+        return cls(host, username, password)
+
+    @RetryDecorator(
+        condition_func=lambda result: result is not None and result.get_status() not in [
+            ExecutionInfo.STATUS_NOT_STARTED, ExecutionInfo.STATUS_UNKNOWN]
+    )
+    def trigger_pipeline(self, project_id: str, ref: str, trigger_token: str = None, variables: dict = None,
+                         use_ci_job_token: bool = False, retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
+        return super().trigger_pipeline(project_id=project_id, ref=ref, trigger_token=trigger_token,
+                                        variables=variables, use_ci_job_token=use_ci_job_token)
+
+    @RetryDecorator(
+        condition_func=lambda result: result is not None and result.get_status() not in [
+            ExecutionInfo.STATUS_NOT_STARTED, ExecutionInfo.STATUS_UNKNOWN]
+    )
+    def create_pipeline(self, project_id: str, ref: str, variables: dict = None,
+                        retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
+        return super().create_pipeline(project_id=project_id, ref=ref, variables=variables)
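Editor's note: `RetryDecorator` itself lives in `v2/utils/retry_decorator.py` (+93 lines, not shown in this diff). Judging from its call sites above, it re-invokes the wrapped callable until `condition_func` accepts the result, honoring `retry_timeout_seconds`/`retry_wait_seconds` from the call's keyword arguments. A rough illustrative sketch of that contract — not the library's actual implementation:

```python
import functools
import time


class RetryDecorator:
    """Illustrative stand-in: retry until condition_func(result) is truthy or the timeout elapses."""

    def __init__(self, condition_func):
        self.condition_func = condition_func

    def __call__(self, func):
        @functools.wraps(func)
        def wrapper(*args, retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1, **kwargs):
            deadline = time.monotonic() + retry_timeout_seconds
            while True:
                try:
                    result = func(*args, **kwargs)
                    if self.condition_func(result):
                        return result
                except Exception:
                    # keep retrying on exceptions until the deadline, then re-raise
                    if time.monotonic() >= deadline:
                        raise
                    result = None
                if time.monotonic() >= deadline:
                    return result
                time.sleep(retry_wait_seconds)
        return wrapper
```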
+++ qubership_pipelines_common_library/v2/podman/podman_command.md
@@ -0,0 +1,172 @@
+# Podman Run Image Command
+
+Executes a container using the `podman run` command.
+
+This command supports running containers with configurable execution parameters, environment variable management, file mounting, and output extraction.
+
+## Input Parameters
+
+This structure is expected inside the `input_params.params` block:
+
+```json
+{
+  "image": "docker.io/library/hello-world:latest",
+  "command": "python -m pipelines_declarative_executor run --pipeline_dir=\"/WORK/EXEC_DIR\"",
+  "execution_config": {
+    "working_dir": "/some/dir/inside/container",
+    "timeout": "600",
+    "operations_timeout": "15",
+    "remove_container": true,
+    "save_stdout_to_logs": true,
+    "save_stdout_to_files": true,
+    "save_stdout_to_params": false,
+    "expected_return_codes": "0,125",
+    "additional_run_flags": "--cgroups=disabled"
+  },
+  "before_script": {
+    "mounts": {
+      "output_files": "/WORK",
+      "prepared_data": "/CONFIGS"
+    },
+    "env_vars": {
+      "explicit": {
+        "PIPELINES_DECLARATIVE_EXECUTOR_ENCRYPT_OUTPUT_SECURE_PARAMS": false
+      },
+      "env_files": [
+        "../CONFIGS/sample.env"
+      ],
+      "pass_via_file": {
+        "SOMETHING_VERY_SECURE": "PASSWORD"
+      },
+      "host_prefixes": [
+        "SOME_PREFIX_*"
+      ]
+    }
+  },
+  "after_script": {
+    "copy_files_to_host": {
+      "output_files/report.json": "/WORK/EXEC_DIR/pipeline_state/pipeline_ui_view.json",
+      "output_files/pipeline_state": "/WORK/EXEC_DIR/pipeline_state"
+    },
+    "extract_params_from_files": {
+      "SOME_FILE_IN_CONTAINER": "SECTION_NAME_IN_PARAMS_WHERE_IT_WILL_BE_STORED"
+    }
+  }
+}
+```
+
+## Parameter Reference
+
+All params are referenced here without their top-level "params" section.
+
+An actual `input_params.yaml` should look like this sample:
+
+```yaml
+kind: AtlasModuleParamsInsecure
+apiVersion: v1
+params:
+  image: docker.io/library/hello-world:latest
+  command: ....
+  execution_config:
+    timeout: 300
+    save_stdout_to_params: True
+  before_script:
+    mounts:
+      output_files: /WORK
+```
+
+You can also pass required parameters via CLI arguments:
+
+`qubership_cli_samples podman-run -p params.image=docker.io/my_image -p params.execution_config.timeout=300`
+
+### Required Parameters
+
+- **`image`** (string): Container image to run
+
+### Optional Parameters
+
+#### Execution Configuration
+
+- **`command`** (string): Command to execute in the container
+- **`execution_config`** (object): Container execution settings
+  - **`working_dir`** (string): Working directory inside the container
+  - **`timeout`** (float/string): Maximum execution time in seconds (e.g. "60", "36.6", etc.)
+  - **`operations_timeout`** (float/string): Timeout for operations like file copying, in seconds
+  - **`remove_container`** (boolean): Whether to remove the container after execution
+  - **`save_stdout_to_logs`** (boolean): Save container stdout to execution logs
+  - **`save_stdout_to_files`** (boolean): Save container stdout to output files
+  - **`save_stdout_to_params`** (boolean): Save container stdout to output parameters
+  - **`expected_return_codes`** (string): Comma-separated list of acceptable exit codes
+  - **`additional_run_flags`** (string): Flags that will be appended to the `podman run` command
+
+#### Before Script Configuration
+
+- **`before_script`** (object): Pre-execution configuration
+  - **`mounts`** (object): Filesystem mounts from host to container (`host_path: container_path`)
+  - **`env_vars`** (object): Environment variable configuration. Podman applies these vars with a specific priority (lowest to highest): file vars, direct vars, host vars.
+    - **`explicit`** (object): Direct environment variable assignment
+    - **`env_files`** (array): Environment files on the host to load and pass into the container
+    - **`pass_via_file`** (object): Sensitive vars passed via a temp file
+    - **`host_prefixes`** (array): Host environment variable prefixes to pass through (use `"*"` to pass everything from the host)
+
+#### After Script Configuration
+
+- **`after_script`** (object): Post-execution operations
+  - **`copy_files_to_host`** (object): Copy files from container to host after execution (`host_path: container_path`)
+  - **`extract_params_from_files`** (object): Extract parameters from container files (supports JSON, YAML, and ENV files)
+
+## Output Parameters
+
+- `params.execution_time`: Total execution time in seconds
+- `params.return_code`: Container exit code
+- `params.stdout`: Container stdout (if `save_stdout_to_params` is enabled)
+- `params.stderr`: Container stderr (if `save_stdout_to_params` is enabled)
+- `params.extracted_output.*`: Extracted parameters from files (if `extract_params_from_files` is configured)
+
+## Notes
+
+- The command automatically handles the container lifecycle, including start, execution, and cleanup
+- All host paths (including mount paths) are resolved relative to the context directory
+
+## Adding the podman executable to your image
+
+To install and use `podman run` in your Docker image (`python:3.11-slim` was used as the base image, to run `Pipelines Declarative Executor`) inside the usual CIs (GitHub/GitLab), the following approaches worked:
+
+### GitHub
+
+1. `apt-get install podman nftables fuse-overlayfs`
+2. ```bash
+   RUN cat <<EOF > /etc/containers/storage.conf
+   [storage]
+   driver = "overlay"
+   runroot = "/run/containers/storage"
+   graphroot = "/var/lib/containers/storage"
+   [storage.options]
+   mount_program = "/usr/bin/fuse-overlayfs"
+   EOF
+   ```
+3. In your workflow file, you need to pass the `--privileged` option:
+   ```
+   jobs:
+     execute-pipeline:
+       runs-on: ubuntu-latest
+       container:
+         image: ghcr.io/netcracker/qubership-pipelines-declarative-executor:dev_podman_engine
+         options: --privileged
+   ```
+4. Run the `PodmanRunImage` command with additional flags: `"additional_run_flags": "--cgroups=disabled"`
+
+### GitLab
+
+1. `apt-get install podman nftables slirp4netns fuse-overlayfs`
+2. ```bash
+   RUN cat <<EOF > /etc/containers/storage.conf
+   [storage]
+   driver = "overlay"
+   runroot = "/run/containers/storage"
+   graphroot = "/var/lib/containers/storage"
+   [storage.options]
+   mount_program = "/usr/bin/fuse-overlayfs"
+   EOF
+   ```
+3. Run the `PodmanRunImage` command with additional flags: `"additional_run_flags": "--cgroups=disabled --network slirp4netns"`