qubership-pipelines-common-library 0.2.6__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66) hide show
  1. qubership_pipelines_common_library/__init__.py +1 -1
  2. qubership_pipelines_common_library/v1/artifactory_client.py +1 -1
  3. qubership_pipelines_common_library/v1/execution/exec_command.py +63 -2
  4. qubership_pipelines_common_library/v1/execution/exec_context.py +6 -6
  5. qubership_pipelines_common_library/v1/execution/exec_context_file.py +1 -1
  6. qubership_pipelines_common_library/v1/execution/exec_info.py +4 -0
  7. qubership_pipelines_common_library/v1/execution/exec_logger.py +7 -5
  8. qubership_pipelines_common_library/v1/github_client.py +10 -1
  9. qubership_pipelines_common_library/v1/gitlab_client.py +175 -11
  10. qubership_pipelines_common_library/v1/jenkins_client.py +55 -18
  11. qubership_pipelines_common_library/v1/maven_client.py +2 -2
  12. qubership_pipelines_common_library/v1/minio_client.py +1 -1
  13. qubership_pipelines_common_library/v1/utils/rest.py +1 -1
  14. qubership_pipelines_common_library/v1/utils/utils.py +1 -1
  15. qubership_pipelines_common_library/v1/utils/utils_cli.py +43 -9
  16. qubership_pipelines_common_library/v1/utils/utils_dictionary.py +1 -1
  17. qubership_pipelines_common_library/v1/utils/utils_file.py +17 -0
  18. qubership_pipelines_common_library/v1/utils/utils_logging.py +53 -0
  19. qubership_pipelines_common_library/v2/__init__.py +0 -0
  20. qubership_pipelines_common_library/v2/artifacts_finder/__init__.py +0 -0
  21. qubership_pipelines_common_library/v2/artifacts_finder/artifact_finder.py +56 -0
  22. qubership_pipelines_common_library/v2/artifacts_finder/auth/__init__.py +0 -0
  23. qubership_pipelines_common_library/v2/artifacts_finder/auth/aws_credentials.py +106 -0
  24. qubership_pipelines_common_library/v2/artifacts_finder/auth/azure_credentials.py +72 -0
  25. qubership_pipelines_common_library/v2/artifacts_finder/auth/gcp_credentials.py +88 -0
  26. qubership_pipelines_common_library/v2/artifacts_finder/model/__init__.py +0 -0
  27. qubership_pipelines_common_library/v2/artifacts_finder/model/artifact.py +20 -0
  28. qubership_pipelines_common_library/v2/artifacts_finder/model/artifact_provider.py +35 -0
  29. qubership_pipelines_common_library/v2/artifacts_finder/model/credentials.py +16 -0
  30. qubership_pipelines_common_library/v2/artifacts_finder/model/credentials_provider.py +16 -0
  31. qubership_pipelines_common_library/v2/artifacts_finder/providers/__init__.py +0 -0
  32. qubership_pipelines_common_library/v2/artifacts_finder/providers/artifactory.py +52 -0
  33. qubership_pipelines_common_library/v2/artifacts_finder/providers/aws_code_artifact.py +79 -0
  34. qubership_pipelines_common_library/v2/artifacts_finder/providers/azure_artifacts.py +98 -0
  35. qubership_pipelines_common_library/v2/artifacts_finder/providers/gcp_artifact_registry.py +50 -0
  36. qubership_pipelines_common_library/v2/artifacts_finder/providers/nexus.py +41 -0
  37. qubership_pipelines_common_library/v2/extensions/pipeline_data_importer.py +24 -0
  38. qubership_pipelines_common_library/v2/github/__init__.py +0 -0
  39. qubership_pipelines_common_library/v2/github/github_client.py +5 -0
  40. qubership_pipelines_common_library/v2/github/github_pipeline_data_importer.py +21 -0
  41. qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py +175 -0
  42. qubership_pipelines_common_library/v2/github/safe_github_client.py +24 -0
  43. qubership_pipelines_common_library/v2/gitlab/__init__.py +0 -0
  44. qubership_pipelines_common_library/v2/gitlab/custom_extensions.py +101 -0
  45. qubership_pipelines_common_library/v2/gitlab/gitlab_client.py +36 -0
  46. qubership_pipelines_common_library/v2/gitlab/gitlab_pipeline_data_importer.py +26 -0
  47. qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py +195 -0
  48. qubership_pipelines_common_library/v2/gitlab/safe_gitlab_client.py +32 -0
  49. qubership_pipelines_common_library/v2/jenkins/__init__.py +0 -0
  50. qubership_pipelines_common_library/v2/jenkins/custom_extensions.py +63 -0
  51. qubership_pipelines_common_library/v2/jenkins/jenkins_client.py +5 -0
  52. qubership_pipelines_common_library/v2/jenkins/jenkins_pipeline_data_importer.py +31 -0
  53. qubership_pipelines_common_library/v2/jenkins/jenkins_run_pipeline_command.py +165 -0
  54. qubership_pipelines_common_library/v2/jenkins/safe_jenkins_client.py +14 -0
  55. qubership_pipelines_common_library/v2/podman/__init__.py +0 -0
  56. qubership_pipelines_common_library/v2/podman/podman_command.md +178 -0
  57. qubership_pipelines_common_library/v2/podman/podman_command.py +311 -0
  58. qubership_pipelines_common_library/v2/sops/sops_client.py +116 -0
  59. qubership_pipelines_common_library/v2/utils/crypto_utils.py +48 -0
  60. qubership_pipelines_common_library/v2/utils/extension_utils.py +22 -0
  61. qubership_pipelines_common_library/v2/utils/retry_decorator.py +93 -0
  62. {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/METADATA +5 -3
  63. qubership_pipelines_common_library-2.0.1.dist-info/RECORD +76 -0
  64. qubership_pipelines_common_library-0.2.6.dist-info/RECORD +0 -32
  65. {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/WHEEL +0 -0
  66. {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,165 @@
1
+ from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommand
2
+ from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
3
+ from qubership_pipelines_common_library.v1.utils.utils_string import UtilsString
4
+ from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
5
+ from qubership_pipelines_common_library.v2.jenkins.jenkins_pipeline_data_importer import DefaultJenkinsPipelineDataImporter
6
+ from qubership_pipelines_common_library.v2.jenkins.safe_jenkins_client import SafeJenkinsClient
7
+
8
+
9
class JenkinsRunPipeline(ExecutionCommand):
    """
    Runs Jenkins Pipeline and optionally imports artifacts.

    This command runs Jenkins Pipeline, monitors its execution, and provides
    options for importing resulting artifacts and custom data processing through extensible
    importers.

    Input Parameters Structure (this structure is expected inside "input_params.params" block):
    ```
    {
        "pipeline_path": "TENANT-NAME/path/to/job", # REQUIRED: Full pipeline path (e.g. "TENANT/folder/job")
        "pipeline_params": {                        # OPTIONAL: Input parameters to pass to the pipeline
            "KEY1": "VALUE1",                       # Side-note: if you want to run your parametrized job with default parameters,
            "KEY2": "VALUE2"                        # you still need to pass some fake params (they will be ignored by Jenkins), e.g. "__fake_key":"fake_value",
        },                                          # Otherwise, if this dict is empty - endpoint for non-parametrized jobs will be triggered
        "import_artifacts": true,                   # OPTIONAL: Whether to import pipeline artifacts (default: true)
        "use_existing_pipeline": 123456789,         # OPTIONAL: Use existing pipeline ID instead of starting new one (debug feature)
        "timeout_seconds": 1800,                    # OPTIONAL: Maximum wait time for pipeline completion in seconds (default: 1800, 0 for async execution)
        "wait_seconds": 1,                          # OPTIONAL: Wait interval between status checks in seconds (default: 1)
        "retry_timeout_seconds": 180,               # OPTIONAL: Timeout for Jenkins client initialization and pipeline start retries in seconds (default: 180)
        "retry_wait_seconds": 1,                    # OPTIONAL: Wait interval between retries in seconds (default: 1)
        "is_dry_run": false,                        # OPTIONAL: When truthy, adds "IS_DRY_RUN": True to pipeline_params (default: false)
        "success_statuses": "SUCCESS,UNSTABLE"      # OPTIONAL: Comma-separated list of acceptable completion statuses (default: SUCCESS)
    }
    ```

    Systems Configuration (expected in "systems.jenkins" block):
    ```
    {
        "url": "https://jenkins.example.com",       # REQUIRED: Jenkins instance URL
        "username": "<jenkins_user>",               # REQUIRED: Jenkins user
        "password": "<jenkins_token>"               # REQUIRED: Jenkins password or token with job-triggering permissions
    }
    ```

    Output Parameters:
    - params.build.url: URL to view the pipeline run in Jenkins
    - params.build.id: ID of the executed pipeline
    - params.build.status: Final status of the pipeline execution
    - params.build.date: Workflow start time in ISO format
    - params.build.duration: Total execution duration in human-readable format
    - params.build.name: Name of the pipeline execution

    Extension Points:
    - Custom pipeline data importers can be implemented by extending PipelineDataImporter interface
    - PipelineDataImporter is passed into constructor of command via "pipeline_data_importer" arg

    Notes:
    - Setting timeout_seconds to 0 enables asynchronous execution (workflow starts but command doesn't wait for completion, and won't fetch build id)
    """

    # default timeout values
    WAIT_TIMEOUT = 1800
    WAIT_SECONDS = 1
    RETRY_TIMEOUT_SECONDS = 180
    RETRY_WAIT_SECONDS = 1

    # Jenkins job parameter name that marks a dry-run execution
    PARAM_NAME_IS_DRY_RUN = "IS_DRY_RUN"

    def __init__(self, *args, pipeline_data_importer: PipelineDataImporter = None, **kwargs):
        super().__init__(*args, **kwargs)
        # Fail fast on an invalid importer before storing anything on the instance.
        if pipeline_data_importer and not isinstance(pipeline_data_importer, PipelineDataImporter):
            raise TypeError(f"Class {type(pipeline_data_importer)} must inherit from PipelineDataImporter")
        # Fall back to the default Jenkins importer when none was supplied.
        self.pipeline_data_importer = pipeline_data_importer or DefaultJenkinsPipelineDataImporter()

    def _validate(self):
        """Validate required context params and cache normalized settings on the instance.

        Returns True when all required params are present and well-formed, False otherwise.
        """
        names = [
            "paths.input.params",
            "paths.output.params",
            "paths.output.files",
            "systems.jenkins.url",
            "systems.jenkins.username",
            "systems.jenkins.password",
            "params.pipeline_path",
        ]
        if not self.context.validate(names):
            return False

        # Clamp: timeout may be 0 (async mode), wait interval is at least 1 second.
        self.timeout_seconds = max(0, int(self.context.input_param_get("params.timeout_seconds", self.WAIT_TIMEOUT)))
        self.wait_seconds = max(1, int(self.context.input_param_get("params.wait_seconds", self.WAIT_SECONDS)))

        self.retry_timeout_seconds = int(self.context.input_param_get("params.retry_timeout_seconds", self.RETRY_TIMEOUT_SECONDS))
        self.retry_wait_seconds = int(self.context.input_param_get("params.retry_wait_seconds", self.RETRY_WAIT_SECONDS))

        if self.timeout_seconds == 0:
            self.context.logger.info(f"Timeout is set to: {self.timeout_seconds}. This means that the pipeline will be started asynchronously")

        self.pipeline_path = self.context.input_param_get("params.pipeline_path").strip("/")
        self.pipeline_params = self.context.input_param_get("params.pipeline_params", {})
        if not self.pipeline_params:
            self.context.logger.info("Pipeline parameters were not specified. This means that pipeline will be started with its default values")
        if not isinstance(self.pipeline_params, dict):
            self.context.logger.error("Pipeline parameters were not loaded correctly. Probably mistake in the params definition")
            return False

        self.success_statuses = [x.strip() for x in self.context.input_param_get("params.success_statuses", ExecutionInfo.STATUS_SUCCESS).split(",")]
        if UtilsString.convert_to_bool(self.context.input_param_get("params.is_dry_run", False)):
            self.pipeline_params[self.PARAM_NAME_IS_DRY_RUN] = True
        self.import_artifacts = UtilsString.convert_to_bool(self.context.input_param_get("params.import_artifacts", True))
        self.use_existing_pipeline = self.context.input_param_get("params.use_existing_pipeline")
        return True

    def _execute(self):
        """Start (or attach to) the Jenkins pipeline, wait for completion, import data, save results."""
        self.context.logger.info("Running jenkins-run-pipeline...")
        self.jenkins_client = SafeJenkinsClient.create_jenkins_client(
            self.context.input_param_get("systems.jenkins.url"),
            self.context.input_param_get("systems.jenkins.username"),
            self.context.input_param_get("systems.jenkins.password"),
            retry_timeout_seconds=self.retry_timeout_seconds,
            retry_wait_seconds=self.retry_wait_seconds
        )
        self.context.logger.info("Successfully initialized Jenkins client")

        if self.use_existing_pipeline:  # work with existing job (debug feature)
            self.context.logger.info(f"Using existing job {self.pipeline_path} - {self.use_existing_pipeline}")
            execution = (ExecutionInfo().with_params(self.pipeline_params)
                         .with_name(self.pipeline_path).with_id(int(self.use_existing_pipeline))
                         .with_status(ExecutionInfo.STATUS_UNKNOWN))
            execution.start()
        else:
            execution = self.jenkins_client.run_pipeline(
                self.pipeline_path, self.pipeline_params,
                timeout_seconds=self.timeout_seconds,
                wait_seconds=self.wait_seconds
            )

        self.execution_info = execution
        if execution.get_status() != ExecutionInfo.STATUS_IN_PROGRESS:
            self._exit(False, f"Pipeline was not started. Status {execution.get_status()}")
        elif self.timeout_seconds < 1:
            # Async mode: do not wait, do not fetch status/artifacts, do not save execution info.
            self.context.logger.info("Pipeline was started in asynchronous mode. Pipeline status and artifacts will not be processed")
            return

        self.context.logger.info(f"Pipeline successfully started. Waiting {self.timeout_seconds} seconds for execution to complete")
        execution = self.jenkins_client.wait_pipeline_execution(execution, self.timeout_seconds, self.wait_seconds)
        self.context.logger.info(f"Pipeline status: {execution.get_status()}\nPipeline available at {execution.get_url()}")

        # Importer failures are logged but never fail the command itself.
        if self.import_artifacts and self.pipeline_data_importer and execution.get_status() in ExecutionInfo.STATUSES_COMPLETE:
            try:
                self.pipeline_data_importer.with_command(self)
                self.pipeline_data_importer.import_pipeline_data(execution)
            except Exception as e:
                self.context.logger.error(f"Exception during pipeline_data_importer execution: {e}")

        self._save_execution_info(execution)
        if execution.get_status() not in self.success_statuses:
            self._exit(False, f"Status: {execution.get_status()}")

    def _save_execution_info(self, execution: ExecutionInfo):
        """Write the final build info into the command's output params."""
        self.context.logger.info("Writing jenkins job execution status")
        self.context.output_param_set("params.build.url", execution.get_url())
        self.context.output_param_set("params.build.id", execution.get_id())
        self.context.output_param_set("params.build.status", execution.get_status())
        self.context.output_param_set("params.build.date", execution.get_time_start().isoformat())
        self.context.output_param_set("params.build.duration", execution.get_duration_str())
        self.context.output_param_set("params.build.name", execution.get_name())
        self.context.output_params_save()
@@ -0,0 +1,14 @@
1
+ from qubership_pipelines_common_library.v2.jenkins.jenkins_client import JenkinsClient
2
+ from qubership_pipelines_common_library.v2.utils.retry_decorator import RetryDecorator
3
+
4
+
5
class SafeJenkinsClient(JenkinsClient):
    """JenkinsClient whose factory method retries client creation until it yields a client.

    Use `create_jenkins_client` instead of the constructor to get automatic retries.
    """

    def __init__(self, host: str, user: str, password: str):
        # Plain delegation; kept explicit to document the expected credential arguments.
        super().__init__(host, user, password)

    @classmethod
    @RetryDecorator(condition_func=lambda result: result is not None)
    def create_jenkins_client(cls, host: str, user: str, password: str,
                              retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
        # Factory wrapped in RetryDecorator: retried until a non-None client is returned.
        # NOTE(review): retry_timeout_seconds / retry_wait_seconds are not used in this body;
        # presumably RetryDecorator picks them up from the call kwargs — TODO confirm.
        return cls(host, user, password)
@@ -0,0 +1,178 @@
1
+ # Podman Run Image Command
2
+
3
+ Executes a container using `podman run` command.
4
+
5
+ This command supports running containers with configurable execution parameters, environment variable management, file mounting, and output extraction.
6
+
7
+ ## Input Parameters
8
+
9
+ This structure is expected inside the `input_params.params` block:
10
+
11
+ ```json
12
+ {
13
+ "image": "docker.io/library/hello-world:latest",
14
+ "command": "python -m pipelines_declarative_executor run --pipeline_dir=\"/WORK/EXEC_DIR\"",
15
+ "execution_config": {
16
+ "working_dir": "/some/dir/inside/container",
17
+ "timeout": "600",
18
+ "operations_timeout": "15",
19
+ "remove_container": true,
20
+ "save_stdout_to_logs": true,
21
+ "save_stdout_to_files": true,
22
+ "save_stdout_to_params": false,
23
+ "expected_return_codes": "0,125",
24
+ "additional_run_flags": "--cgroups=disabled"
25
+ },
26
+ "before_script": {
27
+ "mounts": {
28
+ "output_files": "/WORK",
29
+ "prepared_data": "/CONFIGS"
30
+ },
31
+ "env_vars": {
32
+ "explicit": {
33
+ "PIPELINES_DECLARATIVE_EXECUTOR_ENCRYPT_OUTPUT_SECURE_PARAMS": false
34
+ },
35
+ "env_files": [
36
+ "../CONFIGS/sample.env"
37
+ ],
38
+ "pass_via_file": {
39
+ "SOMETHING_VERY_SECURE": "PASSWORD"
40
+ },
41
+ "host_prefixes": [
42
+ "SOME_PREFIX_*"
43
+ ]
44
+ }
45
+ },
46
+ "after_script": {
47
+ "copy_files_to_host": {
48
+ "output_files/report.json": "/WORK/EXEC_DIR/pipeline_state/pipeline_ui_view.json",
49
+ "output_files/pipeline_state": "/WORK/EXEC_DIR/pipeline_state"
50
+ },
51
+ "extract_params_from_files": {
52
+ "SOME_FILE_IN_CONTAINER": "SECTION_NAME_IN_PARAMS_WHERE_IT_WILL_BE_STORED"
53
+ }
54
+ }
55
+ }
56
+ ```
57
+
58
+ ## Parameter Reference
59
+
60
+ All params are referenced here without their top-level "params" section.
61
+
62
+ Actual `input_params.yaml` should look like this sample:
63
+
64
+ ```yaml
65
+ kind: AtlasModuleParamsInsecure
66
+ apiVersion: v1
67
+ params:
68
+ image: docker.io/library/hello-world:latest
69
+ command: ....
70
+ execution_config:
71
+ timeout: 300
72
+ save_stdout_to_params: True
73
+ before_script:
74
+ mounts:
75
+ output_files: /WORK
76
+ ```
77
+
78
+ Or you can also pass required parameters via CLI arguments:
79
+
80
+ `qubership_cli_samples podman-run -p params.image=docker.io/my_image -p params.execution_config.timeout=300`
81
+
82
+ ### Required Parameters
83
+
84
+ - **`image`** (string): Container image to run
85
+
86
+ ### Optional Parameters
87
+
88
+ #### Execution Configuration
89
+
90
+ - **`command`** (string): Command to execute in container
91
+ - **`execution_config`** (object): Container execution settings
92
+ - **`working_dir`** (string): Working directory inside container
93
+ - **`timeout`** (float/string): Maximum execution time in seconds (e.g. "60", "36.6", etc.)
94
+ - **`operations_timeout`** (float/string): Timeout for operations like file copying in seconds
95
+ - **`remove_container`** (boolean): Whether to remove container after execution
96
+ - **`save_stdout_to_logs`** (boolean): Save container stdout to execution logs
97
+ - **`save_stdout_to_files`** (boolean): Save container stdout to output files
98
+ - **`save_stdout_to_params`** (boolean): Save container stdout to output parameters
99
+ - **`expected_return_codes`** (string): Comma-separated list of acceptable exit codes
100
+ - **`additional_run_flags`** (string): Flags that will be added to "podman run" command
101
+
102
+ #### Before Script Configuration
103
+
104
+ - **`before_script`** (object): Pre-execution configuration
105
+ - **`mounts`** (object): Filesystem mounts from host to container (`host_path: container_path`)
106
+ - **`env_vars`** (object): Environment variable configuration. There's podman-specific priority of these vars (lower to highest): file vars, direct vars, host vars.
107
+ - **`explicit`** (object): Direct environment variable assignment
108
+ - **`env_files`** (array): Environment files on host to load and pass into container
109
+ - **`pass_via_file`** (object): Sensitive vars passed via temp file
110
+ - **`host_prefixes`** (array): Host environment variable prefixes to pass through (can use `"*"` to pass everything from host)
111
+
112
+ #### After Script Configuration
113
+
114
+ - **`after_script`** (object): Post-execution operations
115
+ - **`copy_files_to_host`** (object): Copy files from container to host after execution (`host_path: container_path`)
116
+ - **`extract_params_from_files`** (object): Extract parameters from container files (supports JSON, YAML, and ENV files)
117
+
118
+ ## Output Parameters
119
+
120
+ - `params.execution_time`: Total execution time in seconds
121
+ - `params.return_code`: Container exit code
122
+ - `params.stdout`: Container stdout (if `save_stdout_to_params` enabled)
123
+ - `params.stderr`: Container stderr (if `save_stdout_to_params` enabled)
124
+ - `params.extracted_output.*`: Extracted parameters from files (if `extract_params_from_files` configured)
125
+
126
+ ## Notes
127
+
128
+ - The command automatically handles container lifecycle including start, execution, and cleanup
129
+ - All host-paths (including mount paths) are resolved relative to context directory
130
+
131
+ ## Adding podman executable in your image
132
+
133
+ To install and use `podman run` in your Docker image (`python:3.11-slim` was used as a base image to run `Pipelines Declarative Executor`) inside common CIs (GitHub/GitLab), the following approaches worked:
134
+
135
+ ### GitHub
136
+
137
+ 1. `apt-get install podman nftables fuse-overlayfs`
138
+
139
+ 2. ```bash
140
+ RUN cat <<EOF > /etc/containers/storage.conf
141
+ [storage]
142
+ driver = "overlay"
143
+ runroot = "/run/containers/storage"
144
+ graphroot = "/var/lib/containers/storage"
145
+ [storage.options]
146
+ mount_program = "/usr/bin/fuse-overlayfs"
147
+ EOF
148
+ ```
149
+
150
+ 3. In your workflow file, need to pass `--privileged` option
151
+
152
+ ```yaml
153
+ jobs:
154
+ execute-pipeline:
155
+ runs-on: ubuntu-latest
156
+ container:
157
+ image: ghcr.io/netcracker/qubership-pipelines-declarative-executor:dev_podman_engine
158
+ options: --privileged
159
+ ```
160
+
161
+ 4. Need to run `PodmanRunImage` command with additional flags: `"additional_run_flags": "--cgroups=disabled"`
162
+
163
+ ### GitLab
164
+
165
+ 1. `apt-get install podman nftables slirp4netns fuse-overlayfs`
166
+
167
+ 2. ```bash
168
+ RUN cat <<EOF > /etc/containers/storage.conf
169
+ [storage]
170
+ driver = "overlay"
171
+ runroot = "/run/containers/storage"
172
+ graphroot = "/var/lib/containers/storage"
173
+ [storage.options]
174
+ mount_program = "/usr/bin/fuse-overlayfs"
175
+ EOF
176
+ ```
177
+
178
+ 3. Need to run `PodmanRunImage` command with additional flags: `"additional_run_flags": "--cgroups=disabled --network slirp4netns"`
@@ -0,0 +1,311 @@
1
+ import os, subprocess, time, uuid
2
+
3
+ from pathlib import Path
4
+ from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommand
5
+ from qubership_pipelines_common_library.v1.utils.utils_string import UtilsString
6
+
7
+
8
+ class PodmanRunImage(ExecutionCommand):
9
+ """
10
+ Executes a container using "podman run" command.
11
+
12
+ This command supports running containers with configurable execution parameters,
13
+ environment variable management, file mounting, and output extraction.
14
+
15
+ Input Parameters Structure (this structure is expected inside "input_params.params" block):
16
+ ```
17
+ {
18
+ "image": "docker.io/library/hello-world:latest", # REQUIRED: Container image to run
19
+ "command": "python -m pipelines_declarative_executor run --pipeline_dir=\"/WORK/EXEC_DIR\"", # OPTIONAL: Command to execute in container
20
+ "execution_config": { # ALL OF THESE ARE OPTIONAL
21
+ "working_dir": "/some/dir/inside/container", # Working directory inside container
22
+ "timeout": "600", # Maximum execution time in seconds
23
+ "operations_timeout": "15", # Timeout for operations like file copying
24
+ "remove_container": True, # Whether to remove container after execution
25
+ "save_stdout_to_logs": True, # Save container stdout to execution logs
26
+ "save_stdout_to_files": True, # Save container stdout to output files
27
+ "save_stdout_to_params": False, # Save container stdout to output parameters
28
+ "expected_return_codes": "0,125", # Comma-separated list of acceptable exit codes
29
+ "additional_run_flags": "--cgroups=disabled", # Optional string of flags that will be added to "podman run" command
30
+ },
31
+ "before_script": {
32
+ "mounts": { # Filesystem mounts, "host_path: container_path"
33
+ "output_files": "/WORK",
34
+ "prepared_data": "/CONFIGS"
35
+ },
36
+ "env_vars": {
37
+ "explicit": { # Direct environment variable assignment
38
+ "PIPELINES_DECLARATIVE_EXECUTOR_ENCRYPT_OUTPUT_SECURE_PARAMS": False
39
+ },
40
+ "env_files": [ # Environment files on host to load and pass into container
41
+ "../CONFIGS/sample.env"
42
+ ],
43
+ "pass_via_file": { # Sensitive vars passed via temp file
44
+ "SOMETHING_VERY_SECURE": "PASSWORD"
45
+ },
46
+ "host_prefixes": [ # Host environment variable prefixes to pass through. Can use "*" to pass everything from host.
47
+ "SOME_PREFIX_*"
48
+ ]
49
+ }
50
+ },
51
+ "after_script": {
52
+ "copy_files_to_host": { # Copy files from container to host after execution, "host_path: container_path"
53
+ "output_files/report.json": "/WORK/EXEC_DIR/pipeline_state/pipeline_ui_view.json",
54
+ "output_files/pipeline_state": "/WORK/EXEC_DIR/pipeline_state",
55
+ },
56
+ "extract_params_from_files": { # OPTIONAL: Extract parameters from container files. Supports JSON, YAML, and ENV files
57
+ "SOME_FILE_IN_CONTAINER": "SECTION_NAME_IN_PARAMS_WHERE_IT_WILL_BE_STORED",
58
+ }
59
+ }
60
+ }
61
+ ```
62
+
63
+ Output Parameters:
64
+ - params.execution_time: Total execution time in seconds
65
+ - params.return_code: Container exit code
66
+ - params.stdout: Container stdout (if save_stdout_to_params enabled)
67
+ - params.stderr: Container stderr (if save_stdout_to_params enabled)
68
+ - params.extracted_output.*: Extracted parameters from files (if extract_params_from_files configured)
69
+
70
+ Notes:
71
+ - The command automatically handles container lifecycle including start, execution, and cleanup
72
+ - All host-paths (including mount paths) are resolved relative to context directory.
73
+ """
74
+
75
    def _validate(self):
        """Validate required params, verify podman is installed, and cache all settings on self.

        Returns True when validation succeeds, False otherwise (errors are logged).
        """
        names = [
            "paths.input.params",
            "paths.output.params",
            "paths.output.files",
            "params.image",
        ]
        if not self.context.validate(names):
            return False

        # Check if podman is available
        try:
            subprocess.run(["podman", "--version"], capture_output=True, check=True)
        except (subprocess.CalledProcessError, FileNotFoundError):
            self.context.logger.error("Podman is not available on this system. Please install podman to use this command.")
            return False

        # Setup defaults & convert values
        self.image = self.context.input_param_get("params.image")
        self.command = self.context.input_param_get("params.command")

        # execution_config (timeouts accept numeric strings, booleans accept string forms)
        self.working_dir = self.context.input_param_get("params.execution_config.working_dir")
        self.timeout = float(self.context.input_param_get("params.execution_config.timeout", 60))
        self.operations_timeout = float(self.context.input_param_get("params.execution_config.operations_timeout", 15))
        self.remove_container = UtilsString.convert_to_bool(self.context.input_param_get("params.execution_config.remove_container", True))
        self.save_stdout_to_logs = UtilsString.convert_to_bool(self.context.input_param_get("params.execution_config.save_stdout_to_logs", True))
        self.save_stdout_to_files = UtilsString.convert_to_bool(self.context.input_param_get("params.execution_config.save_stdout_to_files", True))
        self.save_stdout_to_params = UtilsString.convert_to_bool(self.context.input_param_get("params.execution_config.save_stdout_to_params", False))
        # Comma-separated exit codes considered successful, e.g. "0,125"
        self.expected_return_codes = [int(num) for num in self.context.input_param_get("params.execution_config.expected_return_codes", "0").split(',')]
        self.additional_run_flags = self.context.input_param_get("params.execution_config.additional_run_flags")

        # before_script
        self.mounts_config = self.context.input_param_get("params.before_script.mounts", {})
        self.env_vars_config = self.context.input_param_get("params.before_script.env_vars", {})

        # after_script
        self.copy_files_config = self.context.input_param_get("params.after_script.copy_files_to_host", {})
        self.extract_params_config = self.context.input_param_get("params.after_script.extract_params_from_files", {})

        # Get base paths (host paths are later resolved relative to the context directory)
        self.context_dir_path = Path(os.path.dirname(self.context.context_path))
        self.input_params_path = Path(self.context.input_param_get("paths.input.params"))
        self.output_params_path = Path(self.context.input_param_get("paths.output.params"))
        self.output_files_path = Path(self.context.input_param_get("paths.output.files"))
        # Random name so concurrent runs never collide and cleanup can target the container
        self.container_name = f"podman_{str(uuid.uuid4())}"
        return True
122
+
123
+ def _run_sp_command(self, command, timeout=None):
124
+ return subprocess.run(command, capture_output=True, text=True,
125
+ timeout=timeout if timeout else self.timeout,
126
+ cwd=self.context_dir_path)
127
+
128
+ def _build_podman_command(self) -> list[str]:
129
+ cmd = ["podman", "run", "--name", self.container_name]
130
+
131
+ if self.additional_run_flags:
132
+ import shlex
133
+ cmd.extend(shlex.split(self.additional_run_flags))
134
+
135
+ if self.working_dir:
136
+ cmd.extend(["--workdir", self.working_dir])
137
+
138
+ if self.env_vars_config:
139
+ cmd.extend(self._build_command_env_var_args())
140
+
141
+ for host_path, container_path in self.mounts_config.items():
142
+ cmd.extend(["--mount", f"type=bind,source={host_path},target={container_path}"])
143
+
144
+ cmd.append(self.image)
145
+
146
+ if self.command:
147
+ import shlex
148
+ cmd.extend(shlex.split(self.command))
149
+
150
+ return cmd
151
+
152
    def _build_command_env_var_args(self) -> list[str]:
        """Build the "--env"/"--env-file" argv fragment from env_vars_config.

        Podman applies these with its own precedence (file vars < direct vars < host vars),
        as noted in the module documentation.
        """
        args = []
        # Direct KEY=VALUE assignments
        for key, value in self.env_vars_config.get("explicit", {}).items():
            args.extend(["--env", f"{key}={value}"])

        # Pass-through of host variables by prefix (podman expands e.g. "SOME_PREFIX_*")
        for prefix in self.env_vars_config.get("host_prefixes", []):
            args.extend(["--env", f"{prefix}"])

        # Env files already present on the host
        for env_file in self.env_vars_config.get("env_files", []):
            args.extend(["--env-file", f"{env_file}"])

        # Sensitive vars: written to a temp env file so they don't appear in the argv.
        # NOTE(review): the temp file holds secrets in plain text under the context dir
        # and is not removed here — presumably cleaned up elsewhere; verify.
        if self.env_vars_config.get("pass_via_file"):
            env_file_path = self.context_dir_path.joinpath("temp").joinpath("temp.env")
            env_file_path.parent.mkdir(parents=True, exist_ok=True)
            with open(env_file_path, 'w') as f:
                for key, value in self.env_vars_config["pass_via_file"].items():
                    f.write(f"{key}={value}\n")
            args.extend(["--env-file", str(env_file_path)])

        return args
172
+
173
+ def _copy_files_from_container(self):
174
+ for host_path, container_path in self.copy_files_config.items():
175
+ full_host_path = self.context_dir_path.joinpath(host_path)
176
+ full_host_path.parent.mkdir(parents=True, exist_ok=True)
177
+
178
+ copy_command = ["podman", "cp", f"{self.container_name}:{container_path}", str(full_host_path)]
179
+ try:
180
+ copy_result = self._run_sp_command(copy_command, self.operations_timeout)
181
+ if copy_result.returncode != 0:
182
+ self.context.logger.warning(f"Failed to copy {container_path} to {host_path}: {copy_result.stderr}")
183
+ else:
184
+ self.context.logger.debug(f"Copied {container_path} to {host_path}")
185
+ except subprocess.TimeoutExpired:
186
+ self.context.logger.warning(f"Copy command timed out after {self.operations_timeout} seconds")
187
+
188
    def _extract_params_from_container(self):
        """Copy configured container files to a temp dir, parse them, and publish the results.

        Parsed content is written to "params.extracted_output.<key>"; any per-file
        failure is logged as a warning and the loop continues with the next file.
        """
        import tempfile
        with tempfile.TemporaryDirectory() as temp_dir:
            for container_file_path, output_key_base in self.extract_params_config.items():
                try:
                    temp_file_path = Path(temp_dir) / Path(container_file_path).name
                    copy_command = ["podman", "cp", f"{self.container_name}:{container_file_path}", str(temp_file_path)]
                    copy_result = self._run_sp_command(copy_command, self.operations_timeout)
                    if copy_result.returncode != 0:
                        self.context.logger.warning(f"Failed to copy file {container_file_path} for params-extraction: {copy_result.stderr}")
                        continue
                    if not temp_file_path.exists():
                        self.context.logger.warning(f"File {container_file_path} for params-extraction not found after copy")
                        continue
                    # Only publish non-empty parse results; fall back to a sanitized
                    # path-derived key when no explicit section name was configured.
                    if file_content := self._parse_custom_file_params(temp_file_path):
                        base_key = output_key_base if output_key_base else container_file_path.replace('/','_').replace('.', '_')
                        self.context.output_param_set(f"params.extracted_output.{base_key}", file_content)
                except Exception as e:
                    self.context.logger.warning(f"Failed to extract params from file {container_file_path}: {e}")
207
+
208
+ def _parse_custom_file_params(self, file_path: Path):
209
+ try:
210
+ try:
211
+ import json
212
+ with open(file_path, 'r', encoding='utf-8') as f:
213
+ return json.load(f)
214
+ except Exception:
215
+ pass
216
+
217
+ try:
218
+ import yaml
219
+ with open(file_path, 'r', encoding='utf-8') as f:
220
+ return yaml.safe_load(f)
221
+ except Exception:
222
+ pass
223
+
224
+ try:
225
+ key_values = {}
226
+ with open(file_path, 'r', encoding='utf-8') as f:
227
+ for line in f:
228
+ line = line.strip()
229
+ if line and not line.startswith('#') and '=' in line:
230
+ key, value = line.split('=', 1)
231
+ key_values[key.strip()] = value.strip()
232
+ return key_values if key_values else None
233
+ except Exception:
234
+ pass
235
+
236
+ with open(file_path, 'r', encoding='utf-8') as f:
237
+ return f.read().strip()
238
+
239
+ except Exception as e:
240
+ self.context.logger.warning(f"Failed to parse custom-params file {file_path}: {e}")
241
+ return None
242
+
243
+ def _write_stdout_files(self, stdout: str, stderr: str):
244
+ (self.output_files_path / "container_stdout.txt").write_text(stdout, encoding='utf-8')
245
+ (self.output_files_path / "container_stderr.txt").write_text(stderr, encoding='utf-8')
246
+
247
+ def _process_output(self, output: subprocess.CompletedProcess):
248
+ self.context.output_param_set("params.execution_time", f"{self.execution_time:0.3f}s")
249
+ self.context.output_param_set("params.return_code", output.returncode)
250
+
251
+ if output.stdout and isinstance(output.stdout, bytes):
252
+ output.stdout = output.stdout.decode('utf-8', errors='replace')
253
+ if output.stderr and isinstance(output.stderr, bytes):
254
+ output.stderr = output.stderr.decode('utf-8', errors='replace')
255
+
256
+ if self.save_stdout_to_logs:
257
+ if output.stdout:
258
+ self.context.logger.debug(f"Container stdout:\n{output.stdout}")
259
+ if output.stderr:
260
+ self.context.logger.debug(f"Container stderr:\n{output.stderr}")
261
+
262
+ if self.save_stdout_to_files:
263
+ self._write_stdout_files(output.stdout, output.stderr)
264
+
265
+ if self.save_stdout_to_params:
266
+ self.context.output_param_set("params.stdout", output.stdout)
267
+ self.context.output_param_set("params.stderr", output.stderr)
268
+
269
+ if self.extract_params_config:
270
+ self._extract_params_from_container()
271
+
272
+ if self.copy_files_config:
273
+ self._copy_files_from_container()
274
+
275
+ if output.returncode not in self.expected_return_codes:
276
+ raise PodmanException(output.stderr)
277
+
278
+ def _execute(self):
279
+ self.context.logger.info(f"Running podman image \"{self.image}\"...")
280
+ start = time.perf_counter()
281
+ try:
282
+ output = self._run_sp_command(self._build_podman_command())
283
+ self.execution_time = time.perf_counter() - start
284
+ self.context.logger.info(
285
+ f"Container finished with code: {output.returncode}"
286
+ f"\nExecution time: {self.execution_time:0.3f}s"
287
+ )
288
+ self._process_output(output)
289
+
290
+ except subprocess.TimeoutExpired:
291
+ self.context.logger.error(f"Container execution timed out after {self.timeout} seconds")
292
+ raise
293
+
294
+ except PodmanException:
295
+ self.context.logger.error("Container exited with unexpected exitcode")
296
+ raise
297
+
298
+ except Exception as e:
299
+ self.context.logger.error(f"Container execution failed: {e}")
300
+ raise
301
+
302
+ finally:
303
+ if self.remove_container:
304
+ remove_output = subprocess.run(["podman", "rm", "-f", self.container_name], capture_output=True)
305
+ if remove_output.returncode != 0:
306
+ self.context.logger.warning(f"Failed to remove container {self.container_name}:\n{remove_output.stdout}\n{remove_output.stderr}")
307
+ self.context.output_params_save()
308
+
309
+
310
class PodmanException(Exception):
    # Raised when the container finishes with a return code outside the
    # configured expected_return_codes; the exception message carries the
    # container's stderr.
    pass