qubership-pipelines-common-library 0.2.5__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qubership_pipelines_common_library/v1/execution/exec_command.py +52 -1
- qubership_pipelines_common_library/v1/execution/exec_info.py +4 -0
- qubership_pipelines_common_library/v1/github_client.py +9 -0
- qubership_pipelines_common_library/v1/gitlab_client.py +170 -10
- qubership_pipelines_common_library/v1/utils/utils_cli.py +3 -1
- qubership_pipelines_common_library/v1/utils/utils_file.py +17 -0
- qubership_pipelines_common_library/v2/__init__.py +0 -0
- qubership_pipelines_common_library/v2/extensions/pipeline_data_importer.py +24 -0
- qubership_pipelines_common_library/v2/github/__init__.py +0 -0
- qubership_pipelines_common_library/v2/github/github_client.py +5 -0
- qubership_pipelines_common_library/v2/github/github_pipeline_data_importer.py +21 -0
- qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py +175 -0
- qubership_pipelines_common_library/v2/github/safe_github_client.py +24 -0
- qubership_pipelines_common_library/v2/gitlab/__init__.py +0 -0
- qubership_pipelines_common_library/v2/gitlab/custom_extensions.py +101 -0
- qubership_pipelines_common_library/v2/gitlab/gitlab_client.py +36 -0
- qubership_pipelines_common_library/v2/gitlab/gitlab_pipeline_data_importer.py +26 -0
- qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py +195 -0
- qubership_pipelines_common_library/v2/gitlab/safe_gitlab_client.py +32 -0
- qubership_pipelines_common_library/v2/podman/__init__.py +0 -0
- qubership_pipelines_common_library/v2/podman/podman_command.md +172 -0
- qubership_pipelines_common_library/v2/podman/podman_command.py +311 -0
- qubership_pipelines_common_library/v2/sops/sops_client.py +116 -0
- qubership_pipelines_common_library/v2/utils/crypto_utils.py +48 -0
- qubership_pipelines_common_library/v2/utils/extension_utils.py +22 -0
- qubership_pipelines_common_library/v2/utils/retry_decorator.py +93 -0
- {qubership_pipelines_common_library-0.2.5.dist-info → qubership_pipelines_common_library-2.0.0.dist-info}/METADATA +1 -1
- qubership_pipelines_common_library-2.0.0.dist-info/RECORD +52 -0
- qubership_pipelines_common_library-0.2.5.dist-info/RECORD +0 -32
- {qubership_pipelines_common_library-0.2.5.dist-info → qubership_pipelines_common_library-2.0.0.dist-info}/WHEEL +0 -0
- {qubership_pipelines_common_library-0.2.5.dist-info → qubership_pipelines_common_library-2.0.0.dist-info}/licenses/LICENSE +0 -0

qubership_pipelines_common_library/v1/execution/exec_command.py

@@ -15,9 +15,11 @@
 import logging
 import sys
 import traceback
+from abc import ABC, abstractmethod
 
 from qubership_pipelines_common_library.v1.execution.exec_context import ExecutionContext
 from qubership_pipelines_common_library.v1.utils.utils_context import create_execution_context
+from qubership_pipelines_common_library.v2.utils.crypto_utils import CryptoUtils
 
 
 class ExecutionCommand:
@@ -26,7 +28,9 @@ class ExecutionCommand:
     FAILURE_MSG = "Status: FAILURE"
 
     def __init__(self, context_path: str = None, input_params: dict = None, input_params_secure: dict = None,
-                 folder_path: str = None, parent_context_to_reuse: ExecutionContext = None
+                 folder_path: str = None, parent_context_to_reuse: ExecutionContext = None,
+                 pre_execute_actions: list['ExecutionCommandExtension'] = None,
+                 post_execute_actions: list['ExecutionCommandExtension'] = None):
         """
         Extendable interface intended to simplify working with input/output params and passing them between commands in different Pipeline Executors
 
@@ -40,30 +44,56 @@ class ExecutionCommand:
             input_params_secure (dict): Secure parameters that will be merged into dynamically created params
             folder_path (str): Folder path where dynamically-created context will be stored. Optional, will create new temp folder if missing.
             parent_context_to_reuse (ExecutionContext): Optional, existing context to propagate input params from.
+            pre_execute_actions: Optional, list of actions, implementing ExecutionCommandExtension, to be executed before command
+            post_execute_actions: Optional, list of actions, implementing ExecutionCommandExtension, to be executed after command
         """
         if not context_path:
            context_path = create_execution_context(input_params=input_params, input_params_secure=input_params_secure,
                                                    folder_path=folder_path, parent_context_to_reuse=parent_context_to_reuse)
         self.context = ExecutionContext(context_path)
+        self._pre_execute_actions = []
+        if pre_execute_actions:
+            self._pre_execute_actions.extend(pre_execute_actions)
+        self._post_execute_actions = []
+        if post_execute_actions:
+            self._post_execute_actions.extend(post_execute_actions)
 
     def run(self):
         """Runs command following its lifecycle"""
         try:
+            self._log_input_params()
             if not self._validate():
                 logging.error(ExecutionCommand.FAILURE_MSG)
                 self._exit(False, ExecutionCommand.FAILURE_MSG)
+            self._pre_execute()
             self._execute()
+            self._post_execute()
             self._exit(True, ExecutionCommand.SUCCESS_MSG)
         except Exception as e:
             logging.error(traceback.format_exc())
             self._exit(False, ExecutionCommand.FAILURE_MSG)
 
+    def _log_input_params(self):
+        self.context.logger.info(
+            "Input context parameters:\n%s\n%s",
+            CryptoUtils.get_parameters_for_print(self.context.input_params_secure.content, True),
+            CryptoUtils.get_parameters_for_print(self.context.input_params.content, False)
+        )
+
     def _validate(self):
         return self.context.validate(["paths.input.params"])
 
+    def _pre_execute(self):
+        for action in self._pre_execute_actions:
+            action.with_command(self).execute()
+
     def _execute(self):
         logging.info("Status: SKIPPED")
 
+    def _post_execute(self):
+        for action in self._post_execute_actions:
+            action.with_command(self).execute()
+
     def _exit(self, success: bool, message: str):
         if success:
             self.context.logger.info(message)
@@ -71,3 +101,24 @@ class ExecutionCommand:
         else:
             self.context.logger.error(message)
             sys.exit(1)
+
+
+class ExecutionCommandExtension(ABC):
+    """
+    Base interface used in ExecutionCommand pre_execute and post_execute actions
+    Can be extended by users to perform custom extension logic before and after execution
+    """
+
+    def __init__(self):
+        self.context = None
+        self.command = None
+
+    def with_command(self, command: ExecutionCommand) -> 'ExecutionCommandExtension':
+        self.command = command
+        self.context = command.context
+        return self
+
+    @abstractmethod
+    def execute(self) -> None:
+        """Implements custom extension logic"""
+        pass
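The new `pre_execute_actions`/`post_execute_actions` hooks and the `ExecutionCommandExtension` base class make the command lifecycle extendable. Below is a minimal sketch of wiring a custom extension into a command; the extension class and the input params are illustrative, not part of the diff.

```python
from qubership_pipelines_common_library.v1.execution.exec_command import (
    ExecutionCommand,
    ExecutionCommandExtension,
)


class LogStartExtension(ExecutionCommandExtension):
    """Example extension: with_command() fills self.command/self.context before execute() runs."""

    def execute(self) -> None:
        self.context.logger.info(f"About to run {type(self.command).__name__}")


# Illustrative params; run() now logs input params, runs pre-actions, _execute(), then post-actions.
command = ExecutionCommand(
    input_params={"params": {"example_key": "example_value"}},
    pre_execute_actions=[LogStartExtension()],
)
command.run()
```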

qubership_pipelines_common_library/v1/execution/exec_info.py

@@ -108,3 +108,7 @@ class ExecutionInfo:
     def with_params(self, params: dict):
         self.params = params
         return self
+
+    def __str__(self):
+        return (f"ExecutionInfo(id='{self.id}', url='{self.url}', status='{self.status}', "
+                f"time_start={self.time_start.isoformat()})")

qubership_pipelines_common_library/v1/github_client.py

@@ -51,11 +51,20 @@ class GithubClient:
 
     def __init__(self, token: str = None, api_url: str = None, **kwargs):
         """
+        This class is deprecated and will be removed in v3.0.0. Use class from v2 module instead.
         Arguments:
             token (str): Token used in auth request
             api_url (str): Optional Github Enterprise API URL, leave empty if using github.com
             **kwargs (Any): will be passed into Github API constructor
         """
+        if self.__class__ == GithubClient:
+            import warnings
+            warnings.warn(
+                "v1.github_client.GithubClient is deprecated since v2.0.0 and will be removed in v3.0.0. "
+                "Use v2.github.github_client.GithubClient instead.",
+                DeprecationWarning,
+                stacklevel=2
+            )
         self.gh = GhApi(token=token, gh_host=api_url, **kwargs)
         logging.info("Github Client configured")
 
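The warning is raised only when the v1 class itself is instantiated (the `self.__class__ == GithubClient` guard), so subclasses such as the v2 client do not trigger it. A quick way to observe it, assuming constructing the client does not require a valid token:

```python
import warnings

from qubership_pipelines_common_library.v1.github_client import GithubClient

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    GithubClient(token="dummy-token")  # placeholder token; only the constructor is exercised here
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
```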

qubership_pipelines_common_library/v1/gitlab_client.py

@@ -12,8 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging, gitlab
-from
+import logging, gitlab, time
+from pathlib import Path
+
 from gitlab import GitlabGetError
 from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
 
@@ -36,6 +37,7 @@ class GitlabClient:
 
     def __init__(self, host: str, username: str, password: str, email: str = None, **kwargs):
         """
+        This class is deprecated and will be removed in v3.0.0. Use class from v2 module instead.
         Arguments:
             host (str): Gitlab instance URL
             username (str): User used in auth request, might be empty string if no auth is required
@@ -43,6 +45,14 @@ class GitlabClient:
             email (str): Email used when committing changes using API
             **kwargs (Any): will be passed into Gitlab API constructor
         """
+        if self.__class__ == GitlabClient:
+            import warnings
+            warnings.warn(
+                "v1.gitlab_client.GitlabClient is deprecated since v2.0.0 and will be removed in v3.0.0. "
+                "Use v2.gitlab.gitlab_client.GitlabClient instead.",
+                DeprecationWarning,
+                stacklevel=2
+            )
         self.host = host.rstrip("/")
         self.username = username
         self.email = email
@@ -120,7 +130,7 @@ class GitlabClient:
         while counter < timeout:
             counter += 1
             logging.info("Waiting pipeline execution timeout 1 second")
-            sleep(1)
+            time.sleep(1)
             continue
         pipeline.cancel()
         return execution.stop(ExecutionInfo.STATUS_ABORTED)
@@ -136,14 +146,17 @@
             logging.error("Can't get pipeline status")
         return execution
 
-    def wait_pipeline_execution(self, execution: ExecutionInfo, timeout_seconds: float =
+    def wait_pipeline_execution(self, execution: ExecutionInfo, timeout_seconds: float = 180.0,
                                 break_status_list: list = None, wait_seconds: float = 1.0):
         """"""
         if break_status_list is None:
             break_status_list = self.BREAK_STATUS_LIST
-
+        count_seconds = 0
+        last_log_time = time.perf_counter()
+        estimated_max_attempts = timeout_seconds // wait_seconds
+        retries = 0
         execution.with_status(execution.get_status())
-        while
+        while count_seconds < timeout_seconds:
             try:
                 project = self.gl.projects.get(execution.get_name(), lazy=True)
                 pipeline = project.pipelines.get(execution.get_id())
@@ -154,10 +167,13 @@ class GitlabClient:
                     break
             except:
                 pass
-
-
-
-
+            now = time.perf_counter()
+            retries += 1
+            if now - last_log_time >= 10.0:
+                logging.info(f"Made [{retries} of {estimated_max_attempts}] retries. Waiting pipeline execution {count_seconds} of {timeout_seconds}")
+                last_log_time = now
+            count_seconds += wait_seconds
+            time.sleep(wait_seconds)
         return execution
 
     @staticmethod
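The reworked `wait_pipeline_execution` now polls until `timeout_seconds` elapses, sleeping `wait_seconds` between checks and logging progress at most every ~10 seconds. A hedged usage sketch follows; host, token, project path, and pipeline id are placeholders, and the `ExecutionInfo` wiring assumes `get_name()` holds the project path and `get_id()` the pipeline id, as used inside the loop.

```python
from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v1.gitlab_client import GitlabClient

client = GitlabClient(host="https://gitlab.example.com", username="", password="<token>")
execution = (ExecutionInfo()
             .with_name("my-group/my-project")   # read back via get_name() inside the loop
             .with_id(123456)                    # read back via get_id() inside the loop
             .with_status(ExecutionInfo.STATUS_IN_PROGRESS))
execution = client.wait_pipeline_execution(execution, timeout_seconds=600.0, wait_seconds=2.0)
print(execution.get_status())
```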
@@ -170,6 +186,65 @@ class GitlabClient:
             return {"repo": url[:pos1], "branch": url[pos1 + len(part):pos2], "path": url[pos2 + 1:]}
         return None
 
+    def get_default_branch(self, project_id: str) -> str:
+        return self.gl.projects.get(project_id).default_branch
+
+    def get_latest_pipeline_id(self, project_id: str, ref: str) -> int | str:
+        project = self.gl.projects.get(project_id, lazy=True)
+        return project.pipelines.latest(ref=ref).get_id()
+
+    def get_latest_job(self, project_id: str, pipeline_id: str):
+        project = self.gl.projects.get(project_id, lazy=True)
+        pipeline = project.pipelines.get(pipeline_id, lazy=True)
+
+        jobs = pipeline.jobs.list(get_all=True)
+        logging.debug(f"All jobs from the pipeline: {jobs}")
+
+        # get jobs from downstream pipelines
+        bridges = pipeline.bridges.list(get_all=True)
+        logging.debug(f"Bridges: {bridges}")
+        for bridge in bridges:
+            downstream_pipeline_data = bridge.downstream_pipeline
+            downstream_project = self.gl.projects.get(downstream_pipeline_data.get('project_id'), lazy=True)
+            logging.debug(f"Getting jobs from a downstream pipeline: {downstream_pipeline_data.get('id')}...")
+            downstream_pipeline = downstream_project.pipelines.get(downstream_pipeline_data.get('id'))
+            jobs.extend(downstream_pipeline.jobs.list(get_all=True))
+
+        # get jobs from child pipelines
+        child_pipelines = project.pipelines.list(ref=f"downstream/{pipeline_id}", source="pipeline", all=True)
+        logging.debug(f"Child pipelines: {child_pipelines}")
+        for child_pipeline in child_pipelines:
+            logging.debug(f"Getting jobs from a child pipeline: {child_pipeline.id}...")
+            child_jobs = child_pipeline.jobs.list(get_all=True)
+            jobs.extend(child_jobs)
+
+        logging.debug(f"All jobs (+ jobs from downstream pipelines): {jobs}")
+        jobs = [j for j in jobs if j.started_at]
+        jobs = sorted(jobs, key=lambda j: j.started_at, reverse=True)
+        return jobs[0] if jobs else None
+
+    def download_job_artifacts(self, project_id, job_id, local_dir):
+        project = self.gl.projects.get(project_id, lazy=True)
+        job = project.jobs.get(job_id, lazy=True)
+        local_file = Path(local_dir, f"{job_id}.zip")
+        with local_file.open('wb') as f:
+            try:
+                job.artifacts(streamed=True, action=f.write)
+            except gitlab.GitlabGetError as e:
+                if e.response_code == 404:
+                    logging.warning(f"No artifacts for job {job_id}")
+                    return None
+                else: raise
+        logging.info(f"Artifacts downloaded to {local_file}")
+        return local_file
+
+    @staticmethod
+    def _cast_to_string(value) -> str:
+        if isinstance(value, str): return value
+        if value is None: return ''
+        if isinstance(value, bool): return 'true' if value else 'false'
+        return str(value)
+
     def _map_status(self, git_status: str, default_status: str):
         result = default_status
         if git_status in (GitlabClient.STATUS_CREATED, GitlabClient.STATUS_WAITING,
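The new helpers cover the common flow of locating the most recent pipeline and pulling its artifacts, including jobs from downstream and child pipelines. A hedged sketch, with project path, token, and local directory as placeholders:

```python
from qubership_pipelines_common_library.v1.gitlab_client import GitlabClient

client = GitlabClient(host="https://gitlab.example.com", username="", password="<token>")
branch = client.get_default_branch("my-group/my-project")
pipeline_id = client.get_latest_pipeline_id("my-group/my-project", ref=branch)
job = client.get_latest_job("my-group/my-project", pipeline_id)
if job:
    # Returns the path to "<job_id>.zip", or None when the job has no artifacts (HTTP 404).
    archive = client.download_job_artifacts("my-group/my-project", job.id, local_dir="/tmp/artifacts")
```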
@@ -186,3 +261,88 @@ class GitlabClient:
         elif git_status == GitlabClient.STATUS_MANUAL:
             result = ExecutionInfo.STATUS_MANUAL
         return result
+
+    # Related static methods, with direct REST access
+    @staticmethod
+    def is_gitlab_project_exist(gitlab_url, gitlab_project, gitlab_token):
+        """"""
+        import requests
+        headers = {"PRIVATE-TOKEN": gitlab_token}
+        request = f"{gitlab_url}/api/v4/projects/{requests.utils.quote(gitlab_project, safe='')}"
+        logging.debug(f"Sending '{request}' request...")
+        response = requests.get(request, headers=headers)
+        if response.status_code == 200:
+            return True
+        else:
+            logging.error(f"Error {response.status_code}: {response.text}")
+            return False
+
+    @staticmethod
+    def search_group_id(gitlab_url, gitlab_project, gitlab_token):
+        """"""
+        import requests
+        headers = {"PRIVATE-TOKEN": gitlab_token}
+        request = f"{gitlab_url}/api/v4/groups?search={gitlab_project}"
+        logging.debug(f"Sending '{request}' request...")
+        response = requests.get(request, headers=headers)
+        if response.status_code == 200:
+            groups = response.json()
+            for group in groups:
+                if group.get("full_path") == gitlab_project:
+                    return group["id"]
+        else:
+            logging.error(f"Error {response.status_code}: {response.text}")
+
+    @staticmethod
+    def create_internal_gitlab_project(gitlab_url, gitlab_token, group_id, repo_name, repo_branch, visibility="internal"):
+        """"""
+        import requests
+        headers = {"PRIVATE-TOKEN": gitlab_token, "Content-Type": "application/json"}
+        data = {
+            "name": repo_name,
+            "namespace_id": group_id,
+            "visibility": visibility,
+            "default_branch": repo_branch
+        }
+        request = f"{gitlab_url}/api/v4/projects"
+        logging.debug(f"Sending '{request}' request...")
+        response = requests.post(request, headers=headers, json=data)
+        if response.status_code == 201:
+            response_json = response.json()
+            logging.info(f"Gitlab project was created. Url: '{response_json['web_url']}'")
+            GitlabClient.make_first_commit_to_gitlab_project(
+                gitlab_url=gitlab_url,
+                gitlab_token=gitlab_token,
+                project_id=response_json['id'],
+                repo_branch=repo_branch
+            )
+        else:
+            logging.error(f"Error {response.status_code}: {response.text}")
+
+    @staticmethod
+    def make_first_commit_to_gitlab_project(gitlab_url, gitlab_token, project_id, repo_branch):
+        """"""
+        import requests
+        logging.debug(f"Making first commit...")
+        headers = {"PRIVATE-TOKEN": gitlab_token, "Content-Type": "application/json"}
+        commit_payload = {
+            "branch": repo_branch,
+            "commit_message": "Initial commit",
+            "actions": [
+                {
+                    "action": "create",
+                    "file_path": "README.md",
+                    "content": "# This is an automatically created project"
+                }
+            ]
+        }
+
+        response = requests.post(
+            f"{gitlab_url}/api/v4/projects/{project_id}/repository/commits",
+            headers=headers,
+            json=commit_payload
+        )
+        if response.status_code == 201:
+            logging.info(f"Commit successfull")
+        else:
+            logging.error(f"Error {response.status_code}: {response.text}")
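These static helpers call the GitLab REST API directly with a `PRIVATE-TOKEN` header, so they can be used without constructing a client instance. A hedged sketch of the create-project flow, with URL, group, and token as placeholders:

```python
from qubership_pipelines_common_library.v1.gitlab_client import GitlabClient

gitlab_url = "https://gitlab.example.com"
token = "<token>"

if not GitlabClient.is_gitlab_project_exist(gitlab_url, "my-group/new-repo", token):
    group_id = GitlabClient.search_group_id(gitlab_url, "my-group", token)
    if group_id:
        # Creates the project and then pushes an initial README.md commit to the given branch.
        GitlabClient.create_internal_gitlab_project(gitlab_url, token, group_id, "new-repo", "main")
```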

qubership_pipelines_common_library/v1/utils/utils_cli.py

@@ -15,6 +15,8 @@ def utils_cli(func):
     @click.option('--context_path', required=True, default=DEFAULT_CONTEXT_FILE_PATH, type=str, help="Path to context")
     @click.option("--input_params", "-p", multiple=True, callback=_input_params_to_dict,
                   help="Params to use instead of context as key-values. Nested keys are supported with double-underscores or dots as separators, e.g. -p params__group__key=value")
+    @click.option("--input_params_secure", "-s", multiple=True, callback=_input_params_to_dict,
+                  help="Params to use instead of context as key-values. Nested keys are supported with double-underscores or dots as separators, e.g. -p params__group__key=value")
     @click.pass_context
     def wrapper(ctx, *args, log_level, **kwargs):
         ExecutionLogger.EXECUTION_LOG_LEVEL = getattr(logging, log_level.upper(), logging.INFO)
@@ -38,7 +40,7 @@ def _configure_global_logger(global_logger: logging.Logger, log_level: str, form
 
 
 def _transform_kwargs(kwargs):
-    if kwargs.get("input_params"):
+    if kwargs.get("input_params") or kwargs.get("input_params_secure"):
         kwargs.pop("context_path")
 
 
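The new `--input_params_secure` / `-s` option mirrors `-p` and feeds the same `_input_params_to_dict` callback; when either is given, `context_path` is dropped and a context is built from the supplied params instead. For illustration only (an assumption based on the option help text, not code shown in this diff), nested keys expand like so:

```python
# Hypothetical invocation:  my-command -p params__group__key=value -s systems.github.password=token
# would presumably be converted into nested dicts equivalent to:
input_params = {"params": {"group": {"key": "value"}}}
input_params_secure = {"systems": {"github": {"password": "token"}}}
```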

qubership_pipelines_common_library/v1/utils/utils_file.py

@@ -66,3 +66,20 @@ class UtilsFile:
             func(path)
         else:
             raise
+
+    @staticmethod
+    def create_parent_dirs(filepath):
+        if directory := os.path.dirname(filepath):
+            os.makedirs(directory, exist_ok=True)
+
+    @staticmethod
+    def create_exec_dir(execution_folder_path: str | Path, exists_ok: bool = False) -> Path:
+        import shutil
+        exec_dir = Path(execution_folder_path)
+        if exec_dir.exists() and not exists_ok:
+            if exec_dir.is_dir():
+                shutil.rmtree(exec_dir)
+            else:
+                raise FileExistsError(f"Path '{execution_folder_path}' exists and is a file, not a directory.")
+        exec_dir.mkdir(parents=True, exist_ok=exists_ok)
+        return exec_dir
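A hedged usage sketch of the new `UtilsFile` helpers (paths are placeholders):

```python
from pathlib import Path

from qubership_pipelines_common_library.v1.utils.utils_file import UtilsFile

# Recreate a clean execution directory: an existing directory is removed first,
# while an existing regular file at the path raises FileExistsError.
exec_dir: Path = UtilsFile.create_exec_dir("/tmp/executions/run-1", exists_ok=False)

# Ensure the parent directory of an output file exists before writing to it.
UtilsFile.create_parent_dirs(str(exec_dir / "output" / "params.yaml"))
```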

qubership_pipelines_common_library/v2/__init__.py: File without changes (new empty package init)

qubership_pipelines_common_library/v2/extensions/pipeline_data_importer.py

@@ -0,0 +1,24 @@
+from abc import ABC, abstractmethod
+
+from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommand
+from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+
+
+class PipelineDataImporter(ABC):
+    """
+    Base interface used by "GitHub/GitLab Run Workflow" commands
+    Can be extended by users to perform custom artifacts transformations at the end of workflow
+    """
+
+    def __init__(self):
+        self.context = None
+        self.command = None
+
+    def with_command(self, command: ExecutionCommand):
+        self.command = command
+        self.context = command.context
+
+    @abstractmethod
+    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
+        """Implements custom data import logic"""
+        pass
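Custom importers subclass `PipelineDataImporter` and override `import_pipeline_data`; the run command calls `with_command()` first, so `self.command` and `self.context` are available. A hypothetical example (class name and output layout are illustrative):

```python
from pathlib import Path

from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter


class ManifestOnlyImporter(PipelineDataImporter):
    """Example: store only run metadata instead of downloading artifact archives."""

    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
        output_dir = Path(self.context.input_param_get("paths.output.files"))
        output_dir.mkdir(parents=True, exist_ok=True)
        # Uses the new ExecutionInfo.__str__ added in this release.
        (output_dir / "run_info.txt").write_text(str(execution))
```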

qubership_pipelines_common_library/v2/github/__init__.py: File without changes (new empty package init)

qubership_pipelines_common_library/v2/github/github_pipeline_data_importer.py

@@ -0,0 +1,21 @@
+import zipfile
+from pathlib import Path
+
+from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
+
+
+class DefaultGithubPipelineDataImporter(PipelineDataImporter):
+    """
+    Default GitHub implementation:
+    downloads all available workflow run artifacts,
+    extracts them into context-defined 'paths.output.files' path
+    """
+    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
+        self.context.logger.info("DefaultGithubPipelineDataImporter - importing pipeline data...")
+        self.command.github_client.download_workflow_run_artifacts(execution, self.context.path_temp)
+        output_path = Path(self.context.input_param_get("paths.output.files"))
+        output_path.mkdir(parents=True, exist_ok=True)
+        for file_path in Path(self.context.path_temp).iterdir():
+            with zipfile.ZipFile(file_path) as zf:
+                zf.extractall(output_path)

qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py

@@ -0,0 +1,175 @@
+from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommand
+from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+from qubership_pipelines_common_library.v1.utils.utils_string import UtilsString
+from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
+from qubership_pipelines_common_library.v2.github.github_pipeline_data_importer import DefaultGithubPipelineDataImporter
+from qubership_pipelines_common_library.v2.github.safe_github_client import SafeGithubClient
+
+
+class GithubRunPipeline(ExecutionCommand):
+    """
+    Executes a GitHub Actions workflow pipeline and optionally imports artifacts.
+
+    This command triggers a GitHub workflow run, monitors its execution, and provides
+    options for importing workflow artifacts and custom data processing through extensible
+    importers.
+
+    Input Parameters Structure (this structure is expected inside "input_params.params" block):
+    ```
+    {
+        "pipeline_owner": "Netcracker",                    # REQUIRED: Repository owner/organization
+        "pipeline_repo_name": "qubership-test-pipelines",  # REQUIRED: Repository name
+        "pipeline_workflow_file_name": "test.yaml",        # REQUIRED: Workflow filename (e.g., main.yaml, ci-cd.yml)
+        "pipeline_branch": "main",                         # OPTIONAL: Branch to run workflow from (default: repo's default branch)
+        "pipeline_params": {                               # OPTIONAL: Input parameters to pass to the workflow
+            "KEY1": "VALUE1",
+            "KEY2": "VALUE2"
+        },
+        "import_artifacts": false,                         # OPTIONAL: Whether to import workflow artifacts (default: false)
+        "use_existing_pipeline": 123456789,                # OPTIONAL: Use existing workflow run ID instead of starting new one (debug feature)
+        "timeout_seconds": 1800,                           # OPTIONAL: Maximum wait time for workflow completion in seconds (default: 1800, 0 for async execution)
+        "wait_seconds": 1,                                 # OPTIONAL: Wait interval between status checks in seconds (default: 1)
+        "retry_timeout_seconds": 180,                      # OPTIONAL: Timeout for GitHub client initialization and workflow start retries in seconds (default: 180)
+        "retry_wait_seconds": 1,                           # OPTIONAL: Wait interval between retries in seconds (default: 1)
+        "success_statuses": "SUCCESS,UNSTABLE"             # OPTIONAL: Comma-separated list of acceptable completion statuses (default: SUCCESS)
+    }
+    ```
+
+    Systems Configuration (expected in "systems.github" block):
+    ```
+    {
+        "url": "https://github.com",          # OPTIONAL: GitHub UI URL for self-hosted instances (default: https://github.com)
+        "api_url": "https://api.github.com",  # OPTIONAL: GitHub API URL for self-hosted instances (default: https://api.github.com)
+        "password": "<github_token>"          # REQUIRED: GitHub access token with workflow permissions
+    }
+    ```
+
+    Output Parameters:
+    - params.build.url: URL to view the workflow run in GitHub
+    - params.build.id: ID of the executed workflow run
+    - params.build.status: Final status of the workflow execution
+    - params.build.date: Workflow start time in ISO format
+    - params.build.duration: Total execution duration in human-readable format
+    - params.build.name: Name of the workflow run
+
+    Extension Points:
+    - Custom pipeline data importers can be implemented by extending PipelineDataImporter interface
+    - PipelineDataImporter is passed into constructor of command via "pipeline_data_importer" arg
+
+    Notes:
+    - Setting timeout_seconds to 0 enables asynchronous execution (workflow starts but command doesn't wait for completion)
+    - For self-hosted GitHub Enterprise, configure both "systems.github.url" and "systems.github.api_url"
+    - Custom data importers receive the command context and can implement advanced processing logic
+    """
+
+    # default timeout values
+    WAIT_TIMEOUT = 1800
+    WAIT_SECONDS = 1
+    RETRY_TIMEOUT_SECONDS = 180
+    RETRY_WAIT_SECONDS = 1
+
+    def __init__(self, *args, pipeline_data_importer: PipelineDataImporter = None, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.pipeline_data_importer = pipeline_data_importer or DefaultGithubPipelineDataImporter()
+        if pipeline_data_importer and not isinstance(pipeline_data_importer, PipelineDataImporter):
+            raise TypeError(f"Class {type(pipeline_data_importer)} must inherit from PipelineDataImporter")
+
+    def _validate(self):
+        names = [
+            "paths.input.params",
+            "paths.output.params",
+            "paths.output.files",
+            "systems.github.password",
+            "params.pipeline_owner",
+            "params.pipeline_repo_name",
+            "params.pipeline_workflow_file_name",
+        ]
+        if not self.context.validate(names):
+            return False
+
+        self.timeout_seconds = max(0, int(self.context.input_param_get("params.timeout_seconds", self.WAIT_TIMEOUT)))
+        self.wait_seconds = max(1, int(self.context.input_param_get("params.wait_seconds", self.WAIT_SECONDS)))
+
+        self.retry_timeout_seconds = int(self.context.input_param_get("params.retry_timeout_seconds", self.RETRY_TIMEOUT_SECONDS))
+        self.retry_wait_seconds = int(self.context.input_param_get("params.retry_wait_seconds", self.RETRY_WAIT_SECONDS))
+
+        if self.timeout_seconds == 0:
+            self.context.logger.info(f"Timeout is set to: {self.timeout_seconds}. This means that the pipeline will be started asynchronously")
+
+        self.pipeline_owner = self.context.input_param_get("params.pipeline_owner")
+        self.pipeline_repo_name = self.context.input_param_get("params.pipeline_repo_name")
+        self.pipeline_workflow_file_name = self.context.input_param_get("params.pipeline_workflow_file_name")
+        self.pipeline_branch = self.context.input_param_get("params.pipeline_branch")
+        self.pipeline_params = self.context.input_param_get("params.pipeline_params", {})
+        if not self.pipeline_params:
+            self.context.logger.info(f"Pipeline parameters were not specified. This means that pipeline will be started with its default values")
+        if not isinstance(self.pipeline_params, dict):
+            self.context.logger.error(f"Pipeline parameters were not loaded correctly. Probably mistake in the params definition")
+            return False
+        self.import_artifacts = UtilsString.convert_to_bool(self.context.input_param_get("params.import_artifacts", False))
+        self.success_statuses = [x.strip() for x in self.context.input_param_get("params.success_statuses", ExecutionInfo.STATUS_SUCCESS).split(",")]
+        self.use_existing_pipeline = self.context.input_param_get("params.use_existing_pipeline")
+        self.ui_url = self.context.input_param_get("systems.github.ui_url", "https://github.com")
+        return True
+
+    def _execute(self):
+        self.context.logger.info("GithubRunPipeline - triggering GitHub workflow run and fetching results...")
+
+        self.github_client = SafeGithubClient.create_github_client(
+            api_url=self.context.input_param_get("systems.github.api_url"),
+            token=self.context.input_param_get("systems.github.password"),
+            retry_timeout_seconds=self.retry_timeout_seconds,
+            retry_wait_seconds=self.retry_wait_seconds
+        )
+
+        if self.use_existing_pipeline:  # work with existing workflow run
+            pipeline_id = self.use_existing_pipeline
+            self.context.logger.info(f"Using existing pipeline {pipeline_id}")
+            execution = (ExecutionInfo()
+                         .with_url(f"{self.ui_url}/{self.pipeline_owner}/{self.pipeline_repo_name}/")
+                         .with_name(self.pipeline_workflow_file_name).with_id(int(pipeline_id))
+                         .with_status(ExecutionInfo.STATUS_UNKNOWN))
+            execution.start()
+        else:
+            branch = self.pipeline_branch
+            if not branch:
+                branch = self.github_client.get_repo_default_branch(self.pipeline_owner, self.pipeline_repo_name)
+            execution = self.github_client.trigger_workflow(owner=self.pipeline_owner, repo_name=self.pipeline_repo_name,
+                                                            workflow_file_name=self.pipeline_workflow_file_name,
+                                                            branch=branch, pipeline_params=self.pipeline_params,
+                                                            retry_timeout_seconds=self.retry_timeout_seconds,
+                                                            retry_wait_seconds=self.retry_wait_seconds
+                                                            )
+            self.context.logger.info(f"Triggered pipeline {execution.get_id()}, status: {execution.get_status()}, url: {execution.get_url()}")
+
+        if execution.get_status() != ExecutionInfo.STATUS_IN_PROGRESS:
+            self._exit(False, f"Pipeline was not started. Status {execution.get_status()}")
+        elif self.timeout_seconds < 1:
+            self.context.logger.info("Pipeline was started in asynchronous mode. Pipeline status and artifacts will not be processed")
+            return
+
+        execution = self.github_client.wait_workflow_run_execution(execution=execution,
+                                                                   timeout_seconds=self.timeout_seconds,
+                                                                   wait_seconds=self.wait_seconds)
+        self.context.logger.info(f"Pipeline status: {execution.get_status()}")
+
+        if self.import_artifacts and self.pipeline_data_importer and execution.get_status() in ExecutionInfo.STATUSES_COMPLETE:
+            try:
+                self.pipeline_data_importer.with_command(self)
+                self.pipeline_data_importer.import_pipeline_data(execution)
+            except Exception as e:
+                self.context.logger.error(f"Exception during pipeline_data_importer execution: {e}")
+
+        self._save_execution_info(execution)
+        if execution.get_status() not in self.success_statuses:
+            self._exit(False, f"Status: {execution.get_status()}")
+
+    def _save_execution_info(self, execution: ExecutionInfo):
+        self.context.logger.info(f"Writing GitHub workflow execution status")
+        self.context.output_param_set("params.build.url", execution.get_url())
+        self.context.output_param_set("params.build.id", execution.get_id())
+        self.context.output_param_set("params.build.status", execution.get_status())
+        self.context.output_param_set("params.build.date", execution.get_time_start().isoformat())
+        self.context.output_param_set("params.build.duration", execution.get_duration_str())
+        self.context.output_param_set("params.build.name", execution.get_name())
+        self.context.output_params_save()