qubership-pipelines-common-library 0.1.8__tar.gz → 0.1.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31) hide show
  1. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/PKG-INFO +5 -5
  2. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/README.md +2 -2
  3. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/pyproject.toml +3 -4
  4. qubership_pipelines_common_library-0.1.10/qubership_pipelines_common_library/v1/artifactory_client.py +119 -0
  5. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/execution/exec_logger.py +9 -5
  6. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/git_client.py +18 -0
  7. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/github_client.py +55 -42
  8. qubership_pipelines_common_library-0.1.10/qubership_pipelines_common_library/v1/utils/utils_cli.py +73 -0
  9. qubership_pipelines_common_library-0.1.8/qubership_pipelines_common_library/v1/artifactory_client.py +0 -52
  10. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/LICENSE +0 -0
  11. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/__init__.py +0 -0
  12. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/__init__.py +0 -0
  13. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/execution/__init__.py +0 -0
  14. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/execution/exec_command.py +0 -0
  15. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/execution/exec_context.py +0 -0
  16. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/execution/exec_context_file.py +0 -0
  17. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/execution/exec_info.py +0 -0
  18. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/gitlab_client.py +0 -0
  19. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/jenkins_client.py +0 -0
  20. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/kube_client.py +0 -0
  21. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/log_client.py +0 -0
  22. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/minio_client.py +0 -0
  23. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/utils/__init__.py +0 -0
  24. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/utils/rest.py +0 -0
  25. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/utils/utils.py +0 -0
  26. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/utils/utils_context.py +0 -0
  27. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/utils/utils_dictionary.py +0 -0
  28. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/utils/utils_file.py +0 -0
  29. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/utils/utils_json.py +0 -0
  30. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/utils/utils_string.py +0 -0
  31. {qubership_pipelines_common_library-0.1.8 → qubership_pipelines_common_library-0.1.10}/qubership_pipelines_common_library/v1/webex_client.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: qubership-pipelines-common-library
3
- Version: 0.1.8
3
+ Version: 0.1.10
4
4
  Summary: Qubership Pipelines common library
5
5
  License: Apache-2.0
6
6
  Author: Igor Lebedev
@@ -12,9 +12,8 @@ Classifier: Programming Language :: Python :: 3.11
12
12
  Classifier: Programming Language :: Python :: 3.12
13
13
  Classifier: Programming Language :: Python :: 3.13
14
14
  Requires-Dist: GitPython (>=3.1.43,<4.0.0)
15
- Requires-Dist: PyArtifactory (>=2.7.1,<3.0.0)
16
- Requires-Dist: PyGithub (>=2.6.1,<3.0.0)
17
15
  Requires-Dist: click (>=8.1.7,<9.0.0)
16
+ Requires-Dist: ghapi (>=1.0.6,<2.0.0)
18
17
  Requires-Dist: http-exceptions (>=0.2.10,<0.3.0)
19
18
  Requires-Dist: kubernetes (>=29.0.0,<30.0.0)
20
19
  Requires-Dist: minio (>=7.2.12,<8.0.0)
@@ -64,6 +63,7 @@ pip install qubership-pipelines-common-library-py39
64
63
 
65
64
  ## Sample implementation
66
65
 
67
- Sample implementation of CLI commands using this library is available at [Quber CLI](https://github.com/LightlessOne/Quber-CLI?tab=readme-ov-file)
66
+ Sample implementation of CLI commands using this library is available at [qubership-pipelines-cli-command-samples](https://github.com/Netcracker/qubership-pipelines-cli-command-samples)
67
+
68
+ It includes reference python implementation along with the [Development Guide](https://github.com/Netcracker/qubership-pipelines-cli-command-samples/blob/main/docs/development.md)
68
69
 
69
- It includes reference python implementation along with [Development Guide](https://github.com/LightlessOne/Quber-CLI/blob/master/docs/development.md)
@@ -36,6 +36,6 @@ pip install qubership-pipelines-common-library-py39
36
36
 
37
37
  ## Sample implementation
38
38
 
39
- Sample implementation of CLI commands using this library is available at [Quber CLI](https://github.com/LightlessOne/Quber-CLI?tab=readme-ov-file)
39
+ Sample implementation of CLI commands using this library is available at [qubership-pipelines-cli-command-samples](https://github.com/Netcracker/qubership-pipelines-cli-command-samples)
40
40
 
41
- It includes reference python implementation along with [Development Guide](https://github.com/LightlessOne/Quber-CLI/blob/master/docs/development.md)
41
+ It includes reference python implementation along with the [Development Guide](https://github.com/Netcracker/qubership-pipelines-cli-command-samples/blob/main/docs/development.md)
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "qubership-pipelines-common-library"
3
- version = "0.1.8"
3
+ version = "0.1.10"
4
4
  description = "Qubership Pipelines common library"
5
5
  authors = ["Igor Lebedev <lebedev.light@gmail.com>"]
6
6
  readme = "README.md"
@@ -13,7 +13,6 @@ pyyaml = "^6.0.2"
13
13
  click = "^8.1.7"
14
14
  requests = "^2.32.3"
15
15
  urllib3 = "^2.2.3"
16
- PyArtifactory = "^2.7.1"
17
16
  python-jenkins = "^1.8.2"
18
17
  GitPython = "^3.1.43"
19
18
  http-exceptions = "^0.2.10"
@@ -21,7 +20,7 @@ python-gitlab = "^4.13.0"
21
20
  minio = "^7.2.12"
22
21
  kubernetes = "^29.0.0"
23
22
  webexpythonsdk = "2.0.1"
24
- PyGithub = "^2.6.1"
23
+ ghapi = "^1.0.6"
25
24
 
26
25
  [tool.poetry.group.test.dependencies]
27
26
  pytest = "^6.0.0"
@@ -33,4 +32,4 @@ markers = [
33
32
 
34
33
  [build-system]
35
34
  requires = ["poetry-core"]
36
- build-backend = "poetry.core.masonry.api"
35
+ build-backend = "poetry.core.masonry.api"
@@ -0,0 +1,119 @@
1
+ # Copyright 2024 NetCracker Technology Corporation
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import logging
16
+ from dataclasses import dataclass
17
+
18
+ import requests
19
+ from requests.auth import HTTPBasicAuth
20
+
21
+
22
class ArtifactoryClient:
    """High-level Artifactory client exposing property lookup, folder listing and file download."""

    def __init__(self, params: dict):
        """
        **`params`** is a dictionary with following mandatory params:

        Arguments:
            url (str): Artifactory host url
            username (str): User used in auth request
            password (str): Token used in auth request
        """
        self.url = params.get("url")
        self.user = params.get("username")
        self.token = params.get("password")
        self.artifactory = ArtifactoryAPI(self.url, HTTPBasicAuth(self.user, self.token))
        logging.info("Artifactory Client configured for %s", params.get("url"))

    def get_artifact_properties(self, path_to_artifact: str):
        """Return ArtifactProperties for the artifact, or None when the lookup fails."""
        try:
            properties = self.artifactory.get_artifact_properties(artifact_path=path_to_artifact)
        except ArtifactoryError:
            # fixed grammar in the log message ("There are not" -> "There are no")
            logging.error("There are no properties for artifact %s", path_to_artifact)
            properties = None
        return properties

    def get_folder_files_list(self, path_to_folder: str):
        """Return a deep listing of files/folders under path_to_folder (list of ArtifactListEntry)."""
        return self.artifactory.get_files_list(artifact_path=path_to_folder)

    def get_artifact_content_by_url(self, path_to_file: str):
        """Download the artifact at path_to_file and return its content as a UTF-8 string."""
        return self.artifactory.get_file_content(artifact_path=path_to_file)
54
+
55
+
56
class ArtifactoryAPI:
    """Thin wrapper over the Artifactory REST API: storage info, properties, listings, content."""

    def __init__(self, api_url: str, auth, verify=False):
        """
        Arguments:
            api_url (str): Artifactory base url; trailing slashes are stripped
            auth: requests-compatible auth object (e.g. HTTPBasicAuth), optional
            verify (bool): TLS certificate verification flag applied to all requests
        """
        self.api_url = api_url.rstrip('/')
        self._session = requests.session()
        # BUG FIX: 'verify' was previously ignored (hardcoded to False). Default stays
        # False for backward compatibility, but note that disabled TLS verification
        # is insecure and should be avoided outside test environments.
        self._session.verify = verify
        if auth:
            self._session.auth = auth

    def _get(self, url):
        """GET url via the shared session; raises requests.HTTPError on non-2xx status."""
        response = self._session.get(url)
        response.raise_for_status()
        return response

    def get_artifact_info(self, artifact_path: str):
        """Return ArtifactInfo (repo, path) for artifact_path.

        Raises:
            ArtifactoryError: on any HTTP error from the storage API.
        """
        try:
            response = self._get(f"{self.api_url}/api/storage/{artifact_path}").json()
            return ArtifactInfo(repo=response['repo'], path=response['path'])
        except requests.exceptions.HTTPError as error:
            raise ArtifactoryError from error

    def get_artifact_properties(self, artifact_path: str):
        """Return ArtifactProperties of artifact_path.

        Raises:
            ArtifactoryError: on any HTTP error (including 404 when no properties exist).
        """
        try:
            response = self._get(f"{self.api_url}/api/storage/{artifact_path}?properties").json()
            return ArtifactProperties(properties=response['properties'])
        except requests.exceptions.HTTPError as error:
            raise ArtifactoryError from error

    def get_files_list(self, artifact_path: str):
        """Deep-list files and folders under artifact_path as ArtifactListEntry items.

        Raises:
            ArtifactoryError: on any HTTP error from the file-list API.
        """
        try:
            response = self._get(f"{self.api_url}/api/storage/{artifact_path}?list&deep=1&listFolders=1").json()
            # 'folder' is compared to True explicitly to coerce a strict bool from the payload
            return [ArtifactListEntry(uri=f['uri'], size=int(f['size']), folder=f['folder'] == True)
                    for f in response['files']]
        except requests.exceptions.HTTPError as error:
            raise ArtifactoryError from error

    def get_file_content(self, artifact_path: str):
        """Download artifact_path and return its content decoded as UTF-8.

        Raises:
            ArtifactoryError: on any HTTP error during info lookup or download.
        """
        try:
            info = self.get_artifact_info(artifact_path)
            return self._get(f"{self.api_url}/{info.repo}{info.path}").content.decode("utf-8")
        except requests.exceptions.HTTPError as error:
            raise ArtifactoryError from error
98
+
99
+
100
class ArtifactoryError(Exception):
    """Raised when an Artifactory REST call fails (chained from requests.HTTPError)."""
    pass
102
+
103
+
104
@dataclass
class ArtifactInfo:
    """Repository and path of an artifact, as reported by the /api/storage endpoint."""
    repo: str  # repository key
    path: str  # path within the repository; the API returns it with a leading slash
               # (get_file_content joins "{repo}{path}" without an extra separator)
108
+
109
+
110
@dataclass
class ArtifactProperties:
    """Property map of an artifact from /api/storage/...?properties."""
    properties: dict  # 'properties' object of the API response, as-is
113
+
114
+
115
@dataclass
class ArtifactListEntry:
    """One entry of a deep folder listing (?list&deep=1&listFolders=1)."""
    uri: str  # entry uri relative to the listed folder
    size: int  # size in bytes
    folder: bool  # True when the entry is a folder
@@ -18,6 +18,8 @@ import logging, os
18
18
  class ExecutionLogger:
19
19
  FILE_NAME_EXECUTION = "execution.log"
20
20
  FILE_NAME_FULL = "full.log"
21
+ EXECUTION_LOG_LEVEL = logging.INFO
22
+ FULL_LOG_LEVEL = logging.DEBUG
21
23
  DEFAULT_FORMAT = u'[%(asctime)s] [%(levelname)-5s] [class=%(filename)s:%(lineno)-3s] %(message)s'
22
24
 
23
25
  def __init__(self, path_logs):
@@ -26,18 +28,20 @@ class ExecutionLogger:
26
28
  self.path_logs = path_logs
27
29
  self.logger = logging.getLogger("execution_logger")
28
30
  self.logger.setLevel(logging.DEBUG) # set to the lowest level to allow handlers to capture anything
29
- # execution logs - only in local logger
30
31
  if path_logs:
32
+ # execution logs - only in local logger
31
33
  handler_exec = logging.FileHandler(os.path.join(path_logs, ExecutionLogger.FILE_NAME_EXECUTION))
32
- handler_exec.setLevel(logging.INFO)
34
+ handler_exec.setLevel(ExecutionLogger.EXECUTION_LOG_LEVEL)
33
35
  handler_exec.setFormatter(logging.Formatter(ExecutionLogger.DEFAULT_FORMAT))
34
36
  self.logger.addHandler(handler_exec)
35
- # full logs - attach to a global logger
36
- if path_logs:
37
+
38
+ # full logs - attach to a global logger
37
39
  handler_full = logging.FileHandler(os.path.join(path_logs, ExecutionLogger.FILE_NAME_FULL))
38
- handler_full.setLevel(logging.DEBUG)
40
+ handler_full.setLevel(ExecutionLogger.FULL_LOG_LEVEL)
39
41
  handler_full.setFormatter(logging.Formatter(ExecutionLogger.DEFAULT_FORMAT))
42
+ logging.getLogger().propagate = False
40
43
  logging.getLogger().addHandler(handler_full)
44
+ self.logger.propagate = True
41
45
 
42
46
  def info(self, msg, *args, **kwargs):
43
47
  self.logger.info(msg, *args, **kwargs)
@@ -59,7 +59,25 @@ class GitClient:
59
59
  **kwargs
60
60
  )
61
61
 
62
+ def clone_repo_from_commit_hash(self, repo_path: str, commit_hash: str, temp_path: str):
63
+ """"""
64
+ repo_path = repo_path.lstrip("/").rstrip("/")
65
+ if not repo_path:
66
+ raise Exception("Repository path should be defined")
67
+ if not commit_hash:
68
+ raise Exception("Commit hash should be defined")
69
+ if not temp_path:
70
+ raise Exception("Temporary path should be defined")
71
+ self._cleanup_resources()
72
+ self.repo_path = repo_path
73
+ self.temp_path = temp_path
74
+ self.repo = Repo.init(path=temp_path)
75
+ self.repo.create_remote(name="origin", url=self._gen_repo_auth_url(self.host, self.username, self.password, self.repo_path))
76
+ self.repo.git.fetch("--depth", "1", "origin", commit_hash)
77
+ self.repo.git.checkout("FETCH_HEAD")
78
+
62
79
    def commit_and_push(self, commit_message: str):
        """Create a commit with the given message, then push — delegates to commit() and push()."""
        self.commit(commit_message)
        self.push()
65
83
 
@@ -19,13 +19,10 @@ import uuid
19
19
  import zipfile
20
20
  import requests
21
21
 
22
+ from ghapi.all import GhApi
22
23
  from datetime import datetime, timezone
23
24
  from pathlib import Path
24
- from github import Github, Auth
25
25
  from time import sleep
26
- from github.Artifact import Artifact
27
- from github.PaginatedList import PaginatedList
28
- from github.WorkflowRun import WorkflowRun
29
26
 
30
27
  from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
31
28
 
@@ -59,11 +56,7 @@ class GithubClient:
59
56
  api_url (str): Optional Github Enterprise API URL, leave empty if using github.com
60
57
  **kwargs (Any): will be passed into Github API constructor
61
58
  """
62
- self.auth = Auth.Token(token) if token else None
63
- if api_url:
64
- self.gh = Github(base_url=api_url, auth=self.auth, **kwargs)
65
- else:
66
- self.gh = Github(auth=self.auth, **kwargs)
59
+ self.gh = GhApi(token=token, gh_host=api_url, **kwargs)
67
60
  logging.info("Github Client configured")
68
61
 
69
62
  def trigger_workflow(self, owner: str, repo_name: str, workflow_file_name: str, branch: str, pipeline_params: dict,
@@ -84,24 +77,29 @@ class GithubClient:
84
77
  pipeline_params[uuid_param_name] = str(uuid.uuid4())
85
78
  if len(pipeline_params) > GithubClient.DISPATCH_PARAMS_LIMIT:
86
79
  logging.warning(f"Trying to dispatch workflow with more than {GithubClient.DISPATCH_PARAMS_LIMIT} pipeline_params, GitHub does not support it!")
87
- workflow = self.gh.get_repo(f"{owner}/{repo_name}", lazy=True).get_workflow(workflow_file_name)
88
80
  dispatch_time = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
89
81
  execution = ExecutionInfo()
90
- is_created = workflow.create_dispatch(ref=branch, inputs=pipeline_params)
82
+ try:
83
+ self.gh.actions.create_workflow_dispatch(owner, repo_name, workflow_file_name, branch, pipeline_params)
84
+ is_created = True
85
+ except Exception as ex:
86
+ logging.error(f"Exception when triggering workflow: {ex}")
87
+ is_created = False
91
88
  logging.info(f"Workflow Dispatch event for {workflow_file_name} is sent, workflow is created: {is_created}")
92
89
  if is_created:
93
90
  current_timeout = 0
94
91
  already_checked_runs = []
95
92
  while current_timeout < timeout_seconds:
96
- runs_list = workflow.get_runs(event="workflow_dispatch", created=f">={dispatch_time}", branch=branch)
97
- if runs_list.totalCount > 0:
93
+ runs_list = self.gh.actions.list_workflow_runs(owner, repo_name, workflow_file_name, event="workflow_dispatch", created=f">={dispatch_time}", branch=branch)
94
+ if runs_list.total_count > 0:
98
95
  if find_via_uuid:
99
- created_run = self._find_run_via_uuid_input_param(runs_list, already_checked_runs,
96
+ created_run = self._find_run_via_uuid_input_param(owner, repo_name, runs_list.workflow_runs,
97
+ already_checked_runs,
100
98
  uuid_artifact_name, uuid_file_name,
101
99
  uuid_param_name,
102
100
  pipeline_params[uuid_param_name])
103
101
  else:
104
- created_run = runs_list.get_page(0).pop()
102
+ created_run = runs_list.workflow_runs[0]
105
103
  if created_run:
106
104
  logging.info(f"Pipeline successfully started at {created_run.html_url}")
107
105
  return execution.with_name(created_run.name).with_id(created_run.id) \
@@ -117,10 +115,10 @@ class GithubClient:
117
115
 
118
116
  def get_workflow_run_status(self, execution: ExecutionInfo):
119
117
  """"""
120
- repo_full_name = self._get_repo_full_name(execution)
121
- if not repo_full_name:
118
+ owner_and_repo_name = self._get_owner_and_repo(execution)
119
+ if not owner_and_repo_name:
122
120
  return execution.with_status(ExecutionInfo.STATUS_UNKNOWN)
123
- run = self.gh.get_repo(repo_full_name).get_workflow_run(int(execution.get_id()))
121
+ run = self.gh.actions.get_workflow_run(owner_and_repo_name[0], owner_and_repo_name[1], execution.get_id())
124
122
  if run:
125
123
  execution.with_status(self._map_status_and_conclusion(run.status, run.conclusion, ExecutionInfo.STATUS_UNKNOWN))
126
124
  else:
@@ -133,13 +131,13 @@ class GithubClient:
133
131
  """"""
134
132
  if break_status_list is None:
135
133
  break_status_list = self.BREAK_STATUS_LIST
136
- repo_full_name = self._get_repo_full_name(execution)
137
- if not repo_full_name:
134
+ owner_and_repo_name = self._get_owner_and_repo(execution)
135
+ if not owner_and_repo_name:
138
136
  return execution.with_status(ExecutionInfo.STATUS_UNKNOWN)
139
137
  timeout = 0
140
138
  while timeout < timeout_seconds:
141
139
  try:
142
- run = self.gh.get_repo(repo_full_name).get_workflow_run(int(execution.get_id()))
140
+ run = self.gh.actions.get_workflow_run(owner_and_repo_name[0], owner_and_repo_name[1], execution.get_id())
143
141
  execution.with_status(self._map_status_and_conclusion(run.status, run.conclusion, ExecutionInfo.STATUS_UNKNOWN))
144
142
  if run.status in break_status_list:
145
143
  logging.info(f"Workflow Run status: '{run.status}' is present in input break statuses list. Stop waiting.")
@@ -155,46 +153,54 @@ class GithubClient:
155
153
 
156
154
    def cancel_workflow_run_execution(self, execution: ExecutionInfo, timeout: float = 1.0):
        """Wait ~timeout seconds (1s ticks), cancel the workflow run, mark the execution aborted.

        Returns the execution unchanged when owner/repo can't be derived from it.
        """
        owner_and_repo_name = self._get_owner_and_repo(execution)
        if not owner_and_repo_name:
            return execution
        counter = 0
        while counter < timeout:
            counter += 1
            logging.info("Waiting pipeline execution timeout 1 second")
            sleep(1)
        self.gh.actions.cancel_workflow_run(owner_and_repo_name[0], owner_and_repo_name[1], execution.get_id())
        return execution.stop(ExecutionInfo.STATUS_ABORTED)
169
166
 
170
167
    def download_workflow_run_artifacts(self, execution: ExecutionInfo, local_dir: str):
        """Download the run's artifacts into local_dir (created if missing).

        Returns the execution unchanged when owner/repo can't be derived from it.
        NOTE(review): 4th positional arg 100 is presumably per_page, so only the first
        100 artifacts are fetched — confirm against the ghapi operation signature.
        """
        owner_and_repo_name = self._get_owner_and_repo(execution)
        if not owner_and_repo_name:
            return execution
        local_dir_path = Path(local_dir)
        if not local_dir_path.exists():
            local_dir_path.mkdir(parents=True, exist_ok=True)
        artifacts = self.gh.actions.list_workflow_run_artifacts(owner_and_repo_name[0], owner_and_repo_name[1], execution.get_id(), 100)
        for artifact in artifacts.artifacts:
            self._save_artifact_to_dir(artifact, local_dir_path)
181
178
 
182
- def get_workflow_run_input_params(self, run: WorkflowRun, artifact_name: str = DEFAULT_UUID_ARTIFACT_NAME,
179
    def get_workflow_run_input_params(self, execution: ExecutionInfo, artifact_name: str = DEFAULT_UUID_ARTIFACT_NAME,
                                      file_name: str = DEFAULT_UUID_FILE_NAME):
        """Return the input-params dict stored as a run artifact, or {} when unavailable.

        Scans the run's artifacts for one named artifact_name and reads file_name
        (JSON) from it. Returns {} when owner/repo can't be derived or no matching
        artifact is found.
        NOTE(review): 4th positional arg 100 is presumably per_page, so only the first
        100 artifacts are scanned — confirm against the ghapi operation signature.
        """
        owner_and_repo_name = self._get_owner_and_repo(execution)
        if not owner_and_repo_name:
            return {}
        artifacts = self.gh.actions.list_workflow_run_artifacts(owner_and_repo_name[0], owner_and_repo_name[1], execution.get_id(), 100)
        for artifact in artifacts.artifacts:
            if artifact.name == artifact_name:
                return self._get_input_params_from_artifact(artifact, file_name)
        logging.info(f"Could not find input_params artifact for run {execution.get_id()}")
        return {}
190
191
 
191
- def _find_run_via_uuid_input_param(self, runs_list: PaginatedList[WorkflowRun], already_checked_runs: list,
192
    def get_repo_default_branch(self, owner: str, repo_name: str):
        """Return the default branch name of the given repository."""
        return self.gh.repos.get(owner, repo_name).default_branch
194
+
195
+ def _find_run_via_uuid_input_param(self, owner: str, repo_name: str,
196
+ workflow_runs: list, already_checked_runs: list,
192
197
  uuid_artifact_name: str, uuid_file_name: str,
193
198
  uuid_param_name: str, uuid_param_value: str):
194
- for run in runs_list:
199
+ for run in workflow_runs:
195
200
  if run.id in already_checked_runs:
196
201
  continue
197
- for artifact in run.get_artifacts():
202
+ artifacts = self.gh.actions.list_workflow_run_artifacts(owner, repo_name, run.id, 100)
203
+ for artifact in artifacts.artifacts:
198
204
  if artifact.name == uuid_artifact_name:
199
205
  if self._check_input_params_uuid(artifact, uuid_file_name, uuid_param_name, uuid_param_value):
200
206
  logging.info(f"Found workflow run with expected UUID: {run.id} with {uuid_param_name}={uuid_param_value}")
@@ -204,7 +210,7 @@ class GithubClient:
204
210
  break
205
211
  return None
206
212
 
207
- def _check_input_params_uuid(self, artifact: Artifact, uuid_file_name: str, uuid_param_name: str, uuid_param_value: str):
213
+ def _check_input_params_uuid(self, artifact, uuid_file_name: str, uuid_param_name: str, uuid_param_value: str):
208
214
  try:
209
215
  input_params = self._get_input_params_from_artifact(artifact, uuid_file_name)
210
216
  return input_params.get(uuid_param_name) == uuid_param_value
@@ -212,7 +218,7 @@ class GithubClient:
212
218
  logging.error(f"Exception when downloading and checking artifact ({artifact.name}): {ex}")
213
219
  return False
214
220
 
215
- def _get_input_params_from_artifact(self, artifact: Artifact, file_name: str):
221
+ def _get_input_params_from_artifact(self, artifact, file_name: str):
216
222
  with tempfile.TemporaryDirectory() as temp_dirname:
217
223
  artifact_path = self._save_artifact_to_dir(artifact, temp_dirname)
218
224
  with zipfile.ZipFile(artifact_path) as zf:
@@ -220,13 +226,13 @@ class GithubClient:
220
226
  with open(Path(temp_dirname, file_name)) as input_params_file:
221
227
  return json.load(input_params_file)
222
228
 
223
- def _save_artifact_to_dir(self, artifact: Artifact, dirname):
229
+ def _save_artifact_to_dir(self, artifact, dirname):
224
230
  local_path = Path(dirname, f"{artifact.name}.zip")
225
- (status, headers, _) = artifact.requester.requestBlob("GET", artifact.archive_download_url)
226
- if status != 302:
227
- logging.error(f"Unexpected status while downloading run artifact {artifact.name}: expected 302, got {status}")
231
+ redirect_response = requests.get(artifact.archive_download_url, headers=self.gh.headers, allow_redirects=False)
232
+ if redirect_response.status_code != 302:
233
+ logging.error(f"Unexpected status while downloading run artifact {artifact.name}: expected 302, got {redirect_response.status_code}")
228
234
  return None
229
- response = requests.get(headers["location"])
235
+ response = requests.get(redirect_response.headers["location"])
230
236
  with local_path.open('wb') as f:
231
237
  logging.info(f"saving {local_path}...")
232
238
  f.write(response.content)
@@ -258,3 +264,10 @@ class GithubClient:
258
264
  return None
259
265
  url_parts = execution.get_url().split("://")[1].split("/")
260
266
  return f"{url_parts[1]}/{url_parts[2]}"
267
+
268
+ def _get_owner_and_repo(self, execution: ExecutionInfo):
269
+ if not execution.get_id() or not execution.get_url():
270
+ logging.error("Can't get workflow run - empty run id or url in ExecutionInfo!")
271
+ return None
272
+ url_parts = execution.get_url().split("://")[1].split("/")
273
+ return url_parts[1], url_parts[2]
@@ -0,0 +1,73 @@
1
+ import re
2
+ import click
3
+ import logging
4
+ import sys, os
5
+ from qubership_pipelines_common_library.v1.execution.exec_logger import ExecutionLogger
6
+
7
DEFAULT_CONTEXT_FILE_PATH = 'context.yaml'


def utils_cli(func):
    """Decorator to add CLI options for logging level, context path and custom input params."""
    import functools  # local import keeps the module's import block unchanged

    @click.option('--log-level', default='INFO', show_default=True,
                  type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], case_sensitive=False),
                  help="Set the logging level")
    @click.option('--context_path', required=True, default=DEFAULT_CONTEXT_FILE_PATH, type=str, help="Path to context")
    @click.option("--input_params", "-p", multiple=True, callback=_input_params_to_dict,
                  help="Params to use instead of context as key-values. Nested keys are supported with double-underscores or dots as separators, e.g. -p params__group__key=value")
    @click.pass_context
    # BUG FIX: without wraps, click derived command metadata (name "wrapper", no help
    # text) from the wrapper instead of the decorated function
    @functools.wraps(func)
    def wrapper(ctx, *args, log_level, **kwargs):
        # propagate the chosen level into ExecutionLogger's execution.log handler
        ExecutionLogger.EXECUTION_LOG_LEVEL = getattr(logging, log_level.upper(), logging.INFO)
        _configure_global_logger(logging.getLogger(), log_level, ExecutionLogger.DEFAULT_FORMAT)
        _transform_kwargs(kwargs)
        return ctx.invoke(func, *args, **kwargs)
    return wrapper
25
+
26
+
27
def _configure_global_logger(global_logger: logging.Logger, log_level: str, formatter_str: str):
    """Reset global_logger to exactly one stdout handler at the requested level.

    The logger itself is pinned at DEBUG so the handler level alone decides what is
    emitted; unrecognized level names fall back to INFO.
    """
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(getattr(logging, log_level.upper(), logging.INFO))
    handler.setFormatter(logging.Formatter(formatter_str))
    if global_logger.hasHandlers():
        global_logger.handlers.clear()
    global_logger.setLevel(logging.DEBUG)
    global_logger.propagate = True
    global_logger.addHandler(handler)
38
+
39
+
40
def _transform_kwargs(kwargs):
    """Drop the context_path argument when explicit input_params were supplied (in place)."""
    if kwargs.get("input_params"):
        del kwargs["context_path"]
43
+
44
+
45
def _input_params_to_dict(ctx, param, values: tuple[str, ...]):
    """Click callback: turn repeated KEY=VALUE options into a (possibly nested) dict.

    Returns None when no valid params were given, so click treats the option as absent.

    Raises:
        click.BadParameter: for an entry without '=' (previously crashed with an
            opaque ValueError from tuple unpacking).
    """
    result = {}
    for kvp in values:
        key, sep, value = (item.strip() for item in kvp.partition("="))
        if not sep:
            raise click.BadParameter(f"expected KEY=VALUE, got {kvp!r}")
        if _validate_key(key):
            _set_item_by_path(result, key, _transform_value(value))
    return result if result else None
52
+
53
+
54
def _validate_key(key):
    """Validation hook for input-param keys; placeholder that currently accepts every key."""
    return True
56
+
57
+
58
def _transform_value(value):
    """Transformation hook for input-param values; placeholder that returns the value unchanged."""
    return value
60
+
61
+
62
_KEY_PARTS_DELIMITER_PATTERN = re.compile(r'\.|__')


def _set_item_by_path(target_dict: dict, path, value):
    """Set a nested value in target_dict, splitting path on '.' or '__'.

    Intermediate non-dict values along the path are overwritten with fresh dicts.
    Returns target_dict for convenience.
    """
    *parents, leaf = _KEY_PARTS_DELIMITER_PATTERN.split(path)
    node = target_dict
    for part in parents:
        if not isinstance(node.get(part), dict):
            node[part] = {}
        node = node[part]
    node[leaf] = value
    return target_dict
@@ -1,52 +0,0 @@
1
- # Copyright 2024 NetCracker Technology Corporation
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
-
15
- import logging
16
- from pyartifactory import Artifactory
17
- from pyartifactory.exception import PropertyNotFoundError
18
-
19
-
20
- class ArtifactoryClient:
21
- def __init__(self, params: dict):
22
- """
23
- **`params`** is a dictionary with following mandatory params:
24
-
25
- Arguments:
26
- url (str): Artifactory host url
27
- username (str): User used in auth request
28
- password (str): Token used in auth request
29
- """
30
- self.url = params.get("url")
31
- self.user = params.get("username")
32
- self.token = params.get("password")
33
- self.artifactory = Artifactory(url=self.url, auth=(self.user, self.token), api_version=1)
34
- logging.info("Artifactory Client configured for %s", params.get("url"))
35
-
36
- def get_artifact_properties(self, path_to_artifact: str):
37
- """"""
38
- try:
39
- properties = self.artifactory.artifacts.properties(artifact_path=path_to_artifact)
40
- except PropertyNotFoundError:
41
- logging.error("There are not properties for artifact %s", path_to_artifact)
42
- properties = None
43
- return properties
44
-
45
- def get_folder_files_list(self, path_to_folder: str):
46
- """"""
47
- return self.artifactory.artifacts.list(artifact_path=path_to_folder).files
48
-
49
- def get_artifact_content_by_url(self, path_to_file: str):
50
- """"""
51
- file_content = self.artifactory.artifacts.download(artifact_path=path_to_file)
52
- return file_content.read_text("utf-8")