qubership-pipelines-common-library 2.0.0__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51):
  1. qubership_pipelines_common_library/__init__.py +1 -1
  2. qubership_pipelines_common_library/v1/artifactory_client.py +1 -1
  3. qubership_pipelines_common_library/v1/execution/exec_command.py +11 -1
  4. qubership_pipelines_common_library/v1/execution/exec_context.py +6 -6
  5. qubership_pipelines_common_library/v1/execution/exec_context_file.py +1 -1
  6. qubership_pipelines_common_library/v1/execution/exec_logger.py +7 -5
  7. qubership_pipelines_common_library/v1/github_client.py +1 -1
  8. qubership_pipelines_common_library/v1/gitlab_client.py +11 -7
  9. qubership_pipelines_common_library/v1/jenkins_client.py +55 -18
  10. qubership_pipelines_common_library/v1/maven_client.py +2 -2
  11. qubership_pipelines_common_library/v1/minio_client.py +1 -1
  12. qubership_pipelines_common_library/v1/utils/rest.py +1 -1
  13. qubership_pipelines_common_library/v1/utils/utils.py +1 -1
  14. qubership_pipelines_common_library/v1/utils/utils_cli.py +43 -9
  15. qubership_pipelines_common_library/v1/utils/utils_dictionary.py +1 -1
  16. qubership_pipelines_common_library/v1/utils/utils_logging.py +53 -0
  17. qubership_pipelines_common_library/v2/artifacts_finder/__init__.py +0 -0
  18. qubership_pipelines_common_library/v2/artifacts_finder/artifact_finder.py +56 -0
  19. qubership_pipelines_common_library/v2/artifacts_finder/auth/__init__.py +0 -0
  20. qubership_pipelines_common_library/v2/artifacts_finder/auth/aws_credentials.py +106 -0
  21. qubership_pipelines_common_library/v2/artifacts_finder/auth/azure_credentials.py +72 -0
  22. qubership_pipelines_common_library/v2/artifacts_finder/auth/gcp_credentials.py +88 -0
  23. qubership_pipelines_common_library/v2/artifacts_finder/model/__init__.py +0 -0
  24. qubership_pipelines_common_library/v2/artifacts_finder/model/artifact.py +20 -0
  25. qubership_pipelines_common_library/v2/artifacts_finder/model/artifact_provider.py +35 -0
  26. qubership_pipelines_common_library/v2/artifacts_finder/model/credentials.py +16 -0
  27. qubership_pipelines_common_library/v2/artifacts_finder/model/credentials_provider.py +16 -0
  28. qubership_pipelines_common_library/v2/artifacts_finder/providers/__init__.py +0 -0
  29. qubership_pipelines_common_library/v2/artifacts_finder/providers/artifactory.py +52 -0
  30. qubership_pipelines_common_library/v2/artifacts_finder/providers/aws_code_artifact.py +79 -0
  31. qubership_pipelines_common_library/v2/artifacts_finder/providers/azure_artifacts.py +98 -0
  32. qubership_pipelines_common_library/v2/artifacts_finder/providers/gcp_artifact_registry.py +50 -0
  33. qubership_pipelines_common_library/v2/artifacts_finder/providers/nexus.py +41 -0
  34. qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py +3 -3
  35. qubership_pipelines_common_library/v2/gitlab/custom_extensions.py +1 -1
  36. qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py +4 -4
  37. qubership_pipelines_common_library/v2/jenkins/__init__.py +0 -0
  38. qubership_pipelines_common_library/v2/jenkins/custom_extensions.py +63 -0
  39. qubership_pipelines_common_library/v2/jenkins/jenkins_client.py +5 -0
  40. qubership_pipelines_common_library/v2/jenkins/jenkins_pipeline_data_importer.py +31 -0
  41. qubership_pipelines_common_library/v2/jenkins/jenkins_run_pipeline_command.py +165 -0
  42. qubership_pipelines_common_library/v2/jenkins/safe_jenkins_client.py +14 -0
  43. qubership_pipelines_common_library/v2/podman/podman_command.md +7 -1
  44. qubership_pipelines_common_library/v2/podman/podman_command.py +4 -4
  45. qubership_pipelines_common_library/v2/sops/sops_client.py +2 -2
  46. qubership_pipelines_common_library/v2/utils/retry_decorator.py +5 -5
  47. {qubership_pipelines_common_library-2.0.0.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/METADATA +5 -3
  48. qubership_pipelines_common_library-2.0.1.dist-info/RECORD +76 -0
  49. qubership_pipelines_common_library-2.0.0.dist-info/RECORD +0 -52
  50. {qubership_pipelines_common_library-2.0.0.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/WHEEL +0 -0
  51. {qubership_pipelines_common_library-2.0.0.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/licenses/LICENSE +0 -0
@@ -14,4 +14,4 @@
14
14
 
15
15
  import sys, os
16
16
  current_path = os.path.dirname(os.path.abspath(__file__))
17
- sys.path.insert(0, current_path)
17
+ sys.path.insert(0, current_path)
@@ -84,7 +84,7 @@ class ArtifactoryAPI:
84
84
  def get_files_list(self, artifact_path: str):
85
85
  try:
86
86
  response = self._get(f"{self.api_url}/api/storage/{artifact_path}?list&deep=1&listFolders=1").json()
87
- return [ArtifactListEntry(uri=f['uri'], size=int(f['size']), folder=f['folder'] == True) for f in
87
+ return [ArtifactListEntry(uri=f['uri'], size=int(f['size']), folder=(f['folder'] is True)) for f in
88
88
  response['files']]
89
89
  except requests.exceptions.HTTPError as error:
90
90
  raise ArtifactoryError from error
@@ -61,6 +61,8 @@ class ExecutionCommand:
61
61
  def run(self):
62
62
  """Runs command following its lifecycle"""
63
63
  try:
64
+ self._log_command_class_name()
65
+ self._log_border_line()
64
66
  self._log_input_params()
65
67
  if not self._validate():
66
68
  logging.error(ExecutionCommand.FAILURE_MSG)
@@ -69,9 +71,17 @@ class ExecutionCommand:
69
71
  self._execute()
70
72
  self._post_execute()
71
73
  self._exit(True, ExecutionCommand.SUCCESS_MSG)
72
- except Exception as e:
74
+ except Exception:
73
75
  logging.error(traceback.format_exc())
74
76
  self._exit(False, ExecutionCommand.FAILURE_MSG)
77
+ finally:
78
+ self._log_border_line()
79
+
80
+ def _log_command_class_name(self):
81
+ self.context.logger.info("command_class_name = %s", type(self).__name__)
82
+
83
+ def _log_border_line(self):
84
+ self.context.logger.info("=" * 60)
75
85
 
76
86
  def _log_input_params(self):
77
87
  self.context.logger.info(
@@ -12,7 +12,7 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
 
15
- import os, logging
15
+ import os
16
16
 
17
17
  from pathlib import Path
18
18
  from qubership_pipelines_common_library.v1.utils.utils_file import UtilsFile
@@ -40,7 +40,7 @@ class ExecutionContext:
40
40
  self.__init_temp_folder()
41
41
  self.__init_logger()
42
42
  # load context from files
43
- logging.info(f"""Execution context params:
43
+ self.logger.debug(f"""Execution context params:
44
44
  paths.logs: {self.context.get("paths.logs")}
45
45
  paths.temp: {self.context.get("paths.temp")}
46
46
  paths.input.params: {self.context.get("paths.input.params")}
@@ -55,10 +55,10 @@ class ExecutionContext:
55
55
  def output_params_save(self):
56
56
  """Stores output_param files to disk"""
57
57
  if self.context.get("paths.output.params"):
58
- logging.info(f"Writing insecure param file '{self.context.get('paths.output.params')}'")
58
+ self.logger.info(f"Writing insecure param file '{self.context.get('paths.output.params')}'")
59
59
  self.output_params.save(self.context.get("paths.output.params"))
60
60
  if self.context.get("paths.output.params_secure"):
61
- logging.info(f"Writing secure param file '{self.context.get('paths.output.params_secure')}'")
61
+ self.logger.info(f"Writing secure param file '{self.context.get('paths.output.params_secure')}'")
62
62
  self.output_params_secure.save(self.context.get("paths.output.params_secure"))
63
63
 
64
64
  def input_param_get(self, path, def_value=None):
@@ -85,13 +85,13 @@ class ExecutionContext:
85
85
  if not self.__validate_param(key):
86
86
  valid = False
87
87
  if not silent:
88
- logging.error(f"Parameter '{key}' is mandatory but not defined")
88
+ self.logger.error(f"Parameter '{key}' is mandatory but not defined")
89
89
  return valid
90
90
 
91
91
  def __validate_param(self, name):
92
92
  try:
93
93
  return self.context.get(name) or self.input_param_get(name) # or self.__dict__.get(name)
94
- except:
94
+ except Exception:
95
95
  return False
96
96
 
97
97
  def __input_params_load(self):
@@ -141,7 +141,7 @@ class ExecutionContextFile:
141
141
  logging.error(f"Incorrect apiVersion value: {self.content['apiVersion']} in file '{full_path}'. "
142
142
  f"Only '{ExecutionContextFile.SUPPORTED_API_VERSIONS}' are supported")
143
143
  self.init_empty()
144
- except FileNotFoundError as e:
144
+ except FileNotFoundError:
145
145
  self.init_empty()
146
146
 
147
147
  def save(self, path):
@@ -12,7 +12,8 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
 
15
- import logging, os
15
+ import logging
16
+ import os
16
17
 
17
18
 
18
19
  class ExecutionLogger:
@@ -20,7 +21,8 @@ class ExecutionLogger:
20
21
  FILE_NAME_FULL = "full.log"
21
22
  EXECUTION_LOG_LEVEL = logging.INFO
22
23
  FULL_LOG_LEVEL = logging.DEBUG
23
- DEFAULT_FORMAT = u'[%(asctime)s] [%(levelname)-5s] [class=%(filename)s:%(lineno)-3s] %(message)s'
24
+ DEFAULT_FORMAT = u'[%(asctime)s] [%(levelname)-7s] [class=%(filename)s:%(lineno)-3s] %(message)s'
25
+ LEVELNAME_COLORED_FORMAT = u'[%(asctime)s] [%(levelname_color_open_tag)s%(levelname)-7s%(levelname_color_close_tag)s] \\[class=%(filename)s:%(lineno)-3s] %(message)s'
24
26
 
25
27
  def __init__(self, path_logs):
26
28
  """
@@ -34,7 +36,9 @@ class ExecutionLogger:
34
36
  # Also, file handlers are never removed
35
37
  self.path_logs = path_logs
36
38
  self.logger = logging.getLogger("execution_logger")
37
- self.logger.setLevel(logging.DEBUG) # set to the lowest level to allow handlers to capture anything
39
+ self.logger.setLevel(logging.DEBUG) # set to the lowest level to allow handlers to capture anything
40
+ self.logger.propagate = True
41
+
38
42
  if path_logs:
39
43
  # execution logs - only in local logger
40
44
  handler_exec = logging.FileHandler(os.path.join(path_logs, ExecutionLogger.FILE_NAME_EXECUTION))
@@ -46,9 +50,7 @@ class ExecutionLogger:
46
50
  handler_full = logging.FileHandler(os.path.join(path_logs, ExecutionLogger.FILE_NAME_FULL))
47
51
  handler_full.setLevel(ExecutionLogger.FULL_LOG_LEVEL)
48
52
  handler_full.setFormatter(logging.Formatter(ExecutionLogger.DEFAULT_FORMAT))
49
- logging.getLogger().propagate = False
50
53
  logging.getLogger().addHandler(handler_full)
51
- self.logger.propagate = True
52
54
 
53
55
  def info(self, msg, *args, **kwargs):
54
56
  self.logger.info(msg, *args, **kwargs)
@@ -152,7 +152,7 @@ class GithubClient:
152
152
  logging.info(f"Workflow Run status: '{run.status}' is present in input break statuses list. Stop waiting.")
153
153
  execution.stop()
154
154
  break
155
- except:
155
+ except Exception:
156
156
  pass
157
157
  timeout += wait_seconds
158
158
  logging.info(f"Waiting workflow run execution timeout {wait_seconds} seconds")
@@ -165,7 +165,7 @@ class GitlabClient:
165
165
  logging.info(f"Pipeline status: '{pipeline.status}' contains in input break status list. Stop waiting.")
166
166
  execution.stop()
167
167
  break
168
- except:
168
+ except Exception:
169
169
  pass
170
170
  now = time.perf_counter()
171
171
  retries += 1
@@ -234,15 +234,19 @@ class GitlabClient:
234
234
  if e.response_code == 404:
235
235
  logging.warning(f"No artifacts for job {job_id}")
236
236
  return None
237
- else: raise
237
+ else:
238
+ raise
238
239
  logging.info(f"Artifacts downloaded to {local_file}")
239
240
  return local_file
240
241
 
241
242
  @staticmethod
242
243
  def _cast_to_string(value) -> str:
243
- if isinstance(value, str): return value
244
- if value is None: return ''
245
- if isinstance(value, bool): return 'true' if value else 'false'
244
+ if isinstance(value, str):
245
+ return value
246
+ if value is None:
247
+ return ''
248
+ if isinstance(value, bool):
249
+ return 'true' if value else 'false'
246
250
  return str(value)
247
251
 
248
252
  def _map_status(self, git_status: str, default_status: str):
@@ -323,7 +327,7 @@ class GitlabClient:
323
327
  def make_first_commit_to_gitlab_project(gitlab_url, gitlab_token, project_id, repo_branch):
324
328
  """"""
325
329
  import requests
326
- logging.debug(f"Making first commit...")
330
+ logging.debug("Making first commit...")
327
331
  headers = {"PRIVATE-TOKEN": gitlab_token, "Content-Type": "application/json"}
328
332
  commit_payload = {
329
333
  "branch": repo_branch,
@@ -343,6 +347,6 @@ class GitlabClient:
343
347
  json=commit_payload
344
348
  )
345
349
  if response.status_code == 201:
346
- logging.info(f"Commit successfull")
350
+ logging.info("Commit successfull")
347
351
  else:
348
352
  logging.error(f"Error {response.status_code}: {response.text}")
@@ -12,14 +12,15 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
 
15
- import logging, jenkins
15
+ import logging, jenkins, time
16
16
 
17
- from time import sleep
18
17
  from pathlib import Path
19
18
  from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
19
+ from qubership_pipelines_common_library.v1.utils.utils_file import UtilsFile
20
20
 
21
21
 
22
22
  class JenkinsClient:
23
+
23
24
  # statuses taken from https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/Result.java
24
25
  STATUS_SUCCESS = "SUCCESS"
25
26
  STATUS_UNSTABLE = "UNSTABLE"
@@ -27,15 +28,24 @@ class JenkinsClient:
27
28
  STATUS_ABORTED = "ABORTED"
28
29
  STATUS_NOT_BUILT = "NOT_BUILT"
29
30
 
30
- STATUSES_COMPLETE = [STATUS_SUCCESS, STATUS_UNSTABLE, STATUS_FAILURE, STATUS_ABORTED, STATUS_NOT_BUILT]
31
+ BUILD_ARTIFACTS_ZIP_PATH = "*zip*/archive.zip"
31
32
 
32
33
  def __init__(self, host: str, user: str, password: str):
33
34
  """
35
+ This class is deprecated and will be removed in v3.0.0. Use class from v2 module instead.
34
36
  Arguments:
35
37
  host (str): Jenkins host URL
36
38
  user (str): User used in auth request
37
39
  password (str): Token used in auth request
38
40
  """
41
+ if self.__class__ == JenkinsClient:
42
+ import warnings
43
+ warnings.warn(
44
+ "v1.jenkins_client.JenkinsClient is deprecated since v2.0.0 and will be removed in v3.0.0. "
45
+ "Use v2.jenkins.jenkins_client.JenkinsClient instead.",
46
+ DeprecationWarning,
47
+ stacklevel=2
48
+ )
39
49
  self.url = host
40
50
  self.user = user
41
51
  self.token = password
@@ -63,13 +73,16 @@ class JenkinsClient:
63
73
  if count < timeout_seconds:
64
74
  logging.info("Job is not queued yet, waiting %s of %s", count, timeout_seconds)
65
75
  count += wait_seconds
66
- sleep(wait_seconds)
76
+ time.sleep(wait_seconds)
67
77
  continue
68
78
  else:
69
79
  logging.error("Wasn't able to queue the job within %s seconds", timeout_seconds)
70
80
  return execution
71
81
  count = 0
72
82
  build_id = 0
83
+ if timeout_seconds < 1:
84
+ logging.debug("Job put to queue, not fetching job id in async mode...")
85
+ return execution.start()
73
86
  while build_id == 0:
74
87
  try:
75
88
  queue_info = self.server.get_queue_item(queue_id)
@@ -79,7 +92,7 @@ class JenkinsClient:
79
92
  if count < timeout_seconds:
80
93
  logging.info("Job is not started yet, waiting %s of %s", count, timeout_seconds)
81
94
  count += 5
82
- sleep(5)
95
+ time.sleep(5)
83
96
  continue
84
97
  else:
85
98
  logging.error("Wasn't able to start job within %s seconds", timeout_seconds)
@@ -100,15 +113,24 @@ class JenkinsClient:
100
113
  logging.error("Can't get job result within %s seconds", timeout_seconds)
101
114
  return execution
102
115
 
103
- def wait_pipeline_execution(self, execution: ExecutionInfo, timeout_seconds: float, wait_seconds: float = 1.0):
116
+ def wait_pipeline_execution(self, execution: ExecutionInfo, timeout_seconds: float = 180.0, wait_seconds: float = 1.0):
104
117
  """"""
105
- count = 0
106
- while count < timeout_seconds:
118
+ count_seconds = 0
119
+ last_log_time = time.perf_counter()
120
+ estimated_max_attempts = timeout_seconds // wait_seconds
121
+ retries = 0
122
+ while count_seconds < timeout_seconds:
107
123
  try:
108
124
  build_info = self.server.get_build_info(execution.get_name(), execution.get_id(), depth=0)
109
125
  logging.debug("Job info: %s", build_info)
110
126
  build_result = build_info["result"]
111
- if build_info["inProgress"] == False and build_result:
127
+
128
+ if "inProgress" in build_info: # Jenkins version >= 2.375. Use 'inProgress' property
129
+ is_job_stopped = build_info["inProgress"] is False and build_result
130
+ else: # Jenkins version <= 2.369. Use 'building' property
131
+ is_job_stopped = build_info["building"] is False and build_result
132
+
133
+ if is_job_stopped:
112
134
  logging.info("Job is stopped with result '%s'", build_result)
113
135
  build_url = build_info["url"]
114
136
  build_status = self._map_status(build_result, ExecutionInfo.STATUS_UNKNOWN)
@@ -116,12 +138,15 @@ class JenkinsClient:
116
138
  break
117
139
  except Exception:
118
140
  execution.with_status(ExecutionInfo.STATUS_UNKNOWN)
119
- logging.error("Failed to get information about jon with name '%s' and id '%s'",
120
- execution.get_name(), execution.get_id())
121
- logging.info("Waiting job execution %s of %s", count, timeout_seconds)
122
- count += wait_seconds
123
- sleep(wait_seconds)
124
- if count >= timeout_seconds:
141
+ logging.error("Failed to get information about job with name '%s' and id '%s'", execution.get_name(), execution.get_id())
142
+ now = time.perf_counter()
143
+ retries += 1
144
+ if now - last_log_time >= 10.0:
145
+ logging.info(f"Made [{retries} of {estimated_max_attempts}] retries. Waiting pipeline execution {count_seconds} of {timeout_seconds}")
146
+ last_log_time = now
147
+ count_seconds += wait_seconds
148
+ time.sleep(wait_seconds)
149
+ if count_seconds >= timeout_seconds:
125
150
  execution.with_status(ExecutionInfo.STATUS_TIMEOUT)
126
151
  return execution
127
152
 
@@ -132,7 +157,7 @@ class JenkinsClient:
132
157
  while count < timeout_seconds:
133
158
  logging.info("Waiting while job stop %s of %s", count, timeout_seconds)
134
159
  count += wait_seconds
135
- sleep(wait_seconds)
160
+ time.sleep(wait_seconds)
136
161
  return execution.stop(ExecutionInfo.STATUS_ABORTED)
137
162
 
138
163
  def get_pipeline_execution_artifacts(self, execution: ExecutionInfo, timeout_seconds: float = 30.0, wait_seconds: float = 1.0):
@@ -149,7 +174,19 @@ class JenkinsClient:
149
174
  def save_pipeline_execution_artifact_to_file(self, execution: ExecutionInfo, artifact_path: str, file_path: str):
150
175
  """"""
151
176
  artifact_bytes = self.server.get_build_artifact_as_bytes(execution.get_name(), execution.get_id(), artifact_path)
152
- Path(file_path).write_bytes(artifact_bytes)
177
+ UtilsFile.create_parent_dirs(file_path)
178
+ # Jenkins might return gzipped artifacts:
179
+ if len(artifact_bytes) >= 2 and artifact_bytes[0] == 0x1f and artifact_bytes[1] == 0x8b:
180
+ try:
181
+ import io, gzip
182
+ with gzip.GzipFile(fileobj=io.BytesIO(artifact_bytes)) as f:
183
+ decompressed_bytes = f.read()
184
+ Path(file_path).write_bytes(decompressed_bytes)
185
+ except Exception as e:
186
+ logging.warning(f"Failed to decompress gzip, writing raw: {e}")
187
+ Path(file_path).write_bytes(artifact_bytes)
188
+ else:
189
+ Path(file_path).write_bytes(artifact_bytes)
153
190
 
154
191
  def _get_build_info(self, execution: ExecutionInfo, timeout_seconds: float = 30.0, wait_seconds: float = 1.0):
155
192
  count = 0
@@ -160,7 +197,7 @@ class JenkinsClient:
160
197
  except Exception:
161
198
  logging.info("Can't get job result, waiting %s of %s", count, timeout_seconds)
162
199
  count = count + wait_seconds
163
- sleep(wait_seconds)
200
+ time.sleep(wait_seconds)
164
201
  continue
165
202
  return None
166
203
 
@@ -22,7 +22,7 @@ class Artifact:
22
22
  def from_string(artifact_str: str):
23
23
  parts = artifact_str.split(":")
24
24
  if len(parts) == 3:
25
- group, artifact, version = parts[0], parts[1], parts[-1]
25
+ artifact, version = parts[1], parts[-1]
26
26
  return Artifact(artifact, version)
27
27
 
28
28
 
@@ -67,7 +67,7 @@ class MavenArtifactSearcher:
67
67
  if not artifact:
68
68
  artifact = Artifact(artifact_id=artifact_id, version=version, extension=extension)
69
69
  if not artifact.artifact_id or not artifact.version:
70
- raise Exception(f"Artifact 'artifact_id' and 'version' must be specified!")
70
+ raise Exception("Artifact 'artifact_id' and 'version' must be specified!")
71
71
  logging.debug(f"Searching for '{artifact.artifact_id}' in {self.registry_url}...")
72
72
  return self._search_func(artifact=artifact)
73
73
 
@@ -68,7 +68,7 @@ class MinioClient:
68
68
 
69
69
  def put_file(self, bucket_name: str, path: str, local_path: str):
70
70
  """"""
71
- result = self.minio.fput_object(bucket_name, path, local_path)
71
+ self.minio.fput_object(bucket_name, path, local_path)
72
72
 
73
73
  def get_text_file_content(self, bucket_name: str, file_path: str):
74
74
  """"""
@@ -77,4 +77,4 @@ class RestClient:
77
77
  return False
78
78
  except requests.RequestException as e:
79
79
  logging.error(f"Error: {e}")
80
- return False
80
+ return False
@@ -47,4 +47,4 @@ def recursive_merge(source: dict, target: dict):
47
47
  source[key] = recursive_merge(source[key], value)
48
48
  else:
49
49
  source[key] = value
50
- return source
50
+ return source
@@ -1,14 +1,21 @@
1
+ import logging
1
2
  import re
3
+
2
4
  import click
3
- import logging
4
- import sys, os
5
+ from rich import box
6
+ from rich.logging import RichHandler
7
+ from rich.panel import Panel
8
+
5
9
  from qubership_pipelines_common_library.v1.execution.exec_logger import ExecutionLogger
10
+ from qubership_pipelines_common_library.v1.utils.utils_logging import rich_console, ExtendedReprHighlighter, \
11
+ LevelColorFilter
6
12
 
7
13
  DEFAULT_CONTEXT_FILE_PATH = 'context.yaml'
8
14
 
9
15
 
10
16
  def utils_cli(func):
11
17
  """Decorator to add CLI options for logging level, context path and custom input params."""
18
+
12
19
  @click.option('--log-level', default='INFO', show_default=True,
13
20
  type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], case_sensitive=False),
14
21
  help="Set the logging level")
@@ -20,23 +27,50 @@ def utils_cli(func):
20
27
  @click.pass_context
21
28
  def wrapper(ctx, *args, log_level, **kwargs):
22
29
  ExecutionLogger.EXECUTION_LOG_LEVEL = getattr(logging, log_level.upper(), logging.INFO)
23
- _configure_global_logger(logging.getLogger(), log_level, ExecutionLogger.DEFAULT_FORMAT)
30
+ _configure_global_logger(logging.getLogger(), log_level)
31
+ _print_command_name()
24
32
  _transform_kwargs(kwargs)
25
33
  return ctx.invoke(func, *args, **kwargs)
34
+
26
35
  return wrapper
27
36
 
28
37
 
29
- def _configure_global_logger(global_logger: logging.Logger, log_level: str, formatter_str: str):
38
+ def _configure_global_logger(global_logger: logging.Logger, log_level: str):
30
39
  """Configure the global logger with a specific log level and formatter."""
31
- log_level_value = getattr(logging, log_level.upper(), logging.INFO)
32
40
  global_logger.setLevel(logging.DEBUG)
33
41
  if global_logger.hasHandlers():
34
42
  global_logger.handlers.clear()
35
43
  global_logger.propagate = True
36
- stdout_handler = logging.StreamHandler(sys.stdout)
37
- stdout_handler.setLevel(log_level_value)
38
- stdout_handler.setFormatter(logging.Formatter(formatter_str))
39
- global_logger.addHandler(stdout_handler)
44
+ rich_handler = RichHandler(
45
+ console=rich_console,
46
+ show_time=False,
47
+ show_level=False,
48
+ show_path=False,
49
+ enable_link_path=False,
50
+ rich_tracebacks=True,
51
+ tracebacks_show_locals=False,
52
+ markup=True,
53
+ highlighter=ExtendedReprHighlighter(),
54
+ )
55
+ rich_handler.addFilter(LevelColorFilter())
56
+ rich_handler.setFormatter(logging.Formatter(ExecutionLogger.LEVELNAME_COLORED_FORMAT))
57
+ log_level_value = getattr(logging, log_level.upper(), logging.INFO)
58
+ rich_handler.setLevel(log_level_value)
59
+ global_logger.addHandler(rich_handler)
60
+
61
+
62
+ def _print_command_name():
63
+ try:
64
+ click_context = click.get_current_context()
65
+ command_name = click_context.command.name or click_context.info_name
66
+ except RuntimeError:
67
+ logging.getLogger().warning("Can't find command name.")
68
+ command_name = ""
69
+
70
+ command_panel = Panel(f"command_name = {command_name}", expand=False, padding=(0, 1), box=box.ROUNDED)
71
+ rich_console.print()
72
+ rich_console.print(command_panel)
73
+ rich_console.print()
40
74
 
41
75
 
42
76
  def _transform_kwargs(kwargs):
@@ -39,4 +39,4 @@ class UtilsDictionary:
39
39
  elif key not in curr:
40
40
  curr[key] = {}
41
41
  curr = curr[key]
42
- return input_dict
42
+ return input_dict
@@ -0,0 +1,53 @@
1
+ import logging
2
+
3
+ from rich.console import Console
4
+ from rich.highlighter import ReprHighlighter
5
+ from rich.theme import Theme
6
+
7
+ soft_theme = Theme({
8
+ "repr.number": "rgb(180,200,255)",
9
+ "repr.bool_true": "rgb(140,230,140)",
10
+ "repr.bool_false": "rgb(230,140,140)",
11
+ "repr.none": "rgb(200,200,200) italic",
12
+ "repr.path": "rgb(190,220,160)",
13
+ "repr.filename": "rgb(160,210,190)",
14
+ "repr.url": "rgb(130,180,255) underline",
15
+ "repr.uuid": "rgb(200,180,220)",
16
+ "repr.attrib_name": "rgb(220,200,130)",
17
+ "repr.attrib_value": "rgb(170,190,220)",
18
+ "repr.str": "rgb(140,180,140)",
19
+ "repr.tag_name": "rgb(200,170,220)",
20
+ "repr.tag_value": "rgb(170,200,220)",
21
+
22
+ "repr.time": "rgb(160,190,220) italic",
23
+ })
24
+
25
+ rich_console = Console(
26
+ theme=soft_theme,
27
+ force_terminal=True,
28
+ no_color=False,
29
+ highlight=True,
30
+ width=150,
31
+ )
32
+
33
+ level_colors = {
34
+ logging.DEBUG: "steel_blue",
35
+ logging.INFO: "light_sea_green",
36
+ logging.WARNING: "orange3",
37
+ logging.ERROR: "indian_red",
38
+ logging.CRITICAL: "bold medium_violet_red",
39
+ }
40
+
41
+
42
+ class LevelColorFilter(logging.Filter):
43
+ def filter(self, record):
44
+ color = level_colors.get(record.levelno, "default")
45
+ record.levelname_color_open_tag = f"[{color}]"
46
+ record.levelname_color_close_tag = "[/]"
47
+ return True
48
+
49
+
50
+ class ExtendedReprHighlighter(ReprHighlighter):
51
+ highlights = ReprHighlighter.highlights + [
52
+ r"(?P<time>\b([01]?[0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9](?:[.,]\d{1,9})?\b)",
53
+ ]
@@ -0,0 +1,56 @@
1
+ import logging
2
+ from pathlib import Path
3
+
4
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact import Artifact
5
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact_provider import ArtifactProvider
6
+
7
+
8
+ class ArtifactFinder:
9
+ """
10
+ Allows searching for specific descriptor artifacts in different repositories without knowing full coordinates
11
+ (e.g. knowing only `artifact_id` and `version`, but not its `group_id`)
12
+
13
+ Supports different repository providers: Artifactory, Nexus, AWS, GCP, Azure
14
+
15
+ Provides different auth methods for Cloud Providers, implementing `CloudCredentialsProvider` interface
16
+
17
+ Start by initializing this client with one of implementations:
18
+ ``finder = ArtifactFinder(artifact_provider=ArtifactoryProvider(registry_url="https://our_url", username="user", password="password"))``
19
+
20
+ Then find your artifacts using
21
+ ``resource_urls = finder.find_artifact_urls(artifact_id='art_id', version='1.0.0', extension='json')``
22
+
23
+ Additionally, perform filtering of returned results (if you expect to find more than one artifact), and then download necessary artifacts with
24
+ ``finder.download_artifact(one_of_the_returned_resource_urls, './my_artifact.json')``
25
+
26
+ For more complex providers (e.g. AWS Code Artifact), you need to use specific Credential Providers
27
+ As an example:
28
+ ```
29
+ aws_creds = AwsCredentialsProvider().with_assume_role(...all the required params...).get_credentials()
30
+ aws_code_artifact_provider = AwsCodeArtifactProvider(creds=creds, domain='our_domain', project='our_project')
31
+ finder = ArtifactFinder(artifact_provider=aws_code_artifact_provider)
32
+ ```
33
+ """
34
+
35
+ def __init__(self, artifact_provider: ArtifactProvider, **kwargs):
36
+ if not artifact_provider:
37
+ raise Exception("Initialize ArtifactFinder with one of registry artifact providers first!")
38
+ self.provider = artifact_provider
39
+
40
+ def find_artifact_urls(self, artifact_id: str = None, version: str = None, group_id: str = None,
41
+ extension: str = "jar", artifact: Artifact = None) -> list[str]:
42
+ if not artifact:
43
+ artifact = Artifact(group_id=group_id, artifact_id=artifact_id, version=version, extension=extension)
44
+ if not artifact.artifact_id or not artifact.version:
45
+ raise Exception("Artifact 'artifact_id' and 'version' must be specified!")
46
+ logging.debug(f"Searching for '{artifact.artifact_id}:{artifact.version}' in {self.provider.get_provider_name()}...")
47
+ return self.provider.search_artifacts(artifact=artifact)
48
+
49
+ def download_artifact(self, resource_url: str, local_path: str | Path, artifact: Artifact = None):
50
+ from qubership_pipelines_common_library.v1.utils.utils_file import UtilsFile
51
+ download_path = Path(local_path)
52
+ if artifact:
53
+ download_path = download_path.joinpath(artifact.get_filename())
54
+ UtilsFile.create_parent_dirs(download_path)
55
+ logging.debug(f"Downloading artifact from '{resource_url}' to '{download_path}'...")
56
+ return self.provider.download_artifact(resource_url=resource_url, local_path=download_path)