qubership-pipelines-common-library 0.2.6__tar.gz → 2.0.1__tar.gz

This diff shows the content changes between two publicly released versions of the package, exactly as they appear in their public registry. It is provided for informational purposes only.
Files changed (76)
  1. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/PKG-INFO +5 -3
  2. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/README.md +3 -2
  3. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/pyproject.toml +3 -1
  4. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/__init__.py +1 -1
  5. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/artifactory_client.py +1 -1
  6. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/execution/exec_command.py +63 -2
  7. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/execution/exec_context.py +6 -6
  8. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/execution/exec_context_file.py +1 -1
  9. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/execution/exec_info.py +4 -0
  10. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/execution/exec_logger.py +7 -5
  11. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/github_client.py +10 -1
  12. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/gitlab_client.py +175 -11
  13. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/jenkins_client.py +55 -18
  14. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/maven_client.py +2 -2
  15. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/minio_client.py +1 -1
  16. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/utils/rest.py +1 -1
  17. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/utils/utils.py +1 -1
  18. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/utils/utils_cli.py +43 -9
  19. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/utils/utils_dictionary.py +1 -1
  20. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/utils/utils_file.py +17 -0
  21. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v1/utils/utils_logging.py +53 -0
  22. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/__init__.py +0 -0
  23. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/__init__.py +0 -0
  24. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/artifact_finder.py +56 -0
  25. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/auth/__init__.py +0 -0
  26. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/auth/aws_credentials.py +106 -0
  27. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/auth/azure_credentials.py +72 -0
  28. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/auth/gcp_credentials.py +88 -0
  29. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/model/__init__.py +0 -0
  30. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/model/artifact.py +20 -0
  31. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/model/artifact_provider.py +35 -0
  32. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/model/credentials.py +16 -0
  33. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/model/credentials_provider.py +16 -0
  34. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/providers/__init__.py +0 -0
  35. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/providers/artifactory.py +52 -0
  36. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/providers/aws_code_artifact.py +79 -0
  37. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/providers/azure_artifacts.py +98 -0
  38. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/providers/gcp_artifact_registry.py +50 -0
  39. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/artifacts_finder/providers/nexus.py +41 -0
  40. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/extensions/pipeline_data_importer.py +24 -0
  41. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/github/__init__.py +0 -0
  42. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/github/github_client.py +5 -0
  43. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/github/github_pipeline_data_importer.py +21 -0
  44. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py +175 -0
  45. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/github/safe_github_client.py +24 -0
  46. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/gitlab/__init__.py +0 -0
  47. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/gitlab/custom_extensions.py +101 -0
  48. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/gitlab/gitlab_client.py +36 -0
  49. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/gitlab/gitlab_pipeline_data_importer.py +26 -0
  50. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py +195 -0
  51. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/gitlab/safe_gitlab_client.py +32 -0
  52. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/jenkins/__init__.py +0 -0
  53. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/jenkins/custom_extensions.py +63 -0
  54. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/jenkins/jenkins_client.py +5 -0
  55. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/jenkins/jenkins_pipeline_data_importer.py +31 -0
  56. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/jenkins/jenkins_run_pipeline_command.py +165 -0
  57. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/jenkins/safe_jenkins_client.py +14 -0
  58. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/podman/__init__.py +0 -0
  59. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/podman/podman_command.md +178 -0
  60. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/podman/podman_command.py +311 -0
  61. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/sops/sops_client.py +116 -0
  62. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/utils/crypto_utils.py +48 -0
  63. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/utils/extension_utils.py +22 -0
  64. qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v2/utils/retry_decorator.py +93 -0
  65. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/LICENSE +0 -0
  66. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/__init__.py +0 -0
  67. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/execution/__init__.py +0 -0
  68. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/git_client.py +0 -0
  69. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/kube_client.py +0 -0
  70. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/log_client.py +0 -0
  71. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/utils/__init__.py +0 -0
  72. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/utils/utils_aws.py +0 -0
  73. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/utils/utils_context.py +0 -0
  74. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/utils/utils_json.py +0 -0
  75. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/utils/utils_string.py +0 -0
  76. {qubership_pipelines_common_library-0.2.6 → qubership_pipelines_common_library-2.0.1}/qubership_pipelines_common_library/v1/webex_client.py +0 -0
--- qubership_pipelines_common_library-0.2.6/PKG-INFO
+++ qubership_pipelines_common_library-2.0.1/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: qubership-pipelines-common-library
-Version: 0.2.6
+Version: 2.0.1
 Summary: Qubership Pipelines common library
 License: Apache-2.0
 License-File: LICENSE
@@ -24,6 +24,7 @@ Requires-Dist: python-gitlab (>=4.13.0,<5.0.0)
 Requires-Dist: python-jenkins (>=1.8.2,<2.0.0)
 Requires-Dist: pyyaml (>=6.0.2,<7.0.0)
 Requires-Dist: requests (>=2.32.3,<3.0.0)
+Requires-Dist: rich (>=14.2.0,<15.0.0)
 Requires-Dist: urllib3 (>=2.2.3,<3.0.0)
 Requires-Dist: webexpythonsdk (==2.0.1)
 Description-Content-Type: text/markdown
@@ -37,7 +38,7 @@ Description-Content-Type: text/markdown
 
 Open-source python library of clients used by Qubership pipelines/modules.
 
-Library provides easy-to-use clients and wrappers for common devops services (e.g. Jenkins, MiniO, GitLab Pipelines)
+Library provides easy-to-use clients and wrappers for common DevOps services (e.g. Jenkins, MiniO, GitLab Pipelines)
 
 ## Sample implementation
 
@@ -49,7 +50,7 @@ It includes reference python implementation along with the [Development Guide](h
 
 Library is presented as a set of clients with predefined operations
 
-Auto-generated reference (via mkdocs) is available on [this repo's GitHub Pages](https://netcracker.github.io/qubership-pipelines-common-python-library)
+Auto-generated reference (via mkdocs) is available on [this repo's GitHub Pages](https://netcracker.github.io/qubership-pipelines-common-python-library/mkdocs)
 
 ## Installation
 
@@ -61,6 +62,7 @@ Auto-generated reference (via mkdocs) is available on [this repo's GitHub Pages]
 ```
 
 - Or you can install it via `pip`:
+
 ```bash
 pip install qubership-pipelines-common-library
 ```
--- qubership_pipelines_common_library-0.2.6/README.md
+++ qubership_pipelines_common_library-2.0.1/README.md
@@ -7,7 +7,7 @@
 
 Open-source python library of clients used by Qubership pipelines/modules.
 
-Library provides easy-to-use clients and wrappers for common devops services (e.g. Jenkins, MiniO, GitLab Pipelines)
+Library provides easy-to-use clients and wrappers for common DevOps services (e.g. Jenkins, MiniO, GitLab Pipelines)
 
 ## Sample implementation
 
@@ -19,7 +19,7 @@ It includes reference python implementation along with the [Development Guide](h
 
 Library is presented as a set of clients with predefined operations
 
-Auto-generated reference (via mkdocs) is available on [this repo's GitHub Pages](https://netcracker.github.io/qubership-pipelines-common-python-library)
+Auto-generated reference (via mkdocs) is available on [this repo's GitHub Pages](https://netcracker.github.io/qubership-pipelines-common-python-library/mkdocs)
 
 ## Installation
 
@@ -31,6 +31,7 @@ Auto-generated reference (via mkdocs) is available on [this repo's GitHub Pages]
 ```
 
 - Or you can install it via `pip`:
+
 ```bash
 pip install qubership-pipelines-common-library
 ```
--- qubership_pipelines_common_library-0.2.6/pyproject.toml
+++ qubership_pipelines_common_library-2.0.1/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "qubership-pipelines-common-library"
-version = "0.2.6"
+version = "2.0.1"
 description = "Qubership Pipelines common library"
 authors = ["Qubership"]
 readme = "README.md"
@@ -23,6 +23,8 @@ webexpythonsdk = "2.0.1"
 ghapi = "^1.0.6"
 boto3 = "^1.39.4"
 google-cloud-artifact-registry = "^1.16.1"
+rich = "^14.2.0"
+# jira = "3.10.5"
 
 [tool.poetry.group.test.dependencies]
 pytest = "^6.0.0"
--- qubership_pipelines_common_library-0.2.6/qubership_pipelines_common_library/__init__.py
+++ qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/__init__.py
@@ -14,4 +14,4 @@
 
 import sys, os
 current_path = os.path.dirname(os.path.abspath(__file__))
-sys.path.insert(0, current_path)
+sys.path.insert(0, current_path)
--- qubership_pipelines_common_library-0.2.6/qubership_pipelines_common_library/v1/artifactory_client.py
+++ qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v1/artifactory_client.py
@@ -84,7 +84,7 @@ class ArtifactoryAPI:
     def get_files_list(self, artifact_path: str):
         try:
            response = self._get(f"{self.api_url}/api/storage/{artifact_path}?list&deep=1&listFolders=1").json()
-            return [ArtifactListEntry(uri=f['uri'], size=int(f['size']), folder=f['folder'] == True) for f in
+            return [ArtifactListEntry(uri=f['uri'], size=int(f['size']), folder=(f['folder'] is True)) for f in
                    response['files']]
         except requests.exceptions.HTTPError as error:
            raise ArtifactoryError from error
--- qubership_pipelines_common_library-0.2.6/qubership_pipelines_common_library/v1/execution/exec_command.py
+++ qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v1/execution/exec_command.py
@@ -15,9 +15,11 @@
 import logging
 import sys
 import traceback
+from abc import ABC, abstractmethod
 
 from qubership_pipelines_common_library.v1.execution.exec_context import ExecutionContext
 from qubership_pipelines_common_library.v1.utils.utils_context import create_execution_context
+from qubership_pipelines_common_library.v2.utils.crypto_utils import CryptoUtils
 
 
 class ExecutionCommand:
@@ -26,7 +28,9 @@ class ExecutionCommand:
     FAILURE_MSG = "Status: FAILURE"
 
     def __init__(self, context_path: str = None, input_params: dict = None, input_params_secure: dict = None,
-                 folder_path: str = None, parent_context_to_reuse: ExecutionContext = None):
+                 folder_path: str = None, parent_context_to_reuse: ExecutionContext = None,
+                 pre_execute_actions: list['ExecutionCommandExtension'] = None,
+                 post_execute_actions: list['ExecutionCommandExtension'] = None):
         """
         Extendable interface intended to simplify working with input/output params and passing them between commands in different Pipeline Executors
 
@@ -40,30 +44,66 @@ class ExecutionCommand:
             input_params_secure (dict): Secure parameters that will be merged into dynamically created params
             folder_path (str): Folder path where dynamically-created context will be stored. Optional, will create new temp folder if missing.
             parent_context_to_reuse (ExecutionContext): Optional, existing context to propagate input params from.
+            pre_execute_actions: Optional, list of actions, implementing ExecutionCommandExtension, to be executed before command
+            post_execute_actions: Optional, list of actions, implementing ExecutionCommandExtension, to be executed after command
         """
         if not context_path:
             context_path = create_execution_context(input_params=input_params, input_params_secure=input_params_secure,
                                                     folder_path=folder_path, parent_context_to_reuse=parent_context_to_reuse)
         self.context = ExecutionContext(context_path)
+        self._pre_execute_actions = []
+        if pre_execute_actions:
+            self._pre_execute_actions.extend(pre_execute_actions)
+        self._post_execute_actions = []
+        if post_execute_actions:
+            self._post_execute_actions.extend(post_execute_actions)
 
     def run(self):
         """Runs command following its lifecycle"""
         try:
+            self._log_command_class_name()
+            self._log_border_line()
+            self._log_input_params()
             if not self._validate():
                 logging.error(ExecutionCommand.FAILURE_MSG)
                 self._exit(False, ExecutionCommand.FAILURE_MSG)
+            self._pre_execute()
             self._execute()
+            self._post_execute()
             self._exit(True, ExecutionCommand.SUCCESS_MSG)
-        except Exception as e:
+        except Exception:
             logging.error(traceback.format_exc())
             self._exit(False, ExecutionCommand.FAILURE_MSG)
+        finally:
+            self._log_border_line()
+
+    def _log_command_class_name(self):
+        self.context.logger.info("command_class_name = %s", type(self).__name__)
+
+    def _log_border_line(self):
+        self.context.logger.info("=" * 60)
+
+    def _log_input_params(self):
+        self.context.logger.info(
+            "Input context parameters:\n%s\n%s",
+            CryptoUtils.get_parameters_for_print(self.context.input_params_secure.content, True),
+            CryptoUtils.get_parameters_for_print(self.context.input_params.content, False)
+        )
 
     def _validate(self):
         return self.context.validate(["paths.input.params"])
 
+    def _pre_execute(self):
+        for action in self._pre_execute_actions:
+            action.with_command(self).execute()
+
     def _execute(self):
         logging.info("Status: SKIPPED")
 
+    def _post_execute(self):
+        for action in self._post_execute_actions:
+            action.with_command(self).execute()
+
     def _exit(self, success: bool, message: str):
         if success:
             self.context.logger.info(message)
@@ -71,3 +111,24 @@ class ExecutionCommand:
         else:
             self.context.logger.error(message)
             sys.exit(1)
+
+
+class ExecutionCommandExtension(ABC):
+    """
+    Base interface used in ExecutionCommand pre_execute and post_execute actions
+    Can be extended by users to perform custom extension logic before and after execution
+    """
+
+    def __init__(self):
+        self.context = None
+        self.command = None
+
+    def with_command(self, command: ExecutionCommand) -> 'ExecutionCommandExtension':
+        self.command = command
+        self.context = command.context
+        return self
+
+    @abstractmethod
+    def execute(self) -> None:
+        """Implements custom extension logic"""
+        pass
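For orientation, here is a minimal sketch of how the extension hooks added above might be wired together. Only the `ExecutionCommand`/`ExecutionCommandExtension` API shown in this diff is assumed; the `AuditExtension` and `DemoCommand` classes and the parameter values are hypothetical.

```python
# Hypothetical usage sketch of the 2.0.1 extension hooks; class names and params are illustrative only.
from qubership_pipelines_common_library.v1.execution.exec_command import (
    ExecutionCommand,
    ExecutionCommandExtension,
)


class AuditExtension(ExecutionCommandExtension):
    def execute(self) -> None:
        # with_command() has already populated self.command and self.context
        self.context.logger.info("extension ran for %s", type(self.command).__name__)


class DemoCommand(ExecutionCommand):
    def _execute(self):
        self.context.logger.info("doing the actual work")


# run() logs the command class name and input params, validates the context,
# invokes the pre-execute actions, _execute(), the post-execute actions,
# and exits with a non-zero code on failure.
DemoCommand(
    input_params={"some_param": "value"},          # illustrative parameter
    pre_execute_actions=[AuditExtension()],
    post_execute_actions=[AuditExtension()],
).run()
```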
--- qubership_pipelines_common_library-0.2.6/qubership_pipelines_common_library/v1/execution/exec_context.py
+++ qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v1/execution/exec_context.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os, logging
+import os
 
 from pathlib import Path
 from qubership_pipelines_common_library.v1.utils.utils_file import UtilsFile
@@ -40,7 +40,7 @@ class ExecutionContext:
         self.__init_temp_folder()
         self.__init_logger()
         # load context from files
-        logging.info(f"""Execution context params:
+        self.logger.debug(f"""Execution context params:
                     paths.logs: {self.context.get("paths.logs")}
                     paths.temp: {self.context.get("paths.temp")}
                     paths.input.params: {self.context.get("paths.input.params")}
@@ -55,10 +55,10 @@
     def output_params_save(self):
         """Stores output_param files to disk"""
         if self.context.get("paths.output.params"):
-            logging.info(f"Writing insecure param file '{self.context.get('paths.output.params')}'")
+            self.logger.info(f"Writing insecure param file '{self.context.get('paths.output.params')}'")
             self.output_params.save(self.context.get("paths.output.params"))
         if self.context.get("paths.output.params_secure"):
-            logging.info(f"Writing secure param file '{self.context.get('paths.output.params_secure')}'")
+            self.logger.info(f"Writing secure param file '{self.context.get('paths.output.params_secure')}'")
             self.output_params_secure.save(self.context.get("paths.output.params_secure"))
 
     def input_param_get(self, path, def_value=None):
@@ -85,13 +85,13 @@
             if not self.__validate_param(key):
                 valid = False
                 if not silent:
-                    logging.error(f"Parameter '{key}' is mandatory but not defined")
+                    self.logger.error(f"Parameter '{key}' is mandatory but not defined")
         return valid
 
     def __validate_param(self, name):
         try:
             return self.context.get(name) or self.input_param_get(name)  # or self.__dict__.get(name)
-        except:
+        except Exception:
             return False
 
     def __input_params_load(self):
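For reference, a minimal hedged sketch of the `ExecutionContext` calls touched by this hunk; the context file path and parameter key below are placeholders.

```python
from qubership_pipelines_common_library.v1.execution.exec_context import ExecutionContext

# Placeholder path to an existing execution context descriptor.
context = ExecutionContext("/tmp/exec/context.yaml")

if context.validate(["paths.input.params"]):
    # Missing keys fall back to def_value; messages now go through context.logger instead of the root logger.
    value = context.input_param_get("some.param", def_value="fallback")  # placeholder key
    context.logger.info(f"some.param = {value}")
    context.output_params_save()  # writes paths.output.params / params_secure, if configured
```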
--- qubership_pipelines_common_library-0.2.6/qubership_pipelines_common_library/v1/execution/exec_context_file.py
+++ qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v1/execution/exec_context_file.py
@@ -141,7 +141,7 @@ class ExecutionContextFile:
                logging.error(f"Incorrect apiVersion value: {self.content['apiVersion']} in file '{full_path}'. "
                              f"Only '{ExecutionContextFile.SUPPORTED_API_VERSIONS}' are supported")
                self.init_empty()
-        except FileNotFoundError as e:
+        except FileNotFoundError:
            self.init_empty()
 
     def save(self, path):
--- qubership_pipelines_common_library-0.2.6/qubership_pipelines_common_library/v1/execution/exec_info.py
+++ qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v1/execution/exec_info.py
@@ -108,3 +108,7 @@ class ExecutionInfo:
     def with_params(self, params: dict):
         self.params = params
         return self
+
+    def __str__(self):
+        return (f"ExecutionInfo(id='{self.id}', url='{self.url}', status='{self.status}', "
+                f"time_start={self.time_start.isoformat()})")
--- qubership_pipelines_common_library-0.2.6/qubership_pipelines_common_library/v1/execution/exec_logger.py
+++ qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v1/execution/exec_logger.py
@@ -12,7 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging, os
+import logging
+import os
 
 
 class ExecutionLogger:
@@ -20,7 +21,8 @@ class ExecutionLogger:
     FILE_NAME_FULL = "full.log"
     EXECUTION_LOG_LEVEL = logging.INFO
     FULL_LOG_LEVEL = logging.DEBUG
-    DEFAULT_FORMAT = u'[%(asctime)s] [%(levelname)-5s] [class=%(filename)s:%(lineno)-3s] %(message)s'
+    DEFAULT_FORMAT = u'[%(asctime)s] [%(levelname)-7s] [class=%(filename)s:%(lineno)-3s] %(message)s'
+    LEVELNAME_COLORED_FORMAT = u'[%(asctime)s] [%(levelname_color_open_tag)s%(levelname)-7s%(levelname_color_close_tag)s] \\[class=%(filename)s:%(lineno)-3s] %(message)s'
 
     def __init__(self, path_logs):
         """
@@ -34,7 +36,9 @@
         # Also, file handlers are never removed
         self.path_logs = path_logs
         self.logger = logging.getLogger("execution_logger")
-        self.logger.setLevel(logging.DEBUG) # set to the lowest level to allow handlers to capture anything
+        self.logger.setLevel(logging.DEBUG)  # set to the lowest level to allow handlers to capture anything
+        self.logger.propagate = True
+
         if path_logs:
             # execution logs - only in local logger
             handler_exec = logging.FileHandler(os.path.join(path_logs, ExecutionLogger.FILE_NAME_EXECUTION))
@@ -46,9 +50,7 @@
             handler_full = logging.FileHandler(os.path.join(path_logs, ExecutionLogger.FILE_NAME_FULL))
             handler_full.setLevel(ExecutionLogger.FULL_LOG_LEVEL)
             handler_full.setFormatter(logging.Formatter(ExecutionLogger.DEFAULT_FORMAT))
-            logging.getLogger().propagate = False
             logging.getLogger().addHandler(handler_full)
-            self.logger.propagate = True
 
     def info(self, msg, *args, **kwargs):
         self.logger.info(msg, *args, **kwargs)
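A small standalone sketch of the widened level-name column in `DEFAULT_FORMAT` (from `-5s` to `-7s`), using plain `logging`; nothing beyond the format string from the diff is assumed, and the logger name is illustrative.

```python
import logging

# Format string copied from ExecutionLogger.DEFAULT_FORMAT (2.0.1); handler and logger are illustrative.
DEFAULT_FORMAT = '[%(asctime)s] [%(levelname)-7s] [class=%(filename)s:%(lineno)-3s] %(message)s'

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(DEFAULT_FORMAT))

demo = logging.getLogger("format_demo")
demo.addHandler(handler)
demo.setLevel(logging.DEBUG)

demo.info("INFO fits in the 7-character level column")
demo.warning("WARNING no longer shifts the columns")
```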
--- qubership_pipelines_common_library-0.2.6/qubership_pipelines_common_library/v1/github_client.py
+++ qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v1/github_client.py
@@ -51,11 +51,20 @@ class GithubClient:
 
     def __init__(self, token: str = None, api_url: str = None, **kwargs):
         """
+        This class is deprecated and will be removed in v3.0.0. Use class from v2 module instead.
         Arguments:
             token (str): Token used in auth request
             api_url (str): Optional Github Enterprise API URL, leave empty if using github.com
             **kwargs (Any): will be passed into Github API constructor
         """
+        if self.__class__ == GithubClient:
+            import warnings
+            warnings.warn(
+                "v1.github_client.GithubClient is deprecated since v2.0.0 and will be removed in v3.0.0. "
+                "Use v2.github.github_client.GithubClient instead.",
+                DeprecationWarning,
+                stacklevel=2
+            )
         self.gh = GhApi(token=token, gh_host=api_url, **kwargs)
         logging.info("Github Client configured")
 
@@ -143,7 +152,7 @@ class GithubClient:
                    logging.info(f"Workflow Run status: '{run.status}' is present in input break statuses list. Stop waiting.")
                    execution.stop()
                    break
-            except:
+            except Exception:
                pass
            timeout += wait_seconds
            logging.info(f"Waiting workflow run execution timeout {wait_seconds} seconds")
--- qubership_pipelines_common_library-0.2.6/qubership_pipelines_common_library/v1/gitlab_client.py
+++ qubership_pipelines_common_library-2.0.1/qubership_pipelines_common_library/v1/gitlab_client.py
@@ -12,8 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging, gitlab
-from time import sleep
+import logging, gitlab, time
+from pathlib import Path
+
 from gitlab import GitlabGetError
 from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
 
@@ -36,6 +37,7 @@ class GitlabClient:
 
     def __init__(self, host: str, username: str, password: str, email: str = None, **kwargs):
         """
+        This class is deprecated and will be removed in v3.0.0. Use class from v2 module instead.
         Arguments:
             host (str): Gitlab instance URL
             username (str): User used in auth request, might be empty string if no auth is required
@@ -43,6 +45,14 @@
             email (str): Email used when committing changes using API
             **kwargs (Any): will be passed into Gitlab API constructor
         """
+        if self.__class__ == GitlabClient:
+            import warnings
+            warnings.warn(
+                "v1.gitlab_client.GitlabClient is deprecated since v2.0.0 and will be removed in v3.0.0. "
+                "Use v2.gitlab.gitlab_client.GitlabClient instead.",
+                DeprecationWarning,
+                stacklevel=2
+            )
         self.host = host.rstrip("/")
         self.username = username
         self.email = email
@@ -120,7 +130,7 @@ class GitlabClient:
         while counter < timeout:
             counter += 1
             logging.info("Waiting pipeline execution timeout 1 second")
-            sleep(1)
+            time.sleep(1)
             continue
         pipeline.cancel()
         return execution.stop(ExecutionInfo.STATUS_ABORTED)
@@ -136,14 +146,17 @@
             logging.error("Can't get pipeline status")
         return execution
 
-    def wait_pipeline_execution(self, execution: ExecutionInfo, timeout_seconds: float = 10.0,
+    def wait_pipeline_execution(self, execution: ExecutionInfo, timeout_seconds: float = 180.0,
                                 break_status_list: list = None, wait_seconds: float = 1.0):
         """"""
         if break_status_list is None:
             break_status_list = self.BREAK_STATUS_LIST
-        timeout = 0
+        count_seconds = 0
+        last_log_time = time.perf_counter()
+        estimated_max_attempts = timeout_seconds // wait_seconds
+        retries = 0
         execution.with_status(execution.get_status())
-        while timeout < timeout_seconds:
+        while count_seconds < timeout_seconds:
             try:
                 project = self.gl.projects.get(execution.get_name(), lazy=True)
                 pipeline = project.pipelines.get(execution.get_id())
@@ -152,12 +165,15 @@
                    logging.info(f"Pipeline status: '{pipeline.status}' contains in input break status list. Stop waiting.")
                    execution.stop()
                    break
-            except:
+            except Exception:
                pass
-            timeout += wait_seconds
-            logging.info(f"Waiting pipeline execution timeout {wait_seconds} seconds")
-            sleep(wait_seconds)
-            continue
+            now = time.perf_counter()
+            retries += 1
+            if now - last_log_time >= 10.0:
+                logging.info(f"Made [{retries} of {estimated_max_attempts}] retries. Waiting pipeline execution {count_seconds} of {timeout_seconds}")
+                last_log_time = now
+            count_seconds += wait_seconds
+            time.sleep(wait_seconds)
         return execution
 
     @staticmethod
@@ -170,6 +186,69 @@
             return {"repo": url[:pos1], "branch": url[pos1 + len(part):pos2], "path": url[pos2 + 1:]}
         return None
 
+    def get_default_branch(self, project_id: str) -> str:
+        return self.gl.projects.get(project_id).default_branch
+
+    def get_latest_pipeline_id(self, project_id: str, ref: str) -> int | str:
+        project = self.gl.projects.get(project_id, lazy=True)
+        return project.pipelines.latest(ref=ref).get_id()
+
+    def get_latest_job(self, project_id: str, pipeline_id: str):
+        project = self.gl.projects.get(project_id, lazy=True)
+        pipeline = project.pipelines.get(pipeline_id, lazy=True)
+
+        jobs = pipeline.jobs.list(get_all=True)
+        logging.debug(f"All jobs from the pipeline: {jobs}")
+
+        # get jobs from downstream pipelines
+        bridges = pipeline.bridges.list(get_all=True)
+        logging.debug(f"Bridges: {bridges}")
+        for bridge in bridges:
+            downstream_pipeline_data = bridge.downstream_pipeline
+            downstream_project = self.gl.projects.get(downstream_pipeline_data.get('project_id'), lazy=True)
+            logging.debug(f"Getting jobs from a downstream pipeline: {downstream_pipeline_data.get('id')}...")
+            downstream_pipeline = downstream_project.pipelines.get(downstream_pipeline_data.get('id'))
+            jobs.extend(downstream_pipeline.jobs.list(get_all=True))
+
+        # get jobs from child pipelines
+        child_pipelines = project.pipelines.list(ref=f"downstream/{pipeline_id}", source="pipeline", all=True)
+        logging.debug(f"Child pipelines: {child_pipelines}")
+        for child_pipeline in child_pipelines:
+            logging.debug(f"Getting jobs from a child pipeline: {child_pipeline.id}...")
+            child_jobs = child_pipeline.jobs.list(get_all=True)
+            jobs.extend(child_jobs)
+
+        logging.debug(f"All jobs (+ jobs from downstream pipelines): {jobs}")
+        jobs = [j for j in jobs if j.started_at]
+        jobs = sorted(jobs, key=lambda j: j.started_at, reverse=True)
+        return jobs[0] if jobs else None
+
+    def download_job_artifacts(self, project_id, job_id, local_dir):
+        project = self.gl.projects.get(project_id, lazy=True)
+        job = project.jobs.get(job_id, lazy=True)
+        local_file = Path(local_dir, f"{job_id}.zip")
+        with local_file.open('wb') as f:
+            try:
+                job.artifacts(streamed=True, action=f.write)
+            except gitlab.GitlabGetError as e:
+                if e.response_code == 404:
+                    logging.warning(f"No artifacts for job {job_id}")
+                    return None
+                else:
+                    raise
+        logging.info(f"Artifacts downloaded to {local_file}")
+        return local_file
+
+    @staticmethod
+    def _cast_to_string(value) -> str:
+        if isinstance(value, str):
+            return value
+        if value is None:
+            return ''
+        if isinstance(value, bool):
+            return 'true' if value else 'false'
+        return str(value)
+
     def _map_status(self, git_status: str, default_status: str):
         result = default_status
         if git_status in (GitlabClient.STATUS_CREATED, GitlabClient.STATUS_WAITING,
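A hedged sketch chaining the new helpers above to fetch the artifacts of the most recent job on a project's default branch; the host, credentials, and project path are placeholders, and only the constructor and method signatures shown in this diff are assumed. The remainder of the gitlab_client.py diff continues below.

```python
from qubership_pipelines_common_library.v1.gitlab_client import GitlabClient

# Placeholder connection details; substitute a real instance, token, and project path.
client = GitlabClient(host="https://gitlab.example.com", username="bot", password="<token>")

project_path = "group/subgroup/project"                    # placeholder project
branch = client.get_default_branch(project_path)
pipeline_id = client.get_latest_pipeline_id(project_path, ref=branch)
job = client.get_latest_job(project_path, pipeline_id)     # includes downstream/child pipeline jobs

if job is not None:
    # Saves "<job_id>.zip" into local_dir, or returns None if the job has no artifacts.
    archive = client.download_job_artifacts(project_path, job.id, local_dir="/tmp")
    print(archive)
```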
@@ -186,3 +265,88 @@
         elif git_status == GitlabClient.STATUS_MANUAL:
             result = ExecutionInfo.STATUS_MANUAL
         return result
+
+    # Related static methods, with direct REST access
+    @staticmethod
+    def is_gitlab_project_exist(gitlab_url, gitlab_project, gitlab_token):
+        """"""
+        import requests
+        headers = {"PRIVATE-TOKEN": gitlab_token}
+        request = f"{gitlab_url}/api/v4/projects/{requests.utils.quote(gitlab_project, safe='')}"
+        logging.debug(f"Sending '{request}' request...")
+        response = requests.get(request, headers=headers)
+        if response.status_code == 200:
+            return True
+        else:
+            logging.error(f"Error {response.status_code}: {response.text}")
+            return False
+
+    @staticmethod
+    def search_group_id(gitlab_url, gitlab_project, gitlab_token):
+        """"""
+        import requests
+        headers = {"PRIVATE-TOKEN": gitlab_token}
+        request = f"{gitlab_url}/api/v4/groups?search={gitlab_project}"
+        logging.debug(f"Sending '{request}' request...")
+        response = requests.get(request, headers=headers)
+        if response.status_code == 200:
+            groups = response.json()
+            for group in groups:
+                if group.get("full_path") == gitlab_project:
+                    return group["id"]
+        else:
+            logging.error(f"Error {response.status_code}: {response.text}")
+
+    @staticmethod
+    def create_internal_gitlab_project(gitlab_url, gitlab_token, group_id, repo_name, repo_branch, visibility="internal"):
+        """"""
+        import requests
+        headers = {"PRIVATE-TOKEN": gitlab_token, "Content-Type": "application/json"}
+        data = {
+            "name": repo_name,
+            "namespace_id": group_id,
+            "visibility": visibility,
+            "default_branch": repo_branch
+        }
+        request = f"{gitlab_url}/api/v4/projects"
+        logging.debug(f"Sending '{request}' request...")
+        response = requests.post(request, headers=headers, json=data)
+        if response.status_code == 201:
+            response_json = response.json()
+            logging.info(f"Gitlab project was created. Url: '{response_json['web_url']}'")
+            GitlabClient.make_first_commit_to_gitlab_project(
+                gitlab_url=gitlab_url,
+                gitlab_token=gitlab_token,
+                project_id=response_json['id'],
+                repo_branch=repo_branch
+            )
+        else:
+            logging.error(f"Error {response.status_code}: {response.text}")
+
+    @staticmethod
+    def make_first_commit_to_gitlab_project(gitlab_url, gitlab_token, project_id, repo_branch):
+        """"""
+        import requests
+        logging.debug("Making first commit...")
+        headers = {"PRIVATE-TOKEN": gitlab_token, "Content-Type": "application/json"}
+        commit_payload = {
+            "branch": repo_branch,
+            "commit_message": "Initial commit",
+            "actions": [
+                {
+                    "action": "create",
+                    "file_path": "README.md",
+                    "content": "# This is an automatically created project"
+                }
+            ]
+        }
+
+        response = requests.post(
+            f"{gitlab_url}/api/v4/projects/{project_id}/repository/commits",
+            headers=headers,
+            json=commit_payload
+        )
+        if response.status_code == 201:
+            logging.info("Commit successfull")
+        else:
+            logging.error(f"Error {response.status_code}: {response.text}")