qubership-pipelines-common-library 0.2.6__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. qubership_pipelines_common_library/__init__.py +1 -1
  2. qubership_pipelines_common_library/v1/artifactory_client.py +1 -1
  3. qubership_pipelines_common_library/v1/execution/exec_command.py +63 -2
  4. qubership_pipelines_common_library/v1/execution/exec_context.py +6 -6
  5. qubership_pipelines_common_library/v1/execution/exec_context_file.py +1 -1
  6. qubership_pipelines_common_library/v1/execution/exec_info.py +4 -0
  7. qubership_pipelines_common_library/v1/execution/exec_logger.py +7 -5
  8. qubership_pipelines_common_library/v1/github_client.py +10 -1
  9. qubership_pipelines_common_library/v1/gitlab_client.py +175 -11
  10. qubership_pipelines_common_library/v1/jenkins_client.py +55 -18
  11. qubership_pipelines_common_library/v1/maven_client.py +2 -2
  12. qubership_pipelines_common_library/v1/minio_client.py +1 -1
  13. qubership_pipelines_common_library/v1/utils/rest.py +1 -1
  14. qubership_pipelines_common_library/v1/utils/utils.py +1 -1
  15. qubership_pipelines_common_library/v1/utils/utils_cli.py +43 -9
  16. qubership_pipelines_common_library/v1/utils/utils_dictionary.py +1 -1
  17. qubership_pipelines_common_library/v1/utils/utils_file.py +17 -0
  18. qubership_pipelines_common_library/v1/utils/utils_logging.py +53 -0
  19. qubership_pipelines_common_library/v2/__init__.py +0 -0
  20. qubership_pipelines_common_library/v2/artifacts_finder/__init__.py +0 -0
  21. qubership_pipelines_common_library/v2/artifacts_finder/artifact_finder.py +56 -0
  22. qubership_pipelines_common_library/v2/artifacts_finder/auth/__init__.py +0 -0
  23. qubership_pipelines_common_library/v2/artifacts_finder/auth/aws_credentials.py +106 -0
  24. qubership_pipelines_common_library/v2/artifacts_finder/auth/azure_credentials.py +72 -0
  25. qubership_pipelines_common_library/v2/artifacts_finder/auth/gcp_credentials.py +88 -0
  26. qubership_pipelines_common_library/v2/artifacts_finder/model/__init__.py +0 -0
  27. qubership_pipelines_common_library/v2/artifacts_finder/model/artifact.py +20 -0
  28. qubership_pipelines_common_library/v2/artifacts_finder/model/artifact_provider.py +35 -0
  29. qubership_pipelines_common_library/v2/artifacts_finder/model/credentials.py +16 -0
  30. qubership_pipelines_common_library/v2/artifacts_finder/model/credentials_provider.py +16 -0
  31. qubership_pipelines_common_library/v2/artifacts_finder/providers/__init__.py +0 -0
  32. qubership_pipelines_common_library/v2/artifacts_finder/providers/artifactory.py +52 -0
  33. qubership_pipelines_common_library/v2/artifacts_finder/providers/aws_code_artifact.py +79 -0
  34. qubership_pipelines_common_library/v2/artifacts_finder/providers/azure_artifacts.py +98 -0
  35. qubership_pipelines_common_library/v2/artifacts_finder/providers/gcp_artifact_registry.py +50 -0
  36. qubership_pipelines_common_library/v2/artifacts_finder/providers/nexus.py +41 -0
  37. qubership_pipelines_common_library/v2/extensions/pipeline_data_importer.py +24 -0
  38. qubership_pipelines_common_library/v2/github/__init__.py +0 -0
  39. qubership_pipelines_common_library/v2/github/github_client.py +5 -0
  40. qubership_pipelines_common_library/v2/github/github_pipeline_data_importer.py +21 -0
  41. qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py +175 -0
  42. qubership_pipelines_common_library/v2/github/safe_github_client.py +24 -0
  43. qubership_pipelines_common_library/v2/gitlab/__init__.py +0 -0
  44. qubership_pipelines_common_library/v2/gitlab/custom_extensions.py +101 -0
  45. qubership_pipelines_common_library/v2/gitlab/gitlab_client.py +36 -0
  46. qubership_pipelines_common_library/v2/gitlab/gitlab_pipeline_data_importer.py +26 -0
  47. qubership_pipelines_common_library/v2/gitlab/gitlab_run_pipeline_command.py +195 -0
  48. qubership_pipelines_common_library/v2/gitlab/safe_gitlab_client.py +32 -0
  49. qubership_pipelines_common_library/v2/jenkins/__init__.py +0 -0
  50. qubership_pipelines_common_library/v2/jenkins/custom_extensions.py +63 -0
  51. qubership_pipelines_common_library/v2/jenkins/jenkins_client.py +5 -0
  52. qubership_pipelines_common_library/v2/jenkins/jenkins_pipeline_data_importer.py +31 -0
  53. qubership_pipelines_common_library/v2/jenkins/jenkins_run_pipeline_command.py +165 -0
  54. qubership_pipelines_common_library/v2/jenkins/safe_jenkins_client.py +14 -0
  55. qubership_pipelines_common_library/v2/podman/__init__.py +0 -0
  56. qubership_pipelines_common_library/v2/podman/podman_command.md +178 -0
  57. qubership_pipelines_common_library/v2/podman/podman_command.py +311 -0
  58. qubership_pipelines_common_library/v2/sops/sops_client.py +116 -0
  59. qubership_pipelines_common_library/v2/utils/crypto_utils.py +48 -0
  60. qubership_pipelines_common_library/v2/utils/extension_utils.py +22 -0
  61. qubership_pipelines_common_library/v2/utils/retry_decorator.py +93 -0
  62. {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/METADATA +5 -3
  63. qubership_pipelines_common_library-2.0.1.dist-info/RECORD +76 -0
  64. qubership_pipelines_common_library-0.2.6.dist-info/RECORD +0 -32
  65. {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/WHEEL +0 -0
  66. {qubership_pipelines_common_library-0.2.6.dist-info → qubership_pipelines_common_library-2.0.1.dist-info}/licenses/LICENSE +0 -0
qubership_pipelines_common_library/v2/artifacts_finder/providers/artifactory.py
@@ -0,0 +1,52 @@
+ import logging
+ import re
+
+ from pathlib import Path
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact import Artifact
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact_provider import ArtifactProvider
+
+
+ class ArtifactoryProvider(ArtifactProvider):
+
+     def __init__(self, registry_url: str, username: str = None, password: str = None, **kwargs):
+         """
+         Initializes this client to work with **JFrog Artifactory** maven repositories.
+         Requires `username` and its `password` or `token`.
+         """
+         super().__init__(**kwargs)
+         self.registry_url = registry_url
+         if password:
+             from requests.auth import HTTPBasicAuth
+             self._session.auth = HTTPBasicAuth(username, password)
+
+     def download_artifact(self, resource_url: str, local_path: str | Path, **kwargs) -> None:
+         return self.generic_download(resource_url=resource_url, local_path=local_path)
+
+     def search_artifacts(self, artifact: Artifact, **kwargs) -> list[str]:
+         timestamp_version_match = re.match(self.TIMESTAMP_VERSION_PATTERN, artifact.version)
+         if timestamp_version_match:
+             base_version = timestamp_version_match.group(1) + "SNAPSHOT"
+         else:
+             base_version = artifact.version
+
+         search_params = {
+             **({"g": artifact.group_id} if artifact.group_id else {}),
+             "a": artifact.artifact_id,
+             "v": base_version,
+             "specific": "true"
+         }
+         search_api_url = f"{self.registry_url}/api/search/gavc"
+         logging.debug(f"Search URL: {search_api_url}\nSearch Parameters: {search_params}")
+
+         response = self._session.get(url=search_api_url,
+                                      params=search_params,
+                                      timeout=self.timeout)
+         if response.status_code != 200:
+             raise Exception(f"Could not find '{artifact.artifact_id}' - search request returned {response.status_code}!")
+
+         return [result["downloadUri"] for result in response.json()["results"]
+                 if result["ext"] == artifact.extension
+                 and (not timestamp_version_match or result["downloadUri"].endswith(f"{artifact.version}.{artifact.extension}"))]
+
+     def get_provider_name(self) -> str:
+         return "artifactory"
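For orientation, a minimal usage sketch of the new provider (hypothetical registry URL and coordinates; the `Artifact` constructor is assumed to accept the fields that `search_artifacts` reads):

```python
from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact import Artifact
from qubership_pipelines_common_library.v2.artifacts_finder.providers.artifactory import ArtifactoryProvider

# Hypothetical registry and coordinates; Artifact is assumed to take
# group_id/artifact_id/version/extension as keyword arguments.
provider = ArtifactoryProvider(
    registry_url="https://artifactory.example.com/artifactory",
    username="ci-user",
    password="<api-token>",
)
artifact = Artifact(group_id="org.example", artifact_id="my-lib",
                    version="1.0.0", extension="jar")
urls = provider.search_artifacts(artifact)  # GAVC search via /api/search/gavc
if urls:
    provider.download_artifact(urls[0], "my-lib-1.0.0.jar")
```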
qubership_pipelines_common_library/v2/artifacts_finder/providers/aws_code_artifact.py
@@ -0,0 +1,79 @@
+ import logging
+ import boto3
+
+ from pathlib import Path
+ from botocore.config import Config
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact import Artifact
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact_provider import ArtifactProvider
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.credentials import Credentials
+
+
+ class AwsCodeArtifactProvider(ArtifactProvider):
+
+     def __init__(self, credentials: Credentials, domain: str, repository: str, package_format: str = "generic", **kwargs):
+         """
+         Initializes this client to work with **AWS Code Artifact** for generic or maven artifacts.
+         Requires `Credentials` provided by `AwsCredentialsProvider`.
+         """
+         super().__init__(**kwargs)
+         self._credentials = credentials
+         self._domain = domain
+         self._repository = repository
+         self._format = package_format
+         self._aws_client = boto3.client(
+             service_name='codeartifact',
+             config=Config(region_name=credentials.region_name),
+             aws_access_key_id=credentials.access_key,
+             aws_secret_access_key=credentials.secret_key,
+             aws_session_token=credentials.session_token,
+         )
+
+     def download_artifact(self, resource_url: str, local_path: str | Path, **kwargs) -> None:
+         """ 'resource_url' is actually AWS-specific resource_id, expected to be "namespace/package/version/asset_name" """
+         asset_parts = resource_url.split("/")
+         response = self._aws_client.get_package_version_asset(
+             domain=self._domain, repository=self._repository,
+             format=self._format, namespace=asset_parts[0],
+             package=asset_parts[1], packageVersion=asset_parts[2],
+             asset=asset_parts[3]
+         )
+         with open(local_path, 'wb') as file:
+             file.write(response.get('asset').read())
+
+     def search_artifacts(self, artifact: Artifact, **kwargs) -> list[str]:
+         list_packages_response = self._aws_client.list_packages(
+             domain=self._domain, repository=self._repository,
+             format=self._format, packagePrefix=artifact.artifact_id
+         )
+         logging.debug(f"list_packages_response: {list_packages_response}")
+
+         namespaces = [package.get('namespace') for package in list_packages_response.get('packages')
+                       if package.get('package') == artifact.artifact_id]
+         logging.debug(f"namespaces: {namespaces}")
+
+         if not namespaces:
+             logging.warning(f"Found no packages with artifactId = {artifact.artifact_id}!")
+             return []
+         if len(namespaces) > 1:
+             logging.warning(f"Found multiple namespaces with same artifactId = {artifact.artifact_id}:\n{namespaces}")
+
+         results = []
+         for namespace in namespaces:
+             try:
+                 assets_response = self._aws_client.list_package_version_assets(
+                     domain=self._domain, repository=self._repository,
+                     format=self._format, package=artifact.artifact_id,
+                     packageVersion=artifact.version, namespace=namespace
+                 )
+                 logging.debug(f"assets: {assets_response}")
+                 for asset in assets_response.get('assets'):
+                     if asset.get('name').lower().endswith(artifact.extension.lower()):
+                         results.append(f"{assets_response.get('namespace')}/{assets_response.get('package')}/"
+                                        f"{assets_response.get('version')}/{asset.get('name')}")
+             except Exception:
+                 logging.warning(f"Specific version ({artifact.version}) of package ({namespace}.{artifact.artifact_id}) not found!")
+         logging.info(f"AWS search results: {results}")
+         return results
+
+     def get_provider_name(self) -> str:
+         return "aws_code_artifact"
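Note the asymmetry with the HTTP-based providers: here `search_artifacts` returns AWS resource ids, and `download_artifact` resolves them through the CodeArtifact API. A hedged sketch (AwsCredentialsProvider's API is not shown in this diff, so the credentials object is left as a placeholder):

```python
from qubership_pipelines_common_library.v2.artifacts_finder.providers.aws_code_artifact import AwsCodeArtifactProvider

credentials = ...  # obtained via AwsCredentialsProvider (API not shown in this diff);
                   # must carry region_name, access_key, secret_key, session_token
provider = AwsCodeArtifactProvider(
    credentials=credentials,
    domain="my-domain",        # hypothetical CodeArtifact domain
    repository="my-repo",      # hypothetical repository
    package_format="maven",
)
resource_ids = provider.search_artifacts(artifact)  # 'artifact' as in the sketch above
# Each result is "namespace/package/version/asset_name", not a URL:
if resource_ids:
    provider.download_artifact(resource_ids[0], "my-lib-1.0.0.jar")
```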
qubership_pipelines_common_library/v2/artifacts_finder/providers/azure_artifacts.py
@@ -0,0 +1,98 @@
+ import logging
+ import re
+
+ from pathlib import Path
+ from requests.auth import HTTPBasicAuth
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact import Artifact
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact_provider import ArtifactProvider
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.credentials import Credentials
+
+
+ class AzureArtifactsProvider(ArtifactProvider):
+
+     def __init__(self, credentials: Credentials, organization: str, project: str, feed: str, **kwargs):
+         """
+         Initializes this client to work with **Azure Artifacts** for generic artifacts.
+         Requires `Credentials` provided by `AzureCredentialsProvider`.
+         """
+         super().__init__(**kwargs)
+         self._credentials = credentials
+         self._session.auth = HTTPBasicAuth("", self._credentials.access_token)
+         self.organization = organization
+         self.project = project
+         self.feed = feed
+
+     def download_artifact(self, resource_url: str, local_path: str | Path, **kwargs) -> None:
+         return self.generic_download(resource_url=resource_url, local_path=local_path)
+
+     def search_artifacts(self, artifact: Artifact, **kwargs) -> list[str]:
+         acceptable_versions = [artifact.version]
+         if timestamp_version_match := re.match(self.TIMESTAMP_VERSION_PATTERN, artifact.version):
+             acceptable_versions.append(timestamp_version_match.group(1) + "SNAPSHOT")
+
+         # Try to find package with name ~ "artifact_id"
+         feeds_search_url = f"https://feeds.dev.azure.com/{self.organization}/{self.project}/_apis/packaging/feeds/{self.feed}/packages"
+         feed_search_params = {
+             "includeAllVersions": "true",
+             "packageNameQuery": artifact.artifact_id,
+             "protocolType": "maven",
+             "api-version": "7.1",
+         }
+         feeds_response = self._session.get(url=feeds_search_url, params=feed_search_params, timeout=self.timeout)
+         feeds_response_json = feeds_response.json()
+         if feeds_response.status_code != 200:
+             logging.error(f"Feeds search error ({feeds_response.status_code}) response: {feeds_response_json}")
+             raise Exception(f"Could not find '{artifact.artifact_id}' - search request returned {feeds_response.status_code}!")
+
+         logging.debug(f"Feeds search response: {feeds_response_json}")
+         if feeds_response_json.get("count") > 1:
+             logging.warning("Found more than 1 feeds. Use the first one.")
+         elif feeds_response_json.get("count") == 0:
+             logging.warning("No feeds were found.")
+             return []
+         feed = feeds_response_json.get("value")[0]
+         feed_links = feed.get("_links", {})
+
+         # Get feed versions
+         feed_versions_url = feed_links.get("versions", {}).get("href", "")
+         feed_versions_response = self._session.get(url=feed_versions_url, timeout=self.timeout)
+         feed_versions_response_json = feed_versions_response.json()
+         if feed_versions_response.status_code != 200:
+             logging.error(f"Feed versions error ({feed_versions_response.status_code}) response: {feed_versions_response_json}")
+             raise Exception(f"Could not find feed versions, search request returned {feed_versions_response.status_code}!")
+         logging.debug(f"Feed versions response: {feed_versions_response_json}")
+         feed_versions = feed_versions_response_json.get("value")
+
+         # Filter by acceptable versions
+         logging.debug(f"Filtering by acceptable versions: '{acceptable_versions}'")
+         feed_version = [f for f in feed_versions if (f.get('protocolMetadata').get('data').get('version') in acceptable_versions)]
+         if len(feed_version) == 0:
+             logging.warning("All feed versions filtered.")
+             return []
+         filtered_feed_version = feed_version[0]
+
+         # Search for target file
+         files = [f for f in filtered_feed_version.get("files") if f.get('name').startswith(f"{artifact.artifact_id}-{artifact.version}") and f.get('name').endswith(artifact.extension)]
+         logging.debug(f"Files found: {files}")
+         if len(files) == 0:
+             logging.warning("All files filtered.")
+             return []
+         target_file = files[0]
+
+         # Build download url
+         feed_id = feed_links.get("feed").get("href").split("/")[-1]  # take id from link to feed
+         feed_version = filtered_feed_version.get("version")
+         group_id = filtered_feed_version.get('protocolMetadata').get('data').get("groupId")
+         artifact_id = filtered_feed_version.get('protocolMetadata').get('data').get("artifactId")
+         target_file_name = target_file.get("name")
+
+         download_url = (
+             f"https://pkgs.dev.azure.com/{self.organization}/{self.project}/_apis/packaging/feeds/{feed_id}/maven/"
+             f"{group_id}/{artifact_id}/{feed_version}/{target_file_name}/content"
+             f"?api-version=7.1-preview.1"
+         )
+         logging.info(f"Azure search resulting url: {download_url}")
+         return [download_url]
+
+     def get_provider_name(self) -> str:
+         return "azure_artifacts"
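The acceptable-versions logic means a timestamped snapshot is matched under both its literal and its base form; a hedged illustration (the exact `TIMESTAMP_VERSION_PATTERN` lives in the `ArtifactProvider` base class and is not shown here, so the captured prefix is an assumption):

```python
# Assuming the base-class pattern captures the "1.2.3-" prefix of a
# timestamped snapshot version, search_artifacts would accept either form:
version = "1.2.3-20240101.120000-1"                # hypothetical timestamped snapshot
acceptable_versions = [version, "1.2.3-SNAPSHOT"]  # what the code above builds
```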
qubership_pipelines_common_library/v2/artifacts_finder/providers/gcp_artifact_registry.py
@@ -0,0 +1,50 @@
+ from pathlib import Path
+ from google.cloud import artifactregistry_v1
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact import Artifact
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact_provider import ArtifactProvider
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.credentials import Credentials
+
+
+ class GcpArtifactRegistryProvider(ArtifactProvider):
+
+     def __init__(self, credentials: Credentials, project: str, region_name: str, repository: str, **kwargs):
+         """
+         Initializes this client to work with **GCP Artifact Registry** for generic artifacts.
+         Requires `Credentials` provided by `GcpCredentialsProvider`.
+         """
+         super().__init__(**kwargs)
+         self._credentials = credentials
+         self._project = project
+         self._region_name = region_name
+         self._repository = repository
+         self._repo_resource_id = f"projects/{project}/locations/{region_name}/repositories/{repository}"
+
+         self._gcp_client = artifactregistry_v1.ArtifactRegistryClient(
+             credentials=self._credentials.google_credentials_object
+         )
+         self._authorized_session = self._credentials.authorized_session
+
+     def download_artifact(self, resource_url: str, local_path: str | Path, **kwargs) -> None:
+         response = self._authorized_session.get(url=resource_url, timeout=self.timeout)
+         response.raise_for_status()
+         with open(local_path, 'wb') as file:
+             file.write(response.content)
+
+     def search_artifacts(self, artifact: Artifact, **kwargs) -> list[str]:
+         # works with both "Maven" and "Generic" type repositories
+         name_filter = f"{self._repo_resource_id}/files/*{artifact.artifact_id}-{artifact.version}.{artifact.extension}"
+         list_files_request = artifactregistry_v1.ListFilesRequest(
+             parent=f"{self._repo_resource_id}",
+             filter=f'name="{name_filter}"',
+         )
+         files = self._gcp_client.list_files(request=list_files_request)
+         # logging.debug(f"[GCP search_artifacts] files: {files}")
+
+         urls = []
+         for file in files:
+             download_url = f"https://artifactregistry.googleapis.com/download/v1/{file.name}:download?alt=media"
+             urls.append(download_url)
+         return urls
+
+     def get_provider_name(self) -> str:
+         return "gcp_artifact_registry"
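Construction mirrors the other credential-based providers; a sketch (GcpCredentialsProvider is not shown in this diff, so the `credentials` object and its `google_credentials_object`/`authorized_session` attributes are taken on faith from the constructor above):

```python
from qubership_pipelines_common_library.v2.artifacts_finder.providers.gcp_artifact_registry import GcpArtifactRegistryProvider

credentials = ...  # obtained via GcpCredentialsProvider (API not shown in this diff)
provider = GcpArtifactRegistryProvider(
    credentials=credentials,
    project="my-gcp-project",   # hypothetical project id
    region_name="europe-west1",
    repository="my-repo",
)
urls = provider.search_artifacts(artifact)  # artifactregistry.googleapis.com download URLs
# Downloads then go through the authorized session, not the shared requests session.
```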
qubership_pipelines_common_library/v2/artifacts_finder/providers/nexus.py
@@ -0,0 +1,41 @@
+ from pathlib import Path
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact import Artifact
+ from qubership_pipelines_common_library.v2.artifacts_finder.model.artifact_provider import ArtifactProvider
+
+
+ class NexusProvider(ArtifactProvider):
+
+     def __init__(self, registry_url: str, username: str = None, password: str = None, **kwargs):
+         """
+         Initializes this client to work with **Sonatype Nexus Repository** for maven artifacts.
+         Requires `username` and its `password` or `token`.
+         """
+         super().__init__(**kwargs)
+         self.registry_url = registry_url
+         if password:
+             from requests.auth import HTTPBasicAuth
+             self._session.auth = HTTPBasicAuth(username, password)
+
+     def download_artifact(self, resource_url: str, local_path: str | Path, **kwargs) -> None:
+         return self.generic_download(resource_url=resource_url, local_path=local_path)
+
+     def search_artifacts(self, artifact: Artifact, **kwargs) -> list[str]:
+         search_params = {
+             "maven.extension": artifact.extension,
+             "maven.artifactId": artifact.artifact_id,
+             **({"maven.groupId": artifact.group_id} if artifact.group_id else {}),
+         }
+         if artifact.is_snapshot():
+             search_params["maven.baseVersion"] = artifact.version
+         else:
+             search_params["version"] = artifact.version
+
+         response = self._session.get(url=f"{self.registry_url}/service/rest/v1/search/assets",
+                                      params=search_params,
+                                      timeout=self.timeout)
+         if response.status_code != 200:
+             raise Exception(f"Could not find '{artifact.artifact_id}' - search request returned {response.status_code}!")
+         return [result["downloadUrl"] for result in response.json()["items"]]
+
+     def get_provider_name(self) -> str:
+         return "nexus"
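The `maven.baseVersion`/`version` split is what makes snapshot lookups work; a sketch of the two query shapes this produces (hypothetical coordinates, and `Artifact.is_snapshot()` is assumed to key off the version suffix):

```python
from qubership_pipelines_common_library.v2.artifacts_finder.providers.nexus import NexusProvider

provider = NexusProvider(registry_url="https://nexus.example.com",  # hypothetical
                         username="ci-user", password="<token>")
# A release lookup sends:  {"maven.extension": "jar", "maven.artifactId": "my-lib",
#                           "maven.groupId": "org.example", "version": "1.0.0"}
# A snapshot lookup swaps "version" for "maven.baseVersion": "1.0.0-SNAPSHOT",
# so Nexus matches every timestamped snapshot asset of that base version.
for url in provider.search_artifacts(artifact):  # 'artifact' as in the earlier sketch
    print(url)  # downloadUrl values from /service/rest/v1/search/assets
```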
qubership_pipelines_common_library/v2/extensions/pipeline_data_importer.py
@@ -0,0 +1,24 @@
+ from abc import ABC, abstractmethod
+
+ from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommand
+ from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+
+
+ class PipelineDataImporter(ABC):
+     """
+     Base interface used by "GitHub/GitLab Run Workflow" commands
+     Can be extended by users to perform custom artifacts transformations at the end of workflow
+     """
+
+     def __init__(self):
+         self.context = None
+         self.command = None
+
+     def with_command(self, command: ExecutionCommand):
+         self.command = command
+         self.context = command.context
+
+     @abstractmethod
+     def import_pipeline_data(self, execution: ExecutionInfo) -> None:
+         """Implements custom data import logic"""
+         pass
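Since this interface is the extension point for the Run Workflow commands, a minimal custom implementation for illustration (hypothetical class; it relies only on members the interface defines, plus `ExecutionInfo` getters used elsewhere in this diff):

```python
from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter


class LogOnlyPipelineDataImporter(PipelineDataImporter):
    """Hypothetical importer that records run metadata instead of downloading artifacts."""

    def import_pipeline_data(self, execution: ExecutionInfo) -> None:
        # self.context / self.command are populated by with_command() before this runs
        self.context.logger.info(
            f"Run {execution.get_id()} finished with status {execution.get_status()}")
```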
qubership_pipelines_common_library/v2/github/github_client.py
@@ -0,0 +1,5 @@
+ from qubership_pipelines_common_library.v1.github_client import GithubClient as GithubClientV1
+
+
+ class GithubClient(GithubClientV1):
+     pass
qubership_pipelines_common_library/v2/github/github_pipeline_data_importer.py
@@ -0,0 +1,21 @@
+ import zipfile
+ from pathlib import Path
+
+ from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+ from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
+
+
+ class DefaultGithubPipelineDataImporter(PipelineDataImporter):
+     """
+     Default GitHub implementation:
+     downloads all available workflow run artifacts,
+     extracts them into context-defined 'paths.output.files' path
+     """
+     def import_pipeline_data(self, execution: ExecutionInfo) -> None:
+         self.context.logger.info("DefaultGithubPipelineDataImporter - importing pipeline data...")
+         self.command.github_client.download_workflow_run_artifacts(execution, self.context.path_temp)
+         output_path = Path(self.context.input_param_get("paths.output.files"))
+         output_path.mkdir(parents=True, exist_ok=True)
+         for file_path in Path(self.context.path_temp).iterdir():
+             with zipfile.ZipFile(file_path) as zf:
+                 zf.extractall(output_path)
qubership_pipelines_common_library/v2/github/github_run_pipeline_command.py
@@ -0,0 +1,175 @@
+ from qubership_pipelines_common_library.v1.execution.exec_command import ExecutionCommand
+ from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+ from qubership_pipelines_common_library.v1.utils.utils_string import UtilsString
+ from qubership_pipelines_common_library.v2.extensions.pipeline_data_importer import PipelineDataImporter
+ from qubership_pipelines_common_library.v2.github.github_pipeline_data_importer import DefaultGithubPipelineDataImporter
+ from qubership_pipelines_common_library.v2.github.safe_github_client import SafeGithubClient
+
+
+ class GithubRunPipeline(ExecutionCommand):
+     """
+     Executes a GitHub Actions workflow pipeline and optionally imports artifacts.
+
+     This command triggers a GitHub workflow run, monitors its execution, and provides
+     options for importing workflow artifacts and custom data processing through extensible
+     importers.
+
+     Input Parameters Structure (this structure is expected inside "input_params.params" block):
+     ```
+     {
+         "pipeline_owner": "Netcracker",                   # REQUIRED: Repository owner/organization
+         "pipeline_repo_name": "qubership-test-pipelines", # REQUIRED: Repository name
+         "pipeline_workflow_file_name": "test.yaml",       # REQUIRED: Workflow filename (e.g., main.yaml, ci-cd.yml)
+         "pipeline_branch": "main",                        # OPTIONAL: Branch to run workflow from (default: repo's default branch)
+         "pipeline_params": {                              # OPTIONAL: Input parameters to pass to the workflow
+             "KEY1": "VALUE1",
+             "KEY2": "VALUE2"
+         },
+         "import_artifacts": false,                        # OPTIONAL: Whether to import workflow artifacts (default: false)
+         "use_existing_pipeline": 123456789,               # OPTIONAL: Use existing workflow run ID instead of starting new one (debug feature)
+         "timeout_seconds": 1800,                          # OPTIONAL: Maximum wait time for workflow completion in seconds (default: 1800, 0 for async execution)
+         "wait_seconds": 1,                                # OPTIONAL: Wait interval between status checks in seconds (default: 1)
+         "retry_timeout_seconds": 180,                     # OPTIONAL: Timeout for GitHub client initialization and workflow start retries in seconds (default: 180)
+         "retry_wait_seconds": 1,                          # OPTIONAL: Wait interval between retries in seconds (default: 1)
+         "success_statuses": "SUCCESS,UNSTABLE"            # OPTIONAL: Comma-separated list of acceptable completion statuses (default: SUCCESS)
+     }
+     ```
+
+     Systems Configuration (expected in "systems.github" block):
+     ```
+     {
+         "url": "https://github.com",          # OPTIONAL: GitHub UI URL for self-hosted instances (default: https://github.com)
+         "api_url": "https://api.github.com",  # OPTIONAL: GitHub API URL for self-hosted instances (default: https://api.github.com)
+         "password": "<github_token>"          # REQUIRED: GitHub access token with workflow permissions
+     }
+     ```
+
+     Output Parameters:
+     - params.build.url: URL to view the workflow run in GitHub
+     - params.build.id: ID of the executed workflow run
+     - params.build.status: Final status of the workflow execution
+     - params.build.date: Workflow start time in ISO format
+     - params.build.duration: Total execution duration in human-readable format
+     - params.build.name: Name of the workflow run
+
+     Extension Points:
+     - Custom pipeline data importers can be implemented by extending PipelineDataImporter interface
+     - PipelineDataImporter is passed into constructor of command via "pipeline_data_importer" arg
+
+     Notes:
+     - Setting timeout_seconds to 0 enables asynchronous execution (workflow starts but command doesn't wait for completion)
+     - For self-hosted GitHub Enterprise, configure both "systems.github.url" and "systems.github.api_url"
+     - Custom data importers receive the command context and can implement advanced processing logic
+     """
+
+     # default timeout values
+     WAIT_TIMEOUT = 1800
+     WAIT_SECONDS = 1
+     RETRY_TIMEOUT_SECONDS = 180
+     RETRY_WAIT_SECONDS = 1
+
+     def __init__(self, *args, pipeline_data_importer: PipelineDataImporter = None, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.pipeline_data_importer = pipeline_data_importer or DefaultGithubPipelineDataImporter()
+         if pipeline_data_importer and not isinstance(pipeline_data_importer, PipelineDataImporter):
+             raise TypeError(f"Class {type(pipeline_data_importer)} must inherit from PipelineDataImporter")
+
+     def _validate(self):
+         names = [
+             "paths.input.params",
+             "paths.output.params",
+             "paths.output.files",
+             "systems.github.password",
+             "params.pipeline_owner",
+             "params.pipeline_repo_name",
+             "params.pipeline_workflow_file_name",
+         ]
+         if not self.context.validate(names):
+             return False
+
+         self.timeout_seconds = max(0, int(self.context.input_param_get("params.timeout_seconds", self.WAIT_TIMEOUT)))
+         self.wait_seconds = max(1, int(self.context.input_param_get("params.wait_seconds", self.WAIT_SECONDS)))
+
+         self.retry_timeout_seconds = int(self.context.input_param_get("params.retry_timeout_seconds", self.RETRY_TIMEOUT_SECONDS))
+         self.retry_wait_seconds = int(self.context.input_param_get("params.retry_wait_seconds", self.RETRY_WAIT_SECONDS))
+
+         if self.timeout_seconds == 0:
+             self.context.logger.info(f"Timeout is set to: {self.timeout_seconds}. This means that the pipeline will be started asynchronously")
+
+         self.pipeline_owner = self.context.input_param_get("params.pipeline_owner")
+         self.pipeline_repo_name = self.context.input_param_get("params.pipeline_repo_name")
+         self.pipeline_workflow_file_name = self.context.input_param_get("params.pipeline_workflow_file_name")
+         self.pipeline_branch = self.context.input_param_get("params.pipeline_branch")
+         self.pipeline_params = self.context.input_param_get("params.pipeline_params", {})
+         if not self.pipeline_params:
+             self.context.logger.info("Pipeline parameters were not specified. This means that pipeline will be started with its default values")
+         if not isinstance(self.pipeline_params, dict):
+             self.context.logger.error("Pipeline parameters were not loaded correctly. Probably mistake in the params definition")
+             return False
+         self.import_artifacts = UtilsString.convert_to_bool(self.context.input_param_get("params.import_artifacts", False))
+         self.success_statuses = [x.strip() for x in self.context.input_param_get("params.success_statuses", ExecutionInfo.STATUS_SUCCESS).split(",")]
+         self.use_existing_pipeline = self.context.input_param_get("params.use_existing_pipeline")
+         self.ui_url = self.context.input_param_get("systems.github.ui_url", "https://github.com")
+         return True
+
+     def _execute(self):
+         self.context.logger.info("GithubRunPipeline - triggering GitHub workflow run and fetching results...")
+
+         self.github_client = SafeGithubClient.create_github_client(
+             api_url=self.context.input_param_get("systems.github.api_url"),
+             token=self.context.input_param_get("systems.github.password"),
+             retry_timeout_seconds=self.retry_timeout_seconds,
+             retry_wait_seconds=self.retry_wait_seconds
+         )
+
+         if self.use_existing_pipeline:  # work with existing workflow run
+             pipeline_id = self.use_existing_pipeline
+             self.context.logger.info(f"Using existing pipeline {pipeline_id}")
+             execution = (ExecutionInfo()
+                          .with_url(f"{self.ui_url}/{self.pipeline_owner}/{self.pipeline_repo_name}/")
+                          .with_name(self.pipeline_workflow_file_name).with_id(int(pipeline_id))
+                          .with_status(ExecutionInfo.STATUS_UNKNOWN))
+             execution.start()
+         else:
+             branch = self.pipeline_branch
+             if not branch:
+                 branch = self.github_client.get_repo_default_branch(self.pipeline_owner, self.pipeline_repo_name)
+             execution = self.github_client.trigger_workflow(owner=self.pipeline_owner, repo_name=self.pipeline_repo_name,
+                                                             workflow_file_name=self.pipeline_workflow_file_name,
+                                                             branch=branch, pipeline_params=self.pipeline_params,
+                                                             retry_timeout_seconds=self.retry_timeout_seconds,
+                                                             retry_wait_seconds=self.retry_wait_seconds
+                                                             )
+             self.context.logger.info(f"Triggered pipeline {execution.get_id()}, status: {execution.get_status()}, url: {execution.get_url()}")
+
+         if execution.get_status() != ExecutionInfo.STATUS_IN_PROGRESS:
+             self._exit(False, f"Pipeline was not started. Status {execution.get_status()}")
+         elif self.timeout_seconds < 1:
+             self.context.logger.info("Pipeline was started in asynchronous mode. Pipeline status and artifacts will not be processed")
+             return
+
+         execution = self.github_client.wait_workflow_run_execution(execution=execution,
+                                                                    timeout_seconds=self.timeout_seconds,
+                                                                    wait_seconds=self.wait_seconds)
+         self.context.logger.info(f"Pipeline status: {execution.get_status()}")
+
+         if self.import_artifacts and self.pipeline_data_importer and execution.get_status() in ExecutionInfo.STATUSES_COMPLETE:
+             try:
+                 self.pipeline_data_importer.with_command(self)
+                 self.pipeline_data_importer.import_pipeline_data(execution)
+             except Exception as e:
+                 self.context.logger.error(f"Exception during pipeline_data_importer execution: {e}")
+
+         self._save_execution_info(execution)
+         if execution.get_status() not in self.success_statuses:
+             self._exit(False, f"Status: {execution.get_status()}")
+
+     def _save_execution_info(self, execution: ExecutionInfo):
+         self.context.logger.info("Writing GitHub workflow execution status")
+         self.context.output_param_set("params.build.url", execution.get_url())
+         self.context.output_param_set("params.build.id", execution.get_id())
+         self.context.output_param_set("params.build.status", execution.get_status())
+         self.context.output_param_set("params.build.date", execution.get_time_start().isoformat())
+         self.context.output_param_set("params.build.duration", execution.get_duration_str())
+         self.context.output_param_set("params.build.name", execution.get_name())
+         self.context.output_params_save()
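For reference, a params block matching the documented structure, written as a plain dict (values are the docstring's own examples; this content ends up under the `paths.input.params` context file that `_validate` reads). One wrinkle worth noting: the docstring's systems block names the UI key `url`, while `_validate` actually reads `systems.github.ui_url`.

```python
# Mirrors the documented "input_params.params" block.
params = {
    "pipeline_owner": "Netcracker",
    "pipeline_repo_name": "qubership-test-pipelines",
    "pipeline_workflow_file_name": "test.yaml",
    "pipeline_branch": "main",
    "pipeline_params": {"KEY1": "VALUE1", "KEY2": "VALUE2"},
    "import_artifacts": False,
    "timeout_seconds": 1800,          # 0 would start the workflow asynchronously
    "success_statuses": "SUCCESS,UNSTABLE",
}
```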
qubership_pipelines_common_library/v2/github/safe_github_client.py
@@ -0,0 +1,24 @@
+ from qubership_pipelines_common_library.v1.execution.exec_info import ExecutionInfo
+ from qubership_pipelines_common_library.v2.github.github_client import GithubClient
+ from qubership_pipelines_common_library.v2.utils.retry_decorator import RetryDecorator
+
+
+ class SafeGithubClient(GithubClient):
+
+     def __init__(self, api_url: str, token: str):
+         super().__init__(api_url=api_url, token=token)
+
+     @classmethod
+     @RetryDecorator(condition_func=lambda result: result is not None)
+     def create_github_client(cls, api_url: str, token, retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
+         return cls(api_url=api_url, token=token)
+
+     @RetryDecorator(
+         condition_func=lambda result: result is not None and result.get_status() not in [
+             ExecutionInfo.STATUS_NOT_STARTED, ExecutionInfo.STATUS_UNKNOWN]
+     )
+     def trigger_workflow(self, owner: str, repo_name: str, workflow_file_name: str, branch: str,
+                          pipeline_params, retry_timeout_seconds: int = 180, retry_wait_seconds: int = 1):
+         return super().trigger_workflow(owner=owner, repo_name=repo_name,
+                                         workflow_file_name=workflow_file_name,
+                                         branch=branch, pipeline_params=pipeline_params)
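The retry wiring is visible enough to generalize: `RetryDecorator` takes a `condition_func` judging each attempt's result, and the decorated callables accept `retry_timeout_seconds`/`retry_wait_seconds` keyword arguments. A sketch applying the same pattern to another call (hypothetical function; that the decorator picks those kwargs up is inferred from the signatures above, since retry_decorator.py itself is not shown in this diff):

```python
from qubership_pipelines_common_library.v2.utils.retry_decorator import RetryDecorator

@RetryDecorator(condition_func=lambda result: result is not None)
def resolve_default_branch(client, owner, repo,
                           retry_timeout_seconds=60, retry_wait_seconds=2):
    # get_repo_default_branch is the same call GithubRunPipeline._execute uses
    return client.get_repo_default_branch(owner, repo)
```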