psr-cloud 0.3.10__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- psr_cloud-0.3.10/PKG-INFO +112 -0
- psr_cloud-0.3.10/README.md +100 -0
- psr_cloud-0.3.10/psr/cloud/__init__.py +7 -0
- psr_cloud-0.3.10/psr/cloud/aws.py +271 -0
- psr_cloud-0.3.10/psr/cloud/cloud.py +1498 -0
- psr_cloud-0.3.10/psr/cloud/data.py +127 -0
- psr_cloud-0.3.10/psr/cloud/desktop.py +82 -0
- psr_cloud-0.3.10/psr/cloud/log.py +40 -0
- psr_cloud-0.3.10/psr/cloud/status.py +81 -0
- psr_cloud-0.3.10/psr/cloud/tempfile.py +117 -0
- psr_cloud-0.3.10/psr/cloud/version.py +5 -0
- psr_cloud-0.3.10/psr/cloud/xml.py +57 -0
- psr_cloud-0.3.10/psr_cloud.egg-info/PKG-INFO +112 -0
- psr_cloud-0.3.10/psr_cloud.egg-info/SOURCES.txt +19 -0
- psr_cloud-0.3.10/psr_cloud.egg-info/dependency_links.txt +1 -0
- psr_cloud-0.3.10/psr_cloud.egg-info/requires.txt +5 -0
- psr_cloud-0.3.10/psr_cloud.egg-info/top_level.txt +1 -0
- psr_cloud-0.3.10/pyproject.toml +28 -0
- psr_cloud-0.3.10/setup.cfg +4 -0
- psr_cloud-0.3.10/tests/tests.py +276 -0
- psr_cloud-0.3.10/tests/tests_remote.py +93 -0
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: psr-cloud
|
|
3
|
+
Version: 0.3.10
|
|
4
|
+
Summary: PSRCloud Python Module
|
|
5
|
+
Author-email: Lucas Storino <lstorino@psr-inc.com>, Pedro Henrique <pedrohenrique@psr-inc.com>
|
|
6
|
+
Description-Content-Type: text/markdown
|
|
7
|
+
Requires-Dist: zeep
|
|
8
|
+
Requires-Dist: filelock
|
|
9
|
+
Requires-Dist: pefile
|
|
10
|
+
Requires-Dist: boto3
|
|
11
|
+
Requires-Dist: tqdm
|
|
12
|
+
|
|
13
|
+
# psr.cloud (pycloud) Module
|
|
14
|
+
|
|
15
|
+
## Installation
|
|
16
|
+
|
|
17
|
+
### From source (Recommended)
|
|
18
|
+
|
|
19
|
+
Copy `psr` folder and its contents to your work directory or add its parent path to `PYTHONPATH` environment variable before running it.
|
|
20
|
+
|
|
21
|
+
### pip
|
|
22
|
+
|
|
23
|
+
If you have pip installed and the correct git credentials run the command:
|
|
24
|
+
|
|
25
|
+
```python
|
|
26
|
+
pip install git+https://github.com/psrenergy/pycloud.git
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
## Usage
|
|
30
|
+
|
|
31
|
+
```python
|
|
32
|
+
import psr.cloud
|
|
33
|
+
|
|
34
|
+
client = psr.cloud.Client()
|
|
35
|
+
|
|
36
|
+
case = psr.cloud.Case(data_path=r"C:\PSR\Sddp17.3\Example\12_stages\Case21",
|
|
37
|
+
price_optimized=True,
|
|
38
|
+
program="SDDP",
|
|
39
|
+
program_version="17.3.7",
|
|
40
|
+
name="Test PyCloud",
|
|
41
|
+
execution_type="Default",
|
|
42
|
+
number_of_processes=64,
|
|
43
|
+
memory_per_process_ratio="2:1",
|
|
44
|
+
repository_duration=2 # Normal (1 month)
|
|
45
|
+
)
|
|
46
|
+
|
|
47
|
+
client.run_case(case)
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
## Authentication
|
|
51
|
+
|
|
52
|
+
#### Keyword argument specified in `Client` constructor:
|
|
53
|
+
|
|
54
|
+
- `username` - specify username string
|
|
55
|
+
- `password` - plain password string
|
|
56
|
+
|
|
57
|
+
```python
|
|
58
|
+
client = psr.cloud.Client(username="myuser", password=os.environ["MY_PASSWORD"])
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
The password will never be stored plainly, only its md5 hash will be used.
|
|
62
|
+
|
|
63
|
+
#### Read from environment variables
|
|
64
|
+
|
|
65
|
+
Preferred over keyword arguments:
|
|
66
|
+
|
|
67
|
+
- `PSR_CLOUD_USER` - specify username
|
|
68
|
+
- `PSR_CLOUD_PASSWORD_HASH` - md5 password hash
|
|
69
|
+
|
|
70
|
+
Password hash can be obtained by running the code below:
|
|
71
|
+
|
|
72
|
+
```python
|
|
73
|
+
import psr.cloud as pycloud
|
|
74
|
+
pycloud.hash_password("ExamplePassword")
|
|
75
|
+
```
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
#### Automatic
|
|
79
|
+
|
|
80
|
+
Will use PSR Cloud client auth data, if available.
|
|
81
|
+
|
|
82
|
+
## Querying PSR Cloud options
|
|
83
|
+
|
|
84
|
+
#### Available programs/models
|
|
85
|
+
|
|
86
|
+
```python
|
|
87
|
+
get_programs() -> list[str]
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
#### Available model versions
|
|
91
|
+
|
|
92
|
+
```python
|
|
93
|
+
get_program_versions(program: str) -> dict[int, str]
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
#### Available execution types
|
|
97
|
+
|
|
98
|
+
```python
|
|
99
|
+
get_execution_types(program: str, version: Union[str, int]) -> dict[int, str]
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
#### Available memory per process ratios
|
|
103
|
+
|
|
104
|
+
```python
|
|
105
|
+
get_memory_per_process_ratios() -> list[str]
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
#### Available repository durations
|
|
109
|
+
|
|
110
|
+
```python
|
|
111
|
+
get_repository_durations() -> dict[int, str]
|
|
112
|
+
```
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
# psr.cloud (pycloud) Module
|
|
2
|
+
|
|
3
|
+
## Installation
|
|
4
|
+
|
|
5
|
+
### From source (Recommended)
|
|
6
|
+
|
|
7
|
+
Copy `psr` folder and its contents to your work directory or add its parent path to `PYTHONPATH` environment variable before running it.
|
|
8
|
+
|
|
9
|
+
### pip
|
|
10
|
+
|
|
11
|
+
If you have pip installed and the correct git credentials run the command:
|
|
12
|
+
|
|
13
|
+
```python
|
|
14
|
+
pip install git+https://github.com/psrenergy/pycloud.git
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## Usage
|
|
18
|
+
|
|
19
|
+
```python
|
|
20
|
+
import psr.cloud
|
|
21
|
+
|
|
22
|
+
client = psr.cloud.Client()
|
|
23
|
+
|
|
24
|
+
case = psr.cloud.Case(data_path=r"C:\PSR\Sddp17.3\Example\12_stages\Case21",
|
|
25
|
+
price_optimized=True,
|
|
26
|
+
program="SDDP",
|
|
27
|
+
program_version="17.3.7",
|
|
28
|
+
name="Test PyCloud",
|
|
29
|
+
execution_type="Default",
|
|
30
|
+
number_of_processes=64,
|
|
31
|
+
memory_per_process_ratio="2:1",
|
|
32
|
+
repository_duration=2 # Normal (1 month)
|
|
33
|
+
)
|
|
34
|
+
|
|
35
|
+
client.run_case(case)
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
## Authentication
|
|
39
|
+
|
|
40
|
+
#### Keyword argument specified in `Client` constructor:
|
|
41
|
+
|
|
42
|
+
- `username` - specify username string
|
|
43
|
+
- `password` - plain password string
|
|
44
|
+
|
|
45
|
+
```python
|
|
46
|
+
client = psr.cloud.Client(username="myuser", password=os.environ["MY_PASSWORD"])
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
The password will never be stored plainly, only its md5 hash will be used.
|
|
50
|
+
|
|
51
|
+
#### Read from environment variables
|
|
52
|
+
|
|
53
|
+
Preferred over keyword arguments:
|
|
54
|
+
|
|
55
|
+
- `PSR_CLOUD_USER` - specify username
|
|
56
|
+
- `PSR_CLOUD_PASSWORD_HASH` - md5 password hash
|
|
57
|
+
|
|
58
|
+
Password hash can be obtained by running the code below:
|
|
59
|
+
|
|
60
|
+
```python
|
|
61
|
+
import psr.cloud as pycloud
|
|
62
|
+
pycloud.hash_password("ExamplePassword")
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
#### Automatic
|
|
67
|
+
|
|
68
|
+
Will use PSR Cloud client auth data, if available.
|
|
69
|
+
|
|
70
|
+
## Querying PSR Cloud options
|
|
71
|
+
|
|
72
|
+
#### Available programs/models
|
|
73
|
+
|
|
74
|
+
```python
|
|
75
|
+
get_programs() -> list[str]
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
#### Available model versions
|
|
79
|
+
|
|
80
|
+
```python
|
|
81
|
+
get_program_versions(program: str) -> dict[int, str]
|
|
82
|
+
```
|
|
83
|
+
|
|
84
|
+
#### Available execution types
|
|
85
|
+
|
|
86
|
+
```python
|
|
87
|
+
get_execution_types(program: str, version: Union[str, int]) -> dict[int, str]
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
#### Available memory per process ratios
|
|
91
|
+
|
|
92
|
+
```python
|
|
93
|
+
get_memory_per_process_ratios() -> list[str]
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
#### Available repository durations
|
|
97
|
+
|
|
98
|
+
```python
|
|
99
|
+
get_repository_durations() -> dict[int, str]
|
|
100
|
+
```
|
|
@@ -0,0 +1,271 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import tempfile
|
|
3
|
+
import zipfile
|
|
4
|
+
from typing import Dict, List, Optional
|
|
5
|
+
|
|
6
|
+
import boto3
|
|
7
|
+
from botocore.exceptions import ClientError
|
|
8
|
+
from tqdm import tqdm
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class AWS:
    """Thin wrapper around a boto3 S3 client used by PSR Cloud to transfer
    case files and model-version builds (upload, download, existence checks).
    """

    def __init__(
        self,
        access: str,
        secret: str,
        session_token: str,
        url: str,
        Logger=None,
    ):
        """Create an S3 client from temporary AWS credentials.

        :param access: AWS access key id.
        :param secret: AWS secret access key.
        :param session_token: AWS session token (temporary credentials).
        :param url: S3 endpoint host; its first dot-separated component is
            used as the region name (see :meth:`get_region`).
        :param Logger: optional logger-like object exposing ``info``/``error``.
            May be ``None``, in which case logging is skipped.
        """
        self.s3_client = boto3.client(
            "s3",
            aws_access_key_id=access,
            aws_secret_access_key=secret,
            aws_session_token=session_token,
            region_name=AWS.get_region(url),
        )
        self.logger = Logger

    def _log_error(self, message: str) -> None:
        """Log *message* as an error if a logger was provided.

        Guarding here fixes an ``AttributeError`` that previously occurred on
        every error path when the class was constructed with ``Logger=None``.
        """
        if self.logger:
            self.logger.error(message)

    @staticmethod
    def get_region(url: Optional[str]) -> Optional[str]:
        """Extract the region from the S3 URL.

        Assumes the region is the first dot-separated component of the host
        (e.g. ``us-east-1.amazonaws.com`` -> ``us-east-1``).  Returns ``None``
        for an empty/missing URL so boto3 falls back to its default region.
        """
        if url:
            return url.split(".")[0]
        return None

    def upload_file(
        self,
        file_path: str,
        bucket_name: str,
        object_name: Optional[str] = None,
        extra_args: Optional[dict] = None,
        Callback=None,
    ) -> bool:
        """Upload a file to an S3 bucket using the AWS instance's S3 client.

        :param file_path: local path of the file to upload.
        :param bucket_name: destination S3 bucket.
        :param object_name: S3 key; defaults to the file's basename.
        :param extra_args: optional ``ExtraArgs`` dict (e.g. ``Metadata``).
        :param Callback: optional progress callback, forwarded to boto3.
        :return: ``True`` on success, ``False`` on a logged ``ClientError``.
        """
        if object_name is None:
            object_name = os.path.basename(file_path)
        try:
            self.s3_client.upload_file(
                file_path,
                bucket_name,
                object_name,
                ExtraArgs=extra_args,
                Callback=Callback,
            )
            return True
        except ClientError as e:
            self._log_error(f"Error uploading file: {e}")
            return False

    def upload_case(
        self,
        files: List[str],
        repository_id: str,
        bucket_name: str,
        checksums: Optional[Dict[str, str]] = None,
        zip_compress: bool = False,
        compress_zip_name: Optional[str] = None,
    ):
        """Upload case files to an S3 bucket.

        Files land under ``{repository_id}/uploaded/``; companion files found
        in a local ``.metadata`` directory (next to the first file) are
        mirrored under ``{repository_id}/.metadata/``.

        :param files: local paths of the case files to upload.
        :param repository_id: S3 key prefix identifying the case repository.
        :param bucket_name: destination S3 bucket.
        :param checksums: optional map of file basename -> checksum, stored
            as per-object metadata (non-zip uploads only).
        :param zip_compress: when ``True``, bundle all files into one zip.
        :param compress_zip_name: zip basename; defaults to *repository_id*.
        :raises ValueError: if any upload fails.
        """
        # Metadata required by the AWS FSx-for-Lustre import so the files
        # get usable ownership/permissions on the cluster filesystem.
        base_metadata: Dict[str, str] = {
            "upload": str(True).lower(),
            "user-agent": "aws-fsx-lustre",
            "file-owner": "537",
            "file-group": "500",
            "file-permissions": "100777",
        }

        if zip_compress and not compress_zip_name:
            compress_zip_name = str(repository_id)

        if zip_compress:
            # Create a closed temporary file so zipfile can (re)open the
            # path on all platforms, including Windows.
            fd, zip_path = tempfile.mkstemp(suffix=".zip")
            os.close(fd)

            try:
                with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
                    for file_path in files:
                        zipf.write(file_path, arcname=os.path.basename(file_path))

                object_name = f"{repository_id}/uploaded/{compress_zip_name}.zip"
                extra_args = {"Metadata": base_metadata.copy()}

                if not self.upload_file(
                    zip_path, bucket_name, object_name, extra_args=extra_args
                ):
                    raise ValueError(
                        f"Failed to upload zip file {zip_path} to S3 bucket {bucket_name}."
                    )
            finally:
                # Always remove the temporary zip, even on failure.
                if os.path.exists(zip_path):
                    os.unlink(zip_path)
        else:
            for file_path in files:
                file_basename = os.path.basename(file_path)
                object_name = f"{repository_id}/uploaded/{file_basename}"

                current_file_metadata = base_metadata.copy()
                if checksums:
                    current_file_metadata["checksum"] = checksums.get(file_basename, "")

                extra_args = {"Metadata": current_file_metadata}

                # BUGFIX: bucket_name was previously omitted, so the object
                # key was passed as the bucket positional argument.
                if not self.upload_file(
                    file_path, bucket_name, object_name, extra_args=extra_args
                ):
                    raise ValueError(
                        f"Failed to upload file {file_path} to S3 bucket {bucket_name}."
                    )

        # Always upload .metadata files if the source 'files' list is provided
        if files:
            data_directory = os.path.dirname(files[0])
            metadata_dir_local_path = os.path.join(data_directory, ".metadata")

            if os.path.isdir(metadata_dir_local_path):
                for original_file_path in files:
                    original_file_basename = os.path.basename(original_file_path)
                    local_metadata_file_path = os.path.join(
                        metadata_dir_local_path, original_file_basename
                    )

                    if os.path.isfile(local_metadata_file_path):
                        s3_metadata_object_name = (
                            f"{repository_id}/.metadata/{original_file_basename}"
                        )
                        extra_args = {"Metadata": base_metadata.copy()}
                        # BUGFIX: bucket_name was previously omitted here too.
                        if not self.upload_file(
                            local_metadata_file_path,
                            bucket_name,
                            s3_metadata_object_name,
                            extra_args=extra_args,
                        ):
                            raise ValueError(
                                f"Failed to upload metadata file {local_metadata_file_path} to S3 bucket {bucket_name}."
                            )

    def upload_version(
        self,
        model_name: str,
        version_name: str,
        bucket_name: str,
        file_path: str,
    ):
        """Upload a new model version build to S3 and refresh ``latest/``.

        The build is stored under ``{model}/{version}/linux/`` with a tqdm
        progress bar, the ``linux/latest/`` folder is emptied, and the new
        build is copied into it.

        :raises ValueError: if the upload itself fails.
        :raises RuntimeError: if cleaning or repopulating ``latest/`` fails.
        """
        build = os.path.basename(file_path)
        object_name = f"{model_name}/{version_name}/linux/{build}"
        file_size = os.path.getsize(file_path)
        custom_format = (
            "{desc} [{percentage:3.0f}%] |{bar}| {n_fmt}/{total_fmt} @ {rate_fmt}"
        )
        with tqdm(
            bar_format=custom_format,
            total=file_size,
            unit="B",
            unit_scale=True,
            desc=f"Uploading {build}...",
        ) as pbar:
            # pbar.update receives the bytes transferred per boto3 callback.
            if not self.upload_file(
                file_path,
                bucket_name,
                object_name,
                Callback=pbar.update,
            ):
                raise ValueError(
                    f"Failed to upload file {file_path} to S3 bucket {bucket_name}."
                )

        # Delete all objects in the "latest" folder
        latest_prefix = f"{model_name}/{version_name}/linux/latest/"
        try:
            response = self.s3_client.list_objects_v2(
                Bucket=bucket_name, Prefix=latest_prefix
            )
            if "Contents" in response:
                for obj in response["Contents"]:
                    self.s3_client.delete_object(Bucket=bucket_name, Key=obj["Key"])
        except ClientError as e:
            self._log_error(f"Error deleting objects in 'latest' folder: {e}")
            raise RuntimeError(f"Failed to clean 'latest' folder in S3: {e}") from e

        # Copy the uploaded version file to the "latest" folder
        latest_object_name = f"{model_name}/{version_name}/linux/latest/{build}"
        copy_source = {"Bucket": bucket_name, "Key": object_name}
        try:
            self.s3_client.copy_object(
                Bucket=bucket_name, CopySource=copy_source, Key=latest_object_name
            )
        except ClientError as e:
            self._log_error(f"Error copying version file to 'latest' folder: {e}")
            raise RuntimeError(
                f"Failed to copy version file to 'latest' folder in S3: {e}"
            ) from e

    def download_file(
        self, bucket_name: str, s3_object_key: str, local_file_path: str
    ) -> bool:
        """Download a single object from S3 to a local file path.

        :return: ``True`` on success, ``False`` on a logged ``ClientError``.
        """
        try:
            self.s3_client.download_file(bucket_name, s3_object_key, local_file_path)
            return True
        except ClientError as e:
            self._log_error(f"ERROR: Failed to download {s3_object_key} from S3: {e}")
            return False

    def download_case(
        self,
        repository_id: str,
        cluster_name: str,
        bucket_name: str,
        output_path: str,
        file_list: List[str],
    ) -> List[str]:
        """Download files from an S3 bucket for a given case repository.

        Each name in *file_list* is fetched from ``{repository_id}/{name}``
        into *output_path*; files whose individual download fails are
        silently skipped (already logged by :meth:`download_file`).

        :param cluster_name: kept for interface compatibility; not used here.
        :return: basenames of the files that were downloaded successfully.
        :raises RuntimeError: on an unexpected error during the loop.
        """
        downloaded_files: List[str] = []

        try:
            for file_name in file_list:
                s3_object_key = f"{repository_id}/{file_name}"
                local_file_path = os.path.join(output_path, file_name)
                # File names may contain sub-directories; make sure the
                # local destination directory exists before downloading.
                target_dir = os.path.dirname(local_file_path)
                if target_dir:
                    os.makedirs(target_dir, exist_ok=True)
                if self.logger:
                    self.logger.info(
                        f"Downloading {s3_object_key} to {local_file_path}"
                    )
                if self.download_file(bucket_name, s3_object_key, local_file_path):
                    downloaded_files.append(os.path.basename(local_file_path))
        except ClientError as e:
            self._log_error(f"ERROR: S3 ClientError during download: {e}")
            raise RuntimeError(f"Failed to download files from S3: {e}") from e
        except Exception as e:
            self._log_error(
                f"ERROR: An unexpected error occurred during download: {e}"
            )
            raise RuntimeError(
                f"An unexpected error occurred during S3 download: {e}"
            ) from e

        return downloaded_files

    def check_version_build_exists(
        self,
        bucket_name: str,
        model_name: str,
        version_name: str,
        build_id: str,
    ) -> bool:
        """Check whether a specific version build zip exists in the S3 bucket.

        :return: ``True`` if ``{model}/{version}/linux/{build_id}.zip`` exists.
        :raises RuntimeError: on any S3 error other than a 404.
        """
        object_key = f"{model_name}/{version_name}/linux/{build_id}.zip"
        try:
            self.s3_client.head_object(Bucket=bucket_name, Key=object_key)
            return True
        except ClientError as e:
            # head_object reports a missing key as a 404 error code.
            if e.response["Error"]["Code"] == "404":
                return False
            self._log_error(f"Error checking version build existence: {e}")
            raise RuntimeError(
                f"Failed to check version build existence in S3: {e}"
            ) from e