psr-factory 5.0.0b20__py3-none-win_amd64.whl → 5.0.0b23__py3-none-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- psr/cloud/aws.py +169 -241
- psr/cloud/cloud.py +74 -46
- psr/factory/__init__.py +1 -1
- psr/factory/api.py +15 -0
- psr/factory/factory.dll +0 -0
- psr/factory/factory.pmd +8 -0
- psr/factory/factorylib.py +2 -0
- psr/factory/libcurl-x64.dll +0 -0
- {psr_factory-5.0.0b20.dist-info → psr_factory-5.0.0b23.dist-info}/METADATA +1 -1
- {psr_factory-5.0.0b20.dist-info → psr_factory-5.0.0b23.dist-info}/RECORD +13 -13
- {psr_factory-5.0.0b20.dist-info → psr_factory-5.0.0b23.dist-info}/WHEEL +0 -0
- {psr_factory-5.0.0b20.dist-info → psr_factory-5.0.0b23.dist-info}/licenses/LICENSE.txt +0 -0
- {psr_factory-5.0.0b20.dist-info → psr_factory-5.0.0b23.dist-info}/top_level.txt +0 -0
psr/cloud/aws.py
CHANGED
@@ -7,250 +7,178 @@ import boto3
 from botocore.exceptions import ClientError
 
 
-… (elided in source diff)
-    secret: Optional[str] = None,
-    session_token: Optional[str] = None,
-    bucket_name: Optional[str] = None,
-    url: Optional[str] = None,
-    zip_compress: bool = False,
-    compress_zip_name: str = None,
-):
-    """Upload files to an S3 bucket."""
-
-    region = _get_region(url)
-
-    if not region or not access or not secret or not session_token or not bucket_name:
-        raise ValueError("Unable to set up AWS connection.")
-
-    s3_client = boto3.client(
-        "s3",
-        aws_access_key_id=access,
-        aws_secret_access_key=secret,
-        aws_session_token=session_token,
-        region_name=region,
-    )
-
-    # Base metadata, common for both zip and individual files
-    base_metadata: Dict[str, str] = {
-        "upload": str(True).lower(),
-        "user-agent": "aws-fsx-lustre",
-        "file-owner": "537",
-        "file-group": "500",
-        "file-permissions": "100777",
-    }
-
-    if zip_compress and not compress_zip_name:
-        compress_zip_name = str(repository_id)
-
-    if zip_compress:
-        # Create a temporary zip file
-        with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as tmp_zip_file:
-            zip_path = tmp_zip_file.name
-            tmp_zip_file.close()  # Close the file handle so zipfile can open it
-
+class AWS:
+    def __init__(
+        self,
+        access: str,
+        secret: str,
+        session_token: str,
+        url: str,
+        bucket_name: str,
+        Logger=None,
+    ):
+        self.s3_client = boto3.client(
+            "s3",
+            aws_access_key_id=access,
+            aws_secret_access_key=secret,
+            aws_session_token=session_token,
+            region_name=AWS.get_region(url),
+        )
+        self.bucket_name = bucket_name
+        self.logger = Logger
+
+    @staticmethod
+    def get_region(url: Optional[str]) -> Optional[str]:
+        """Extract the region from the S3 URL."""
+        if url:
+            parts = url.split(".")
+            return parts[0]
+        return None
+
+    def upload_file(
+        self,
+        file_path: str,
+        object_name: Optional[str] = None,
+        extra_args: Optional[dict] = None,
+    ) -> bool:
+        """Upload a file to an S3 bucket using the AWS instance's S3 client."""
+        if object_name is None:
+            object_name = os.path.basename(file_path)
         try:
-… (elided in source diff)
-            )
-… (elided in source diff)
+            self.s3_client.upload_file(
+                file_path, self.bucket_name, object_name, ExtraArgs=extra_args
+            )
+            return True
+        except ClientError as e:
+            self.logger.error(f"Error uploading file: {e}")
+            return False
+
+    def upload_case(
+        self,
+        files: List[str],
+        repository_id: str,
+        cluster_name: str,
+        checksums: Optional[Dict[str, str]] = None,
+        zip_compress: bool = False,
+        compress_zip_name: str = None,
+    ):
+        """Upload files to an S3 bucket."""
+        base_metadata: Dict[str, str] = {
+            "upload": str(True).lower(),
+            "user-agent": "aws-fsx-lustre",
+            "file-owner": "537",
+            "file-group": "500",
+            "file-permissions": "100777",
+        }
+
+        if zip_compress and not compress_zip_name:
+            compress_zip_name = str(repository_id)
+
+        if zip_compress:
+            # Create a temporary zip file
+            with tempfile.NamedTemporaryFile(
+                suffix=".zip", delete=False
+            ) as tmp_zip_file:
+                zip_path = tmp_zip_file.name
+                tmp_zip_file.close()  # Close the file handle so zipfile can open it
+
+            try:
+                with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
+                    for file_path in files:
+                        zipf.write(file_path, arcname=os.path.basename(file_path))
+
+                object_name = f"{repository_id}/uploaded/{compress_zip_name}.zip"
+                extra_args = {"Metadata": base_metadata.copy()}
+
+                if not self.upload_file(zip_path, object_name, extra_args=extra_args):
+                    raise ValueError(
+                        f"Failed to upload zip file {zip_path} to S3 bucket {self.bucket_name}."
+                    )
+            finally:
+                if os.path.exists(zip_path):
+                    os.unlink(zip_path)
+        else:
+            for file_path in files:
+                file_basename = os.path.basename(file_path)
+                object_name = f"{repository_id}/uploaded/{file_basename}"
+
+                current_file_metadata = base_metadata.copy()
+                if checksums:
+                    current_file_metadata["checksum"] = checksums.get(file_basename, "")
+
+                extra_args = {"Metadata": current_file_metadata}
+
+                if not self.upload_file(file_path, object_name, extra_args=extra_args):
+                    raise ValueError(
+                        f"Failed to upload file {file_path} to S3 bucket {self.bucket_name}."
                     )
-        extra_args = {"Metadata": base_metadata.copy()}
-        if not upload_file_to_s3(
-            s3_client,
-            bucket_name,
-            local_metadata_file_path,
-            s3_metadata_object_name,
-            extra_args=extra_args,
-        ):
-            raise ValueError(
-                f"Failed to upload metadata file {local_metadata_file_path} to S3 bucket {bucket_name}."
-            )
-
-
-def _download_s3_object(
-    s3_client, bucket_name: str, s3_object_key: str, local_file_path: str
-) -> bool:
-    """
-    Downloads a single object from S3 to a local file path.
-
-    :param s3_client: Initialized S3 client.
-    :param bucket_name: Name of the S3 bucket.
-    :param s3_object_key: The key of the object in S3.
-    :param local_file_path: The local path where the file should be saved.
-    :return: True if download was successful, False otherwise.
-    """
-
-    try:
-        s3_client.download_file(bucket_name, s3_object_key, local_file_path)
-        return True
-    except ClientError as e:
-        print(f"ERROR: Failed to download {s3_object_key} from S3: {e}")
-        return False
-
-
-def download_case_from_s3(
-    repository_id: str,
-    cluster_name: str,  # Kept for consistency with caller, though not used directly in S3 ops
-    access: str,
-    secret: str,
-    session_token: str,
-    bucket_name: str,
-    url: str,  # S3 endpoint URL, used by _get_region
-    output_path: str,
-    file_list: List[str],
-) -> List[str]:
-    """
-    Downloads files from an S3 bucket for a given case repository.
-
-    It iterates through the provided `file_list`, downloads each specified file
-    from the S3 path `{repository_id}/{file_in_list}`, preserving its relative path
-    under `output_path`. It then checks if each downloaded file is gzipped,
-    decompresses it if necessary, and returns a list of basenames of the
-    final downloaded (and potentially decompressed) files.
-
-    :param repository_id: The ID of the repository in S3.
-    :param cluster_name: Name of the cluster (for context, not used in S3 calls).
-    :param access: AWS access key ID.
-    :param secret: AWS secret access key.
-    :param session_token: AWS session token.
-    :param bucket_name: Name of the S3 bucket.
-    :param url: S3 service URL (used to determine region via _get_region).
-    :param output_path: Local directory where files will be downloaded.
-    :param file_list: A list of file names (basenames) to be downloaded.
-    :return: A list of basenames of the downloaded (and decompressed) files.
-    :raises ValueError: If S3 connection parameters are missing or filter is invalid.
-    :raises RuntimeError: If S3 operations fail.
-    """
-    region = _get_region(url)
-    if not all([region, access, secret, session_token, bucket_name]):
-        # TODO: Replace print with proper logging
-        print(
-            "ERROR: Missing S3 connection parameters (region, access, secret, token, or bucket name)."
-        )
-        raise ValueError("Missing S3 connection parameters.")
-
-    s3_client = boto3.client(
-        "s3",
-        aws_access_key_id=access,
-        aws_secret_access_key=secret,
-        aws_session_token=session_token,
-        region_name=region,
-    )
-
-    downloaded_files: List[str] = []
 
-… (elided in source diff)
+        # Always upload .metadata files if the source 'files' list is provided
+        if files:
+            data_directory = os.path.dirname(files[0])
+            metadata_dir_local_path = os.path.join(data_directory, ".metadata")
 
-… (elided in source diff)
+            if os.path.isdir(metadata_dir_local_path):
+                for original_file_path in files:
+                    original_file_basename = os.path.basename(original_file_path)
+                    local_metadata_file_path = os.path.join(
+                        metadata_dir_local_path, original_file_basename
+                    )
 
-… (elided in source diff)
+                    if os.path.isfile(local_metadata_file_path):
+                        s3_metadata_object_name = (
+                            f"{repository_id}/.metadata/{original_file_basename}"
+                        )
+                        extra_args = {"Metadata": base_metadata.copy()}
+                        if not self.upload_file(
+                            local_metadata_file_path,
+                            s3_metadata_object_name,
+                            extra_args=extra_args,
+                        ):
+                            raise ValueError(
+                                f"Failed to upload metadata file {local_metadata_file_path} to S3 bucket {self.bucket_name}."
+                            )
+
+    def download_file(self, s3_object_key: str, local_file_path: str) -> bool:
+        """Downloads a single object from S3 to a local file path."""
+        try:
+            self.s3_client.download_file(
+                self.bucket_name, s3_object_key, local_file_path
+            )
+            return True
+        except ClientError as e:
+            self.logger.error(f"ERROR: Failed to download {s3_object_key} from S3: {e}")
+            return False
+
+    def download_case(
+        self,
+        repository_id: str,
+        cluster_name: str,
+        output_path: str,
+        file_list: List[str],
+    ) -> List[str]:
+        """
+        Downloads files from an S3 bucket for a given case repository.
+        """
+        downloaded_files: List[str] = []
 
-
+        try:
+            for file_name in file_list:
+                s3_object_key = f"{repository_id}/{file_name}"
+                local_file_path = os.path.join(output_path, file_name)
+                if self.logger:
+                    self.logger.info(
+                        f"Downloading {s3_object_key} to {local_file_path}"
+                    )
+                if self.download_file(s3_object_key, local_file_path):
+                    downloaded_files.append(os.path.basename(local_file_path))
+        except ClientError as e:
+            self.logger.error(f"ERROR: S3 ClientError during download: {e}")
+            raise RuntimeError(f"Failed to download files from S3: {e}")
+        except Exception as e:
+            self.logger.error(
+                f"ERROR: An unexpected error occurred during download: {e}"
+            )
+            raise RuntimeError(f"An unexpected error occurred during S3 download: {e}")
+
+        return downloaded_files
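
For reference, a minimal usage sketch of the new class-based API that replaces the removed module-level helpers (upload_case_to_s3, download_case_from_s3). The AWS constructor and the upload_case/download_case signatures come from the diff above; the credentials, bucket, paths, and logger wiring are hypothetical placeholders:

import logging

from psr.cloud.aws import AWS

logger = logging.getLogger("psr.cloud")

# The region is derived from the first dot-separated component of the URL
# (AWS.get_region), so "us-east-1.amazonaws.com" yields "us-east-1".
client = AWS(
    access="AKIA...",  # placeholder credentials
    secret="***",
    session_token="***",
    url="us-east-1.amazonaws.com",
    bucket_name="example-bucket",
    Logger=logger,
)

# Bundle the case files into one zip before uploading.
client.upload_case(
    files=["case/data1.dat", "case/data2.dat"],
    repository_id="12345",
    cluster_name="example-cluster",
    zip_compress=True,
)

# Fetch selected result files into an existing local directory.
downloaded = client.download_case(
    repository_id="12345",
    cluster_name="example-cluster",
    output_path="./results",
    file_list=["results.csv"],
)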
psr/cloud/cloud.py
CHANGED
@@ -23,7 +23,7 @@ import pefile
 import zeep
 from filelock import FileLock
 
-from .aws import …
+from .aws import AWS
 from .data import Case, CloudError, CloudInputError
 from .desktop import import_case
 from .log import enable_log_timestamp, get_logger
@@ -647,7 +647,8 @@ class Client:
         xml_content = create_case_xml(parameters)
 
         if dry_run:
-
+            self._logger.info(f"Dry run:\n{xml_content}")
+            return 0
 
         if self._python_client:
             case_id = self._execute_case(parameters)
@@ -758,6 +759,21 @@ class Client:
         files: Optional[List[str]] = None,
         extensions: Optional[List[str]] = None,
     ) -> None:
+        case = self.get_case(case_id)
+        output_path = _handle_relative_path(output_path)
+        parameters = {
+            "urlServico": self.cluster["url"],
+            "usuario": self.username,
+            "senha": self.__password,
+            "idioma": "3",
+            "_cluster": self.cluster["name"],
+            "modelo": case.program,
+            "comando": "download",
+            "diretorioDestino": output_path,
+            "repositorioId": str(case_id),
+        }
+
+        # Handling download filter
         filter = ""
 
         if not extensions and not files:
@@ -767,33 +783,31 @@ class Client:
 
         if extensions:
             Client._validate_extensions(extensions)
-            filter_elements.extend([f"…
+            filter_elements.extend([f".*.{ext}" for ext in extensions])
 
         if files:
             filter_elements.extend(files)
 
-
+        if self._python_client:
+            # Convert mask to regex for python_client
+            # regex_parts = []
+            # for part in filter_elements:
+            #     regex_parts.append(r".*" + re.escape(part[1:]) + r"$" if part.startswith("*") else r"^" + re.escape(part) + r"$")
+            filter = "|".join(filter_elements)
+            parameters["filtroDownload"] = filter
+        else:
+            filter = "|".join(filter_elements)
+            parameters["filtroDownloadPorMascara"] = filter
 
         self._logger.info("Download filter: " + filter)
-        case = self.get_case(case_id)
-        output_path = _handle_relative_path(output_path)
-        parameters = {
-            "urlServico": self.cluster["url"],
-            "usuario": self.username,
-            "senha": self.__password,
-            "idioma": "3",
-            "_cluster": self.cluster["name"],
-            "modelo": case.program,
-            "comando": "download",
-            "diretorioDestino": output_path,
-            "repositorioId": str(case_id),
-            "filtroDownloadPorMascara": filter,
-        }
 
         os.makedirs(output_path, exist_ok=True)
 
         if self._python_client:
-            self._download_results_python(parameters)
+            self._download_results_python(parameters)
+            self._logger.debug("Creating download.ok file")
+            with open(os.path.join(output_path, "download.ok"), "w") as f:
+                f.write("")
         else:
             # Download results using Console
             xml_content = create_case_xml(parameters)
@@ -969,7 +983,6 @@ class Client:
         parameters.update(additional_arguments)
 
         xml_input = create_case_xml(parameters)
-
         try:
             xml_output_str = portal_ws.service.despacharServico(
                 service, self.username, password_md5, xml_input
@@ -1098,19 +1111,23 @@ class Client:
             f"Uploading list of files to remote repository {repository_id.text}"
         )
 
-
-        upload_case_to_s3(
-            files=file_list,
-            repository_id=repository_id.text,
-            cluster_name=self.cluster["name"],
-            checksums=checksum_dictionary,
+        awsS3 = AWS(
             access=cloud_access.text if cloud_access is not None else None,
             secret=cloud_secret.text if cloud_secret is not None else None,
+            bucket_name=bucket_name.text if bucket_name is not None else None,
+            url=cloud_aws_url.text if cloud_aws_url is not None else None,
             session_token=cloud_session_token.text
             if cloud_session_token is not None
             else None,
-… (elided in source diff)
+            Logger=self._logger,
+        )
+
+        # TODO validate when no file has been sent at all
+        awsS3.upload_case(
+            files=file_list,
+            repository_id=repository_id.text,
+            cluster_name=self.cluster["name"],
+            checksums=checksum_dictionary,
             zip_compress=True,
         )
 
@@ -1168,7 +1185,7 @@ class Client:
         """
 
         repository_id = parameters.get("repositorioId")
-        download_filter = parameters.get("…
+        download_filter = parameters.get("filtroDownload")
        output_path = parameters.get("diretorioDestino")
 
         download_filter = (
@@ -1179,37 +1196,48 @@ class Client:
             "buscaCredenciasDownload", additional_arguments=parameters
         )
 
-… (elided in source diff)
+        cloud_access = credentials.find("./Parametro[@nome='cloudAccess']").text
+        cloud_secret = credentials.find("./Parametro[@nome='cloudSecret']").text
+        cloud_session_token = credentials.find(
+            "./Parametro[@nome='cloudSessionToken']"
+        ).text
+        cloud_url = credentials.find("./Parametro[@nome='cloudUrl']").text
         bucket_name = credentials.find("./Parametro[@nome='diretorioBase']").text
         bucket_name = bucket_name.replace("repository", "repository-download")
 
-        if …
+        if (
+            cloud_access is None
+            or cloud_secret is None
+            or cloud_session_token is None
+            or cloud_url is None
+        ):
             raise CloudError("Failed to retrieve credentials for downloading results.")
 
         file_list = self.list_download_files(repository_id)
-
         # filtering files to download
         if download_filter:
-            filtered_file_list = [
-… (elided in source diff)
-            ]
+            filtered_file_list = []
+            for file in file_list:
+                if re.match(download_filter, file["name"]):
+                    filtered_file_list.append(file["name"])
         else:
             filtered_file_list = [file["name"] for file in file_list]
 
         self._logger.info("Downloading results")
-
+        awsS3 = AWS(
+            access=cloud_access if cloud_access is not None else None,
+            secret=cloud_secret if cloud_secret is not None else None,
+            bucket_name=bucket_name if bucket_name is not None else None,
+            session_token=cloud_session_token
+            if cloud_session_token is not None
+            else None,
+            url=cloud_url if cloud_url is not None else None,
+            Logger=self._logger,
+        )
+
+        downloaded_list = awsS3.download_case(
             repository_id=parameters["repositorioId"],
             cluster_name=self.cluster["name"],
-            access=access,
-            secret=secret,
-            session_token=session_token,
-            bucket_name=bucket_name,
-            url=url,
             output_path=output_path,
             file_list=filtered_file_list,
         )
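
The download filter now differs by client: the Python client stores the pattern under "filtroDownload" and matches it with re.match in _download_results_python, while the Console path keeps the "filtroDownloadPorMascara" mask. A minimal sketch of how the Python-client pattern behaves, with made-up extension and file names (note the unescaped dot in ".*.{ext}", which matches any single character):

import re

extensions = ["csv", "log"]
explicit_files = ["sddp.out"]

# Mirrors the construction in the diff: ".*.{ext}" per extension, "|"-joined.
filter_elements = [f".*.{ext}" for ext in extensions]
filter_elements.extend(explicit_files)
download_filter = "|".join(filter_elements)  # ".*.csv|.*.log|sddp.out"

for name in ["results.csv", "run.log", "sddp.out", "notes.txt"]:
    keep = re.match(download_filter, name) is not None
    print(f"{name}: {'download' if keep else 'skip'}")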
psr/factory/__init__.py
CHANGED
psr/factory/api.py
CHANGED
@@ -140,6 +140,9 @@ def _bytes(value: str) -> int:
 class FactoryException(Exception):
     pass
 
+class FactoryLicenseError(Exception):
+    pass
+
 
 class LogLevel(enum.Enum):
     NOTSET = 0
@@ -194,6 +197,18 @@ def build_version() -> str:
     return _version_short()
 
 
+def check_license() -> Tuple[bool, str]:
+    """Returns True if license is valid and active."""
+    _check_loaded()
+    error = Error()
+    factorylib.lib.psrd_check_license(error.handler())
+    valid = error.code == 0
+    invalid = error.code == 15
+    if not valid and not invalid:
+        raise FactoryException("Error checking license: " + error.what)
+    return valid, error.what
+
+
 def get_log_level() -> LogLevel:
     """Get log level."""
     _check_loaded()
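
A minimal sketch of calling the new check_license helper. The module path psr.factory.api matches this wheel's layout and the 0/15 error-code split is taken from the body above, but the surrounding handling is illustrative and assumes the native library loads; note that the newly added FactoryLicenseError is not raised by check_license in the hunks shown.

from psr.factory.api import FactoryException, check_license

try:
    valid, message = check_license()
    if valid:
        print("License OK")
    else:
        # error code 15: license present but invalid/inactive
        print(f"License invalid: {message}")
except FactoryException as exc:
    # any other error code is re-raised as FactoryException
    print(f"License check failed: {exc}")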
psr/factory/factory.dll
CHANGED
Binary file
psr/factory/factory.pmd
CHANGED
@@ -5925,6 +5925,14 @@ DEFINE_MODEL MODL:SDDP_Transformer
 VECTOR DATE DateCostToFrom @addyear_chronological
 VECTOR REAL CostToFrom DIM(block) INDEX DateCostToFrom
 
+PARM INTEGER MaxSecondaryReserveUnit
+PARM INTEGER HasMaxSecondaryReserve
+VETOR DATE DataMaxSecondaryReserve @chronological @addyear_chronological
+VETOR REAL MaxSecondaryReserve DIM(block) INDEX DataMaxSecondaryReserve
+PARM INTEGER HasPriceSecondaryReserve
+VETOR DATE DataPriceSecondaryReserve @chronological @addyear_chronological
+VETOR REAL PriceSecondaryReserve DIM(block) INDEX DataPriceSecondaryReserve
+
 MERGE_MODEL MODL:SDDP_Asset
 END_MODEL
 //-----------------------------------------------
psr/factory/factorylib.py
CHANGED
@@ -29,6 +29,8 @@ def initialize():
     global lib
     lib = load_lib()
 
+    lib.psrd_check_license.restype = ctypes.c_int
+    lib.psrd_check_license.argtypes = [ctypes.c_void_p]
     lib.psrd_initialize.restype = ctypes.c_int
     lib.psrd_initialize.argtypes = [ctypes.c_char_p, ctypes.c_long, ctypes.c_void_p]
     lib.psrd_unload.restype = ctypes.c_int
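
The two added lines follow the module's existing ctypes prototype pattern: declaring restype and argtypes up front so calls are marshalled correctly instead of relying on ctypes defaults. A self-contained sketch of the same pattern against libc's strlen as a stand-in (psrd_check_license itself takes an opaque error-handle pointer, per api.py above; this example assumes a non-Windows host, even though the wheel targets win_amd64):

import ctypes
import ctypes.util

# Load the C runtime.
libc = ctypes.CDLL(ctypes.util.find_library("c"))

# Declare the prototype before calling, as factorylib.initialize() does.
libc.strlen.restype = ctypes.c_size_t
libc.strlen.argtypes = [ctypes.c_char_p]

print(libc.strlen(b"factory"))  # -> 7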
psr/factory/libcurl-x64.dll
CHANGED
Binary file
{psr_factory-5.0.0b20.dist-info → psr_factory-5.0.0b23.dist-info}/RECORD
CHANGED
@@ -2,8 +2,8 @@ psr/apps/__init__.py,sha256=frSq1WIy5vIdU21xJIGX7U3XoAZRj0pcQmFb-R00b7I,228
 psr/apps/apps.py,sha256=V8Ewht7P1I-3sSkV3dnbxbLjF2slxPjcmtzmVaLjiNY,6746
 psr/apps/version.py,sha256=vs459L6JsatAkUxna7BNG-vMCaXpO1Ye8c1bmkEx4U4,194
 psr/cloud/__init__.py,sha256=inZMwG7O9Fca9hg1BhqYObOYtTTJOkpuTIuXnkHJZkI,246
-psr/cloud/aws.py,sha256=…
-psr/cloud/cloud.py,sha256=…
+psr/cloud/aws.py,sha256=aq3yqDC_D1tplPICqa39pmMbyp_liRQ8B_Ubbl7q2Dw,7048
+psr/cloud/cloud.py,sha256=XnTYk5t27iTbbkfyQtD9gjIDUM2aNEly0-AHhaOpktw,60368
 psr/cloud/data.py,sha256=oDJyzcNsA7aAYi_qJKCUjCeGZvN-25E8KjZ-5RamNLE,4160
 psr/cloud/desktop.py,sha256=JFroCMEFV1Nz3has74n7OVrGCg2lS7Ev5bcjdw2hRxY,2980
 psr/cloud/log.py,sha256=Dvhz1enIWlFWeaRK7JAAuZVPfODgoEIRNcHEmbEliyQ,1366
@@ -16,13 +16,13 @@ psr/execqueue/config.py,sha256=3KVwASOgRlymOSPeabotgBdLVB5sPKnPQ9og2q3LQfw,1418
 psr/execqueue/db.py,sha256=sNr_StNEgZZQCKcyCWiB1WrQJIhE9UvLUxPA2tWiXGs,8498
 psr/execqueue/server.py,sha256=nW-Hi5zWHgPeLicASKJND7u6rz6eqwC16k91tUUQPxk,15741
 psr/execqueue/watcher.py,sha256=7dZZm9TiYVF7SdU0c_6Vq2_SZRobxgcspfBMzKFSsjQ,5637
-psr/factory/__init__.py,sha256=…
-psr/factory/api.py,sha256=…
-psr/factory/factory.dll,sha256=…
-psr/factory/factory.pmd,sha256=…
+psr/factory/__init__.py,sha256=RX2BHtGLpAKVg8cV_ZlxTGPzfClbln7dhEfnjCnU_E8,219
+psr/factory/api.py,sha256=a4zqV4LXjK3psylmRKmMd5Ke0Y7tmAG22BgR3xBfcSs,104265
+psr/factory/factory.dll,sha256=omq0WeZ3xy6BQGC3pV0CRX1GBy3QzhVVuc9tSf5cuoQ,18314064
+psr/factory/factory.pmd,sha256=ncDpoE2AnsygsIgjH0cCj9KetTPSTLvITpBogiTFX4o,250960
 psr/factory/factory.pmk,sha256=THhHxBKTBchru3fxTCos-pBAPJJnuug8T2dw0xniDfQ,580185
-psr/factory/factorylib.py,sha256=…
-psr/factory/libcurl-x64.dll,sha256=…
+psr/factory/factorylib.py,sha256=o5Irbw6k-yIOJVUtDu2YYqw2x16P2LmCdouImwSssdw,28290
+psr/factory/libcurl-x64.dll,sha256=2wmYqNoIpP8kkfgqUQLA2Q3xUizFi7ws5T1_QQGihA0,5317968
 psr/factory/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 psr/factory/samples/__init__.py,sha256=xxOch5Fokzjy571a6OHD87FWM17qKgvfcbr8xn-n36I,80
 psr/factory/samples/sddp_case01.py,sha256=eLhtOAS2STl9-H7Nr5VUG4ATO0bVcn-CJtCn3Rf-vpI,5044
@@ -33,8 +33,8 @@ psr/psrfcommon/tempfile.py,sha256=5S13wa2DCLYTUdwbLm_KMBRnDRJ0WDlu8GO2BmZoNdg,39
 psr/runner/__init__.py,sha256=kI9HDX-B_LMQJUHHylFHas2rNpWfNNa0pZXoIvX_Alw,230
 psr/runner/runner.py,sha256=hCVH62HAZK_M9YUiHQgqCkMevN17utegjfRIw49MdvM,27542
 psr/runner/version.py,sha256=mch2Y8anSXGMn9w72Z78PhSRhOyn55EwaoLAYhY4McE,194
-psr_factory-5.0.…
-psr_factory-5.0.…
-psr_factory-5.0.…
-psr_factory-5.0.…
-psr_factory-5.0.…
+psr_factory-5.0.0b23.dist-info/licenses/LICENSE.txt,sha256=N6mqZK2Ft3iXGHj-by_MHC_dJo9qwn0URjakEPys3H4,1089
+psr_factory-5.0.0b23.dist-info/METADATA,sha256=BK68yUoCrDJ-NBXBZaNRhlIPRdq9lFRupeJenmzb1Po,2333
+psr_factory-5.0.0b23.dist-info/WHEEL,sha256=ZjXRCNaQ9YSypEK2TE0LRB0sy2OVXSszb4Sx1XjM99k,97
+psr_factory-5.0.0b23.dist-info/top_level.txt,sha256=Jb393O96WQk3b5D1gMcrZBLKJJgZpzNjTPoldUi00ck,4
+psr_factory-5.0.0b23.dist-info/RECORD,,
{psr_factory-5.0.0b20.dist-info → psr_factory-5.0.0b23.dist-info}/WHEEL
File without changes
{psr_factory-5.0.0b20.dist-info → psr_factory-5.0.0b23.dist-info}/licenses/LICENSE.txt
File without changes
{psr_factory-5.0.0b20.dist-info → psr_factory-5.0.0b23.dist-info}/top_level.txt
File without changes