psr-factory 5.0.0b21-py3-none-win_amd64.whl → 5.0.0b24-py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
psr/cloud/aws.py CHANGED
@@ -7,250 +7,178 @@ import boto3
  from botocore.exceptions import ClientError


- def _get_region(url):
- """Extract the region from the S3 URL."""
- if url:
- parts = url.split(".")
- return parts[0]
- return None
-
-
- def upload_file_to_s3(
- s3_client, bucket_name, file_path, object_name=None, extra_args=None
- ):
- """Upload a file to an S3 bucket using a provided S3 client.
-
- :param s3_client: Initialized S3 client.
- :param bucket_name: Name of the S3 bucket.
- :param file_path: Path to the file to upload.
- :param object_name: S3 object name. If not specified, file_path's basename is used.
- :param extra_args: A dictionary of extra arguments to pass to S3's upload_file.
- :return: True if file was uploaded, else False.
- """
- if object_name is None:
- object_name = os.path.basename(file_path)
-
- try:
- s3_client.upload_file(file_path, bucket_name, object_name, ExtraArgs=extra_args)
- return True
- except ClientError as e:
- print(f"Error uploading file: {e}")
- return False
-
-
- def upload_case_to_s3(
- files: List[str],
- repository_id: str,
- cluster_name: str,
- checksums: Optional[Dict[str, str]] = None,
- access: Optional[str] = None,
- secret: Optional[str] = None,
- session_token: Optional[str] = None,
- bucket_name: Optional[str] = None,
- url: Optional[str] = None,
- zip_compress: bool = False,
- compress_zip_name: str = None,
- ):
- """Upload files to an S3 bucket."""
-
- region = _get_region(url)
-
- if not region or not access or not secret or not session_token or not bucket_name:
- raise ValueError("Unable to set up AWS connection.")
-
- s3_client = boto3.client(
- "s3",
- aws_access_key_id=access,
- aws_secret_access_key=secret,
- aws_session_token=session_token,
- region_name=region,
- )
-
- # Base metadata, common for both zip and individual files
- base_metadata: Dict[str, str] = {
- "upload": str(True).lower(),
- "user-agent": "aws-fsx-lustre",
- "file-owner": "537",
- "file-group": "500",
- "file-permissions": "100777",
- }
-
- if zip_compress and not compress_zip_name:
- compress_zip_name = str(repository_id)
-
- if zip_compress:
- # Create a temporary zip file
- with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as tmp_zip_file:
- zip_path = tmp_zip_file.name
- tmp_zip_file.close() # Close the file handle so zipfile can open it
-
+ class AWS:
+ def __init__(
+ self,
+ access: str,
+ secret: str,
+ session_token: str,
+ url: str,
+ bucket_name: str,
+ Logger=None,
+ ):
+ self.s3_client = boto3.client(
+ "s3",
+ aws_access_key_id=access,
+ aws_secret_access_key=secret,
+ aws_session_token=session_token,
+ region_name=AWS.get_region(url),
+ )
+ self.bucket_name = bucket_name
+ self.logger = Logger
+
+ @staticmethod
+ def get_region(url: Optional[str]) -> Optional[str]:
+ """Extract the region from the S3 URL."""
+ if url:
+ parts = url.split(".")
+ return parts[0]
+ return None
+
+ def upload_file(
+ self,
+ file_path: str,
+ object_name: Optional[str] = None,
+ extra_args: Optional[dict] = None,
+ ) -> bool:
+ """Upload a file to an S3 bucket using the AWS instance's S3 client."""
+ if object_name is None:
+ object_name = os.path.basename(file_path)
  try:
- with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
- for file_path in files:
- # Add file to zip, using only the basename inside the zip
- zipf.write(file_path, arcname=os.path.basename(file_path))
-
- # Construct object name for the zip file
- object_name = f"{repository_id}/uploaded/{compress_zip_name}.zip"
-
- # For zip files, we use the base metadata without a specific checksum
- # (as checksums are per-file in the original design)
- extra_args = {
- "Metadata": base_metadata.copy()
- } # Use a copy to avoid modifying base_metadata
-
- if not upload_file_to_s3(
- s3_client, bucket_name, zip_path, object_name, extra_args=extra_args
- ):
- raise ValueError(
- f"Failed to upload zip file {zip_path} to S3 bucket {bucket_name}."
- )
-
- finally:
- # Clean up the temporary zip file
- if os.path.exists(zip_path):
- os.unlink(zip_path)
-
- else:
- # Original logic: upload files individually
- for file_path in files:
- file_basename = os.path.basename(file_path)
- object_name = f"{repository_id}/uploaded/{file_basename}"
-
- current_file_metadata = base_metadata.copy()
- if checksums:
- current_file_metadata["checksum"] = checksums.get(file_basename, "")
-
- extra_args = {"Metadata": current_file_metadata}
-
- if not upload_file_to_s3(
- s3_client, bucket_name, file_path, object_name, extra_args=extra_args
- ):
- raise ValueError(
- f"Failed to upload file {file_path} to S3 bucket {bucket_name}."
- )
-
- # Always upload .metadata files if the source 'files' list is provided
- if files:
- # Assuming all files in the 'files' list share the same parent directory,
- # which is the case data directory.
- data_directory = os.path.dirname(files[0])
- metadata_dir_local_path = os.path.join(data_directory, ".metadata")
-
- if os.path.isdir(metadata_dir_local_path):
- # Iterate through the original list of files to find corresponding metadata files
- for original_file_path in files:
- original_file_basename = os.path.basename(original_file_path)
- local_metadata_file_path = os.path.join(
- metadata_dir_local_path, original_file_basename
- )
-
- if os.path.isfile(local_metadata_file_path):
- # S3 object name for the metadata file (e.g., repository_id/.metadata/original_file_basename)
- s3_metadata_object_name = (
- f"{repository_id}/.metadata/{original_file_basename}"
+ self.s3_client.upload_file(
+ file_path, self.bucket_name, object_name, ExtraArgs=extra_args
+ )
+ return True
+ except ClientError as e:
+ self.logger.error(f"Error uploading file: {e}")
+ return False
+
+ def upload_case(
+ self,
+ files: List[str],
+ repository_id: str,
+ cluster_name: str,
+ checksums: Optional[Dict[str, str]] = None,
+ zip_compress: bool = False,
+ compress_zip_name: str = None,
+ ):
+ """Upload files to an S3 bucket."""
+ base_metadata: Dict[str, str] = {
+ "upload": str(True).lower(),
+ "user-agent": "aws-fsx-lustre",
+ "file-owner": "537",
+ "file-group": "500",
+ "file-permissions": "100777",
+ }
+
+ if zip_compress and not compress_zip_name:
+ compress_zip_name = str(repository_id)
+
+ if zip_compress:
+ # Create a temporary zip file
+ with tempfile.NamedTemporaryFile(
+ suffix=".zip", delete=False
+ ) as tmp_zip_file:
+ zip_path = tmp_zip_file.name
+ tmp_zip_file.close() # Close the file handle so zipfile can open it
+
+ try:
+ with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
+ for file_path in files:
+ zipf.write(file_path, arcname=os.path.basename(file_path))
+
+ object_name = f"{repository_id}/uploaded/{compress_zip_name}.zip"
+ extra_args = {"Metadata": base_metadata.copy()}
+
+ if not self.upload_file(zip_path, object_name, extra_args=extra_args):
+ raise ValueError(
+ f"Failed to upload zip file {zip_path} to S3 bucket {self.bucket_name}."
+ )
+ finally:
+ if os.path.exists(zip_path):
+ os.unlink(zip_path)
+ else:
+ for file_path in files:
+ file_basename = os.path.basename(file_path)
+ object_name = f"{repository_id}/uploaded/{file_basename}"
+
+ current_file_metadata = base_metadata.copy()
+ if checksums:
+ current_file_metadata["checksum"] = checksums.get(file_basename, "")
+
+ extra_args = {"Metadata": current_file_metadata}
+
+ if not self.upload_file(file_path, object_name, extra_args=extra_args):
+ raise ValueError(
+ f"Failed to upload file {file_path} to S3 bucket {self.bucket_name}."
  )
- extra_args = {"Metadata": base_metadata.copy()}
- if not upload_file_to_s3(
- s3_client,
- bucket_name,
- local_metadata_file_path,
- s3_metadata_object_name,
- extra_args=extra_args,
- ):
- raise ValueError(
- f"Failed to upload metadata file {local_metadata_file_path} to S3 bucket {bucket_name}."
- )
-
-
- def _download_s3_object(
- s3_client, bucket_name: str, s3_object_key: str, local_file_path: str
- ) -> bool:
- """
- Downloads a single object from S3 to a local file path.
-
- :param s3_client: Initialized S3 client.
- :param bucket_name: Name of the S3 bucket.
- :param s3_object_key: The key of the object in S3.
- :param local_file_path: The local path where the file should be saved.
- :return: True if download was successful, False otherwise.
- """
-
- try:
- s3_client.download_file(bucket_name, s3_object_key, local_file_path)
- return True
- except ClientError as e:
- print(f"ERROR: Failed to download {s3_object_key} from S3: {e}")
- return False
-
-
- def download_case_from_s3(
- repository_id: str,
- cluster_name: str, # Kept for consistency with caller, though not used directly in S3 ops
- access: str,
- secret: str,
- session_token: str,
- bucket_name: str,
- url: str, # S3 endpoint URL, used by _get_region
- output_path: str,
- file_list: List[str],
- ) -> List[str]:
- """
- Downloads files from an S3 bucket for a given case repository.
-
- It iterates through the provided `file_list`, downloads each specified file
- from the S3 path `{repository_id}/{file_in_list}`, preserving its relative path
- under `output_path`. It then checks if each downloaded file is gzipped,
- decompresses it if necessary, and returns a list of basenames of the
- final downloaded (and potentially decompressed) files.
-
- :param repository_id: The ID of the repository in S3.
- :param cluster_name: Name of the cluster (for context, not used in S3 calls).
- :param access: AWS access key ID.
- :param secret: AWS secret access key.
- :param session_token: AWS session token.
- :param bucket_name: Name of the S3 bucket.
- :param url: S3 service URL (used to determine region via _get_region).
- :param output_path: Local directory where files will be downloaded.
- :param file_list: A list of file names (basenames) to be downloaded.
- :return: A list of basenames of the downloaded (and decompressed) files.
- :raises ValueError: If S3 connection parameters are missing or filter is invalid.
- :raises RuntimeError: If S3 operations fail.
- """
- region = _get_region(url)
- if not all([region, access, secret, session_token, bucket_name]):
- # TODO: Replace print with proper logging
- print(
- "ERROR: Missing S3 connection parameters (region, access, secret, token, or bucket name)."
- )
- raise ValueError("Missing S3 connection parameters.")
-
- s3_client = boto3.client(
- "s3",
- aws_access_key_id=access,
- aws_secret_access_key=secret,
- aws_session_token=session_token,
- region_name=region,
- )
-
- downloaded_files: List[str] = []

- try:
- for file_name in file_list:
- # Construct the full S3 object key
- s3_object_key = f"{repository_id}/{file_name}"
+ # Always upload .metadata files if the source 'files' list is provided
+ if files:
+ data_directory = os.path.dirname(files[0])
+ metadata_dir_local_path = os.path.join(data_directory, ".metadata")

- local_file_path = os.path.join(output_path, file_name)
- if _download_s3_object(
- s3_client, bucket_name, s3_object_key, local_file_path
- ):
- downloaded_files.append(os.path.basename(local_file_path))
+ if os.path.isdir(metadata_dir_local_path):
+ for original_file_path in files:
+ original_file_basename = os.path.basename(original_file_path)
+ local_metadata_file_path = os.path.join(
+ metadata_dir_local_path, original_file_basename
+ )

- except ClientError as e:
- print(f"ERROR: S3 ClientError during download: {e}")
- raise RuntimeError(f"Failed to download files from S3: {e}")
- except Exception as e:
- print(f"ERROR: An unexpected error occurred during download: {e}")
- raise RuntimeError(f"An unexpected error occurred during S3 download: {e}")
+ if os.path.isfile(local_metadata_file_path):
+ s3_metadata_object_name = (
+ f"{repository_id}/.metadata/{original_file_basename}"
+ )
+ extra_args = {"Metadata": base_metadata.copy()}
+ if not self.upload_file(
+ local_metadata_file_path,
+ s3_metadata_object_name,
+ extra_args=extra_args,
+ ):
+ raise ValueError(
+ f"Failed to upload metadata file {local_metadata_file_path} to S3 bucket {self.bucket_name}."
+ )
+
+ def download_file(self, s3_object_key: str, local_file_path: str) -> bool:
+ """Downloads a single object from S3 to a local file path."""
+ try:
+ self.s3_client.download_file(
+ self.bucket_name, s3_object_key, local_file_path
+ )
+ return True
+ except ClientError as e:
+ self.logger.error(f"ERROR: Failed to download {s3_object_key} from S3: {e}")
+ return False
+
+ def download_case(
+ self,
+ repository_id: str,
+ cluster_name: str,
+ output_path: str,
+ file_list: List[str],
+ ) -> List[str]:
+ """
+ Downloads files from an S3 bucket for a given case repository.
+ """
+ downloaded_files: List[str] = []

- return downloaded_files
+ try:
+ for file_name in file_list:
+ s3_object_key = f"{repository_id}/{file_name}"
+ local_file_path = os.path.join(output_path, file_name)
+ if self.logger:
+ self.logger.info(
+ f"Downloading {s3_object_key} to {local_file_path}"
+ )
+ if self.download_file(s3_object_key, local_file_path):
+ downloaded_files.append(os.path.basename(local_file_path))
+ except ClientError as e:
+ self.logger.error(f"ERROR: S3 ClientError during download: {e}")
+ raise RuntimeError(f"Failed to download files from S3: {e}")
+ except Exception as e:
+ self.logger.error(
+ f"ERROR: An unexpected error occurred during download: {e}"
+ )
+ raise RuntimeError(f"An unexpected error occurred during S3 download: {e}")
+
+ return downloaded_files
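
The removed module-level helpers (upload_case_to_s3, download_case_from_s3) are replaced by the AWS class above, which keeps the boto3 client, bucket name, and logger as instance state. The following is a minimal, illustrative sketch of how that class would be driven; it is not taken from the package, and the credentials, endpoint URL, bucket name, repository id, cluster name, and file paths are all placeholders.

# Illustrative sketch only (not part of the package): driving the new
# psr.cloud.aws.AWS class as defined in the diff above. All credential
# values, the bucket, the URL, and the paths below are placeholders.
import logging
import os

from psr.cloud.aws import AWS

logging.basicConfig(level=logging.INFO)

aws = AWS(
    access="PLACEHOLDER_ACCESS_KEY_ID",
    secret="PLACEHOLDER_SECRET_KEY",
    session_token="PLACEHOLDER_SESSION_TOKEN",
    url="us-east-1.amazonaws.com",  # get_region() returns the first dot-separated segment: "us-east-1"
    bucket_name="placeholder-bucket",
    Logger=logging.getLogger("psr.cloud"),
)

# Upload a case as a single zip archive named after the repository id.
aws.upload_case(
    files=["/tmp/case/example_input.dat"],
    repository_id="12345",
    cluster_name="example-cluster",
    zip_compress=True,
)

# Download selected result files into an existing local directory.
os.makedirs("/tmp/results", exist_ok=True)
downloaded = aws.download_case(
    repository_id="12345",
    cluster_name="example-cluster",
    output_path="/tmp/results",
    file_list=["example_output.csv"],
)
print(downloaded)

One detail worth noting: upload_file and download_file call self.logger.error unconditionally in their except blocks, so constructing AWS without a Logger appears to risk an AttributeError when an S3 call fails; download_case guards its info logging with "if self.logger:" but its error paths do not.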
psr/cloud/cloud.py CHANGED
@@ -23,7 +23,7 @@ import pefile
  import zeep
  from filelock import FileLock

- from .aws import download_case_from_s3, upload_case_to_s3
+ from .aws import AWS
  from .data import Case, CloudError, CloudInputError
  from .desktop import import_case
  from .log import enable_log_timestamp, get_logger
@@ -647,7 +647,8 @@ class Client:
  xml_content = create_case_xml(parameters)

  if dry_run:
- return xml_content
+ self._logger.info(f"Dry run:\n{xml_content}")
+ return 0

  if self._python_client:
  case_id = self._execute_case(parameters)
@@ -758,6 +759,21 @@ class Client:
  files: Optional[List[str]] = None,
  extensions: Optional[List[str]] = None,
  ) -> None:
+ case = self.get_case(case_id)
+ output_path = _handle_relative_path(output_path)
+ parameters = {
+ "urlServico": self.cluster["url"],
+ "usuario": self.username,
+ "senha": self.__password,
+ "idioma": "3",
+ "_cluster": self.cluster["name"],
+ "modelo": case.program,
+ "comando": "download",
+ "diretorioDestino": output_path,
+ "repositorioId": str(case_id),
+ }
+
+ # Handling download filter
  filter = ""

  if not extensions and not files:
@@ -767,33 +783,31 @@

  if extensions:
  Client._validate_extensions(extensions)
- filter_elements.extend([f"*.{ext}" for ext in extensions])
+ filter_elements.extend([f".*.{ext}" for ext in extensions])

  if files:
  filter_elements.extend(files)

- filter += "|".join(filter_elements)
+ if self._python_client:
+ # Convert mask to regex for python_client
+ # regex_parts = []
+ # for part in filter_elements:
+ # regex_parts.append(r".*" + re.escape(part[1:]) + r"$" if part.startswith("*") else r"^" + re.escape(part) + r"$")
+ filter = "|".join(filter_elements)
+ parameters["filtroDownload"] = filter
+ else:
+ filter = "|".join(filter_elements)
+ parameters["filtroDownloadPorMascara"] = filter

  self._logger.info("Download filter: " + filter)
- case = self.get_case(case_id)
- output_path = _handle_relative_path(output_path)
- parameters = {
- "urlServico": self.cluster["url"],
- "usuario": self.username,
- "senha": self.__password,
- "idioma": "3",
- "_cluster": self.cluster["name"],
- "modelo": case.program,
- "comando": "download",
- "diretorioDestino": output_path,
- "repositorioId": str(case_id),
- "filtroDownloadPorMascara": filter,
- }

  os.makedirs(output_path, exist_ok=True)

  if self._python_client:
- self._download_results_python(parameters) ## Not implemented yet
+ self._download_results_python(parameters)
+ self._logger.debug("Creating download.ok file")
+ with open(os.path.join(output_path, "download.ok"), "w") as f:
+ f.write("")
  else:
  # Download results using Console
  xml_content = create_case_xml(parameters)
@@ -969,7 +983,6 @@ class Client:
  parameters.update(additional_arguments)

  xml_input = create_case_xml(parameters)
-
  try:
  xml_output_str = portal_ws.service.despacharServico(
  service, self.username, password_md5, xml_input
@@ -1098,19 +1111,23 @@ class Client:
  f"Uploading list of files to remote repository {repository_id.text}"
  )

- # Uploading files to S3
- upload_case_to_s3(
- files=file_list,
- repository_id=repository_id.text,
- cluster_name=self.cluster["name"],
- checksums=checksum_dictionary,
+ awsS3 = AWS(
  access=cloud_access.text if cloud_access is not None else None,
  secret=cloud_secret.text if cloud_secret is not None else None,
+ bucket_name=bucket_name.text if bucket_name is not None else None,
+ url=cloud_aws_url.text if cloud_aws_url is not None else None,
  session_token=cloud_session_token.text
  if cloud_session_token is not None
  else None,
- bucket_name=bucket_name.text if bucket_name is not None else None,
- url=cloud_aws_url.text if cloud_aws_url is not None else None,
+ Logger=self._logger,
+ )
+
+ # TODO validate when no file has been sent at all
+ awsS3.upload_case(
+ files=file_list,
+ repository_id=repository_id.text,
+ cluster_name=self.cluster["name"],
+ checksums=checksum_dictionary,
  zip_compress=True,
  )

@@ -1168,7 +1185,7 @@ class Client:
  """

  repository_id = parameters.get("repositorioId")
- download_filter = parameters.get("filtroDownloadPorMascara")
+ download_filter = parameters.get("filtroDownload")
  output_path = parameters.get("diretorioDestino")

  download_filter = (
@@ -1179,37 +1196,48 @@ class Client:
  "buscaCredenciasDownload", additional_arguments=parameters
  )

- access = credentials.find("./Parametro[@nome='cloudAccess']").text
- secret = credentials.find("./Parametro[@nome='cloudSecret']").text
- session_token = credentials.find("./Parametro[@nome='cloudSessionToken']").text
- url = credentials.find("./Parametro[@nome='cloudUrl']").text
+ cloud_access = credentials.find("./Parametro[@nome='cloudAccess']").text
+ cloud_secret = credentials.find("./Parametro[@nome='cloudSecret']").text
+ cloud_session_token = credentials.find(
+ "./Parametro[@nome='cloudSessionToken']"
+ ).text
+ cloud_url = credentials.find("./Parametro[@nome='cloudUrl']").text
  bucket_name = credentials.find("./Parametro[@nome='diretorioBase']").text
  bucket_name = bucket_name.replace("repository", "repository-download")

- if access is None or secret is None or session_token is None or url is None:
+ if (
+ cloud_access is None
+ or cloud_secret is None
+ or cloud_session_token is None
+ or cloud_url is None
+ ):
  raise CloudError("Failed to retrieve credentials for downloading results.")

  file_list = self.list_download_files(repository_id)
-
  # filtering files to download
  if download_filter:
- filtered_file_list = [
- file["name"]
- for file in file_list
- if re.match(download_filter, file["name"])
- ]
+ filtered_file_list = []
+ for file in file_list:
+ if re.match(download_filter, file["name"]):
+ filtered_file_list.append(file["name"])
  else:
  filtered_file_list = [file["name"] for file in file_list]

  self._logger.info("Downloading results")
- downloaded_list = download_case_from_s3(
+ awsS3 = AWS(
+ access=cloud_access if cloud_access is not None else None,
+ secret=cloud_secret if cloud_secret is not None else None,
+ bucket_name=bucket_name if bucket_name is not None else None,
+ session_token=cloud_session_token
+ if cloud_session_token is not None
+ else None,
+ url=cloud_url if cloud_url is not None else None,
+ Logger=self._logger,
+ )
+
+ downloaded_list = awsS3.download_case(
  repository_id=parameters["repositorioId"],
  cluster_name=self.cluster["name"],
- access=access,
- secret=secret,
- session_token=session_token,
- bucket_name=bucket_name,
- url=url,
  output_path=output_path,
  file_list=filtered_file_list,
  )
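
On the filter handling above: when the Python client is used, extension masks are emitted as regex-style elements (".*.{ext}"), joined with "|", stored under "filtroDownload", and later applied with re.match in _download_results_python; the Console path keeps the original mask form under "filtroDownloadPorMascara". The short sketch below, with made-up extensions and file names, illustrates how such a joined pattern behaves under re.match; it is not code from the package.

# Illustrative sketch (not from the package): behavior of the regex-style
# download filter built for the Python client. Extensions and file names
# here are invented for the example.
import re

extensions = ["csv", "hdr"]
files = ["specific_output.dat"]

filter_elements = [f".*.{ext}" for ext in extensions]
filter_elements.extend(files)
download_filter = "|".join(filter_elements)  # ".*.csv|.*.hdr|specific_output.dat"

available = ["demand.csv", "objcop.hdr", "specific_output.dat", "notes.txt"]
selected = [name for name in available if re.match(download_filter, name)]
print(selected)  # ['demand.csv', 'objcop.hdr', 'specific_output.dat']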
psr/factory/__init__.py CHANGED
@@ -2,6 +2,6 @@
  # Unauthorized copying of this file, via any medium is strictly prohibited
  # Proprietary and confidential

- __version__ = "5.0.0b21"
+ __version__ = "5.0.0b24"

  from .api import *
psr/factory/factory.dll CHANGED
Binary file
psr/factory/factory.pmd CHANGED
@@ -5925,6 +5925,14 @@ DEFINE_MODEL MODL:SDDP_Transformer
  VECTOR DATE DateCostToFrom @addyear_chronological
  VECTOR REAL CostToFrom DIM(block) INDEX DateCostToFrom

+ PARM INTEGER MaxSecondaryReserveUnit
+ PARM INTEGER HasMaxSecondaryReserve
+ VETOR DATE DataMaxSecondaryReserve @chronological @addyear_chronological
+ VETOR REAL MaxSecondaryReserve DIM(block) INDEX DataMaxSecondaryReserve
+ PARM INTEGER HasPriceSecondaryReserve
+ VETOR DATE DataPriceSecondaryReserve @chronological @addyear_chronological
+ VETOR REAL PriceSecondaryReserve DIM(block) INDEX DataPriceSecondaryReserve
+
  MERGE_MODEL MODL:SDDP_Asset
  END_MODEL
  //-----------------------------------------------
psr_factory-5.0.0b21.dist-info/METADATA → psr_factory-5.0.0b24.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: psr-factory
- Version: 5.0.0b21
+ Version: 5.0.0b24
  Summary: PSR database management module.
  Author-email: "PSR Inc." <psrfactory@psr-inc.com>
  License-Expression: MIT
psr_factory-5.0.0b21.dist-info/RECORD → psr_factory-5.0.0b24.dist-info/RECORD CHANGED
@@ -2,8 +2,8 @@ psr/apps/__init__.py,sha256=frSq1WIy5vIdU21xJIGX7U3XoAZRj0pcQmFb-R00b7I,228
  psr/apps/apps.py,sha256=V8Ewht7P1I-3sSkV3dnbxbLjF2slxPjcmtzmVaLjiNY,6746
  psr/apps/version.py,sha256=vs459L6JsatAkUxna7BNG-vMCaXpO1Ye8c1bmkEx4U4,194
  psr/cloud/__init__.py,sha256=inZMwG7O9Fca9hg1BhqYObOYtTTJOkpuTIuXnkHJZkI,246
- psr/cloud/aws.py,sha256=ro8kBNVxpGDXgZ5haceqX-MAD-0F5KFDJJ4M6rRvwS8,9915
- psr/cloud/cloud.py,sha256=gLwVL4aJITo75F4x2k53TJx0-giBLF614p3t3NdfD9o,59206
+ psr/cloud/aws.py,sha256=aq3yqDC_D1tplPICqa39pmMbyp_liRQ8B_Ubbl7q2Dw,7048
+ psr/cloud/cloud.py,sha256=XnTYk5t27iTbbkfyQtD9gjIDUM2aNEly0-AHhaOpktw,60368
  psr/cloud/data.py,sha256=oDJyzcNsA7aAYi_qJKCUjCeGZvN-25E8KjZ-5RamNLE,4160
  psr/cloud/desktop.py,sha256=JFroCMEFV1Nz3has74n7OVrGCg2lS7Ev5bcjdw2hRxY,2980
  psr/cloud/log.py,sha256=Dvhz1enIWlFWeaRK7JAAuZVPfODgoEIRNcHEmbEliyQ,1366
@@ -16,10 +16,10 @@ psr/execqueue/config.py,sha256=3KVwASOgRlymOSPeabotgBdLVB5sPKnPQ9og2q3LQfw,1418
  psr/execqueue/db.py,sha256=sNr_StNEgZZQCKcyCWiB1WrQJIhE9UvLUxPA2tWiXGs,8498
  psr/execqueue/server.py,sha256=nW-Hi5zWHgPeLicASKJND7u6rz6eqwC16k91tUUQPxk,15741
  psr/execqueue/watcher.py,sha256=7dZZm9TiYVF7SdU0c_6Vq2_SZRobxgcspfBMzKFSsjQ,5637
- psr/factory/__init__.py,sha256=8piBlYuDWX7cfD4Smn_7jzq2sysPynLfDhgEaRhc_3A,219
+ psr/factory/__init__.py,sha256=kY3IBNXaeyQ9JHV8eHprnFlcl0-RbJnoV3IwQyMMqy4,219
  psr/factory/api.py,sha256=a4zqV4LXjK3psylmRKmMd5Ke0Y7tmAG22BgR3xBfcSs,104265
- psr/factory/factory.dll,sha256=4U1WUtVdlmHEa5JzE6_xFfBVc9eCMSBy3yBTUZL6o0c,18301440
- psr/factory/factory.pmd,sha256=HryDY8T6vAV5r0W4ytKNPiIYcdn4LexrsVZ5qFYBZ-g,250535
+ psr/factory/factory.dll,sha256=rDAsaX20jxOUhrKaWs9OORH1GnNRTXKsDlCDDpg2pzQ,18306560
+ psr/factory/factory.pmd,sha256=ncDpoE2AnsygsIgjH0cCj9KetTPSTLvITpBogiTFX4o,250960
  psr/factory/factory.pmk,sha256=THhHxBKTBchru3fxTCos-pBAPJJnuug8T2dw0xniDfQ,580185
  psr/factory/factorylib.py,sha256=o5Irbw6k-yIOJVUtDu2YYqw2x16P2LmCdouImwSssdw,28290
  psr/factory/libcurl-x64.dll,sha256=6WGBmqX4q_eD8Vc0E2VpCvVrFV3W7TQoaKqSdbhXBu0,5313096
@@ -33,8 +33,8 @@ psr/psrfcommon/tempfile.py,sha256=5S13wa2DCLYTUdwbLm_KMBRnDRJ0WDlu8GO2BmZoNdg,39
  psr/runner/__init__.py,sha256=kI9HDX-B_LMQJUHHylFHas2rNpWfNNa0pZXoIvX_Alw,230
  psr/runner/runner.py,sha256=hCVH62HAZK_M9YUiHQgqCkMevN17utegjfRIw49MdvM,27542
  psr/runner/version.py,sha256=mch2Y8anSXGMn9w72Z78PhSRhOyn55EwaoLAYhY4McE,194
- psr_factory-5.0.0b21.dist-info/licenses/LICENSE.txt,sha256=N6mqZK2Ft3iXGHj-by_MHC_dJo9qwn0URjakEPys3H4,1089
- psr_factory-5.0.0b21.dist-info/METADATA,sha256=728tb5owfD70z5HJ5CFCj5f3mHWVZLjtjJnw1USMbEQ,2333
- psr_factory-5.0.0b21.dist-info/WHEEL,sha256=ZjXRCNaQ9YSypEK2TE0LRB0sy2OVXSszb4Sx1XjM99k,97
- psr_factory-5.0.0b21.dist-info/top_level.txt,sha256=Jb393O96WQk3b5D1gMcrZBLKJJgZpzNjTPoldUi00ck,4
- psr_factory-5.0.0b21.dist-info/RECORD,,
+ psr_factory-5.0.0b24.dist-info/licenses/LICENSE.txt,sha256=N6mqZK2Ft3iXGHj-by_MHC_dJo9qwn0URjakEPys3H4,1089
+ psr_factory-5.0.0b24.dist-info/METADATA,sha256=qzWqwL7Qi-eDFqWplElyX_yOLKWeUMiR97xSu6UFfX0,2333
+ psr_factory-5.0.0b24.dist-info/WHEEL,sha256=ZjXRCNaQ9YSypEK2TE0LRB0sy2OVXSszb4Sx1XjM99k,97
+ psr_factory-5.0.0b24.dist-info/top_level.txt,sha256=Jb393O96WQk3b5D1gMcrZBLKJJgZpzNjTPoldUi00ck,4
+ psr_factory-5.0.0b24.dist-info/RECORD,,