psr-factory 5.0.0b1__py3-none-win_amd64.whl → 5.0.0b2__py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
psr/cloud/aws.py ADDED
@@ -0,0 +1,256 @@
+ import os
+ import tempfile
+ import zipfile
+ from typing import Dict, List, Optional
+
+ import boto3
+ from botocore.exceptions import ClientError
+
+
+ def _get_region(url):
+     """Extract the region from the S3 URL."""
+     if url:
+         parts = url.split(".")
+         return parts[0]
+     return None
+
+
+ def upload_file_to_s3(
+     s3_client, bucket_name, file_path, object_name=None, extra_args=None
+ ):
+     """Upload a file to an S3 bucket using a provided S3 client.
+
+     :param s3_client: Initialized S3 client.
+     :param bucket_name: Name of the S3 bucket.
+     :param file_path: Path to the file to upload.
+     :param object_name: S3 object name. If not specified, file_path's basename is used.
+     :param extra_args: A dictionary of extra arguments to pass to S3's upload_file.
+     :return: True if file was uploaded, else False.
+     """
+     if object_name is None:
+         object_name = os.path.basename(file_path)
+
+     try:
+         s3_client.upload_file(file_path, bucket_name, object_name, ExtraArgs=extra_args)
+         return True
+     except ClientError as e:
+         print(f"Error uploading file: {e}")
+         return False
+
+
+ def upload_case_to_s3(
+     files: List[str],
+     repository_id: str,
+     cluster_name: str,
+     checksums: Optional[Dict[str, str]] = None,
+     access: Optional[str] = None,
+     secret: Optional[str] = None,
+     session_token: Optional[str] = None,
+     bucket_name: Optional[str] = None,
+     url: Optional[str] = None,
+     zip_compress: bool = False,
+     compress_zip_name: Optional[str] = None,
+ ):
+     """Upload files to an S3 bucket."""
+
+     region = _get_region(url)
+
+     if not region or not access or not secret or not session_token or not bucket_name:
+         raise ValueError("Unable to set up AWS connection.")
+
+     s3_client = boto3.client(
+         "s3",
+         aws_access_key_id=access,
+         aws_secret_access_key=secret,
+         aws_session_token=session_token,
+         region_name=region,
+     )
+
+     # Base metadata, common for both zip and individual files
+     base_metadata: Dict[str, str] = {
+         "upload": str(True).lower(),
+         "user-agent": "aws-fsx-lustre",
+         "file-owner": "537",
+         "file-group": "500",
+         "file-permissions": "100777",
+     }
+
+     if zip_compress and not compress_zip_name:
+         compress_zip_name = str(repository_id)
+
+     if zip_compress:
+         # Create a temporary zip file
+         with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as tmp_zip_file:
+             zip_path = tmp_zip_file.name
+             tmp_zip_file.close()  # Close the file handle so zipfile can open it
+
+         try:
+             with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
+                 for file_path in files:
+                     # Add file to zip, using only the basename inside the zip
+                     zipf.write(file_path, arcname=os.path.basename(file_path))
+
+             # Construct object name for the zip file
+             object_name = f"{repository_id}/uploaded/{compress_zip_name}.zip"
+
+             # For zip files, we use the base metadata without a specific checksum
+             # (as checksums are per-file in the original design)
+             extra_args = {
+                 "Metadata": base_metadata.copy()
+             }  # Use a copy to avoid modifying base_metadata
+
+             if not upload_file_to_s3(
+                 s3_client, bucket_name, zip_path, object_name, extra_args=extra_args
+             ):
+                 raise ValueError(
+                     f"Failed to upload zip file {zip_path} to S3 bucket {bucket_name}."
+                 )
+
+         finally:
+             # Clean up the temporary zip file
+             if os.path.exists(zip_path):
+                 os.unlink(zip_path)
+
+     else:
+         # Original logic: upload files individually
+         for file_path in files:
+             file_basename = os.path.basename(file_path)
+             object_name = f"{repository_id}/uploaded/{file_basename}"
+
+             current_file_metadata = base_metadata.copy()
+             if checksums:
+                 current_file_metadata["checksum"] = checksums.get(file_basename, "")
+
+             extra_args = {"Metadata": current_file_metadata}
+
+             if not upload_file_to_s3(
+                 s3_client, bucket_name, file_path, object_name, extra_args=extra_args
+             ):
+                 raise ValueError(
+                     f"Failed to upload file {file_path} to S3 bucket {bucket_name}."
+                 )
+
+     # Always upload .metadata files if the source 'files' list is provided
+     if files:
+         # Assuming all files in the 'files' list share the same parent directory,
+         # which is the case data directory.
+         data_directory = os.path.dirname(files[0])
+         metadata_dir_local_path = os.path.join(data_directory, ".metadata")
+
+         if os.path.isdir(metadata_dir_local_path):
+             # Iterate through the original list of files to find corresponding metadata files
+             for original_file_path in files:
+                 original_file_basename = os.path.basename(original_file_path)
+                 local_metadata_file_path = os.path.join(
+                     metadata_dir_local_path, original_file_basename
+                 )
+
+                 if os.path.isfile(local_metadata_file_path):
+                     # S3 object name for the metadata file (e.g., repository_id/.metadata/original_file_basename)
+                     s3_metadata_object_name = (
+                         f"{repository_id}/.metadata/{original_file_basename}"
+                     )
+                     extra_args = {"Metadata": base_metadata.copy()}
+                     if not upload_file_to_s3(
+                         s3_client,
+                         bucket_name,
+                         local_metadata_file_path,
+                         s3_metadata_object_name,
+                         extra_args=extra_args,
+                     ):
+                         raise ValueError(
+                             f"Failed to upload metadata file {local_metadata_file_path} to S3 bucket {bucket_name}."
+                         )
+
+
+ def _download_s3_object(
+     s3_client, bucket_name: str, s3_object_key: str, local_file_path: str
+ ) -> bool:
+     """
+     Downloads a single object from S3 to a local file path.
+
+     :param s3_client: Initialized S3 client.
+     :param bucket_name: Name of the S3 bucket.
+     :param s3_object_key: The key of the object in S3.
+     :param local_file_path: The local path where the file should be saved.
+     :return: True if download was successful, False otherwise.
+     """
+
+     try:
+         s3_client.download_file(bucket_name, s3_object_key, local_file_path)
+         return True
+     except ClientError as e:
+         print(f"ERROR: Failed to download {s3_object_key} from S3: {e}")
+         return False
+
+
+ def download_case_from_s3(
+     repository_id: str,
+     cluster_name: str,  # Kept for consistency with caller, though not used directly in S3 ops
+     access: str,
+     secret: str,
+     session_token: str,
+     bucket_name: str,
+     url: str,  # S3 endpoint URL, used by _get_region
+     output_path: str,
+     file_list: List[str],
+ ) -> List[str]:
+     """
+     Downloads files from an S3 bucket for a given case repository.
+
+     It iterates through the provided `file_list` and downloads each specified
+     file from the S3 path `{repository_id}/{file_name}`, preserving its
+     relative path under `output_path`, then returns a list of basenames of
+     the downloaded files.
+
+     :param repository_id: The ID of the repository in S3.
+     :param cluster_name: Name of the cluster (for context, not used in S3 calls).
+     :param access: AWS access key ID.
+     :param secret: AWS secret access key.
+     :param session_token: AWS session token.
+     :param bucket_name: Name of the S3 bucket.
+     :param url: S3 service URL (used to determine the region via _get_region).
+     :param output_path: Local directory where files will be downloaded.
+     :param file_list: A list of file names (basenames) to be downloaded.
+     :return: A list of basenames of the downloaded files.
+     :raises ValueError: If S3 connection parameters are missing.
+     :raises RuntimeError: If S3 operations fail.
+     """
+     region = _get_region(url)
+     if not all([region, access, secret, session_token, bucket_name]):
+         # TODO: Replace print with proper logging
+         print(
+             "ERROR: Missing S3 connection parameters (region, access, secret, token, or bucket name)."
+         )
+         raise ValueError("Missing S3 connection parameters.")
+
+     s3_client = boto3.client(
+         "s3",
+         aws_access_key_id=access,
+         aws_secret_access_key=secret,
+         aws_session_token=session_token,
+         region_name=region,
+     )
+
+     downloaded_files: List[str] = []
+
+     try:
+         for file_name in file_list:
+             # Construct the full S3 object key
+             s3_object_key = f"{repository_id}/{file_name}"
+
+             local_file_path = os.path.join(output_path, file_name)
+             if _download_s3_object(
+                 s3_client, bucket_name, s3_object_key, local_file_path
+             ):
+                 downloaded_files.append(os.path.basename(local_file_path))
+
+     except ClientError as e:
+         print(f"ERROR: S3 ClientError during download: {e}")
+         raise RuntimeError(f"Failed to download files from S3: {e}")
+     except Exception as e:
+         print(f"ERROR: An unexpected error occurred during download: {e}")
+         raise RuntimeError(f"An unexpected error occurred during S3 download: {e}")
+
+     return downloaded_files
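
Note: a minimal sketch of how the two entry points of this new module might be called. Every credential, bucket, path, and URL below is a hypothetical placeholder; in the package these values are supplied by the cloud client rather than hard-coded.

    from psr.cloud.aws import upload_case_to_s3, download_case_from_s3

    # Hypothetical values throughout. _get_region takes the first dot-separated
    # token of `url`, so "us-east-1.amazonaws.com" yields region "us-east-1".
    creds = dict(
        access="AKIAEXAMPLE",
        secret="secret-example",
        session_token="token-example",
        bucket_name="example-bucket",
        url="us-east-1.amazonaws.com",
    )

    upload_case_to_s3(
        files=["/cases/case1/data.dat"],
        repository_id="12345",
        cluster_name="example-cluster",
        zip_compress=True,  # bundles the files into 12345.zip under {repository_id}/uploaded/
        **creds,
    )

    downloaded = download_case_from_s3(
        repository_id="12345",
        cluster_name="example-cluster",
        output_path="/tmp/results",
        file_list=["results.log"],
        **creds,
    )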
psr/cloud/cloud.py CHANGED
@@ -100,7 +100,7 @@ _CONSOLE_REL_PARENT_PATH = r"Oper\Console"
 
  _CONSOLE_APP = r"FakeConsole.exe"
 
- _ALLOWED_PROGRAMS = ["SDDP", "OPTGEN", "PSRIO", "GRAF"]
+ _ALLOWED_PROGRAMS = ["SDDP", "OPTGEN", "PSRIO", "GRAF", "MyModel", "GNoMo"]
 
  if os.name == "nt":
      _PSRCLOUD_CREDENTIALS_PATH = os.path.expandvars(
@@ -462,6 +462,7 @@ class Client:
              return selection
 
          program_versions = self.get_program_versions(case.program)
+         case.program_version_name = case.program_version
          case.program_version = validate_selection(
              case.program_version, program_versions, "Version", case.program
          )
@@ -504,6 +505,14 @@ class Client:
              # Replace partial with complete budget name
              case.budget = match_list[0]
 
+         # MyModel
+         if case.program == "MyModel":
+             if case.mymodel_program_files is None:
+                 raise CloudInputError("MyModel program files not provided")
+
+         if case.program != "MyModel" and case.mymodel_program_files is not None:
+             msg = "Ignoring mymodel_program_files parameter for non MyModel case."
+             warnings.warn(msg)
          return case
 
      def _pre_process_graph(self, path: str, case_id: int) -> None:
@@ -610,6 +619,7 @@ class Client:
              "validacaoModelo": "True",
              "validacaoUsuario": "False",
              "idVersao": case.program_version,
+             "modeloVersao": case.program_version_name,
              "pathModelo": "C:\\PSR",
              "idTipoExecucao": case.execution_type,
              "nomeCaso": case.name,
@@ -618,6 +628,7 @@ class Client:
              "userTag": "(Untagged)",
              "lifecycle": case.repository_duration,
              "versaoInterface": interface_version,
+             "pathPrograma": case.mymodel_program_files,
          }
 
          if case.budget:
@@ -902,26 +913,46 @@ class Client:
          budgets.sort()
          return budgets
 
+     def get_number_of_processes(self, programa_nome):
+         xml = self._get_cloud_versions_xml()
+
+         programa = xml.find(f".//Programa[@nome='{programa_nome}']")
+         if programa is None:
+             raise CloudError(f"Programa '{programa_nome}' não encontrado.")
+
+         cluster = programa.find(f".//Cluster[@nome='{self.cluster['name']}']")
+         if cluster is None:
+             raise CloudError(
+                 f"Cluster '{self.cluster['name']}' não encontrado no programa '{programa_nome}'."
+             )
+
+         maximo_processos = cluster.get("maximoProcessos")
+         processos_por_maquina = cluster.get("processosPorMaquina")
+
+         if maximo_processos and processos_por_maquina:
+             maximo_processos = int(maximo_processos)
+             processos_por_maquina = int(processos_por_maquina)
+
+             lista_processos = list(
+                 range(
+                     processos_por_maquina, maximo_processos + 1, processos_por_maquina
+                 )
+             )
+
+             return lista_processos
+
+         raise CloudError(f"Invalid values for cluster '{self.cluster['name']}'.")
+
      def _make_soap_request(self, service: str, name: str = "", **kwargs) -> ET.Element:
          portal_ws = zeep.Client(self.cluster["url"] + "?WSDL")
          section = str(id(self))
-         password_md5 = _md5sum(self.username + self.__password + section).upper()
-         password_md5 = (
-             password_md5
-             if self.cluster["name"] == "PSR-US"
-             or self.cluster["name"] == "PSR-HOTFIX"
-             or self.cluster["name"] == "PSR-US_OHIO"
-             else self.__password.upper()
-         )
+         password_md5 = self.__password.upper()
          additional_arguments = kwargs.get("additional_arguments", None)
          parameters = {
              "sessao_id": section,
-             "tipo_autenticacao": "portal"
-             if self.cluster["name"] == "PSR-US"
-             or self.cluster["name"] == "PSR-HOTFIX"
-             or self.cluster["name"] == "PSR-US_OHIO"
-             else "bcrypt",
+             "tipo_autenticacao": "bcrypt",
              "idioma": "3",
+             "versao_cliente": self._get_console_version().split("-")[0],
          }
          if additional_arguments:
              parameters.update(additional_arguments)
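
Note: the new get_number_of_processes reads the cluster's maximoProcessos and processosPorMaquina attributes from the versions XML and returns the valid process counts in whole-machine steps. A worked example with hypothetical attribute values processosPorMaquina="16" and maximoProcessos="64":

    # Hypothetical cluster attributes: processosPorMaquina=16, maximoProcessos=64
    list(range(16, 64 + 1, 16))  # -> [16, 32, 48, 64]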
psr/cloud/data.py CHANGED
@@ -76,7 +76,9 @@ class Case:
              "Memory per process ratio must be a string",
          )
 
-         self.repository_duration: Optional[Union[str, int]] = kwargs.get("repository_duration", 2)
+         self.repository_duration: Optional[Union[str, int]] = kwargs.get(
+             "repository_duration", 2
+         )
          self._validate_type(
              self.repository_duration,
              (int, str),
@@ -94,6 +96,13 @@ class Case:
          # Save In Cloud
          self.upload_only = kwargs.get("upload_only", False)
 
+         # Model Optional Attributes
+
+         # MyModel
+         self.mymodel_program_files: Optional[str] = kwargs.get(
+             "mymodel_program_files", None
+         )
+
      @staticmethod
      def _validate_type(
          value, expected_type: Union[List[type], Tuple[type], type], error_message: str
@@ -103,3 +112,11 @@ class Case:
 
      def __str__(self):
          return str(self.__dict__)
+
+     def to_dict(self) -> dict:
+         def serialize(obj):
+             if isinstance(obj, datetime):
+                 return obj.isoformat()
+             return obj
+
+         return {k: serialize(v) for k, v in self.__dict__.items() if v is not None}
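
Note: the new Case.to_dict omits every attribute whose value is None and converts datetime values to ISO-8601 strings (this assumes datetime is imported elsewhere in data.py; the import is not shown in this hunk). A minimal sketch of the serializer's behavior:

    from datetime import datetime

    # Mirror of to_dict's inner serialize(): datetimes become ISO strings,
    # everything else passes through unchanged.
    def serialize(obj):
        if isinstance(obj, datetime):
            return obj.isoformat()
        return obj

    serialize(datetime(2024, 1, 1))  # -> '2024-01-01T00:00:00'
    serialize("SDDP")                # -> 'SDDP'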
psr/cloud/version.py CHANGED
@@ -2,4 +2,4 @@
  # Unauthorized copying of this file, via any medium is strictly prohibited
  # Proprietary and confidential
 
- __version__ = "0.3.3"
+ __version__ = "0.3.7"
psr/cloud/xml.py CHANGED
@@ -9,6 +9,8 @@ from xml.etree import ElementTree as ET
  def create_case_xml(parameters: Dict[str, Any]) -> str:
      root = ET.Element("ColecaoParametro")
      for name, value in parameters.items():
+         if value is None:
+             continue
          value = _handle_invalid_xml_chars(value)
          parameter = ET.SubElement(root, "Parametro", nome=name, tipo="System.String")
          parameter.text = value
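
Note: this guard pairs with the cloud.py change above, where the case parameters now include "pathPrograma": case.mymodel_program_files, which is None for non-MyModel cases; skipping None keeps such entries out of the XML instead of emitting empty elements. A sketch (parameter names taken from this diff, values hypothetical):

    # "pathPrograma" is skipped entirely; only "nomeCaso" becomes a <Parametro>.
    create_case_xml({"nomeCaso": "my-case", "pathPrograma": None})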
psr/factory/__init__.py CHANGED
@@ -2,6 +2,6 @@
  # Unauthorized copying of this file, via any medium is strictly prohibited
  # Proprietary and confidential
 
- __version__ = "5.0.0b1"
+ __version__ = "5.0.0b2"
 
  from .api import *
psr/factory/factory.dll CHANGED
Binary file