psr-factory 5.0.0b69__py3-none-manylinux_2_28_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of psr-factory might be problematic.
- psr/apps/__init__.py +7 -0
- psr/apps/apps.py +225 -0
- psr/apps/version.py +5 -0
- psr/execqueue/client.py +126 -0
- psr/execqueue/config.py +52 -0
- psr/execqueue/db.py +286 -0
- psr/execqueue/server.py +689 -0
- psr/execqueue/watcher.py +146 -0
- psr/factory/__init__.py +7 -0
- psr/factory/api.py +2745 -0
- psr/factory/factory.pmd +7322 -0
- psr/factory/factory.pmk +19461 -0
- psr/factory/factorylib.py +410 -0
- psr/factory/libfactory.so +0 -0
- psr/factory/py.typed +0 -0
- psr/factory/samples/__init__.py +2 -0
- psr/factory/samples/sddp_case01.py +166 -0
- psr/factory/samples/sddp_case21.py +242 -0
- psr/outputs/__init__.py +5 -0
- psr/outputs/outputs.py +179 -0
- psr/outputs/resample.py +289 -0
- psr/psrfcommon/__init__.py +6 -0
- psr/psrfcommon/psrfcommon.py +57 -0
- psr/psrfcommon/tempfile.py +118 -0
- psr/runner/__init__.py +7 -0
- psr/runner/runner.py +743 -0
- psr/runner/version.py +5 -0
- psr_factory-5.0.0b69.dist-info/METADATA +47 -0
- psr_factory-5.0.0b69.dist-info/RECORD +32 -0
- psr_factory-5.0.0b69.dist-info/WHEEL +5 -0
- psr_factory-5.0.0b69.dist-info/licenses/LICENSE.txt +21 -0
- psr_factory-5.0.0b69.dist-info/top_level.txt +1 -0
psr/apps/__init__.py
ADDED
psr/apps/apps.py
ADDED
@@ -0,0 +1,225 @@
# PSR Factory. Copyright (C) PSR, Inc - All Rights Reserved
# Unauthorized copying of this file, via any medium is strictly prohibited
# Proprietary and confidential

import glob
import os
import pathlib
from typing import (
    Dict,
    List,
    Optional,
    Tuple,
    Union
)

if os.name == "nt":
    import winreg


from psr.runner import (
    run_sddp,
    run_sddp_check,
    run_hydro_estimation,
    run_sddp_cleanup,
    get_sddp_version,
    run_optgen,
    run_optgen_check,
    run_optgen_cleanup,
    run_ncp,
    run_graph,
    run_psrcloud,
    run_tsl,
)


class ModelNotFound(Exception):
    pass


def _get_versions_from_base_path(base_path: Union[str, pathlib.Path], program_name: str) -> Dict[str, str]:
    program_path = os.path.join(base_path, program_name)
    if not os.path.exists(program_path):
        return dict()
    versions = dict()
    for entry in os.scandir(program_path):
        if entry.is_dir():
            versions[entry.name] = entry.path
    return versions


def get_program_versions_paths(program_name: str) -> Dict[str, str]:
    if os.name == "nt":
        return _get_registry_versions(program_name)
    else:
        return _get_versions_from_base_path("/opt/psr/", program_name)


def get_latest_version_and_path(program_name: str) -> Tuple[str, str]:
    versions = get_program_versions_paths(program_name)
    if not versions:
        raise ModelNotFound(f"Model {program_name} not found")
    # sort keys
    versions_keys = dict(sorted(versions.items()))
    latest = list(versions_keys.keys())[-1]
    return latest, versions[latest]


def get_latest_version(program_name: str) -> "AppRunner":
    program_name_lower = program_name.lower()
    version, path = get_latest_version_and_path(program_name)
    if program_name_lower == "sddp":
        return SDDP(path)

    if program_name_lower == "optgen":
        _, sddp_path = get_latest_version_and_path("sddp")
        return OptGen(path, sddp_path, version)

    if program_name_lower == "ncp":
        return NCP(path, version)

    if program_name_lower == "graph":
        return Graph(path, version)

    if program_name_lower == "psrcloud":
        return PSRCloud(path, version)

    if program_name_lower == "tsl" or program_name_lower == "timeserieslab":
        return TSL(path, version)

    raise ModelNotFound(f"Model {program_name} not found")


class AppRunner:
    def __init__(self):
        pass

    def run(self, case_path: str, **kwargs):
        pass

    def version(self) -> str:
        pass

    def install_path(self) -> str:
        pass


class SDDP(AppRunner):
    def __init__(self, sddp_path: str):
        super().__init__()
        self._sddp_path = sddp_path

    def run(self, case_path: str, **kwargs):
        run_sddp(case_path, self._sddp_path, **kwargs)

    def run_check(self, case_path: str, **kwargs):
        run_sddp_check(case_path, self._sddp_path, **kwargs)

    def run_cleanup(self, case_path: str, **kwargs):
        run_sddp_cleanup(case_path, self._sddp_path, **kwargs)

    def run_hydro_estimation(self, case_path: str, **kwargs):
        run_hydro_estimation(case_path, self._sddp_path, **kwargs)

    def version(self) -> str:
        return get_sddp_version(self._sddp_path)

    def install_path(self) -> str:
        return self._sddp_path


class OptGen(AppRunner):
    def __init__(self, optgen_path: str, sddp_path: str, version: str):
        super().__init__()
        self._optgen_path = optgen_path
        self._sddp_path = sddp_path
        self._version = version

    def run(self, case_path: str, **kwargs):
        run_optgen(case_path, self._optgen_path, self._sddp_path, **kwargs)

    def run_check(self, case_path: str, **kwargs):
        run_optgen_check(case_path, self._optgen_path, self._sddp_path, **kwargs)

    def run_cleanup(self, case_path: str, **kwargs):
        run_optgen_cleanup(case_path, self._optgen_path, self._sddp_path, **kwargs)

    def version(self) -> str:
        return self._version


class NCP(AppRunner):
    def __init__(self, ncp_path: str, version: str):
        super().__init__()
        self._ncp_path = ncp_path
        self._version = version

    def run(self, case_path: str, **kwargs):
        run_ncp(case_path, self._ncp_path, **kwargs)

    def version(self) -> str:
        return self._version


class Graph(AppRunner):
    def __init__(self, graph_path: str, version: str):
        super().__init__()
        self._graph_path = graph_path
        self._version = version

    def run(self, case_path: str, **kwargs):
        run_graph(case_path, self._graph_path, **kwargs)

    def version(self) -> str:
        return self._version


class TSL(AppRunner):
    def __init__(self, tsl_path: str, version: str):
        super().__init__()
        self._tsl_path = tsl_path
        self._version = version

    def run(self, **kwargs):
        run_tsl(self._tsl_path, **kwargs)

    def version(self) -> str:
        return self._version


class PSRCloud(AppRunner):
    def __init__(self, psrcloud_path: str, version: str):
        super().__init__()
        self._psrcloud_path = psrcloud_path
        self._version = version

    def run(self, **kwargs):
        run_psrcloud(self._psrcloud_path, **kwargs)

    def version(self) -> str:
        return self._version


if os.name == "nt":
    def _get_registry_versions(program_name: str) -> Dict[str, str]:
        base_key = winreg.HKEY_LOCAL_MACHINE
        subkey_path = rf"SOFTWARE\PSR\{program_name}"
        version_paths: Dict[str, str] = dict()
        try:
            with winreg.OpenKey(base_key, subkey_path) as key:
                i = 0
                while True:
                    try:
                        subkey_name = winreg.EnumKey(key, i)
                        subkey_full_path = f"{subkey_path}\\{subkey_name}"

                        with winreg.OpenKey(base_key, subkey_full_path) as subkey:
                            try:
                                path_value, _ = winreg.QueryValueEx(subkey, "Path")
                                # if subkey ends with .x, replace with blank
                                subkey_name = subkey_name.replace(".x", "")
                                version_paths[subkey_name] = path_value
                            except FileNotFoundError:
                                pass
                        i += 1
                    except OSError:
                        break
        except FileNotFoundError:
            pass
        return version_paths
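For orientation, here is a minimal usage sketch of the runner API added in psr/apps/apps.py. It is not part of the package; the direct module import path and the case directory below are assumptions for illustration.

# Illustrative only: discover the newest installed SDDP and run a case with it.
from psr.apps.apps import get_latest_version, ModelNotFound

try:
    sddp = get_latest_version("sddp")  # Windows registry lookup, or /opt/psr/ on Linux
    print("SDDP", sddp.version(), "installed at", sddp.install_path())
    sddp.run("/path/to/my_case")       # hypothetical case directory; extra kwargs go to run_sddp
except ModelNotFound as error:
    print("No SDDP installation found:", error)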
psr/apps/version.py
ADDED
psr/execqueue/client.py
ADDED
@@ -0,0 +1,126 @@
import os
import zipfile
import requests
from typing import List, Optional, Tuple


def zip_directory(directory_path, output_zip):
    """Compress a directory into a zip file."""
    with zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for root, _, files in os.walk(directory_path):
            for file in files:
                file_path = os.path.join(root, file)
                arcname = os.path.relpath(file_path, start=directory_path)
                zipf.write(file_path, arcname=arcname)


def upload_case_file(zip_path, server_url):
    """Upload a zip file to the server."""
    with open(zip_path, 'rb') as f:
        files = {'file': (os.path.basename(zip_path), f)}
        response = requests.post(f"{server_url}/upload", files=files)

    if response.status_code == 200:
        print("Upload successful!")
        print("Case ID:", response.json().get('case_id'))
        return response.json().get('case_id')
    else:
        print("Upload failed:", response.text)
        return None


def run_module(case_id: str, module_name: str, server_url: str) -> Optional[str]:
    """Add a module to the execution queue. Returns the execution id."""
    data = {"case_id": case_id, "module_name": module_name}
    response = requests.post(f"{server_url}/run_module", data=data)

    if response.status_code == 200:
        print("Added to execution queue successfully!")
        print("Execution ID:", response.json().get('execution_id'))
        return response.json().get('execution_id')
    else:
        print("Module enqueue failed:", response.status_code, response.text)
        return None


def run_case(case_id: str, server_url: str, cloud_execution: bool = False) -> Optional[str]:
    """Add a case to the execution queue. For a server-local run,
    returns the execution id. For a cloud run, returns the cloud upload id."""
    response = requests.post(f"{server_url}/run", params={'cloud_execution': cloud_execution})

    if response.status_code == 200:
        if not cloud_execution:
            print("Added to queue successfully!")
            print("Execution ID:", response.json().get('execution_id'))
            return response.json().get('execution_id')
        else:
            print("Cloud execution queued!")
            print("Cloud upload ID:", response.json().get('cloud_upload_id'))
            return response.json().get('cloud_upload_id')
    else:
        print("Run case failed:", response.status_code, response.text)
        return None


def get_module_log(case_id: str, server_url: str, module_name: Optional[str] = None) -> Optional[str]:
    """Fetch the content of a module's fixed log file. If module_name is None, returns the last module run log."""
    params = {}
    if module_name:
        params['module'] = module_name
    response = requests.get(f"{server_url}/module_log/{case_id}", params=params)
    if response.status_code == 200:
        return response.text
    else:
        print("Fetch module log failed:", response.text)
        return None


def upload_and_run_file(zip_path: str, server_url: str, cloud_execution: bool = False):
    """Upload a zip file to the server and add it to the execution queue."""
    with open(zip_path, 'rb') as f:
        files = {'file': (os.path.basename(zip_path), f)}
        response = requests.post(f"{server_url}/upload_and_run", files=files, params={'cloud_execution': cloud_execution})

    if response.status_code == 200:
        print("Upload successful! Waiting for execution.")
        if cloud_execution:
            print("Cloud upload ID:", response.json().get('cloud_upload_id'))
            return response.json().get('cloud_upload_id')
        else:
            print("Local execution ID:", response.json().get('execution_id'))
            return response.json().get('execution_id')
    else:
        print("Upload failed:", response.text)
        return None


def get_execution_status(execution_id: str, server_url: str, cloud_execution: bool = False) -> Optional[Tuple[int, str]]:
    """Get the status of an execution."""
    print("Getting status for execution ID:", execution_id)
    response = requests.get(f"{server_url}/status/{execution_id}", params={'cloud_execution': cloud_execution})
    result = response.status_code == 200
    return response.json().get('status_id'), response.json().get('status_msg') if result else None


def get_results(execution_id, server_url, cloud_execution=False) -> Optional[List[str]]:
    """List the result files of an execution."""
    response = requests.get(f"{server_url}/results/{execution_id}", params={'cloud_execution': cloud_execution})

    if response.status_code == 200:
        print("Results downloaded successfully!")
        files = response.json().get('files')
        print("Files:", files)
        return files
    else:
        print("Results download failed:", response.text)
        return None


def download_execution_file(execution_id: str, server_url: str, file: str, download_path: str, cloud_execution: bool = False):
    """Download a single result file of an execution."""
    response = requests.get(f"{server_url}/results/{execution_id}/{file}", params={'cloud_execution': cloud_execution})

    # TODO: add validation for download_path existence.
    if response.status_code == 200:
        with open(os.path.join(download_path, file), 'wb') as f:
            f.write(response.content)
    else:
        print("Download failed:", response.text)
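As a rough end-to-end sketch (not part of the package), the client helpers above can be chained to zip a case, submit it, poll its status, and list its results. The server URL, case directory, polling interval, and the terminal-status check are assumptions; the actual status ids and messages come from the ExecQueue server.

# Illustrative client workflow against a locally running ExecQueue server.
import time
from psr.execqueue.client import (
    zip_directory, upload_case_file, run_case, get_execution_status, get_results,
)

server_url = "http://127.0.0.1:5000"          # assumed server address (see config.py defaults)
zip_directory("my_case", "my_case.zip")       # hypothetical case directory
case_id = upload_case_file("my_case.zip", server_url)
execution_id = run_case(case_id, server_url) if case_id else None

if execution_id:
    while True:
        status_id, status_msg = get_execution_status(execution_id, server_url)
        print("status:", status_id, status_msg)
        if status_msg and "finish" in str(status_msg).lower():   # assumed terminal marker
            break
        time.sleep(10)
    get_results(execution_id, server_url)     # prints and returns the result file names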
psr/execqueue/config.py
ADDED
@@ -0,0 +1,52 @@
import os
import tomllib

__version__ = "0.3.0"
_app_name = "PSR Factory ExecQueue"
DEFAULT_PORT = 5000
DEFAULT_HOST = "127.0.0.1"
FLASK_DEBUG = False
_SETTINGS_FILE_PATH = "server_settings.toml"

if os.name == 'nt':
    _DEFAULT_SDDP_PATH = r"C:/PSR/Sddp17.3"
else:
    _DEFAULT_SDDP_PATH = "/opt/psr/sddp"
DEFAULT_CLUSTER_NAME = "server"
DEFAULT_PSRCLOUD_CLUSTER = "external"
DEFAULT_PSRCLOUD_CLUSTER_URL = ""


# read toml settings file
with open(os.path.join(os.getcwd(), _SETTINGS_FILE_PATH), 'rb') as f:
    settings = tomllib.load(f)


sddp_path = settings.get("sddp_path", _DEFAULT_SDDP_PATH)

cluster_name = settings.get("cluster_name", DEFAULT_CLUSTER_NAME)
psrcloud_cluster = settings.get("psrcloud_cluster", DEFAULT_PSRCLOUD_CLUSTER)
psrcloud_cluster_url = settings.get("psrcloud_cluster", DEFAULT_PSRCLOUD_CLUSTER_URL)

# Base server data storage path.
STORAGE_PATH = settings.get("storage_path", os.path.join(os.getcwd(), 'serverdata'))

# Where uploaded (received) cases will be stored.
UPLOADS_FOLDER = os.path.join(STORAGE_PATH, 'uploads')

# Where results of local runs will be stored.
LOCAL_RESULTS_FOLDER = os.path.join(STORAGE_PATH, 'local_results')

# Where results of cloud runs will be stored.
CLOUD_RESULTS_FOLDER = os.path.join(STORAGE_PATH, 'cloud_results')

# Where temporary extracted case files will be stored
TEMPORARY_UPLOAD_FOLDER = os.path.join(STORAGE_PATH, 'tmp')

# Optional: modules configuration
# Expected format in server_settings.toml:
# [modules.<name>]
# command = "python some_script.py --case \"{case_path}\""
# log_file = "<optional fixed log file name>"
MODULES = settings.get("modules", {})
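Since config.py reads server_settings.toml from the current working directory at import time (via tomllib, so Python 3.11+ is required) and raises FileNotFoundError if the file is missing, a settings file must exist before the module is imported. A minimal bootstrap sketch follows; the SDDP path, storage path, and module command are hypothetical values, not defaults shipped with the package.

# Illustrative only: write a minimal server_settings.toml, then import the config module.
minimal_settings = """
sddp_path = "/opt/psr/sddp/17.3"
cluster_name = "server"
storage_path = "./serverdata"

[modules.sddp_check]
command = "python run_check.py --case \\"{case_path}\\""
log_file = "sddp_check.log"
"""

with open("server_settings.toml", "w") as f:
    f.write(minimal_settings)

import psr.execqueue.config as config         # parses the settings file at import time
print(config.sddp_path, config.STORAGE_PATH, list(config.MODULES))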