psr-factory 5.0.0b69__py3-none-manylinux_2_28_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release of psr-factory has been flagged as potentially problematic.

psr/runner/runner.py ADDED
@@ -0,0 +1,743 @@
1
+ # PSR Factory. Copyright (C) PSR, Inc - All Rights Reserved
2
+ # Unauthorized copying of this file, via any medium is strictly prohibited
3
+ # Proprietary and confidential
4
+
5
+ from contextlib import contextmanager
6
+ import glob
7
+ import os
8
+ import pathlib
9
+ import shutil
10
+ import socket
11
+ import subprocess
12
+ from types import ModuleType
13
+ from typing import Any, Dict, List, Optional, Tuple, Union
14
+ import warnings
15
+
16
+
17
+ from psr.psrfcommon import change_cwd, exec_cmd
18
+ import psr.psrfcommon.tempfile
19
+ import psr.factory
20
+
21
+ # Check whether psutil module is available.
22
+ _HAS_PSUTIL: Optional[bool] = None
23
+
24
+ psutil: Optional[ModuleType] = None
25
+
26
+ _DEBUG: bool = True
27
+
28
+
29
+ def _has_psutil() -> bool:
30
+ """Check if psutil is available."""
31
+ global _HAS_PSUTIL
32
+ global psutil
33
+ if _HAS_PSUTIL is None:
34
+ try:
35
+ import psutil
36
+ _HAS_PSUTIL = True
37
+ except ImportError:
38
+ _HAS_PSUTIL = False
39
+ return _HAS_PSUTIL
40
+
41
+
42
+ if os.name == "nt":
43
+ __default_mpi_path = "C:\\Program Files\\MPICH2\\bin"
44
+ else:
45
+ __default_mpi_path = "/usr/bin"
46
+
47
+
48
+ def _get_semver_version(version: str) -> Tuple[int, int, int, Optional[str]]:
49
+ def get_tag_from_part(part: str) -> Tuple[int, Optional[str]]:
50
+ to_try = ("beta", "rc")
51
+ part = part.lower()
52
+ for tag_name in to_try:
53
+ if tag_name in part:
54
+ tag_pos = part.lower().index(tag_name)
55
+ part_value = int(part[:tag_pos])
56
+ tag = part[tag_pos:]
57
+ return part_value, tag
58
+ return int(part), None
59
+ parts = version.split(".")
60
+ major = int(parts[0])
61
+ tag = None
62
+ minor = 0
63
+ patch = 0
64
+ if len(parts) == 2:
65
+ minor, tag = get_tag_from_part(parts[1])
66
+ patch = 0
67
+ elif len(parts) == 3:
68
+ minor = int(parts[1])
69
+ patch, tag = get_tag_from_part(parts[2])
70
+
71
+ return major, minor, patch, tag
72
+
73
+
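For reference, a quick sketch of what this parser returns for some illustrative SDDP-style version strings (the inputs below are examples, not values taken from the package):

    _get_semver_version("17.3")       # (17, 3, 0, None)
    _get_semver_version("18.0.7")     # (18, 0, 7, None)
    _get_semver_version("18.0.7rc1")  # (18, 0, 7, "rc1")
    _get_semver_version("18.1beta2")  # (18, 1, 0, "beta2")
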
74
+ def _get_available_cpu() -> int:
75
+ if not _has_psutil():
76
+ raise ImportError("psutil module is required to get available CPU count")
77
+ return psutil.cpu_count()
78
+
79
+
80
+ def _get_host_name() -> str:
81
+ return socket.gethostname().upper()
82
+
83
+
84
+ def _get_nproc(specified: int, available: int) -> int:
85
+ if available > specified:
86
+ return specified
87
+ elif available < specified:
88
+ warnings.warn(f"Specified number of threads ({specified}) is greater than available ({available})")
89
+ return available
90
+ else:
91
+ return available
92
+
93
+ def _write_mpi_settings(mpi_file_path: Union[str, pathlib.Path, Any], cluster_settings: Optional[Union[int, bool, Dict[str, int]]]):
94
+ if cluster_settings is not None:
95
+ available_cpu = _get_available_cpu()
96
+ if isinstance(cluster_settings, bool):
97
+ # Rewrite with default settings.
98
+ if cluster_settings:
99
+ computer_name = _get_host_name()
100
+ nproc = available_cpu
101
+ cluster_settings = {computer_name: nproc}
102
+ else:
103
+ cluster_settings = None
104
+ elif isinstance(cluster_settings, int):
105
+ computer_name = _get_host_name()
106
+ specified_cpu_number = cluster_settings
107
+ nproc = _get_nproc(specified_cpu_number, available_cpu)
108
+ cluster_settings = {computer_name: nproc}
109
+ elif isinstance(cluster_settings, dict):
110
+ pass
111
+ else:
112
+ raise ValueError("Invalid cluster settings type")
113
+ else:
114
+ computer_name = socket.gethostname()
115
+ nproc = _get_available_cpu()
116
+ cluster_settings = {computer_name: nproc}
117
+
118
+ if isinstance(cluster_settings, dict):
119
+ # Accept either a filesystem path or a temporary-file object
120
+ # (resolved through its name attribute).
121
+ if isinstance(mpi_file_path, (str, pathlib.Path)):
122
+ target_path = str(mpi_file_path)
123
+ else:
124
+ target_path = mpi_file_path.name
125
+ # A context manager guarantees the handle is flushed and closed.
126
+ with open(target_path, 'w') as f:
127
+ for computer, nproc in cluster_settings.items():
128
+ f.write(f"{computer}:{nproc}\n")
129
+
130
+
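The hosts file written above lists one machine per line in NAME:NPROC form. For example, a cluster_settings value of {"NODE01": 8, "NODE02": 8} (hypothetical host names) would produce:

    NODE01:8
    NODE02:8
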
131
+ def run_sddp(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
132
+ case_path = os.path.abspath(str(case_path))
133
+ sddp_path = str(sddp_path)
134
+ parallel_run = kwargs.get("parallel_run", True)
135
+ cluster_settings: Optional[Union[int, bool, Dict[str, int]]] = kwargs.get("cluster_settings", None)
136
+ dry_run = kwargs.get("dry_run", False)
137
+ show_progress = kwargs.get("show_progress", False)
138
+ extra_args = " ".join(kwargs.get("extra_args", ()))
139
+ exec_mode = kwargs.get("_mode", None)
140
+ mpi_path = kwargs.get("mpi_path", __default_mpi_path)
141
+ env = kwargs.get("env", {})
142
+
143
+ sddp_path_full = _get_sddp_executable_parent_path(sddp_path)
144
+ # Append last / if missing.
145
+ case_path_last_slash = os.path.join(os.path.abspath(case_path), "")
146
+
147
+ mode_arg = exec_mode if exec_mode is not None else ""
148
+ # Disable parallel run in check mode.
149
+ parallel_run = parallel_run if exec_mode is None else False
150
+
151
+ major, minor, patch, tag = _get_semver_version(get_sddp_version(sddp_path))
152
+
153
+ temp_folder = os.path.join(os.getenv("TEMP") or os.getenv("TMPDIR") or os.getenv("TMP") or "/tmp", "")
154
+ with (psr.psrfcommon.tempfile.CreateTempFile(temp_folder, "mpd_sddp", "", ".hosts", False) as mpi_temp_file,
155
+ change_cwd(sddp_path_full)):
156
+
157
+ # Write MPI settings if required
158
+ if parallel_run and cluster_settings is not None:
159
+ if (major, minor, patch) >= (18, 0, 7):  # this branch passes a --hostsfile argument
160
+ _write_mpi_settings(mpi_temp_file, cluster_settings)
161
+ extra_args = extra_args + f" --hostsfile=\"{mpi_temp_file.name}\""
162
+ if dry_run:
163
+ print("Using temporary mpi settings file:", mpi_temp_file.name)
164
+ mpi_written = True
165
+ elif major >= 18:
166
+ mpi_file_path = os.path.join(sddp_path_full, "mpd_sddp.hosts")
167
+ mpi_written = False
168
+ else:
169
+ mpi_file_path = os.path.join(sddp_path_full, "mpd.hosts")
170
+ mpi_written = False
171
+ if not mpi_written:
172
+ _write_mpi_settings(mpi_file_path, cluster_settings)
173
+
174
+ if parallel_run:
175
+ if os.name == 'nt':
176
+ if major <= 17:
177
+ cmd = f'sddpar.exe --path="{sddp_path_full}" --mpipath="{mpi_path}" --pathdata="{case_path_last_slash}" {extra_args}'
178
+ else:
179
+ cmd = f'sddpar.exe --path="{sddp_path_full}" --mpipath="{mpi_path}" --pathdata="{case_path}" {extra_args}'
180
+ else:
181
+ # 17.3 and earlier use one argument style; newer versions use another.
182
+ if (major == 17 and minor <= 3) or major < 17:
183
+ cmd = f'./sddpar --path="{case_path_last_slash}" --mpipath="{mpi_path}" --habilitarhidra=1 {extra_args}'
184
+ else:
185
+ cmd = f'./sddpar --path="{sddp_path}" --mpipath="{mpi_path}" --habilitarhidra=1 --pathdata="{case_path_last_slash}" {extra_args}'
186
+
187
+ else:
188
+ if os.name == 'nt':
189
+ cmd = f'sddp.exe {mode_arg} -path "{case_path_last_slash}" {extra_args}'
190
+ else:
191
+ cmd = f'./sddp {mode_arg} -path "{case_path_last_slash}" {extra_args}'
192
+
193
+ if os.name != "nt":
194
+ env["LD_LIBRARY_PATH"] = os.path.realpath(sddp_path_full)
195
+ env["MPI_PATH"] = os.path.realpath(mpi_path)
196
+ kwargs["env"] = env
197
+ exec_cmd(cmd, **kwargs)
198
+
199
+
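As a usage sketch, assuming the module is importable as psr.runner.runner and using hypothetical installation and case paths:

    from psr.runner.runner import run_sddp

    sddp_install = r"C:\PSR\Sddp18"       # hypothetical SDDP installation folder
    study_folder = r"C:\cases\example"    # hypothetical study data folder

    # Run in parallel on 8 local processes; cluster_settings may also be a
    # bool or a {host: nproc} dict, as handled by _write_mpi_settings above.
    run_sddp(study_folder, sddp_install, parallel_run=True, cluster_settings=8)
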
200
+ def run_sddp_check(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
201
+ kwargs["_mode"] = "check"
202
+ run_sddp(case_path, sddp_path, **kwargs)
203
+
204
+
205
+ def run_sddp_cleanup(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
206
+ kwargs["_mode"] = "clean"
207
+ run_sddp(case_path, sddp_path, **kwargs)
208
+
209
+
210
+ def run_sddp_convert_fcf(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
211
+ kwargs["_mode"] = "printfcf"
212
+ # TODO: the generated file uses \t as separator, has an empty column, and its name depends on the study stage type.
213
+ run_sddp(case_path, sddp_path, **kwargs)
214
+
215
+ def _get_sddp_executable_parent_path(sddp_path: Union[str, pathlib.Path]) -> str:
216
+ if os.name == 'nt':
217
+ model_path = os.path.join(sddp_path, "models", "sddp")
218
+ if os.path.exists(model_path):
219
+ return model_path
220
+ else:
221
+ return os.path.join(sddp_path, "Oper")
222
+ else:
223
+ # solve symlinks, if needed
224
+ sddp_path = os.path.realpath(sddp_path)
225
+ return sddp_path
226
+
227
+ def _get_optgen_executable_parent_path(optgen_path: Union[str, pathlib.Path]) -> str:
228
+ if os.name == 'nt':
229
+ model_path = os.path.join(optgen_path, "models", "optgen", "Model")
230
+ if os.path.exists(model_path):
231
+ return model_path
232
+ else:
233
+ return os.path.join(optgen_path, "Model")
234
+ else:
235
+ # solve symlinks, if needed
236
+ optgen_path = os.path.realpath(optgen_path)
237
+ return optgen_path
238
+
239
+ def _get_optmain_executable_parent_path(optmain_path: Union[str, pathlib.Path]) -> str:
240
+ if os.name == 'nt':
241
+ model_path = os.path.join(optmain_path, "models", "optmain")
242
+ if os.path.exists(model_path):
243
+ return model_path
244
+ else:
245
+ return os.path.join(optmain_path, "Model")
246
+ else:
247
+ # solve symlinks, if needed
248
+ optmain_path = os.path.realpath(optmain_path)
249
+ return optmain_path
250
+
251
+
252
+ def get_sddp_version(sddp_path: Union[str, pathlib.Path]) -> str:
253
+ sddp_path = str(sddp_path)
254
+ sddp_path_full = _get_sddp_executable_parent_path(sddp_path)
255
+ if os.name == 'nt':
256
+ command = [os.path.join(sddp_path_full, "sddp.exe"), "ver"]
257
+ else:
258
+ command = [os.path.join(sddp_path_full, "sddp"), "ver"]
259
+
260
+ if os.name != "nt":
261
+ env = {
262
+ "LD_LIBRARY_PATH": os.path.realpath(sddp_path_full)
263
+ }
264
+ else:
265
+ env = {}
266
+
267
+ sub = subprocess.run(command, stdout=subprocess.PIPE, check=False, env=env)
268
+ output = sub.stdout.decode("utf-8").strip()
269
+ return output.split()[2]
270
+
271
+
272
+ def run_ncp(case_path: Union[str, pathlib.Path], ncp_path: Union[str, pathlib.Path], **kwargs):
273
+ if os.name != 'nt':
274
+ raise NotImplementedError("Running NCP is only available on Windows")
275
+ case_path = os.path.abspath(str(case_path))
276
+ ncp_path = str(ncp_path)
277
+ dry_run = kwargs.get("dry_run", False)
278
+ show_progress = kwargs.get("show_progress", False)
279
+
280
+ cnv_version = _ncp_determine_cnv_version(case_path)
281
+
282
+ print("NCP cnv version is", cnv_version)
283
+
284
+ ncp_path_full = os.path.join(ncp_path, "Oper")
285
+ cnv_path_full = os.path.join(ncp_path, "Cnv", cnv_version)
286
+
287
+ # Append last / if missing.
288
+ case_path = os.path.join(os.path.abspath(case_path), "")
289
+
290
+ coes_dat_file_path = os.path.join(case_path, "coes.dat")
291
+ coes_tmp_file_path = os.path.join(case_path, "coes.tmp")
292
+
293
+ with change_cwd(cnv_path_full):
294
+ if os.path.exists(coes_dat_file_path):
295
+ shutil.move(coes_dat_file_path, coes_tmp_file_path)
296
+ exec_cmd(f"csvcnv csv -path {case_path}")
297
+
298
+ with change_cwd(ncp_path_full):
299
+ exec_cmd(f"sddprede -path {case_path}")
300
+ if os.path.exists(coes_tmp_file_path):
301
+ shutil.move(coes_tmp_file_path, coes_dat_file_path)
302
+
303
+ exec_cmd(f"cpplus -path {case_path}")
304
+
305
+ executed_successfully = os.path.exists(os.path.join(case_path, 'cpplus.ok'))
306
+ if executed_successfully:
307
+ if os.path.exists("post-run.bat"):
308
+ exec_cmd(f'post-run.bat "{case_path}"')
309
+
310
+
311
+ def run_optgen(case_path: Union[str, pathlib.Path], optgen_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
312
+ case_path = os.path.abspath(str(case_path)).replace("\\", "/") + "/"
313
+ optgen_path = str(optgen_path)
314
+ sddp_path = str(sddp_path)
315
+ sddp_path_full = _get_sddp_executable_parent_path(sddp_path)
316
+ optgen_path_full = _get_optgen_executable_parent_path(optgen_path)
317
+ exec_mode = kwargs.get("_mode", None)
318
+
319
+ mode_arg = exec_mode if exec_mode is not None else ""
320
+
321
+ with change_cwd(optgen_path_full):
322
+ cmd = f'optgen {mode_arg} -optgdat="{case_path}" -sddpexe="{sddp_path_full}" -sddpmpi="{__default_mpi_path}"'
323
+ exec_cmd(cmd, **kwargs)
324
+
325
+
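A similar sketch for OptGen, again with hypothetical paths:

    from psr.runner.runner import run_optgen

    run_optgen(
        r"C:\cases\expansion_case",   # case_path (hypothetical)
        r"C:\PSR\OptGen8",            # optgen_path (hypothetical)
        r"C:\PSR\Sddp18",             # sddp_path (hypothetical)
    )
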
326
+ def run_optgen_check(case_path: Union[str, pathlib.Path], optgen_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
327
+ kwargs["_mode"] = "check"
328
+ run_optgen(case_path, optgen_path, sddp_path, **kwargs)
329
+
330
+
331
+ def run_optgen_cleanup(case_path: Union[str, pathlib.Path], optgen_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
332
+ kwargs["_mode"] = "clean"
333
+ run_optgen(case_path, optgen_path, sddp_path, **kwargs)
334
+
335
+
336
+ def run_optmain(case_path: Union[str, pathlib.Path], optmain_path: Union[str, pathlib.Path], **kwargs):
337
+ case_path = os.path.abspath(str(case_path)).replace("\\", "/") + "/"
338
+ optmain_path = str(optmain_path)
339
+ optmain_path_full = _get_optmain_executable_parent_path(optmain_path)
340
+
341
+ with change_cwd(optmain_path_full):
342
+ cmd = f'optmain {case_path}'
343
+ exec_cmd(cmd, **kwargs)
344
+
345
+
346
+ def run_psrio(case_path, sddp_path: str, **kwargs):
347
+ recipe_script = kwargs.get('r', kwargs.get('recipes', False))
348
+ output_path = kwargs.get('o', kwargs.get('output', False))
349
+
350
+ log_verbose = kwargs.get('v', kwargs.get('verbose', "0"))
351
+ study_model = kwargs.get('model', "sddp")
352
+ load_file_format = kwargs.get('load_format', "both")
353
+
354
+ load_from_output_path = kwargs.get('load_from_output_path', False)
355
+ save_only_in_csv = kwargs.get('csv', False)
356
+
357
+ psrio_path_full = os.path.join(sddp_path, "Oper", "psrio", "")
358
+
359
+ with change_cwd(psrio_path_full):
360
+
361
+ cmd = psrio_path_full + 'psrio.exe --model ' + study_model + ' --load_format ' + load_file_format
362
+
363
+ if recipe_script:
364
+ cmd += ' -v' + log_verbose
365
+
366
+ if load_from_output_path:
367
+ cmd += ' load_from_output_path'
368
+ if save_only_in_csv:
369
+ cmd += ' save_only_in_csv'
370
+
371
+ if output_path:
372
+ cmd += f' -o "{output_path}"'
373
+ if recipe_script:
374
+ cmd += f' -r "{recipe_script}"'
375
+
376
+ if isinstance(case_path, str):
377
+ cmd += f' "{case_path}"'
378
+ else:
379
+ case_paths = list(case_path)
380
+ for path in case_paths:
381
+ cmd += f' "{path}"'
382
+
383
+ exec_cmd(cmd, **kwargs)
384
+
385
+ def run_nwsddp(input_case_path: Union[str, pathlib.Path], output_case_path: Union[str, pathlib.Path], nwsddp_app_path: Union[str, pathlib.Path], mdc_file_path: Optional[Union[str, pathlib.Path]] = None, **kwargs):
386
+ if os.name != 'nt':
387
+ raise NotImplementedError("Running NWSDDP is only available on Windows")
388
+
389
+ input_case_path = os.path.abspath(str(input_case_path)).rstrip("\\")
390
+ output_case_path = os.path.abspath(str(output_case_path)).rstrip("\\")
391
+ nwsddp_app_path = str(nwsddp_app_path)
392
+ mdc_file_path = str(mdc_file_path) if mdc_file_path is not None else None
393
+
394
+ if mdc_file_path is not None:
395
+ mdc_file_path = os.path.abspath(mdc_file_path)
396
+ nwsddp_path_full = os.path.join(nwsddp_app_path, "bin", "")
397
+
398
+ with change_cwd(nwsddp_path_full):
399
+ extra_args = "-MDC " if mdc_file_path is not None else ""
400
+ if mdc_file_path is not None:
401
+ # Copy mdc file to case directory.
402
+ output_mdc_path = os.path.join(input_case_path, "nwsddp.mdc")
403
+ # compare if input and output mdc path are equal
404
+ if mdc_file_path.lower().strip() != output_mdc_path.lower().strip():
405
+ shutil.copy(mdc_file_path, output_mdc_path)
406
+ case_args = f"-NW:\"{os.path.join(input_case_path, '')}\" -SP:\"{os.path.join(output_case_path, '')}\""
407
+ cmd1 = 'nwsddp.exe ' + extra_args + case_args
408
+ return_code = exec_cmd(cmd1, **kwargs)
409
+ if return_code == 0:
410
+ cmd2_args = ["nwpatch.exe", "-nw", f"{input_case_path}", "-sp", f"{output_case_path}"]
411
+ exec_cmd(cmd2_args, **kwargs)
412
+
413
+
414
+
415
+ __hydro_estimation_path_contents = """ ------- PATH ---------------------------
416
+ Directorio Datos {path}
417
+ Directorio Hidro {path}
418
+ """
419
+
420
+
421
+ def run_hydro_estimation(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
422
+ if os.name != 'nt':
423
+ raise NotImplementedError("Running hydro estimation is only available on Windows")
424
+ case_path = os.path.abspath(str(case_path))
425
+ sddp_path = str(sddp_path)
426
+ # get SDDP major version
427
+ major, minor, patch, tag = _get_semver_version(get_sddp_version(sddp_path))
428
+
429
+ if major >= 18:
430
+ estima_path = os.path.join(sddp_path, "models", "estima")
431
+ else:
432
+ estima_path = os.path.join(sddp_path, "Hidro")
433
+ estima_files = [
434
+ os.path.join(estima_path, "estima.exe"),
435
+ os.path.join(estima_path, "estimaen.fmt"),
436
+ os.path.join(estima_path, "estimaes.fmt"),
437
+ os.path.join(estima_path, "estimapo.fmt"),
438
+ ]
439
+
440
+ path_file = os.path.join(case_path, "path.dat")
441
+ path_file_contents = __hydro_estimation_path_contents.format(path=case_path)
442
+ with change_cwd(case_path), __temporary_copy_of_files(case_path, *estima_files), \
443
+ __temporary_file(path_file, path_file_contents):
444
+ # create temporary path.dat file
445
+ exec_cmd(f"estima", **kwargs)
446
+
447
+
448
+ def run_graph(case_path: Union[str, pathlib.Path], graph_path: Union[str, pathlib.Path], **kwargs):
449
+ if os.name != 'nt':
450
+ raise NotImplementedError("Running graph tool is only available on Windows")
451
+ case_path = os.path.abspath(str(case_path))
452
+ graph_base_path = os.path.abspath(str(graph_path))
453
+ graph_abs_path = os.path.join(graph_base_path, "PSRGraphInterface.exe")
454
+
455
+ with change_cwd(case_path):
456
+ exec_cmd(graph_abs_path, **kwargs)
457
+
458
+
459
+ def run_psrcloud(psrcloud_path: Union[str, pathlib.Path], **kwargs):
460
+ if os.name != 'nt':
461
+ raise NotImplementedError("Running PSRCloud Desktop tool is only available on Windows")
462
+ psrcloud_base_path = os.path.abspath(str(psrcloud_path))
463
+
464
+ with change_cwd(psrcloud_base_path):
465
+ exec_cmd("PSRCloud.exe", **kwargs)
466
+
467
+
468
+ @contextmanager
469
+ def __temporary_copy_of_files(target_dir: str, *files: str):
470
+ for file in files:
471
+ shutil.copy(file, target_dir)
472
+ try:
473
+ yield
474
+ finally:
475
+ for file in files:
476
+ os.remove(os.path.join(target_dir, os.path.basename(file)))
477
+
478
+
479
+ @contextmanager
480
+ def __temporary_file(file_path: Union[str, pathlib.Path], content: str):
481
+ with open(file_path, 'w') as file:
482
+ file.write(content)
483
+ try:
484
+ yield
485
+ finally:
486
+ os.remove(file_path)
487
+
488
+
489
+ def _ncp_determine_cnv_version(case_path: Union[str, pathlib.Path]) -> str:
490
+ CURRENT_CNV_VERSION = "V14"
491
+ LEGACY_CNV_VERSION = "V12"
492
+
493
+ csumcirc_path = os.path.join(case_path, "csumcirc.dat")
494
+ if os.path.exists(csumcirc_path):
495
+ with open(csumcirc_path, 'r') as csumcirc_file:
496
+ line = next(csumcirc_file)
497
+ if line.strip().lower().find("$version") == -1:
498
+ return LEGACY_CNV_VERSION
499
+
500
+ all_ctermis = glob.glob(os.path.join(case_path, "ctermi*.dat"))
501
+ for ctermi_path in all_ctermis:
502
+ with open(ctermi_path, 'r') as ctermi_file:
503
+ line = next(ctermi_file)
504
+ if line.strip().lower().find("$version=") == -1:
505
+ return LEGACY_CNV_VERSION
506
+
507
+ all_cgnds = glob.glob(os.path.join(case_path, "cgnd*.dat"))
508
+ for cgnd_path in all_cgnds:
509
+ with open(cgnd_path, 'r') as cgnd_file:
510
+ line = next(cgnd_file)
511
+ if line.strip().lower().find("$version=") == -1:
512
+ return LEGACY_CNV_VERSION
513
+
514
+ return CURRENT_CNV_VERSION
515
+
516
+
517
+ def _tsl_filter_plants_with_coordinates(plant_list: List[psr.factory.DataObject]):
518
+ filtered = []
519
+ for plant in plant_list:
520
+ lat = plant.get("Latitude")
521
+ lon = plant.get("Longitude")
522
+ if lat and lon:
523
+ filtered.append(plant)
524
+ return filtered
525
+
526
+ def _tsl_get_renewable_plants_with_coordinates(study: psr.factory.Study, tech_type: int) -> List[psr.factory.DataObject]:
527
+ plant_list = study.find("RenewablePlant.*")
528
+ plant_list = [plant for plant in plant_list if plant.get("TechnologyType") == tech_type]
529
+ return _tsl_filter_plants_with_coordinates(plant_list)
530
+
531
+ def _tsl_get_csp_plants_with_coordinates(study: psr.factory.Study) -> List[psr.factory.DataObject]:
532
+ plant_list = study.find("CSP.*")
533
+ return _tsl_filter_plants_with_coordinates(plant_list)
534
+
535
+ def _tsl_create_csol_dat_file(case_path: Union[str, pathlib.Path], plant_list: List[psr.factory.DataObject]):
536
+ csol_dat_path = os.path.join(case_path, "csol.dat")
537
+ with open(csol_dat_path, 'w') as csol_dat_file:
538
+ csol_dat_file.write("ID,CODE,NAME,SYS,CLUSTER_ID,CLUSTER,POT_INST,LON,LAT,TRACKING,TILT,AZIMUTH,CFOBS_ID,PROFILE_TYPE,AC_DC_RATIO,SYSTEM_LOSSES,USE_AZIMUTH\n")
539
+ for plant in plant_list:
540
+ unique_id = "peteca"
541
+ capacity_profile = plant.get("RefCapacityProfile")
542
+ cluster_id = "" # FIXME
543
+ cluster = "" # FIXME
544
+ cfobs_id = "" if capacity_profile is None else capacity_profile.name
545
+ profile_type = "" if capacity_profile is None else capacity_profile.get("Type")
546
+ values = [
547
+ unique_id,
548
+ plant.code,
549
+ plant.name,
550
+ plant.get("RefSystem").id,
551
+ cluster_id,
552
+ cluster,
553
+ plant.get("InstalledCapacity"),
554
+ plant.get("Longitude"),
555
+ plant.get("Latitude"),
556
+ plant.get("Tracking"),
557
+ plant.get("Tilt"),
558
+ plant.get("Azimuth"),
559
+ cfobs_id,
560
+ profile_type,
561
+ plant.get("DCACRatio"),
562
+ plant.get("SystemLosses"),
563
+ plant.get("UseAzimuth")
564
+ ]
565
+ csol_dat_file.write(",".join(map(str, values)) + "\n")
566
+
567
+ def _tsl_create_ceol_dat_file(case_path: Union[str, pathlib.Path], plant_list: List[psr.factory.DataObject]):
568
+ ceol_dat_file = os.path.join(case_path, "ceol.dat")
569
+ with open(ceol_dat_file, 'w') as ceol_dat:
570
+ ceol_dat.write("ID,PLANT_CODE,PLANT_NAME,PLANT_SYS,STATION_CODE,STATION_NAME,PLANT_POT_INST,LON,LAT,PROFILE_CODE,PLANT_HEIGHT,PLANT_TURBINE_MODEL,PROFILE_TYPE,DOWNS_FLAG,DENS_FLAG,DENS_SITE_HEIGHT\n")
571
+ for plant in plant_list:
572
+ unique_id = "peteca"
573
+ system = plant.get("RefSystem")
574
+ turbine = plant.get("RefTurbine")
575
+ station = plant.get("RefStation")
576
+ capacity_profile = plant.get("RefCapacityProfile")
577
+
578
+ turbine_model = "" if turbine is None else turbine.name
579
+ profile_code = "" if capacity_profile is None else capacity_profile.name
580
+ profile_type = "" if capacity_profile is None else capacity_profile.get("Type")
581
+ values = [
582
+ unique_id,
583
+ plant.code,
584
+ plant.name,
585
+ system.id,
586
+ station.code,
587
+ station.name,
588
+ plant.get("InstalledCapacity"),
589
+ plant.get("Longitude"),
590
+ plant.get("Latitude"),
594
+ profile_code,
595
+ plant.get("Height"),
596
+ turbine_model,
597
+ profile_type,
598
+ plant.get("DownscalingFlag"),
599
+ plant.get("DensityCorrectionFlag"),
600
+ plant.get("DensityCorrection"),
601
+ ]
602
+ ceol_dat.write(",".join(map(str, values)) + "\n")
603
+
604
+ def _tsl_create_ccsp_dat_file(case_path: Union[str, pathlib.Path], plant_list: List[psr.factory.DataObject]):
605
+ ccsp_dat_file = os.path.join(case_path, "ccsp.dat")
606
+ with open(ccsp_dat_file, 'w') as ccsp_dat:
607
+ ccsp_dat.write("ID,CODE,NAME,SYS,CLUSTER_ID,CLUSTER,POT_INST,LON,LAT,SM,EFF,CFOBS_ID,PROFILE_TYPE\n")
608
+ for plant in plant_list:
609
+ unique_id = "peteca"
610
+ cluster_id = "" # FIXME
611
+ cluster = "" # FIXME
612
+ capacity_profile = plant.get("RefCapacityProfile")
613
+ cfobs_id = "" if capacity_profile is None else capacity_profile.name
614
+ profile_type = "" if capacity_profile is None else capacity_profile.get("Type")
615
+ values = [
616
+ unique_id,
617
+ plant.code,
618
+ plant.name,
619
+ plant.get("RefSystem").id,
620
+ cluster_id,
621
+ cluster,
622
+ plant.get("InstalledCapacity"),
623
+ plant.get("Longitude"),
624
+ plant.get("Latitude"),
625
+ plant.get("SM"),
626
+ plant.get("Efficiency"),
627
+ cfobs_id,
628
+ profile_type,
629
+ ]
630
+ ccsp_dat.write(",".join(map(str, values)) + "\n")
631
+
632
+
633
+ def run_rpsdata(tsl_path: Union[str, pathlib.Path], case_path: Union[str, pathlib.Path], file_name: str, base_type: str, **kwargs):
634
+ rps_parentpath = os.path.join(str(tsl_path), "Extensions","Script")
635
+ file_path = os.path.join(str(case_path), file_name)
636
+ with change_cwd(rps_parentpath):
637
+ cmd = f'RPSDataConsole.exe GET_POINTS "{file_path}" "{case_path}" {base_type}'
638
+ exec_cmd(cmd, **kwargs)
639
+
640
+ def run_tsldata(tsl_path: Union[str, pathlib.Path], case_path: Union[str, pathlib.Path], db_type: str, **kwargs):
641
+ tsldata_parentpath = os.path.join(str(tsl_path), "Extensions","tsldata-distribution")
642
+ with change_cwd(tsldata_parentpath):
643
+ cmd = f'TSLData.exe --path "{str(case_path)}" --{db_type}'
644
+ exec_cmd(cmd, **kwargs)
645
+
646
+ def run_tslconsole(tsl_path: Union[str, pathlib.Path], script_path: Union[str, pathlib.Path], **kwargs):
647
+ tsl_console = os.path.join(tsl_path, "Extensions", "TimeSeriesLab")
648
+ with change_cwd(tsl_console):
649
+ cmd = f'TimeSeriesConsole.exe "{str(script_path)}"'
650
+ exec_cmd(cmd, **kwargs)
651
+
652
+ def run_tsl_generate_inflow_from_external_natural(case_path: Union[str, pathlib.Path], tsl_path: Union[str, pathlib.Path], **kwargs):
653
+ commands = ["generate_inflow_from_external_natural"]
654
+ case_path = os.path.abspath(str(case_path))
655
+ tsl_path = str(tsl_path)
656
+ _run_tslconsole_command(tsl_path, case_path, commands, **kwargs)
657
+
658
+
659
+ def run_tsl_generate_inflow_from_external_incremental(case_path: Union[str, pathlib.Path], tsl_path: Union[str, pathlib.Path], **kwargs):
660
+ commands = ["generate_inflow_from_external_incremental"]
661
+ case_path = os.path.abspath(str(case_path))
662
+ tsl_path = str(tsl_path)
663
+ _run_tslconsole_command(tsl_path, case_path, commands, **kwargs)
664
+
665
+
666
+ def run_tsl(case_path: Union[str, pathlib.Path], tsl_path: Union[str, pathlib.Path], base_type: str, **kwargs):
667
+ if os.name != 'nt':
668
+ raise NotImplementedError("Running TimeSeriesLab is only available on Windows")
669
+ case_path = os.path.abspath(str(case_path))
670
+ tsl_path = str(tsl_path)
671
+ dry_run = kwargs.get("dry_run", False)
672
+ show_progress = kwargs.get("show_progress", False)
673
+
674
+ def _run_rpsdata(file_name):
675
+ run_rpsdata(tsl_path, case_path, file_name, base_type, **kwargs)
676
+
677
+ def _run_tsldata(db_type):
678
+ run_tsldata(tsl_path, case_path, db_type, **kwargs)
679
+
680
+ def _run_tslconsole(commands: List[str]):
681
+ _run_tslconsole_command(tsl_path, case_path, commands, **kwargs)
682
+
683
+ study = psr.factory.load_study(case_path, ["TSL"])
684
+
685
+ wind_list = _tsl_get_renewable_plants_with_coordinates(study, 1)
686
+ if len(wind_list) > 0:
687
+ _tsl_create_ceol_dat_file(case_path, wind_list)
688
+ _run_rpsdata("ceol.dat")
689
+ _run_tsldata("wind")
690
+
691
+ solar_list = _tsl_get_renewable_plants_with_coordinates(study, 2)
692
+ if len(solar_list) > 0:
693
+ _tsl_create_csol_dat_file(case_path, solar_list)
694
+ _run_rpsdata("csol.dat")
695
+ _run_tsldata("solar")
696
+
697
+ csp_list = _tsl_get_csp_plants_with_coordinates(study)
698
+ if len(csp_list) > 0:
699
+ _tsl_create_ccsp_dat_file(case_path, csp_list)
700
+ _run_rpsdata("ccsp.dat")
701
+ _run_tsldata("csp")
702
+
703
+ if len(solar_list) > 0 or len(csp_list) > 0:
704
+ _run_tsldata("solar-correction")
705
+
706
+ # todo: create cdlr.dat
707
+ _run_rpsdata("cdlr.dat")
708
+ _run_tsldata("dlr")
709
+
710
+ _run_tsldata("merge")
711
+
712
+ #todo: generate default script for parameters
713
+ _run_tslconsole([])
714
+
715
+ #todo: generate default script for scenarios
716
+ _run_tslconsole([])
717
+
718
+
719
+ def _run_tslconsole_command(tsl_path: Union[str, pathlib.Path], case_path: Union[str, pathlib.Path], commands: List[str], script_prefix: str = "", **kwargs):
720
+ tsl_console_path = os.path.join(tsl_path, "Extensions", "TimeSeriesLab")
721
+ delete_script = not _DEBUG
722
+ full_path = os.path.join(os.path.abspath(case_path), "")
723
+ with psr.psrfcommon.tempfile.CreateTempFile(
724
+ "./", script_prefix, "", ".dat", delete_script
725
+ ) as temp_script, change_cwd(tsl_console_path):
726
+ with open(temp_script.name, "w") as script_file:
727
+ script_file.write(f"SET,PATHDATA,{full_path}\n")
728
+ for command in commands:
729
+ script_file.write(f"RUN,{command}\n")
730
+ run_tslconsole(tsl_path, os.path.abspath(temp_script.name), **kwargs)
731
+
732
+
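For a hypothetical case folder C:\cases\example, the temporary console script written above would contain lines of the form:

    SET,PATHDATA,C:\cases\example\
    RUN,generate_inflow_from_external_natural
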
733
+ def run_tsl_generate_external_scenarios(case_path: Union[str, pathlib.Path], tsl_path: Union[str, pathlib.Path], option: str, **kwargs):
734
+ inflow_path = os.path.join(str(case_path), "inflow.dat")
735
+ option_command_map = {
736
+ "natural": "generate_inflow_from_external_natural",
737
+ "incremental": "generate_inflow_from_external_incremental",
738
+ }
739
+ if option not in option_command_map.keys():
740
+ raise ValueError(f"Invalid option. Should be one of {','.join(option_command_map.keys())}")
741
+
742
+ commands = [option_command_map[option]]
743
+ _run_tslconsole_command(tsl_path, case_path, commands, **kwargs)
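
Finally, a usage sketch for the external-scenario helper, with hypothetical paths:

    from psr.runner.runner import run_tsl_generate_external_scenarios

    run_tsl_generate_external_scenarios(
        r"C:\cases\example",          # case_path (hypothetical)
        r"C:\PSR\TimeSeriesLab",      # tsl_path (hypothetical)
        "natural",                    # or "incremental"
    )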