psr-factory 4.1.0b5__py3-none-manylinux_2_28_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
psr/runner/runner.py ADDED
@@ -0,0 +1,649 @@
+ # PSR Factory. Copyright (C) PSR, Inc - All Rights Reserved
+ # Unauthorized copying of this file, via any medium is strictly prohibited
+ # Proprietary and confidential
+
+ from contextlib import contextmanager
+ import glob
+ import os
+ import pathlib
+ import shutil
+ import socket
+ import subprocess
+ from types import ModuleType
+ from typing import Dict, List, Optional, Tuple, Union
+ import warnings
+
+
+ from psr.psrfcommon import change_cwd, exec_cmd
+ import psr.psrfcommon.tempfile
+ import psr.factory
+
+ # Check whether psutil module is available.
+ _HAS_PSUTIL: Optional[bool] = None
+
+ psutil: Optional[ModuleType] = None
+
+ _DEBUG: bool = True
+
+
+ def _has_psutil() -> bool:
+     """Check if psutil is available."""
+     global _HAS_PSUTIL
+     global psutil
+     if _HAS_PSUTIL is None:
+         try:
+             import psutil
+             _HAS_PSUTIL = True
+         except ImportError:
+             _HAS_PSUTIL = False
+     return _HAS_PSUTIL
+
+
+ if os.name == "nt":
+     __default_mpi_path = "C:\\Program Files\\MPICH2\\bin"
+ else:
+     __default_mpi_path = "/usr/bin"
+
+
+ def _get_semver_version(version: str) -> Tuple[int, int, Union[int, str], Optional[str]]:
+     def get_tag_from_part(part: str) -> Tuple[int, Optional[str]]:
+         to_try = ("beta", "rc")
+         for tag_name in to_try:
+             if tag_name in part.lower():
+                 tag_pos = part.lower().index(tag_name)
+                 part_value = int(part[:tag_pos])
+                 tag = part[tag_pos:]
+                 return part_value, tag
+         return int(part), None
+     parts = version.split(".")
+     major = int(parts[0])
+     tag = None
+     minor = 0
+     patch = 0
+     if len(parts) == 2:
+         minor, tag = get_tag_from_part(parts[1])
+         patch = 0
+     elif len(parts) == 3:
+         minor = int(parts[1])
+         patch, tag = get_tag_from_part(parts[2])
+
+     return major, minor, patch, tag
+
+
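For reference, a minimal sketch of how the version helper above behaves; the inputs are illustrative and the private import path simply mirrors the file layout shown in this diff:

    # Illustrative only: exercising the private helper defined above.
    from psr.runner.runner import _get_semver_version

    assert _get_semver_version("17.3") == (17, 3, 0, None)        # major.minor form
    assert _get_semver_version("17.3.1rc2") == (17, 3, 1, "rc2")  # trailing rc tag is split off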
+ def _get_available_cpu() -> int:
+     if not _has_psutil():
+         raise ImportError("psutil module is required to get available CPU count")
+     return psutil.cpu_count()
+
+
+ def _get_host_name() -> str:
+     return socket.gethostname().upper()
+
+
+ def _get_nproc(specified: int, available: int) -> int:
+     if available > specified:
+         return specified
+     elif available < specified:
+         warnings.warn(f"Specified number of threads ({specified}) is greater than available ({available})")
+         return available
+     else:
+         return available
+
+ def _write_mpi_settings(mpi_file_path: Union[str, pathlib.Path], cluster_settings: Optional[Union[int, bool, Dict[str, int]]]):
+     if cluster_settings is not None:
+         mpi_file_path = str(mpi_file_path)
+         available_cpu = _get_available_cpu()
+         # Note: bool must be tested before int because bool is a subclass of int.
+         if isinstance(cluster_settings, bool):
+             # Rewrite with default settings.
+             if cluster_settings:
+                 computer_name = _get_host_name()
+                 nproc = available_cpu
+                 cluster_settings = {computer_name: nproc}
+             else:
+                 cluster_settings = None
+         elif isinstance(cluster_settings, int):
+             computer_name = _get_host_name()
+             specified_cpu_number = cluster_settings
+             nproc = _get_nproc(specified_cpu_number, available_cpu)
+             cluster_settings = {computer_name: nproc}
+         elif isinstance(cluster_settings, dict):
+             pass
+         else:
+             raise ValueError("Invalid cluster settings type")
+     else:
+         computer_name = socket.gethostname()
+         nproc = _get_available_cpu()
+         cluster_settings = {computer_name: nproc}
+
+     if isinstance(cluster_settings, dict):
+         with open(mpi_file_path, 'w') as f:
+             for computer, nproc in cluster_settings.items():
+                 f.write(f"{computer}:{nproc}\n")
+
+
+
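For reference, a short sketch of the mpd.hosts contents the writer above produces; host names and process counts are hypothetical:

    # cluster_settings may be an int (processes on this machine), a bool, or a
    # {host: nproc} mapping; a dict is written out one "host:nproc" line per entry.
    _write_mpi_settings("mpd.hosts", {"NODE01": 8, "NODE02": 4})
    # resulting mpd.hosts:
    #   NODE01:8
    #   NODE02:4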
+ def run_sddp(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     case_path = os.path.abspath(str(case_path))
+     sddp_path = str(sddp_path)
+     parallel_run = kwargs.get("parallel_run", True)
+     cluster_settings: Optional[Union[int, bool, Dict[str, int]]] = kwargs.get("cluster_settings", False)
+     dry_run = kwargs.get("dry_run", False)
+     show_progress = kwargs.get("show_progress", False)
+     extra_args = " ".join(kwargs.get("extra_args", ()))
+     exec_mode = kwargs.get("_mode", None)
+     mpi_path = kwargs.get("mpi_path", __default_mpi_path)
+
+     if os.name == 'nt':
+         sddp_path_full = os.path.join(sddp_path, "Oper")
+     else:
+         sddp_path_full = sddp_path
+     # Append last / if missing.
+     case_path = os.path.join(os.path.abspath(case_path), "")
+
+     mode_arg = exec_mode if exec_mode is not None else ""
+     # Disable parallel run in check mode.
+     parallel_run = parallel_run if exec_mode is None else False
+
+     major, minor, patch, tag = _get_semver_version(get_sddp_version(sddp_path))
+
+     with change_cwd(sddp_path_full):
+         # Write MPI settings if required.
+         if parallel_run and cluster_settings is not None:
+             mpi_file_path = os.path.join(sddp_path_full, "mpd.hosts")
+             _write_mpi_settings(mpi_file_path, cluster_settings)
+
+         if parallel_run:
+             if os.name == 'nt':
+                 cmd = f'sddpar.exe --path="{sddp_path_full}" --mpipath="{mpi_path}" --pathdata="{case_path}" {extra_args}'
+             else:
+                 # 17.3 and earlier use one style of arguments, newer versions use another.
+                 if (major == 17 and minor <= 3) or major < 17:
+                     cmd = f'./sddpar --path="{case_path}" --mpipath="{mpi_path}" --habilitarhidra=1 {extra_args}'
+                 else:
+                     cmd = f'./sddpar --path="{sddp_path}" --mpipath="{mpi_path}" --habilitarhidra=1 --pathdata="{case_path}" {extra_args}'
+         else:
+             if os.name == 'nt':
+                 cmd = f'sddp.exe {mode_arg} -path "{case_path}" {extra_args}'
+             else:
+                 cmd = f'./sddp {mode_arg} -path "{case_path}" {extra_args}'
+
+         if os.name != "nt":
+             os.environ["LD_LIBRARY_PATH"] = os.path.abspath(sddp_path_full)
+         exec_cmd(cmd, **kwargs)
+
+
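For reference, a minimal usage sketch of the runner above; the install and case paths are hypothetical, and the keyword arguments shown are the ones the code reads from kwargs:

    # Illustrative only.
    from psr.runner.runner import run_sddp

    run_sddp(
        r"C:\cases\example_case",   # case_path (hypothetical)
        r"C:\PSR\Sddp17.3",         # sddp_path (hypothetical)
        parallel_run=True,
        cluster_settings=4,         # run with 4 processes on this machine
        extra_args=(),              # extra command-line arguments, if any
    )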
+ def run_sddp_check(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     kwargs["_mode"] = "check"
+     run_sddp(case_path, sddp_path, **kwargs)
+
+
+ def run_sddp_cleanup(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     kwargs["_mode"] = "clean"
+     run_sddp(case_path, sddp_path, **kwargs)
+
+
+ def run_sddp_convert_fcf(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     kwargs["_mode"] = "printfcf"
+     # TODO: the generated file uses \t as separator, has an empty column, and its name depends on the study stage type.
+     run_sddp(case_path, sddp_path, **kwargs)
+
+
+ def get_sddp_version(sddp_path: Union[str, pathlib.Path]) -> str:
+     sddp_path = str(sddp_path)
+     if os.name == 'nt':
+         sddp_path_full = os.path.join(sddp_path, "Oper")
+     else:
+         sddp_path_full = sddp_path
+     if os.name == 'nt':
+         command = [os.path.join(sddp_path_full, "sddp.exe"), "ver"]
+     else:
+         command = [os.path.join(sddp_path_full, "sddp"), "ver"]
+
+     if os.name != "nt":
+         os.environ["LD_LIBRARY_PATH"] = os.path.abspath(sddp_path_full)
+     sub = subprocess.run(command, stdout=subprocess.PIPE, check=False)
+     output = sub.stdout.decode("utf-8").strip()
+     return output.split()[2]
+
+
+ def run_ncp(case_path: Union[str, pathlib.Path], ncp_path: Union[str, pathlib.Path], **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running NCP is only available on Windows")
+     case_path = os.path.abspath(str(case_path))
+     ncp_path = str(ncp_path)
+     dry_run = kwargs.get("dry_run", False)
+     show_progress = kwargs.get("show_progress", False)
+
+     cnv_version = _ncp_determine_cnv_version(case_path)
+
+     print("NCP cnv version is", cnv_version)
+
+     ncp_path_full = os.path.join(ncp_path, "Oper")
+     cnv_path_full = os.path.join(ncp_path, "Cnv", cnv_version)
+
+     # Append last / if missing.
+     case_path = os.path.join(os.path.abspath(case_path), "")
+
+     # Stash an existing coes.dat as coes.tmp while csvcnv and sddprede run, then restore it.
+     coes_tmp_file_path = os.path.join(case_path, "coes.tmp")
+     coes_dat_file_path = os.path.join(case_path, "coes.dat")
+
+     with change_cwd(cnv_path_full):
+         if os.path.exists(coes_dat_file_path):
+             shutil.move(coes_dat_file_path, coes_tmp_file_path)
+         exec_cmd(f"csvcnv csv -path {case_path}")
+
+     with change_cwd(ncp_path_full):
+         exec_cmd(f"sddprede -path {case_path}")
+         if os.path.exists(coes_tmp_file_path):
+             shutil.move(coes_tmp_file_path, coes_dat_file_path)
+
+         exec_cmd(f"cpplus -path {case_path}")
+
+         executed_successfully = os.path.exists(os.path.join(case_path, 'cpplus.ok'))
+         if executed_successfully:
+             if os.path.exists("post-run.bat"):
+                 exec_cmd(f'post-run.bat "{case_path}"')
+
+
+ def run_optgen(case_path: Union[str, pathlib.Path], optgen_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     case_path = os.path.abspath(str(case_path)).replace("\\", "/") + "/"
+     optgen_path = str(optgen_path)
+     sddp_path = str(sddp_path)
+     sddp_full_path = os.path.join(sddp_path, "Oper")
+     optgen_path = os.path.join(optgen_path, "Model")
+     exec_mode = kwargs.get("_mode", None)
+
+     mode_arg = exec_mode if exec_mode is not None else ""
+
+     with change_cwd(optgen_path):
+         cmd = f'optgen {mode_arg} -optgdat="{case_path}" -sddpexe="{sddp_full_path}" -sddpmpi="{__default_mpi_path}"'
+         exec_cmd(cmd, **kwargs)
+
+
+ def run_optgen_check(case_path: Union[str, pathlib.Path], optgen_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     kwargs["_mode"] = "check"
+     run_optgen(case_path, optgen_path, sddp_path, **kwargs)
+
+
+ def run_optgen_cleanup(case_path: Union[str, pathlib.Path], optgen_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     kwargs["_mode"] = "clean"
+     run_optgen(case_path, optgen_path, sddp_path, **kwargs)
+
+
+ def run_psrio(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running PSRIO is only available on Windows")
+     case_path = str(case_path)
+     sddp_path = str(sddp_path)
+     recipe_script = kwargs.get('r', kwargs.get('recipes', False))
+     output_path = kwargs.get('o', kwargs.get('output', False))
+
+     log_verbose = kwargs.get('v', kwargs.get('verbose', "0"))
+     study_model = kwargs.get('model', "sddp")
+     load_file_format = kwargs.get('load_format', "both")
+
+     load_from_output_path = kwargs.get('load_from_output_path', False)
+     save_only_in_csv = kwargs.get('csv', False)
+
+     psrio_path_full = os.path.join(sddp_path, "Oper\\psrio\\")
+
+     with change_cwd(psrio_path_full):
+
+         cmd = psrio_path_full + 'psrio.exe --model ' + study_model + ' --load_format ' + load_file_format
+
+         if recipe_script:
+             cmd += ' -v ' + log_verbose
+
+         if load_from_output_path:
+             cmd += ' load_from_output_path'
+         if save_only_in_csv:
+             cmd += ' save_only_in_csv'
+
+         if output_path:
+             cmd += f' -o "{output_path}"'
+         if recipe_script:
+             cmd += f' -r "{recipe_script}"'
+
+         cmd += f' "{case_path}"'
+         exec_cmd(cmd, **kwargs)
+
+
+ def run_nwsddp(input_case_path: Union[str, pathlib.Path], output_case_path: Union[str, pathlib.Path], nwsddp_app_path: Union[str, pathlib.Path], mdc_file_path: Optional[Union[str, pathlib.Path]] = None, **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running NWSDDP is only available on Windows")
+
+     input_case_path = os.path.abspath(str(input_case_path)).rstrip("\\")
+     output_case_path = os.path.abspath(str(output_case_path)).rstrip("\\")
+     nwsddp_app_path = str(nwsddp_app_path)
+     # Keep None as None: str(None) would defeat the checks below.
+     mdc_file_path = str(mdc_file_path) if mdc_file_path is not None else None
+
+     if mdc_file_path is not None:
+         mdc_file_path = os.path.abspath(mdc_file_path)
+     nwsddp_path_full = os.path.join(nwsddp_app_path, "bin", "")
+
+     with change_cwd(nwsddp_path_full):
+         extra_args = ""
+         if mdc_file_path is not None:
+             extra_args = "-MDC "
+             # Copy the mdc file to the case directory.
+             output_mdc_path = os.path.join(input_case_path, "nwsddp.mdc")
+             # Only copy if the source and destination paths differ.
+             if mdc_file_path.lower().strip() != output_mdc_path.lower().strip():
+                 shutil.copy(mdc_file_path, output_mdc_path)
+         case_args = f"-NW:\"{os.path.join(input_case_path, '')}\" -SP:\"{os.path.join(output_case_path, '')}\""
+         cmd1 = 'nwsddp.exe ' + extra_args + case_args
+         return_code = exec_cmd(cmd1, **kwargs)
+         if return_code == 0:
+             cmd2_args = ["nwpatch.exe", "-nw", f"{input_case_path}", "-sp", f"{output_case_path}"]
+             exec_cmd(cmd2_args, **kwargs)
+
+
+
+ __hydro_estimation_path_contents = """ ------- PATH ---------------------------
+ Directorio Datos {path}
+ Directorio Hidro {path}
+ """
+
+
+ def run_hydro_estimation(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running hydro estimation is only available on Windows")
+     case_path = os.path.abspath(str(case_path))
+     sddp_path = str(sddp_path)
+     estima_path = os.path.join(sddp_path, "Hidro")
+     estima_files = [
+         os.path.join(estima_path, "estima.exe"),
+         os.path.join(estima_path, "estimaen.fmt"),
+         os.path.join(estima_path, "estimaes.fmt"),
+         os.path.join(estima_path, "estimapo.fmt"),
+     ]
+
+     path_file = os.path.join(case_path, "path.dat")
+     path_file_contents = __hydro_estimation_path_contents.format(path=case_path)
+     # A temporary path.dat and copies of the estima files exist only for the duration of the run.
+     with change_cwd(case_path), __temporary_copy_of_files(case_path, *estima_files), \
+             __temporary_file(path_file, path_file_contents):
+         exec_cmd("estima", **kwargs)
+
+
+ def run_graph(case_path: Union[str, pathlib.Path], graph_path: Union[str, pathlib.Path], **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running graph tool is only available on Windows")
+     case_path = os.path.abspath(str(case_path))
+     graph_base_path = os.path.abspath(str(graph_path))
+     graph_abs_path = os.path.join(graph_base_path, "PSRGraphInterface.exe")
+
+     with change_cwd(case_path):
+         exec_cmd(graph_abs_path, **kwargs)
+
+
+ def run_psrcloud(psrcloud_path: Union[str, pathlib.Path], **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running PSRCloud Desktop tool is only available on Windows")
+     psrcloud_base_path = os.path.abspath(str(psrcloud_path))
+
+     with change_cwd(psrcloud_base_path):
+         exec_cmd("PSRCloud.exe", **kwargs)
+
+
+ @contextmanager
+ def __temporary_copy_of_files(target_dir: str, *files: str):
+     for file in files:
+         shutil.copy(file, target_dir)
+     try:
+         yield
+     finally:
+         for file in files:
+             os.remove(os.path.join(target_dir, os.path.basename(file)))
+
+
+ @contextmanager
+ def __temporary_file(file_path: Union[str, pathlib.Path], content: str):
+     with open(file_path, 'w') as file:
+         file.write(content)
+     try:
+         yield
+     finally:
+         os.remove(file_path)
+
+
+ def _ncp_determine_cnv_version(case_path: Union[str, pathlib.Path]) -> str:
+     CURRENT_CNV_VERSION = "V14"
+     LEGACY_CNV_VERSION = "V12"
+
+     csumcirc_path = os.path.join(case_path, "csumcirc.dat")
+     if os.path.exists(csumcirc_path):
+         with open(csumcirc_path, 'r') as csumcirc_file:
+             line = next(csumcirc_file)
+             if line.strip().lower().find("$version") == -1:
+                 return LEGACY_CNV_VERSION
+
+     all_ctermis = glob.glob(os.path.join(case_path, "ctermi*.dat"))
+     for ctermi_path in all_ctermis:
+         with open(ctermi_path, 'r') as ctermi_file:
+             line = next(ctermi_file)
+             if line.strip().lower().find("$version=") == -1:
+                 return LEGACY_CNV_VERSION
+
+     all_cgnds = glob.glob(os.path.join(case_path, "cgnd*.dat"))
+     for cgnd_path in all_cgnds:
+         with open(cgnd_path, 'r') as cgnd_file:
+             line = next(cgnd_file)
+             if line.strip().lower().find("$version=") == -1:
+                 return LEGACY_CNV_VERSION
+
+     return CURRENT_CNV_VERSION
+
+
+ def _tsl_filter_plants_with_coordinates(plant_list: List[psr.factory.DataObject]):
+     filtered = []
+     for plant in plant_list:
+         lat = plant.get("Latitude")
+         lon = plant.get("Longitude")
+         # Keep only plants that have both coordinates set.
+         if lat and lon:
+             filtered.append(plant)
+     return filtered
+
+ def _tsl_get_renewable_plants_with_coordinates(study: psr.factory.Study, tech_type: int) -> List[psr.factory.DataObject]:
+     plant_list = study.find("RenewablePlant.*")
+     plant_list = [plant for plant in plant_list if plant.get("TechnologyType") == tech_type]
+     return _tsl_filter_plants_with_coordinates(plant_list)
+
+ def _tsl_get_csp_plants_with_coordinates(study: psr.factory.Study) -> List[psr.factory.DataObject]:
+     plant_list = study.find("CSP.*")
+     return _tsl_filter_plants_with_coordinates(plant_list)
+
+ def _tsl_create_csol_dat_file(case_path: Union[str, pathlib.Path], plant_list: List[psr.factory.DataObject]):
+     csol_dat_path = os.path.join(case_path, "csol.dat")
+     with open(csol_dat_path, 'w') as csol_dat_file:
+         csol_dat_file.write("ID,CODE,NAME,SYS,CLUSTER_ID,CLUSTER,POT_INST,LON,LAT,TRACKING,TILT,AZIMUTH,CFOBS_ID,PROFILE_TYPE,AC_DC_RATIO,SYSTEM_LOSSES,USE_AZIMUTH\n")
+         for plant in plant_list:
+             unique_id = "peteca"
+             capacity_profile = plant.get("RefCapacityProfile")
+             cluster_id = ""  # FIXME
+             cluster = ""  # FIXME
+             cfobs_id = "" if capacity_profile is None else capacity_profile.name
+             profile_type = "" if capacity_profile is None else capacity_profile.get("Type")
+             values = [
+                 unique_id,
+                 plant.code,
+                 plant.name,
+                 plant.get("RefSystem").id,
+                 cluster_id,
+                 cluster,
+                 plant.get("InstalledCapacity"),
+                 plant.get("Longitude"),
+                 plant.get("Latitude"),
+                 plant.get("Tracking"),
+                 plant.get("Tilt"),
+                 plant.get("Azimuth"),
+                 cfobs_id,
+                 profile_type,
+                 plant.get("DCACRatio"),
+                 plant.get("SystemLosses"),
+                 plant.get("UseAzimuth")
+             ]
+             csol_dat_file.write(",".join(map(str, values)) + "\n")
+
+ def _tsl_create_ceol_dat_file(case_path: Union[str, pathlib.Path], plant_list: List[psr.factory.DataObject]):
+     ceol_dat_file = os.path.join(case_path, "ceol.dat")
+     with open(ceol_dat_file, 'w') as ceol_dat:
+         ceol_dat.write("ID,PLANT_CODE,PLANT_NAME,PLANT_SYS,STATION_CODE,STATION_NAME,PLANT_POT_INST,LON,LAT,PROFILE_CODE,PLANT_HEIGHT,PLANT_TURBINE_MODEL,PROFILE_TYPE,DOWNS_FLAG,DENS_FLAG,DENS_SITE_HEIGHT\n")
+         for plant in plant_list:
+             unique_id = "peteca"
+             system = plant.get("RefSystem")
+             turbine = plant.get("RefTurbine")
+             station = plant.get("RefStation")
+             capacity_profile = plant.get("RefCapacityProfile")
+
+             turbine_model = "" if turbine is None else turbine.name
+             profile_code = "" if capacity_profile is None else capacity_profile.name
+             profile_type = "" if capacity_profile is None else capacity_profile.get("Type")
+             # Values follow the header column order above.
+             values = [
+                 unique_id,
+                 plant.code,
+                 plant.name,
+                 system.id,
+                 station.code,
+                 station.name,
+                 plant.get("InstalledCapacity"),
+                 plant.get("Longitude"),
+                 plant.get("Latitude"),
+                 profile_code,
+                 plant.get("Height"),
+                 turbine_model,
+                 profile_type,
+                 plant.get("DownscalingFlag"),
+                 plant.get("DensityCorrectionFlag"),
+                 plant.get("DensityCorrection"),
+             ]
+             ceol_dat.write(",".join(map(str, values)) + "\n")
+
+ def _tsl_create_ccsp_dat_file(case_path: Union[str, pathlib.Path], plant_list: List[psr.factory.DataObject]):
+     ccsp_dat_file = os.path.join(case_path, "ccsp.dat")
+     with open(ccsp_dat_file, 'w') as ccsp_dat:
+         ccsp_dat.write("ID,CODE,NAME,SYS,CLUSTER_ID,CLUSTER,POT_INST,LON,LAT,SM,EFF,CFOBS_ID,PROFILE_TYPE\n")
+         for plant in plant_list:
+             unique_id = "peteca"
+             cluster_id = ""  # FIXME
+             cluster = ""  # FIXME
+             capacity_profile = plant.get("RefCapacityProfile")
+             cfobs_id = "" if capacity_profile is None else capacity_profile.name
+             profile_type = "" if capacity_profile is None else capacity_profile.get("Type")
+             values = [
+                 unique_id,
+                 plant.code,
+                 plant.name,
+                 plant.get("RefSystem").id,
+                 cluster_id,
+                 cluster,
+                 plant.get("InstalledCapacity"),
+                 plant.get("Longitude"),
+                 plant.get("Latitude"),
+                 plant.get("SM"),
+                 plant.get("Efficiency"),
+                 cfobs_id,
+                 profile_type,
+             ]
+             ccsp_dat.write(",".join(map(str, values)) + "\n")
+
+
+ def run_rpsdata(tsl_path: Union[str, pathlib.Path], case_path: Union[str, pathlib.Path], file_name: str, base_type: str, **kwargs):
+     rps_parentpath = os.path.join(str(tsl_path), "Extensions", "Script")
+     file_path = os.path.join(str(case_path), file_name)
+     with change_cwd(rps_parentpath):
+         cmd = f'RPSDataConsole.exe GET_POINTS "{file_path}" "{case_path}" {base_type}'
+         exec_cmd(cmd, **kwargs)
+
+ def run_tsldata(tsl_path: Union[str, pathlib.Path], case_path: Union[str, pathlib.Path], db_type: str, **kwargs):
+     tsldata_parentpath = os.path.join(str(tsl_path), "Extensions", "tsldata-distribution")
+     with change_cwd(tsldata_parentpath):
+         cmd = f'TSLData.exe --path "{str(case_path)}" --{db_type}'
+         exec_cmd(cmd, **kwargs)
+
+ def run_tslconsole(tsl_path: Union[str, pathlib.Path], script_path: Union[str, pathlib.Path], **kwargs):
+     tsl_console = os.path.join(tsl_path, "Extensions", "TimeSeriesLab")
+     with change_cwd(tsl_console):
+         cmd = f'TimeSeriesConsole.exe "{str(script_path)}"'
+         exec_cmd(cmd, **kwargs)
+
+ def run_tsl(case_path: Union[str, pathlib.Path], tsl_path: Union[str, pathlib.Path], base_type: str, **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running TimeSeriesLab is only available on Windows")
+     case_path = os.path.abspath(str(case_path))
+     tsl_path = str(tsl_path)
+     dry_run = kwargs.get("dry_run", False)
+     show_progress = kwargs.get("show_progress", False)
+
+     def _run_rpsdata(file_name):
+         run_rpsdata(tsl_path, case_path, file_name, base_type, **kwargs)
+
+     def _run_tsldata(db_type):
+         run_tsldata(tsl_path, case_path, db_type, **kwargs)
+
+     def _run_tslconsole(commands: List[str]):
+         _run_tslconsole_command(tsl_path, case_path, commands, **kwargs)
+
+     study = psr.factory.load_study(case_path, ["TSL"])
+
+     wind_list = _tsl_get_renewable_plants_with_coordinates(study, 1)
+     if len(wind_list) > 0:
+         _tsl_create_ceol_dat_file(case_path, wind_list)
+         _run_rpsdata("ceol.dat")
+         _run_tsldata("wind")
+
+     solar_list = _tsl_get_renewable_plants_with_coordinates(study, 2)
+     if len(solar_list) > 0:
+         _tsl_create_csol_dat_file(case_path, solar_list)
+         _run_rpsdata("csol.dat")
+         _run_tsldata("solar")
+
+     csp_list = _tsl_get_csp_plants_with_coordinates(study)
+     if len(csp_list) > 0:
+         _tsl_create_ccsp_dat_file(case_path, csp_list)
+         _run_rpsdata("ccsp.dat")
+         _run_tsldata("csp")
+
+     if len(solar_list) > 0 or len(csp_list) > 0:
+         _run_tsldata("solar-correction")
+
+     # TODO: create cdlr.dat
+     _run_rpsdata("cdlr.dat")
+     _run_tsldata("dlr")
+
+     _run_tsldata("merge")
+
+     # TODO: generate default script for parameters
+     _run_tslconsole([])
+
+     # TODO: generate default script for scenarios
+     _run_tslconsole([])
+
+
+ def _run_tslconsole_command(tsl_path: Union[str, pathlib.Path], case_path: Union[str, pathlib.Path], commands: List[str], script_prefix: str = "", **kwargs):
+     tsl_console_path = os.path.join(tsl_path, "Extensions", "TimeSeriesLab")
+     delete_xml = not _DEBUG
+     full_path = os.path.join(os.path.abspath(case_path), "")
+     with psr.psrfcommon.tempfile.CreateTempFile(
+             "./", script_prefix, "", ".dat", delete_xml
+     ) as script_file, change_cwd(tsl_console_path):
+         with open(script_file.name, "w") as script:
+             script.write(f"SET,PATHDATA,{full_path}\n")
+             for command in commands:
+                 script.write(f"RUN,{command}\n")
+         run_tslconsole(tsl_path, os.path.abspath(script_file.name), **kwargs)
+
+
+ def run_tsl_generate_external_scenarios(case_path: Union[str, pathlib.Path], tsl_path: Union[str, pathlib.Path], option: str, **kwargs):
+     inflow_path = os.path.join(str(case_path), "inflow.dat")
+     option_command_map = {
+         "natural": "generate_inflow_from_external_natural",
+         "incremental": "generate_inflow_from_external_incremental",
+     }
+     if option not in option_command_map:
+         raise ValueError(f"Invalid option. Should be one of {','.join(option_command_map.keys())}")
+
+     commands = [option_command_map[option]]
+     _run_tslconsole_command(tsl_path, case_path, commands, **kwargs)
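For reference, a minimal sketch of how the PSRIO and TimeSeriesLab entry points above might be invoked; all paths and the base_type value are hypothetical placeholders, while the keyword names match the kwargs read in the code:

    # Illustrative only (Windows-only entry points, as enforced above).
    from psr.runner.runner import run_psrio, run_tsl

    run_psrio(
        r"C:\cases\example_case",                   # case_path (hypothetical)
        r"C:\PSR\Sddp17.3",                         # sddp_path (hypothetical)
        recipes=r"C:\recipes\main.lua",             # forwarded as -r
        output=r"C:\cases\example_case\psrio_out",  # forwarded as -o
    )

    run_tsl(
        r"C:\cases\example_case",   # case_path (hypothetical)
        r"C:\PSR\TimeSeriesLab",    # tsl_path (hypothetical)
        base_type="some_base_type", # placeholder passed through to RPSDataConsole
    )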
psr/runner/version.py ADDED
@@ -0,0 +1,5 @@
+ # PSR Factory. Copyright (C) PSR, Inc - All Rights Reserved
+ # Unauthorized copying of this file, via any medium is strictly prohibited
+ # Proprietary and confidential
+
+ __version__ = "1.0.1"
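For reference, the version recorded above can be read back at runtime; the module path follows the file layout in this diff:

    from psr.runner.version import __version__
    print(__version__)   # "1.0.1"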