psr_factory-4.0.27-py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
psr/runner/runner.py ADDED
@@ -0,0 +1,629 @@
+ # PSR Factory. Copyright (C) PSR, Inc - All Rights Reserved
+ # Unauthorized copying of this file, via any medium is strictly prohibited
+ # Proprietary and confidential
+
+ from contextlib import contextmanager
+ import glob
+ import os
+ import pathlib
+ import shutil
+ import socket
+ import subprocess
+ from types import ModuleType
+ from typing import Dict, List, Optional, Tuple, Union
+ import warnings
+
+ from psr.psrfcommon import change_cwd, exec_cmd
+ import psr.psrfcommon.tempfile
+ import psr.factory
+
+ # Check whether the psutil module is available.
+ _HAS_PSUTIL: Optional[bool] = None
+
+ psutil: Optional[ModuleType] = None
+
+ _DEBUG: bool = True
+
+
+ def _has_psutil() -> bool:
+     """Check if psutil is available."""
+     global _HAS_PSUTIL
+     global psutil
+     if _HAS_PSUTIL is None:
+         try:
+             import psutil
+             _HAS_PSUTIL = True
+         except ImportError:
+             _HAS_PSUTIL = False
+     return _HAS_PSUTIL
+
+
+ if os.name == 'nt':
+     __default_mpi_path = "C:\\Program Files\\MPICH2\\bin"
+ else:
+     __default_mpi_path = "/usr/bin"
+
+
+ def _get_semver_version(version: str) -> Tuple[int, int, Union[int, str], Optional[str]]:
+     def get_tag_from_part(part: str) -> Tuple[int, Optional[str]]:
+         to_try = ("beta", "rc")
+         for tag_name in to_try:
+             if tag_name in part.lower():
+                 tag_pos = part.lower().index(tag_name)
+                 part_value = int(part[:tag_pos])
+                 tag = part[tag_pos:]
+                 return part_value, tag
+         return int(part), None
+     parts = version.split(".")
+     major = int(parts[0])
+     tag = None
+     minor = 0
+     patch = 0
+     if len(parts) == 2:
+         minor, tag = get_tag_from_part(parts[1])
+         patch = 0
+     elif len(parts) == 3:
+         minor = int(parts[1])
+         patch, tag = get_tag_from_part(parts[2])
+
+     return major, minor, patch, tag
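+
+ # Examples (illustrative): _get_semver_version("17.3") returns (17, 3, 0, None),
+ # "17.3.1" returns (17, 3, 1, None) and "17.3rc1" returns (17, 3, 0, "rc1").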
+
+
+ def _get_available_cpu() -> int:
+     if not _has_psutil():
+         raise ImportError("psutil module is required to get available CPU count")
+     return psutil.cpu_count()
+
+
+ def _get_host_name() -> str:
+     return socket.gethostname().upper()
+
+
+ def _get_nproc(specified: int, available: int) -> int:
+     if available > specified:
+         return specified
+     elif available < specified:
+         warnings.warn(f"Specified number of threads ({specified}) is greater than available ({available})")
+         return available
+     else:
+         return available
+
+ def _write_mpi_settings(mpi_file_path: Union[str, pathlib.Path], cluster_settings: Optional[Union[int, bool, Dict[str, int]]]):
+     if cluster_settings is not None:
+         mpi_file_path = str(mpi_file_path)
+         available_cpu = _get_available_cpu()
+         # bool is checked before int because bool is a subclass of int in Python.
+         if isinstance(cluster_settings, bool):
+             # Rewrite with default settings.
+             if cluster_settings:
+                 computer_name = _get_host_name()
+                 nproc = available_cpu
+                 cluster_settings = {computer_name: nproc}
+             else:
+                 cluster_settings = None
+         elif isinstance(cluster_settings, int):
+             computer_name = _get_host_name()
+             specified_cpu_number = cluster_settings
+             nproc = _get_nproc(specified_cpu_number, available_cpu)
+             cluster_settings = {computer_name: nproc}
+         elif isinstance(cluster_settings, dict):
+             pass
+         else:
+             raise ValueError("Invalid cluster settings type")
+     else:
+         computer_name = socket.gethostname()
+         nproc = _get_available_cpu()
+         cluster_settings = {computer_name: nproc}
+
+     if isinstance(cluster_settings, dict):
+         with open(mpi_file_path, 'w') as f:
+             for computer, nproc in cluster_settings.items():
+                 f.write(f"{computer}:{nproc}\n")
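+
+ # Illustrative result (hypothetical host names): cluster_settings={"NODE01": 8, "NODE02": 4}
+ # writes two lines to mpd.hosts:
+ #   NODE01:8
+ #   NODE02:4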
+
+
+ def run_sddp(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     case_path = os.path.abspath(str(case_path))
+     sddp_path = str(sddp_path)
+     parallel_run = kwargs.get("parallel_run", True)
+     cluster_settings: Optional[Union[int, bool, Dict[str, int]]] = kwargs.get("cluster_settings", False)
+     dry_run = kwargs.get("dry_run", False)
+     show_progress = kwargs.get("show_progress", False)
+     extra_args = " ".join(kwargs.get("extra_args", ()))
+     exec_mode = kwargs.get("_mode", None)
+     mpi_path = kwargs.get("mpi_path", __default_mpi_path)
+
+     if os.name == 'nt':
+         sddp_path_full = os.path.join(sddp_path, "Oper")
+     else:
+         sddp_path_full = sddp_path
+     # Append trailing separator if missing.
+     case_path = os.path.join(os.path.abspath(case_path), "")
+
+     mode_arg = exec_mode if exec_mode is not None else ""
+     # Disable parallel run in check mode.
+     parallel_run = parallel_run if exec_mode is None else False
+
+     major, minor, patch, tag = _get_semver_version(get_sddp_version(sddp_path))
+
+     with change_cwd(sddp_path_full):
+         # Write MPI settings if required.
+         if parallel_run and cluster_settings is not None:
+             mpi_file_path = os.path.join(sddp_path_full, "mpd.hosts")
+             _write_mpi_settings(mpi_file_path, cluster_settings)
+
+         if parallel_run:
+             if os.name == 'nt':
+                 cmd = f'sddpar.exe --path="{sddp_path_full}" --mpipath="{mpi_path}" --pathdata="{case_path}" {extra_args}'
+             else:
+                 # SDDP 17.3 and earlier use one set of arguments; newer versions use another.
+                 if (major == 17 and minor <= 3) or major < 17:
+                     cmd = f'./sddpar --path="{case_path}" --mpipath="{mpi_path}" --habilitarhidra=1 {extra_args}'
+                 else:
+                     cmd = f'./sddpar --path="{sddp_path}" --mpipath="{mpi_path}" --habilitarhidra=1 --pathdata="{case_path}" {extra_args}'
+         else:
+             if os.name == 'nt':
+                 cmd = f'sddp.exe {mode_arg} -path "{case_path}" {extra_args}'
+             else:
+                 cmd = f'./sddp {mode_arg} -path "{case_path}" {extra_args}'
+
+         if os.name != "nt":
+             os.environ["LD_LIBRARY_PATH"] = os.path.abspath(sddp_path_full)
+         exec_cmd(cmd, **kwargs)
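+
+ # Usage sketch (illustrative, hypothetical paths):
+ #   run_sddp("C:/cases/study01", "C:/PSR/Sddp17", parallel_run=True, cluster_settings=4)
+ # writes mpd.hosts with up to 4 local processes and launches the parallel dispatcher
+ # (sddpar) from the SDDP installation directory.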
+
+
+ def run_sddp_check(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     kwargs["_mode"] = "check"
+     run_sddp(case_path, sddp_path, **kwargs)
+
+
+ def run_sddp_cleanup(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     kwargs["_mode"] = "clean"
+     run_sddp(case_path, sddp_path, **kwargs)
+
+
+ def run_sddp_convert_fcf(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     kwargs["_mode"] = "printfcf"
+     # TODO: the generated file uses \t as separator, has an empty column, and its name depends on the study stage type.
+     run_sddp(case_path, sddp_path, **kwargs)
+
+
+ def get_sddp_version(sddp_path: Union[str, pathlib.Path]) -> str:
+     sddp_path = str(sddp_path)
+     if os.name == 'nt':
+         sddp_path_full = os.path.join(sddp_path, "Oper")
+     else:
+         sddp_path_full = sddp_path
+     if os.name == 'nt':
+         command = [os.path.join(sddp_path_full, "sddp.exe"), "ver"]
+     else:
+         command = [os.path.join(sddp_path_full, "sddp"), "ver"]
+
+     if os.name != "nt":
+         os.environ["LD_LIBRARY_PATH"] = os.path.abspath(sddp_path_full)
+     sub = subprocess.run(command, stdout=subprocess.PIPE, check=False)
+     output = sub.stdout.decode("utf-8").strip()
+     # The version is the third whitespace-separated token of the `sddp ver` output.
+     return output.split()[2]
+
+
+ def run_ncp(case_path: Union[str, pathlib.Path], ncp_path: Union[str, pathlib.Path], **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running NCP is only available on Windows")
+     case_path = os.path.abspath(str(case_path))
+     ncp_path = str(ncp_path)
+     dry_run = kwargs.get("dry_run", False)
+     show_progress = kwargs.get("show_progress", False)
+
+     cnv_version = _ncp_determine_cnv_version(case_path)
+
+     print("NCP cnv version is", cnv_version)
+
+     ncp_path_full = os.path.join(ncp_path, "Oper")
+     cnv_path_full = os.path.join(ncp_path, "Cnv", cnv_version)
+
+     # Append trailing separator if missing.
+     case_path = os.path.join(os.path.abspath(case_path), "")
+
+     coes_dat_file_path = os.path.join(case_path, "coes.dat")
+     coes_tmp_file_path = os.path.join(case_path, "coes.tmp")
+
+     with change_cwd(cnv_path_full):
+         # Promote a leftover coes.tmp back to coes.dat before the conversion step.
+         if os.path.exists(coes_tmp_file_path):
+             shutil.move(coes_tmp_file_path, coes_dat_file_path)
+         exec_cmd(f"csvcnv csv -path {case_path}")
+
+     with change_cwd(ncp_path_full):
+         exec_cmd(f"sddprede -path {case_path}")
+         # Set coes.dat aside as coes.tmp before running cpplus.
+         if os.path.exists(coes_dat_file_path):
+             shutil.move(coes_dat_file_path, coes_tmp_file_path)
+
+         exec_cmd(f"cpplus -path {case_path}")
+
+         executed_successfully = os.path.exists(os.path.join(case_path, 'cpplus.ok'))
+         if executed_successfully:
+             if os.path.exists("post-run.bat"):
+                 exec_cmd(f'post-run.bat "{case_path}"')
+
+
+ def run_optgen(case_path: Union[str, pathlib.Path], optgen_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     case_path = os.path.abspath(str(case_path)).replace("\\", "/") + "/"
+     optgen_path = str(optgen_path)
+     sddp_path = str(sddp_path)
+     sddp_full_path = os.path.join(sddp_path, "Oper")
+     optgen_path = os.path.join(optgen_path, "Model")
+     exec_mode = kwargs.get("_mode", None)
+
+     mode_arg = exec_mode if exec_mode is not None else ""
+
+     with change_cwd(optgen_path):
+         cmd = f'optgen {mode_arg} -optgdat="{case_path}" -sddpexe="{sddp_full_path}" -sddpmpi="{__default_mpi_path}"'
+         exec_cmd(cmd, **kwargs)
+
+
+ def run_optgen_check(case_path: Union[str, pathlib.Path], optgen_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     kwargs["_mode"] = "check"
+     run_optgen(case_path, optgen_path, sddp_path, **kwargs)
+
+
+ def run_optgen_cleanup(case_path: Union[str, pathlib.Path], optgen_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     kwargs["_mode"] = "clean"
+     run_optgen(case_path, optgen_path, sddp_path, **kwargs)
+
+
+ def run_psrio(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running PSRIO is only available on Windows")
+     case_path = str(case_path)
+     sddp_path = str(sddp_path)
+     recipe_script = kwargs.get('r', kwargs.get('recipes', False))
+     output_path = kwargs.get('o', kwargs.get('output', False))
+
+     log_verbose = kwargs.get('v', kwargs.get('verbose', "0"))
+     study_model = kwargs.get('model', "sddp")
+     load_file_format = kwargs.get('load_format', "both")
+
+     load_from_output_path = kwargs.get('load_from_output_path', False)
+     save_only_in_csv = kwargs.get('csv', False)
+
+     psrio_path_full = os.path.join(sddp_path, "Oper\\psrio\\")
+
+     with change_cwd(psrio_path_full):
+         cmd = psrio_path_full + 'psrio.exe --model ' + study_model + ' --load_format ' + load_file_format
+
+         if recipe_script:
+             cmd += ' -v ' + log_verbose
+
+         if load_from_output_path:
+             cmd += ' load_from_output_path'
+         if save_only_in_csv:
+             cmd += ' save_only_in_csv'
+
+         if output_path:
+             cmd += f' -o "{output_path}"'
+         if recipe_script:
+             cmd += f' -r "{recipe_script}"'
+
+         cmd += f' "{case_path}"'
+         exec_cmd(cmd, **kwargs)
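+
+ # Usage sketch (illustrative, hypothetical paths; the recipe file name is an assumption):
+ #   run_psrio("C:/cases/study01", "C:/PSR/Sddp17",
+ #             recipes="C:/recipes/main.lua", output="C:/cases/study01/psrio_out")
+ # assembles the psrio.exe command line from these keyword arguments and runs it from
+ # the Oper\psrio folder of the SDDP installation.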
+
+
+ def run_nwsddp(input_case_path: Union[str, pathlib.Path], output_case_path: Union[str, pathlib.Path], nwsddp_app_path: Union[str, pathlib.Path], mdc_file_path: Optional[Union[str, pathlib.Path]] = None, **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running NWSDDP is only available on Windows")
+
+     input_case_path = os.path.abspath(str(input_case_path)).rstrip("\\")
+     output_case_path = os.path.abspath(str(output_case_path)).rstrip("\\")
+     nwsddp_app_path = str(nwsddp_app_path)
+     # Keep None as-is: str(None) would produce the literal string "None".
+     mdc_file_path = str(mdc_file_path) if mdc_file_path is not None else None
+
+     if mdc_file_path is not None:
+         mdc_file_path = os.path.abspath(mdc_file_path)
+     nwsddp_path_full = os.path.join(nwsddp_app_path, "bin", "")
+
+     with change_cwd(nwsddp_path_full):
+         extra_args = ""
+         if mdc_file_path is not None:
+             extra_args = "-MDC "
+             # Copy mdc file to the case directory.
+             output_mdc_path = os.path.join(input_case_path, "nwsddp.mdc")
+             # Skip the copy if input and output mdc paths point to the same file.
+             if mdc_file_path.lower().strip() != output_mdc_path.lower().strip():
+                 shutil.copy(mdc_file_path, output_mdc_path)
+         case_args = f"-NW:\"{os.path.join(input_case_path, '')}\" -SP:\"{os.path.join(output_case_path, '')}\""
+         cmd1 = 'nwsddp.exe ' + extra_args + case_args
+         return_code = exec_cmd(cmd1, **kwargs)
+         if return_code == 0:
+             cmd2_args = ["nwpatch.exe", "-nw", f"{input_case_path}", "-sp", f"{output_case_path}"]
+             exec_cmd(cmd2_args, **kwargs)
+
+
+ __hydro_estimation_path_contents = """ ------- PATH ---------------------------
+ Directorio Datos {path}
+ Directorio Hidro {path}
+ """
+
+
+ def run_hydro_estimation(case_path: Union[str, pathlib.Path], sddp_path: Union[str, pathlib.Path], **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running hydro estimation is only available on Windows")
+     case_path = os.path.abspath(str(case_path))
+     sddp_path = str(sddp_path)
+     estima_path = os.path.join(sddp_path, "Hidro")
+     estima_files = [
+         os.path.join(estima_path, "estima.exe"),
+         os.path.join(estima_path, "estimaen.fmt"),
+         os.path.join(estima_path, "estimaes.fmt"),
+         os.path.join(estima_path, "estimapo.fmt"),
+     ]
+
+     path_file = os.path.join(case_path, "path.dat")
+     path_file_contents = __hydro_estimation_path_contents.format(path=case_path)
+     # Copy the estimation binaries and create a temporary path.dat file in the case directory.
+     with change_cwd(case_path), __temporary_copy_of_files(case_path, *estima_files), \
+             __temporary_file(path_file, path_file_contents):
+         exec_cmd("estima", **kwargs)
+
+
+ @contextmanager
+ def __temporary_copy_of_files(target_dir: str, *files: str):
+     for file in files:
+         shutil.copy(file, target_dir)
+     try:
+         yield
+     finally:
+         for file in files:
+             os.remove(os.path.join(target_dir, os.path.basename(file)))
+
+
+ @contextmanager
+ def __temporary_file(file_path: Union[str, pathlib.Path], content: str):
+     with open(file_path, 'w') as file:
+         file.write(content)
+     try:
+         yield
+     finally:
+         os.remove(file_path)
+
+
+ def _ncp_determine_cnv_version(case_path: Union[str, pathlib.Path]) -> str:
+     CURRENT_CNV_VERSION = "V14"
+     LEGACY_CNV_VERSION = "V12"
+
+     csumcirc_path = os.path.join(case_path, "csumcirc.dat")
+     if os.path.exists(csumcirc_path):
+         with open(csumcirc_path, 'r') as csumcirc_file:
+             line = next(csumcirc_file)
+             if line.strip().lower().find("$version") == -1:
+                 return LEGACY_CNV_VERSION
+
+     all_ctermis = glob.glob(os.path.join(case_path, "ctermi*.dat"))
+     for ctermi_path in all_ctermis:
+         with open(ctermi_path, 'r') as ctermi_file:
+             line = next(ctermi_file)
+             if line.strip().lower().find("$version=") == -1:
+                 return LEGACY_CNV_VERSION
+
+     all_cgnds = glob.glob(os.path.join(case_path, "cgnd*.dat"))
+     for cgnd_path in all_cgnds:
+         with open(cgnd_path, 'r') as cgnd_file:
+             line = next(cgnd_file)
+             if line.strip().lower().find("$version=") == -1:
+                 return LEGACY_CNV_VERSION
+
+     return CURRENT_CNV_VERSION
+
+
+ def _tsl_filter_plants_with_coordinates(plant_list: List[psr.factory.DataObject]):
+     filtered = []
+     for plant in plant_list:
+         lat = plant.get("Latitude")
+         lon = plant.get("Longitude")
+         # Keep only plants that have both coordinates defined.
+         if lat and lon:
+             filtered.append(plant)
+     return filtered
+
+ def _tsl_get_renewable_plants_with_coordinates(study: psr.factory.Study, tech_type: int) -> List[psr.factory.DataObject]:
+     plant_list = study.find("RenewablePlant.*")
+     plant_list = [plant for plant in plant_list if plant.get("TechnologyType") == tech_type]
+     return _tsl_filter_plants_with_coordinates(plant_list)
+
+ def _tsl_get_csp_plants_with_coordinates(study: psr.factory.Study) -> List[psr.factory.DataObject]:
+     plant_list = study.find("CSP.*")
+     return _tsl_filter_plants_with_coordinates(plant_list)
+
+ def _tsl_create_csol_dat_file(case_path: Union[str, pathlib.Path], plant_list: List[psr.factory.DataObject]):
+     csol_dat_path = os.path.join(case_path, "csol.dat")
+     with open(csol_dat_path, 'w') as csol_dat_file:
+         csol_dat_file.write("ID,CODE,NAME,SYS,CLUSTER_ID,CLUSTER,POT_INST,LON,LAT,TRACKING,TILT,AZIMUTH,CFOBS_ID,PROFILE_TYPE,AC_DC_RATIO,SYSTEM_LOSSES,USE_AZIMUTH\n")
+         for plant in plant_list:
+             unique_id = "peteca"
+             capacity_profile = plant.get("RefCapacityProfile")
+             cluster_id = ""  # FIXME
+             cluster = ""  # FIXME
+             cfobs_id = "" if capacity_profile is None else capacity_profile.name
+             profile_type = "" if capacity_profile is None else capacity_profile.get("Type")
+             values = [
+                 unique_id,
+                 plant.code,
+                 plant.name,
+                 plant.get("RefSystem").id,
+                 cluster_id,
+                 cluster,
+                 plant.get("InstalledCapacity"),
+                 plant.get("Longitude"),
+                 plant.get("Latitude"),
+                 plant.get("Tracking"),
+                 plant.get("Tilt"),
+                 plant.get("Azimuth"),
+                 cfobs_id,
+                 profile_type,
+                 plant.get("DCACRatio"),
+                 plant.get("SystemLosses"),
+                 plant.get("UseAzimuth")
+             ]
+             csol_dat_file.write(",".join(map(str, values)) + "\n")
+
+ def _tsl_create_ceol_dat_file(case_path: Union[str, pathlib.Path], plant_list: List[psr.factory.DataObject]):
+     ceol_dat_file = os.path.join(case_path, "ceol.dat")
+     with open(ceol_dat_file, 'w') as ceol_dat:
+         ceol_dat.write("ID,PLANT_CODE,PLANT_NAME,PLANT_SYS,STATION_CODE,STATION_NAME,PLANT_POT_INST,LON,LAT,PROFILE_CODE,PLANT_HEIGHT,PLANT_TURBINE_MODEL,PROFILE_TYPE,DOWNS_FLAG,DENS_FLAG,DENS_SITE_HEIGHT\n")
+         for plant in plant_list:
+             unique_id = "peteca"
+             system = plant.get("RefSystem")
+             turbine = plant.get("RefTurbine")
+             station = plant.get("RefStation")
+             capacity_profile = plant.get("RefCapacityProfile")
+
+             turbine_model = "" if turbine is None else turbine.name
+             profile_code = "" if capacity_profile is None else capacity_profile.name
+             profile_type = "" if capacity_profile is None else capacity_profile.get("Type")
+             # Values follow the header columns above; solar-only fields (tracking, tilt,
+             # azimuth) are not part of the wind file.
+             values = [
+                 unique_id,
+                 plant.code,
+                 plant.name,
+                 system.id,
+                 station.code,
+                 station.name,
+                 plant.get("InstalledCapacity"),
+                 plant.get("Longitude"),
+                 plant.get("Latitude"),
+                 profile_code,
+                 plant.get("Height"),
+                 turbine_model,
+                 profile_type,
+                 plant.get("DownscalingFlag"),
+                 plant.get("DensityCorrectionFlag"),
+                 plant.get("DensityCorrection"),
+             ]
+             ceol_dat.write(",".join(map(str, values)) + "\n")
+
+ def _tsl_create_ccsp_dat_file(case_path: Union[str, pathlib.Path], plant_list: List[psr.factory.DataObject]):
+     ccsp_dat_file = os.path.join(case_path, "ccsp.dat")
+     with open(ccsp_dat_file, 'w') as ccsp_dat:
+         ccsp_dat.write("ID,CODE,NAME,SYS,CLUSTER_ID,CLUSTER,POT_INST,LON,LAT,SM,EFF,CFOBS_ID,PROFILE_TYPE\n")
+         for plant in plant_list:
+             unique_id = "peteca"
+             cluster_id = ""  # FIXME
+             cluster = ""  # FIXME
+             capacity_profile = plant.get("RefCapacityProfile")
+             cfobs_id = "" if capacity_profile is None else capacity_profile.name
+             profile_type = "" if capacity_profile is None else capacity_profile.get("Type")
+             values = [
+                 unique_id,
+                 plant.code,
+                 plant.name,
+                 plant.get("RefSystem").id,
+                 cluster_id,
+                 cluster,
+                 plant.get("InstalledCapacity"),
+                 plant.get("Longitude"),
+                 plant.get("Latitude"),
+                 plant.get("SM"),
+                 plant.get("Efficiency"),
+                 cfobs_id,
+                 profile_type,
+             ]
+             ccsp_dat.write(",".join(map(str, values)) + "\n")
+
+
+ def run_rpsdata(tsl_path: Union[str, pathlib.Path], case_path: Union[str, pathlib.Path], file_name: str, base_type: str, **kwargs):
+     rps_parentpath = os.path.join(str(tsl_path), "Extensions", "Script")
+     file_path = os.path.join(str(case_path), file_name)
+     with change_cwd(rps_parentpath):
+         cmd = f'RPSDataConsole.exe GET_POINTS "{file_path}" "{case_path}" {base_type}'
+         exec_cmd(cmd, **kwargs)
+
+ def run_tsldata(tsl_path: Union[str, pathlib.Path], case_path: Union[str, pathlib.Path], db_type: str, **kwargs):
+     tsldata_parentpath = os.path.join(str(tsl_path), "Extensions", "tsldata-distribution")
+     with change_cwd(tsldata_parentpath):
+         cmd = f'TSLData.exe --path "{str(case_path)}" --{db_type}'
+         exec_cmd(cmd, **kwargs)
+
+ def run_tslconsole(tsl_path: Union[str, pathlib.Path], script_path: Union[str, pathlib.Path], **kwargs):
+     tsl_console = os.path.join(tsl_path, "Extensions", "TimeSeriesLab")
+     with change_cwd(tsl_console):
+         cmd = f'TimeSeriesConsole.exe "{str(script_path)}"'
+         exec_cmd(cmd, **kwargs)
+
+ def run_tsl(case_path: Union[str, pathlib.Path], tsl_path: Union[str, pathlib.Path], base_type: str, **kwargs):
+     if os.name != 'nt':
+         raise NotImplementedError("Running TimeSeriesLab is only available on Windows")
+     case_path = os.path.abspath(str(case_path))
+     tsl_path = str(tsl_path)
+     dry_run = kwargs.get("dry_run", False)
+     show_progress = kwargs.get("show_progress", False)
+
+     def _run_rpsdata(file_name):
+         run_rpsdata(tsl_path, case_path, file_name, base_type, **kwargs)
+
+     def _run_tsldata(db_type):
+         run_tsldata(tsl_path, case_path, db_type, **kwargs)
+
+     def _run_tslconsole(commands: List[str]):
+         _run_tslconsole_command(tsl_path, case_path, commands, **kwargs)
+
+     study = psr.factory.load_study(case_path, ["TSL"])
+
+     wind_list = _tsl_get_renewable_plants_with_coordinates(study, 1)
+     if len(wind_list) > 0:
+         _tsl_create_ceol_dat_file(case_path, wind_list)
+         _run_rpsdata("ceol.dat")
+         _run_tsldata("wind")
+
+     solar_list = _tsl_get_renewable_plants_with_coordinates(study, 2)
+     if len(solar_list) > 0:
+         _tsl_create_csol_dat_file(case_path, solar_list)
+         _run_rpsdata("csol.dat")
+         _run_tsldata("solar")
+
+     csp_list = _tsl_get_csp_plants_with_coordinates(study)
+     if len(csp_list) > 0:
+         _tsl_create_ccsp_dat_file(case_path, csp_list)
+         _run_rpsdata("ccsp.dat")
+         _run_tsldata("csp")
+
+     if len(solar_list) > 0 or len(csp_list) > 0:
+         _run_tsldata("solar-correction")
+
+     # TODO: create cdlr.dat
+     _run_rpsdata("cdlr.dat")
+     _run_tsldata("dlr")
+
+     _run_tsldata("merge")
+
+     # TODO: generate default script for parameters
+     _run_tslconsole([])
+
+     # TODO: generate default script for scenarios
+     _run_tslconsole([])
+
+
+ def _run_tslconsole_command(tsl_path: Union[str, pathlib.Path], case_path: Union[str, pathlib.Path], commands: List[str], script_prefix: str = "", **kwargs):
+     tsl_console_path = os.path.join(tsl_path, "Extensions", "TimeSeriesLab")
+     delete_xml = not _DEBUG
+     full_path = os.path.join(os.path.abspath(case_path), "")
+     with psr.psrfcommon.tempfile.CreateTempFile(
+             "./", script_prefix, "", ".dat", delete_xml
+     ) as script_file, change_cwd(tsl_console_path):
+         with open(script_file.name, "w") as script:
+             script.write(f"SET,PATHDATA,{full_path}\n")
+             for command in commands:
+                 script.write(f"RUN,{command}\n")
+         run_tslconsole(tsl_path, os.path.abspath(script_file.name), **kwargs)
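+
+ # The temporary script passed to TimeSeriesConsole.exe looks like this (illustrative,
+ # for a case at C:\cases\study01 and commands=["generate_inflow_from_external_natural"]):
+ #   SET,PATHDATA,C:\cases\study01\
+ #   RUN,generate_inflow_from_external_natural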
+
+
+ def run_tsl_generate_external_scenarios(case_path: Union[str, pathlib.Path], tsl_path: Union[str, pathlib.Path], option: str, **kwargs):
+     inflow_path = os.path.join(str(case_path), "inflow.dat")
+     option_command_map = {
+         "natural": "generate_inflow_from_external_natural",
+         "incremental": "generate_inflow_from_external_incremental",
+     }
+     if option not in option_command_map:
+         raise ValueError(f"Invalid option. Should be one of {', '.join(option_command_map.keys())}")
+
+     commands = [option_command_map[option]]
+     _run_tslconsole_command(tsl_path, case_path, commands, **kwargs)
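+
+ # Usage sketch (illustrative, hypothetical paths):
+ #   run_tsl_generate_external_scenarios("C:/cases/study01", "C:/PSR/TSL", "natural")
+ # writes a temporary console script containing the generate_inflow_from_external_natural
+ # command and runs it through TimeSeriesConsole.exe.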
psr/runner/version.py ADDED
@@ -0,0 +1,5 @@
+ # PSR Factory. Copyright (C) PSR, Inc - All Rights Reserved
+ # Unauthorized copying of this file, via any medium is strictly prohibited
+ # Proprietary and confidential
+
+ __version__ = "1.0.1"