rtc-tools 2.6.0b2__py3-none-any.whl → 2.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {rtc_tools-2.6.0b2.dist-info → rtc_tools-2.7.0.dist-info}/METADATA +26 -15
- rtc_tools-2.7.0.dist-info/RECORD +50 -0
- {rtc_tools-2.6.0b2.dist-info → rtc_tools-2.7.0.dist-info}/WHEEL +1 -1
- {rtc_tools-2.6.0b2.dist-info → rtc_tools-2.7.0.dist-info}/entry_points.txt +0 -1
- rtctools/_internal/casadi_helpers.py +5 -5
- rtctools/_version.py +4 -4
- rtctools/data/csv.py +18 -7
- rtctools/data/interpolation/bspline1d.py +5 -1
- rtctools/data/netcdf.py +16 -15
- rtctools/data/pi.py +72 -41
- rtctools/data/rtc.py +6 -5
- rtctools/optimization/collocated_integrated_optimization_problem.py +14 -17
- rtctools/optimization/control_tree_mixin.py +8 -5
- rtctools/optimization/csv_lookup_table_mixin.py +15 -15
- rtctools/optimization/csv_mixin.py +3 -0
- rtctools/optimization/goal_programming_mixin.py +11 -2
- rtctools/optimization/goal_programming_mixin_base.py +5 -3
- rtctools/optimization/modelica_mixin.py +28 -8
- rtctools/optimization/optimization_problem.py +18 -0
- rtctools/optimization/pi_mixin.py +13 -0
- rtctools/rtctoolsapp.py +17 -14
- rtctools/simulation/io_mixin.py +1 -1
- rtctools/simulation/pi_mixin.py +13 -0
- rtctools/simulation/simulation_problem.py +130 -22
- rtctools/util.py +1 -0
- rtc_tools-2.6.0b2.dist-info/RECORD +0 -50
- {rtc_tools-2.6.0b2.dist-info → rtc_tools-2.7.0.dist-info/licenses}/COPYING.LESSER +0 -0
- {rtc_tools-2.6.0b2.dist-info → rtc_tools-2.7.0.dist-info}/top_level.txt +0 -0
{rtc_tools-2.6.0b2.dist-info → rtc_tools-2.7.0.dist-info}/METADATA
CHANGED

@@ -1,12 +1,11 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: rtc-tools
-Version: 2.
+Version: 2.7.0
 Summary: Toolbox for control and optimization of water systems.
 Home-page: https://oss.deltares.nl/web/rtc-tools/home
+Download-URL: http://github.com/deltares/rtc-tools/
 Author: Deltares
 Maintainer: Deltares
-License: UNKNOWN
-Download-URL: http://gitlab.com/deltares/rtc-tools/
 Platform: Windows
 Platform: Linux
 Platform: Mac OS-X
@@ -24,19 +23,31 @@ Classifier: Operating System :: Microsoft :: Windows
 Classifier: Operating System :: POSIX
 Classifier: Operating System :: Unix
 Classifier: Operating System :: MacOS
-Requires-Python: >=3.
+Requires-Python: >=3.9
 License-File: COPYING.LESSER
-Requires-Dist: casadi
-Requires-Dist: numpy
-Requires-Dist: scipy
-Requires-Dist: pymoca
-Requires-Dist: rtc-tools-channel-flow
-
-Requires-Dist:
+Requires-Dist: casadi!=3.6.6,<=3.7,>=3.6.3
+Requires-Dist: numpy>=1.16.0
+Requires-Dist: scipy>=1.0.0
+Requires-Dist: pymoca==0.9.*,>=0.9.1
+Requires-Dist: rtc-tools-channel-flow>=1.2.0
+Requires-Dist: defusedxml>=0.7.0
+Requires-Dist: importlib_metadata>=5.0.0; python_version < "3.10"
 Provides-Extra: netcdf
-Requires-Dist: netCDF4
+Requires-Dist: netCDF4; extra == "netcdf"
+Provides-Extra: all
+Requires-Dist: netCDF4; extra == "all"
+Dynamic: author
+Dynamic: classifier
+Dynamic: description
+Dynamic: download-url
+Dynamic: home-page
+Dynamic: license-file
+Dynamic: maintainer
+Dynamic: platform
+Dynamic: provides-extra
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
 
 RTC-Tools is the Deltares toolbox for control and optimization of water systems.
 
-
-
rtc_tools-2.7.0.dist-info/RECORD
ADDED

@@ -0,0 +1,50 @@
+rtc_tools-2.7.0.dist-info/licenses/COPYING.LESSER,sha256=46mU2C5kSwOnkqkw9XQAJlhBL2JAf1_uCD8lVcXyMRg,7652
+rtctools/__init__.py,sha256=91hvS2-ryd2Pvw0COpsUzTwJwSnTZ035REiej-1hNI4,107
+rtctools/_version.py,sha256=GRY0UZKaQFpBKoUgttmG_1W-QGsYu3oGtZePebKhXj4,497
+rtctools/rtctoolsapp.py,sha256=2RVZI4QQUg0yC6ii4lr50yx1blEfHBFsAgUjLR5pBkA,4336
+rtctools/util.py,sha256=8IGva7xWcAH-9Xcr1LaxUpYoZjF6vbo1eqdNJ9pKgGA,9098
+rtctools/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+rtctools/_internal/alias_tools.py,sha256=XuQSAhhFuVtwn0yrAObZWIKPsSF4j2axXRtEmitIFPs,5310
+rtctools/_internal/caching.py,sha256=p4gqSL7kCI7Hff-KjMEP7mhJCQSiU_lYm2MR7E18gBM,905
+rtctools/_internal/casadi_helpers.py,sha256=q8j5h9XXXkZMUgjg6wbkcFj1mcHi5_SdEi8SrkM---M,1457
+rtctools/_internal/debug_check_helpers.py,sha256=UgQTEPw4PyR7MbYLewSSWQqTwQj7xr5yUBk820O9Kk4,1084
+rtctools/data/__init__.py,sha256=EllgSmCdrlvQZSd1VilvjPaeYJGhY9ErPiQtedmuFoA,157
+rtctools/data/csv.py,sha256=hEpoTH3nhZaAvRN4r-9-nYeAjaFiNDRoiZWg8GxM3yo,5539
+rtctools/data/netcdf.py,sha256=tMs-zcSlOR0HhajUKJVbXGNoi3GeKCM3X4DjuW8FDo8,19130
+rtctools/data/pi.py,sha256=D2r9gaYu6qMpgWRqiWpWPSPJXWgqCVV0bz6ewgM78mc,46701
+rtctools/data/rtc.py,sha256=tYPOzZSFE02bAXX3lgcGR1saoQNIv6oWVWH8CS0dl5Q,9079
+rtctools/data/storage.py,sha256=67J4BRTl0AMEzlKNZ8Xdpy_4cGtwx8Lo_tL2n0G4S9w,13206
+rtctools/data/interpolation/__init__.py,sha256=GBubCIT5mFoSTV-lOk7cpwvZekNMEe5bvqSQJ9HE34M,73
+rtctools/data/interpolation/bspline.py,sha256=qevB842XWCH3fWlWMBqKMy1mw37ust-0YtSnb9PKCEc,948
+rtctools/data/interpolation/bspline1d.py,sha256=HAh7m5xLBuiFKzMzuYEqZX_GmCPChKjV7ynTS6iRZOc,6166
+rtctools/data/interpolation/bspline2d.py,sha256=ScmX0fPDxbUVtj3pbUE0L7UJocqroD_6fUT-4cvdRMc,1693
+rtctools/optimization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+rtctools/optimization/collocated_integrated_optimization_problem.py,sha256=gB1RgEsOSgb745NxSosDqjBEJZI_ULtjlHZcuCcRjO4,131267
+rtctools/optimization/control_tree_mixin.py,sha256=ZMMH7Xy_qIVXeLDNtPdXQ8o_0ELRYVdM5QK2R8YulKU,9036
+rtctools/optimization/csv_lookup_table_mixin.py,sha256=TUYAT-u-mzH6HLP0iJHnLBVqV5tWnhYAqDC4Aj17MJg,17399
+rtctools/optimization/csv_mixin.py,sha256=_6iPVK_EJ8PxnukepzkhFtidceucsozRML_DDEycYik,12453
+rtctools/optimization/goal_programming_mixin.py,sha256=vdnKnz1Ov3OFN-J9KQiiAwHbrLjWH6o_PeZz2YfLz6k,33320
+rtctools/optimization/goal_programming_mixin_base.py,sha256=qJQQcJlJdio4GTcrKfuBi6Nho9u16pDuuprzK0LUyhA,43835
+rtctools/optimization/homotopy_mixin.py,sha256=Kh0kMfxB-Fo1FBGW5tPOQk24Xx_Mmw_p0YuSQotdkMU,6905
+rtctools/optimization/initial_state_estimation_mixin.py,sha256=74QYfG-VYYTNVg-kAnCG6QoY3_sUmaID0ideF7bPkkY,3116
+rtctools/optimization/io_mixin.py,sha256=AsZQ7YOUcUbWoczmjTXaSje5MUEsPNbQyZBJ6qzSjzU,11821
+rtctools/optimization/linearization_mixin.py,sha256=mG5S7uwvwDydw-eBPyQKnLyKoy08EBjQh25vu97afhY,1049
+rtctools/optimization/linearized_order_goal_programming_mixin.py,sha256=LQ2qpYt0YGLpEoerif4FJ5wwzq16q--27bsRjcqIU5A,9087
+rtctools/optimization/min_abs_goal_programming_mixin.py,sha256=WMOv9EF8cfDJgTunzXfI_cUmBSQK26u1HJB_9EAarfM,14031
+rtctools/optimization/modelica_mixin.py,sha256=b_VsEcg_VsAnODnTQybrY0GbuZUNQ3uugQmML6FlklE,18037
+rtctools/optimization/netcdf_mixin.py,sha256=-zkXh3sMYE50c3kHsrmUVGWMSFm-0cXQpGrCm0yn-Tc,7563
+rtctools/optimization/optimization_problem.py,sha256=lTk4tUBEb1xy9eyNy7-w0D6L6HxSCJ0MyvxjzrYLsR4,44824
+rtctools/optimization/pi_mixin.py,sha256=G_6RPlXO-IOjqYxNsMZGY4fmnfxVpwN-_T5Ka3rDwK4,11788
+rtctools/optimization/planning_mixin.py,sha256=O_Y74X8xZmaNZR4iYOe7BR06s9hnmcapbuHYHQTBPPQ,724
+rtctools/optimization/single_pass_goal_programming_mixin.py,sha256=Zb9szg3PGT2o6gkGsXluSfEaAswkw3TKfPQDzUrj_Y4,25784
+rtctools/optimization/timeseries.py,sha256=nCrsGCJThBMh9lvngEpbBDa834_QvklVvkxJqwX4a1M,1734
+rtctools/simulation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+rtctools/simulation/csv_mixin.py,sha256=rGDUFPsqGHmF0_dWdXeWzWzMpkPmwCNweTBVrwSh31g,6704
+rtctools/simulation/io_mixin.py,sha256=WIKOQxr3fS-aNbgjet9iWoUayuD22zLIYmqlWEqxXHo,6215
+rtctools/simulation/pi_mixin.py,sha256=_TU2DrK2MQqVsyrHBD9W4SDEuot9dYmgTDNiXkDAJfk,9833
+rtctools/simulation/simulation_problem.py,sha256=v5Lk2x-yuVb5s7ne5fFgxONxGniLHTyTR0XRzYRl1fw,50005
+rtc_tools-2.7.0.dist-info/METADATA,sha256=ULtELHwTloQVwwzBsmnqDlvFMj-SMQmNO2r3ATeJRC8,1772
+rtc_tools-2.7.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+rtc_tools-2.7.0.dist-info/entry_points.txt,sha256=DVS8sWf3b9ph9h8srEr6zmQ7ZKGwblwgZgGPZg-jRNQ,150
+rtc_tools-2.7.0.dist-info/top_level.txt,sha256=pnBrb58PFPd1kp1dqa-JHU7R55h3alDNJIJnF3Jf9Dw,9
+rtc_tools-2.7.0.dist-info/RECORD,,
rtctools/_internal/casadi_helpers.py
CHANGED

@@ -5,12 +5,12 @@ import casadi as ca
 logger = logging.getLogger("rtctools")
 
 
-def is_affine(
+def is_affine(expr, symbols):
     try:
-        Af = ca.Function("f", [
-    except RuntimeError as
-        if "'eval_sx' not defined for" in str(
-            Af = ca.Function("f", [
+        Af = ca.Function("f", [symbols], [ca.jacobian(expr, symbols)]).expand()
+    except RuntimeError as error:
+        if "'eval_sx' not defined for" in str(error):
+            Af = ca.Function("f", [symbols], [ca.jacobian(expr, symbols)])
         else:
             raise
    return Af.sparsity_jac(0, 0).nnz() == 0
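Note on the change above: is_affine now takes the expression and its symbols explicitly and decides affinity by checking that the Jacobian of expr with respect to symbols is constant. A minimal usage sketch (the import path is taken from this wheel's RECORD; the example expressions are illustrative, not from the package):

import casadi as ca

from rtctools._internal.casadi_helpers import is_affine

x = ca.SX.sym("x", 2)

print(is_affine(2 * x[0] - 3 * x[1] + 1, x))  # linear plus constant, expected True
print(is_affine(x[0] * x[1], x))              # bilinear term, expected False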
rtctools/_version.py
CHANGED
@@ -1,5 +1,5 @@
 
-# This file was generated by 'versioneer.py' (0.
+# This file was generated by 'versioneer.py' (0.29) from
 # revision-control system data, or from the parent directory name of an
 # unpacked source archive. Distribution tarballs contain a pre-generated copy
 # of this file.
@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
- "date": "
+ "date": "2025-05-23T14:56:50+0200",
  "dirty": false,
  "error": null,
- "full-revisionid": "
- "version": "2.
+ "full-revisionid": "16ab781cd81009655c779d5cc5b7c4aa3e73fa85",
+ "version": "2.7.0"
 }
 ''' # END VERSION_JSON
 
rtctools/data/csv.py
CHANGED
@@ -1,6 +1,7 @@
 import logging
 import sys
 from datetime import datetime
+from typing import Union
 
 import numpy as np
 
@@ -41,6 +42,21 @@ def _boolean_to_nan(data, fname):
     return data
 
 
+def _string_to_datetime(string: Union[str, bytes]) -> datetime:
+    """Convert a string to a datetime object."""
+    if isinstance(string, bytes):
+        string = string.decode("utf-8")
+    return datetime.strptime(string, "%Y-%m-%d %H:%M:%S")
+
+
+def _string_to_float(string: Union[str, bytes]) -> float:
+    """Convert a string to a float."""
+    if isinstance(string, bytes):
+        string = string.decode("utf-8")
+    string = string.replace(",", ".")
+    return float(string)
+
+
 def load(fname, delimiter=",", with_time=False):
     """
     Check delimiter of csv and read contents to an array. Assumes no date-time conversion needed.
@@ -53,7 +69,7 @@ def load(fname, delimiter=",", with_time=False):
     """
     c = {}
     if with_time:
-        c.update({0:
+        c.update({0: _string_to_datetime})
 
     # Check delimiter of csv file. If semicolon, check if decimal separator is
     # a comma.
@@ -73,12 +89,7 @@ def load(fname, delimiter=",", with_time=False):
     # If commas are used as decimal separator, we need additional
     # converters.
     if n_comma_decimal:
-        c.update(
-            {
-                i + len(c): lambda str: float(str.decode("utf-8").replace(",", "."))
-                for i in range(1 + n_semicolon - len(c))
-            }
-        )
+        c.update({i + len(c): _string_to_float for i in range(1 + n_semicolon - len(c))})
 
     # Read the csv file and convert to array
     try:
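Note on the change above: the inline lambda converters are replaced by the module-level helpers _string_to_datetime and _string_to_float, which also accept plain str input. A minimal sketch of what the helpers do; calling the private helpers directly is for illustration only, in the package they are used via the converter mapping built in load():

from rtctools.data.csv import _string_to_datetime, _string_to_float

print(_string_to_float(b"3,14"))                    # comma decimal separator -> 3.14
print(_string_to_float("2.72"))                     # plain str input -> 2.72
print(_string_to_datetime(b"2013-05-01 00:00:00"))  # -> datetime(2013, 5, 1, 0, 0)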
rtctools/data/interpolation/bspline1d.py
CHANGED

@@ -55,6 +55,7 @@ class BSpline1D(BSpline):
         epsilon=1e-7,
         delta=1e-4,
         interior_pts=None,
+        ipopt_options=None,
     ):
         """
         fit() returns a tck tuple like scipy.interpolate.splrep, but adjusts
@@ -153,7 +154,10 @@ class BSpline1D(BSpline):
         nlp = {"x": c, "f": f, "g": g}
         my_solver = "ipopt"
         solver = nlpsol(
-            "solver",
+            "solver",
+            my_solver,
+            nlp,
+            {"print_time": 0, "expand": True, "ipopt": ipopt_options},
         )
         sol = solver(lbg=lbg, ubg=ubg)
         stats = solver.stats()
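Note on the change above: the fit routine gains an ipopt_options argument that is forwarded to casadi.nlpsol under the "ipopt" key. A sketch of that forwarding pattern on a toy NLP; the toy problem and option values are illustrative and not from the package, and the full fit() call signature is not shown in this diff:

import casadi as ca

x = ca.SX.sym("x")
nlp = {"x": x, "f": (x - 1) ** 2, "g": x}

ipopt_options = {"max_iter": 100, "print_level": 0}
solver = ca.nlpsol(
    "solver",
    "ipopt",
    nlp,
    {"print_time": 0, "expand": True, "ipopt": ipopt_options},
)
sol = solver(lbg=-10, ubg=10)
print(float(sol["f"]))  # objective value at the optimum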
rtctools/data/netcdf.py
CHANGED
@@ -401,20 +401,21 @@ class ExportDataset:
         """
         assert len(set(variable_names)) == len(variable_names)
 
-        assert (
-
-        )
-        assert (
-
-        )
+        assert self.__time_dim is not None, (
+            "First call write_times to ensure the time dimension has been created."
+        )
+        assert self.__station_dim is not None, (
+            "First call write_station_data to ensure the station dimension has been created"
+        )
         assert (
             self.__station_id_to_index_mapping is not None
         ) # should also be created in write_station_data
 
         if ensemble_size > 1:
-            assert (
-
-
+            assert self.__ensemble_member_dim is not None, (
+                "First call write_ensemble_data to ensure "
+                "the realization dimension has been created"
+            )
 
         for variable_name in variable_names:
             self.__dataset.createVariable(
@@ -446,15 +447,15 @@ class ExportDataset:
         :param values: The values that are to be written to the file
         :param ensemble_size: the number of members in the ensemble
         """
-        assert (
-
-        )
+        assert self.__station_id_to_index_mapping is not None, (
+            "First call write_station_data and create_variables."
+        )
 
         station_index = self.__station_id_to_index_mapping[station_id]
         if ensemble_size > 1:
-            self.__dataset.variables[variable_name][
-
-
+            self.__dataset.variables[variable_name][:, station_index, ensemble_member_index] = (
+                values
+            )
         else:
             self.__dataset.variables[variable_name][:, station_index] = values
 
rtctools/data/pi.py
CHANGED
@@ -5,6 +5,7 @@ import logging
 import os
 import xml.etree.ElementTree as ET
 
+import defusedxml.ElementTree as DefusedElementTree
 import numpy as np
 
 ns = {"fews": "http://www.wldelft.nl/fews", "pi": "http://www.wldelft.nl/fews/PI"}
@@ -30,7 +31,7 @@ class Diag:
         """
         self.__path_xml = os.path.join(folder, basename + ".xml")
 
-        self.__tree =
+        self.__tree = DefusedElementTree.parse(self.__path_xml)
         self.__xml_root = self.__tree.getroot()
 
     def get(self, level=ERROR_FATAL):
@@ -87,7 +88,7 @@ class DiagHandler(logging.Handler):
         self.__path_xml = os.path.join(folder, basename + ".xml")
 
         try:
-            self.__tree =
+            self.__tree = DefusedElementTree.parse(self.__path_xml)
             self.__xml_root = self.__tree.getroot()
         except Exception:
             self.__xml_root = ET.Element("{%s}Diag" % (ns["pi"],))
@@ -135,7 +136,7 @@ class ParameterConfig:
             basename = basename + ".xml"
         self.__path_xml = os.path.join(folder, basename)
 
-        self.__tree =
+        self.__tree = DefusedElementTree.parse(self.__path_xml)
         self.__xml_root = self.__tree.getroot()
 
     def get(self, group_id, parameter_id, location_id=None, model=None):
@@ -300,9 +301,8 @@ class ParameterConfig:
                 # get table contenstart_datetime
                 el_row = child.findall("pi:row", ns)
                 table = {
-                    columnId[key]: np.empty(len(el_row), columnType[key])
-
-                }
+                    columnId[key]: np.empty(len(el_row), columnType[key]) for key in columnId
+                } # initialize table
 
                 i_row = 0
                 for row in el_row:
@@ -333,8 +333,11 @@ class ParameterConfig:
 
         parameters = group.findall("pi:parameter", ns)
         for parameter in parameters:
-            yield
-
+            yield (
+                location_id,
+                model_id,
+                parameter.attrib["id"],
+                self.__parse_parameter(parameter),
             )
 
 
@@ -369,8 +372,6 @@ class Timeseries:
         self.__folder = folder
         self.__basename = basename
 
-        self.__path_xml = os.path.join(self.__folder, basename + ".xml")
-
         self.__internal_dtype = np.float64
         self.__pi_dtype = np.float32
 
@@ -378,7 +379,7 @@ class Timeseries:
         if self.make_new_file:
             self.__reset_xml_tree()
         else:
-            self.__tree =
+            self.__tree = DefusedElementTree.parse(self.path)
             self.__xml_root = self.__tree.getroot()
 
         self.__values = [{}]
@@ -801,13 +802,20 @@ class Timeseries:
         # Add series to xml
         self.__xml_root.append(series)
 
-    def write(self):
+    def write(self, output_folder=None, output_filename=None) -> None:
         """
         Writes the time series data to disk.
+
+        :param output_folder: The folder in which the output file is located.
+            If None, the original folder is used.
+        :param output_filename: The name of the output file without extension.
+            If None, the original filename is used.
         """
+        xml_path = self.output_path(output_folder, output_filename)
+        binary_path = self.output_binary_path(output_folder, output_filename)
 
         if self.__binary:
-            f = io.open(
+            f = io.open(binary_path, "wb")
 
         if self.make_new_file:
             # Force reinitialization in case write() is called more than once
@@ -855,7 +863,7 @@ class Timeseries:
 
             variable = self.__data_config.variable(header)
 
-            miss_val =
+            miss_val = header.find("pi:missVal", ns).text
             values = self.__values[ensemble_member][variable]
 
             # Update the header, which may have changed
@@ -868,37 +876,34 @@ class Timeseries:
                 self.__xml_root.remove(series)
                 continue
 
-            # Replace NaN with missing value
-            nans = np.isnan(values)
-            values[nans] = miss_val
-
             # Write output
+            nans = np.isnan(values)
             if self.__binary:
                 f.write(values.astype(self.__pi_dtype).tobytes())
             else:
                 events = series.findall("pi:event", ns)
 
                 t = self.__start_datetime
-                for i in
+                for i, value in enumerate(values):
                     if self.dt is None:
                         t = self.times[i]
-                    # Set the date/time, so that any date/time steps that
-                    # are wrong in the placeholder file are corrected.
-                    events[i].set("date", t.strftime("%Y-%m-%d"))
-                    events[i].set("time", t.strftime("%H:%M:%S"))
 
-
-
-
-
-
-
-
-
+                    if i < len(events):
+                        event = events[i]
+                    else:
+                        event = ET.Element("pi:event")
+                        series.append(event)
+
+                    # Always set the date/time, so that any date/time steps
+                    # that are wrong in the placeholder file are corrected.
                     event.set("date", t.strftime("%Y-%m-%d"))
                     event.set("time", t.strftime("%H:%M:%S"))
-
-
+
+                    if nans[i]:
+                        event.set("value", miss_val)
+                    else:
+                        event.set("value", str(value))
+
                     if self.dt:
                         t += self.dt
 
@@ -907,14 +912,11 @@ class Timeseries:
             for i in range(len(values), len(events)):
                 series.remove(events[i])
 
-            # Restore NaN
-            values[nans] = np.nan
-
         if self.__binary:
             f.close()
 
         self.format_xml_data()
-        self.__tree.write(
+        self.__tree.write(xml_path)
 
     def format_xml_data(self):
         """
@@ -1173,16 +1175,45 @@ class Timeseries:
         self.__end_datetime = end_datetime
 
     @property
-    def path(self):
-
+    def path(self) -> str:
+        """
+        The path to the original xml file.
+        """
+        return os.path.join(self.__folder, self.__basename + ".xml")
 
     @property
-    def binary_path(self):
+    def binary_path(self) -> str:
         """
-        The path
+        The path to the original binary data .bin file.
         """
         return os.path.join(self.__folder, self.__basename + ".bin")
 
+    def _output_path_without_extension(self, output_folder=None, output_filename=None) -> str:
+        """
+        Get the output path without file extension.
+        """
+        if output_folder is None:
+            output_folder = self.__folder
+        if output_filename is None:
+            output_filename = self.__basename
+        return os.path.join(output_folder, output_filename)
+
+    def output_path(self, output_folder=None, output_filename=None) -> str:
+        """
+        Get the path to the output xml file.
+
+        The optional arguments are the same as in :py:method:`write`.
+        """
+        return self._output_path_without_extension(output_folder, output_filename) + ".xml"
+
+    def output_binary_path(self, output_folder=None, output_filename=None) -> str:
+        """
+        Get the path to the output binary file.
+
+        The optional arguments are the same as in :py:method:`write`.
+        """
+        return self._output_path_without_extension(output_folder, output_filename) + ".bin"
+
     def items(self, ensemble_member=0):
         """
         Returns an iterator over all timeseries IDs and value arrays for the given
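Note on the changes above: Timeseries.write() can now target a different folder and filename, and the new output_path()/output_binary_path() helpers compute the corresponding paths. A hedged sketch that relies only on the signatures shown in this diff; constructing the Timeseries itself (its constructor arguments are not part of these hunks) is out of scope:

from rtctools.data.pi import Timeseries


def export_copy(ts: Timeseries, folder: str, name: str) -> None:
    # Write an already-populated Timeseries to an alternative location.
    # The .xml/.bin extensions are added by output_path()/write() themselves.
    print("writing to", ts.output_path(folder, name))
    ts.write(output_folder=folder, output_filename=name)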
rtctools/data/rtc.py
CHANGED
@@ -1,8 +1,9 @@
 import logging
 import os
-import xml.etree.ElementTree as ET
 from collections import namedtuple
 
+import defusedxml.ElementTree as DefusedElementTree
+
 ts_ids = namedtuple("ids", "location_id parameter_id qualifier_id")
 p_ids = namedtuple("ids", "model_id location_id parameter_id")
 
@@ -31,7 +32,7 @@ class DataConfig:
 
         path = os.path.join(folder, "rtcDataConfig.xml")
         try:
-            tree =
+            tree = DefusedElementTree.parse(path)
             root = tree.getroot()
 
             timeseriess1 = root.findall("./*/fews:timeSeries", ns)
@@ -59,9 +60,9 @@ class DataConfig:
                     logger.error(message)
                     raise Exception(message)
                 else:
-                    self.__location_parameter_ids[
-
-
+                    self.__location_parameter_ids[internal_id] = (
+                        self.__pi_location_parameter_id(pi_timeseries, "fews")
+                    )
                     self.__variable_map[external_id] = internal_id
 
             for k in ["import", "export"]:
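Note on the changes above: both rtctools/data/pi.py and rtctools/data/rtc.py now parse XML through defusedxml instead of xml.etree directly, matching the new defusedxml>=0.7.0 requirement in METADATA and guarding against entity-expansion attacks in untrusted input. The parsing pattern they switched to, as a minimal sketch (the file name is the one used by DataConfig; any rtcDataConfig.xml on disk will do):

import defusedxml.ElementTree as DefusedElementTree

# defusedxml rejects entity-expansion tricks that xml.etree would accept.
tree = DefusedElementTree.parse("rtcDataConfig.xml")
root = tree.getroot()
print(root.tag)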
rtctools/optimization/collocated_integrated_optimization_problem.py
CHANGED

@@ -898,11 +898,11 @@ class CollocatedIntegratedOptimizationProblem(OptimizationProblem, metaclass=ABC
             function_options,
         )
 
+        # Expand the residual function if possible.
         try:
             dae_residual_function_integrated = dae_residual_function_integrated.expand()
         except RuntimeError as e:
-
-            if "'eval_sx' not defined for External" in str(e):
+            if "'eval_sx' not defined for" in str(e):
                 pass
             else:
                 raise
@@ -933,13 +933,13 @@ class CollocatedIntegratedOptimizationProblem(OptimizationProblem, metaclass=ABC
             [dae_residual_collocated],
             function_options,
         )
+        # Expand the residual function if possible.
         try:
             self.__dae_residual_function_collocated = (
                 self.__dae_residual_function_collocated.expand()
             )
         except RuntimeError as e:
-
-            if "'eval_sx' not defined for External" in str(e):
+            if "'eval_sx' not defined for" in str(e):
                 pass
             else:
                 raise
@@ -1028,8 +1028,8 @@ class CollocatedIntegratedOptimizationProblem(OptimizationProblem, metaclass=ABC
                 + len(self.dae_variables["constant_inputs"])
             ]
             constant_inputs_1 = accumulated_U[
-                2 * len(collocated_variables)
-
+                2 * len(collocated_variables) + len(self.dae_variables["constant_inputs"]) : 2
+                * len(collocated_variables)
                 + 2 * len(self.dae_variables["constant_inputs"])
             ]
 
@@ -1803,9 +1803,9 @@ class CollocatedIntegratedOptimizationProblem(OptimizationProblem, metaclass=ABC
             # Cast delay from DM to np.array
             delay = delay.toarray().flatten()
 
-            assert np.all(
-
-            )
+            assert np.all(np.isfinite(delay)), (
+                "Delay duration must be resolvable to real values at transcribe()"
+            )
 
             out_times = np.concatenate([history_times, collocation_times])
             out_values = ca.veccat(
@@ -2043,9 +2043,7 @@ class CollocatedIntegratedOptimizationProblem(OptimizationProblem, metaclass=ABC
     def controls(self):
         return self.__controls
 
-    def _collint_get_lbx_ubx(self, count, indices):
-        bounds = self.bounds()
-
+    def _collint_get_lbx_ubx(self, bounds, count, indices):
         lbx = np.full(count, -np.inf, dtype=np.float64)
         ubx = np.full(count, np.inf, dtype=np.float64)
 
@@ -2210,7 +2208,7 @@ class CollocatedIntegratedOptimizationProblem(OptimizationProblem, metaclass=ABC
         count = max(count, control_indices_stop)
 
         discrete = self._collint_get_discrete(count, indices)
-        lbx, ubx = self._collint_get_lbx_ubx(count, indices)
+        lbx, ubx = self._collint_get_lbx_ubx(bounds, count, indices)
         x0 = self._collint_get_x0(count, indices)
 
         # Return number of control variables
@@ -2326,7 +2324,7 @@ class CollocatedIntegratedOptimizationProblem(OptimizationProblem, metaclass=ABC
         offset += 1
 
         discrete = self._collint_get_discrete(count, indices)
-        lbx, ubx = self._collint_get_lbx_ubx(count, indices)
+        lbx, ubx = self._collint_get_lbx_ubx(bounds, count, indices)
         x0 = self._collint_get_x0(count, indices)
 
         # Return number of state variables
@@ -2869,8 +2867,7 @@ class CollocatedIntegratedOptimizationProblem(OptimizationProblem, metaclass=ABC
 
         # Check coefficient matrix
         logger.info(
-            "Sanity check on objective and constraints Jacobian matrix"
-            "/constant coefficients values"
+            "Sanity check on objective and constraints Jacobian matrix/constant coefficients values"
         )
 
         in_var = nlp["x"]
@@ -3113,7 +3110,7 @@ class CollocatedIntegratedOptimizationProblem(OptimizationProblem, metaclass=ABC
        variable_to_all_indices = {k: set(v) for k, v in indices[0].items()}
        for ensemble_indices in indices[1:]:
            for k, v in ensemble_indices.items():
-                variable_to_all_indices[k] |= v
+                variable_to_all_indices[k] |= set(v)
 
        if len(inds_up) > 0:
            exceedences = []