rtc-tools 2.5.2rc4-py3-none-any.whl → 2.6.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/METADATA +7 -7
- rtc_tools-2.6.0.dist-info/RECORD +50 -0
- {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/WHEEL +1 -1
- rtctools/__init__.py +2 -1
- rtctools/_internal/alias_tools.py +12 -10
- rtctools/_internal/caching.py +5 -3
- rtctools/_internal/casadi_helpers.py +11 -32
- rtctools/_internal/debug_check_helpers.py +1 -1
- rtctools/_version.py +3 -3
- rtctools/data/__init__.py +2 -2
- rtctools/data/csv.py +54 -33
- rtctools/data/interpolation/bspline.py +3 -3
- rtctools/data/interpolation/bspline1d.py +42 -29
- rtctools/data/interpolation/bspline2d.py +10 -4
- rtctools/data/netcdf.py +137 -93
- rtctools/data/pi.py +304 -210
- rtctools/data/rtc.py +64 -53
- rtctools/data/storage.py +91 -51
- rtctools/optimization/collocated_integrated_optimization_problem.py +1244 -696
- rtctools/optimization/control_tree_mixin.py +68 -66
- rtctools/optimization/csv_lookup_table_mixin.py +107 -74
- rtctools/optimization/csv_mixin.py +83 -52
- rtctools/optimization/goal_programming_mixin.py +237 -146
- rtctools/optimization/goal_programming_mixin_base.py +204 -111
- rtctools/optimization/homotopy_mixin.py +36 -27
- rtctools/optimization/initial_state_estimation_mixin.py +8 -8
- rtctools/optimization/io_mixin.py +48 -43
- rtctools/optimization/linearization_mixin.py +3 -1
- rtctools/optimization/linearized_order_goal_programming_mixin.py +57 -28
- rtctools/optimization/min_abs_goal_programming_mixin.py +72 -29
- rtctools/optimization/modelica_mixin.py +135 -81
- rtctools/optimization/netcdf_mixin.py +32 -18
- rtctools/optimization/optimization_problem.py +181 -127
- rtctools/optimization/pi_mixin.py +68 -36
- rtctools/optimization/planning_mixin.py +19 -0
- rtctools/optimization/single_pass_goal_programming_mixin.py +159 -112
- rtctools/optimization/timeseries.py +4 -6
- rtctools/rtctoolsapp.py +18 -18
- rtctools/simulation/csv_mixin.py +37 -30
- rtctools/simulation/io_mixin.py +9 -5
- rtctools/simulation/pi_mixin.py +62 -32
- rtctools/simulation/simulation_problem.py +471 -180
- rtctools/util.py +84 -56
- rtc_tools-2.5.2rc4.dist-info/RECORD +0 -49
- {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/COPYING.LESSER +0 -0
- {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/entry_points.txt +0 -0
- {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/top_level.txt +0 -0
{rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: rtc-tools
-Version: 2.5.2rc4
+Version: 2.6.0
 Summary: Toolbox for control and optimization of water systems.
 Home-page: https://oss.deltares.nl/web/rtc-tools/home
 Author: Deltares
@@ -24,13 +24,13 @@ Classifier: Operating System :: Microsoft :: Windows
 Classifier: Operating System :: POSIX
 Classifier: Operating System :: Unix
 Classifier: Operating System :: MacOS
-Requires-Python: >=3.
+Requires-Python: >=3.8
 License-File: COPYING.LESSER
-Requires-Dist: casadi
-Requires-Dist: numpy
-Requires-Dist: scipy
-Requires-Dist: pymoca
-Requires-Dist: rtc-tools-channel-flow
+Requires-Dist: casadi ==3.6.*,>=3.6.3
+Requires-Dist: numpy <1.26,>=1.16.0
+Requires-Dist: scipy <1.11,>=1.0.0
+Requires-Dist: pymoca ==0.9.1
+Requires-Dist: rtc-tools-channel-flow >=1.1.0
 Provides-Extra: all
 Requires-Dist: netCDF4 ; extra == 'all'
 Provides-Extra: netcdf
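The 2.6.0 metadata replaces the previously unpinned runtime dependencies with explicit version ranges. As a side illustration only (not part of the package), the specifier strings above can be evaluated with the third-party `packaging` library; the "installed" versions below are made-up examples:

```python
# Hypothetical check of the 2.6.0 dependency pins using the "packaging" library.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

pins = {
    "casadi": SpecifierSet("==3.6.*,>=3.6.3"),
    "numpy": SpecifierSet("<1.26,>=1.16.0"),
    "scipy": SpecifierSet("<1.11,>=1.0.0"),
    "pymoca": SpecifierSet("==0.9.1"),
}

# Example versions only; substitute whatever is installed locally.
installed = {"casadi": "3.6.4", "numpy": "1.24.4", "scipy": "1.10.1", "pymoca": "0.9.1"}

for name, spec in pins.items():
    ok = Version(installed[name]) in spec
    print(f"{name} {installed[name]} satisfies '{spec}': {ok}")
```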
rtc_tools-2.6.0.dist-info/RECORD
ADDED

@@ -0,0 +1,50 @@
+rtctools/__init__.py,sha256=91hvS2-ryd2Pvw0COpsUzTwJwSnTZ035REiej-1hNI4,107
+rtctools/_version.py,sha256=Ew8vUIDrRgybDaHsPu1sREz91JeynwrSLqOvc2B4Ts8,497
+rtctools/rtctoolsapp.py,sha256=A6M0v6vymAZWdBPLzgRVyAUfahD0knC8B4IptJ7_sBM,4097
+rtctools/util.py,sha256=PaeKfDUA174ODZbY5fZjCTf-F-TdhW7yEuP189Ro190,9075
+rtctools/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+rtctools/_internal/alias_tools.py,sha256=XuQSAhhFuVtwn0yrAObZWIKPsSF4j2axXRtEmitIFPs,5310
+rtctools/_internal/caching.py,sha256=p4gqSL7kCI7Hff-KjMEP7mhJCQSiU_lYm2MR7E18gBM,905
+rtctools/_internal/casadi_helpers.py,sha256=oAf5zyFkZbaMhvhgMnQkOi2A6dBOzj-VAUkYwBf-Jxk,1410
+rtctools/_internal/debug_check_helpers.py,sha256=UgQTEPw4PyR7MbYLewSSWQqTwQj7xr5yUBk820O9Kk4,1084
+rtctools/data/__init__.py,sha256=EllgSmCdrlvQZSd1VilvjPaeYJGhY9ErPiQtedmuFoA,157
+rtctools/data/csv.py,sha256=iYOEED3AaNxt4ews_aAkHfl9Tq9a-9vjxvYwjVR_lQE,5231
+rtctools/data/netcdf.py,sha256=xpk4Ggl7gItNG6lO7p3OJPR-elK8_CiCtxUI7cX0gwk,19109
+rtctools/data/pi.py,sha256=yyuLHpCYrfAqL9dZWEXn4IgQ7yeAOUMpg8GD7Sh-kN4,45341
+rtctools/data/rtc.py,sha256=FBuUQ6aL4D3y6puKudyuLAeP_vmpDiliqzJg8f1kd7g,9043
+rtctools/data/storage.py,sha256=67J4BRTl0AMEzlKNZ8Xdpy_4cGtwx8Lo_tL2n0G4S9w,13206
+rtctools/data/interpolation/__init__.py,sha256=GBubCIT5mFoSTV-lOk7cpwvZekNMEe5bvqSQJ9HE34M,73
+rtctools/data/interpolation/bspline.py,sha256=qevB842XWCH3fWlWMBqKMy1mw37ust-0YtSnb9PKCEc,948
+rtctools/data/interpolation/bspline1d.py,sha256=hQrok4rrBcJV_HciuFjZYSwrSP8w_VufQRP6JLZhA7U,6106
+rtctools/data/interpolation/bspline2d.py,sha256=ScmX0fPDxbUVtj3pbUE0L7UJocqroD_6fUT-4cvdRMc,1693
+rtctools/optimization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+rtctools/optimization/collocated_integrated_optimization_problem.py,sha256=Bv1a1O4QTAdIpe9SOA9_oEDDph-oj-9q5yfN4SFMdW8,131343
+rtctools/optimization/control_tree_mixin.py,sha256=CC6TWp3kFQgMokx6213pRLx9iY17Fd0VcwG4Wpwa0Uo,8974
+rtctools/optimization/csv_lookup_table_mixin.py,sha256=xiyOKpPT3kQ6yuxSJRorGYW_QwnlBzW6vvrtvQNhHsE,17264
+rtctools/optimization/csv_mixin.py,sha256=sRp5paHWfCw2bz-23Nw-HdFLS3CZTpVwaBdFo98DbvE,12252
+rtctools/optimization/goal_programming_mixin.py,sha256=GK25DrbAY_rMsra080pSCDZwzLQNN2Ppd-2d0_FEllg,32999
+rtctools/optimization/goal_programming_mixin_base.py,sha256=oh9CsEiyYTmthcfvRbX-9Z9bIo6SHv_DCiVt9kx0sjI,43781
+rtctools/optimization/homotopy_mixin.py,sha256=Kh0kMfxB-Fo1FBGW5tPOQk24Xx_Mmw_p0YuSQotdkMU,6905
+rtctools/optimization/initial_state_estimation_mixin.py,sha256=74QYfG-VYYTNVg-kAnCG6QoY3_sUmaID0ideF7bPkkY,3116
+rtctools/optimization/io_mixin.py,sha256=AsZQ7YOUcUbWoczmjTXaSje5MUEsPNbQyZBJ6qzSjzU,11821
+rtctools/optimization/linearization_mixin.py,sha256=mG5S7uwvwDydw-eBPyQKnLyKoy08EBjQh25vu97afhY,1049
+rtctools/optimization/linearized_order_goal_programming_mixin.py,sha256=LQ2qpYt0YGLpEoerif4FJ5wwzq16q--27bsRjcqIU5A,9087
+rtctools/optimization/min_abs_goal_programming_mixin.py,sha256=WMOv9EF8cfDJgTunzXfI_cUmBSQK26u1HJB_9EAarfM,14031
+rtctools/optimization/modelica_mixin.py,sha256=ysVMayNA4sSFoHkSdhjWOxT6UzOVbN0ZeM4v-RpvZXE,17161
+rtctools/optimization/netcdf_mixin.py,sha256=-zkXh3sMYE50c3kHsrmUVGWMSFm-0cXQpGrCm0yn-Tc,7563
+rtctools/optimization/optimization_problem.py,sha256=qzpc81NaZMeoXKuayFmBF15iXYuNAk5yxmaER_Gcz_A,44131
+rtctools/optimization/pi_mixin.py,sha256=63qda6i7hjtDuP3hL6RO29vCCP11aUpR9B4KoqlLFVI,11314
+rtctools/optimization/planning_mixin.py,sha256=O_Y74X8xZmaNZR4iYOe7BR06s9hnmcapbuHYHQTBPPQ,724
+rtctools/optimization/single_pass_goal_programming_mixin.py,sha256=Zb9szg3PGT2o6gkGsXluSfEaAswkw3TKfPQDzUrj_Y4,25784
+rtctools/optimization/timeseries.py,sha256=nCrsGCJThBMh9lvngEpbBDa834_QvklVvkxJqwX4a1M,1734
+rtctools/simulation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+rtctools/simulation/csv_mixin.py,sha256=rGDUFPsqGHmF0_dWdXeWzWzMpkPmwCNweTBVrwSh31g,6704
+rtctools/simulation/io_mixin.py,sha256=SJasNGI--OQ9Y-Z61oeeaGCxSrNddYz4AOVfJYbmf74,6209
+rtctools/simulation/pi_mixin.py,sha256=uwl61LYjb8dmMz910EB2-bC0KSuhLzsrJzk0hxWYEhk,9359
+rtctools/simulation/simulation_problem.py,sha256=gTAimG2MLw_TTkeHLkIMxpYgAmR-voqzvje7pcFnw4U,44556
+rtc_tools-2.6.0.dist-info/COPYING.LESSER,sha256=46mU2C5kSwOnkqkw9XQAJlhBL2JAf1_uCD8lVcXyMRg,7652
+rtc_tools-2.6.0.dist-info/METADATA,sha256=jn_eXPKFLJnhDwsGUgbGcqff27OfYGJkt8llDEyT4ws,1450
+rtc_tools-2.6.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+rtc_tools-2.6.0.dist-info/entry_points.txt,sha256=-x622IB_l1duw2M6t6syfQ4yzOiQTp0IZxKGcYRgWgk,151
+rtc_tools-2.6.0.dist-info/top_level.txt,sha256=pnBrb58PFPd1kp1dqa-JHU7R55h3alDNJIJnF3Jf9Dw,9
+rtc_tools-2.6.0.dist-info/RECORD,,
rtctools/__init__.py
CHANGED

rtctools/_internal/alias_tools.py
CHANGED

@@ -13,8 +13,8 @@ class OrderedSet(MutableSet):
 
     def __init__(self, iterable=None):
         self.end = end = []
-        end += [None, end, end]
-        self.map = {}
+        end += [None, end, end]  # sentinel node for doubly linked list
+        self.map = {}  # key --> [key, prev, next]
         if iterable is not None:
             self |= iterable
 
@@ -25,11 +25,11 @@ class OrderedSet(MutableSet):
         return key in self.map
 
     def __getstate__(self):
-        """
+        """Avoids max depth RecursionError when using pickle"""
         return list(self)
 
     def __setstate__(self, state):
-        """
+        """Tells pickle how to restore instance"""
         self.__init__(state)
 
     def __getitem__(self, index):
@@ -45,7 +45,7 @@ class OrderedSet(MutableSet):
                 return curr[0]
             curr = curr[2]
             i += 1
-        raise IndexError(
+        raise IndexError("set index {} out of range with length {}".format(index, len(self)))
 
     def add(self, key):
         if key not in self.map:
@@ -75,25 +75,27 @@ class OrderedSet(MutableSet):
 
     def pop(self, last=True):
         if not self:
-            raise KeyError(
+            raise KeyError("set is empty")
         key = self.end[1][0] if last else self.end[2][0]
         self.discard(key)
         return key
 
     def __repr__(self):
         if not self:
-            return
-        return
+            return "%s()" % (self.__class__.__name__,)
+        return "%s(%r)" % (self.__class__.__name__, list(self))
 
     def __eq__(self, other):
         if isinstance(other, OrderedSet):
             return len(self) == len(other) and list(self) == list(other)
         return set(self) == set(other)
+
+
 # End snippet
 
 
-KT = TypeVar(
-VT = TypeVar(
+KT = TypeVar("KT")
+VT = TypeVar("VT")
 
 
 class AliasDict(Generic[KT, VT]):
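The docstrings added to `__getstate__`/`__setstate__` explain why those hooks exist: pickling the deeply nested, self-referential linked-list nodes would otherwise hit the recursion limit, so only the key order is pickled and the structure is rebuilt on load. A minimal standalone sketch of that pattern (a toy class, not the rtc-tools `OrderedSet` itself):

```python
import pickle


class LinkedSet:
    """Toy ordered set with a self-referential sentinel, mirroring the pattern above."""

    def __init__(self, iterable=None):
        self.end = end = []
        end += [None, end, end]  # sentinel node: [key, prev, next]
        self.map = {}
        if iterable is not None:
            for key in iterable:
                self.add(key)

    def add(self, key):
        if key not in self.map:
            end = self.end
            prev = end[1]
            prev[2] = end[1] = self.map[key] = [key, prev, end]

    def __iter__(self):
        curr = self.end[2]
        while curr is not self.end:
            yield curr[0]
            curr = curr[2]

    def __getstate__(self):
        # Pickle only the keys, not the deeply nested node structure,
        # to avoid a max depth RecursionError for long sets.
        return list(self)

    def __setstate__(self, state):
        # Rebuild the linked list from the pickled key list.
        self.__init__(state)


s = LinkedSet("abc")
print(list(pickle.loads(pickle.dumps(s))))  # ['a', 'b', 'c']
```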
rtctools/_internal/caching.py
CHANGED
@@ -5,14 +5,15 @@ def cached(f):
                 return f(self, ensemble_member)
             else:
                 return f(self)
+
         # Add a check so that caching is applied to the 'toplevel'
         # method implementation in the class hierarchy only.
-        call_in_progress =
+        call_in_progress = "__" + f.__name__ + "_in_progress"
         if hasattr(self, call_in_progress):
             return call()
-        cache_name =
+        cache_name = "__" + f.__name__
         if ensemble_member is not None:
-            cache_name =
+            cache_name = "{}[{}]".format(cache_name, ensemble_member)
         if hasattr(self, cache_name):
             return getattr(self, cache_name)
         setattr(self, call_in_progress, True)
@@ -20,4 +21,5 @@ def cached(f):
         setattr(self, cache_name, value)
         delattr(self, call_in_progress)
         return value
+
     return wrapper
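For context, the decorator patched above memoizes a method's result on the instance under a name derived from the method (optionally indexed by ensemble member), and caches only the top-most call in the class hierarchy. A simplified, self-contained sketch reconstructed from the hunks above (the `Problem` class is a hypothetical example, not rtc-tools code):

```python
import functools


def cached(f):
    """Memoize a method's result on the instance, as in rtctools._internal.caching."""

    @functools.wraps(f)
    def wrapper(self, ensemble_member=None):
        def call():
            if ensemble_member is not None:
                return f(self, ensemble_member)
            return f(self)

        # Only the top-most call in the class hierarchy is cached.
        call_in_progress = "__" + f.__name__ + "_in_progress"
        if hasattr(self, call_in_progress):
            return call()
        cache_name = "__" + f.__name__
        if ensemble_member is not None:
            cache_name = "{}[{}]".format(cache_name, ensemble_member)
        if hasattr(self, cache_name):
            return getattr(self, cache_name)
        setattr(self, call_in_progress, True)
        value = call()
        setattr(self, cache_name, value)
        delattr(self, call_in_progress)
        return value

    return wrapper


class Problem:  # hypothetical example class
    calls = 0

    @cached
    def bounds(self):
        Problem.calls += 1
        return {"x": (0.0, 10.0)}


p = Problem()
p.bounds()
p.bounds()
print(Problem.calls)  # 1 -- the second call hits the per-instance cache
```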

rtctools/_internal/casadi_helpers.py
CHANGED

@@ -4,22 +4,16 @@ import casadi as ca
 
 logger = logging.getLogger("rtctools")
 
-try:
-    from casadi import interp1d
-except ImportError:
-    logger.warning('interp1d not available in this version of CasADi. Linear interpolation will not work.')
-    interp1d = None
-
 
 def is_affine(e, v):
     try:
-        Af = ca.Function(
+        Af = ca.Function("f", [v], [ca.jacobian(e, v)]).expand()
     except RuntimeError as e:
         if "'eval_sx' not defined for" in str(e):
-            Af = ca.Function(
+            Af = ca.Function("f", [v], [ca.jacobian(e, v)])
        else:
             raise
-    return
+    return Af.sparsity_jac(0, 0).nnz() == 0
 
 
 def nullvertcat(*L):
@@ -38,7 +32,7 @@ def reduce_matvec(e, v):
 
     This reduces the number of nodes required to represent the linear operations.
     """
-    Af = ca.Function(
+    Af = ca.Function("Af", [ca.MX()], [ca.jacobian(e, v)])
     A = Af(ca.DM())
     return ca.reshape(ca.mtimes(A, v), e.shape)
 
@@ -47,30 +41,15 @@ def substitute_in_external(expr, symbols, values):
     if len(symbols) == 0 or all(isinstance(x, ca.DM) for x in expr):
         return expr
     else:
-        f = ca.Function(
+        f = ca.Function("f", symbols, expr)
         return f.call(values, True, False)
 
 
 def interpolate(ts, xs, t, equidistant, mode=0):
-    if
-
-
-
-            mode_str = 'floor'
-        else:
-            mode_str = 'ceil'
-        return interp1d(ts, xs, t, mode_str, equidistant)
+    if mode == 0:
+        mode_str = "linear"
+    elif mode == 1:
+        mode_str = "floor"
     else:
-
-
-        else:
-            xs = xs[1:]  # block-backward
-        t = ca.MX(t)
-        if t.size1() > 1:
-            t_ = ca.MX.sym('t')
-            xs_ = ca.MX.sym('xs', xs.size1())
-            f = ca.Function('interpolant', [t_, xs_], [ca.mtimes(ca.transpose((t_ >= ts[:-1]) * (t_ < ts[1:])), xs_)])
-            f = f.map(t.size1(), 'serial')
-            return ca.transpose(f(ca.transpose(t), ca.repmat(xs, 1, t.size1())))
-        else:
-            return ca.mtimes(ca.transpose((t >= ts[:-1]) * (t < ts[1:])), xs)
+        mode_str = "ceil"
+    return ca.interp1d(ts, xs, t, mode_str, equidistant)
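The rewritten `interpolate()` drops the old fallback path and always delegates to `casadi.interp1d`, mapping mode 0/1/other to "linear"/"floor"/"ceil". A small numeric sketch of that call (the sample values are illustrative only; floor/ceil semantics follow CasADi's documentation):

```python
import casadi as ca

ts = ca.DM([0.0, 1.0, 2.0, 3.0])     # sample times (equidistant)
xs = ca.DM([0.0, 10.0, 20.0, 30.0])  # sample values
t = ca.DM([0.5, 1.5])                # query points

# mode 0 -> "linear"; mode 1 -> "floor"; anything else -> "ceil"
y = ca.interp1d(ts, xs, t, "linear", True)
print(y)  # expected: [5, 15]
```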
rtctools/_version.py
CHANGED
@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
- "date": "
+ "date": "2024-01-30T12:56:46+0000",
  "dirty": false,
  "error": null,
- "full-revisionid": "
- "version": "2.5.2rc4"
+ "full-revisionid": "7f078d793396303774d150ae5ebabb7534420ace",
+ "version": "2.6.0"
 }
 ''' # END VERSION_JSON
 
rtctools/data/__init__.py
CHANGED
@@ -1,4 +1,4 @@
 import xml.etree.ElementTree as ET
 
-ET.register_namespace(
-ET.register_namespace(
+ET.register_namespace("fews", "http://www.wldelft.nl/fews")
+ET.register_namespace("pi", "http://www.wldelft.nl/fews/PI")
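Registering the `fews` and `pi` prefixes affects how ElementTree serializes FEWS PI documents: without it, writes fall back to generated `ns0:`-style prefixes. A minimal illustration (the element name used here is only an example):

```python
import xml.etree.ElementTree as ET

ET.register_namespace("pi", "http://www.wldelft.nl/fews/PI")

# Example element in the FEWS PI namespace.
elem = ET.Element("{http://www.wldelft.nl/fews/PI}TimeSeries")
print(ET.tostring(elem).decode())
# <pi:TimeSeries xmlns:pi="http://www.wldelft.nl/fews/PI" />
```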
rtctools/data/csv.py
CHANGED
@@ -22,15 +22,18 @@ def _boolean_to_nan(data, fname):
     convert_to_nan = []
     dtypes_out = []
     for i, name in enumerate(data.dtype.names):
-        if dtypes_in[i][1][1] ==
+        if dtypes_in[i][1][1] == "b":
             convert_to_nan.append(name)
-            dtypes_out.append((dtypes_in[i][0],
+            dtypes_out.append((dtypes_in[i][0], "<f8"))
         else:
             dtypes_out.append(dtypes_in[i])
 
     if convert_to_nan:
-        logger.warning(
-            "
+        logger.warning(
+            "Column(s) {} were detected as boolean in '{}'; converting to NaN".format(
+                ", ".join(["'{}'".format(name) for name in convert_to_nan]), fname
+            )
+        )
     data = data.astype(dtypes_out)
     for name in convert_to_nan:
         data[name] = np.nan
@@ -38,7 +41,7 @@ def _boolean_to_nan(data, fname):
     return data
 
 
-def load(fname, delimiter=
+def load(fname, delimiter=",", with_time=False):
     """
     Check delimiter of csv and read contents to an array. Assumes no date-time conversion needed.
 
@@ -50,56 +53,68 @@ def load(fname, delimiter=',', with_time=False):
     """
     c = {}
     if with_time:
-        c.update({0: lambda str: datetime.strptime(
-            str.decode("utf-8"), '%Y-%m-%d %H:%M:%S')})
+        c.update({0: lambda str: datetime.strptime(str.decode("utf-8"), "%Y-%m-%d %H:%M:%S")})
 
     # Check delimiter of csv file. If semicolon, check if decimal separator is
     # a comma.
-    if delimiter ==
-        with open(fname,
+    if delimiter == ";":
+        with open(fname, "rb") as csvfile:
             # Read the first line, this should be a header. Count columns by
             # counting separator.
             sample_csvfile = csvfile.readline()
-            n_semicolon = sample_csvfile.count(b
-            # We actually only need one number to evaluate if commas are used as decimal
-            # certain csv writers don't use a decimal when the value has
-            # (e.g. 12.0 becomes 12) so we read the next 1024 bytes
+            n_semicolon = sample_csvfile.count(b";")
+            # We actually only need one number to evaluate if commas are used as decimal
+            # separator, but certain csv writers don't use a decimal when the value has
+            # no meaningful decimal(e.g. 12.0 becomes 12) so we read the next 1024 bytes
+            # to make sure we catch a number.
             sample_csvfile = csvfile.read(1024)
             # Count the commas
-            n_comma_decimal = sample_csvfile.count(b
+            n_comma_decimal = sample_csvfile.count(b",")
             # If commas are used as decimal separator, we need additional
             # converters.
             if n_comma_decimal:
-                c.update(
-
+                c.update(
+                    {
+                        i + len(c): lambda str: float(str.decode("utf-8").replace(",", "."))
+                        for i in range(1 + n_semicolon - len(c))
+                    }
+                )
 
     # Read the csv file and convert to array
     try:
         if len(c):  # Converters exist, so use them.
             try:
-                data = np.genfromtxt(
+                data = np.genfromtxt(
+                    fname, delimiter=delimiter, deletechars="", dtype=None, names=True, converters=c
+                )
                 return _boolean_to_nan(data, fname)
-            except
+            except (
+                np.lib._iotools.ConverterError
+            ):  # value does not conform to expected date-time format
                 type, value, traceback = sys.exc_info()
                 logger.error(
-
+                    "CSVMixin: converter of csv reader failed on {}: {}".format(fname, value)
+                )
                 raise ValueError(
-
-
+                    "CSVMixin: wrong date time or value format in {}. "
+                    "Should be %Y-%m-%d %H:%M:%S and numerical values everywhere.".format(fname)
+                )
         else:
-            data = np.genfromtxt(fname, delimiter=delimiter, deletechars=
+            data = np.genfromtxt(fname, delimiter=delimiter, deletechars="", dtype=None, names=True)
             return _boolean_to_nan(data, fname)
-    except ValueError:
+    except ValueError:
+        # can occur when delimiter changes after first 1024 bytes of file,
+        # or delimiter is not , or ;
         type, value, traceback = sys.exc_info()
-        logger.error(
-            'CSV: Value reader of csv reader failed on {}: {}'.format(fname, value))
+        logger.error("CSV: Value reader of csv reader failed on {}: {}".format(fname, value))
         raise ValueError(
             "CSV: could not read all values from {}. Used delimiter '{}'. "
             "Please check delimiter (should be ',' or ';' throughout the file) "
-            "and if all values are numbers.".format(fname, delimiter)
+            "and if all values are numbers.".format(fname, delimiter)
+        )
 
 
-def save(fname, data, delimiter=
+def save(fname, data, delimiter=",", with_time=False):
     """
     Write the contents of an array to a csv file.
 
@@ -109,10 +124,16 @@ def save(fname, data, delimiter=',', with_time=False):
     :param with_time: Whether to output the first column of the data as time stamps.
     """
     if with_time:
-        data[
-        fmt = [
+        data["time"] = [t.strftime("%Y-%m-%d %H:%M:%S") for t in data["time"]]
+        fmt = ["%s"] + (len(data.dtype.names) - 1) * ["%f"]
     else:
-        fmt = len(data.dtype.names) * [
-
-    np.savetxt(
-
+        fmt = len(data.dtype.names) * ["%f"]
+
+    np.savetxt(
+        fname,
+        data,
+        delimiter=delimiter,
+        header=delimiter.join(data.dtype.names),
+        fmt=fmt,
+        comments="",
+    )
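The reworked loader sniffs the delimiter and, for semicolon-separated files, installs per-column converters so comma decimals parse as floats. A small self-contained sketch of that converter idea (the file contents are invented for the example; the guard on `bytes` is only there so the snippet works across numpy versions):

```python
import io

import numpy as np

# Semicolon-delimited sample with comma decimal separators (illustrative only).
text = b"id;level;flow\n1;1,25;0,5\n2;2,75;1,0\n"

n_columns = text.splitlines()[0].count(b";") + 1
converters = {
    # Leave the first (integer id) column alone; convert "1,25" -> 1.25 elsewhere.
    i: lambda s: float((s.decode("utf-8") if isinstance(s, bytes) else s).replace(",", "."))
    for i in range(1, n_columns)
}

data = np.genfromtxt(
    io.BytesIO(text), delimiter=";", deletechars="", dtype=None, names=True, converters=converters
)
print(data["level"])  # [1.25 2.75]
```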

rtctools/data/interpolation/bspline.py
CHANGED

@@ -14,7 +14,8 @@ class BSpline:
         :param k: Order of the basis function.
         :param i: Knot number.
 
-        :returns: The B-Spline basis function of the given order, at the given knot, evaluated at
+        :returns: The B-Spline basis function of the given order, at the given knot, evaluated at
+            the given point.
         """
         if k == 0:
             return if_else(logic_and(t[i] <= x, x < t[i + 1]), 1.0, 0.0)
@@ -24,8 +25,7 @@ class BSpline:
         else:
             a = 0.0
         if t[i + 1] < t[i + k + 1]:
-            b = (t[i + k + 1] - x) / (t[i + k + 1] - t[i + 1]) * \
-                self.basis(t, x, k - 1, i + 1)
+            b = (t[i + k + 1] - x) / (t[i + k + 1] - t[i + 1]) * self.basis(t, x, k - 1, i + 1)
         else:
             b = 0.0
         return a + b
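The `basis()` method reformatted above is the standard Cox-de Boor recursion. As an independent sanity check (not rtc-tools code), the same recursion can be written with plain floats and compared against scipy's B-spline evaluation; the knot vector below is just an example:

```python
import numpy as np
from scipy.interpolate import BSpline as SciPyBSpline


def basis(t, x, k, i):
    """Cox-de Boor recursion for the i-th B-spline basis function of order k."""
    if k == 0:
        return 1.0 if t[i] <= x < t[i + 1] else 0.0
    a = 0.0
    if t[i] < t[i + k]:
        a = (x - t[i]) / (t[i + k] - t[i]) * basis(t, x, k - 1, i)
    b = 0.0
    if t[i + 1] < t[i + k + 1]:
        b = (t[i + k + 1] - x) / (t[i + k + 1] - t[i + 1]) * basis(t, x, k - 1, i + 1)
    return a + b


t = np.array([0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 3.0, 3.0, 3.0])  # clamped cubic knots
k, i, x = 3, 2, 1.3
c = np.zeros(len(t) - k - 1)
c[i] = 1.0  # isolate the i-th basis function
print(basis(t, x, k, i), SciPyBSpline(t, c, k)(x))  # the two values should agree
```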

rtctools/data/interpolation/bspline1d.py
CHANGED

@@ -1,13 +1,13 @@
-from casadi import Function, SX, if_else, inf, jacobian, logic_and, nlpsol, sum2, vertcat
-
 import numpy as np
+from casadi import SX, Function, if_else, inf, jacobian, logic_and, nlpsol, sum2, vertcat
 
 from .bspline import BSpline
 
 
 class BSpline1D(BSpline):
     """
-    Arbitrary order, one-dimensional, non-uniform B-Spline implementation using Cox-de Boor
+    Arbitrary order, one-dimensional, non-uniform B-Spline implementation using Cox-de Boor
+    recursion.
     """
 
     def __init__(self, t, w, k=3):
@@ -36,13 +36,26 @@ class BSpline1D(BSpline):
         """
         y = 0.0
         for i in range(len(self.__t) - self.__k - 1):
-            y += if_else(
-
+            y += if_else(
+                logic_and(x >= self.__t[i], x <= self.__t[i + self.__k + 1]),
+                self.__w[i] * self.basis(self.__t, x, self.__k, i),
+                0.0,
+            )
         return y
 
     @classmethod
-    def fit(
-
+    def fit(
+        cls,
+        x,
+        y,
+        k=3,
+        monotonicity=0,
+        curvature=0,
+        num_test_points=100,
+        epsilon=1e-7,
+        delta=1e-4,
+        interior_pts=None,
+    ):
         """
         fit() returns a tck tuple like scipy.interpolate.splrep, but adjusts
         the weights to meet the desired constraints to the curvature of the spline curve.
@@ -86,33 +99,33 @@ class BSpline1D(BSpline):
         # Generate knots: This algorithm is based on the Fitpack algorithm by p.dierckx
         # The original code lives here: http://www.netlib.org/dierckx/
         if k % 2 == 1:
-            interior_pts = x[k // 2 + 1
+            interior_pts = x[k // 2 + 1 : -k // 2]
         else:
-            interior_pts = (x[k // 2 + 1
+            interior_pts = (x[k // 2 + 1 : -k // 2] + x[k // 2 : -k // 2 - 1]) / 2
         t = np.concatenate(
-            (np.full(k + 1, x[0] - delta), interior_pts, np.full(k + 1, x[-1] + delta)))
+            (np.full(k + 1, x[0] - delta), interior_pts, np.full(k + 1, x[-1] + delta))
+        )
         num_knots = len(t)
 
         # Casadi Variable Symbols
-        c = SX.sym(
-        x_sym = SX.sym(
+        c = SX.sym("c", num_knots)
+        x_sym = SX.sym("x")
 
         # Casadi Representation of Spline Function & Derivatives
         expr = cls(t, c, k)(x_sym)
         free_vars = [c, x_sym]
-        bspline = Function(
+        bspline = Function("bspline", free_vars, [expr])
         J = jacobian(expr, x_sym)
         # bspline_prime = Function('bspline_prime', free_vars, [J])
         H = jacobian(J, x_sym)
-        bspline_prime_prime = Function(
+        bspline_prime_prime = Function("bspline_prime_prime", free_vars, [H])
 
         # Objective Function
-        xpt = SX.sym(
-        ypt = SX.sym(
-        sq_diff = Function(
-
-
-        f = sum2(sq_diff(SX(x), SX(y)))
+        xpt = SX.sym("xpt")
+        ypt = SX.sym("ypt")
+        sq_diff = Function("sq_diff", [c, xpt, ypt], [(ypt - bspline(c, xpt)) ** 2])
+        sq_diff = sq_diff.map(N, "serial")
+        f = sum2(sq_diff(c, SX(x), SX(y)))
 
         # Setup Curvature Constraints
         delta_c_max = np.full(num_knots - 1, inf)
@@ -129,24 +142,24 @@ class BSpline1D(BSpline):
             max_slope_slope = np.full(num_test_points, -epsilon)
         else:
             min_slope_slope = np.full(num_test_points, epsilon)
-        monotonicity_constraints = vertcat(*[
-            c[i + 1] - c[i] for i in range(num_knots - 1)])
+        monotonicity_constraints = vertcat(*[c[i + 1] - c[i] for i in range(num_knots - 1)])
         x_linspace = np.linspace(x[0], x[-1], num_test_points)
-        curvature_constraints = vertcat(*[
-            bspline_prime_prime(c, SX(x)) for x in x_linspace])
+        curvature_constraints = vertcat(*[bspline_prime_prime(c, SX(x)) for x in x_linspace])
         g = vertcat(monotonicity_constraints, curvature_constraints)
         lbg = np.concatenate((delta_c_min, min_slope_slope))
         ubg = np.concatenate((delta_c_max, max_slope_slope))
 
         # Perform mini-optimization problem to calculate the the values of c
-        nlp = {
+        nlp = {"x": c, "f": f, "g": g}
         my_solver = "ipopt"
-        solver = nlpsol(
+        solver = nlpsol(
+            "solver", my_solver, nlp, {"print_time": 0, "expand": True, "ipopt": {"print_level": 0}}
+        )
         sol = solver(lbg=lbg, ubg=ubg)
         stats = solver.stats()
-        return_status = stats[
-        if return_status not in [
+        return_status = stats["return_status"]
+        if return_status not in ["Solve_Succeeded", "Solved_To_Acceptable_Level", "SUCCESS"]:
             raise Exception("Spline fitting failed with status {}".format(return_status))
 
         # Return the new tck tuple
-        return (t, np.array(sol[
+        return (t, np.array(sol["x"]).ravel(), k)
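`BSpline1D.fit()` assembles a small NLP (`{"x": c, "f": f, "g": g}`) and hands it to IPOPT via `casadi.nlpsol`, with bounds on `g` encoding the monotonicity and curvature constraints. A stripped-down sketch of that solver pattern on a trivial problem (a monotone least-squares fit, not the spline objective itself):

```python
import casadi as ca
import numpy as np

# Decision variables: fit a non-decreasing sequence c to noisy targets y.
y = np.array([0.0, 0.3, 0.2, 0.9, 1.0])
c = ca.SX.sym("c", y.size)

f = ca.sumsqr(c - ca.DM(y))  # least-squares objective
g = ca.vertcat(*[c[i + 1] - c[i] for i in range(y.size - 1)])  # monotonicity constraints

nlp = {"x": c, "f": f, "g": g}
solver = ca.nlpsol("solver", "ipopt", nlp, {"print_time": 0, "ipopt": {"print_level": 0}})
sol = solver(lbg=np.zeros(y.size - 1), ubg=np.full(y.size - 1, ca.inf))

print(solver.stats()["return_status"])  # e.g. "Solve_Succeeded"
print(np.array(sol["x"]).ravel())       # non-decreasing fit, e.g. [0. 0.25 0.25 0.9 1.]
```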

rtctools/data/interpolation/bspline2d.py
CHANGED

@@ -39,10 +39,16 @@ class BSpline2D(BSpline):
         """
         z = 0.0
         for i in range(len(self.__tx) - self.__kx - 1):
-            bx = if_else(
-
+            bx = if_else(
+                logic_and(x >= self.__tx[i], x <= self.__tx[i + self.__kx + 1]),
+                self.basis(self.__tx, x, self.__kx, i),
+                0.0,
+            )
             for j in range(len(self.__ty) - self.__ky - 1):
-                by = if_else(
-
+                by = if_else(
+                    logic_and(y >= self.__ty[j], y <= self.__ty[j + self.__ky + 1]),
+                    self.basis(self.__ty, y, self.__ky, j),
+                    0.0,
+                )
                 z += self.__w[i * (len(self.__ty) - self.__ky - 1) + j] * bx * by
         return z