rtc-tools 2.5.2rc3__py3-none-any.whl → 2.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of rtc-tools might be problematic.

Files changed (47)
  1. {rtc_tools-2.5.2rc3.dist-info → rtc_tools-2.6.0.dist-info}/METADATA +7 -7
  2. rtc_tools-2.6.0.dist-info/RECORD +50 -0
  3. {rtc_tools-2.5.2rc3.dist-info → rtc_tools-2.6.0.dist-info}/WHEEL +1 -1
  4. rtctools/__init__.py +2 -1
  5. rtctools/_internal/alias_tools.py +12 -10
  6. rtctools/_internal/caching.py +5 -3
  7. rtctools/_internal/casadi_helpers.py +11 -32
  8. rtctools/_internal/debug_check_helpers.py +1 -1
  9. rtctools/_version.py +3 -3
  10. rtctools/data/__init__.py +2 -2
  11. rtctools/data/csv.py +54 -33
  12. rtctools/data/interpolation/bspline.py +3 -3
  13. rtctools/data/interpolation/bspline1d.py +42 -29
  14. rtctools/data/interpolation/bspline2d.py +10 -4
  15. rtctools/data/netcdf.py +137 -93
  16. rtctools/data/pi.py +304 -210
  17. rtctools/data/rtc.py +64 -53
  18. rtctools/data/storage.py +91 -51
  19. rtctools/optimization/collocated_integrated_optimization_problem.py +1244 -696
  20. rtctools/optimization/control_tree_mixin.py +68 -66
  21. rtctools/optimization/csv_lookup_table_mixin.py +107 -74
  22. rtctools/optimization/csv_mixin.py +83 -52
  23. rtctools/optimization/goal_programming_mixin.py +239 -148
  24. rtctools/optimization/goal_programming_mixin_base.py +204 -111
  25. rtctools/optimization/homotopy_mixin.py +36 -27
  26. rtctools/optimization/initial_state_estimation_mixin.py +8 -8
  27. rtctools/optimization/io_mixin.py +48 -43
  28. rtctools/optimization/linearization_mixin.py +3 -1
  29. rtctools/optimization/linearized_order_goal_programming_mixin.py +57 -28
  30. rtctools/optimization/min_abs_goal_programming_mixin.py +72 -29
  31. rtctools/optimization/modelica_mixin.py +135 -81
  32. rtctools/optimization/netcdf_mixin.py +32 -18
  33. rtctools/optimization/optimization_problem.py +181 -127
  34. rtctools/optimization/pi_mixin.py +68 -36
  35. rtctools/optimization/planning_mixin.py +19 -0
  36. rtctools/optimization/single_pass_goal_programming_mixin.py +159 -112
  37. rtctools/optimization/timeseries.py +4 -6
  38. rtctools/rtctoolsapp.py +18 -18
  39. rtctools/simulation/csv_mixin.py +37 -30
  40. rtctools/simulation/io_mixin.py +9 -5
  41. rtctools/simulation/pi_mixin.py +62 -32
  42. rtctools/simulation/simulation_problem.py +471 -180
  43. rtctools/util.py +84 -56
  44. rtc_tools-2.5.2rc3.dist-info/RECORD +0 -49
  45. {rtc_tools-2.5.2rc3.dist-info → rtc_tools-2.6.0.dist-info}/COPYING.LESSER +0 -0
  46. {rtc_tools-2.5.2rc3.dist-info → rtc_tools-2.6.0.dist-info}/entry_points.txt +0 -0
  47. {rtc_tools-2.5.2rc3.dist-info → rtc_tools-2.6.0.dist-info}/top_level.txt +0 -0
rtctools/data/rtc.py CHANGED
@@ -3,11 +3,10 @@ import os
  import xml.etree.ElementTree as ET
  from collections import namedtuple

- ts_ids = namedtuple('ids', 'location_id parameter_id qualifier_id')
- p_ids = namedtuple('ids', 'model_id location_id parameter_id')
+ ts_ids = namedtuple("ids", "location_id parameter_id qualifier_id")
+ p_ids = namedtuple("ids", "model_id location_id parameter_id")

- ns = {'fews': 'http://www.wldelft.nl/fews',
- 'pi': 'http://www.wldelft.nl/fews/PI'}
+ ns = {"fews": "http://www.wldelft.nl/fews", "pi": "http://www.wldelft.nl/fews/PI"}

  logger = logging.getLogger("rtctools")

@@ -35,77 +34,85 @@ class DataConfig:
  tree = ET.parse(path)
  root = tree.getroot()

- timeseriess1 = root.findall('./*/fews:timeSeries', ns)
- timeseriess2 = root.findall('./fews:timeSeries', ns)
+ timeseriess1 = root.findall("./*/fews:timeSeries", ns)
+ timeseriess2 = root.findall("./fews:timeSeries", ns)
  timeseriess1.extend(timeseriess2)

  for timeseries in timeseriess1:
- pi_timeseries = timeseries.find('fews:PITimeSeries', ns)
+ pi_timeseries = timeseries.find("fews:PITimeSeries", ns)
  if pi_timeseries is not None:
- internal_id = timeseries.get('id')
- external_id = self.__pi_timeseries_id(pi_timeseries, 'fews')
+ internal_id = timeseries.get("id")
+ external_id = self.__pi_timeseries_id(pi_timeseries, "fews")

  if internal_id in self.__location_parameter_ids:
  message = (
- 'Found more than one external timeseries '
- 'mapped to internal id {} in {}.').format(internal_id, path)
+ "Found more than one external timeseries "
+ "mapped to internal id {} in {}."
+ ).format(internal_id, path)
  logger.error(message)
  raise Exception(message)
  elif external_id in self.__variable_map:
  message = (
- 'Found more than one internal timeseries '
- 'mapped to external id {} in {}.').format(external_id, path)
+ "Found more than one internal timeseries "
+ "mapped to external id {} in {}."
+ ).format(external_id, path)
  logger.error(message)
  raise Exception(message)
  else:
- self.__location_parameter_ids[internal_id] = \
- self.__pi_location_parameter_id(pi_timeseries, 'fews')
+ self.__location_parameter_ids[
+ internal_id
+ ] = self.__pi_location_parameter_id(pi_timeseries, "fews")
  self.__variable_map[external_id] = internal_id

- for k in ['import', 'export']:
- res = root.find(
- './fews:%s/fews:PITimeSeriesFile/fews:timeSeriesFile' % k, ns)
+ for k in ["import", "export"]:
+ res = root.find("./fews:%s/fews:PITimeSeriesFile/fews:timeSeriesFile" % k, ns)
  if res is not None:
- setattr(self, 'basename_%s' %
- k, os.path.splitext(res.text)[0])
+ setattr(self, "basename_%s" % k, os.path.splitext(res.text)[0])

- parameters = root.findall('./fews:parameter', ns)
+ parameters = root.findall("./fews:parameter", ns)
  if parameters is not None:
  for parameter in parameters:
- pi_parameter = parameter.find('fews:PIParameter', ns)
+ pi_parameter = parameter.find("fews:PIParameter", ns)
  if pi_parameter is not None:
- internal_id = parameter.get('id')
- external_id = self.__pi_parameter_id(pi_parameter, 'fews')
+ internal_id = parameter.get("id")
+ external_id = self.__pi_parameter_id(pi_parameter, "fews")

  if internal_id in self.__model_parameter_ids:
  message = (
- 'Found more than one external parameter mapped '
- 'to internal id {} in {}.').format(internal_id, path)
+ "Found more than one external parameter mapped "
+ "to internal id {} in {}."
+ ).format(internal_id, path)
  logger.error(message)
  raise Exception(message)
  if external_id in self.__parameter_map:
  message = (
- 'Found more than one interal parameter mapped to external '
- 'modelId {}, locationId {}, parameterId {} in {}.').format(
- external_id.model_id, external_id.location_id, external_id.parameter_id, path)
+ "Found more than one interal parameter mapped to external "
+ "modelId {}, locationId {}, parameterId {} in {}."
+ ).format(
+ external_id.model_id,
+ external_id.location_id,
+ external_id.parameter_id,
+ path,
+ )
  logger.error(message)
  raise Exception(message)
  else:
- self.__model_parameter_ids[internal_id] = self.__pi_model_parameter_id(pi_parameter, 'fews')
+ self.__model_parameter_ids[internal_id] = self.__pi_model_parameter_id(
+ pi_parameter, "fews"
+ )
  self.__parameter_map[external_id] = internal_id

  except IOError:
- logger.error(
- 'No rtcDataConfig.xml file was found in "{}".'.format(folder))
+ logger.error('No rtcDataConfig.xml file was found in "{}".'.format(folder))
  raise

  def __pi_timeseries_id(self, el, namespace):
- location_id = el.find(namespace + ':locationId', ns).text
- parameter_id = el.find(namespace + ':parameterId', ns).text
+ location_id = el.find(namespace + ":locationId", ns).text
+ parameter_id = el.find(namespace + ":parameterId", ns).text

- timeseries_id = location_id + ':' + parameter_id
+ timeseries_id = location_id + ":" + parameter_id

- qualifiers = el.findall(namespace + ':qualifierId', ns)
+ qualifiers = el.findall(namespace + ":qualifierId", ns)
  qualifier_ids = []
  for qualifier in qualifiers:
  qualifier_ids.append(qualifier.text)
@@ -113,36 +120,40 @@ class DataConfig:
  if len(qualifier_ids) > 0:
  qualifier_ids.sort()

- return timeseries_id + ':' + ':'.join(qualifier_ids)
+ return timeseries_id + ":" + ":".join(qualifier_ids)
  else:
  return timeseries_id

  def __pi_location_parameter_id(self, el, namespace):
  qualifier_ids = []
- qualifiers = el.findall(namespace + ':qualifierId', ns)
+ qualifiers = el.findall(namespace + ":qualifierId", ns)
  for qualifier in qualifiers:
  qualifier_ids.append(qualifier.text)

- location_parameter_ids = ts_ids(location_id=el.find(namespace + ':locationId', ns).text,
- parameter_id=el.find(namespace + ':parameterId', ns).text,
- qualifier_id=qualifier_ids)
+ location_parameter_ids = ts_ids(
+ location_id=el.find(namespace + ":locationId", ns).text,
+ parameter_id=el.find(namespace + ":parameterId", ns).text,
+ qualifier_id=qualifier_ids,
+ )
  return location_parameter_ids

  def __pi_parameter_id(self, el, namespace):
- model_id = el.find(namespace + ':modelId', ns).text
- location_id = el.find(namespace + ':locationId', ns).text
- parameter_id = el.find(namespace + ':parameterId', ns).text
+ model_id = el.find(namespace + ":modelId", ns).text
+ location_id = el.find(namespace + ":locationId", ns).text
+ parameter_id = el.find(namespace + ":parameterId", ns).text

  return self.__long_parameter_id(parameter_id, location_id, model_id)

  def __pi_model_parameter_id(self, el, namespace):
- model_id = el.find(namespace + ':modelId', ns).text
- location_id = el.find(namespace + ':locationId', ns).text
- parameter_id = el.find(namespace + ':parameterId', ns).text
+ model_id = el.find(namespace + ":modelId", ns).text
+ location_id = el.find(namespace + ":locationId", ns).text
+ parameter_id = el.find(namespace + ":parameterId", ns).text

- model_parameter_ids = p_ids(model_id=(model_id if model_id is not None else ""),
- location_id=(location_id if location_id is not None else ""),
- parameter_id=(parameter_id if parameter_id is not None else ""))
+ model_parameter_ids = p_ids(
+ model_id=(model_id if model_id is not None else ""),
+ location_id=(location_id if location_id is not None else ""),
+ parameter_id=(parameter_id if parameter_id is not None else ""),
+ )

  return model_parameter_ids

@@ -152,9 +163,9 @@ class DataConfig:
  of the form model:location:parameter.
  """
  if location_id is not None:
- parameter_id = location_id + ':' + parameter_id
+ parameter_id = location_id + ":" + parameter_id
  if model_id is not None:
- parameter_id = model_id + ':' + parameter_id
+ parameter_id = model_id + ":" + parameter_id
  return parameter_id

  def variable(self, pi_header):
@@ -166,7 +177,7 @@ class DataConfig:
  :returns: A timeseries ID.
  :rtype: string
  """
- series_id = self.__pi_timeseries_id(pi_header, 'pi')
+ series_id = self.__pi_timeseries_id(pi_header, "pi")
  try:
  return self.__variable_map[series_id]
  except KeyError:
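
The rtc.py hunks above are formatting-only: single quotes become double quotes and long statements are wrapped, with no behavioural change visible in the diff. For orientation, here is a minimal, hypothetical usage sketch of DataConfig as it appears in this diff; it assumes the constructor takes the folder containing rtcDataConfig.xml (as the IOError message suggests), and the locationId/parameterId values are made up for illustration.

    # Sketch only (not from the package docs): map a FEWS-PI timeseries header
    # to the internal RTC-Tools id configured in rtcDataConfig.xml.
    import xml.etree.ElementTree as ET

    from rtctools.data.rtc import DataConfig

    PI_NS = "http://www.wldelft.nl/fews/PI"

    # Build a minimal PI header with the elements __pi_timeseries_id reads:
    # locationId, parameterId and optional qualifierId entries.
    header = ET.Element("{%s}header" % PI_NS)
    ET.SubElement(header, "{%s}locationId" % PI_NS).text = "reservoir_1"  # hypothetical
    ET.SubElement(header, "{%s}parameterId" % PI_NS).text = "H.observed"  # hypothetical

    data_config = DataConfig("input")  # folder assumed to contain rtcDataConfig.xml
    internal_name = data_config.variable(header)  # internal id mapped to "reservoir_1:H.observed"
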
rtctools/data/storage.py CHANGED
@@ -23,14 +23,14 @@ class DataStoreAccessor(metaclass=ABCMeta):
  """

  #: Import file basename
- timeseries_import_basename = 'timeseries_import'
+ timeseries_import_basename = "timeseries_import"
  #: Export file basename
- timeseries_export_basename = 'timeseries_export'
+ timeseries_export_basename = "timeseries_export"

  def __init__(self, **kwargs):
  # Save arguments
- self._input_folder = kwargs['input_folder'] if 'input_folder' in kwargs else 'input'
- self._output_folder = kwargs['output_folder'] if 'output_folder' in kwargs else 'output'
+ self._input_folder = kwargs["input_folder"] if "input_folder" in kwargs else "input"
+ self._output_folder = kwargs["output_folder"] if "output_folder" in kwargs else "output"

  if logger.getEffectiveLevel() == logging.DEBUG:
  logger.debug("Expecting input files to be located in '" + self._input_folder + "'.")
@@ -71,7 +71,9 @@ class DataStore:
  @reference_datetime.setter
  def reference_datetime(self, value):
  if self.__reference_datetime_fixed and value != self.__reference_datetime:
- raise RuntimeError("Cannot change reference datetime after times in seconds has been requested.")
+ raise RuntimeError(
+ "Cannot change reference datetime after times in seconds has been requested."
+ )
  self.__reference_datetime = value

  @property
@@ -92,7 +94,7 @@ class DataStore:

  @property
  def datetimes(self) -> List[datetime]:
- """"
+ """
  Returns the timeseries times in seconds.

  :returns: timeseries datetimes, or None if there has been no call
@@ -102,7 +104,7 @@ class DataStore:

  @property
  def times_sec(self) -> np.ndarray:
- """"
+ """
  Returns the timeseries times in seconds.

  Note that once this method is called, it is no longer allowed to
@@ -123,18 +125,26 @@ class DataStore:
  # "0.0" as one of our times in seconds. This restriction may be
  # loosened in the future.
  if self.reference_datetime not in self.__timeseries_datetimes:
- raise Exception("Reference datetime {} should be equal to one of the timeseries datetimes {}".format(
- self.reference_datetime, self.__timeseries_datetimes))
- self.__timeseries_times_sec = self.datetime_to_sec(self.__timeseries_datetimes, self.reference_datetime)
+ raise Exception(
+ "Reference datetime {} should be equal to "
+ "one of the timeseries datetimes {}".format(
+ self.reference_datetime, self.__timeseries_datetimes
+ )
+ )
+ self.__timeseries_times_sec = self.datetime_to_sec(
+ self.__timeseries_datetimes, self.reference_datetime
+ )
  self.__timeseries_times_sec.flags.writeable = False
  self.__reference_datetime_fixed = True

- def set_timeseries(self,
- variable: str,
- datetimes: Iterable[datetime],
- values: np.ndarray,
- ensemble_member: int = 0,
- check_duplicates: bool = False) -> None:
+ def set_timeseries(
+ self,
+ variable: str,
+ datetimes: Iterable[datetime],
+ values: np.ndarray,
+ ensemble_member: int = 0,
+ check_duplicates: bool = False,
+ ) -> None:
  """
  Stores input time series values in the internal data store.

@@ -143,7 +153,7 @@ class DataStore:
  :param values: The values to be stored.
  :param ensemble_member: The ensemble member index.
  :param check_duplicates: If True, a warning will be given when overwriting values.
- If False, existing values can be silently overwritten with new values.
+ If False, existing values are silently overwritten with new values.
  """
  datetimes = list(datetimes)

@@ -151,24 +161,33 @@ class DataStore:
  raise TypeError("DateStore.set_timeseries() only support datetimes")

  if self.__timeseries_datetimes is not None and datetimes != self.__timeseries_datetimes:
- raise RuntimeError("Attempting to overwrite the input time series datetimes with different values. "
- "Please ensure all input time series have the same datetimes.")
+ raise RuntimeError(
+ "Attempting to overwrite the input time series datetimes with different values. "
+ "Please ensure all input time series have the same datetimes."
+ )
  self.__timeseries_datetimes = datetimes

  if len(self.__timeseries_datetimes) != len(values):
- raise ValueError("Length of values ({}) must be the same as length of datetimes ({})"
- .format(len(values), len(self.__timeseries_datetimes)))
+ raise ValueError(
+ "Length of values ({}) must be the same as length of datetimes ({})".format(
+ len(values), len(self.__timeseries_datetimes)
+ )
+ )

  if ensemble_member >= self.__ensemble_size:
  self.__update_ensemble_size(ensemble_member + 1)

  if check_duplicates and variable in self.__timeseries_values[ensemble_member].keys():
- logger.warning("Time series values for ensemble member {} and variable {} set twice. "
- "Overwriting old values.".format(ensemble_member, variable))
+ logger.warning(
+ "Time series values for ensemble member {} and variable {} set twice. "
+ "Overwriting old values.".format(ensemble_member, variable)
+ )

  self.__timeseries_values[ensemble_member][variable] = values

- def get_timeseries(self, variable: str, ensemble_member: int = 0) -> Tuple[List[datetime], np.ndarray]:
+ def get_timeseries(
+ self, variable: str, ensemble_member: int = 0
+ ) -> Tuple[List[datetime], np.ndarray]:
  """
  Looks up the time series in the internal data store.

@@ -181,12 +200,14 @@ class DataStore:
  def get_timeseries_names(self, ensemble_member: int = 0) -> Iterable[str]:
  return self.__timeseries_values[ensemble_member].keys()

- def set_timeseries_sec(self,
- variable: str,
- times_in_sec: np.ndarray,
- values: np.ndarray,
- ensemble_member: int = 0,
- check_duplicates: bool = False) -> None:
+ def set_timeseries_sec(
+ self,
+ variable: str,
+ times_in_sec: np.ndarray,
+ values: np.ndarray,
+ ensemble_member: int = 0,
+ check_duplicates: bool = False,
+ ) -> None:
  """
  Stores input time series values in the internal data store.

@@ -198,31 +219,42 @@ class DataStore:
  :param values: The values to be stored.
  :param ensemble_member: The ensemble member index.
  :param check_duplicates: If True, a warning will be given when overwriting values.
- If False, existing values can be silently overwritten with new values.
+ If False, existing values are silently overwritten with new values.
  """
  self._datetimes_to_seconds()

  if self.reference_datetime is None:
  raise RuntimeError("Cannot use times in seconds before reference datetime is set.")

- if self.__timeseries_times_sec is not None and not np.array_equal(times_in_sec, self.__timeseries_times_sec):
- raise RuntimeError("Attempting to overwrite the input time series times with different values. "
- "Please ensure all input time series have the same times.")
+ if self.__timeseries_times_sec is not None and not np.array_equal(
+ times_in_sec, self.__timeseries_times_sec
+ ):
+ raise RuntimeError(
+ "Attempting to overwrite the input time series times with different values. "
+ "Please ensure all input time series have the same times."
+ )

  if len(self.__timeseries_datetimes) != len(values):
- raise ValueError("Length of values ({}) must be the same as length of times ({})"
- .format(len(values), len(self.__timeseries_datetimes)))
+ raise ValueError(
+ "Length of values ({}) must be the same as length of times ({})".format(
+ len(values), len(self.__timeseries_datetimes)
+ )
+ )

  if ensemble_member >= self.__ensemble_size:
  self.__update_ensemble_size(ensemble_member + 1)

  if check_duplicates and variable in self.__timeseries_values[ensemble_member].keys():
- logger.warning("Time series values for ensemble member {} and variable {} set twice. "
- "Overwriting old values.".format(ensemble_member, variable))
+ logger.warning(
+ "Time series values for ensemble member {} and variable {} set twice. "
+ "Overwriting old values.".format(ensemble_member, variable)
+ )

  self.__timeseries_values[ensemble_member][variable] = values

- def get_timeseries_sec(self, variable: str, ensemble_member: int = 0) -> Tuple[np.ndarray, np.ndarray]:
+ def get_timeseries_sec(
+ self, variable: str, ensemble_member: int = 0
+ ) -> Tuple[np.ndarray, np.ndarray]:
  """
  Looks up the time series in the internal data store.

@@ -237,11 +269,13 @@ class DataStore:
  raise KeyError("ensemble_member {} does not exist".format(ensemble_member))
  return self.__timeseries_times_sec, self.__timeseries_values[ensemble_member][variable]

- def set_parameter(self,
- parameter_name: str,
- value: float,
- ensemble_member: int = 0,
- check_duplicates: bool = False) -> None:
+ def set_parameter(
+ self,
+ parameter_name: str,
+ value: float,
+ ensemble_member: int = 0,
+ check_duplicates: bool = False,
+ ) -> None:
  """
  Stores the parameter value in the internal data store.

@@ -249,14 +283,16 @@ class DataStore:
  :param value: The values to be stored.
  :param ensemble_member: The ensemble member index.
  :param check_duplicates: If True, a warning will be given when overwriting values.
- If False, existing values can be silently overwritten with new values.
+ If False, existing values are silently overwritten with new values.
  """
  if ensemble_member >= self.__ensemble_size:
  self.__update_ensemble_size(ensemble_member + 1)

  if check_duplicates and parameter_name in self.__parameters[ensemble_member].keys():
- logger.warning("Attempting to set parameter value for ensemble member {} and name {} twice. "
- "Using new value of {}.".format(ensemble_member, parameter_name, value))
+ logger.warning(
+ "Attempting to set parameter value for ensemble member {} and name {} twice. "
+ "Using new value of {}.".format(ensemble_member, parameter_name, value)
+ )

  self.__parameters[ensemble_member][parameter_name] = value

@@ -277,27 +313,31 @@ class DataStore:
  return self.__parameters[ensemble_member]

  @staticmethod
- def datetime_to_sec(d: Union[Iterable[datetime], datetime], t0: datetime) -> Union[np.ndarray, float]:
+ def datetime_to_sec(
+ d: Union[Iterable[datetime], datetime], t0: datetime
+ ) -> Union[np.ndarray, float]:
  """
  Returns the date/timestamps in seconds since t0.

  :param d: Iterable of datetimes or a single datetime object.
  :param t0: Reference datetime.
  """
- if hasattr(d, '__iter__'):
+ if hasattr(d, "__iter__"):
  return np.array([(t - t0).total_seconds() for t in d])
  else:
  return (d - t0).total_seconds()

  @staticmethod
- def sec_to_datetime(s: Union[Iterable[float], float], t0: datetime) -> Union[List[datetime], datetime]:
+ def sec_to_datetime(
+ s: Union[Iterable[float], float], t0: datetime
+ ) -> Union[List[datetime], datetime]:
  """
  Return the date/timestamps in seconds since t0 as datetime objects.

  :param s: Iterable of ints or a single int (number of seconds before or after t0).
  :param t0: Reference datetime.
  """
- if hasattr(s, '__iter__'):
+ if hasattr(s, "__iter__"):
  return [t0 + timedelta(seconds=t) for t in s]
  else:
  return t0 + timedelta(seconds=s)
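
The storage.py hunks are likewise dominated by formatting (wrapped signatures and messages), plus two docstring fixes where a stray fourth quote is removed. The two static helpers shown in full above, DataStore.datetime_to_sec and DataStore.sec_to_datetime, can be exercised on their own; a minimal round-trip sketch with an arbitrary reference datetime:

    # Sketch only: round-trip between datetimes and seconds-since-t0 using the
    # static helpers visible in the diff above. No DataStore instance is needed.
    from datetime import datetime, timedelta

    from rtctools.data.storage import DataStore

    t0 = datetime(2024, 1, 1)  # arbitrary reference datetime
    times = [t0 + timedelta(hours=h) for h in range(3)]

    seconds = DataStore.datetime_to_sec(times, t0)     # array([   0., 3600., 7200.])
    restored = DataStore.sec_to_datetime(seconds, t0)  # back to datetime objects
    assert restored == times                           # lossless round trip
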