rtc-tools 2.5.2rc4__py3-none-any.whl → 2.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rtc-tools might be problematic; see the registry's advisory page for more details.

Files changed (47)
  1. {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/METADATA +7 -7
  2. rtc_tools-2.6.0.dist-info/RECORD +50 -0
  3. {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/WHEEL +1 -1
  4. rtctools/__init__.py +2 -1
  5. rtctools/_internal/alias_tools.py +12 -10
  6. rtctools/_internal/caching.py +5 -3
  7. rtctools/_internal/casadi_helpers.py +11 -32
  8. rtctools/_internal/debug_check_helpers.py +1 -1
  9. rtctools/_version.py +3 -3
  10. rtctools/data/__init__.py +2 -2
  11. rtctools/data/csv.py +54 -33
  12. rtctools/data/interpolation/bspline.py +3 -3
  13. rtctools/data/interpolation/bspline1d.py +42 -29
  14. rtctools/data/interpolation/bspline2d.py +10 -4
  15. rtctools/data/netcdf.py +137 -93
  16. rtctools/data/pi.py +304 -210
  17. rtctools/data/rtc.py +64 -53
  18. rtctools/data/storage.py +91 -51
  19. rtctools/optimization/collocated_integrated_optimization_problem.py +1244 -696
  20. rtctools/optimization/control_tree_mixin.py +68 -66
  21. rtctools/optimization/csv_lookup_table_mixin.py +107 -74
  22. rtctools/optimization/csv_mixin.py +83 -52
  23. rtctools/optimization/goal_programming_mixin.py +237 -146
  24. rtctools/optimization/goal_programming_mixin_base.py +204 -111
  25. rtctools/optimization/homotopy_mixin.py +36 -27
  26. rtctools/optimization/initial_state_estimation_mixin.py +8 -8
  27. rtctools/optimization/io_mixin.py +48 -43
  28. rtctools/optimization/linearization_mixin.py +3 -1
  29. rtctools/optimization/linearized_order_goal_programming_mixin.py +57 -28
  30. rtctools/optimization/min_abs_goal_programming_mixin.py +72 -29
  31. rtctools/optimization/modelica_mixin.py +135 -81
  32. rtctools/optimization/netcdf_mixin.py +32 -18
  33. rtctools/optimization/optimization_problem.py +181 -127
  34. rtctools/optimization/pi_mixin.py +68 -36
  35. rtctools/optimization/planning_mixin.py +19 -0
  36. rtctools/optimization/single_pass_goal_programming_mixin.py +159 -112
  37. rtctools/optimization/timeseries.py +4 -6
  38. rtctools/rtctoolsapp.py +18 -18
  39. rtctools/simulation/csv_mixin.py +37 -30
  40. rtctools/simulation/io_mixin.py +9 -5
  41. rtctools/simulation/pi_mixin.py +62 -32
  42. rtctools/simulation/simulation_problem.py +471 -180
  43. rtctools/util.py +84 -56
  44. rtc_tools-2.5.2rc4.dist-info/RECORD +0 -49
  45. {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/COPYING.LESSER +0 -0
  46. {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/entry_points.txt +0 -0
  47. {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/top_level.txt +0 -0
rtctools/data/netcdf.py CHANGED
@@ -17,7 +17,6 @@ import numpy as np
17
17
 
18
18
 
19
19
  class Stations:
20
-
21
20
  def __init__(self, dataset: Dataset, station_variable: Variable):
22
21
  self.__station_variable = station_variable
23
22
 
@@ -43,14 +42,17 @@ class Stations:
43
42
  @property
44
43
  def station_ids(self) -> Iterable:
45
44
  """
46
- :return: An ordered iterable of the station ids (location ids) for which station data is available.
45
+ :return: An ordered iterable of the station ids (location ids) for which
46
+ station data is available.
47
+
47
48
  """
48
49
  return self.__attributes.keys()
49
50
 
50
51
  @property
51
52
  def attributes(self) -> OrderedDict:
52
53
  """
53
- :return: An OrderedDict containing dicts containing the values for all station attributes of the input dataset.
54
+ :return: An OrderedDict containing dicts containing the values for all
55
+ station attributes of the input dataset.
54
56
  """
55
57
  return self.__attributes
56
58
 
@@ -77,26 +79,28 @@ class ImportDataset:
77
79
  self.__ensemble_size = 1
78
80
 
79
81
  # Load the content of a NetCDF file into a Dataset.
80
- self.__filename = os.path.join(
81
- folder,
82
- basename + ".nc"
83
- )
82
+ self.__filename = os.path.join(folder, basename + ".nc")
84
83
  self.__dataset = Dataset(self.__filename)
85
84
 
86
85
  # Find the number of ensemble members and the time and station id variables
87
86
  self.__time_variable = self.__find_time_variable()
88
87
  if self.__time_variable is None:
89
- raise Exception('No time variable found in file ' + self.__filename + '. '
90
- 'Please ensure the file contains a time variable with standard_name "time" and axis "T".')
88
+ raise Exception(
89
+ "No time variable found in file " + self.__filename + ". "
90
+ "Please ensure the file contains a time variable with standard_name "
91
+ '"time" and axis "T".'
92
+ )
91
93
 
92
94
  self.__ensemble_member_variable = self.__find_ensemble_member_variable()
93
95
  if self.__ensemble_member_variable:
94
- self.__ensemble_size = self.__dataset.dimensions['realization'].size
96
+ self.__ensemble_size = self.__dataset.dimensions["realization"].size
95
97
 
96
98
  self.__station_variable = self.__find_station_variable()
97
99
  if self.__station_variable is None:
98
- raise Exception('No station variable found in file ' + self.__filename + '. '
99
- 'Please ensure the file contains a variable with cf_role "timeseries_id".')
100
+ raise Exception(
101
+ "No station variable found in file " + self.__filename + ". "
102
+ 'Please ensure the file contains a variable with cf_role "timeseries_id".'
103
+ )
100
104
 
101
105
  def __str__(self):
102
106
  return self.__filename
@@ -109,21 +113,26 @@ class ImportDataset:
109
113
  :return: a netCDF4.Variable object of the time variable (or None if none found)
110
114
  """
111
115
  for variable in self.__dataset.variables.values():
112
- if ('standard_name' in variable.ncattrs() and 'axis' in variable.ncattrs()
113
- and variable.standard_name == 'time' and variable.axis == 'T'):
116
+ if (
117
+ "standard_name" in variable.ncattrs()
118
+ and "axis" in variable.ncattrs()
119
+ and variable.standard_name == "time"
120
+ and variable.axis == "T"
121
+ ):
114
122
  return variable
115
123
 
116
124
  return None
117
125
 
118
126
  def __find_ensemble_member_variable(self) -> Union[Variable, None]:
119
127
  """
120
- Find the variable containing the ensemble member index in the given Dataset.
128
+ Find the variable containing the ensemble member index in the given Dataset.
121
129
 
122
- :param dataset: The Dataset to be searched.
123
- :return: a netCDF4.Variable object of the ensemble member index variable (or None if none found)
130
+ :param dataset: The Dataset to be searched.
131
+ :return: a netCDF4.Variable object of the ensemble member index variable (or None
132
+ if none found)
124
133
  """
125
134
  for variable in self.__dataset.variables.values():
126
- if 'standard_name' in variable.ncattrs() and variable.standard_name == "realization":
135
+ if "standard_name" in variable.ncattrs() and variable.standard_name == "realization":
127
136
  return variable
128
137
 
129
138
  return None
@@ -136,7 +145,7 @@ class ImportDataset:
136
145
  :return: a netCDF4.Variable object of the station id variable (or None if none found)
137
146
  """
138
147
  for variable in self.__dataset.variables.values():
139
- if 'cf_role' in variable.ncattrs() and variable.cf_role == 'timeseries_id':
148
+ if "cf_role" in variable.ncattrs() and variable.cf_role == "timeseries_id":
140
149
  return variable
141
150
 
142
151
  return None
@@ -153,7 +162,7 @@ class ImportDataset:
153
162
  try:
154
163
  time_calendar = self.__time_variable.calendar
155
164
  except AttributeError:
156
- time_calendar = u'gregorian'
165
+ time_calendar = "gregorian"
157
166
 
158
167
  return num2date(time_values, units=time_unit, calendar=time_calendar)
159
168
 
@@ -162,9 +171,9 @@ class ImportDataset:
162
171
 
163
172
  def find_timeseries_variables(self) -> List[str]:
164
173
  """
165
- Find the keys of all 2-D or 3-D variables with dimensions {station, time} or {station, time, realization}
166
- where station is the dimension of the station_variable, time the dimension of the time_variable and
167
- realization the dimension for ensemble_member_index.
174
+ Find the keys of all 2-D or 3-D variables with dimensions {station, time} or {station, time,
175
+ realization} where station is the dimension of the station_variable, time the dimension of
176
+ the time_variable and realization the dimension for ensemble_member_index.
168
177
 
169
178
  :param dataset: The Dataset to be searched.
170
179
  :param station_variable: The station id variable.
@@ -176,10 +185,13 @@ class ImportDataset:
176
185
  if self.__ensemble_member_variable is not None:
177
186
  ensemble_dim = self.__ensemble_member_variable.dimensions[0]
178
187
  expected_dims = [
179
- (time_dim, station_dim, ensemble_dim), (time_dim, ensemble_dim, station_dim),
180
- (station_dim, time_dim, ensemble_dim), (station_dim, ensemble_dim, time_dim),
181
- (ensemble_dim, time_dim, station_dim), (ensemble_dim, station_dim, time_dim)] \
182
- + [(station_dim, time_dim), (time_dim, station_dim)]
188
+ (time_dim, station_dim, ensemble_dim),
189
+ (time_dim, ensemble_dim, station_dim),
190
+ (station_dim, time_dim, ensemble_dim),
191
+ (station_dim, ensemble_dim, time_dim),
192
+ (ensemble_dim, time_dim, station_dim),
193
+ (ensemble_dim, station_dim, time_dim),
194
+ ] + [(station_dim, time_dim), (time_dim, station_dim)]
183
195
  else:
184
196
  expected_dims = [(station_dim, time_dim), (time_dim, station_dim)]
185
197
 
@@ -190,7 +202,9 @@ class ImportDataset:
190
202
 
191
203
  return timeseries_variables
192
204
 
193
- def read_timeseries_values(self, station_index: int, variable_name: str, ensemble_member: int = 0) -> np.ndarray:
205
+ def read_timeseries_values(
206
+ self, station_index: int, variable_name: str, ensemble_member: int = 0
207
+ ) -> np.ndarray:
194
208
  """
195
209
  Reads the specified timeseries from the input file.
196
210
 
@@ -206,7 +220,10 @@ class ImportDataset:
206
220
  # assert set(timeseries_variable.dimensions)==set(('time', 'station')) \
207
221
  # or set(timeseries_variable.dimensions)==set(('time', 'station', 'realization'))
208
222
 
209
- if self.__ensemble_member_variable is not None and 'realization' in timeseries_variable.dimensions:
223
+ if (
224
+ self.__ensemble_member_variable is not None
225
+ and "realization" in timeseries_variable.dimensions
226
+ ):
210
227
  ensemble_member_dim = self.__ensemble_member_variable.dimensions[0]
211
228
  for i in range(3):
212
229
  if timeseries_variable.dimensions[i] == station_dim:
@@ -215,7 +232,7 @@ class ImportDataset:
215
232
  ensemble_arg_Index = i
216
233
  time_arg_Index = set(range(3)) - {station_arg_Index, ensemble_arg_Index}
217
234
  time_arg_Index = time_arg_Index.pop()
218
- argument = [None]*3
235
+ argument = [None] * 3
219
236
  argument[station_arg_Index] = station_index
220
237
  argument[ensemble_arg_Index] = ensemble_member
221
238
  argument[time_arg_Index] = slice(None)
@@ -255,11 +272,11 @@ class ImportDataset:
255
272
 
256
273
  class ExportDataset:
257
274
  """
258
- A class used to write data to a NetCDF file.
259
- Creates a new file or overwrites an old file. The file metadata will be written upon initialization. Data such
260
- as times, station data and timeseries data should be presented to the ExportDataset through the various methods.
261
- When all data has been written, the close method must be called to flush the changes from local memory to the
262
- actual file on disk.
275
+ A class used to write data to a NetCDF file. Creates a new file or overwrites an old file. The
276
+ file metadata will be written upon initialization. Data such as times, station data and
277
+ timeseries data should be presented to the ExportDataset through the various methods. When all
278
+ data has been written, the close method must be called to flush the changes from local memory
279
+ to the actual file on disk.
263
280
  """
264
281
 
265
282
  def __init__(self, folder: str, basename: str):
@@ -268,22 +285,20 @@ class ExportDataset:
268
285
  :param basename: Basename of the file, extension ".nc" will be appended to this
269
286
  """
270
287
  # Create the file and open a Dataset to access it
271
- self.__filename = os.path.join(
272
- folder,
273
- basename + ".nc"
274
- )
288
+ self.__filename = os.path.join(folder, basename + ".nc")
275
289
  # use same write format as FEWS
276
- self.__dataset = Dataset(self.__filename, mode='w', format='NETCDF3_CLASSIC')
290
+ self.__dataset = Dataset(self.__filename, mode="w", format="NETCDF3_CLASSIC")
277
291
 
278
292
  # write metadata to the file
279
- self.__dataset.title = 'RTC-Tools Output Data'
280
- self.__dataset.institution = 'Deltares'
281
- self.__dataset.source = 'RTC-Tools'
282
- self.__dataset.history = 'Generated on {}'.format(datetime.now())
283
- self.__dataset.Conventions = 'CF-1.6'
284
- self.__dataset.featureType = 'timeseries'
285
-
286
- # dimensions are created when writing times and station data, must be created before writing variables
293
+ self.__dataset.title = "RTC-Tools Output Data"
294
+ self.__dataset.institution = "Deltares"
295
+ self.__dataset.source = "RTC-Tools"
296
+ self.__dataset.history = "Generated on {}".format(datetime.now())
297
+ self.__dataset.Conventions = "CF-1.6"
298
+ self.__dataset.featureType = "timeseries"
299
+
300
+ # dimensions are created when writing times and station data, must be created before
301
+ # writing variables
287
302
  self.__time_dim = None
288
303
  self.__station_dim = None
289
304
  self.__station_id_to_index_mapping = None
@@ -298,35 +313,41 @@ class ExportDataset:
298
313
  """
299
314
  Writes a time variable to the given dataset.
300
315
 
301
- :param dataset: The NetCDF4.Dataset object that the times will be written to (must have write permission)
316
+ :param dataset: The NetCDF4.Dataset object that the times will be written to
317
+ (must have write permission)
302
318
  :param times: The times that are to be written in seconds.
303
319
  :param forecast_time: The forecast time in seconds corresponding to the forecast date
304
- :param forecast_date: The datetime corresponding with time in seconds at the forecast index.
320
+ :param forecast_date: The datetime corresponding with time in seconds at the forecast
321
+ index.
305
322
  """
306
323
 
307
324
  # in a NetCDF file times are written with respect to a reference date
308
- # the written values for the times may never be negative, so use the earliest time as the reference date
325
+ # the written values for the times may never be negative, so use the earliest time as the
326
+ # reference date
309
327
  reference_date = forecast_date
310
328
  minimum_time = np.min(times)
311
329
  if minimum_time < 0:
312
330
  times = times - minimum_time
313
331
  reference_date = reference_date - timedelta(seconds=forecast_time - minimum_time)
314
332
 
315
- self.__time_dim = self.__dataset.createDimension('time', None)
333
+ self.__time_dim = self.__dataset.createDimension("time", None)
316
334
 
317
- time_var = self.__dataset.createVariable('time', 'f8', ('time',))
318
- time_var.standard_name = 'time'
319
- time_var.units = 'seconds since {}'.format(reference_date)
320
- time_var.axis = 'T'
335
+ time_var = self.__dataset.createVariable("time", "f8", ("time",))
336
+ time_var.standard_name = "time"
337
+ time_var.units = "seconds since {}".format(reference_date)
338
+ time_var.axis = "T"
321
339
  time_var[:] = times
322
340
 
323
341
  def write_ensemble_data(self, ensemble_size):
324
-
325
342
  if ensemble_size > 1:
326
- self.__ensemble_member_dim = self.__dataset.createDimension('realization', ensemble_size)
327
- ensemble_member_var = self.__dataset.createVariable('realization', 'i', ('realization',))
328
- ensemble_member_var.standard_name = 'realization'
329
- ensemble_member_var.long_name = 'Index of an ensemble member within an ensemble'
343
+ self.__ensemble_member_dim = self.__dataset.createDimension(
344
+ "realization", ensemble_size
345
+ )
346
+ ensemble_member_var = self.__dataset.createVariable(
347
+ "realization", "i", ("realization",)
348
+ )
349
+ ensemble_member_var.standard_name = "realization"
350
+ ensemble_member_var.long_name = "Index of an ensemble member within an ensemble"
330
351
  ensemble_member_var.units = 1
331
352
 
332
353
  def write_station_data(self, stations: Stations, output_station_ids: List[str]) -> None:
@@ -334,20 +355,24 @@ class ExportDataset:
334
355
  Writes the station ids and additional station information to the given dataset.
335
356
 
336
357
  :param stations: The stations data read from the input file.
337
- :param output_station_ids: The set of station ids for which output will be written. Must be unique.
358
+ :param output_station_ids: The set of station ids for which output will be written. Must be
359
+ unique.
338
360
  """
339
361
  assert len(set(output_station_ids)) == len(output_station_ids)
340
362
 
341
- self.__station_dim = self.__dataset.createDimension('station', len(output_station_ids))
363
+ self.__station_dim = self.__dataset.createDimension("station", len(output_station_ids))
342
364
 
343
365
  # first write the ids
344
366
  max_id_length = max(len(id) for id in output_station_ids)
345
- self.__dataset.createDimension('char_leng_id', max_id_length)
346
- station_id_var = self.__dataset.createVariable('station_id', 'c', ('station', 'char_leng_id'))
347
- station_id_var.long_name = 'station identification code'
348
- station_id_var.cf_role = 'timeseries_id'
367
+ self.__dataset.createDimension("char_leng_id", max_id_length)
368
+ station_id_var = self.__dataset.createVariable(
369
+ "station_id", "c", ("station", "char_leng_id")
370
+ )
371
+ station_id_var.long_name = "station identification code"
372
+ station_id_var.cf_role = "timeseries_id"
349
373
 
350
- # we must store the index we use for each station id, to be able to write the data at the correct index later
374
+ # we must store the index we use for each station id, to be able to write the data at the
375
+ # correct index later
351
376
  self.__station_id_to_index_mapping = {}
352
377
  for i, id in enumerate(output_station_ids):
353
378
  station_id_var[i, :] = list(id)
@@ -355,7 +380,7 @@ class ExportDataset:
355
380
 
356
381
  # now write the stored attributes
357
382
  for var_name, attr_var in stations.attribute_variables.items():
358
- variable = self.__dataset.createVariable(var_name, attr_var.datatype, ('station',))
383
+ variable = self.__dataset.createVariable(var_name, attr_var.datatype, ("station",))
359
384
  # copy all attributes from the original input variable
360
385
  variable.setncatts(attr_var.__dict__)
361
386
 
@@ -367,50 +392,69 @@ class ExportDataset:
367
392
  def create_variables(self, variable_names: List[str], ensemble_size: int) -> None:
368
393
  """
369
394
  Creates variables in the dataset for each of the provided parameter ids.
370
- The write_times and write_station_data methods must be called first, to ensure the necessary dimensions have
371
- already been created in the output NetCDF file.
395
+ The write_times and write_station_data methods must be called first, to ensure the necessary
396
+ dimensions have already been created in the output NetCDF file.
372
397
 
373
- :param variable_names: The parameter ids for which variables must be created. Must be unique.
398
+ :param variable_names: The parameter ids for which variables must be created. Must be
399
+ unique.
374
400
  :param ensemble_size: the number of members in the ensemble
375
401
  """
376
402
  assert len(set(variable_names)) == len(variable_names)
377
403
 
378
- assert self.__time_dim is not None, 'First call write_times to ensure the time dimension has been created.'
379
- assert self.__station_dim is not None, 'First call write_station_data to ensure ' \
380
- 'the station dimension has been created'
381
- assert self.__station_id_to_index_mapping is not None # should also be created in write_station_data
404
+ assert (
405
+ self.__time_dim is not None
406
+ ), "First call write_times to ensure the time dimension has been created."
407
+ assert (
408
+ self.__station_dim is not None
409
+ ), "First call write_station_data to ensure the station dimension has been created"
410
+ assert (
411
+ self.__station_id_to_index_mapping is not None
412
+ ) # should also be created in write_station_data
382
413
 
383
414
  if ensemble_size > 1:
384
- assert self.__ensemble_member_dim is not None, \
385
- 'First call write_ensemble_data to ensure the realization dimension has been created.'
415
+ assert (
416
+ self.__ensemble_member_dim is not None
417
+ ), "First call write_ensemble_data to ensure the realization dimension has been created"
386
418
 
387
419
  for variable_name in variable_names:
388
- self.__dataset.createVariable(variable_name,
389
- 'f8', ('time', 'station', 'realization'), fill_value=np.nan)
420
+ self.__dataset.createVariable(
421
+ variable_name, "f8", ("time", "station", "realization"), fill_value=np.nan
422
+ )
390
423
  else:
391
424
  for variable_name in variable_names:
392
- self.__dataset.createVariable(variable_name,
393
- 'f8', ('time', 'station'), fill_value=np.nan)
394
-
395
- def write_output_values(self, station_id: str, variable_name: str,
396
- ensemble_member_index: int, values: np.ndarray,
397
- ensemble_size: int) -> None:
398
- """
399
- Writes the given data to the dataset. The variable must have already been created through the
400
- create_variables method. After all calls to write_output_values, the close method must be called to flush all
401
- changes.
425
+ self.__dataset.createVariable(
426
+ variable_name, "f8", ("time", "station"), fill_value=np.nan
427
+ )
428
+
429
+ def write_output_values(
430
+ self,
431
+ station_id: str,
432
+ variable_name: str,
433
+ ensemble_member_index: int,
434
+ values: np.ndarray,
435
+ ensemble_size: int,
436
+ ) -> None:
437
+ """
438
+ Writes the given data to the dataset. The variable must have already been created through
439
+ the create_variables method. After all calls to write_output_values, the close method must
440
+ be called to flush all changes.
402
441
 
403
442
  :param station_id: The id of the station the data is written for.
404
- :param variable_name: The name of the variable the data is written to (must have already been created).
443
+ :param variable_name: The name of the variable the data is written to (must have already
444
+ been created).
405
445
  :param ensemble_member_index: The index associated to the ensemble member
406
446
  :param values: The values that are to be written to the file
407
447
  :param ensemble_size: the number of members in the ensemble
408
448
  """
409
- assert self.__station_id_to_index_mapping is not None, 'First call write_station_data and create_variables.'
449
+ assert (
450
+ self.__station_id_to_index_mapping is not None
451
+ ), "First call write_station_data and create_variables."
410
452
 
411
453
  station_index = self.__station_id_to_index_mapping[station_id]
412
454
  if ensemble_size > 1:
413
- self.__dataset.variables[variable_name][:, station_index, ensemble_member_index] = values
455
+ self.__dataset.variables[variable_name][
456
+ :, station_index, ensemble_member_index
457
+ ] = values
414
458
  else:
415
459
  self.__dataset.variables[variable_name][:, station_index] = values
416
460