rtc-tools 2.5.2rc4__py3-none-any.whl → 2.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rtc-tools might be problematic. See the package's registry page for more details.

Files changed (47)
  1. {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/METADATA +7 -7
  2. rtc_tools-2.6.0.dist-info/RECORD +50 -0
  3. {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/WHEEL +1 -1
  4. rtctools/__init__.py +2 -1
  5. rtctools/_internal/alias_tools.py +12 -10
  6. rtctools/_internal/caching.py +5 -3
  7. rtctools/_internal/casadi_helpers.py +11 -32
  8. rtctools/_internal/debug_check_helpers.py +1 -1
  9. rtctools/_version.py +3 -3
  10. rtctools/data/__init__.py +2 -2
  11. rtctools/data/csv.py +54 -33
  12. rtctools/data/interpolation/bspline.py +3 -3
  13. rtctools/data/interpolation/bspline1d.py +42 -29
  14. rtctools/data/interpolation/bspline2d.py +10 -4
  15. rtctools/data/netcdf.py +137 -93
  16. rtctools/data/pi.py +304 -210
  17. rtctools/data/rtc.py +64 -53
  18. rtctools/data/storage.py +91 -51
  19. rtctools/optimization/collocated_integrated_optimization_problem.py +1244 -696
  20. rtctools/optimization/control_tree_mixin.py +68 -66
  21. rtctools/optimization/csv_lookup_table_mixin.py +107 -74
  22. rtctools/optimization/csv_mixin.py +83 -52
  23. rtctools/optimization/goal_programming_mixin.py +237 -146
  24. rtctools/optimization/goal_programming_mixin_base.py +204 -111
  25. rtctools/optimization/homotopy_mixin.py +36 -27
  26. rtctools/optimization/initial_state_estimation_mixin.py +8 -8
  27. rtctools/optimization/io_mixin.py +48 -43
  28. rtctools/optimization/linearization_mixin.py +3 -1
  29. rtctools/optimization/linearized_order_goal_programming_mixin.py +57 -28
  30. rtctools/optimization/min_abs_goal_programming_mixin.py +72 -29
  31. rtctools/optimization/modelica_mixin.py +135 -81
  32. rtctools/optimization/netcdf_mixin.py +32 -18
  33. rtctools/optimization/optimization_problem.py +181 -127
  34. rtctools/optimization/pi_mixin.py +68 -36
  35. rtctools/optimization/planning_mixin.py +19 -0
  36. rtctools/optimization/single_pass_goal_programming_mixin.py +159 -112
  37. rtctools/optimization/timeseries.py +4 -6
  38. rtctools/rtctoolsapp.py +18 -18
  39. rtctools/simulation/csv_mixin.py +37 -30
  40. rtctools/simulation/io_mixin.py +9 -5
  41. rtctools/simulation/pi_mixin.py +62 -32
  42. rtctools/simulation/simulation_problem.py +471 -180
  43. rtctools/util.py +84 -56
  44. rtc_tools-2.5.2rc4.dist-info/RECORD +0 -49
  45. {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/COPYING.LESSER +0 -0
  46. {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/entry_points.txt +0 -0
  47. {rtc_tools-2.5.2rc4.dist-info → rtc_tools-2.6.0.dist-info}/top_level.txt +0 -0
rtctools/data/pi.py CHANGED
@@ -7,8 +7,7 @@ import xml.etree.ElementTree as ET
7
7
 
8
8
  import numpy as np
9
9
 
10
- ns = {'fews': 'http://www.wldelft.nl/fews',
11
- 'pi': 'http://www.wldelft.nl/fews/PI'}
10
+ ns = {"fews": "http://www.wldelft.nl/fews", "pi": "http://www.wldelft.nl/fews/PI"}
12
11
 
13
12
 
14
13
  class Diag:
@@ -22,14 +21,14 @@ class Diag:
22
21
  INFO = 1 << 3
23
22
  DEBUG = 1 << 4
24
23
 
25
- def __init__(self, folder, basename='diag'):
24
+ def __init__(self, folder, basename="diag"):
26
25
  """
27
26
  Parse diag file.
28
27
 
29
28
  :param folder: Folder in which diag.xml is found or to be created.
30
29
  :param basename: Alternative basename for the diagnostics XML file.
31
30
  """
32
- self.__path_xml = os.path.join(folder, basename + '.xml')
31
+ self.__path_xml = os.path.join(folder, basename + ".xml")
33
32
 
34
33
  self.__tree = ET.parse(self.__path_xml)
35
34
  self.__xml_root = self.__tree.getroot()
@@ -46,19 +45,19 @@ class Diag:
46
45
  USED_LEVELS = []
47
46
 
48
47
  if level & self.ERROR_FATAL:
49
- USED_LEVELS.append('0')
48
+ USED_LEVELS.append("0")
50
49
  if level & self.ERROR:
51
- USED_LEVELS.append('1')
50
+ USED_LEVELS.append("1")
52
51
  if level & self.WARN:
53
- USED_LEVELS.append('2')
52
+ USED_LEVELS.append("2")
54
53
  if level & self.INFO:
55
- USED_LEVELS.append('3')
54
+ USED_LEVELS.append("3")
56
55
  if level & self.DEBUG:
57
- USED_LEVELS.append('4')
56
+ USED_LEVELS.append("4")
58
57
 
59
58
  for child in diag_lines:
60
59
  for used_level in USED_LEVELS:
61
- if child.get('level') == used_level:
60
+ if child.get("level") == used_level:
62
61
  diag_lines_out.append(child)
63
62
 
64
63
  return diag_lines_out
@@ -82,16 +81,16 @@ class DiagHandler(logging.Handler):
82
81
  PI diag file logging handler.
83
82
  """
84
83
 
85
- def __init__(self, folder, basename='diag', level=logging.NOTSET):
86
- super(DiagHandler, self).__init__(level=level)
84
+ def __init__(self, folder, basename="diag", level=logging.NOTSET):
85
+ super(DiagHandler, self).__init__(level=level)
87
86
 
88
- self.__path_xml = os.path.join(folder, basename + '.xml')
87
+ self.__path_xml = os.path.join(folder, basename + ".xml")
89
88
 
90
89
  try:
91
90
  self.__tree = ET.parse(self.__path_xml)
92
91
  self.__xml_root = self.__tree.getroot()
93
92
  except Exception:
94
- self.__xml_root = ET.Element('{%s}Diag' % (ns['pi'], ))
93
+ self.__xml_root = ET.Element("{%s}Diag" % (ns["pi"],))
95
94
  self.__tree = ET.ElementTree(element=self.__xml_root)
96
95
 
97
96
  self.__map_level = {50: 0, 40: 1, 30: 2, 20: 3, 10: 4, 0: 4}
@@ -100,11 +99,11 @@ class DiagHandler(logging.Handler):
100
99
  self.format(record)
101
100
 
102
101
  self.acquire()
103
- el = ET.SubElement(self.__xml_root, '{%s}line' % (ns['pi'], ))
102
+ el = ET.SubElement(self.__xml_root, "{%s}line" % (ns["pi"],))
104
103
  # Work around cElementTree issue 21403
105
- el.set('description', record.message)
106
- el.set('eventCode', record.module + '.' + record.funcName)
107
- el.set('level', str(self.__map_level[record.levelno]))
104
+ el.set("description", record.message)
105
+ el.set("eventCode", record.module + "." + record.funcName)
106
+ el.set("level", str(self.__map_level[record.levelno]))
108
107
  self.release()
109
108
 
110
109
  def append_element(self, el):
@@ -132,8 +131,8 @@ class ParameterConfig:
132
131
  :param folder: Folder in which the parameter configuration file is located.
133
132
  :param basename: Basename of the parameter configuration file (e.g, 'rtcParameterConfig').
134
133
  """
135
- if os.path.splitext(basename)[1] != '.xml':
136
- basename = basename + '.xml'
134
+ if os.path.splitext(basename)[1] != ".xml":
135
+ basename = basename + ".xml"
137
136
  self.__path_xml = os.path.join(folder, basename)
138
137
 
139
138
  self.__tree = ET.parse(self.__path_xml)
@@ -150,8 +149,7 @@ class ParameterConfig:
150
149
 
151
150
  :returns: The value of the specified parameter.
152
151
  """
153
- groups = self.__xml_root.findall(
154
- "pi:group[@id='{}']".format(group_id), ns)
152
+ groups = self.__xml_root.findall("pi:group[@id='{}']".format(group_id), ns)
155
153
  for group in groups:
156
154
  el = group.find("pi:locationId", ns)
157
155
  if location_id is not None and el is not None:
@@ -168,8 +166,7 @@ class ParameterConfig:
168
166
  raise KeyError
169
167
  return self.__parse_parameter(el)
170
168
 
171
- raise KeyError("No such parameter ({}, {})".format(
172
- group_id, parameter_id))
169
+ raise KeyError("No such parameter ({}, {})".format(group_id, parameter_id))
173
170
 
174
171
  def set(self, group_id, parameter_id, new_value, location_id=None, model=None):
175
172
  """
@@ -181,8 +178,7 @@ class ParameterConfig:
181
178
  :param location_id: The optional ID of the parameter location to look in.
182
179
  :param model: The optional ID of the parameter model to look in.
183
180
  """
184
- groups = self.__xml_root.findall(
185
- "pi:group[@id='{}']".format(group_id), ns)
181
+ groups = self.__xml_root.findall("pi:group[@id='{}']".format(group_id), ns)
186
182
  for group in groups:
187
183
  el = group.find("pi:locationId", ns)
188
184
  if location_id is not None and el is not None:
@@ -198,27 +194,25 @@ class ParameterConfig:
198
194
  if el is None:
199
195
  raise KeyError
200
196
  for child in el:
201
- if child.tag.endswith('boolValue'):
197
+ if child.tag.endswith("boolValue"):
202
198
  if new_value is True:
203
- child.text = 'true'
199
+ child.text = "true"
204
200
  return
205
201
  elif new_value is False:
206
- child.text = 'false'
202
+ child.text = "false"
207
203
  return
208
204
  else:
209
- raise Exception(
210
- "Unsupported value for tag {}".format(child.tag))
211
- elif child.tag.endswith('intValue'):
205
+ raise Exception("Unsupported value for tag {}".format(child.tag))
206
+ elif child.tag.endswith("intValue"):
212
207
  child.text = str(int(new_value))
213
208
  return
214
- elif child.tag.endswith('dblValue'):
209
+ elif child.tag.endswith("dblValue"):
215
210
  child.text = str(new_value)
216
211
  return
217
212
  else:
218
213
  raise Exception("Unsupported tag {}".format(child.tag))
219
214
 
220
- raise KeyError("No such parameter ({}, {})".format(
221
- group_id, parameter_id))
215
+ raise KeyError("No such parameter ({}, {})".format(group_id, parameter_id))
222
216
 
223
217
  def write(self, folder=None, basename=None):
224
218
  """
@@ -239,7 +233,7 @@ class ParameterConfig:
239
233
  if folder is not None:
240
234
  if not os.path.exists(folder):
241
235
  # Make sure folder exists
242
- raise FileNotFoundError('Folder not found: {}'.format(folder))
236
+ raise FileNotFoundError("Folder not found: {}".format(folder))
243
237
  else:
244
238
  # Reuse folder of original file
245
239
  folder = os.path.dirname(self.path)
@@ -247,8 +241,8 @@ class ParameterConfig:
247
241
  # Determine basename
248
242
  if basename is not None:
249
243
  # Make sure basename ends in '.xml'
250
- if os.path.splitext(basename)[1] != '.xml':
251
- basename = basename + '.xml'
244
+ if os.path.splitext(basename)[1] != ".xml":
245
+ basename = basename + ".xml"
252
246
  else:
253
247
  # Reuse basename of original file
254
248
  basename = os.path.split(self.path)[1]
@@ -264,33 +258,32 @@ class ParameterConfig:
264
258
 
265
259
  def __parse_type(self, fews_type):
266
260
  # Parse a FEWS type to an np type
267
- if fews_type == 'double':
268
- return np.dtype('float')
261
+ if fews_type == "double":
262
+ return np.dtype("float")
269
263
  else:
270
- return np.dtype('S128')
264
+ return np.dtype("S128")
271
265
 
272
266
  def __parse_parameter(self, parameter):
273
267
  for child in parameter:
274
- if child.tag.endswith('boolValue'):
275
- if child.text.lower() == 'true':
268
+ if child.tag.endswith("boolValue"):
269
+ if child.text.lower() == "true":
276
270
  return True
277
271
  else:
278
272
  return False
279
- elif child.tag.endswith('intValue'):
273
+ elif child.tag.endswith("intValue"):
280
274
  return int(child.text)
281
- elif child.tag.endswith('dblValue'):
275
+ elif child.tag.endswith("dblValue"):
282
276
  return float(child.text)
283
- elif child.tag.endswith('stringValue'):
277
+ elif child.tag.endswith("stringValue"):
284
278
  return child.text
285
279
  # return dict of lisstart_datetime
286
- elif child.tag.endswith('table'):
280
+ elif child.tag.endswith("table"):
287
281
  columnId = {}
288
282
  columnType = {}
289
283
  for key in child.find("pi:row", ns).attrib:
290
284
  # default Id
291
285
  columnId[key] = key
292
- columnType[key] = np.dtype(
293
- 'S128') # default Type
286
+ columnType[key] = np.dtype("S128") # default Type
294
287
 
295
288
  # get Id's if present
296
289
  el_columnIds = child.find("pi:columnIds", ns)
@@ -306,8 +299,10 @@ class ParameterConfig:
306
299
 
307
300
  # get table contenstart_datetime
308
301
  el_row = child.findall("pi:row", ns)
309
- table = {columnId[key]: np.empty(len(el_row), # initialize table
310
- columnType[key]) for key in columnId}
302
+ table = {
303
+ columnId[key]: np.empty(len(el_row), columnType[key]) # initialize table
304
+ for key in columnId
305
+ }
311
306
 
312
307
  i_row = 0
313
308
  for row in el_row:
@@ -315,7 +310,7 @@ class ParameterConfig:
315
310
  table[columnId[key]][i_row] = value
316
311
  i_row += 1
317
312
  return table
318
- elif child.tag.endswith('description'):
313
+ elif child.tag.endswith("description"):
319
314
  pass
320
315
  else:
321
316
  raise Exception("Unsupported tag {}".format(child.tag))
@@ -338,7 +333,9 @@ class ParameterConfig:
338
333
 
339
334
  parameters = group.findall("pi:parameter", ns)
340
335
  for parameter in parameters:
341
- yield location_id, model_id, parameter.attrib['id'], self.__parse_parameter(parameter)
336
+ yield location_id, model_id, parameter.attrib["id"], self.__parse_parameter(
337
+ parameter
338
+ )
342
339
 
343
340
 
344
341
  class Timeseries:
@@ -346,7 +343,15 @@ class Timeseries:
346
343
  PI timeseries wrapper.
347
344
  """
348
345
 
349
- def __init__(self, data_config, folder, basename, binary=True, pi_validate_times=False, make_new_file=False):
346
+ def __init__(
347
+ self,
348
+ data_config,
349
+ folder,
350
+ basename,
351
+ binary=True,
352
+ pi_validate_times=False,
353
+ make_new_file=False,
354
+ ):
350
355
  """
351
356
  Load the timeseries from disk.
352
357
 
@@ -364,7 +369,7 @@ class Timeseries:
364
369
  self.__folder = folder
365
370
  self.__basename = basename
366
371
 
367
- self.__path_xml = os.path.join(self.__folder, basename + '.xml')
372
+ self.__path_xml = os.path.join(self.__folder, basename + ".xml")
368
373
 
369
374
  self.__internal_dtype = np.float64
370
375
  self.__pi_dtype = np.float32
@@ -385,13 +390,13 @@ class Timeseries:
385
390
  f = None
386
391
  if self.__binary:
387
392
  try:
388
- f = io.open(self.binary_path, 'rb')
393
+ f = io.open(self.binary_path, "rb")
389
394
  except IOError:
390
395
  # Support placeholder XML files.
391
396
  pass
392
397
 
393
398
  # Read timezone
394
- timezone = self.__xml_root.find('pi:timeZone', ns)
399
+ timezone = self.__xml_root.find("pi:timeZone", ns)
395
400
  if timezone is not None:
396
401
  self.__timezone = float(timezone.text)
397
402
  else:
@@ -405,47 +410,52 @@ class Timeseries:
405
410
  self.__forecast_index = None
406
411
  self.__contains_ensemble = False
407
412
  self.__ensemble_size = 1
408
- for series in self.__xml_root.findall('pi:series', ns):
409
- header = series.find('pi:header', ns)
413
+ for series in self.__xml_root.findall("pi:series", ns):
414
+ header = series.find("pi:header", ns)
410
415
 
411
416
  variable = self.__data_config.variable(header)
412
417
 
413
418
  try:
414
- dt = self.__parse_time_step(header.find('pi:timeStep', ns))
419
+ dt = self.__parse_time_step(header.find("pi:timeStep", ns))
415
420
  except ValueError:
416
421
  raise Exception(
417
- 'PI: Multiplier of time step of variable {} '
418
- 'must be a positive integer per the PI schema.'.format(variable))
422
+ "PI: Multiplier of time step of variable {} "
423
+ "must be a positive integer per the PI schema.".format(variable)
424
+ )
419
425
  if self.__dt is None:
420
426
  self.__dt = dt
421
427
  else:
422
428
  if dt != self.__dt:
423
- raise Exception(
424
- 'PI: Not all timeseries have the same time step size.')
429
+ raise Exception("PI: Not all timeseries have the same time step size.")
425
430
  try:
426
- start_datetime = self.__parse_date_time(
427
- header.find('pi:startDate', ns))
431
+ start_datetime = self.__parse_date_time(header.find("pi:startDate", ns))
428
432
  if self.__start_datetime is None:
429
433
  self.__start_datetime = start_datetime
430
434
  else:
431
435
  if start_datetime < self.__start_datetime:
432
436
  self.__start_datetime = start_datetime
433
437
  except (AttributeError, ValueError):
434
- raise Exception('PI: Variable {} in {} has no startDate.'.format(
435
- variable, os.path.join(self.__folder, basename + '.xml')))
438
+ raise Exception(
439
+ "PI: Variable {} in {} has no startDate.".format(
440
+ variable, os.path.join(self.__folder, basename + ".xml")
441
+ )
442
+ )
436
443
 
437
444
  try:
438
- end_datetime = self.__parse_date_time(header.find('pi:endDate', ns))
445
+ end_datetime = self.__parse_date_time(header.find("pi:endDate", ns))
439
446
  if self.__end_datetime is None:
440
447
  self.__end_datetime = end_datetime
441
448
  else:
442
449
  if end_datetime > self.__end_datetime:
443
450
  self.__end_datetime = end_datetime
444
451
  except (AttributeError, ValueError):
445
- raise Exception('PI: Variable {} in {} has no endDate.'.format(
446
- variable, os.path.join(self.__folder, basename + '.xml')))
452
+ raise Exception(
453
+ "PI: Variable {} in {} has no endDate.".format(
454
+ variable, os.path.join(self.__folder, basename + ".xml")
455
+ )
456
+ )
447
457
 
448
- el = header.find('pi:forecastDate', ns)
458
+ el = header.find("pi:forecastDate", ns)
449
459
  if el is not None:
450
460
  forecast_datetime = self.__parse_date_time(el)
451
461
  else:
@@ -456,10 +466,9 @@ class Timeseries:
456
466
  self.__forecast_datetime = forecast_datetime
457
467
  else:
458
468
  if el is not None and forecast_datetime != self.__forecast_datetime:
459
- raise Exception(
460
- 'PI: Not all timeseries share the same forecastDate.')
469
+ raise Exception("PI: Not all timeseries share the same forecastDate.")
461
470
 
462
- el = header.find('pi:ensembleMemberIndex', ns)
471
+ el = header.find("pi:ensembleMemberIndex", ns)
463
472
  if el is not None:
464
473
  contains_ensemble = True
465
474
  if int(el.text) > self.__ensemble_size - 1: # Assume zero-based
@@ -473,13 +482,18 @@ class Timeseries:
473
482
  # Define the times, and floor the global forecast_datetime to the
474
483
  # global time step to get its index
475
484
  if self.__dt:
476
- t_len = int(round(
477
- (self.__end_datetime - self.__start_datetime).total_seconds() / self.__dt.total_seconds() + 1))
485
+ t_len = int(
486
+ round(
487
+ (self.__end_datetime - self.__start_datetime).total_seconds()
488
+ / self.__dt.total_seconds()
489
+ + 1
490
+ )
491
+ )
478
492
  self.__times = [self.__start_datetime + i * self.__dt for i in range(t_len)]
479
493
  else: # Timeseries are non-equidistant
480
494
  self.__times = []
481
- for series in self.__xml_root.findall('pi:series', ns):
482
- events = series.findall('pi:event', ns)
495
+ for series in self.__xml_root.findall("pi:series", ns):
496
+ events = series.findall("pi:event", ns)
483
497
  # We assume that timeseries can differ in length, but always are
484
498
  # a complete 'slice' of datetimes between start and end. The
485
499
  # longest timeseries then contains all datetimes between start and end.
@@ -490,21 +504,22 @@ class Timeseries:
490
504
  # time range.
491
505
  if pi_validate_times:
492
506
  ref_times_set = set(self.__times)
493
- for series in self.__xml_root.findall('pi:series', ns):
494
- events = series.findall('pi:event', ns)
507
+ for series in self.__xml_root.findall("pi:series", ns):
508
+ events = series.findall("pi:event", ns)
495
509
  times = {self.__parse_date_time(e) for e in events}
496
510
  if not ref_times_set.issuperset(times):
497
511
  raise ValueError(
498
- 'PI: Not all timeseries share the same time step spacing. Make sure '
499
- 'the time steps of all series are a subset of the global time steps.')
512
+ "PI: Not all timeseries share the same time step spacing. Make sure "
513
+ "the time steps of all series are a subset of the global time steps."
514
+ )
500
515
 
501
516
  if self.__forecast_datetime is not None:
502
517
  if self.__dt:
503
518
  self.__forecast_datetime = self.__floor_date_time(
504
- dt=self.__forecast_datetime, tdel=self.__dt)
519
+ dt=self.__forecast_datetime, tdel=self.__dt
520
+ )
505
521
  try:
506
- self.__forecast_index = self.__times.index(
507
- self.__forecast_datetime)
522
+ self.__forecast_index = self.__times.index(self.__forecast_datetime)
508
523
  except ValueError:
509
524
  # This may occur if forecast_datetime is outside of
510
525
  # the timeseries' range. Can be a valid case for historical
@@ -512,17 +527,17 @@ class Timeseries:
512
527
  self.__forecast_index = -1
513
528
 
514
529
  # Parse data
515
- for series in self.__xml_root.findall('pi:series', ns):
516
- header = series.find('pi:header', ns)
530
+ for series in self.__xml_root.findall("pi:series", ns):
531
+ header = series.find("pi:header", ns)
517
532
 
518
533
  variable = self.__data_config.variable(header)
519
534
 
520
- dt = self.__parse_time_step(header.find('pi:timeStep', ns))
521
- start_datetime = self.__parse_date_time(header.find('pi:startDate', ns))
522
- end_datetime = self.__parse_date_time(header.find('pi:endDate', ns))
535
+ dt = self.__parse_time_step(header.find("pi:timeStep", ns))
536
+ start_datetime = self.__parse_date_time(header.find("pi:startDate", ns))
537
+ end_datetime = self.__parse_date_time(header.find("pi:endDate", ns))
523
538
 
524
539
  make_virtual_ensemble = False
525
- el = header.find('pi:ensembleMemberIndex', ns)
540
+ el = header.find("pi:ensembleMemberIndex", ns)
526
541
  if el is not None:
527
542
  ensemble_member = int(el.text)
528
543
  while ensemble_member >= len(self.__values):
@@ -543,70 +558,103 @@ class Timeseries:
543
558
 
544
559
  if self.__dt:
545
560
  n_values = int(
546
- round((end_datetime - start_datetime).total_seconds() / dt.total_seconds() + 1))
561
+ round(
562
+ (end_datetime - start_datetime).total_seconds() / dt.total_seconds() + 1
563
+ )
564
+ )
547
565
  else:
548
- n_values = (bisect.bisect_left(self.__times, end_datetime) -
549
- bisect.bisect_left(self.__times, start_datetime) + 1)
566
+ n_values = (
567
+ bisect.bisect_left(self.__times, end_datetime)
568
+ - bisect.bisect_left(self.__times, start_datetime)
569
+ + 1
570
+ )
550
571
 
551
572
  if self.__binary:
552
573
  if f is not None:
553
574
  self.__values[ensemble_member][variable] = np.fromfile(
554
- f, count=n_values, dtype=self.__pi_dtype)
575
+ f, count=n_values, dtype=self.__pi_dtype
576
+ )
555
577
  else:
556
578
  self.__values[ensemble_member][variable] = np.empty(
557
- n_values, dtype=self.__internal_dtype)
579
+ n_values, dtype=self.__internal_dtype
580
+ )
558
581
  self.__values[ensemble_member][variable].fill(np.nan)
559
582
 
560
583
  else:
561
- events = series.findall('pi:event', ns)
584
+ events = series.findall("pi:event", ns)
562
585
 
563
586
  self.__values[ensemble_member][variable] = np.empty(
564
- n_values, dtype=self.__internal_dtype)
587
+ n_values, dtype=self.__internal_dtype
588
+ )
565
589
  self.__values[ensemble_member][variable].fill(np.nan)
566
590
  # This assumes that start_datetime equals the datetime of the
567
591
  # first value (which should be the case).
568
592
  for i in range(min(n_values, len(events))):
569
- self.__values[ensemble_member][variable][
570
- i] = float(events[i].get('value'))
593
+ self.__values[ensemble_member][variable][i] = float(events[i].get("value"))
571
594
 
572
- miss_val = float(header.find('pi:missVal', ns).text)
573
- self.__values[ensemble_member][variable][self.__values[
574
- ensemble_member][variable] == miss_val] = np.nan
595
+ miss_val = float(header.find("pi:missVal", ns).text)
596
+ self.__values[ensemble_member][variable][
597
+ self.__values[ensemble_member][variable] == miss_val
598
+ ] = np.nan
575
599
 
576
- unit = header.find('pi:units', ns).text
600
+ unit = header.find("pi:units", ns).text
577
601
  self.set_unit(variable, unit=unit, ensemble_member=ensemble_member)
578
602
 
579
603
  # Prepend empty space, if start_datetime > self.__start_datetime.
580
604
  if start_datetime > self.__start_datetime:
581
605
  if self.__dt:
582
606
  filler = np.empty(
583
- int(round((start_datetime - self.__start_datetime).total_seconds() / dt.total_seconds())),
584
- dtype=self.__internal_dtype)
607
+ int(
608
+ round(
609
+ (start_datetime - self.__start_datetime).total_seconds()
610
+ / dt.total_seconds()
611
+ )
612
+ ),
613
+ dtype=self.__internal_dtype,
614
+ )
585
615
  else:
586
616
  filler = np.empty(
587
- int(round(bisect.bisect_left(self.__times, start_datetime) -
588
- bisect.bisect_left(self.__times, self.__start_datetime))),
589
- dtype=self.__internal_dtype)
617
+ int(
618
+ round(
619
+ bisect.bisect_left(self.__times, start_datetime)
620
+ - bisect.bisect_left(self.__times, self.__start_datetime)
621
+ )
622
+ ),
623
+ dtype=self.__internal_dtype,
624
+ )
590
625
 
591
626
  filler.fill(np.nan)
592
627
  self.__values[ensemble_member][variable] = np.hstack(
593
- (filler, self.__values[ensemble_member][variable]))
628
+ (filler, self.__values[ensemble_member][variable])
629
+ )
594
630
 
595
631
  # Append empty space, if end_datetime < self.__end_datetime
596
632
  if end_datetime < self.__end_datetime:
597
633
  if self.__dt:
598
634
  filler = np.empty(
599
- int(round((self.__end_datetime - end_datetime).total_seconds() / dt.total_seconds())),
600
- dtype=self.__internal_dtype)
635
+ int(
636
+ round(
637
+ (self.__end_datetime - end_datetime).total_seconds()
638
+ / dt.total_seconds()
639
+ )
640
+ ),
641
+ dtype=self.__internal_dtype,
642
+ )
601
643
  else:
602
644
  filler = np.empty(
603
- int(round(bisect.bisect_left(self.__times, self.__end_datetime) -
604
- bisect.bisect_left(self.__times, end_datetime))),
605
- dtype=self.__internal_dtype)
645
+ int(
646
+ round(
647
+ bisect.bisect_left(self.__times, self.__end_datetime)
648
+ - bisect.bisect_left(self.__times, end_datetime)
649
+ )
650
+ ),
651
+ dtype=self.__internal_dtype,
652
+ )
606
653
 
607
654
  filler.fill(np.nan)
608
655
  self.__values[ensemble_member][variable] = np.hstack(
609
- (self.__values[ensemble_member][variable], filler))
656
+ (self.__values[ensemble_member][variable], filler)
657
+ )
610
658
 
611
659
  if make_virtual_ensemble:
612
660
  # Make references to the original input series for the virtual
@@ -619,35 +667,43 @@ class Timeseries:
619
667
  # Remove time values outside the start/end datetimes.
620
668
  # Only needed for non-equidistant, because we can't build the
621
669
  # times automatically from global start/end datetime.
622
- self.__times = self.__times[bisect.bisect_left(self.__times, self.__start_datetime):
623
- bisect.bisect_left(self.__times, self.__end_datetime) + 1]
670
+ self.__times = self.__times[
671
+ bisect.bisect_left(self.__times, self.__start_datetime) : bisect.bisect_left(
672
+ self.__times, self.__end_datetime
673
+ )
674
+ + 1
675
+ ]
624
676
 
625
677
  if f is not None and self.__binary:
626
678
  f.close()
627
679
 
628
680
  def __reset_xml_tree(self):
629
681
  # Make a new empty XML tree
630
- self.__xml_root = ET.Element('{%s}' % (ns['pi'], ) + 'TimeSeries')
682
+ self.__xml_root = ET.Element("{%s}" % (ns["pi"],) + "TimeSeries")
631
683
  self.__tree = ET.ElementTree(self.__xml_root)
632
684
 
633
- self.__xml_root.set('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance')
634
- self.__xml_root.set('version', '1.2')
685
+ self.__xml_root.set("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance")
686
+ self.__xml_root.set("version", "1.2")
635
687
  self.__xml_root.set(
636
- 'xsi:schemaLocation',
637
- 'http://www.wldelft.nl/fews/PI http://fews.wldelft.nl/schemas/version1.0/pi-schemas/pi_timeseries.xsd')
688
+ "xsi:schemaLocation",
689
+ "http://www.wldelft.nl/fews/PI "
690
+ "http://fews.wldelft.nl/schemas/version1.0/pi-schemas/pi_timeseries.xsd",
691
+ )
638
692
 
639
693
  def __parse_date_time(self, el):
640
694
  # Parse a PI date time element.
641
- return datetime.datetime.strptime(el.get('date') + ' ' + el.get('time'), '%Y-%m-%d %H:%M:%S')
695
+ return datetime.datetime.strptime(
696
+ el.get("date") + " " + el.get("time"), "%Y-%m-%d %H:%M:%S"
697
+ )
642
698
 
643
699
  def __parse_time_step(self, el):
644
700
  # Parse a PI time step element.
645
- if el.get('unit') == 'second':
646
- return datetime.timedelta(seconds=int(el.get('multiplier')))
647
- elif el.get('unit') == 'nonequidistant':
701
+ if el.get("unit") == "second":
702
+ return datetime.timedelta(seconds=int(el.get("multiplier")))
703
+ elif el.get("unit") == "nonequidistant":
648
704
  return None
649
705
  else:
650
- raise Exception('Unsupported unit type: ' + el.get('unit'))
706
+ raise Exception("Unsupported unit type: " + el.get("unit"))
651
707
 
652
708
  def __floor_date_time(self, dt, tdel):
653
709
  """
@@ -661,7 +717,9 @@ class Timeseries:
661
717
  rounding = (seconds + roundTo / 2) // roundTo * roundTo
662
718
  return dt + datetime.timedelta(0, rounding - seconds, -dt.microsecond)
663
719
 
664
- def __add_header(self, variable, location_parameter_id, ensemble_member=0, miss_val=-999, unit='unit_unknown'):
720
+ def __add_header(
721
+ self, variable, location_parameter_id, ensemble_member=0, miss_val=-999, unit="unit_unknown"
722
+ ):
665
723
  """
666
724
  Add a timeseries header to the timeseries object.
667
725
  """
@@ -670,56 +728,75 @@ class Timeseries:
670
728
 
671
729
  # Define the basic structure of the header
672
730
  header_elements = [
673
- 'type', 'locationId', 'parameterId', 'timeStep', 'startDate', 'endDate',
674
- 'missVal', 'stationName', 'units', 'creationDate', 'creationTime']
731
+ "type",
732
+ "locationId",
733
+ "parameterId",
734
+ "timeStep",
735
+ "startDate",
736
+ "endDate",
737
+ "missVal",
738
+ "stationName",
739
+ "units",
740
+ "creationDate",
741
+ "creationTime",
742
+ ]
675
743
  header_element_texts = [
676
- 'instantaneous', location_parameter_id.location_id, location_parameter_id.parameter_id,
677
- '', '', '', str(miss_val), location_parameter_id.location_id, unit,
678
- now.strftime('%Y-%m-%d'), now.strftime('%H:%M:%S')]
744
+ "instantaneous",
745
+ location_parameter_id.location_id,
746
+ location_parameter_id.parameter_id,
747
+ "",
748
+ "",
749
+ "",
750
+ str(miss_val),
751
+ location_parameter_id.location_id,
752
+ unit,
753
+ now.strftime("%Y-%m-%d"),
754
+ now.strftime("%H:%M:%S"),
755
+ ]
679
756
 
680
757
  # Add ensembleMemberIndex, forecastDate and qualifierId if necessary.
681
758
  if self.__forecast_datetime != self.__start_datetime:
682
- header_elements.insert(6, 'forecastDate')
683
- header_element_texts.insert(6, '')
759
+ header_elements.insert(6, "forecastDate")
760
+ header_element_texts.insert(6, "")
684
761
  if self.contains_ensemble:
685
- header_elements.insert(3, 'ensembleMemberIndex')
762
+ header_elements.insert(3, "ensembleMemberIndex")
686
763
  header_element_texts.insert(3, str(ensemble_member))
687
764
  if len(location_parameter_id.qualifier_id) > 0:
688
765
  # Track relative index to preserve original ordering of qualifier ID's
689
766
  i = 0
690
767
  for qualifier_id in location_parameter_id.qualifier_id:
691
- header_elements.insert(3, 'qualifierId')
692
- header_element_texts.insert(3+i, qualifier_id)
768
+ header_elements.insert(3, "qualifierId")
769
+ header_element_texts.insert(3 + i, qualifier_id)
693
770
  i += 1
694
771
 
695
772
  # Fill the basics of the series
696
- series = ET.Element('{%s}' % (ns['pi'], ) + 'series')
697
- header = ET.SubElement(series, '{%s}' % (ns['pi'], ) + 'header')
773
+ series = ET.Element("{%s}" % (ns["pi"],) + "series")
774
+ header = ET.SubElement(series, "{%s}" % (ns["pi"],) + "header")
698
775
  for i in range(len(header_elements)):
699
- el = ET.SubElement(header, '{%s}' % (ns['pi'], ) + header_elements[i])
776
+ el = ET.SubElement(header, "{%s}" % (ns["pi"],) + header_elements[i])
700
777
  el.text = header_element_texts[i]
701
778
 
702
- el = header.find('pi:timeStep', ns)
779
+ el = header.find("pi:timeStep", ns)
703
780
  # Set time step
704
781
  if self.dt:
705
- el.set('unit', 'second')
706
- el.set('multiplier', str(int(self.dt.total_seconds())))
782
+ el.set("unit", "second")
783
+ el.set("multiplier", str(int(self.dt.total_seconds())))
707
784
  else:
708
- el.set('unit', 'nonequidistant')
785
+ el.set("unit", "nonequidistant")
709
786
 
710
787
  # Set the time range.
711
- el = header.find('pi:startDate', ns)
712
- el.set('date', self.__start_datetime.strftime('%Y-%m-%d'))
713
- el.set('time', self.__start_datetime.strftime('%H:%M:%S'))
714
- el = header.find('pi:endDate', ns)
715
- el.set('date', self.__end_datetime.strftime('%Y-%m-%d'))
716
- el.set('time', self.__end_datetime.strftime('%H:%M:%S'))
788
+ el = header.find("pi:startDate", ns)
789
+ el.set("date", self.__start_datetime.strftime("%Y-%m-%d"))
790
+ el.set("time", self.__start_datetime.strftime("%H:%M:%S"))
791
+ el = header.find("pi:endDate", ns)
792
+ el.set("date", self.__end_datetime.strftime("%Y-%m-%d"))
793
+ el.set("time", self.__end_datetime.strftime("%H:%M:%S"))
717
794
 
718
795
  # Set the forecast date if applicable
719
796
  if self.__forecast_datetime != self.__start_datetime:
720
- el = header.find('pi:forecastDate', ns)
721
- el.set('date', self.__forecast_datetime.strftime('%Y-%m-%d'))
722
- el.set('time', self.__forecast_datetime.strftime('%H:%M:%S'))
797
+ el = header.find("pi:forecastDate", ns)
798
+ el.set("date", self.__forecast_datetime.strftime("%Y-%m-%d"))
799
+ el.set("time", self.__forecast_datetime.strftime("%H:%M:%S"))
723
800
 
724
801
  # Add series to xml
725
802
  self.__xml_root.append(series)
@@ -730,7 +807,7 @@ class Timeseries:
730
807
  """
731
808
 
732
809
  if self.__binary:
733
- f = io.open(self.binary_path, 'wb')
810
+ f = io.open(self.binary_path, "wb")
734
811
 
735
812
  if self.make_new_file:
736
813
  # Force reinitialization in case write() is called more than once
@@ -741,44 +818,48 @@ class Timeseries:
741
818
  location_parameter_id = self.__data_config.pi_variable_ids(variable)
742
819
  unit = self.get_unit(variable, ensemble_member)
743
820
  self.__add_header(
744
- variable, location_parameter_id,
745
- ensemble_member=ensemble_member, miss_val=-999, unit=unit)
821
+ variable,
822
+ location_parameter_id,
823
+ ensemble_member=ensemble_member,
824
+ miss_val=-999,
825
+ unit=unit,
826
+ )
746
827
 
747
828
  for ensemble_member in range(len(self.__values)):
748
829
  if self.timezone is not None:
749
- timezone = self.__xml_root.find('pi:timeZone', ns)
830
+ timezone = self.__xml_root.find("pi:timeZone", ns)
750
831
  if timezone is None:
751
- timezone = ET.Element('{%s}' % (ns['pi'],) + 'timeZone')
832
+ timezone = ET.Element("{%s}" % (ns["pi"],) + "timeZone")
752
833
  # timeZone has to be the first element according to the schema
753
834
  self.__xml_root.insert(0, timezone)
754
835
  timezone.text = str(self.timezone)
755
836
 
756
- for series in self.__xml_root.findall('pi:series', ns):
757
- header = series.find('pi:header', ns)
837
+ for series in self.__xml_root.findall("pi:series", ns):
838
+ header = series.find("pi:header", ns)
758
839
 
759
840
  # First check ensembleMemberIndex, to see if it is the correct one.
760
- el = header.find('pi:ensembleMemberIndex', ns)
841
+ el = header.find("pi:ensembleMemberIndex", ns)
761
842
  if el is not None:
762
843
  if ensemble_member != int(el.text):
763
844
  # Skip over this series, wrong index.
764
845
  continue
765
846
 
766
847
  # Update the time range, which may have changed.
767
- el = header.find('pi:startDate', ns)
768
- el.set('date', self.__start_datetime.strftime('%Y-%m-%d'))
769
- el.set('time', self.__start_datetime.strftime('%H:%M:%S'))
848
+ el = header.find("pi:startDate", ns)
849
+ el.set("date", self.__start_datetime.strftime("%Y-%m-%d"))
850
+ el.set("time", self.__start_datetime.strftime("%H:%M:%S"))
770
851
 
771
- el = header.find('pi:endDate', ns)
772
- el.set('date', self.__end_datetime.strftime('%Y-%m-%d'))
773
- el.set('time', self.__end_datetime.strftime('%H:%M:%S'))
852
+ el = header.find("pi:endDate", ns)
853
+ el.set("date", self.__end_datetime.strftime("%Y-%m-%d"))
854
+ el.set("time", self.__end_datetime.strftime("%H:%M:%S"))
774
855
 
775
856
  variable = self.__data_config.variable(header)
776
857
 
777
- miss_val = float(header.find('pi:missVal', ns).text)
858
+ miss_val = header.find("pi:missVal", ns).text
778
859
  values = self.__values[ensemble_member][variable]
779
860
 
780
861
  # Update the header, which may have changed
781
- el = header.find('pi:units', ns)
862
+ el = header.find("pi:units", ns)
782
863
  el.text = self.get_unit(variable, ensemble_member)
783
864
 
784
865
  # No values to be written, so the entire element is removed from
@@ -787,15 +868,12 @@ class Timeseries:
787
868
  self.__xml_root.remove(series)
788
869
  continue
789
870
 
790
- # Replace NaN with missing value
791
- nans = np.isnan(values)
792
- values[nans] = miss_val
793
-
794
871
  # Write output
872
+ nans = np.isnan(values)
795
873
  if self.__binary:
796
874
  f.write(values.astype(self.__pi_dtype).tobytes())
797
875
  else:
798
- events = series.findall('pi:event', ns)
876
+ events = series.findall("pi:event", ns)
799
877
 
800
878
  t = self.__start_datetime
801
879
  for i in range(min(len(events), len(values))):
@@ -803,20 +881,23 @@ class Timeseries:
803
881
  t = self.times[i]
804
882
  # Set the date/time, so that any date/time steps that
805
883
  # are wrong in the placeholder file are corrected.
806
- events[i].set('date', t.strftime('%Y-%m-%d'))
807
- events[i].set('time', t.strftime('%H:%M:%S'))
884
+ events[i].set("date", t.strftime("%Y-%m-%d"))
885
+ events[i].set("time", t.strftime("%H:%M:%S"))
808
886
 
809
887
  # Set the value
810
- events[i].set('value', str(values[i]))
888
+ events[i].set("value", str(values[i]))
811
889
  if self.dt:
812
890
  t += self.dt
813
891
  for i in range(len(events), len(values)):
814
892
  if self.dt is None:
815
893
  t = self.times[i]
816
- event = ET.Element('pi:event')
817
- event.set('date', t.strftime('%Y-%m-%d'))
818
- event.set('time', t.strftime('%H:%M:%S'))
819
- event.set('value', str(values[i]))
894
+ event = ET.Element("pi:event")
895
+ event.set("date", t.strftime("%Y-%m-%d"))
896
+ event.set("time", t.strftime("%H:%M:%S"))
897
+ if nans[i]:
898
+ event.set("value", miss_val)
899
+ else:
900
+ event.set("value", str(values[i]))
820
901
  series.append(event)
821
902
  if self.dt:
822
903
  t += self.dt
@@ -826,9 +907,6 @@ class Timeseries:
826
907
  for i in range(len(values), len(events)):
827
908
  series.remove(events[i])
828
909
 
829
- # Restore NaN
830
- values[nans] = np.nan
831
-
832
910
  if self.__binary:
833
911
  f.close()
834
912
 
@@ -852,14 +930,14 @@ class Timeseries:
852
930
  This code is based on
853
931
  https://stackoverflow.com/questions/3095434/inserting-newlines-in-xml-file-generated-via-xml-etree-elementtree-in-python
854
932
  """
855
- indent = "\n" + level*" "
933
+ indent = "\n" + level * " "
856
934
  if len(element):
857
935
  if not element.text or not element.text.strip():
858
936
  element.text = indent + " "
859
937
  if not element.tail or not element.tail.strip():
860
938
  element.tail = indent
861
939
  for subelement in element:
862
- Timeseries.format_xml_element(subelement, level+1)
940
+ Timeseries.format_xml_element(subelement, level + 1)
863
941
  if not element.tail or not element.tail.strip():
864
942
  element.tail = indent
865
943
  else:
@@ -996,12 +1074,13 @@ class Timeseries:
996
1074
  :param variable: Time series ID.
997
1075
  :param ensemble_member: Ensemble member index.
998
1076
 
999
- :returns: A :string: containing the unit. If not set for the variable, returns 'unit_unknown'.
1077
+ :returns: A :string: containing the unit. If not set for the variable,
1078
+ returns 'unit_unknown'.
1000
1079
  """
1001
1080
  try:
1002
1081
  return self.__units[ensemble_member][variable]
1003
1082
  except KeyError:
1004
- return 'unit_unknown'
1083
+ return "unit_unknown"
1005
1084
 
1006
1085
  def set_unit(self, variable, unit, ensemble_member=0):
1007
1086
  """
@@ -1022,43 +1101,56 @@ class Timeseries:
1022
1101
  """
1023
1102
 
1024
1103
  if self.__dt:
1025
- n_delta_s = int(round(
1026
- (start_datetime - self.__start_datetime).total_seconds() / self.__dt.total_seconds()))
1104
+ n_delta_s = int(
1105
+ round(
1106
+ (start_datetime - self.__start_datetime).total_seconds()
1107
+ / self.__dt.total_seconds()
1108
+ )
1109
+ )
1027
1110
  else:
1028
1111
  if start_datetime >= self.__start_datetime:
1029
- n_delta_s = bisect.bisect_left(self.__times, start_datetime) - \
1030
- bisect.bisect_left(self.__times, self.__start_datetime)
1112
+ n_delta_s = bisect.bisect_left(self.__times, start_datetime) - bisect.bisect_left(
1113
+ self.__times, self.__start_datetime
1114
+ )
1031
1115
  else:
1032
1116
  raise ValueError(
1033
- 'PI: Resizing a non-equidistant timeseries to stretch '
1034
- 'outside of the global range of times is not allowed.')
1117
+ "PI: Resizing a non-equidistant timeseries to stretch "
1118
+ "outside of the global range of times is not allowed."
1119
+ )
1035
1120
 
1036
1121
  for ensemble_member in range(len(self.__values)):
1037
1122
  if n_delta_s > 0:
1038
1123
  # New start datetime lies after old start datetime (timeseries will be shortened).
1039
1124
  for key in self.__values[ensemble_member].keys():
1040
- self.__values[ensemble_member][key] = self.__values[
1041
- ensemble_member][key][n_delta_s:]
1125
+ self.__values[ensemble_member][key] = self.__values[ensemble_member][key][
1126
+ n_delta_s:
1127
+ ]
1042
1128
  elif n_delta_s < 0:
1043
1129
  # New start datetime lies before old start datetime (timeseries will be lengthened).
1044
1130
  filler = np.empty(abs(n_delta_s))
1045
1131
  filler.fill(np.nan)
1046
1132
  for key in self.__values[ensemble_member].keys():
1047
1133
  self.__values[ensemble_member][key] = np.hstack(
1048
- (filler, self.__values[ensemble_member][key]))
1134
+ (filler, self.__values[ensemble_member][key])
1135
+ )
1049
1136
  self.__start_datetime = start_datetime
1050
1137
 
1051
1138
  if self.__dt:
1052
- n_delta_e = int(round(
1053
- (end_datetime - self.__end_datetime).total_seconds() / self.__dt.total_seconds()))
1139
+ n_delta_e = int(
1140
+ round(
1141
+ (end_datetime - self.__end_datetime).total_seconds() / self.__dt.total_seconds()
1142
+ )
1143
+ )
1054
1144
  else:
1055
1145
  if end_datetime <= self.__end_datetime:
1056
- n_delta_e = bisect.bisect_left(self.__times, end_datetime) - \
1057
- bisect.bisect_left(self.__times, self.__end_datetime)
1146
+ n_delta_e = bisect.bisect_left(self.__times, end_datetime) - bisect.bisect_left(
1147
+ self.__times, self.__end_datetime
1148
+ )
1058
1149
  else:
1059
1150
  raise ValueError(
1060
- 'PI: Resizing a non-equidistant timeseries to stretch '
1061
- 'outside of the global range of times is not allowed.')
1151
+ "PI: Resizing a non-equidistant timeseries to stretch "
1152
+ "outside of the global range of times is not allowed."
1153
+ )
1062
1154
 
1063
1155
  for ensemble_member in range(len(self.__values)):
1064
1156
  if n_delta_e > 0:
@@ -1067,12 +1159,14 @@ class Timeseries:
1067
1159
  filler.fill(np.nan)
1068
1160
  for key in self.__values[ensemble_member].keys():
1069
1161
  self.__values[ensemble_member][key] = np.hstack(
1070
- (self.__values[ensemble_member][key], filler))
1162
+ (self.__values[ensemble_member][key], filler)
1163
+ )
1071
1164
  elif n_delta_e < 0:
1072
1165
  # New end datetime lies before old end datetime (timeseries will be shortened).
1073
1166
  for key in self.__values[ensemble_member].keys():
1074
- self.__values[ensemble_member][key] = self.__values[
1075
- ensemble_member][key][:n_delta_e]
1167
+ self.__values[ensemble_member][key] = self.__values[ensemble_member][key][
1168
+ :n_delta_e
1169
+ ]
1076
1170
  self.__end_datetime = end_datetime
1077
1171
 
1078
1172
  @property
@@ -1084,7 +1178,7 @@ class Timeseries:
1084
1178
  """
1085
1179
  The path for the binary data .bin file.
1086
1180
  """
1087
- return os.path.join(self.__folder, self.__basename + '.bin')
1181
+ return os.path.join(self.__folder, self.__basename + ".bin")
1088
1182
 
1089
1183
  def items(self, ensemble_member=0):
1090
1184
  """