emhass 0.10.5__py3-none-any.whl → 0.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
emhass/utils.py CHANGED
@@ -1,6 +1,8 @@
  #!/usr/bin/env python3
  # -*- coding: utf-8 -*-

+ import csv
+ import os
  from typing import Tuple, Optional
  from datetime import datetime, timedelta, timezone
  import logging
@@ -9,6 +11,7 @@ import json
  import copy
  import numpy as np
  import pandas as pd
+ from requests import get
  import yaml
  import pytz
  import ast
@@ -19,7 +22,6 @@ pd.options.plotting.backend = "plotly"

  from emhass.machine_learning_forecaster import MLForecaster

-
  def get_root(file: str, num_parent: Optional[int] = 3) -> str:
  """
  Get the root absolute path of the working directory.
@@ -62,7 +64,10 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] =
  logger.propagate = True
  logger.fileSetting = save_to_file
  if save_to_file:
- ch = logging.FileHandler(emhass_conf['data_path'] / 'logger_emhass.log')
+ if os.path.isdir(emhass_conf['data_path']):
+ ch = logging.FileHandler(emhass_conf['data_path'] / 'logger_emhass.log')
+ else:
+ raise Exception("Unable to access data_path: "+emhass_conf['data_path'])
  else:
  ch = logging.StreamHandler()
  if logging_level == "DEBUG":
@@ -89,7 +94,7 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] =
  return logger, ch


- def get_forecast_dates(freq: int, delta_forecast: int, timedelta_days: Optional[int] = 0
+ def get_forecast_dates(freq: int, delta_forecast: int, time_zone: datetime.tzinfo, timedelta_days: Optional[int] = 0
  ) -> pd.core.indexes.datetimes.DatetimeIndex:
  """
  Get the date_range list of the needed future dates using the delta_forecast parameter.
@@ -109,7 +114,7 @@ def get_forecast_dates(freq: int, delta_forecast: int, timedelta_days: Optional[
  end_forecast = (start_forecast + pd.Timedelta(days=delta_forecast)).replace(microsecond=0)
  forecast_dates = pd.date_range(start=start_forecast,
  end=end_forecast+timedelta(days=timedelta_days)-freq,
- freq=freq).round(freq, ambiguous='infer', nonexistent='shift_forward')
+ freq=freq, tz=time_zone).tz_convert('utc').round(freq, ambiguous='infer', nonexistent='shift_forward').tz_convert(time_zone)
  return forecast_dates


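The forecast index is now built as a timezone-aware range: generated in the local time zone, rounded in UTC, then converted back. A minimal sketch of a call with the new signature, mirroring the caller shown further down in this diff (the literal values are illustrative only):

    import pytz
    # Illustrative values; the time step is passed in minutes, the horizon in days
    time_zone = pytz.timezone("Europe/Paris")
    forecast_dates = get_forecast_dates(freq=30, delta_forecast=1, time_zone=time_zone)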
@@ -121,7 +126,7 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic

  :param runtimeparams: Json string containing the runtime parameters dict.
  :type runtimeparams: str
- :param params: Configuration parameters passed from data/options.json
+ :param params: Built configuration parameters
  :type params: str
  :param retrieve_hass_conf: Container for data retrieving parameters.
  :type retrieve_hass_conf: dict
@@ -137,14 +142,17 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  :rtype: Tuple[str, dict]

  """
+ # check if passed params is a dict
  if (params != None) and (params != "null"):
- params = json.loads(params)
+ if type(params) is str:
+ params = json.loads(params)
  else:
  params = {}
+
  # Some default data needed
  custom_deferrable_forecast_id = []
  custom_predicted_temperature_id = []
- for k in range(optim_conf["num_def_loads"]):
+ for k in range(optim_conf['number_of_deferrable_loads']):
  custom_deferrable_forecast_id.append(
  {
  "entity_id": "sensor.p_deferrable{}".format(k),
@@ -224,12 +232,20 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  params["passed_data"][key] = value
  else:
  params["passed_data"] = default_passed_dict
+
  if runtimeparams is not None:
- runtimeparams = json.loads(runtimeparams)
- freq = int(retrieve_hass_conf["freq"].seconds / 60.0)
- delta_forecast = int(optim_conf["delta_forecast"].days)
- forecast_dates = get_forecast_dates(freq, delta_forecast)
- if set_type == "regressor-model-fit":
+ if type(runtimeparams) is str:
+ runtimeparams = json.loads(runtimeparams)
+ # Format required date/time parameters
+ optimization_time_step = int(
+ retrieve_hass_conf['optimization_time_step'].seconds / 60.0)
+ delta_forecast = int(optim_conf['delta_forecast_daily'].days)
+ time_zone = retrieve_hass_conf["time_zone"]
+ forecast_dates = get_forecast_dates(
+ optimization_time_step, delta_forecast, time_zone)
+
+ # regressor-model-fit
+ if set_type == "regressor-model-fit":
  if "csv_file" in runtimeparams:
  csv_file = runtimeparams["csv_file"]
  params["passed_data"]["csv_file"] = csv_file
@@ -249,6 +265,8 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  else:
  date_features = runtimeparams["date_features"]
  params["passed_data"]["date_features"] = date_features
+
+ # regressor-model-predict
  if set_type == "regressor-model-predict":
  if "new_values" in runtimeparams:
  new_values = runtimeparams["new_values"]
@@ -262,6 +280,7 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  if "target" in runtimeparams:
  target = runtimeparams["target"]
  params["passed_data"]["target"] = target
+
  # Treating special data passed for MPC control case
  if set_type == "naive-mpc-optim":
  if "prediction_horizon" not in runtimeparams.keys():
@@ -270,74 +289,79 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  prediction_horizon = runtimeparams["prediction_horizon"]
  params["passed_data"]["prediction_horizon"] = prediction_horizon
  if "soc_init" not in runtimeparams.keys():
- soc_init = plant_conf["SOCtarget"]
+ soc_init = plant_conf['battery_target_state_of_charge']
  else:
  soc_init = runtimeparams["soc_init"]
  params["passed_data"]["soc_init"] = soc_init
  if "soc_final" not in runtimeparams.keys():
- soc_final = plant_conf["SOCtarget"]
+ soc_final = plant_conf['battery_target_state_of_charge']
  else:
  soc_final = runtimeparams["soc_final"]
  params["passed_data"]["soc_final"] = soc_final
- if "def_total_hours" not in runtimeparams.keys():
- def_total_hours = optim_conf["def_total_hours"]
+ if 'operating_hours_of_each_deferrable_load' not in runtimeparams.keys():
+ def_total_hours = optim_conf['operating_hours_of_each_deferrable_load']
  else:
- def_total_hours = runtimeparams["def_total_hours"]
- params["passed_data"]["def_total_hours"] = def_total_hours
- if "def_start_timestep" not in runtimeparams.keys():
- def_start_timestep = optim_conf["def_start_timestep"]
+ def_total_hours = runtimeparams['operating_hours_of_each_deferrable_load']
+ params["passed_data"]['operating_hours_of_each_deferrable_load'] = def_total_hours
+ if 'start_timesteps_of_each_deferrable_load' in runtimeparams.keys():
+ def_start_timestep = runtimeparams['start_timesteps_of_each_deferrable_load']
  else:
- def_start_timestep = runtimeparams["def_start_timestep"]
- params["passed_data"]["def_start_timestep"] = def_start_timestep
- if "def_end_timestep" not in runtimeparams.keys():
- def_end_timestep = optim_conf["def_end_timestep"]
+ def_start_timestep = runtimeparams.get(
+ 'def_start_timestep', optim_conf['start_timesteps_of_each_deferrable_load'])
+ params["passed_data"]['start_timesteps_of_each_deferrable_load'] = def_start_timestep
+ if 'end_timesteps_of_each_deferrable_load' in runtimeparams.keys():
+ def_end_timestep = runtimeparams['end_timesteps_of_each_deferrable_load']
  else:
- def_end_timestep = runtimeparams["def_end_timestep"]
- params["passed_data"]["def_end_timestep"] = def_end_timestep
- forecast_dates = copy.deepcopy(forecast_dates)[0:prediction_horizon]
+ def_end_timestep = runtimeparams.get(
+ 'def_end_timestep', optim_conf['end_timesteps_of_each_deferrable_load'])
+ params["passed_data"]['end_timesteps_of_each_deferrable_load'] = def_end_timestep
+ forecast_dates = copy.deepcopy(forecast_dates)[
+ 0:prediction_horizon]
  else:
  params["passed_data"]["prediction_horizon"] = None
  params["passed_data"]["soc_init"] = None
  params["passed_data"]["soc_final"] = None
- params["passed_data"]["def_total_hours"] = None
- params["passed_data"]["def_start_timestep"] = None
- params["passed_data"]["def_end_timestep"] = None
+ params["passed_data"]['operating_hours_of_each_deferrable_load'] = None
+ params["passed_data"]['start_timesteps_of_each_deferrable_load'] = None
+ params["passed_data"]['end_timesteps_of_each_deferrable_load'] = None
  # Treat passed forecast data lists
- list_forecast_key = ['pv_power_forecast', 'load_power_forecast', 'load_cost_forecast', 'prod_price_forecast', 'outdoor_temperature_forecast']
- forecast_methods = ['weather_forecast_method', 'load_forecast_method', 'load_cost_forecast_method', 'prod_price_forecast_method', 'outdoor_temperature_forecast_method']
- # Param to save forecast cache (i.e. Solcast)
- if "weather_forecast_cache" not in runtimeparams.keys():
- weather_forecast_cache = False
- else:
- weather_forecast_cache = runtimeparams["weather_forecast_cache"]
- params["passed_data"]["weather_forecast_cache"] = weather_forecast_cache
- # Param to make sure optimization only uses cached data. (else produce error)
- if "weather_forecast_cache_only" not in runtimeparams.keys():
- weather_forecast_cache_only = False
- else:
- weather_forecast_cache_only = runtimeparams["weather_forecast_cache_only"]
- params["passed_data"]["weather_forecast_cache_only"] = weather_forecast_cache_only
+ list_forecast_key = ['pv_power_forecast', 'load_power_forecast',
+ 'load_cost_forecast', 'prod_price_forecast', 'outdoor_temperature_forecast']
+ forecast_methods = ['weather_forecast_method', 'load_forecast_method', 'load_cost_forecast_method',
+ 'production_price_forecast_method', 'outdoor_temperature_forecast_method']
+
+ # Loop forecasts, check if value is a list and greater than or equal to forecast_dates
  for method, forecast_key in enumerate(list_forecast_key):
  if forecast_key in runtimeparams.keys():
  if type(runtimeparams[forecast_key]) == list and len(runtimeparams[forecast_key]) >= len(forecast_dates):
  params['passed_data'][forecast_key] = runtimeparams[forecast_key]
  optim_conf[forecast_methods[method]] = 'list'
  else:
- logger.error(f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}")
- logger.error(f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}")
- list_non_digits = [x for x in runtimeparams[forecast_key] if not (isinstance(x, int) or isinstance(x, float))]
+ logger.error(
+ f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}")
+ logger.error(
+ f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}")
+ # Check if string contains list, if so extract
+ if type(runtimeparams[forecast_key]) == str:
+ if type(ast.literal_eval(runtimeparams[forecast_key])) == list:
+ runtimeparams[forecast_key] = ast.literal_eval(runtimeparams[forecast_key])
+ list_non_digits = [x for x in runtimeparams[forecast_key] if not (
+ isinstance(x, int) or isinstance(x, float))]
  if len(list_non_digits) > 0:
- logger.warning(f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)")
+ logger.warning(
+ f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)")
  for x in list_non_digits:
- logger.warning(f"This value in {forecast_key} was detected as non digits: {str(x)}")
+ logger.warning(
+ f"This value in {forecast_key} was detected as non digits: {str(x)}")
  else:
  params['passed_data'][forecast_key] = None
+
  # Treat passed data for forecast model fit/predict/tune at runtime
- if "days_to_retrieve" not in runtimeparams.keys():
- days_to_retrieve = 9
+ if 'historic_days_to_retrieve' in runtimeparams.keys():
+ days_to_retrieve = runtimeparams['historic_days_to_retrieve']
  else:
- days_to_retrieve = runtimeparams["days_to_retrieve"]
- params["passed_data"]["days_to_retrieve"] = days_to_retrieve
+ days_to_retrieve = runtimeparams.get('days_to_retrieve', 9)
+ params["passed_data"]['historic_days_to_retrieve'] = days_to_retrieve
  if "model_type" not in runtimeparams.keys():
  model_type = "load_forecast"
  else:
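Taken together, the renamed runtime keys mean a caller can now pass a payload like the sketch below (values are illustrative; the legacy spellings such as days_to_retrieve and def_total_hours are still read as fallbacks by the code above):

    runtimeparams = json.dumps({
        "historic_days_to_retrieve": 9,                     # formerly days_to_retrieve
        "operating_hours_of_each_deferrable_load": [5, 8],  # formerly def_total_hours
        "pv_power_forecast": [0.0] * 48,                    # one value per forecast step
    })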
@@ -371,12 +395,14 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  if "perform_backtest" not in runtimeparams.keys():
  perform_backtest = False
  else:
- perform_backtest = ast.literal_eval(str(runtimeparams["perform_backtest"]).capitalize())
+ perform_backtest = ast.literal_eval(
+ str(runtimeparams["perform_backtest"]).capitalize())
  params["passed_data"]["perform_backtest"] = perform_backtest
  if "model_predict_publish" not in runtimeparams.keys():
  model_predict_publish = False
  else:
- model_predict_publish = ast.literal_eval(str(runtimeparams["model_predict_publish"]).capitalize())
+ model_predict_publish = ast.literal_eval(
+ str(runtimeparams["model_predict_publish"]).capitalize())
  params["passed_data"]["model_predict_publish"] = model_predict_publish
  if "model_predict_entity_id" not in runtimeparams.keys():
  model_predict_entity_id = "sensor.p_load_forecast_custom_model"
@@ -408,6 +434,7 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  else:
  mlr_predict_friendly_name = runtimeparams["mlr_predict_friendly_name"]
  params["passed_data"]["mlr_predict_friendly_name"] = mlr_predict_friendly_name
+
  # Treat passed data for other parameters
  if "alpha" not in runtimeparams.keys():
  alpha = 0.5
@@ -419,68 +446,124 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  else:
  beta = runtimeparams["beta"]
  params["passed_data"]["beta"] = beta
- # Treat optimization configuration parameters passed at runtime
- if "num_def_loads" in runtimeparams.keys():
- optim_conf["num_def_loads"] = runtimeparams["num_def_loads"]
- if "P_deferrable_nom" in runtimeparams.keys():
- optim_conf["P_deferrable_nom"] = runtimeparams["P_deferrable_nom"]
- if "def_total_hours" in runtimeparams.keys():
- optim_conf["def_total_hours"] = runtimeparams["def_total_hours"]
- if "def_start_timestep" in runtimeparams.keys():
- optim_conf["def_start_timestep"] = runtimeparams["def_start_timestep"]
- if "def_end_timestep" in runtimeparams.keys():
- optim_conf["def_end_timestep"] = runtimeparams["def_end_timestep"]
+ # Param to save forecast cache (i.e. Solcast)
+ if "weather_forecast_cache" not in runtimeparams.keys():
+ weather_forecast_cache = False
+ else:
+ weather_forecast_cache = runtimeparams["weather_forecast_cache"]
+ params["passed_data"]["weather_forecast_cache"] = weather_forecast_cache
+ # Param to make sure optimization only uses cached data. (else produce error)
+ if "weather_forecast_cache_only" not in runtimeparams.keys():
+ weather_forecast_cache_only = False
+ else:
+ weather_forecast_cache_only = runtimeparams["weather_forecast_cache_only"]
+ params["passed_data"]["weather_forecast_cache_only"] = weather_forecast_cache_only
+ # A condition to manually save entity data under data_path/entities after optimization
+ if "entity_save" not in runtimeparams.keys():
+ entity_save = ""
+ else:
+ entity_save = runtimeparams["entity_save"]
+ params["passed_data"]["entity_save"] = entity_save
+ # A condition to put a prefix on all published data, or check for saved data under prefix name
+ if "publish_prefix" not in runtimeparams.keys():
+ publish_prefix = ""
+ else:
+ publish_prefix = runtimeparams["publish_prefix"]
+ params["passed_data"]["publish_prefix"] = publish_prefix
+
+ # Treat optimization (optim_conf) configuration parameters passed at runtime
+ if 'number_of_deferrable_loads' in runtimeparams.keys():
+ optim_conf['number_of_deferrable_loads'] = runtimeparams['number_of_deferrable_loads']
+ if 'num_def_loads' in runtimeparams.keys():
+ optim_conf['number_of_deferrable_loads'] = runtimeparams['num_def_loads']
+ if 'nominal_power_of_deferrable_loads' in runtimeparams.keys():
+ optim_conf['nominal_power_of_deferrable_loads'] = runtimeparams['nominal_power_of_deferrable_loads']
+ if 'P_deferrable_nom' in runtimeparams.keys():
+ optim_conf['nominal_power_of_deferrable_loads'] = runtimeparams['P_deferrable_nom']
+ if 'operating_hours_of_each_deferrable_load' in runtimeparams.keys():
+ optim_conf['operating_hours_of_each_deferrable_load'] = runtimeparams['operating_hours_of_each_deferrable_load']
+ if 'def_total_hours' in runtimeparams.keys():
+ optim_conf['operating_hours_of_each_deferrable_load'] = runtimeparams['def_total_hours']
+ if 'start_timesteps_of_each_deferrable_load' in runtimeparams.keys():
+ optim_conf['start_timesteps_of_each_deferrable_load'] = runtimeparams['start_timesteps_of_each_deferrable_load']
+ if 'end_timesteps_of_each_deferrable_load' in runtimeparams.keys():
+ optim_conf['end_timesteps_of_each_deferrable_load'] = runtimeparams['end_timesteps_of_each_deferrable_load']
  if "def_current_state" in runtimeparams.keys():
- optim_conf["def_current_state"] = [bool(s) for s in runtimeparams["def_current_state"]]
- if "treat_def_as_semi_cont" in runtimeparams.keys():
- optim_conf["treat_def_as_semi_cont"] = [
+ optim_conf["def_current_state"] = [
+ bool(s) for s in runtimeparams["def_current_state"]]
+ if 'treat_deferrable_load_as_semi_cont' in runtimeparams.keys():
+ optim_conf['treat_deferrable_load_as_semi_cont'] = [
+ ast.literal_eval(str(k).capitalize())
+ for k in runtimeparams['treat_deferrable_load_as_semi_cont']
+ ]
+ if 'treat_def_as_semi_cont' in runtimeparams.keys():
+ optim_conf['treat_deferrable_load_as_semi_cont'] = [
  ast.literal_eval(str(k).capitalize())
- for k in runtimeparams["treat_def_as_semi_cont"]
+ for k in runtimeparams['treat_def_as_semi_cont']
  ]
- if "set_def_constant" in runtimeparams.keys():
- optim_conf["set_def_constant"] = [
- ast.literal_eval(str(k).capitalize()) for k in runtimeparams["set_def_constant"]
+ if 'set_deferrable_load_single_constant' in runtimeparams.keys():
+ optim_conf['set_deferrable_load_single_constant'] = [
+ ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_deferrable_load_single_constant']
  ]
- if "def_start_penalty" in runtimeparams.keys():
- optim_conf["def_start_penalty"] = [
- ast.literal_eval(str(k).capitalize()) for k in runtimeparams["def_start_penalty"]
+ if 'set_def_constant' in runtimeparams.keys():
+ optim_conf['set_deferrable_load_single_constant'] = [
+ ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_def_constant']
+ ]
+ if 'set_deferrable_startup_penalty' in runtimeparams.keys():
+ optim_conf['set_deferrable_startup_penalty'] = [
+ ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_deferrable_startup_penalty']
+ ]
+ if 'def_start_penalty' in runtimeparams.keys():
+ optim_conf['set_deferrable_startup_penalty'] = [
+ ast.literal_eval(str(k).capitalize()) for k in runtimeparams['def_start_penalty']
  ]
  if 'def_load_config' in runtimeparams:
  optim_conf["def_load_config"] = runtimeparams['def_load_config']
+ if 'weight_battery_discharge' in runtimeparams.keys():
+ optim_conf['weight_battery_discharge'] = runtimeparams[
+ 'weight_battery_discharge'
+ ]
+ if 'weight_battery_charge' in runtimeparams.keys():
+ optim_conf['weight_battery_charge'] = runtimeparams['weight_battery_charge']
+
+ # Treat retrieve data from Home Assistant (retrieve_hass_conf) configuration parameters passed at runtime
+ if 'optimization_time_step' in runtimeparams.keys():
+ retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(
+ runtimeparams['optimization_time_step'], "minutes")
+ if 'continual_publish' in runtimeparams.keys():
+ retrieve_hass_conf['continual_publish'] = bool(
+ runtimeparams['continual_publish'])
  if "solcast_api_key" in runtimeparams.keys():
  retrieve_hass_conf["solcast_api_key"] = runtimeparams["solcast_api_key"]
- optim_conf["weather_forecast_method"] = "solcast"
+ optim_conf['weather_forecast_method'] = "solcast"
  if "solcast_rooftop_id" in runtimeparams.keys():
  retrieve_hass_conf["solcast_rooftop_id"] = runtimeparams[
  "solcast_rooftop_id"
  ]
- optim_conf["weather_forecast_method"] = "solcast"
+ optim_conf['weather_forecast_method'] = "solcast"
  if "solar_forecast_kwp" in runtimeparams.keys():
  retrieve_hass_conf["solar_forecast_kwp"] = runtimeparams[
  "solar_forecast_kwp"
  ]
- optim_conf["weather_forecast_method"] = "solar.forecast"
- if "weight_battery_discharge" in runtimeparams.keys():
- optim_conf["weight_battery_discharge"] = runtimeparams[
- "weight_battery_discharge"
- ]
- if "weight_battery_charge" in runtimeparams.keys():
- optim_conf["weight_battery_charge"] = runtimeparams["weight_battery_charge"]
- if 'freq' in runtimeparams.keys():
- retrieve_hass_conf['freq'] = pd.to_timedelta(runtimeparams['freq'], "minutes")
- if 'continual_publish' in runtimeparams.keys():
- retrieve_hass_conf['continual_publish'] = bool(runtimeparams['continual_publish'])
- # Treat plant configuration parameters passed at runtime
- if "SOCmin" in runtimeparams.keys():
- plant_conf["SOCmin"] = runtimeparams["SOCmin"]
- if "SOCmax" in runtimeparams.keys():
- plant_conf["SOCmax"] = runtimeparams["SOCmax"]
- if "SOCtarget" in runtimeparams.keys():
- plant_conf["SOCtarget"] = runtimeparams["SOCtarget"]
- if "Pd_max" in runtimeparams.keys():
- plant_conf["Pd_max"] = runtimeparams["Pd_max"]
- if "Pc_max" in runtimeparams.keys():
- plant_conf["Pc_max"] = runtimeparams["Pc_max"]
+ optim_conf['weather_forecast_method'] = "solar.forecast"
+
+ # Treat system model parameters (plant) configuration parameters passed at runtime
+ if 'battery_minimum_state_of_charge' in runtimeparams.keys() or 'SOCmin' in runtimeparams.keys():
+ plant_conf['battery_minimum_state_of_charge'] = runtimeparams.get(
+ 'battery_minimum_state_of_charge', runtimeparams.get('SOCmin'))
+ if 'battery_maximum_state_of_charge' in runtimeparams.keys() or 'SOCmax' in runtimeparams.keys():
+ plant_conf['battery_maximum_state_of_charge'] = runtimeparams.get(
+ 'battery_maximum_state_of_charge', runtimeparams.get('SOCmax'))
+ if 'battery_target_state_of_charge' in runtimeparams.keys() or 'SOCtarget' in runtimeparams.keys():
+ plant_conf['battery_target_state_of_charge'] = runtimeparams.get(
+ 'battery_target_state_of_charge', runtimeparams.get('SOCtarget'))
+ if 'battery_discharge_power_max' in runtimeparams.keys() or 'Pd_max' in runtimeparams.keys():
+ plant_conf['battery_discharge_power_max'] = runtimeparams.get(
+ 'battery_discharge_power_max', runtimeparams.get('Pd_max'))
+ if 'battery_charge_power_max' in runtimeparams.keys() or 'Pc_max' in runtimeparams.keys():
+ plant_conf['battery_charge_power_max'] = runtimeparams.get(
+ 'battery_charge_power_max', runtimeparams.get('Pc_max'))
+
  # Treat custom entities id's and friendly names for variables
  if "custom_pv_forecast_id" in runtimeparams.keys():
  params["passed_data"]["custom_pv_forecast_id"] = runtimeparams[
@@ -534,84 +617,47 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  params["passed_data"]["custom_predicted_temperature_id"] = runtimeparams[
  "custom_predicted_temperature_id"
  ]
- # A condition to put a prefix on all published data, or check for saved data under prefix name
- if "publish_prefix" not in runtimeparams.keys():
- publish_prefix = ""
- else:
- publish_prefix = runtimeparams["publish_prefix"]
- params["passed_data"]["publish_prefix"] = publish_prefix
- # A condition to manually save entity data under data_path/entities after optimization
- if "entity_save" not in runtimeparams.keys():
- entity_save = ""
- else:
- entity_save = runtimeparams["entity_save"]
- params["passed_data"]["entity_save"] = entity_save
+
  # Serialize the final params
- params = json.dumps(params)
+ params = json.dumps(params, default=str)
  return params, retrieve_hass_conf, optim_conf, plant_conf


- def get_yaml_parse(emhass_conf: dict, use_secrets: Optional[bool] = True,
- params: Optional[str] = None) -> Tuple[dict, dict, dict]:
+ def get_yaml_parse(params: str, logger: logging.Logger) -> Tuple[dict, dict, dict]:
  """
- Perform parsing of the config.yaml file.
+ Perform parsing of the params into the configuration categories.

- :param emhass_conf: Dictionary containing the needed emhass paths
- :type emhass_conf: dict
- :param use_secrets: Indicate if we should use a secrets file or not.
- Set to False for unit tests.
- :type use_secrets: bool, optional
- :param params: Configuration parameters passed from data/options.json
+ :param params: Built configuration parameters
  :type params: str
+ :param logger: The logger object
+ :type logger: logging.Logger
  :return: A tuple with the dictionaries containing the parsed data
  :rtype: tuple(dict)

  """
- if params is None:
- with open(emhass_conf["config_path"], 'r') as file:
- input_conf = yaml.load(file, Loader=yaml.FullLoader)
- else:
- input_conf = json.loads(params)
- if use_secrets:
- if params is None:
- with open(emhass_conf["config_path"].parent / 'secrets_emhass.yaml', 'r') as file: # Assume secrets and config file paths are the same
- input_secrets = yaml.load(file, Loader=yaml.FullLoader)
+ if params:
+ if type(params) is str:
+ input_conf = json.loads(params)
  else:
- input_secrets = input_conf.pop("params_secrets", None)
-
- if type(input_conf["retrieve_hass_conf"]) == list: # if using old config version
- retrieve_hass_conf = dict(
- {key: d[key] for d in input_conf["retrieve_hass_conf"] for key in d}
- )
+ input_conf = params
  else:
- retrieve_hass_conf = input_conf.get("retrieve_hass_conf", {})
+ input_conf = {}
+ logger.error("No params have been detected for get_yaml_parse")
+ return False, False, False

- if use_secrets:
- retrieve_hass_conf.update(input_secrets)
- else:
- retrieve_hass_conf["hass_url"] = "http://supervisor/core/api"
- retrieve_hass_conf["long_lived_token"] = "${SUPERVISOR_TOKEN}"
- retrieve_hass_conf["time_zone"] = "Europe/Paris"
- retrieve_hass_conf["lat"] = 45.83
- retrieve_hass_conf["lon"] = 6.86
- retrieve_hass_conf["alt"] = 4807.8
- retrieve_hass_conf["freq"] = pd.to_timedelta(retrieve_hass_conf["freq"], "minutes")
- retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])
-
- if type(input_conf["optim_conf"]) == list:
- optim_conf = dict({key: d[key] for d in input_conf["optim_conf"] for key in d})
- else:
- optim_conf = input_conf.get("optim_conf", {})
+ optim_conf = input_conf.get("optim_conf", {})

- optim_conf["list_hp_periods"] = dict(
- (key, d[key]) for d in optim_conf["list_hp_periods"] for key in d
- )
- optim_conf["delta_forecast"] = pd.Timedelta(days=optim_conf["delta_forecast"])
+ retrieve_hass_conf = input_conf.get("retrieve_hass_conf", {})

- if type(input_conf["plant_conf"]) == list:
- plant_conf = dict({key: d[key] for d in input_conf["plant_conf"] for key in d})
- else:
- plant_conf = input_conf.get("plant_conf", {})
+ plant_conf = input_conf.get("plant_conf", {})
+
+ # Format time parameters
+ if optim_conf.get('delta_forecast_daily',None) is not None:
+ optim_conf['delta_forecast_daily'] = pd.Timedelta(days=optim_conf['delta_forecast_daily'])
+ if retrieve_hass_conf.get('optimization_time_step',None) is not None:
+ retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(retrieve_hass_conf['optimization_time_step'], "minutes")
+ if retrieve_hass_conf.get('time_zone',None) is not None:
+ retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])

  return retrieve_hass_conf, optim_conf, plant_conf

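With this rewrite, get_yaml_parse no longer reads config.yaml or a secrets file itself; it only splits an already built params object into the three configuration dictionaries. A sketch of the new calling convention (params here is assumed to be the JSON string produced by build_params):

    # Returns (False, False, False) when no params are provided
    retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params, logger)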
@@ -762,156 +808,402 @@ def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLF
  injection_dict["figure_0"] = image_path_0
  return injection_dict

+ def build_config(emhass_conf: dict, logger: logging.Logger, defaults_path: str, config_path: Optional[str] = None,
+ legacy_config_path: Optional[str] = None) -> dict:
+ """
+ Retrieve parameters from configuration files.
+ priority order (low - high) = defaults_path, config_path, legacy_config_path

- def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
+ :param emhass_conf: Dictionary containing the needed emhass paths
+ :type emhass_conf: dict
+ :param logger: The logger object
+ :type logger: logging.Logger
+ :param defaults_path: path to config file for parameter defaults (config_defaults.json)
+ :type defaults_path: str
+ :param config_path: path to the main configuration file (config.json)
+ :type config_path: str
+ :param legacy_config_path: path to legacy config file (config_emhass.yaml)
+ :type legacy_config_path: str
+ :return: The built config dictionary
+ :rtype: dict
+ """
+
+ # Read default parameters (default root_path/data/config_defaults.json)
+ if defaults_path and pathlib.Path(defaults_path).is_file():
+ with defaults_path.open('r') as data:
+ config = json.load(data)
+ else:
+ logger.error("config_defaults.json. does not exist ")
+ return False
+
+ # Read user config parameters if provided (default /share/config.json)
+ if config_path and pathlib.Path(config_path).is_file():
+ with config_path.open('r') as data:
+ # Set override default parameters (config_defaults) with user given parameters (config.json)
+ logger.info("Obtaining parameters from config.json:")
+ config.update(json.load(data))
+ else:
+ logger.info("config.json does not exist, or has not been passed")
+ logger.info("you may like to generate the config.json file on the configuration page")
+
+ # Check to see if legacy config_emhass.yaml was provided (default /app/config_emhass.yaml)
+ # Convert legacy parameter definitions/format to match config.json
+ if legacy_config_path and pathlib.Path(legacy_config_path).is_file():
+ with open(legacy_config_path, 'r') as data:
+ legacy_config = yaml.load(data, Loader=yaml.FullLoader)
+ legacy_config_parameters = build_legacy_config_params(emhass_conf,legacy_config,logger)
+ if type(legacy_config_parameters) is not bool:
+ logger.info("Obtaining parameters from config_emhass.yaml:")
+ config.update(legacy_config_parameters)
+
+ return config
+
+
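A minimal sketch of how the new build_config entry point might be wired up. The paths are illustrative; note that the function calls .open() on defaults_path and config_path, so pathlib.Path objects are expected despite the str type hints:

    import pathlib
    emhass_conf = {"associations_path": pathlib.Path("/app/data/associations.csv")}  # illustrative
    config = build_config(
        emhass_conf, logger,
        defaults_path=pathlib.Path("/app/data/config_defaults.json"),
        config_path=pathlib.Path("/share/config.json"),
        legacy_config_path=pathlib.Path("/app/config_emhass.yaml"))
    if config is False:
        raise SystemExit("could not build configuration")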
+ def build_legacy_config_params(emhass_conf: dict, legacy_config: dict,
+ logger: logging.Logger) -> dict:
+ """
+ Build a config dictionary with legacy config_emhass.yaml file.
+ Uses the associations file to convert parameter naming conventions (to config.json/config_defaults.json).
+ Extracts the parameter values and formats to match config.json.
+
+ :param emhass_conf: Dictionary containing the needed emhass paths
+ :type emhass_conf: dict
+ :param legacy_config: The legacy config dictionary
+ :type legacy_config: dict
+ :param logger: The logger object
+ :type logger: logging.Logger
+ :return: The built config dictionary
+ :rtype: dict
+ """
+
+
+ # Association file key reference
+ # association[0] = config categories
+ # association[1] = legacy parameter name
+ # association[2] = parameter (config.json/config_defaults.json)
+ # association[3] = parameter list name if exists (not used, from legacy options.json)
+
+ # Check each config category exists, else create blank dict for categories (avoid errors)
+ legacy_config['retrieve_hass_conf'] = legacy_config.get('retrieve_hass_conf',{})
+ legacy_config['optim_conf'] = legacy_config.get('optim_conf',{})
+ legacy_config['plant_conf'] = legacy_config.get('plant_conf',{})
+ config = {}
+
+ # Use associations list to map legacy parameter name with config.json parameter name
+ if emhass_conf['associations_path'].exists():
+ with emhass_conf['associations_path'].open('r') as data:
+ associations = list(csv.reader(data, delimiter=","))
+ else:
+ logger.error("Cant find associations file (associations.csv) in: " + str(emhass_conf['associations_path']))
+ return False
+
+ # Loop through all parameters in association file
+ # Append config with existing legacy config parameters (converting alternative parameter naming conventions with associations list)
+ for association in associations:
+ # if legacy config category exists and if legacy parameter exists in config category
+ if legacy_config.get(association[0],None) is not None and legacy_config[association[0]].get(association[1],None) is not None:
+ config[association[2]] = legacy_config[association[0]][association[1]]
+
+ # If config now has load_peak_hour_periods, extract from list of dict
+ if association[2] == "load_peak_hour_periods" and type(config[association[2]]) is list:
+ config[association[2]] = dict((key, d[key]) for d in config[association[2]] for key in d)
+
+ return config
+ # params['associations_dict'] = associations_dict
+
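For orientation, each row of associations.csv holds (config category, legacy name, current name[, legacy list name]); a couple of illustrative rows, consistent with the renames visible elsewhere in this diff, would look roughly like:

    # config category,    legacy name,       current name,                       legacy list name (optional)
    # retrieve_hass_conf, freq,              optimization_time_step
    # optim_conf,         P_deferrable_nom,  nominal_power_of_deferrable_loads,  list_nominal_power_of_deferrable_loads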
+ def param_to_config(param: dict,
  logger: logging.Logger) -> dict:
  """
- Build the main params dictionary from the loaded options.json when using the add-on.
+ A function that extracts the parameters from param back to the config.json format.
+ Extracts parameters from config categories.
+ Attempts to exclude secrets housed in retrieve_hass_conf.
+
+ :param param: Built configuration parameters
+ :type param: dict
+ :param logger: The logger object
+ :type logger: logging.Logger
+ :return: The built config dictionary
+ :rtype: dict
+ """
+ logger.debug("Converting param to config")
+
+ return_config = {}
+
+ config_catagories = ["retrieve_hass_conf","optim_conf","plant_conf"]
+ secret_params = ["hass_url", "time_zone", "Latitude", "Longitude", "Altitude", "long_lived_token", "solcast_api_key", "solcast_rooftop_id", "solar_forecast_kwp"]
+
+ # Loop through config categories that contain config params, and extract
+ for config in config_catagories:
+ for parameter in param[config]:
+ # If parameter is not a secret, append to return_config
+ if parameter not in secret_params:
+ return_config[str(parameter)] = param[config][parameter]
+
+ return return_config
+
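A short sketch of the round trip this enables: the built params can be flattened back into config.json form with the secrets stripped (the names and target path below are illustrative):

    params = json.loads(params_json)              # params_json as produced by build_params()
    config = param_to_config(params, logger)
    with open("/share/config.json", "w") as fp:   # illustrative target
        json.dump(config, fp, indent=4)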
+ def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[dict] = {}, options_path: Optional[str] = None,
+ secrets_path: Optional[str] = None, no_response: Optional[bool] = False) -> Tuple[dict, dict]:
+ """
+ Retrieve and build parameters from secrets locations (ENV, ARG, Secrets file (secrets_emhass.yaml/options.json) and/or Home Assistant (via API))
+ priority order (low to high) = Defaults (written in function), ENV, Options json file, Home Assistant API, Secrets yaml file, Arguments
+
+ :param emhass_conf: Dictionary containing the needed emhass paths
+ :type emhass_conf: dict
+ :param logger: The logger object
+ :type logger: logging.Logger
+ :param argument: dictionary of secrets arguments passed (url,key)
+ :type argument: dict
+ :param options_path: path to the options file (options.json) (usually provided by EMHASS-Add-on)
+ :type options_path: str
+ :param secrets_path: path to secrets file (secrets_emhass.yaml)
+ :type secrets_path: str
+ :param no_response: bypass get request to Home Assistant (json response errors)
+ :type no_response: bool
+ :return: Updated emhass_conf, the built secrets dictionary
+ :rtype: Tuple[dict, dict]
+ """
+
+ # Set defaults to be overwritten
+ params_secrets = {
+ "hass_url": "https://myhass.duckdns.org/",
+ "long_lived_token": "thatverylongtokenhere",
+ "time_zone": "Europe/Paris",
+ "Latitude": 45.83,
+ "Longitude": 6.86,
+ "Altitude": 4807.8,
+ "solcast_api_key": "yoursecretsolcastapikey",
+ "solcast_rooftop_id": "yourrooftopid",
+ "solar_forecast_kwp": 5
+ }
+
+ # Obtain Secrets from ENV?
+ params_secrets['hass_url'] = os.getenv("EMHASS_URL",params_secrets['hass_url'])
+ params_secrets['long_lived_token'] = os.getenv("SUPERVISOR_TOKEN", params_secrets['long_lived_token'])
+ params_secrets['time_zone'] = os.getenv("TIME_ZONE", params_secrets['time_zone'])
+ params_secrets['Latitude'] = float(os.getenv("LAT", params_secrets['Latitude']))
+ params_secrets['Longitude'] = float(os.getenv("LON", params_secrets['Longitude']))
+ params_secrets['Altitude'] = float(os.getenv("ALT", params_secrets['Altitude']))
+
+ # Obtain secrets from options.json (Generated from EMHASS-Add-on, Home Assistant addon Configuration page) or Home Assistant API (from local Supervisor API)?
+ # Use local supervisor API to obtain secrets from Home Assistant if hass_url in options.json is empty and SUPERVISOR_TOKEN ENV exists (provided by Home Assistant when running the container as addon)
+ options = {}
+ if options_path and pathlib.Path(options_path).is_file():
+ with options_path.open('r') as data:
+ options = json.load(data)
+
+ # Obtain secrets from Home Assistant?
+ url_from_options = options.get('hass_url', 'empty')
+ key_from_options = options.get('long_lived_token', 'empty')
+
+ # If data path specified by options.json, overwrite emhass_conf['data_path']
+ if options.get('data_path', None) != None and pathlib.Path(options['data_path']).exists():
+ emhass_conf['data_path'] = pathlib.Path(options['data_path']);
+
+ # Check to use Home Assistant local API
+ if not no_response and \
+ (url_from_options == 'empty' or url_from_options == '' or url_from_options == "http://supervisor/core/api") and \
+ os.getenv("SUPERVISOR_TOKEN", None) is not None:
+
+ params_secrets['long_lived_token'] = os.getenv("SUPERVISOR_TOKEN",None)
+ params_secrets['hass_url'] = "http://supervisor/core/api"
+ headers = {
+ "Authorization": "Bearer " + params_secrets['long_lived_token'],
+ "content-type": "application/json"
+ }
+ # Obtain secrets from Home Assistant via API
+ logger.debug("Obtaining secrets from Home Assistant Supervisor API")
+ response = get((params_secrets['hass_url'] + "/config"), headers=headers)
+ if response.status_code < 400:
+ config_hass = response.json()
+ params_secrets = {
+ 'hass_url': params_secrets['hass_url'],
+ 'long_lived_token': params_secrets['long_lived_token'],
+ 'time_zone': config_hass['time_zone'],
+ 'Latitude': config_hass['latitude'],
+ 'Longitude': config_hass['longitude'],
+ 'Altitude': config_hass['elevation']
+ }
+ else:
+ # Obtain the url and key secrets if any from options.json (default /app/options.json)
+ logger.warning("Error obtaining secrets from Home Assistant Supervisor API")
+ logger.debug("Obtaining url and key secrets from options.json")
+ if url_from_options != 'empty' and url_from_options != '':
+ params_secrets['hass_url'] = url_from_options
+ if key_from_options != 'empty' and key_from_options != '':
+ params_secrets['long_lived_token'] = key_from_options
+ if options.get('time_zone',"empty") != "empty" and options['time_zone'] != '':
+ params_secrets['time_zone'] = options['time_zone']
+ if options.get('Latitude',None) is not None and bool(options['Latitude']):
+ params_secrets['Latitude'] = options['Latitude']
+ if options.get('Longitude',None) is not None and bool(options['Longitude']):
+ params_secrets['Longitude'] = options['Longitude']
+ if options.get('Altitude',None) is not None and bool(options['Altitude']):
+ params_secrets['Altitude'] = options['Altitude']
+ else:
+ # Obtain the url and key secrets if any from options.json (default /app/options.json)
+ logger.debug("Obtaining url and key secrets from options.json")
+ if url_from_options != 'empty' and url_from_options != '':
+ params_secrets['hass_url'] = url_from_options
+ if key_from_options != 'empty' and key_from_options != '':
+ params_secrets['long_lived_token'] = key_from_options
+ if options.get('time_zone',"empty") != "empty" and options['time_zone'] != '':
+ params_secrets['time_zone'] = options['time_zone']
+ if options.get('Latitude',None) is not None and bool(options['Latitude']):
+ params_secrets['Latitude'] = options['Latitude']
+ if options.get('Longitude',None) is not None and bool(options['Longitude']):
+ params_secrets['Longitude'] = options['Longitude']
+ if options.get('Altitude',None) is not None and bool(options['Altitude']):
+ params_secrets['Altitude'] = options['Altitude']
+
+ # Obtain the forecast secrets (if any) from options.json (default /app/options.json)
+ forecast_secrets = ["solcast_api_key","solcast_rooftop_id","solar_forecast_kwp"]
+ if any(x in forecast_secrets for x in list(options.keys())):
+ logger.debug("Obtaining forecast secrets from options.json")
+ if options.get('solcast_api_key',"empty") != "empty" and options['solcast_api_key'] != '':
+ params_secrets['solcast_api_key'] = options['solcast_api_key']
+ if options.get('solcast_rooftop_id',"empty") != "empty" and options['solcast_rooftop_id'] != '':
+ params_secrets['solcast_rooftop_id'] = options['solcast_rooftop_id']
+ if options.get('solar_forecast_kwp',None) and bool(options['solar_forecast_kwp']):
+ params_secrets['solar_forecast_kwp'] = options['solar_forecast_kwp']
+
+ # Obtain secrets from secrets_emhass.yaml? (default /app/secrets_emhass.yaml)
+ if secrets_path and pathlib.Path(secrets_path).is_file():
+ logger.debug("Obtaining secrets from secrets file")
+ with open(pathlib.Path(secrets_path), 'r') as file:
+ params_secrets.update(yaml.load(file, Loader=yaml.FullLoader))
+
+ # Receive key and url from ARG/arguments?
+ if argument.get('url',None) is not None:
+ params_secrets['hass_url'] = argument['url']
+ logger.debug("Obtaining url from passed argument")
+ if argument.get('key',None) is not None:
+ params_secrets['long_lived_token'] = argument['key']
+ logger.debug("Obtaining long_lived_token from passed argument")
+
+ return emhass_conf, params_secrets
+
+
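A sketch of how build_secrets slots in ahead of the reworked build_params shown just below (the paths and the argument dict are illustrative; as with build_config, pathlib.Path objects are expected for the file arguments):

    emhass_conf, params_secrets = build_secrets(
        emhass_conf, logger,
        argument={"url": "https://myhass.example.org/", "key": "alonglivedtoken"},  # optional overrides
        options_path=pathlib.Path("/app/options.json"),
        secrets_path=pathlib.Path("/app/secrets_emhass.yaml"))
    params = build_params(emhass_conf, params_secrets, config, logger)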

- :param params: The main params dictionary
- :type params: dict
- :param params_secrets: The dictionary containing the secret protected variables
+ def build_params(emhass_conf: dict, params_secrets: dict, config: dict,
+ logger: logging.Logger) -> dict:
+ """
+ Build the main params dictionary from the config and secrets.
+ Appends configuration categories used by emhass to the parameters (with use of the associations file as a reference).
+
+ :param emhass_conf: Dictionary containing the needed emhass paths
+ :type emhass_conf: dict
+ :param params_secrets: The dictionary containing the built secret variables
  :type params_secrets: dict
- :param options: The load dictionary from options.json
- :type options: dict
- :param addon: A "bool" to select if we are using the add-on
- :type addon: int
+ :param config: The dictionary of built config parameters
+ :type config: dict
  :param logger: The logger object
  :type logger: logging.Logger
- :return: The builded dictionary
+ :return: The built param dictionary
  :rtype: dict
  """
- if addon == 1:
- # Updating variables in retrieve_hass_conf
- params["retrieve_hass_conf"]["freq"] = options.get("optimization_time_step", params["retrieve_hass_conf"]["freq"])
- params["retrieve_hass_conf"]["days_to_retrieve"] = options.get("historic_days_to_retrieve", params["retrieve_hass_conf"]["days_to_retrieve"])
- params["retrieve_hass_conf"]["var_PV"] = options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_PV"])
- params["retrieve_hass_conf"]["var_load"] = options.get("sensor_power_load_no_var_loads", params["retrieve_hass_conf"]["var_load"])
- params["retrieve_hass_conf"]["load_negative"] = options.get("load_negative", params["retrieve_hass_conf"]["load_negative"])
- params["retrieve_hass_conf"]["set_zero_min"] = options.get("set_zero_min", params["retrieve_hass_conf"]["set_zero_min"])
- params["retrieve_hass_conf"]["var_replace_zero"] = [options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_replace_zero"])]
- params["retrieve_hass_conf"]["var_interp"] = [
- options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_PV"]),
- options.get("sensor_power_load_no_var_loads", params["retrieve_hass_conf"]["var_load"])
- ]
- params["retrieve_hass_conf"]["method_ts_round"] = options.get("method_ts_round", params["retrieve_hass_conf"]["method_ts_round"])
- params["retrieve_hass_conf"]["continual_publish"] = options.get("continual_publish", params["retrieve_hass_conf"]["continual_publish"])
- # Update params Secrets if specified
- params["params_secrets"] = params_secrets
- params["params_secrets"]["time_zone"] = options.get("time_zone", params_secrets["time_zone"])
- params["params_secrets"]["lat"] = options.get("Latitude", params_secrets["lat"])
- params["params_secrets"]["lon"] = options.get("Longitude", params_secrets["lon"])
- params["params_secrets"]["alt"] = options.get("Altitude", params_secrets["alt"])
- # Updating variables in optim_conf
- params["optim_conf"]["set_use_battery"] = options.get("set_use_battery", params["optim_conf"]["set_use_battery"])
- params["optim_conf"]["num_def_loads"] = options.get("number_of_deferrable_loads", params["optim_conf"]["num_def_loads"])
- if options.get("list_nominal_power_of_deferrable_loads", None) != None:
- params["optim_conf"]["P_deferrable_nom"] = [i["nominal_power_of_deferrable_loads"] for i in options.get("list_nominal_power_of_deferrable_loads")]
- if options.get("list_operating_hours_of_each_deferrable_load", None) != None:
- params["optim_conf"]["def_total_hours"] = [i["operating_hours_of_each_deferrable_load"] for i in options.get("list_operating_hours_of_each_deferrable_load")]
- if options.get("list_treat_deferrable_load_as_semi_cont", None) != None:
- params["optim_conf"]["treat_def_as_semi_cont"] = [i["treat_deferrable_load_as_semi_cont"] for i in options.get("list_treat_deferrable_load_as_semi_cont")]
- if options.get("list_set_deferrable_load_single_constant", None) != None:
- params["optim_conf"]["set_def_constant"] = [i["set_deferrable_load_single_constant"] for i in options.get("list_set_deferrable_load_single_constant")]
- if options.get("list_set_deferrable_startup_penalty", None) != None:
- params["optim_conf"]["def_start_penalty"] = [i["set_deferrable_startup_penalty"] for i in options.get("list_set_deferrable_startup_penalty")]
- params["optim_conf"]["weather_forecast_method"] = options.get("weather_forecast_method", params["optim_conf"]["weather_forecast_method"])
- # Update optional param secrets
- if params["optim_conf"]["weather_forecast_method"] == "solcast":
- params["params_secrets"]["solcast_api_key"] = options.get("optional_solcast_api_key", params_secrets.get("solcast_api_key", "123456"))
- params["params_secrets"]["solcast_rooftop_id"] = options.get("optional_solcast_rooftop_id", params_secrets.get("solcast_rooftop_id", "123456"))
- elif params["optim_conf"]["weather_forecast_method"] == "solar.forecast":
- params["params_secrets"]["solar_forecast_kwp"] = options.get("optional_solar_forecast_kwp", params_secrets.get("solar_forecast_kwp", 5))
- params["optim_conf"]["load_forecast_method"] = options.get("load_forecast_method", params["optim_conf"]["load_forecast_method"])
- params["optim_conf"]["delta_forecast"] = options.get("delta_forecast_daily", params["optim_conf"]["delta_forecast"])
- params["optim_conf"]["load_cost_forecast_method"] = options.get("load_cost_forecast_method", params["optim_conf"]["load_cost_forecast_method"])
- if (options.get("list_peak_hours_periods_start_hours", None) != None and options.get("list_peak_hours_periods_end_hours", None) != None):
- start_hours_list = [i["peak_hours_periods_start_hours"] for i in options["list_peak_hours_periods_start_hours"]]
- end_hours_list = [i["peak_hours_periods_end_hours"] for i in options["list_peak_hours_periods_end_hours"]]
+ if type(params_secrets) is not dict:
+ params_secrets = {}
+
+ params = {}
+ # Start with blank config categories
+ params['retrieve_hass_conf'] = {}
+ params['params_secrets'] = {}
+ params['optim_conf'] = {}
+ params['plant_conf'] = {}
+
+ # Obtain associations to categorize parameters to their corresponding config categories
+ if emhass_conf.get('associations_path', get_root(__file__, num_parent=2) / 'data/associations.csv').exists():
+ with emhass_conf['associations_path'].open('r') as data:
+ associations = list(csv.reader(data, delimiter=","))
+ else:
+ logger.error("Unable to obtain the associations file (associations.csv) in: " + str(emhass_conf['associations_path']))
+ return False
+
+ # Association file key reference
+ # association[0] = config categories
+ # association[1] = legacy parameter name
+ # association[2] = parameter (config.json/config_defaults.json)
+ # association[3] = parameter list name if exists (not used, from legacy options.json)
+
+ # Use association list to append parameters from config into params (with corresponding config categories)
+ for association in associations:
+ # If parameter has list_ name and parameter in config is presented with its list name
+ # (ie, config parameter is in legacy options.json format)
+ if len(association) == 4 and config.get(association[3],None) is not None:
+ # Extract lists of dictionaries
+ if config[association[3]] and type(config[association[3]][0]) is dict:
+ params[association[0]][association[2]] = [i[association[2]] for i in config[association[3]]]
+ else:
+ params[association[0]][association[2]] = config[association[3]]
+ # Else, directly set value of config parameter to param
+ elif config.get(association[2],None) is not None:
+ params[association[0]][association[2]] = config[association[2]]
+
+ # Check if we need to create `list_hp_periods` from config (ie. legacy options.json format)
+ if params.get('optim_conf',None) is not None and config.get("list_peak_hours_periods_start_hours", None) is not None and config.get("list_peak_hours_periods_end_hours", None) is not None:
+ start_hours_list = [i["peak_hours_periods_start_hours"] for i in config["list_peak_hours_periods_start_hours"]]
+ end_hours_list = [i["peak_hours_periods_end_hours"] for i in config["list_peak_hours_periods_end_hours"]]
  num_peak_hours = len(start_hours_list)
- list_hp_periods_list = [{'period_hp_'+str(i+1):[{'start':start_hours_list[i]},{'end':end_hours_list[i]}]} for i in range(num_peak_hours)]
- params['optim_conf']['list_hp_periods'] = list_hp_periods_list
- params['optim_conf']['load_cost_hp'] = options.get('load_peak_hours_cost', params['optim_conf']['load_cost_hp'])
- params['optim_conf']['load_cost_hc'] = options.get('load_offpeak_hours_cost', params['optim_conf']['load_cost_hc'])
- params['optim_conf']['prod_price_forecast_method'] = options.get('production_price_forecast_method', params['optim_conf']['prod_price_forecast_method'])
- params['optim_conf']['prod_sell_price'] = options.get('photovoltaic_production_sell_price', params['optim_conf']['prod_sell_price'])
- params['optim_conf']['set_total_pv_sell'] = options.get('set_total_pv_sell', params['optim_conf']['set_total_pv_sell'])
- params['optim_conf']['lp_solver'] = options.get('lp_solver', params['optim_conf']['lp_solver'])
- params['optim_conf']['lp_solver_path'] = options.get('lp_solver_path', params['optim_conf']['lp_solver_path'])
- params['optim_conf']['set_nocharge_from_grid'] = options.get('set_nocharge_from_grid', params['optim_conf']['set_nocharge_from_grid'])
- params['optim_conf']['set_nodischarge_to_grid'] = options.get('set_nodischarge_to_grid', params['optim_conf']['set_nodischarge_to_grid'])
- params['optim_conf']['set_battery_dynamic'] = options.get('set_battery_dynamic', params['optim_conf']['set_battery_dynamic'])
- params['optim_conf']['battery_dynamic_max'] = options.get('battery_dynamic_max', params['optim_conf']['battery_dynamic_max'])
- params['optim_conf']['battery_dynamic_min'] = options.get('battery_dynamic_min', params['optim_conf']['battery_dynamic_min'])
- params['optim_conf']['weight_battery_discharge'] = options.get('weight_battery_discharge', params['optim_conf']['weight_battery_discharge'])
- params['optim_conf']['weight_battery_charge'] = options.get('weight_battery_charge', params['optim_conf']['weight_battery_charge'])
- if options.get('list_start_timesteps_of_each_deferrable_load',None) != None:
- params['optim_conf']['def_start_timestep'] = [i['start_timesteps_of_each_deferrable_load'] for i in options.get('list_start_timesteps_of_each_deferrable_load')]
- if options.get('list_end_timesteps_of_each_deferrable_load',None) != None:
- params['optim_conf']['def_end_timestep'] = [i['end_timesteps_of_each_deferrable_load'] for i in options.get('list_end_timesteps_of_each_deferrable_load')]
- # Updating variables in plant_conf
- params['plant_conf']['P_from_grid_max'] = options.get('maximum_power_from_grid', params['plant_conf']['P_from_grid_max'])
- params['plant_conf']['P_to_grid_max'] = options.get('maximum_power_to_grid', params['plant_conf']['P_to_grid_max'])
- if options.get('list_pv_module_model',None) != None:
- params['plant_conf']['module_model'] = [i['pv_module_model'] for i in options.get('list_pv_module_model')]
- if options.get('list_pv_inverter_model',None) != None:
- params['plant_conf']['inverter_model'] = [i['pv_inverter_model'] for i in options.get('list_pv_inverter_model')]
- if options.get('list_surface_tilt',None) != None:
- params['plant_conf']['surface_tilt'] = [i['surface_tilt'] for i in options.get('list_surface_tilt')]
- if options.get('list_surface_azimuth',None) != None:
- params['plant_conf']['surface_azimuth'] = [i['surface_azimuth'] for i in options.get('list_surface_azimuth')]
- if options.get('list_modules_per_string',None) != None:
- params['plant_conf']['modules_per_string'] = [i['modules_per_string'] for i in options.get('list_modules_per_string')]
- if options.get('list_strings_per_inverter',None) != None:
- params['plant_conf']['strings_per_inverter'] = [i['strings_per_inverter'] for i in options.get('list_strings_per_inverter')]
- params["plant_conf"]["inverter_is_hybrid"] = options.get("inverter_is_hybrid", params["plant_conf"]["inverter_is_hybrid"])
- params["plant_conf"]["compute_curtailment"] = options.get("compute_curtailment", params["plant_conf"]["compute_curtailment"])
- params['plant_conf']['Pd_max'] = options.get('battery_discharge_power_max', params['plant_conf']['Pd_max'])
- params['plant_conf']['Pc_max'] = options.get('battery_charge_power_max', params['plant_conf']['Pc_max'])
- params['plant_conf']['eta_disch'] = options.get('battery_discharge_efficiency', params['plant_conf']['eta_disch'])
- params['plant_conf']['eta_ch'] = options.get('battery_charge_efficiency', params['plant_conf']['eta_ch'])
- params['plant_conf']['Enom'] = options.get('battery_nominal_energy_capacity', params['plant_conf']['Enom'])
- params['plant_conf']['SOCmin'] = options.get('battery_minimum_state_of_charge', params['plant_conf']['SOCmin'])
- params['plant_conf']['SOCmax'] = options.get('battery_maximum_state_of_charge', params['plant_conf']['SOCmax'])
- params['plant_conf']['SOCtarget'] = options.get('battery_target_state_of_charge', params['plant_conf']['SOCtarget'])
- # Check parameter lists have the same amounts as deferrable loads
- # If not, set defaults it fill in gaps
- if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_start_timestep']):
- logger.warning("def_start_timestep / list_start_timesteps_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter")
- for x in range(len(params['optim_conf']['def_start_timestep']), params['optim_conf']['num_def_loads']):
- params['optim_conf']['def_start_timestep'].append(0)
- if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_end_timestep']):
- logger.warning("def_end_timestep / list_end_timesteps_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter")
- for x in range(len(params['optim_conf']['def_end_timestep']), params['optim_conf']['num_def_loads']):
- params['optim_conf']['def_end_timestep'].append(0)
- if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['set_def_constant']):
- logger.warning("set_def_constant / list_set_deferrable_load_single_constant does not match number in num_def_loads, adding default values to parameter")
- for x in range(len(params['optim_conf']['set_def_constant']), params['optim_conf']['num_def_loads']):
890
- params['optim_conf']['set_def_constant'].append(False)
891
- if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['treat_def_as_semi_cont']):
892
- logger.warning("treat_def_as_semi_cont / list_treat_deferrable_load_as_semi_cont does not match number in num_def_loads, adding default values to parameter")
893
- for x in range(len(params['optim_conf']['treat_def_as_semi_cont']), params['optim_conf']['num_def_loads']):
894
- params['optim_conf']['treat_def_as_semi_cont'].append(True)
895
- if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_start_penalty']):
896
- logger.warning("def_start_penalty / list_set_deferrable_startup_penalty does not match number in num_def_loads, adding default values to parameter")
897
- for x in range(len(params['optim_conf']['def_start_penalty']), params['optim_conf']['num_def_loads']):
898
- params['optim_conf']['def_start_penalty'].append(0.0)
899
- # days_to_retrieve should be no less then 2
900
- if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_total_hours']):
901
- logger.warning("def_total_hours / list_operating_hours_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter")
902
- for x in range(len(params['optim_conf']['def_total_hours']), params['optim_conf']['num_def_loads']):
903
- params['optim_conf']['def_total_hours'].append(0)
904
- if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['P_deferrable_nom']):
905
- logger.warning("P_deferrable_nom / list_nominal_power_of_deferrable_loads does not match number in num_def_loads, adding default values to parameter")
906
- for x in range(len(params['optim_conf']['P_deferrable_nom']), params['optim_conf']['num_def_loads']):
907
- params['optim_conf']['P_deferrable_nom'].append(0)
908
- # days_to_retrieve should be no less then 2
909
- if params["retrieve_hass_conf"]["days_to_retrieve"] < 2:
910
- params["retrieve_hass_conf"]["days_to_retrieve"] = 2
1147
+ list_hp_periods_list = {'period_hp_'+str(i+1):[{'start':start_hours_list[i]},{'end':end_hours_list[i]}] for i in range(num_peak_hours)}
1148
+ params['optim_conf']['load_peak_hour_periods'] = list_hp_periods_list
1149
+ else:
1150
+ # Otherwise, check whether params already contains load_peak_hour_periods from the config
1151
+ if params['optim_conf'].get('load_peak_hour_periods',None) is None:
1152
+ logger.warning("Unable to detect or create load_peak_hour_periods parameter")
1153
+
1154
+ # Format load_peak_hour_periods list to dict if necessary
1155
+ if params['optim_conf'].get('load_peak_hour_periods',None) is not None and isinstance(params['optim_conf']['load_peak_hour_periods'], list):
1156
+ params['optim_conf']['load_peak_hour_periods'] = dict((key, d[key]) for d in params['optim_conf']['load_peak_hour_periods'] for key in d)
1157
+
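
For clarity, here is a standalone sketch of the two conversions handled above: building load_peak_hour_periods from the legacy start/end hour lists, and flattening a list-of-dicts form into a single dict. The sample hours are made-up values:

    # Legacy options.json-style lists (sample values)
    list_start = [{"peak_hours_periods_start_hours": "02:54"},
                  {"peak_hours_periods_start_hours": "17:24"}]
    list_end = [{"peak_hours_periods_end_hours": "15:24"},
                {"peak_hours_periods_end_hours": "20:24"}]

    start_hours_list = [i["peak_hours_periods_start_hours"] for i in list_start]
    end_hours_list = [i["peak_hours_periods_end_hours"] for i in list_end]
    num_peak_hours = len(start_hours_list)

    # One 'period_hp_N' entry per peak period, as in the dict comprehension above
    load_peak_hour_periods = {
        'period_hp_' + str(i + 1): [{'start': start_hours_list[i]}, {'end': end_hours_list[i]}]
        for i in range(num_peak_hours)
    }
    # -> {'period_hp_1': [{'start': '02:54'}, {'end': '15:24'}],
    #     'period_hp_2': [{'start': '17:24'}, {'end': '20:24'}]}

    # If the parameter instead arrives as a list of single-key dicts, flatten it
    as_list = [{'period_hp_1': [{'start': '02:54'}, {'end': '15:24'}]},
               {'period_hp_2': [{'start': '17:24'}, {'end': '20:24'}]}]
    load_peak_hour_periods = dict((key, d[key]) for d in as_list for key in d)
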
1158
+ # Call function to check parameter lists that require the same length as deferrable loads
1159
+ # If not, pad them with default values to fill the gaps
1160
+ if params['optim_conf'].get('number_of_deferrable_loads',None) is not None:
1161
+ num_def_loads = params['optim_conf']['number_of_deferrable_loads']
1162
+ params['optim_conf']['start_timesteps_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'start_timesteps_of_each_deferrable_load',logger)
1163
+ params['optim_conf']['end_timesteps_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'end_timesteps_of_each_deferrable_load',logger)
1164
+ params['optim_conf']['set_deferrable_load_single_constant'] = check_def_loads(num_def_loads,params['optim_conf'],False,'set_deferrable_load_single_constant',logger)
1165
+ params['optim_conf']['treat_deferrable_load_as_semi_cont'] = check_def_loads(num_def_loads,params['optim_conf'],True,'treat_deferrable_load_as_semi_cont',logger)
1166
+ params['optim_conf']['set_deferrable_startup_penalty'] = check_def_loads(num_def_loads,params['optim_conf'],0.0,'set_deferrable_startup_penalty',logger)
1167
+ params['optim_conf']['operating_hours_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'operating_hours_of_each_deferrable_load',logger)
1168
+ params['optim_conf']['nominal_power_of_deferrable_loads'] = check_def_loads(num_def_loads,params['optim_conf'],0,'nominal_power_of_deferrable_loads',logger)
1169
+ else:
1170
+ logger.warning("unable to obtain parameter: number_of_deferrable_loads")
1171
+ # historic_days_to_retrieve should be no less than 2
1172
+ if params["retrieve_hass_conf"].get('historic_days_to_retrieve',None) is not None:
1173
+ if params["retrieve_hass_conf"]['historic_days_to_retrieve'] < 2:
1174
+ params["retrieve_hass_conf"]['historic_days_to_retrieve'] = 2
911
1175
  logger.warning("days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. Make sure your sensors also have at least 2 days of history")
912
1176
  else:
913
- params["params_secrets"] = params_secrets
914
- # The params dict
1177
+ logger.warning("unable to obtain parameter: historic_days_to_retrieve")
1178
+
1179
+ # Configure secrets, set params to the correct config category
1180
+ # retrieve_hass_conf
1181
+ params['retrieve_hass_conf']["hass_url"] = params_secrets.get("hass_url",None)
1182
+ params['retrieve_hass_conf']["long_lived_token"] = params_secrets.get("long_lived_token",None)
1183
+ params['retrieve_hass_conf']["time_zone"] = params_secrets.get("time_zone",None)
1184
+ params['retrieve_hass_conf']['Latitude'] = params_secrets.get('Latitude',None)
1185
+ params['retrieve_hass_conf']['Longitude'] = params_secrets.get('Longitude',None)
1186
+ params['retrieve_hass_conf']['Altitude'] = params_secrets.get('Altitude',None)
1187
+ # Update optional param secrets
1188
+ if params["optim_conf"].get('weather_forecast_method',None) is not None:
1189
+ if params["optim_conf"]['weather_forecast_method'] == "solcast":
1190
+ params["retrieve_hass_conf"]["solcast_api_key"] = params_secrets.get("solcast_api_key", "123456")
1191
+ params["params_secrets"]["solcast_api_key"] = params_secrets.get("solcast_api_key", "123456")
1192
+ params["retrieve_hass_conf"]["solcast_rooftop_id"] = params_secrets.get("solcast_rooftop_id", "123456")
1193
+ params["params_secrets"]["solcast_rooftop_id"] = params_secrets.get("solcast_rooftop_id", "123456")
1194
+ elif params["optim_conf"]['weather_forecast_method'] == "solar.forecast":
1195
+ params["retrieve_hass_conf"]["solar_forecast_kwp"] = params_secrets.get("solar_forecast_kwp", 5)
1196
+ params["params_secrets"]["solar_forecast_kwp"] = params_secrets.get("solar_forecast_kwp", 5)
1197
+ else:
1198
+ logger.warning("Unable to detect weather_forecast_method parameter")
1199
+ # Check whether any secret parameters still hold their default values
1200
+ secret_params = ["https://myhass.duckdns.org/","thatverylongtokenhere",45.83,6.86,4807.8]
1201
+ if any(x in secret_params for x in params['retrieve_hass_conf'].values()):
1202
+ logger.warning("Some secret parameters values are still matching their defaults")
1203
+
1204
+
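
The default-value check above only flags values that still equal one of the shipped placeholders. A short, self-contained sketch of the same idea; the sample retrieve_hass_conf values are invented for illustration:

    import logging

    logger = logging.getLogger(__name__)

    # Placeholder secrets as listed above; if any of these survive into
    # retrieve_hass_conf, the user most likely never set their own secrets.
    secret_defaults = ["https://myhass.duckdns.org/", "thatverylongtokenhere",
                       45.83, 6.86, 4807.8]

    retrieve_hass_conf = {
        "hass_url": "https://myhass.duckdns.org/",  # still the placeholder
        "long_lived_token": "abc123",               # user-provided
        "Latitude": 52.52,
    }

    if any(value in secret_defaults for value in retrieve_hass_conf.values()):
        logger.warning("Some secret parameter values still match their defaults")
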
1205
+ # Set empty dict objects for params passed_data
1206
+ # To be later populated with runtime parameters (treat_runtimeparams)
915
1207
  params["passed_data"] = {
916
1208
  "pv_power_forecast": None,
917
1209
  "load_power_forecast": None,
@@ -920,14 +1212,39 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
920
1212
  "prediction_horizon": None,
921
1213
  "soc_init": None,
922
1214
  "soc_final": None,
923
- "def_total_hours": None,
924
- "def_start_timestep": None,
925
- "def_end_timestep": None,
1215
+ 'operating_hours_of_each_deferrable_load': None,
1216
+ 'start_timesteps_of_each_deferrable_load': None,
1217
+ 'end_timesteps_of_each_deferrable_load': None,
926
1218
  "alpha": None,
927
1219
  "beta": None,
928
1220
  }
1221
+
929
1222
  return params
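
The passed_data keys above start out as None so that only values actually supplied at call time replace them later. A hedged sketch of that overwrite pattern follows; it illustrates the intent only and is not the actual treat_runtimeparams implementation:

    import json

    # Placeholders as built above (abridged)
    params = {"passed_data": {"prediction_horizon": None, "soc_init": None, "soc_final": None}}

    # Runtime parameters typically arrive as a JSON string (sample payload)
    runtimeparams = json.loads('{"prediction_horizon": 24, "soc_init": 0.4}')

    # Only the keys supplied at runtime replace their None placeholders
    for key, value in runtimeparams.items():
        if key in params["passed_data"]:
            params["passed_data"][key] = value

    print(params["passed_data"])
    # {'prediction_horizon': 24, 'soc_init': 0.4, 'soc_final': None}
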
930
1223
 
1224
+ def check_def_loads(num_def_loads: int, parameter: dict, default, parameter_name: str, logger):
1225
+ """
1226
+ Check that a parameter list matches the number of deferrable loads; if it is too short, pad it with default values to fit.
1227
+
1228
+ :param num_def_loads: Total number of deferrable loads
1229
+ :type num_def_loads: int
1230
+ :param parameter: config dict containing the parameter list to check
1231
+ :type parameter: dict
1232
+ :param default: default value used to pad missing entries
1233
+ :type default: obj
1234
+ :param parameter_name: name of the parameter
1235
+ :type parameter_name: str
1236
+ :param logger: The logger object
1237
+ :type logger: logging.Logger
1238
+ :return: the checked (and possibly padded) parameter list
1239
+ :rtype: list
1240
+
1241
+ """
1242
+ if parameter.get(parameter_name,None) is not None and type(parameter[parameter_name]) is list and num_def_loads > len(parameter[parameter_name]):
1243
+ logger.warning(parameter_name + " does not match number in num_def_loads, adding default values ("+ str(default) + ") to parameter")
1244
+ for x in range(len(parameter[parameter_name]), num_def_loads):
1245
+ parameter[parameter_name].append(default)
1246
+ return parameter[parameter_name]
1247
+
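
A brief usage sketch of check_def_loads as it is called from build_params above, assuming the function is available as defined here (e.g. imported from emhass.utils); the config values are made up. A list that is one entry short gets padded with the supplied default:

    import logging

    logger = logging.getLogger(__name__)

    optim_conf = {
        "number_of_deferrable_loads": 3,
        "nominal_power_of_deferrable_loads": [3000.0, 750.0],  # one entry short
    }

    padded = check_def_loads(
        optim_conf["number_of_deferrable_loads"],
        optim_conf,
        0,                                       # default appended for the missing slot
        "nominal_power_of_deferrable_loads",
        logger,
    )
    print(padded)  # [3000.0, 750.0, 0]
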
931
1248
 
932
1249
  def get_days_list(days_to_retrieve: int) -> pd.date_range:
933
1250
  """