emhass 0.10.6__py3-none-any.whl → 0.11.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emhass/command_line.py +178 -85
- emhass/data/associations.csv +63 -0
- emhass/data/config_defaults.json +117 -0
- emhass/forecast.py +38 -36
- emhass/machine_learning_forecaster.py +2 -1
- emhass/machine_learning_regressor.py +7 -2
- emhass/optimization.py +62 -62
- emhass/retrieve_hass.py +9 -4
- emhass/static/advanced.html +2 -1
- emhass/static/basic.html +4 -2
- emhass/static/configuration_list.html +44 -0
- emhass/static/configuration_script.js +872 -0
- emhass/static/data/param_definitions.json +424 -0
- emhass/static/script.js +345 -322
- emhass/static/style.css +267 -8
- emhass/templates/configuration.html +75 -0
- emhass/templates/index.html +15 -8
- emhass/utils.py +626 -302
- emhass/web_server.py +322 -213
- {emhass-0.10.6.dist-info → emhass-0.11.1.dist-info}/METADATA +207 -169
- emhass-0.11.1.dist-info/RECORD +32 -0
- {emhass-0.10.6.dist-info → emhass-0.11.1.dist-info}/WHEEL +1 -1
- emhass-0.10.6.dist-info/RECORD +0 -26
- {emhass-0.10.6.dist-info → emhass-0.11.1.dist-info}/LICENSE +0 -0
- {emhass-0.10.6.dist-info → emhass-0.11.1.dist-info}/entry_points.txt +0 -0
- {emhass-0.10.6.dist-info → emhass-0.11.1.dist-info}/top_level.txt +0 -0
emhass/utils.py
CHANGED
@@ -1,6 +1,8 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
+import csv
+import os
 from typing import Tuple, Optional
 from datetime import datetime, timedelta, timezone
 import logging
@@ -9,6 +11,7 @@ import json
 import copy
 import numpy as np
 import pandas as pd
+from requests import get
 import yaml
 import pytz
 import ast
@@ -19,7 +22,6 @@ pd.options.plotting.backend = "plotly"
|
|
19
22
|
|
20
23
|
from emhass.machine_learning_forecaster import MLForecaster
|
21
24
|
|
22
|
-
|
23
25
|
def get_root(file: str, num_parent: Optional[int] = 3) -> str:
|
24
26
|
"""
|
25
27
|
Get the root absolute path of the working directory.
|
@@ -62,7 +64,10 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] =
     logger.propagate = True
     logger.fileSetting = save_to_file
     if save_to_file:
-
+        if os.path.isdir(emhass_conf['data_path']):
+            ch = logging.FileHandler(emhass_conf['data_path'] / 'logger_emhass.log')
+        else:
+            raise Exception("Unable to access data_path: "+emhass_conf['data_path'])
     else:
         ch = logging.StreamHandler()
     if logging_level == "DEBUG":
@@ -89,7 +94,7 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] =
     return logger, ch
 
 
-def get_forecast_dates(freq: int, delta_forecast: int, timedelta_days: Optional[int] = 0
+def get_forecast_dates(freq: int, delta_forecast: int, time_zone: datetime.tzinfo, timedelta_days: Optional[int] = 0
                        ) -> pd.core.indexes.datetimes.DatetimeIndex:
     """
     Get the date_range list of the needed future dates using the delta_forecast parameter.
@@ -109,7 +114,7 @@ def get_forecast_dates(freq: int, delta_forecast: int, timedelta_days: Optional[
     end_forecast = (start_forecast + pd.Timedelta(days=delta_forecast)).replace(microsecond=0)
     forecast_dates = pd.date_range(start=start_forecast,
                                    end=end_forecast+timedelta(days=timedelta_days)-freq,
-                                   freq=freq).round(freq, ambiguous='infer', nonexistent='shift_forward')
+                                   freq=freq, tz=time_zone).tz_convert('utc').round(freq, ambiguous='infer', nonexistent='shift_forward').tz_convert(time_zone)
     return forecast_dates
 
 
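Note: get_forecast_dates now takes the configured time zone and rounds the forecast grid in UTC before converting back, which avoids rounding failures on ambiguous or nonexistent local times around DST transitions. A minimal sketch of that round-trip, with hypothetical values and an assumed 30-minute optimization step:

    import pandas as pd

    time_zone = "Europe/Paris"  # assumed example zone
    start = pd.Timestamp("2024-06-01 10:07", tz=time_zone)
    idx = pd.date_range(start=start, periods=4, freq="30min")
    # Round in UTC (no DST gaps there), then express the grid back in local time
    rounded = (idx.tz_convert("utc")
                  .round("30min", ambiguous="infer", nonexistent="shift_forward")
                  .tz_convert(time_zone))
    print(rounded[0])  # 2024-06-01 10:00:00+02:00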
@@ -121,7 +126,7 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
 
     :param runtimeparams: Json string containing the runtime parameters dict.
     :type runtimeparams: str
-    :param params:
+    :param params: Built configuration parameters
     :type params: str
     :param retrieve_hass_conf: Container for data retrieving parameters.
     :type retrieve_hass_conf: dict
@@ -137,14 +142,17 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
     :rtype: Tuple[str, dict]
 
     """
+    # check if passed params is a dict
     if (params != None) and (params != "null"):
-
+        if type(params) is str:
+            params = json.loads(params)
     else:
         params = {}
+
     # Some default data needed
     custom_deferrable_forecast_id = []
     custom_predicted_temperature_id = []
-    for k in range(optim_conf[
+    for k in range(optim_conf['number_of_deferrable_loads']):
         custom_deferrable_forecast_id.append(
             {
                 "entity_id": "sensor.p_deferrable{}".format(k),
@@ -224,12 +232,20 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
             params["passed_data"][key] = value
     else:
         params["passed_data"] = default_passed_dict
+
     if runtimeparams is not None:
-
-
-
-
-
+        if type(runtimeparams) is str:
+            runtimeparams = json.loads(runtimeparams)
+        # Format required date/time parameters
+        optimization_time_step = int(
+            retrieve_hass_conf['optimization_time_step'].seconds / 60.0)
+        delta_forecast = int(optim_conf['delta_forecast_daily'].days)
+        time_zone = retrieve_hass_conf["time_zone"]
+        forecast_dates = get_forecast_dates(
+            optimization_time_step, delta_forecast, time_zone)
+
+        # regressor-model-fit
         if set_type == "regressor-model-fit":
             if "csv_file" in runtimeparams:
                 csv_file = runtimeparams["csv_file"]
                 params["passed_data"]["csv_file"] = csv_file
@@ -249,6 +265,8 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
             else:
                 date_features = runtimeparams["date_features"]
                 params["passed_data"]["date_features"] = date_features
+
+        # regressor-model-predict
         if set_type == "regressor-model-predict":
             if "new_values" in runtimeparams:
                 new_values = runtimeparams["new_values"]
@@ -262,6 +280,7 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
             if "target" in runtimeparams:
                 target = runtimeparams["target"]
                 params["passed_data"]["target"] = target
+
         # Treating special data passed for MPC control case
         if set_type == "naive-mpc-optim":
             if "prediction_horizon" not in runtimeparams.keys():
@@ -270,74 +289,86 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
             prediction_horizon = runtimeparams["prediction_horizon"]
             params["passed_data"]["prediction_horizon"] = prediction_horizon
             if "soc_init" not in runtimeparams.keys():
-                soc_init = plant_conf[
+                soc_init = plant_conf['battery_target_state_of_charge']
             else:
                 soc_init = runtimeparams["soc_init"]
             params["passed_data"]["soc_init"] = soc_init
             if "soc_final" not in runtimeparams.keys():
-                soc_final = plant_conf[
+                soc_final = plant_conf['battery_target_state_of_charge']
             else:
                 soc_final = runtimeparams["soc_final"]
             params["passed_data"]["soc_final"] = soc_final
-            if
-                def_total_hours = optim_conf[
+            if 'operating_hours_of_each_deferrable_load' not in runtimeparams.keys():
+                def_total_hours = optim_conf['operating_hours_of_each_deferrable_load']
             else:
-                def_total_hours = runtimeparams[
-            params["passed_data"][
-            if
-                def_start_timestep =
+                def_total_hours = runtimeparams['operating_hours_of_each_deferrable_load']
+            params["passed_data"]['operating_hours_of_each_deferrable_load'] = def_total_hours
+            if 'start_timesteps_of_each_deferrable_load' in runtimeparams.keys():
+                def_start_timestep = runtimeparams['start_timesteps_of_each_deferrable_load']
             else:
-                def_start_timestep = runtimeparams
-
-
-
+                def_start_timestep = runtimeparams.get(
+                    'def_start_timestep', optim_conf['start_timesteps_of_each_deferrable_load'])
+            params["passed_data"]['start_timesteps_of_each_deferrable_load'] = def_start_timestep
+            if 'end_timesteps_of_each_deferrable_load' in runtimeparams.keys():
+                def_end_timestep = runtimeparams['end_timesteps_of_each_deferrable_load']
             else:
-                def_end_timestep = runtimeparams
-
+                def_end_timestep = runtimeparams.get(
+                    'def_end_timestep', optim_conf['end_timesteps_of_each_deferrable_load'])
+            params["passed_data"]["end_timesteps_of_each_deferrable_load"] = def_end_timestep
             forecast_dates = copy.deepcopy(forecast_dates)[0:prediction_horizon]
+            # Load the default config
+            if "def_load_config" in optim_conf:
+                for k in range(len(optim_conf["def_load_config"])):
+                    if "thermal_config" in optim_conf["def_load_config"][k]:
+                        if ("heater_desired_temperatures" in runtimeparams and len(runtimeparams["heater_desired_temperatures"]) > k):
+                            optim_conf["def_load_config"][k]["thermal_config"]["desired_temperatures"] = runtimeparams["heater_desired_temperatures"][k]
+                        if ("heater_start_temperatures" in runtimeparams and len(runtimeparams["heater_start_temperatures"]) > k):
+                            optim_conf["def_load_config"][k]["thermal_config"]["start_temperature"] = runtimeparams["heater_start_temperatures"][k]
         else:
             params["passed_data"]["prediction_horizon"] = None
             params["passed_data"]["soc_init"] = None
             params["passed_data"]["soc_final"] = None
-            params["passed_data"][
-            params["passed_data"][
-            params["passed_data"][
+            params["passed_data"]['operating_hours_of_each_deferrable_load'] = None
+            params["passed_data"]['start_timesteps_of_each_deferrable_load'] = None
+            params["passed_data"]['end_timesteps_of_each_deferrable_load'] = None
         # Treat passed forecast data lists
-        list_forecast_key = ['pv_power_forecast', 'load_power_forecast',
-
-
-
-
-
-            weather_forecast_cache = runtimeparams["weather_forecast_cache"]
-            params["passed_data"]["weather_forecast_cache"] = weather_forecast_cache
-        # Param to make sure optimization only uses cached data. (else produce error)
-        if "weather_forecast_cache_only" not in runtimeparams.keys():
-            weather_forecast_cache_only = False
-        else:
-            weather_forecast_cache_only = runtimeparams["weather_forecast_cache_only"]
-        params["passed_data"]["weather_forecast_cache_only"] = weather_forecast_cache_only
+        list_forecast_key = ['pv_power_forecast', 'load_power_forecast',
+                             'load_cost_forecast', 'prod_price_forecast', 'outdoor_temperature_forecast']
+        forecast_methods = ['weather_forecast_method', 'load_forecast_method', 'load_cost_forecast_method',
+                            'production_price_forecast_method', 'outdoor_temperature_forecast_method']
+
+        # Loop forecasts, check if value is a list and greater than or equal to forecast_dates
         for method, forecast_key in enumerate(list_forecast_key):
             if forecast_key in runtimeparams.keys():
                 if type(runtimeparams[forecast_key]) == list and len(runtimeparams[forecast_key]) >= len(forecast_dates):
                     params['passed_data'][forecast_key] = runtimeparams[forecast_key]
                     optim_conf[forecast_methods[method]] = 'list'
                 else:
-                    logger.error(
-
-
+                    logger.error(
+                        f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}")
+                    logger.error(
+                        f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}")
+                # Check if string contains list, if so extract
+                if type(runtimeparams[forecast_key]) == str:
+                    if type(ast.literal_eval(runtimeparams[forecast_key])) == list:
+                        runtimeparams[forecast_key] = ast.literal_eval(runtimeparams[forecast_key])
+                list_non_digits = [x for x in runtimeparams[forecast_key] if not (
+                    isinstance(x, int) or isinstance(x, float))]
                 if len(list_non_digits) > 0:
-                    logger.warning(
+                    logger.warning(
+                        f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)")
                     for x in list_non_digits:
-                        logger.warning(
+                        logger.warning(
+                            f"This value in {forecast_key} was detected as non digits: {str(x)}")
             else:
                 params['passed_data'][forecast_key] = None
+
         # Treat passed data for forecast model fit/predict/tune at runtime
-        if
-            days_to_retrieve =
+        if 'historic_days_to_retrieve' in runtimeparams.keys():
+            days_to_retrieve = runtimeparams['historic_days_to_retrieve']
         else:
-            days_to_retrieve = runtimeparams
-            params["passed_data"][
+            days_to_retrieve = runtimeparams.get('days_to_retrieve', 9)
+        params["passed_data"]['historic_days_to_retrieve'] = days_to_retrieve
         if "model_type" not in runtimeparams.keys():
             model_type = "load_forecast"
         else:
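Note: runtime forecast lists are accepted only when they are real lists at least as long as forecast_dates; otherwise an error is logged and the configured forecast method is kept. A hypothetical naive-mpc-optim payload exercising the keys treated above (values are illustrative only):

    import json

    runtimeparams = json.dumps({
        "prediction_horizon": 6,
        "soc_init": 0.4,
        "soc_final": 0.6,
        # new long name; the legacy 'def_total_hours' spelling still works
        "operating_hours_of_each_deferrable_load": [2, 1],
        # must cover every forecast date, else 'list' mode is rejected
        "load_cost_forecast": [0.21, 0.21, 0.30, 0.30, 0.30, 0.21],
    })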
@@ -371,12 +402,14 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
         if "perform_backtest" not in runtimeparams.keys():
             perform_backtest = False
         else:
-            perform_backtest = ast.literal_eval(
+            perform_backtest = ast.literal_eval(
+                str(runtimeparams["perform_backtest"]).capitalize())
         params["passed_data"]["perform_backtest"] = perform_backtest
         if "model_predict_publish" not in runtimeparams.keys():
             model_predict_publish = False
         else:
-            model_predict_publish = ast.literal_eval(
+            model_predict_publish = ast.literal_eval(
+                str(runtimeparams["model_predict_publish"]).capitalize())
         params["passed_data"]["model_predict_publish"] = model_predict_publish
         if "model_predict_entity_id" not in runtimeparams.keys():
             model_predict_entity_id = "sensor.p_load_forecast_custom_model"
@@ -408,6 +441,7 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
         else:
             mlr_predict_friendly_name = runtimeparams["mlr_predict_friendly_name"]
         params["passed_data"]["mlr_predict_friendly_name"] = mlr_predict_friendly_name
+
         # Treat passed data for other parameters
         if "alpha" not in runtimeparams.keys():
             alpha = 0.5
@@ -419,68 +453,124 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
         else:
             beta = runtimeparams["beta"]
         params["passed_data"]["beta"] = beta
-        #
-        if "
-
-
-
-
-
-        if "
-
-
-
+        # Param to save forecast cache (i.e. Solcast)
+        if "weather_forecast_cache" not in runtimeparams.keys():
+            weather_forecast_cache = False
+        else:
+            weather_forecast_cache = runtimeparams["weather_forecast_cache"]
+        params["passed_data"]["weather_forecast_cache"] = weather_forecast_cache
+        # Param to make sure optimization only uses cached data. (else produce error)
+        if "weather_forecast_cache_only" not in runtimeparams.keys():
+            weather_forecast_cache_only = False
+        else:
+            weather_forecast_cache_only = runtimeparams["weather_forecast_cache_only"]
+        params["passed_data"]["weather_forecast_cache_only"] = weather_forecast_cache_only
+        # A condition to manually save entity data under data_path/entities after optimization
+        if "entity_save" not in runtimeparams.keys():
+            entity_save = ""
+        else:
+            entity_save = runtimeparams["entity_save"]
+        params["passed_data"]["entity_save"] = entity_save
+        # A condition to put a prefix on all published data, or check for saved data under prefix name
+        if "publish_prefix" not in runtimeparams.keys():
+            publish_prefix = ""
+        else:
+            publish_prefix = runtimeparams["publish_prefix"]
+        params["passed_data"]["publish_prefix"] = publish_prefix
+
+        # Treat optimization (optim_conf) configuration parameters passed at runtime
+        if 'number_of_deferrable_loads' in runtimeparams.keys():
+            optim_conf['number_of_deferrable_loads'] = runtimeparams['number_of_deferrable_loads']
+        if 'num_def_loads' in runtimeparams.keys():
+            optim_conf['number_of_deferrable_loads'] = runtimeparams['num_def_loads']
+        if 'nominal_power_of_deferrable_loads' in runtimeparams.keys():
+            optim_conf['nominal_power_of_deferrable_loads'] = runtimeparams['nominal_power_of_deferrable_loads']
+        if 'P_deferrable_nom' in runtimeparams.keys():
+            optim_conf['nominal_power_of_deferrable_loads'] = runtimeparams['P_deferrable_nom']
+        if 'operating_hours_of_each_deferrable_load' in runtimeparams.keys():
+            optim_conf['operating_hours_of_each_deferrable_load'] = runtimeparams['operating_hours_of_each_deferrable_load']
+        if 'def_total_hours' in runtimeparams.keys():
+            optim_conf['operating_hours_of_each_deferrable_load'] = runtimeparams['def_total_hours']
+        if 'start_timesteps_of_each_deferrable_load' in runtimeparams.keys():
+            optim_conf['start_timesteps_of_each_deferrable_load'] = runtimeparams['start_timesteps_of_each_deferrable_load']
+        if 'end_timesteps_of_each_deferrable_load' in runtimeparams.keys():
+            optim_conf['end_timesteps_of_each_deferrable_load'] = runtimeparams['end_timesteps_of_each_deferrable_load']
         if "def_current_state" in runtimeparams.keys():
-            optim_conf["def_current_state"] = [
-
-
+            optim_conf["def_current_state"] = [
+                bool(s) for s in runtimeparams["def_current_state"]]
+        if 'treat_deferrable_load_as_semi_cont' in runtimeparams.keys():
+            optim_conf['treat_deferrable_load_as_semi_cont'] = [
+                ast.literal_eval(str(k).capitalize())
+                for k in runtimeparams['treat_deferrable_load_as_semi_cont']
+            ]
+        if 'treat_def_as_semi_cont' in runtimeparams.keys():
+            optim_conf['treat_deferrable_load_as_semi_cont'] = [
                 ast.literal_eval(str(k).capitalize())
-                for k in runtimeparams[
+                for k in runtimeparams['treat_def_as_semi_cont']
             ]
-        if
-            optim_conf[
-                ast.literal_eval(str(k).capitalize()) for k in runtimeparams[
+        if 'set_deferrable_load_single_constant' in runtimeparams.keys():
+            optim_conf['set_deferrable_load_single_constant'] = [
+                ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_deferrable_load_single_constant']
             ]
-        if
-            optim_conf[
-                ast.literal_eval(str(k).capitalize()) for k in runtimeparams[
+        if 'set_def_constant' in runtimeparams.keys():
+            optim_conf['set_deferrable_load_single_constant'] = [
+                ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_def_constant']
+            ]
+        if 'set_deferrable_startup_penalty' in runtimeparams.keys():
+            optim_conf['set_deferrable_startup_penalty'] = [
+                ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_deferrable_startup_penalty']
+            ]
+        if 'def_start_penalty' in runtimeparams.keys():
+            optim_conf['set_deferrable_startup_penalty'] = [
+                ast.literal_eval(str(k).capitalize()) for k in runtimeparams['def_start_penalty']
             ]
         if 'def_load_config' in runtimeparams:
             optim_conf["def_load_config"] = runtimeparams['def_load_config']
+        if 'weight_battery_discharge' in runtimeparams.keys():
+            optim_conf['weight_battery_discharge'] = runtimeparams[
+                'weight_battery_discharge'
+            ]
+        if 'weight_battery_charge' in runtimeparams.keys():
+            optim_conf['weight_battery_charge'] = runtimeparams['weight_battery_charge']
+
+        # Treat retrieve data from Home Assistant (retrieve_hass_conf) configuration parameters passed at runtime
+        if 'optimization_time_step' in runtimeparams.keys():
+            retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(
+                runtimeparams['optimization_time_step'], "minutes")
+        if 'continual_publish' in runtimeparams.keys():
+            retrieve_hass_conf['continual_publish'] = bool(
+                runtimeparams['continual_publish'])
         if "solcast_api_key" in runtimeparams.keys():
             retrieve_hass_conf["solcast_api_key"] = runtimeparams["solcast_api_key"]
-            optim_conf[
+            optim_conf['weather_forecast_method'] = "solcast"
         if "solcast_rooftop_id" in runtimeparams.keys():
             retrieve_hass_conf["solcast_rooftop_id"] = runtimeparams[
                 "solcast_rooftop_id"
             ]
-            optim_conf[
+            optim_conf['weather_forecast_method'] = "solcast"
         if "solar_forecast_kwp" in runtimeparams.keys():
             retrieve_hass_conf["solar_forecast_kwp"] = runtimeparams[
                 "solar_forecast_kwp"
             ]
-            optim_conf[
-
-
-
-            ]
-
-
-
-            if '
-
-
-            if
-                plant_conf[
-
-
-
-
-
-                plant_conf["Pd_max"] = runtimeparams["Pd_max"]
-            if "Pc_max" in runtimeparams.keys():
-                plant_conf["Pc_max"] = runtimeparams["Pc_max"]
+            optim_conf['weather_forecast_method'] = "solar.forecast"
+
+        # Treat system model parameters (plant) configuration parameters passed at runtime
+        if 'battery_minimum_state_of_charge' in runtimeparams.keys() or 'SOCmin' in runtimeparams.keys():
+            plant_conf['battery_minimum_state_of_charge'] = runtimeparams.get(
+                'battery_minimum_state_of_charge', runtimeparams.get('SOCmin'))
+        if 'battery_maximum_state_of_charge' in runtimeparams.keys() or 'SOCmax' in runtimeparams.keys():
+            plant_conf['battery_maximum_state_of_charge'] = runtimeparams.get(
+                'battery_maximum_state_of_charge', runtimeparams.get('SOCmax'))
+        if 'battery_target_state_of_charge' in runtimeparams.keys() or 'SOCtarget' in runtimeparams.keys():
+            plant_conf['battery_target_state_of_charge'] = runtimeparams.get(
+                'battery_target_state_of_charge', runtimeparams.get('SOCtarget'))
+        if 'battery_discharge_power_max' in runtimeparams.keys() or 'Pd_max' in runtimeparams.keys():
+            plant_conf['battery_discharge_power_max'] = runtimeparams.get(
+                'battery_discharge_power_max', runtimeparams.get('Pd_max'))
+        if 'battery_charge_power_max' in runtimeparams.keys() or 'Pc_max' in runtimeparams.keys():
+            plant_conf['battery_charge_power_max'] = runtimeparams.get(
+                'battery_charge_power_max', runtimeparams.get('Pc_max'))
+
         # Treat custom entities id's and friendly names for variables
         if "custom_pv_forecast_id" in runtimeparams.keys():
             params["passed_data"]["custom_pv_forecast_id"] = runtimeparams[
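Note: each runtime override above is duplicated for the new long parameter name and its legacy alias. For the optim_conf pairs the legacy key is checked second, so it wins when both are passed; the plant_conf keys instead use dict.get, where the new name takes precedence. A sketch of the plant pattern, with hypothetical values:

    # Both spellings set the same internal key; 'SOCmin' is the legacy alias.
    runtimeparams = {"SOCmin": 0.15}
    plant_conf = {}
    if 'battery_minimum_state_of_charge' in runtimeparams or 'SOCmin' in runtimeparams:
        plant_conf['battery_minimum_state_of_charge'] = runtimeparams.get(
            'battery_minimum_state_of_charge', runtimeparams.get('SOCmin'))
    print(plant_conf)  # {'battery_minimum_state_of_charge': 0.15}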
@@ -534,84 +624,47 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
         params["passed_data"]["custom_predicted_temperature_id"] = runtimeparams[
             "custom_predicted_temperature_id"
         ]
-
-        if "publish_prefix" not in runtimeparams.keys():
-            publish_prefix = ""
-        else:
-            publish_prefix = runtimeparams["publish_prefix"]
-        params["passed_data"]["publish_prefix"] = publish_prefix
-        # A condition to manually save entity data under data_path/entities after optimization
-        if "entity_save" not in runtimeparams.keys():
-            entity_save = ""
-        else:
-            entity_save = runtimeparams["entity_save"]
-        params["passed_data"]["entity_save"] = entity_save
+
     # Serialize the final params
-    params = json.dumps(params)
+    params = json.dumps(params, default=str)
     return params, retrieve_hass_conf, optim_conf, plant_conf
 
 
-def get_yaml_parse(
-    params: Optional[str] = None) -> Tuple[dict, dict, dict]:
+def get_yaml_parse(params: str, logger: logging.Logger) -> Tuple[dict, dict, dict]:
     """
-    Perform parsing of the
+    Perform parsing of the params into the configuration catagories
 
-    :param
-    :type emhass_conf: dict
-    :param use_secrets: Indicate if we should use a secrets file or not.
-        Set to False for unit tests.
-    :type use_secrets: bool, optional
-    :param params: Configuration parameters passed from data/options.json
+    :param params: Built configuration parameters
     :type params: str
+    :param logger: The logger object
+    :type logger: logging.Logger
     :return: A tuple with the dictionaries containing the parsed data
     :rtype: tuple(dict)
 
     """
-    if params
-
-        input_conf =
-    else:
-        input_conf = json.loads(params)
-    if use_secrets:
-        if params is None:
-            with open(emhass_conf["config_path"].parent / 'secrets_emhass.yaml', 'r') as file:  # Assume secrets and config file paths are the same
-                input_secrets = yaml.load(file, Loader=yaml.FullLoader)
+    if params:
+        if type(params) is str:
+            input_conf = json.loads(params)
         else:
-
-
-    if type(input_conf["retrieve_hass_conf"]) == list:  # if using old config version
-        retrieve_hass_conf = dict(
-            {key: d[key] for d in input_conf["retrieve_hass_conf"] for key in d}
-        )
+            input_conf = params
     else:
-
+        input_conf = {}
+        logger.error("No params have been detected for get_yaml_parse")
+        return False, False, False
 
-
-    retrieve_hass_conf.update(input_secrets)
-    else:
-        retrieve_hass_conf["hass_url"] = "http://supervisor/core/api"
-        retrieve_hass_conf["long_lived_token"] = "${SUPERVISOR_TOKEN}"
-        retrieve_hass_conf["time_zone"] = "Europe/Paris"
-        retrieve_hass_conf["lat"] = 45.83
-        retrieve_hass_conf["lon"] = 6.86
-        retrieve_hass_conf["alt"] = 4807.8
-    retrieve_hass_conf["freq"] = pd.to_timedelta(retrieve_hass_conf["freq"], "minutes")
-    retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])
-
-    if type(input_conf["optim_conf"]) == list:
-        optim_conf = dict({key: d[key] for d in input_conf["optim_conf"] for key in d})
-    else:
-        optim_conf = input_conf.get("optim_conf", {})
+    optim_conf = input_conf.get("optim_conf", {})
 
-
-        (key, d[key]) for d in optim_conf["list_hp_periods"] for key in d
-    )
-    optim_conf["delta_forecast"] = pd.Timedelta(days=optim_conf["delta_forecast"])
+    retrieve_hass_conf = input_conf.get("retrieve_hass_conf", {})
 
-
-
-
-
+    plant_conf = input_conf.get("plant_conf", {})
+
+    # Format time parameters
+    if optim_conf.get('delta_forecast_daily',None) is not None:
+        optim_conf['delta_forecast_daily'] = pd.Timedelta(days=optim_conf['delta_forecast_daily'])
+    if retrieve_hass_conf.get('optimization_time_step',None) is not None:
+        retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(retrieve_hass_conf['optimization_time_step'], "minutes")
+    if retrieve_hass_conf.get('time_zone',None) is not None:
+        retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])
 
     return retrieve_hass_conf, optim_conf, plant_conf
 
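Note: get_yaml_parse no longer reads files or injects secrets; it only splits an already-built params object into the three configuration dictionaries and converts the time fields. A hypothetical call, assuming a standard logging.Logger:

    import logging
    from emhass.utils import get_yaml_parse

    logger = logging.getLogger(__name__)
    params = {  # hypothetical minimal built params
        "retrieve_hass_conf": {"optimization_time_step": 30,
                               "time_zone": "Europe/Paris"},
        "optim_conf": {"delta_forecast_daily": 1},
        "plant_conf": {},
    }
    retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params, logger)
    # optimization_time_step is now a pd.Timedelta, time_zone a pytz timezone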
@@ -762,156 +815,402 @@ def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLF
     injection_dict["figure_0"] = image_path_0
     return injection_dict
 
+def build_config(emhass_conf: dict, logger: logging.Logger, defaults_path: str, config_path: Optional[str] = None,
+                 legacy_config_path: Optional[str] = None) -> dict:
+    """
+    Retrieve parameters from configuration files.
+    priority order (low - high) = defaults_path, config_path legacy_config_path
 
-
+    :param emhass_conf: Dictionary containing the needed emhass paths
+    :type emhass_conf: dict
+    :param logger: The logger object
+    :type logger: logging.Logger
+    :param defaults_path: path to config file for parameter defaults (config_defaults.json)
+    :type defaults_path: str
+    :param config_path: path to the main configuration file (config.json)
+    :type config_path: str
+    :param legacy_config_path: path to legacy config file (config_emhass.yaml)
+    :type legacy_config_path: str
+    :return: The built config dictionary
+    :rtype: dict
+    """
+
+    # Read default parameters (default root_path/data/config_defaults.json)
+    if defaults_path and pathlib.Path(defaults_path).is_file():
+        with defaults_path.open('r') as data:
+            config = json.load(data)
+    else:
+        logger.error("config_defaults.json. does not exist ")
+        return False
+
+    # Read user config parameters if provided (default /share/config.json)
+    if config_path and pathlib.Path(config_path).is_file():
+        with config_path.open('r') as data:
+            # Set override default parameters (config_defaults) with user given parameters (config.json)
+            logger.info("Obtaining parameters from config.json:")
+            config.update(json.load(data))
+    else:
+        logger.info("config.json does not exist, or has not been passed. config parameters may default to config_defaults.json")
+        logger.info("you may like to generate the config.json file on the configuration page")
+
+    # Check to see if legacy config_emhass.yaml was provided (default /app/config_emhass.yaml)
+    # Convert legacy parameter definitions/format to match config.json
+    if legacy_config_path and pathlib.Path(legacy_config_path).is_file():
+        with open(legacy_config_path, 'r') as data:
+            legacy_config = yaml.load(data, Loader=yaml.FullLoader)
+            legacy_config_parameters = build_legacy_config_params(emhass_conf,legacy_config,logger)
+            if type(legacy_config_parameters) is not bool:
+                logger.info("Obtaining parameters from config_emhass.yaml: (will overwrite config parameters)")
+                config.update(legacy_config_parameters)
+
+    return config
+
+
+def build_legacy_config_params(emhass_conf: dict, legacy_config: dict,
+                               logger: logging.Logger) -> dict:
+    """
+    Build a config dictionary with legacy config_emhass.yaml file.
+    Uses the associations file to convert parameter naming conventions (to config.json/config_defaults.json).
+    Extracts the parameter values and formats to match config.json.
+
+    :param emhass_conf: Dictionary containing the needed emhass paths
+    :type emhass_conf: dict
+    :param legacy_config: The legacy config dictionary
+    :type legacy_config: dict
+    :param logger: The logger object
+    :type logger: logging.Logger
+    :return: The built config dictionary
+    :rtype: dict
+    """
+
+    # Association file key reference
+    # association[0] = config catagories
+    # association[1] = legacy parameter name
+    # association[2] = parameter (config.json/config_defaults.json)
+    # association[3] = parameter list name if exists (not used, from legacy options.json)
+
+    # Check each config catagories exists, else create blank dict for categories (avoid errors)
+    legacy_config['retrieve_hass_conf'] = legacy_config.get('retrieve_hass_conf',{})
+    legacy_config['optim_conf'] = legacy_config.get('optim_conf',{})
+    legacy_config['plant_conf'] = legacy_config.get('plant_conf',{})
+    config = {}
+
+    # Use associations list to map legacy parameter name with config.json parameter name
+    if emhass_conf['associations_path'].exists():
+        with emhass_conf['associations_path'].open('r') as data:
+            associations = list(csv.reader(data, delimiter=","))
+    else:
+        logger.error("Cant find associations file (associations.csv) in: " + str(emhass_conf['associations_path']))
+        return False
+
+    # Loop through all parameters in association file
+    # Append config with existing legacy config parameters (converting alternative parameter naming conventions with associations list)
+    for association in associations:
+        # if legacy config catagories exists and if legacy parameter exists in config catagories
+        if legacy_config.get(association[0],None) is not None and legacy_config[association[0]].get(association[1],None) is not None:
+            config[association[2]] = legacy_config[association[0]][association[1]]
+
+            # If config now has load_peak_hour_periods, extract from list of dict
+            if association[2] == "load_peak_hour_periods" and type(config[association[2]]) is list:
+                config[association[2]] = dict((key, d[key]) for d in config[association[2]] for key in d)
+
+    return config
+    # params['associations_dict'] = associations_dict
+
+def param_to_config(param: dict,
                     logger: logging.Logger) -> dict:
     """
-
+    A function that extracts the parameters from param back to the config.json format.
+    Extracts parameters from config catagories.
+    Attempts to exclude secrets hosed in retrieve_hass_conf.
+
+    :param params: Built configuration parameters
+    :type param: dict
+    :param logger: The logger object
+    :type logger: logging.Logger
+    :return: The built config dictionary
+    :rtype: dict
+    """
+    logger.debug("Converting param to config")
+
+    return_config = {}
+
+    config_catagories = ["retrieve_hass_conf","optim_conf","plant_conf"]
+    secret_params = ["hass_url", "time_zone", "Latitude", "Longitude", "Altitude", "long_lived_token", "solcast_api_key", "solcast_rooftop_id", "solar_forecast_kwp"]
+
+    # Loop through config catagories that contain config params, and extract
+    for config in config_catagories:
+        for parameter in param[config]:
+            # If parameter is not a secret, append to return_config
+            if parameter not in secret_params:
+                return_config[str(parameter)] = param[config][parameter]
+
+    return return_config
+
+def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[dict] = {}, options_path: Optional[str] = None,
+                  secrets_path: Optional[str] = None, no_response: Optional[bool] = False) -> Tuple[dict, dict]:
+    """
+    Retrieve and build parameters from secrets locations (ENV, ARG, Secrets file (secrets_emhass.yaml/options.json) and/or Home Assistant (via API))
+    priority order (lwo to high) = Defaults (written in function), ENV, Options json file, Home Assistant API, Secrets yaml file, Arguments
+
+    :param emhass_conf: Dictionary containing the needed emhass paths
+    :type emhass_conf: dict
+    :param logger: The logger object
+    :type logger: logging.Logger
+    :param argument: dictionary of secrets arguments passed (url,key)
+    :type argument: dict
+    :param options_path: path to the options file (options.json) (usually provided bt EMHASS-Add-on)
+    :type options_path: str
+    :param secrets_path: path to secrets file (secrets_emhass.yaml)
+    :type secrets_path: str
+    :param no_response: bypass get request to Home Assistant (json response errors)
+    :type no_response: bool
+    :return: Updated emhass_conf, the built secrets dictionary
+    :rtype: Tuple[dict, dict]:
+    """
+
+    #Set defaults to be overwritten
+    params_secrets = {
+        "hass_url": "https://myhass.duckdns.org/",
+        "long_lived_token": "thatverylongtokenhere",
+        "time_zone": "Europe/Paris",
+        "Latitude": 45.83,
+        "Longitude": 6.86,
+        "Altitude": 4807.8,
+        "solcast_api_key": "yoursecretsolcastapikey",
+        "solcast_rooftop_id": "yourrooftopid",
+        "solar_forecast_kwp": 5
+    }
+
+    # Obtain Secrets from ENV?
+    params_secrets['hass_url'] = os.getenv("EMHASS_URL",params_secrets['hass_url'])
+    params_secrets['long_lived_token'] = os.getenv("SUPERVISOR_TOKEN", params_secrets['long_lived_token'])
+    params_secrets['time_zone'] = os.getenv("TIME_ZONE", params_secrets['time_zone'])
+    params_secrets['Latitude'] = float(os.getenv("LAT", params_secrets['Latitude']))
+    params_secrets['Longitude'] = float(os.getenv("LON", params_secrets['Longitude']))
+    params_secrets['Altitude'] = float(os.getenv("ALT", params_secrets['Altitude']))
+
+    # Obtain secrets from options.json (Generated from EMHASS-Add-on, Home Assistant addon Configuration page) or Home Assistant API (from local Supervisor API)?
+    # Use local supervisor API to obtain secrets from Home Assistant if hass_url in options.json is empty and SUPERVISOR_TOKEN ENV exists (provided by Home Assistant when running the container as addon)
+    options = {}
+    if options_path and pathlib.Path(options_path).is_file():
+        with options_path.open('r') as data:
+            options = json.load(data)
+
+            # Obtain secrets from Home Assistant?
+            url_from_options = options.get('hass_url', 'empty')
+            key_from_options = options.get('long_lived_token', 'empty')
+
+            # If data path specified by options.json, overwrite emhass_conf['data_path']
+            if options.get('data_path', None) != None and pathlib.Path(options['data_path']).exists():
+                emhass_conf['data_path'] = pathlib.Path(options['data_path']);
+
+            # Check to use Home Assistant local API
+            if not no_response and \
+                (url_from_options == 'empty' or url_from_options == '' or url_from_options == "http://supervisor/core/api") and \
+                os.getenv("SUPERVISOR_TOKEN", None) is not None:
+
+                params_secrets['long_lived_token'] = os.getenv("SUPERVISOR_TOKEN",None)
+                params_secrets['hass_url'] = "http://supervisor/core/api"
+                headers = {
+                    "Authorization": "Bearer " + params_secrets['long_lived_token'],
+                    "content-type": "application/json"
+                }
+                # Obtain secrets from Home Assistant via API
+                logger.debug("Obtaining secrets from Home Assistant Supervisor API")
+                response = get((params_secrets['hass_url'] + "/config"), headers=headers)
+                if response.status_code < 400:
+                    config_hass = response.json()
+                    params_secrets = {
+                        'hass_url': params_secrets['hass_url'],
+                        'long_lived_token': params_secrets['long_lived_token'],
+                        'time_zone': config_hass['time_zone'],
+                        'Latitude': config_hass['latitude'],
+                        'Longitude': config_hass['longitude'],
+                        'Altitude': config_hass['elevation']
+                    }
+                else:
+                    # Obtain the url and key secrets if any from options.json (default /app/options.json)
+                    logger.warning("Error obtaining secrets from Home Assistant Supervisor API")
+                    logger.debug("Obtaining url and key secrets from options.json")
+                    if url_from_options != 'empty' and url_from_options != '':
+                        params_secrets['hass_url'] = url_from_options
+                    if key_from_options != 'empty' and key_from_options != '':
+                        params_secrets['long_lived_token'] = key_from_options
+                    if options.get('time_zone',"empty") != "empty" and options['time_zone'] != '':
+                        params_secrets['time_zone'] = options['time_zone']
+                    if options.get('Latitude',None) is not None and bool(options['Latitude']):
+                        params_secrets['Latitude'] = options['Latitude']
+                    if options.get('Longitude',None) is not None and bool(options['Longitude']):
+                        params_secrets['Longitude'] = options['Longitude']
+                    if options.get('Altitude',None) is not None and bool(options['Altitude']):
+                        params_secrets['Altitude'] = options['Altitude']
+            else:
+                # Obtain the url and key secrets if any from options.json (default /app/options.json)
+                logger.debug("Obtaining url and key secrets from options.json")
+                if url_from_options != 'empty' and url_from_options != '':
+                    params_secrets['hass_url'] = url_from_options
+                if key_from_options != 'empty' and key_from_options != '':
+                    params_secrets['long_lived_token'] = key_from_options
+                if options.get('time_zone',"empty") != "empty" and options['time_zone'] != '':
+                    params_secrets['time_zone'] = options['time_zone']
+                if options.get('Latitude',None) is not None and bool(options['Latitude']):
+                    params_secrets['Latitude'] = options['Latitude']
+                if options.get('Longitude',None) is not None and bool(options['Longitude']):
+                    params_secrets['Longitude'] = options['Longitude']
+                if options.get('Altitude',None) is not None and bool(options['Altitude']):
+                    params_secrets['Altitude'] = options['Altitude']
+
+            # Obtain the forecast secrets (if any) from options.json (default /app/options.json)
+            forecast_secrets = ["solcast_api_key","solcast_rooftop_id","solar_forecast_kwp"]
+            if any(x in forecast_secrets for x in list(options.keys())):
+                logger.debug("Obtaining forecast secrets from options.json")
+                if options.get('solcast_api_key',"empty") != "empty" and options['solcast_api_key'] != '':
+                    params_secrets['solcast_api_key'] = options['solcast_api_key']
+                if options.get('solcast_rooftop_id',"empty") != "empty" and options['solcast_rooftop_id'] != '':
+                    params_secrets['solcast_rooftop_id'] = options['solcast_rooftop_id']
+                if options.get('solar_forecast_kwp',None) and bool(options['solar_forecast_kwp']):
+                    params_secrets['solar_forecast_kwp'] = options['solar_forecast_kwp']
+
+    # Obtain secrets from secrets_emhass.yaml? (default /app/secrets_emhass.yaml)
+    if secrets_path and pathlib.Path(secrets_path).is_file():
+        logger.debug("Obtaining secrets from secrets file")
+        with open(pathlib.Path(secrets_path), 'r') as file:
+            params_secrets.update(yaml.load(file, Loader=yaml.FullLoader))
+
+    # Receive key and url from ARG/arguments?
+    if argument.get('url',None) is not None:
+        params_secrets['hass_url'] = argument['url']
+        logger.debug("Obtaining url from passed argument")
+    if argument.get('key',None) is not None:
+        params_secrets['long_lived_token'] = argument['key']
+        logger.debug("Obtaining long_lived_token from passed argument")
+
+    return emhass_conf, params_secrets
+
+
 
-
-
-
+def build_params(emhass_conf: dict, params_secrets: dict, config: dict,
+                 logger: logging.Logger) -> dict:
+    """
+    Build the main params dictionary from the config and secrets
+    Appends configuration catagories used by emhass to the parameters. (with use of the associations file as a reference)
+
+    :param emhass_conf: Dictionary containing the needed emhass paths
+    :type emhass_conf: dict
+    :param params_secrets: The dictionary containing the built secret variables
     :type params_secrets: dict
-    :param
-    :type
-    :param addon: A "bool" to select if we are using the add-on
-    :type addon: int
+    :param config: The dictionary of built config parameters
+    :type config: dict
     :param logger: The logger object
     :type logger: logging.Logger
-    :return: The
+    :return: The built param dictionary
     :rtype: dict
     """
-    if
-
-
-
-
-
-
-
-
-
-
-    ]
-
-
-
-
-
-
-
-
-
-
-
-
-
-    if
-
-
-
-
-
-
-
-
-
-
-    params["optim_conf"]["delta_forecast"] = options.get("delta_forecast_daily", params["optim_conf"]["delta_forecast"])
-    params["optim_conf"]["load_cost_forecast_method"] = options.get("load_cost_forecast_method", params["optim_conf"]["load_cost_forecast_method"])
-    if (options.get("list_peak_hours_periods_start_hours", None) != None and options.get("list_peak_hours_periods_end_hours", None) != None):
-        start_hours_list = [i["peak_hours_periods_start_hours"] for i in options["list_peak_hours_periods_start_hours"]]
-        end_hours_list = [i["peak_hours_periods_end_hours"] for i in options["list_peak_hours_periods_end_hours"]]
+    if type(params_secrets) is not dict:
+        params_secrets = {}
+
+    params = {}
+    #Start with blank config catagories
+    params['retrieve_hass_conf'] = {}
+    params['params_secrets'] = {}
+    params['optim_conf'] = {}
+    params['plant_conf'] = {}
+
+    # Obtain associations to categorize parameters to their corresponding config catagories
+    if emhass_conf.get('associations_path', get_root(__file__, num_parent=2) / 'data/associations.csv').exists():
+        with emhass_conf['associations_path'].open('r') as data:
+            associations = list(csv.reader(data, delimiter=","))
+    else:
+        logger.error("Unable to obtain the associations file (associations.csv) in: " + str(emhass_conf['associations_path']))
+        return False
+
+    # Association file key reference
+    # association[0] = config catagories
+    # association[1] = legacy parameter name
+    # association[2] = parameter (config.json/config_defaults.json)
+    # association[3] = parameter list name if exists (not used, from legacy options.json)
+
+    # Use association list to append parameters from config into params (with corresponding config catagories)
+    for association in associations:
+        # If parameter has list_ name and parameter in config is presented with its list name
+        # (ie, config parameter is in legacy options.json format)
+        if len(association) == 4 and config.get(association[3],None) is not None:
+            # Extract lists of dictionaries
+            if config[association[3]] and type(config[association[3]][0]) is dict:
+                params[association[0]][association[2]] = [i[association[2]] for i in config[association[3]]]
+            else:
+                params[association[0]][association[2]] = config[association[3]]
+        # Else, directly set value of config parameter to param
+        elif config.get(association[2],None) is not None:
+            params[association[0]][association[2]] = config[association[2]]
+
+    # Check if we need to create `list_hp_periods` from config (ie. legacy options.json format)
+    if params.get('optim_conf',None) is not None and config.get("list_peak_hours_periods_start_hours", None) is not None and config.get("list_peak_hours_periods_end_hours", None) is not None:
+        start_hours_list = [i["peak_hours_periods_start_hours"] for i in config["list_peak_hours_periods_start_hours"]]
+        end_hours_list = [i["peak_hours_periods_end_hours"] for i in config["list_peak_hours_periods_end_hours"]]
         num_peak_hours = len(start_hours_list)
-        list_hp_periods_list =
-        params['optim_conf']['
-
-
-        params['optim_conf']
-
-
-
-
-        params['optim_conf']['
-
-
-
-
-
-        params['optim_conf']['
-
-
-
-
-
-        params['
-
-
-
-
-
-
-        params['plant_conf']['surface_tilt'] = [i['surface_tilt'] for i in options.get('list_surface_tilt')]
-        if options.get('list_surface_azimuth',None) != None:
-            params['plant_conf']['surface_azimuth'] = [i['surface_azimuth'] for i in options.get('list_surface_azimuth')]
-        if options.get('list_modules_per_string',None) != None:
-            params['plant_conf']['modules_per_string'] = [i['modules_per_string'] for i in options.get('list_modules_per_string')]
-        if options.get('list_strings_per_inverter',None) != None:
-            params['plant_conf']['strings_per_inverter'] = [i['strings_per_inverter'] for i in options.get('list_strings_per_inverter')]
-        params["plant_conf"]["inverter_is_hybrid"] = options.get("inverter_is_hybrid", params["plant_conf"]["inverter_is_hybrid"])
-        params["plant_conf"]["compute_curtailment"] = options.get("compute_curtailment", params["plant_conf"]["compute_curtailment"])
-        params['plant_conf']['Pd_max'] = options.get('battery_discharge_power_max', params['plant_conf']['Pd_max'])
-        params['plant_conf']['Pc_max'] = options.get('battery_charge_power_max', params['plant_conf']['Pc_max'])
-        params['plant_conf']['eta_disch'] = options.get('battery_discharge_efficiency', params['plant_conf']['eta_disch'])
-        params['plant_conf']['eta_ch'] = options.get('battery_charge_efficiency', params['plant_conf']['eta_ch'])
-        params['plant_conf']['Enom'] = options.get('battery_nominal_energy_capacity', params['plant_conf']['Enom'])
-        params['plant_conf']['SOCmin'] = options.get('battery_minimum_state_of_charge', params['plant_conf']['SOCmin'])
-        params['plant_conf']['SOCmax'] = options.get('battery_maximum_state_of_charge', params['plant_conf']['SOCmax'])
-        params['plant_conf']['SOCtarget'] = options.get('battery_target_state_of_charge', params['plant_conf']['SOCtarget'])
-        # Check parameter lists have the same amounts as deferrable loads
-        # If not, set defaults it fill in gaps
-        if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_start_timestep']):
-            logger.warning("def_start_timestep / list_start_timesteps_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter")
-            for x in range(len(params['optim_conf']['def_start_timestep']), params['optim_conf']['num_def_loads']):
-                params['optim_conf']['def_start_timestep'].append(0)
-        if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_end_timestep']):
-            logger.warning("def_end_timestep / list_end_timesteps_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter")
-            for x in range(len(params['optim_conf']['def_end_timestep']), params['optim_conf']['num_def_loads']):
-                params['optim_conf']['def_end_timestep'].append(0)
-        if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['set_def_constant']):
-            logger.warning("set_def_constant / list_set_deferrable_load_single_constant does not match number in num_def_loads, adding default values to parameter")
-            for x in range(len(params['optim_conf']['set_def_constant']), params['optim_conf']['num_def_loads']):
-                params['optim_conf']['set_def_constant'].append(False)
-        if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['treat_def_as_semi_cont']):
-            logger.warning("treat_def_as_semi_cont / list_treat_deferrable_load_as_semi_cont does not match number in num_def_loads, adding default values to parameter")
-            for x in range(len(params['optim_conf']['treat_def_as_semi_cont']), params['optim_conf']['num_def_loads']):
-                params['optim_conf']['treat_def_as_semi_cont'].append(True)
-        if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_start_penalty']):
-            logger.warning("def_start_penalty / list_set_deferrable_startup_penalty does not match number in num_def_loads, adding default values to parameter")
-            for x in range(len(params['optim_conf']['def_start_penalty']), params['optim_conf']['num_def_loads']):
-                params['optim_conf']['def_start_penalty'].append(0.0)
-        # days_to_retrieve should be no less then 2
-        if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_total_hours']):
-            logger.warning("def_total_hours / list_operating_hours_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter")
-            for x in range(len(params['optim_conf']['def_total_hours']), params['optim_conf']['num_def_loads']):
-                params['optim_conf']['def_total_hours'].append(0)
-        if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['P_deferrable_nom']):
-            logger.warning("P_deferrable_nom / list_nominal_power_of_deferrable_loads does not match number in num_def_loads, adding default values to parameter")
-            for x in range(len(params['optim_conf']['P_deferrable_nom']), params['optim_conf']['num_def_loads']):
-                params['optim_conf']['P_deferrable_nom'].append(0)
-        # days_to_retrieve should be no less then 2
-        if params["retrieve_hass_conf"]["days_to_retrieve"] < 2:
-            params["retrieve_hass_conf"]["days_to_retrieve"] = 2
+        list_hp_periods_list = {'period_hp_'+str(i+1):[{'start':start_hours_list[i]},{'end':end_hours_list[i]}] for i in range(num_peak_hours)}
+        params['optim_conf']['load_peak_hour_periods'] = list_hp_periods_list
+    else:
+        # Else, check param already contains load_peak_hour_periods from config
+        if params['optim_conf'].get('load_peak_hour_periods',None) is None:
+            logger.warning("Unable to detect or create load_peak_hour_periods parameter")
+
+    # Format load_peak_hour_periods list to dict if necessary
+    if params['optim_conf'].get('load_peak_hour_periods',None) is not None and isinstance(params['optim_conf']['load_peak_hour_periods'], list):
+        params['optim_conf']['load_peak_hour_periods'] = dict((key, d[key]) for d in params['optim_conf']['load_peak_hour_periods'] for key in d)
+
+    # Call function to check parameter lists that require the same length as deferrable loads
+    # If not, set defaults it fill in gaps
+    if params['optim_conf'].get('number_of_deferrable_loads',None) is not None:
+        num_def_loads = params['optim_conf']['number_of_deferrable_loads']
+        params['optim_conf']['start_timesteps_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'start_timesteps_of_each_deferrable_load',logger)
+        params['optim_conf']['end_timesteps_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'end_timesteps_of_each_deferrable_load',logger)
+        params['optim_conf']['set_deferrable_load_single_constant'] = check_def_loads(num_def_loads,params['optim_conf'],False,'set_deferrable_load_single_constant',logger)
+        params['optim_conf']['treat_deferrable_load_as_semi_cont'] = check_def_loads(num_def_loads,params['optim_conf'],True,'treat_deferrable_load_as_semi_cont',logger)
+        params['optim_conf']['set_deferrable_startup_penalty'] = check_def_loads(num_def_loads,params['optim_conf'],0.0,'set_deferrable_startup_penalty',logger)
+        params['optim_conf']['operating_hours_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'operating_hours_of_each_deferrable_load',logger)
+        params['optim_conf']['nominal_power_of_deferrable_loads'] = check_def_loads(num_def_loads,params['optim_conf'],0,'nominal_power_of_deferrable_loads',logger)
+    else:
+        logger.warning("unable to obtain parameter: number_of_deferrable_loads")
+    # historic_days_to_retrieve should be no less then 2
+    if params["retrieve_hass_conf"].get('historic_days_to_retrieve',None) is not None:
+        if params["retrieve_hass_conf"]['historic_days_to_retrieve'] < 2:
+            params["retrieve_hass_conf"]['historic_days_to_retrieve'] = 2
             logger.warning("days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. Make sure your sensors also have at least 2 days of history")
     else:
-
-
+        logger.warning("unable to obtain parameter: historic_days_to_retrieve")
+
+    # Configure secrets, set params to correct config categorie
+    # retrieve_hass_conf
+    params['retrieve_hass_conf']["hass_url"] = params_secrets.get("hass_url",None)
+    params['retrieve_hass_conf']["long_lived_token"] = params_secrets.get("long_lived_token",None)
+    params['retrieve_hass_conf']["time_zone"] = params_secrets.get("time_zone",None)
+    params['retrieve_hass_conf']['Latitude'] = params_secrets.get('Latitude',None)
+    params['retrieve_hass_conf']['Longitude'] = params_secrets.get('Longitude',None)
+    params['retrieve_hass_conf']['Altitude'] = params_secrets.get('Altitude',None)
+    # Update optional param secrets
+    if params["optim_conf"].get('weather_forecast_method',None) is not None:
+        if params["optim_conf"]['weather_forecast_method'] == "solcast":
+            params["retrieve_hass_conf"]["solcast_api_key"] = params_secrets.get("solcast_api_key", "123456")
+            params["params_secrets"]["solcast_api_key"] = params_secrets.get("solcast_api_key", "123456")
+            params["retrieve_hass_conf"]["solcast_rooftop_id"] = params_secrets.get("solcast_rooftop_id", "123456")
+            params["params_secrets"]["solcast_rooftop_id"] = params_secrets.get("solcast_rooftop_id", "123456")
+        elif params["optim_conf"]['weather_forecast_method'] == "solar.forecast":
+            params["retrieve_hass_conf"]["solar_forecast_kwp"] = params_secrets.get("solar_forecast_kwp", 5)
+            params["params_secrets"]["solar_forecast_kwp"] = params_secrets.get("solar_forecast_kwp", 5)
+    else:
+        logger.warning("Unable to detect weather_forecast_method parameter")
+    # Check if secrets parameters still defaults values
+    secret_params = ["https://myhass.duckdns.org/","thatverylongtokenhere",45.83,6.86,4807.8]
+    if any(x in secret_params for x in params['retrieve_hass_conf'].values()):
+        logger.warning("Some secret parameters values are still matching their defaults")
+
+    # Set empty dict objects for params passed_data
+    # To be latter populated with runtime parameters (treat_runtimeparams)
     params["passed_data"] = {
         "pv_power_forecast": None,
         "load_power_forecast": None,
@@ -920,14 +1219,39 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
         "prediction_horizon": None,
         "soc_init": None,
         "soc_final": None,
-
-
-
+        'operating_hours_of_each_deferrable_load': None,
+        'start_timesteps_of_each_deferrable_load': None,
+        'end_timesteps_of_each_deferrable_load': None,
         "alpha": None,
         "beta": None,
     }
+
     return params
 
+def check_def_loads(num_def_loads: int, parameter: list[dict], default, parameter_name: str, logger):
+    """
+    Check parameter lists with deferrable loads number, if they do not match, enlarge to fit.
+
+    :param num_def_loads: Total number deferrable loads
+    :type num_def_loads: int
+    :param parameter: parameter config dict containing paramater
+    :type: list[dict]
+    :param default: default value for parameter to pad missing
+    :type: obj
+    :param parameter_name: name of parameter
+    :type logger: str
+    :param logger: The logger object
+    :type logger: logging.Logger
+    return: parameter list
+    :rtype: list[dict]
+
+    """
+    if parameter.get(parameter_name,None) is not None and type(parameter[parameter_name]) is list and num_def_loads > len(parameter[parameter_name]):
+        logger.warning(parameter_name + " does not match number in num_def_loads, adding default values ("+ str(default) + ") to parameter")
+        for x in range(len(parameter[parameter_name]), num_def_loads):
+            parameter[parameter_name].append(default)
+    return parameter[parameter_name]
+
 
 def get_days_list(days_to_retrieve: int) -> pd.date_range:
     """