emhass 0.11.1__py3-none-any.whl → 0.11.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emhass/command_line.py +702 -373
- emhass/data/associations.csv +1 -1
- emhass/data/config_defaults.json +1 -2
- emhass/forecast.py +671 -346
- emhass/machine_learning_forecaster.py +204 -105
- emhass/machine_learning_regressor.py +26 -7
- emhass/optimization.py +1017 -471
- emhass/retrieve_hass.py +234 -78
- emhass/static/configuration_script.js +4 -4
- emhass/static/data/param_definitions.json +5 -4
- emhass/utils.py +690 -404
- emhass/web_server.py +339 -225
- {emhass-0.11.1.dist-info → emhass-0.11.3.dist-info}/METADATA +17 -11
- emhass-0.11.3.dist-info/RECORD +32 -0
- {emhass-0.11.1.dist-info → emhass-0.11.3.dist-info}/WHEEL +1 -1
- emhass-0.11.1.dist-info/RECORD +0 -32
- {emhass-0.11.1.dist-info → emhass-0.11.3.dist-info}/LICENSE +0 -0
- {emhass-0.11.1.dist-info → emhass-0.11.3.dist-info}/entry_points.txt +0 -0
- {emhass-0.11.1.dist-info → emhass-0.11.3.dist-info}/top_level.txt +0 -0
emhass/utils.py
CHANGED
@@ -1,26 +1,26 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-
+import ast
+import copy
 import csv
-import os
-from typing import Tuple, Optional
-from datetime import datetime, timedelta, timezone
+import json
 import logging
+import os
 import pathlib
-import json
-import copy
+from datetime import datetime, timedelta, timezone
+from typing import Optional, Tuple
+
 import numpy as np
 import pandas as pd
-
-import yaml
+import plotly.express as px
 import pytz
-import ast
+import yaml
+from requests import get
 
-from requests import get
+from emhass.machine_learning_forecaster import MLForecaster
 
 pd.options.plotting.backend = "plotly"
 
-from emhass.machine_learning_forecaster import MLForecaster
 
 def get_root(file: str, num_parent: Optional[int] = 3) -> str:
     """
@@ -44,8 +44,12 @@ def get_root(file: str, num_parent: Optional[int] = 3) -> str:
     return root
 
 
-def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] = True,
-               logging_level: Optional[str] = "DEBUG") -> Tuple[logging.Logger, logging.StreamHandler]:
+def get_logger(
+    fun_name: str,
+    emhass_conf: dict,
+    save_to_file: Optional[bool] = True,
+    logging_level: Optional[str] = "DEBUG",
+) -> Tuple[logging.Logger, logging.StreamHandler]:
     """
     Create a simple logger object.
 
@@ -64,10 +68,10 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] =
     logger.propagate = True
     logger.fileSetting = save_to_file
     if save_to_file:
-        if os.path.isdir(emhass_conf['data_path']):
-            ch = logging.FileHandler(emhass_conf['data_path'] / 'logger_emhass.log')
+        if os.path.isdir(emhass_conf["data_path"]):
+            ch = logging.FileHandler(emhass_conf["data_path"] / "logger_emhass.log")
         else:
-            raise Exception("Unable to access data_path: "+emhass_conf['data_path'])
+            raise Exception("Unable to access data_path: " + emhass_conf["data_path"])
     else:
         ch = logging.StreamHandler()
     if logging_level == "DEBUG":
@@ -94,8 +98,12 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] =
     return logger, ch
 
 
-def get_forecast_dates(freq: int, delta_forecast: int, time_zone: datetime.tzinfo,
-                       timedelta_days: Optional[int] = 0) -> pd.core.indexes.datetimes.DatetimeIndex:
+def get_forecast_dates(
+    freq: int,
+    delta_forecast: int,
+    time_zone: datetime.tzinfo,
+    timedelta_days: Optional[int] = 0,
+) -> pd.core.indexes.datetimes.DatetimeIndex:
     """
     Get the date_range list of the needed future dates using the delta_forecast parameter.
 
@@ -110,17 +118,36 @@ def get_forecast_dates(freq: int, delta_forecast: int, time_zone: datetime.tzinfo,
 
     """
     freq = pd.to_timedelta(freq, "minutes")
-    start_forecast = pd.Timestamp(datetime.now()).replace(hour=0, minute=0, second=0, microsecond=0)
-    end_forecast = (start_forecast + pd.Timedelta(days=delta_forecast)).replace(microsecond=0)
-    forecast_dates = pd.date_range(start=start_forecast,
-                                   end=end_forecast + timedelta(days=timedelta_days) - freq,
-                                   freq=freq, tz=time_zone).tz_convert('utc').round(freq, ambiguous='infer', nonexistent='shift_forward').tz_convert(time_zone)
+    start_forecast = pd.Timestamp(datetime.now()).replace(
+        hour=0, minute=0, second=0, microsecond=0
+    )
+    end_forecast = (start_forecast + pd.Timedelta(days=delta_forecast)).replace(
+        microsecond=0
+    )
+    forecast_dates = (
+        pd.date_range(
+            start=start_forecast,
+            end=end_forecast + timedelta(days=timedelta_days) - freq,
+            freq=freq,
+            tz=time_zone,
+        )
+        .tz_convert("utc")
+        .round(freq, ambiguous="infer", nonexistent="shift_forward")
+        .tz_convert(time_zone)
+    )
     return forecast_dates
 
 
-def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
-                        optim_conf: dict, plant_conf: dict, set_type: str,
-                        logger: logging.Logger) -> Tuple[str, dict]:
+def treat_runtimeparams(
+    runtimeparams: str,
+    params: str,
+    retrieve_hass_conf: dict,
+    optim_conf: dict,
+    plant_conf: dict,
+    set_type: str,
+    logger: logging.Logger,
+    emhass_conf: dict,
+) -> Tuple[str, dict]:
     """
     Treat the passed optimization runtime parameters.
 
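The get_forecast_dates rewrite above is behaviour-preserving: build a range from local midnight over delta_forecast days at the optimization step, round it via UTC so DST transitions cannot produce ambiguous stamps, then convert back. A minimal standalone sketch of the same logic; the 30-minute step and Europe/Paris zone are illustrative values, not emhass defaults:

from datetime import datetime

import pandas as pd
import pytz

# Illustrative inputs; emhass derives these from retrieve_hass_conf/optim_conf.
freq = pd.to_timedelta(30, "minutes")
time_zone = pytz.timezone("Europe/Paris")
delta_forecast = 1  # horizon in days

start = pd.Timestamp(datetime.now()).replace(hour=0, minute=0, second=0, microsecond=0)
end = start + pd.Timedelta(days=delta_forecast)
forecast_dates = (
    pd.date_range(start=start, end=end - freq, freq=freq, tz=time_zone)
    .tz_convert("utc")
    .round(freq, ambiguous="infer", nonexistent="shift_forward")  # DST-safe rounding
    .tz_convert(time_zone)
)
print(len(forecast_dates))  # typically 48 half-hour steps for a one-day horizon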
@@ -128,31 +155,38 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
     :type runtimeparams: str
     :param params: Built configuration parameters
     :type params: str
-    :param retrieve_hass_conf:
+    :param retrieve_hass_conf: Config dictionary for data retrieving parameters.
     :type retrieve_hass_conf: dict
-    :param optim_conf:
+    :param optim_conf: Config dictionary for optimization parameters.
     :type optim_conf: dict
-    :param plant_conf:
+    :param plant_conf: Config dictionary for technical plant parameters.
     :type plant_conf: dict
     :param set_type: The type of action to be performed.
     :type set_type: str
     :param logger: The logger object.
     :type logger: logging.Logger
+    :param emhass_conf: Dictionary containing the needed emhass paths
+    :type emhass_conf: dict
     :return: Returning the params and optimization parameter container.
     :rtype: Tuple[str, dict]
 
     """
-    #
+    # Check if passed params is a dict
     if (params != None) and (params != "null"):
         if type(params) is str:
             params = json.loads(params)
     else:
         params = {}
 
+    # Merge current config categories to params
+    params["retrieve_hass_conf"].update(retrieve_hass_conf)
+    params["optim_conf"].update(optim_conf)
+    params["plant_conf"].update(plant_conf)
+
     # Some default data needed
     custom_deferrable_forecast_id = []
     custom_predicted_temperature_id = []
-    for k in range(optim_conf['number_of_deferrable_loads']):
+    for k in range(params["optim_conf"]["number_of_deferrable_loads"]):
         custom_deferrable_forecast_id.append(
             {
                 "entity_id": "sensor.p_deferrable{}".format(k),
@@ -233,18 +267,132 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
     else:
         params["passed_data"] = default_passed_dict
 
+    # If any runtime parameters where passed in action call
     if runtimeparams is not None:
         if type(runtimeparams) is str:
             runtimeparams = json.loads(runtimeparams)
-        optimization_time_step = int(
-            retrieve_hass_conf['optimization_time_step'].seconds / 60.0)
-        delta_forecast = int(optim_conf['delta_forecast_daily'].days)
-        time_zone = retrieve_hass_conf["time_zone"]
-
+
+        # Loop though parameters stored in association file, Check to see if any stored in runtime
+        # If true, set runtime parameter to params
+        if emhass_conf["associations_path"].exists():
+            with emhass_conf["associations_path"].open("r") as data:
+                associations = list(csv.reader(data, delimiter=","))
+            # Association file key reference
+            # association[0] = config categories
+            # association[1] = legacy parameter name
+            # association[2] = parameter (config.json/config_defaults.json)
+            # association[3] = parameter list name if exists (not used, from legacy options.json)
+            for association in associations:
+                # Check parameter name exists in runtime
+                if runtimeparams.get(association[2], None) is not None:
+                    params[association[0]][association[2]] = runtimeparams[
+                        association[2]
+                    ]
+                # Check Legacy parameter name runtime
+                elif runtimeparams.get(association[1], None) is not None:
+                    params[association[0]][association[2]] = runtimeparams[
+                        association[1]
+                    ]
+        else:
+            logger.warning(
+                "Cant find associations file (associations.csv) in: "
+                + str(emhass_conf["associations_path"])
+            )
+
+        # Generate forecast_dates
+        if (
+            "optimization_time_step" in runtimeparams.keys()
+            or "freq" in runtimeparams.keys()
+        ):
+            optimization_time_step = int(
+                runtimeparams.get("optimization_time_step", runtimeparams.get("freq"))
+            )
+            params["retrieve_hass_conf"]["optimization_time_step"] = pd.to_timedelta(
+                optimization_time_step
+            )
+        else:
+            optimization_time_step = int(
+                params["retrieve_hass_conf"]["optimization_time_step"].seconds / 60.0
+            )
+        if (
+            runtimeparams.get("delta_forecast_daily", None) is not None
+            or runtimeparams.get("delta_forecast", None) is not None
+        ):
+            delta_forecast = int(
+                runtimeparams.get(
+                    "delta_forecast_daily", runtimeparams["delta_forecast"]
+                )
+            )
+            params["optim_conf"]["delta_forecast_daily"] = pd.Timedelta(
+                days=optim_conf["delta_forecast_daily"]
+            )
+        else:
+            delta_forecast = int(params["optim_conf"]["delta_forecast_daily"].days)
+        if runtimeparams.get("time_zone", None) is not None:
+            time_zone = pytz.timezone(params["retrieve_hass_conf"]["time_zone"])
+            params["retrieve_hass_conf"]["time_zone"] = time_zone
+        else:
+            time_zone = params["retrieve_hass_conf"]["time_zone"]
+
         forecast_dates = get_forecast_dates(
-            optimization_time_step, delta_forecast, time_zone)
-
-
+            optimization_time_step, delta_forecast, time_zone
+        )
+
+        # Treat passed forecast data lists
+        list_forecast_key = [
+            "pv_power_forecast",
+            "load_power_forecast",
+            "load_cost_forecast",
+            "prod_price_forecast",
+            "outdoor_temperature_forecast",
+        ]
+        forecast_methods = [
+            "weather_forecast_method",
+            "load_forecast_method",
+            "load_cost_forecast_method",
+            "production_price_forecast_method",
+            "outdoor_temperature_forecast_method",
+        ]
+
+        # Loop forecasts, check if value is a list and greater than or equal to forecast_dates
+        for method, forecast_key in enumerate(list_forecast_key):
+            if forecast_key in runtimeparams.keys():
+                if isinstance(runtimeparams[forecast_key], list) and len(
+                    runtimeparams[forecast_key]
+                ) >= len(forecast_dates):
+                    params["passed_data"][forecast_key] = runtimeparams[forecast_key]
+                    params["optim_conf"][forecast_methods[method]] = "list"
+                else:
+                    logger.error(
+                        f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}"
+                    )
+                    logger.error(
+                        f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}"
+                    )
+                # Check if string contains list, if so extract
+                if isinstance(runtimeparams[forecast_key], str):
+                    if isinstance(ast.literal_eval(runtimeparams[forecast_key]), list):
+                        runtimeparams[forecast_key] = ast.literal_eval(
+                            runtimeparams[forecast_key]
+                        )
+                list_non_digits = [
+                    x
+                    for x in runtimeparams[forecast_key]
+                    if not (isinstance(x, int) or isinstance(x, float))
+                ]
+                if len(list_non_digits) > 0:
+                    logger.warning(
+                        f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)"
+                    )
+                    for x in list_non_digits:
+                        logger.warning(
+                            f"This value in {forecast_key} was detected as non digits: {str(x)}"
+                        )
+            else:
+                params["passed_data"][forecast_key] = None
+
+        # Add runtime exclusive (not in config) parameters to params
+        # regressor-model-fit
         if set_type == "regressor-model-fit":
             if "csv_file" in runtimeparams:
                 csv_file = runtimeparams["csv_file"]
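The net rule implemented by the rewritten forecast-list block: a runtime value is accepted only when it is a real list at least as long as forecast_dates, non-numeric entries are merely warned about, and accepting a list flips the matching *_forecast_method to "list". A standalone sketch of that validation, assuming a 48-step horizon; accept_forecast is a hypothetical helper, not part of the emhass API:

import ast

HORIZON = 48  # stand-in for len(forecast_dates)

def accept_forecast(value):
    """Return the list if it passes the same checks, else None."""
    if isinstance(value, str):
        # Runtime params can arrive as a str-encoded list; extract it first.
        parsed = ast.literal_eval(value)
        if isinstance(parsed, list):
            value = parsed
    if not (isinstance(value, list) and len(value) >= HORIZON):
        return None  # rejected: wrong type or shorter than the horizon
    non_digits = [x for x in value if not isinstance(x, (int, float))]
    return None if non_digits else value

print(accept_forecast([100.0] * 48) is not None)  # True: accepted
print(accept_forecast("[1, 2, 3]"))               # None: shorter than horizon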
@@ -265,7 +413,7 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
             else:
                 date_features = runtimeparams["date_features"]
                 params["passed_data"]["date_features"] = date_features
-
+
         # regressor-model-predict
         if set_type == "regressor-model-predict":
             if "new_values" in runtimeparams:
@@ -280,8 +428,8 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
             if "target" in runtimeparams:
                 target = runtimeparams["target"]
                 params["passed_data"]["target"] = target
-
-        #
+
+        # MPC control case
         if set_type == "naive-mpc-optim":
             if "prediction_horizon" not in runtimeparams.keys():
                 prediction_horizon = 10  # 10 time steps by default
@@ -289,86 +437,71 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
                 prediction_horizon = runtimeparams["prediction_horizon"]
             params["passed_data"]["prediction_horizon"] = prediction_horizon
             if "soc_init" not in runtimeparams.keys():
-                soc_init = plant_conf['battery_target_state_of_charge']
+                soc_init = params["plant_conf"]["battery_target_state_of_charge"]
             else:
                 soc_init = runtimeparams["soc_init"]
             params["passed_data"]["soc_init"] = soc_init
             if "soc_final" not in runtimeparams.keys():
-                soc_final = plant_conf['battery_target_state_of_charge']
+                soc_final = params["plant_conf"]["battery_target_state_of_charge"]
             else:
                 soc_final = runtimeparams["soc_final"]
             params["passed_data"]["soc_final"] = soc_final
-            if 'operating_hours_of_each_deferrable_load' not in runtimeparams.keys():
-                def_total_hours = optim_conf['operating_hours_of_each_deferrable_load']
-            else:
-                def_total_hours = runtimeparams['operating_hours_of_each_deferrable_load']
-            params["passed_data"]['operating_hours_of_each_deferrable_load'] = def_total_hours
-            if 'start_timesteps_of_each_deferrable_load' in runtimeparams.keys():
-                def_start_timestep = runtimeparams['start_timesteps_of_each_deferrable_load']
-            else:
-                def_start_timestep = runtimeparams.get(
-                    'def_start_timestep', optim_conf['start_timesteps_of_each_deferrable_load'])
-            params["passed_data"]['start_timesteps_of_each_deferrable_load'] = def_start_timestep
-            if 'end_timesteps_of_each_deferrable_load' in runtimeparams.keys():
-                def_end_timestep = runtimeparams['end_timesteps_of_each_deferrable_load']
-            else:
-                def_end_timestep = runtimeparams.get(
-                    'def_end_timestep', optim_conf['end_timesteps_of_each_deferrable_load'])
-            params["passed_data"]["end_timesteps_of_each_deferrable_load"] = def_end_timestep
+
             forecast_dates = copy.deepcopy(forecast_dates)[0:prediction_horizon]
+
             # Load the default config
-            if "def_load_config" in optim_conf:
-                for k in range(len(optim_conf["def_load_config"])):
-                    if "thermal_config" in optim_conf["def_load_config"][k]:
-                        if "heater_desired_temperatures" in runtimeparams and len(runtimeparams["heater_desired_temperatures"]) > k:
-                            optim_conf["def_load_config"][k]["thermal_config"]["desired_temperatures"] = runtimeparams["heater_desired_temperatures"][k]
-                        if "heater_start_temperatures" in runtimeparams and len(runtimeparams["heater_start_temperatures"]) > k:
-                            optim_conf["def_load_config"][k]["thermal_config"]["start_temperature"] = runtimeparams["heater_start_temperatures"][k]
+            if "def_load_config" in runtimeparams:
+                params["optim_conf"]["def_load_config"] = runtimeparams[
+                    "def_load_config"
+                ]
+            if "def_load_config" in params["optim_conf"]:
+                for k in range(len(params["optim_conf"]["def_load_config"])):
+                    if "thermal_config" in params["optim_conf"]["def_load_config"][k]:
+                        if (
+                            "heater_desired_temperatures" in runtimeparams
+                            and len(runtimeparams["heater_desired_temperatures"]) > k
+                        ):
+                            params["optim_conf"]["def_load_config"][k][
+                                "thermal_config"
+                            ]["desired_temperatures"] = runtimeparams[
+                                "heater_desired_temperatures"
+                            ][k]
+                        if (
+                            "heater_start_temperatures" in runtimeparams
+                            and len(runtimeparams["heater_start_temperatures"]) > k
+                        ):
+                            params["optim_conf"]["def_load_config"][k][
+                                "thermal_config"
+                            ]["start_temperature"] = runtimeparams[
+                                "heater_start_temperatures"
+                            ][k]
         else:
             params["passed_data"]["prediction_horizon"] = None
             params["passed_data"]["soc_init"] = None
             params["passed_data"]["soc_final"] = None
-            params["passed_data"]['operating_hours_of_each_deferrable_load'] = None
-            params["passed_data"]['start_timesteps_of_each_deferrable_load'] = None
-            params["passed_data"]['end_timesteps_of_each_deferrable_load'] = None
-        # Treat passed forecast data lists
-        list_forecast_key = ['pv_power_forecast', 'load_power_forecast',
-                             'load_cost_forecast', 'prod_price_forecast', 'outdoor_temperature_forecast']
-        forecast_methods = ['weather_forecast_method', 'load_forecast_method', 'load_cost_forecast_method',
-                            'production_price_forecast_method', 'outdoor_temperature_forecast_method']
-
-        # Loop forecasts, check if value is a list and greater than or equal to forecast_dates
-        for method, forecast_key in enumerate(list_forecast_key):
-            if forecast_key in runtimeparams.keys():
-                if type(runtimeparams[forecast_key]) == list and len(runtimeparams[forecast_key]) >= len(forecast_dates):
-                    params['passed_data'][forecast_key] = runtimeparams[forecast_key]
-                    optim_conf[forecast_methods[method]] = 'list'
-                else:
-                    logger.error(
-                        f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}")
-                    logger.error(
-                        f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}")
-                # Check if string contains list, if so extract
-                if type(runtimeparams[forecast_key]) == str:
-                    if type(ast.literal_eval(runtimeparams[forecast_key])) == list:
-                        runtimeparams[forecast_key] = ast.literal_eval(runtimeparams[forecast_key])
-                list_non_digits = [x for x in runtimeparams[forecast_key] if not (
-                    isinstance(x, int) or isinstance(x, float))]
-                if len(list_non_digits) > 0:
-                    logger.warning(
-                        f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)")
-                    for x in list_non_digits:
-                        logger.warning(
-                            f"This value in {forecast_key} was detected as non digits: {str(x)}")
-            else:
-                params['passed_data'][forecast_key] = None
-
+
         # Treat passed data for forecast model fit/predict/tune at runtime
-        if 'historic_days_to_retrieve' in runtimeparams.keys():
-            historic_days_to_retrieve = runtimeparams['historic_days_to_retrieve']
+        if (
+            params["passed_data"].get("historic_days_to_retrieve", None) is not None
+            and params["passed_data"]["historic_days_to_retrieve"] < 9
+        ):
+            logger.warning(
+                "warning `days_to_retrieve` is set to a value less than 9, this could cause an error with the fit"
+            )
+            logger.warning(
+                "setting`passed_data:days_to_retrieve` to 9 for fit/predict/tune"
+            )
+            params["passed_data"]["historic_days_to_retrieve"] = 9
         else:
-            historic_days_to_retrieve = runtimeparams.get('days_to_retrieve', 9)
-        params["passed_data"]['historic_days_to_retrieve'] = historic_days_to_retrieve
+            if params["retrieve_hass_conf"].get("historic_days_to_retrieve", 0) < 9:
+                logger.debug(
+                    "setting`passed_data:days_to_retrieve` to 9 for fit/predict/tune"
+                )
+                params["passed_data"]["historic_days_to_retrieve"] = 9
+            else:
+                params["passed_data"]["historic_days_to_retrieve"] = params[
+                    "retrieve_hass_conf"
+                ]["historic_days_to_retrieve"]
         if "model_type" not in runtimeparams.keys():
             model_type = "load_forecast"
         else:
@@ -403,13 +536,15 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
             perform_backtest = False
         else:
             perform_backtest = ast.literal_eval(
-                str(runtimeparams["perform_backtest"]).capitalize())
+                str(runtimeparams["perform_backtest"]).capitalize()
+            )
         params["passed_data"]["perform_backtest"] = perform_backtest
         if "model_predict_publish" not in runtimeparams.keys():
             model_predict_publish = False
         else:
             model_predict_publish = ast.literal_eval(
-                str(runtimeparams["model_predict_publish"]).capitalize())
+                str(runtimeparams["model_predict_publish"]).capitalize()
+            )
         params["passed_data"]["model_predict_publish"] = model_predict_publish
         if "model_predict_entity_id" not in runtimeparams.keys():
             model_predict_entity_id = "sensor.p_load_forecast_custom_model"
@@ -419,13 +554,19 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
         if "model_predict_unit_of_measurement" not in runtimeparams.keys():
             model_predict_unit_of_measurement = "W"
         else:
-            model_predict_unit_of_measurement = runtimeparams["model_predict_unit_of_measurement"]
-        params["passed_data"]["model_predict_unit_of_measurement"] = model_predict_unit_of_measurement
+            model_predict_unit_of_measurement = runtimeparams[
+                "model_predict_unit_of_measurement"
+            ]
+        params["passed_data"]["model_predict_unit_of_measurement"] = (
+            model_predict_unit_of_measurement
+        )
         if "model_predict_friendly_name" not in runtimeparams.keys():
             model_predict_friendly_name = "Load Power Forecast custom ML model"
         else:
             model_predict_friendly_name = runtimeparams["model_predict_friendly_name"]
-        params["passed_data"]["model_predict_friendly_name"] = model_predict_friendly_name
+        params["passed_data"]["model_predict_friendly_name"] = (
+            model_predict_friendly_name
+        )
         if "mlr_predict_entity_id" not in runtimeparams.keys():
             mlr_predict_entity_id = "sensor.mlr_predict"
         else:
@@ -434,14 +575,18 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
         if "mlr_predict_unit_of_measurement" not in runtimeparams.keys():
             mlr_predict_unit_of_measurement = None
         else:
-            mlr_predict_unit_of_measurement = runtimeparams["mlr_predict_unit_of_measurement"]
-        params["passed_data"]["mlr_predict_unit_of_measurement"] = mlr_predict_unit_of_measurement
+            mlr_predict_unit_of_measurement = runtimeparams[
+                "mlr_predict_unit_of_measurement"
+            ]
+        params["passed_data"]["mlr_predict_unit_of_measurement"] = (
+            mlr_predict_unit_of_measurement
+        )
         if "mlr_predict_friendly_name" not in runtimeparams.keys():
             mlr_predict_friendly_name = "mlr predictor"
         else:
             mlr_predict_friendly_name = runtimeparams["mlr_predict_friendly_name"]
         params["passed_data"]["mlr_predict_friendly_name"] = mlr_predict_friendly_name
-
+
         # Treat passed data for other parameters
         if "alpha" not in runtimeparams.keys():
             alpha = 0.5
@@ -453,24 +598,30 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
         else:
             beta = runtimeparams["beta"]
         params["passed_data"]["beta"] = beta
+
         # Param to save forecast cache (i.e. Solcast)
         if "weather_forecast_cache" not in runtimeparams.keys():
             weather_forecast_cache = False
         else:
            weather_forecast_cache = runtimeparams["weather_forecast_cache"]
         params["passed_data"]["weather_forecast_cache"] = weather_forecast_cache
+
         # Param to make sure optimization only uses cached data. (else produce error)
         if "weather_forecast_cache_only" not in runtimeparams.keys():
             weather_forecast_cache_only = False
         else:
             weather_forecast_cache_only = runtimeparams["weather_forecast_cache_only"]
-        params["passed_data"]["weather_forecast_cache_only"] = weather_forecast_cache_only
+        params["passed_data"]["weather_forecast_cache_only"] = (
+            weather_forecast_cache_only
+        )
+
         # A condition to manually save entity data under data_path/entities after optimization
         if "entity_save" not in runtimeparams.keys():
             entity_save = ""
         else:
             entity_save = runtimeparams["entity_save"]
         params["passed_data"]["entity_save"] = entity_save
+
         # A condition to put a prefix on all published data, or check for saved data under prefix name
         if "publish_prefix" not in runtimeparams.keys():
             publish_prefix = ""
@@ -479,98 +630,25 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
         params["passed_data"]["publish_prefix"] = publish_prefix
 
         # Treat optimization (optim_conf) configuration parameters passed at runtime
-        if 'number_of_deferrable_loads' in runtimeparams.keys():
-            optim_conf['number_of_deferrable_loads'] = runtimeparams['number_of_deferrable_loads']
-        if 'num_def_loads' in runtimeparams.keys():
-            optim_conf['number_of_deferrable_loads'] = runtimeparams['num_def_loads']
-        if 'nominal_power_of_deferrable_loads' in runtimeparams.keys():
-            optim_conf['nominal_power_of_deferrable_loads'] = runtimeparams['nominal_power_of_deferrable_loads']
-        if 'P_deferrable_nom' in runtimeparams.keys():
-            optim_conf['nominal_power_of_deferrable_loads'] = runtimeparams['P_deferrable_nom']
-        if 'operating_hours_of_each_deferrable_load' in runtimeparams.keys():
-            optim_conf['operating_hours_of_each_deferrable_load'] = runtimeparams['operating_hours_of_each_deferrable_load']
-        if 'def_total_hours' in runtimeparams.keys():
-            optim_conf['operating_hours_of_each_deferrable_load'] = runtimeparams['def_total_hours']
-        if 'start_timesteps_of_each_deferrable_load' in runtimeparams.keys():
-            optim_conf['start_timesteps_of_each_deferrable_load'] = runtimeparams['start_timesteps_of_each_deferrable_load']
-        if 'end_timesteps_of_each_deferrable_load' in runtimeparams.keys():
-            optim_conf['end_timesteps_of_each_deferrable_load'] = runtimeparams['end_timesteps_of_each_deferrable_load']
         if "def_current_state" in runtimeparams.keys():
-            optim_conf["def_current_state"] = [
-                bool(s) for s in runtimeparams["def_current_state"]
-        if 'treat_deferrable_load_as_semi_cont' in runtimeparams.keys():
-            optim_conf['treat_deferrable_load_as_semi_cont'] = [
-                ast.literal_eval(str(k).capitalize())
-                for k in runtimeparams['treat_deferrable_load_as_semi_cont']
-            ]
-        if 'treat_def_as_semi_cont' in runtimeparams.keys():
-            optim_conf['treat_deferrable_load_as_semi_cont'] = [
-                ast.literal_eval(str(k).capitalize())
-                for k in runtimeparams['treat_def_as_semi_cont']
-            ]
-        if 'set_deferrable_load_single_constant' in runtimeparams.keys():
-            optim_conf['set_deferrable_load_single_constant'] = [
-                ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_deferrable_load_single_constant']
-            ]
-        if 'set_def_constant' in runtimeparams.keys():
-            optim_conf['set_deferrable_load_single_constant'] = [
-                ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_def_constant']
-            ]
-        if 'set_deferrable_startup_penalty' in runtimeparams.keys():
-            optim_conf['set_deferrable_startup_penalty'] = [
-                ast.literal_eval(str(k).capitalize()) for k in runtimeparams['set_deferrable_startup_penalty']
-            ]
-        if 'def_start_penalty' in runtimeparams.keys():
-            optim_conf['set_deferrable_startup_penalty'] = [
-                ast.literal_eval(str(k).capitalize()) for k in runtimeparams['def_start_penalty']
-            ]
-        if 'def_load_config' in runtimeparams:
-            optim_conf["def_load_config"] = runtimeparams['def_load_config']
-        if 'weight_battery_discharge' in runtimeparams.keys():
-            optim_conf['weight_battery_discharge'] = runtimeparams[
-                'weight_battery_discharge'
+            params["optim_conf"]["def_current_state"] = [
+                bool(s) for s in runtimeparams["def_current_state"]
             ]
-        if 'weight_battery_charge' in runtimeparams.keys():
-            optim_conf['weight_battery_charge'] = runtimeparams['weight_battery_charge']
 
         # Treat retrieve data from Home Assistant (retrieve_hass_conf) configuration parameters passed at runtime
-        if 'optimization_time_step' in runtimeparams.keys():
-            retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(
-                runtimeparams['optimization_time_step'], "minutes")
-        if 'continual_publish' in runtimeparams.keys():
-            retrieve_hass_conf['continual_publish'] = bool(
-                runtimeparams['continual_publish'])
+        # Secrets passed at runtime
         if "solcast_api_key" in runtimeparams.keys():
-            retrieve_hass_conf["solcast_api_key"] = runtimeparams[
-                "solcast_api_key"]
+            params["retrieve_hass_conf"]["solcast_api_key"] = runtimeparams[
+                "solcast_api_key"
+            ]
         if "solcast_rooftop_id" in runtimeparams.keys():
-            retrieve_hass_conf["solcast_rooftop_id"] = runtimeparams[
+            params["retrieve_hass_conf"]["solcast_rooftop_id"] = runtimeparams[
                 "solcast_rooftop_id"
             ]
-            optim_conf['weather_forecast_method'] = "solcast"
         if "solar_forecast_kwp" in runtimeparams.keys():
-            retrieve_hass_conf["solar_forecast_kwp"] = runtimeparams[
+            params["retrieve_hass_conf"]["solar_forecast_kwp"] = runtimeparams[
                 "solar_forecast_kwp"
             ]
-            optim_conf['weather_forecast_method'] = "solar.forecast"
-
-        # Treat system model parameters (plant) configuration parameters passed at runtime
-        if 'battery_minimum_state_of_charge' in runtimeparams.keys() or 'SOCmin' in runtimeparams.keys():
-            plant_conf['battery_minimum_state_of_charge'] = runtimeparams.get(
-                'battery_minimum_state_of_charge', runtimeparams.get('SOCmin'))
-        if 'battery_maximum_state_of_charge' in runtimeparams.keys() or 'SOCmax' in runtimeparams.keys():
-            plant_conf['battery_maximum_state_of_charge'] = runtimeparams.get(
-                'battery_maximum_state_of_charge', runtimeparams.get('SOCmax'))
-        if 'battery_target_state_of_charge' in runtimeparams.keys() or 'SOCtarget' in runtimeparams.keys():
-            plant_conf['battery_target_state_of_charge'] = runtimeparams.get(
-                'battery_target_state_of_charge', runtimeparams.get('SOCtarget'))
-        if 'battery_discharge_power_max' in runtimeparams.keys() or 'Pd_max' in runtimeparams.keys():
-            plant_conf['battery_discharge_power_max'] = runtimeparams.get(
-                'battery_discharge_power_max', runtimeparams.get('Pd_max'))
-        if 'battery_charge_power_max' in runtimeparams.keys() or 'Pc_max' in runtimeparams.keys():
-            plant_conf['battery_charge_power_max'] = runtimeparams.get(
-                'battery_charge_power_max', runtimeparams.get('Pc_max'))
-
         # Treat custom entities id's and friendly names for variables
         if "custom_pv_forecast_id" in runtimeparams.keys():
             params["passed_data"]["custom_pv_forecast_id"] = runtimeparams[
@@ -624,7 +702,12 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
            params["passed_data"]["custom_predicted_temperature_id"] = runtimeparams[
                "custom_predicted_temperature_id"
            ]
-
+
+    # split config categories from params
+    retrieve_hass_conf = params["retrieve_hass_conf"]
+    optim_conf = params["optim_conf"]
+    plant_conf = params["plant_conf"]
+
     # Serialize the final params
     params = json.dumps(params, default=str)
     return params, retrieve_hass_conf, optim_conf, plant_conf
@@ -632,8 +715,8 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
 
 def get_yaml_parse(params: str, logger: logging.Logger) -> Tuple[dict, dict, dict]:
     """
-    Perform parsing of the params into the configuration catagories
-
+    Perform parsing of the params into the configuration catagories
+
     :param params: Built configuration parameters
     :type params: str
     :param logger: The logger object
@@ -653,18 +736,20 @@ def get_yaml_parse(params: str, logger: logging.Logger) -> Tuple[dict, dict, dict]:
         return False, False, False
 
     optim_conf = input_conf.get("optim_conf", {})
-
     retrieve_hass_conf = input_conf.get("retrieve_hass_conf", {})
-
     plant_conf = input_conf.get("plant_conf", {})
 
     # Format time parameters
-    if optim_conf.get('delta_forecast_daily', None) is not None:
-        optim_conf['delta_forecast_daily'] = pd.Timedelta(days=optim_conf['delta_forecast_daily'])
-    if retrieve_hass_conf.get('optimization_time_step', None) is not None:
-        retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(retrieve_hass_conf['optimization_time_step'], "minutes")
-    if retrieve_hass_conf.get('time_zone', None) is not None:
-        retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])
+    if optim_conf.get("delta_forecast_daily", None) is not None:
+        optim_conf["delta_forecast_daily"] = pd.Timedelta(
+            days=optim_conf["delta_forecast_daily"]
+        )
+    if retrieve_hass_conf.get("optimization_time_step", None) is not None:
+        retrieve_hass_conf["optimization_time_step"] = pd.to_timedelta(
+            retrieve_hass_conf["optimization_time_step"], "minutes"
+        )
+    if retrieve_hass_conf.get("time_zone", None) is not None:
+        retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])
 
     return retrieve_hass_conf, optim_conf, plant_conf
 
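The three conversions get_yaml_parse now applies defensively are plain pandas/pytz calls over the raw JSON values. A small sketch with hypothetical raw inputs:

import pandas as pd
import pytz

# Hypothetical raw values as they sit in the serialized params.
retrieve_hass_conf = {"optimization_time_step": 30, "time_zone": "Europe/Paris"}
optim_conf = {"delta_forecast_daily": 1}

optim_conf["delta_forecast_daily"] = pd.Timedelta(days=optim_conf["delta_forecast_daily"])
retrieve_hass_conf["optimization_time_step"] = pd.to_timedelta(
    retrieve_hass_conf["optimization_time_step"], "minutes"
)
retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])

print(retrieve_hass_conf["optimization_time_step"])  # 0 days 00:30:00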
@@ -756,7 +841,9 @@ def get_injection_dict(df: pd.DataFrame, plot_size: Optional[int] = 1366) -> dict:
     return injection_dict
 
 
-def get_injection_dict_forecast_model_fit(df_fit_pred: pd.DataFrame, mlf: MLForecaster) -> dict:
+def get_injection_dict_forecast_model_fit(
+    df_fit_pred: pd.DataFrame, mlf: MLForecaster
+) -> dict:
     """
     Build a dictionary with graphs and tables for the webui for special MLF fit case.
 
@@ -785,7 +872,9 @@ def get_injection_dict_forecast_model_fit(df_fit_pred: pd.DataFrame, mlf: MLForecaster) -> dict:
     return injection_dict
 
 
-def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLForecaster) -> dict:
+def get_injection_dict_forecast_model_tune(
+    df_pred_optim: pd.DataFrame, mlf: MLForecaster
+) -> dict:
     """
     Build a dictionary with graphs and tables for the webui for special MLF tune case.
 
@@ -815,10 +904,16 @@ def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLForecaster) -> dict:
     injection_dict["figure_0"] = image_path_0
     return injection_dict
 
-def build_config(emhass_conf: dict, logger: logging.Logger, defaults_path: str,
-                 config_path: Optional[str] = None, legacy_config_path: Optional[str] = None) -> dict:
+
+def build_config(
+    emhass_conf: dict,
+    logger: logging.Logger,
+    defaults_path: str,
+    config_path: Optional[str] = None,
+    legacy_config_path: Optional[str] = None,
+) -> dict:
     """
-    Retrieve parameters from configuration files.
+    Retrieve parameters from configuration files.
     priority order (low - high) = defaults_path, config_path legacy_config_path
 
     :param emhass_conf: Dictionary containing the needed emhass paths
@@ -837,39 +932,48 @@ def build_config(emhass_conf: dict, logger: logging.Logger, defaults_path: str,
 
     # Read default parameters (default root_path/data/config_defaults.json)
     if defaults_path and pathlib.Path(defaults_path).is_file():
-        with defaults_path.open('r') as data:
+        with defaults_path.open("r") as data:
             config = json.load(data)
     else:
         logger.error("config_defaults.json. does not exist ")
         return False
-
+
     # Read user config parameters if provided (default /share/config.json)
     if config_path and pathlib.Path(config_path).is_file():
-        with config_path.open('r') as data:
+        with config_path.open("r") as data:
             # Set override default parameters (config_defaults) with user given parameters (config.json)
             logger.info("Obtaining parameters from config.json:")
             config.update(json.load(data))
     else:
-        logger.info("config.json does not exist, or has not been passed. config parameters may default to config_defaults.json")
-        logger.info("you may like to generate the config.json file on the configuration page")
+        logger.info(
+            "config.json does not exist, or has not been passed. config parameters may default to config_defaults.json"
+        )
+        logger.info(
+            "you may like to generate the config.json file on the configuration page"
+        )
 
     # Check to see if legacy config_emhass.yaml was provided (default /app/config_emhass.yaml)
     # Convert legacy parameter definitions/format to match config.json
     if legacy_config_path and pathlib.Path(legacy_config_path).is_file():
-        with open(legacy_config_path, 'r') as data:
+        with open(legacy_config_path, "r") as data:
             legacy_config = yaml.load(data, Loader=yaml.FullLoader)
-        legacy_config_parameters = build_legacy_config_params(emhass_conf, legacy_config, logger)
+        legacy_config_parameters = build_legacy_config_params(
+            emhass_conf, legacy_config, logger
+        )
         if type(legacy_config_parameters) is not bool:
-            logger.info("Obtaining parameters from config_emhass.yaml: (will overwrite config parameters)")
-            config.update(legacy_config_parameters)
+            logger.info(
+                "Obtaining parameters from config_emhass.yaml: (will overwrite config parameters)"
+            )
+            config.update(legacy_config_parameters)
 
     return config
 
 
-def build_legacy_config_params(emhass_conf: dict, legacy_config: dict,
-                               logger: logging.Logger) -> dict:
+def build_legacy_config_params(
+    emhass_conf: dict, legacy_config: dict, logger: logging.Logger
+) -> dict:
     """
-    Build a config dictionary with legacy config_emhass.yaml file.
+    Build a config dictionary with legacy config_emhass.yaml file.
     Uses the associations file to convert parameter naming conventions (to config.json/config_defaults.json).
     Extracts the parameter values and formats to match config.json.
 
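The layering in build_config is just successive dict.update calls, so later sources win: config_defaults.json, then config.json, then the legacy config_emhass.yaml. A minimal sketch of that precedence; the keys and values here are made up:

# Made-up keys; only the update order mirrors build_config.
config = {"costfun": "profit", "logging_level": "INFO"}   # config_defaults.json
config.update({"logging_level": "DEBUG"})                  # config.json overrides
config.update({"costfun": "self-consumption"})             # legacy yaml overrides last
print(config)  # {'costfun': 'self-consumption', 'logging_level': 'DEBUG'}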
@@ -883,77 +987,104 @@ def build_legacy_config_params(emhass_conf: dict, legacy_config: dict,
     :rtype: dict
     """
 
-
     # Association file key reference
     # association[0] = config catagories
     # association[1] = legacy parameter name
     # association[2] = parameter (config.json/config_defaults.json)
-    # association[3] = parameter list name if exists (not used, from legacy options.json)
+    # association[3] = parameter list name if exists (not used, from legacy options.json)
 
     # Check each config catagories exists, else create blank dict for categories (avoid errors)
-    legacy_config['retrieve_hass_conf'] = legacy_config.get('retrieve_hass_conf', {})
-    legacy_config['optim_conf'] = legacy_config.get('optim_conf', {})
-    legacy_config['plant_conf'] = legacy_config.get('plant_conf', {})
+    legacy_config["retrieve_hass_conf"] = legacy_config.get("retrieve_hass_conf", {})
+    legacy_config["optim_conf"] = legacy_config.get("optim_conf", {})
+    legacy_config["plant_conf"] = legacy_config.get("plant_conf", {})
     config = {}
 
     # Use associations list to map legacy parameter name with config.json parameter name
-    if emhass_conf['associations_path'].exists():
-        with emhass_conf['associations_path'].open('r') as data:
-            associations = list(csv.reader(data, delimiter=","))
+    if emhass_conf["associations_path"].exists():
+        with emhass_conf["associations_path"].open("r") as data:
+            associations = list(csv.reader(data, delimiter=","))
     else:
-        logger.error("Cant find associations file (associations.csv) in: " + str(emhass_conf['associations_path']))
+        logger.error(
+            "Cant find associations file (associations.csv) in: "
+            + str(emhass_conf["associations_path"])
+        )
         return False
-
+
     # Loop through all parameters in association file
     # Append config with existing legacy config parameters (converting alternative parameter naming conventions with associations list)
     for association in associations:
         # if legacy config catagories exists and if legacy parameter exists in config catagories
-        if legacy_config.get(association[0], None) is not None and legacy_config[association[0]].get(association[1], None) is not None:
+        if (
+            legacy_config.get(association[0], None) is not None
+            and legacy_config[association[0]].get(association[1], None) is not None
+        ):
             config[association[2]] = legacy_config[association[0]][association[1]]
-
+
             # If config now has load_peak_hour_periods, extract from list of dict
-            if association[2] == "load_peak_hour_periods" and type(config[association[2]]) is list:
-                config[association[2]] = dict(
-                    (key, d[key]) for d in config[association[2]] for key in d)
+            if (
+                association[2] == "load_peak_hour_periods"
+                and type(config[association[2]]) is list
+            ):
+                config[association[2]] = dict(
+                    (key, d[key]) for d in config[association[2]] for key in d
+                )
+
     return config
     # params['associations_dict'] = associations_dict
 
-
-def param_to_config(param: dict, logger: logging.Logger) -> dict:
+
+def param_to_config(param: dict, logger: logging.Logger) -> dict:
     """
     A function that extracts the parameters from param back to the config.json format.
     Extracts parameters from config catagories.
     Attempts to exclude secrets hosed in retrieve_hass_conf.
-
+
     :param params: Built configuration parameters
     :type param: dict
     :param logger: The logger object
     :type logger: logging.Logger
     :return: The built config dictionary
     :rtype: dict
-    """
+    """
     logger.debug("Converting param to config")
 
     return_config = {}
 
-    config_catagories = ["retrieve_hass_conf","optim_conf","plant_conf"]
-    secret_params = ["hass_url", "time_zone", "Latitude", "Longitude", "Altitude", "long_lived_token",
-                     "solcast_api_key", "solcast_rooftop_id", "solar_forecast_kwp"]
+    config_catagories = ["retrieve_hass_conf", "optim_conf", "plant_conf"]
+    secret_params = [
+        "hass_url",
+        "time_zone",
+        "Latitude",
+        "Longitude",
+        "Altitude",
+        "long_lived_token",
+        "solcast_api_key",
+        "solcast_rooftop_id",
+        "solar_forecast_kwp",
+    ]
+
     # Loop through config catagories that contain config params, and extract
     for config in config_catagories:
         for parameter in param[config]:
-            # If parameter is not a secret, append to return_config
-            if parameter not in secret_params:
-                return_config[str(parameter)] = param[config][parameter]
-
+            # If parameter is not a secret, append to return_config
+            if parameter not in secret_params:
+                return_config[str(parameter)] = param[config][parameter]
+
     return return_config
 
-
-def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[dict] = {}, options_path: Optional[str] = None,
-                  secrets_path: Optional[str] = None, no_response: Optional[bool] = False) -> Tuple[dict, dict]:
+
+def build_secrets(
+    emhass_conf: dict,
+    logger: logging.Logger,
+    argument: Optional[dict] = {},
+    options_path: Optional[str] = None,
+    secrets_path: Optional[str] = None,
+    no_response: Optional[bool] = False,
+) -> Tuple[dict, dict]:
     """
     Retrieve and build parameters from secrets locations (ENV, ARG, Secrets file (secrets_emhass.yaml/options.json) and/or Home Assistant (via API))
-    priority order (lwo to high) = Defaults (written in function), ENV, Options json file, Home Assistant API, Secrets yaml file, Arguments
+    priority order (lwo to high) = Defaults (written in function), ENV, Options json file, Home Assistant API, Secrets yaml file, Arguments
 
     :param emhass_conf: Dictionary containing the needed emhass paths
     :type emhass_conf: dict
     :param logger: The logger object
     :type logger: logging.Logger
@@ -970,7 +1101,7 @@ def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[
     :rtype: Tuple[dict, dict]:
     """
 
-    #Set defaults to be overwritten
+    # Set defaults to be overwritten
     params_secrets = {
         "hass_url": "https://myhass.duckdns.org/",
         "long_lived_token": "thatverylongtokenhere",
@@ -980,128 +1111,172 @@ def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[
         "Altitude": 4807.8,
         "solcast_api_key": "yoursecretsolcastapikey",
         "solcast_rooftop_id": "yourrooftopid",
-        "solar_forecast_kwp": 5
+        "solar_forecast_kwp": 5,
     }
 
     # Obtain Secrets from ENV?
-    params_secrets['hass_url'] = os.getenv("EMHASS_URL", params_secrets['hass_url'])
-    params_secrets['long_lived_token'] = os.getenv("SUPERVISOR_TOKEN", params_secrets['long_lived_token'])
-    params_secrets['time_zone'] = os.getenv("TIME_ZONE", params_secrets['time_zone'])
-    params_secrets['Latitude'] = float(os.getenv("LAT", params_secrets['Latitude']))
-    params_secrets['Longitude'] = float(os.getenv("LON", params_secrets['Longitude']))
-    params_secrets['Altitude'] = float(os.getenv("ALT", params_secrets['Altitude']))
+    params_secrets["hass_url"] = os.getenv("EMHASS_URL", params_secrets["hass_url"])
+    params_secrets["long_lived_token"] = os.getenv(
+        "SUPERVISOR_TOKEN", params_secrets["long_lived_token"]
+    )
+    params_secrets["time_zone"] = os.getenv("TIME_ZONE", params_secrets["time_zone"])
+    params_secrets["Latitude"] = float(os.getenv("LAT", params_secrets["Latitude"]))
+    params_secrets["Longitude"] = float(os.getenv("LON", params_secrets["Longitude"]))
+    params_secrets["Altitude"] = float(os.getenv("ALT", params_secrets["Altitude"]))
 
     # Obtain secrets from options.json (Generated from EMHASS-Add-on, Home Assistant addon Configuration page) or Home Assistant API (from local Supervisor API)?
     # Use local supervisor API to obtain secrets from Home Assistant if hass_url in options.json is empty and SUPERVISOR_TOKEN ENV exists (provided by Home Assistant when running the container as addon)
     options = {}
     if options_path and pathlib.Path(options_path).is_file():
-        with options_path.open('r') as data:
+        with options_path.open("r") as data:
             options = json.load(data)
-
+
     # Obtain secrets from Home Assistant?
-    url_from_options = options.get('hass_url', 'empty')
-    key_from_options = options.get('long_lived_token', 'empty')
+    url_from_options = options.get("hass_url", "empty")
+    key_from_options = options.get("long_lived_token", "empty")
 
     # If data path specified by options.json, overwrite emhass_conf['data_path']
-    if options.get('data_path', None) != None and pathlib.Path(options['data_path']).exists():
-        emhass_conf['data_path'] = pathlib.Path(options['data_path'])
-
+    if (
+        options.get("data_path", None) != None
+        and pathlib.Path(options["data_path"]).exists()
+    ):
+        emhass_conf["data_path"] = pathlib.Path(options["data_path"])
+
     # Check to use Home Assistant local API
-    if (not no_response
-            and (url_from_options == 'empty' or url_from_options == ''
-                 or url_from_options == "http://supervisor/core/api")
-            and os.getenv("SUPERVISOR_TOKEN", None) is not None):
-        params_secrets['long_lived_token'] = os.getenv("SUPERVISOR_TOKEN", None)
-        params_secrets['hass_url'] = "http://supervisor/core/api"
+    if (
+        not no_response
+        and (
+            url_from_options == "empty"
+            or url_from_options == ""
+            or url_from_options == "http://supervisor/core/api"
+        )
+        and os.getenv("SUPERVISOR_TOKEN", None) is not None
+    ):
+        params_secrets["long_lived_token"] = os.getenv("SUPERVISOR_TOKEN", None)
+        params_secrets["hass_url"] = "http://supervisor/core/api"
         headers = {
-            "Authorization": "Bearer " + params_secrets['long_lived_token'],
-            "content-type": "application/json"
+            "Authorization": "Bearer " + params_secrets["long_lived_token"],
+            "content-type": "application/json",
         }
         # Obtain secrets from Home Assistant via API
         logger.debug("Obtaining secrets from Home Assistant Supervisor API")
-        response = get((params_secrets['hass_url'] + "/config"), headers=headers)
+        response = get(
+            (params_secrets["hass_url"] + "/config"), headers=headers
+        )
         if response.status_code < 400:
             config_hass = response.json()
             params_secrets = {
-                'hass_url': params_secrets['hass_url'],
-                'long_lived_token': params_secrets['long_lived_token'],
-                'time_zone': config_hass['time_zone'],
-                'Latitude': config_hass['latitude'],
-                'Longitude': config_hass['longitude'],
-                'Altitude': config_hass['elevation']
+                "hass_url": params_secrets["hass_url"],
+                "long_lived_token": params_secrets["long_lived_token"],
+                "time_zone": config_hass["time_zone"],
+                "Latitude": config_hass["latitude"],
+                "Longitude": config_hass["longitude"],
+                "Altitude": config_hass["elevation"],
             }
-        else:
+        else:
             # Obtain the url and key secrets if any from options.json (default /app/options.json)
-            logger.warning("Error obtaining secrets from Home Assistant Supervisor API")
+            logger.warning(
+                "Error obtaining secrets from Home Assistant Supervisor API"
+            )
             logger.debug("Obtaining url and key secrets from options.json")
-            if url_from_options != 'empty' and url_from_options != '':
-                params_secrets['hass_url'] = url_from_options
-            if key_from_options != 'empty' and key_from_options != '':
-                params_secrets['long_lived_token'] = key_from_options
-            if options.get('time_zone', 'empty') != 'empty' and options['time_zone'] != '':
-                params_secrets['time_zone'] = options['time_zone']
-            if options.get('Latitude', None) is not None and bool(options['Latitude']):
-                params_secrets['Latitude'] = options['Latitude']
-            if options.get('Longitude', None) is not None and bool(options['Longitude']):
-                params_secrets['Longitude'] = options['Longitude']
-            if options.get('Altitude', None) is not None and bool(options['Altitude']):
-                params_secrets['Altitude'] = options['Altitude']
+            if url_from_options != "empty" and url_from_options != "":
+                params_secrets["hass_url"] = url_from_options
+            if key_from_options != "empty" and key_from_options != "":
+                params_secrets["long_lived_token"] = key_from_options
+            if (
+                options.get("time_zone", "empty") != "empty"
+                and options["time_zone"] != ""
+            ):
+                params_secrets["time_zone"] = options["time_zone"]
+            if options.get("Latitude", None) is not None and bool(
+                options["Latitude"]
+            ):
+                params_secrets["Latitude"] = options["Latitude"]
+            if options.get("Longitude", None) is not None and bool(
+                options["Longitude"]
+            ):
+                params_secrets["Longitude"] = options["Longitude"]
+            if options.get("Altitude", None) is not None and bool(
+                options["Altitude"]
+            ):
+                params_secrets["Altitude"] = options["Altitude"]
     else:
         # Obtain the url and key secrets if any from options.json (default /app/options.json)
         logger.debug("Obtaining url and key secrets from options.json")
-        if url_from_options != 'empty' and url_from_options != '':
-            params_secrets['hass_url'] = url_from_options
-        if key_from_options != 'empty' and key_from_options != '':
-            params_secrets['long_lived_token'] = key_from_options
-        if options.get('time_zone', 'empty') != 'empty' and options['time_zone'] != '':
-            params_secrets['time_zone'] = options['time_zone']
-        if options.get('Latitude', None) is not None and bool(options['Latitude']):
-            params_secrets['Latitude'] = options['Latitude']
-        if options.get('Longitude', None) is not None and bool(options['Longitude']):
-            params_secrets['Longitude'] = options['Longitude']
-        if options.get('Altitude', None) is not None and bool(options['Altitude']):
-            params_secrets['Altitude'] = options['Altitude']
-
+        if url_from_options != "empty" and url_from_options != "":
+            params_secrets["hass_url"] = url_from_options
+        if key_from_options != "empty" and key_from_options != "":
+            params_secrets["long_lived_token"] = key_from_options
+        if (
+            options.get("time_zone", "empty") != "empty"
+            and options["time_zone"] != ""
+        ):
+            params_secrets["time_zone"] = options["time_zone"]
+        if options.get("Latitude", None) is not None and bool(
+            options["Latitude"]
+        ):
+            params_secrets["Latitude"] = options["Latitude"]
+        if options.get("Longitude", None) is not None and bool(
+            options["Longitude"]
+        ):
+            params_secrets["Longitude"] = options["Longitude"]
+        if options.get("Altitude", None) is not None and bool(
+            options["Altitude"]
+        ):
+            params_secrets["Altitude"] = options["Altitude"]
+
     # Obtain the forecast secrets (if any) from options.json (default /app/options.json)
-    forecast_secrets = ["solcast_api_key", "solcast_rooftop_id", "solar_forecast_kwp"]
+    forecast_secrets = [
+        "solcast_api_key",
+        "solcast_rooftop_id",
+        "solar_forecast_kwp",
+    ]
     if any(x in forecast_secrets for x in list(options.keys())):
         logger.debug("Obtaining forecast secrets from options.json")
-        if options.get('solcast_api_key', 'empty') != 'empty' and options['solcast_api_key'] != '':
-            params_secrets['solcast_api_key'] = options['solcast_api_key']
-        if options.get('solcast_rooftop_id', 'empty') != 'empty' and options['solcast_rooftop_id'] != '':
-            params_secrets['solcast_rooftop_id'] = options['solcast_rooftop_id']
-        if options.get('solar_forecast_kwp', None) and bool(options['solar_forecast_kwp']):
-            params_secrets['solar_forecast_kwp'] = options['solar_forecast_kwp']
-
+        if (
+            options.get("solcast_api_key", "empty") != "empty"
+            and options["solcast_api_key"] != ""
+        ):
+            params_secrets["solcast_api_key"] = options["solcast_api_key"]
+        if (
+            options.get("solcast_rooftop_id", "empty") != "empty"
+            and options["solcast_rooftop_id"] != ""
+        ):
+            params_secrets["solcast_rooftop_id"] = options["solcast_rooftop_id"]
+        if options.get("solar_forecast_kwp", None) and bool(
+            options["solar_forecast_kwp"]
+        ):
+            params_secrets["solar_forecast_kwp"] = options["solar_forecast_kwp"]
+
     # Obtain secrets from secrets_emhass.yaml? (default /app/secrets_emhass.yaml)
     if secrets_path and pathlib.Path(secrets_path).is_file():
         logger.debug("Obtaining secrets from secrets file")
-        with open(pathlib.Path(secrets_path), 'r') as file:
+        with open(pathlib.Path(secrets_path), "r") as file:
             params_secrets.update(yaml.load(file, Loader=yaml.FullLoader))
 
-    # Receive key and url from ARG/arguments?
-    if argument.get('url', None) is not None:
-        params_secrets['hass_url'] = argument['url']
-        logger.debug("Obtaining url from passed argument")
-    if argument.get('key', None) is not None:
-        params_secrets['long_lived_token'] = argument['key']
-        logger.debug("Obtaining long_lived_token from passed argument")
-
-    return emhass_conf, params_secrets
-
-
-def build_params(emhass_conf: dict, params_secrets: dict, config: dict,
-                 logger: logging.Logger) -> dict:
+    # Receive key and url from ARG/arguments?
+    if argument.get("url", None) is not None:
+        params_secrets["hass_url"] = argument["url"]
+        logger.debug("Obtaining url from passed argument")
+    if argument.get("key", None) is not None:
+        params_secrets["long_lived_token"] = argument["key"]
+        logger.debug("Obtaining long_lived_token from passed argument")
+
+    return emhass_conf, params_secrets
+
+
+def build_params(
+    emhass_conf: dict, params_secrets: dict, config: dict, logger: logging.Logger
+) -> dict:
     """
     Build the main params dictionary from the config and secrets
     Appends configuration catagories used by emhass to the parameters. (with use of the associations file as a reference)
-
+
     :param emhass_conf: Dictionary containing the needed emhass paths
     :type emhass_conf: dict
     :param params_secrets: The dictionary containing the built secret variables
     :type params_secrets: dict
-    :param config: The dictionary of built config parameters
+    :param config: The dictionary of built config parameters
     :type config: dict
     :param logger: The logger object
     :type logger: logging.Logger
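build_secrets realizes its documented priority by overwriting a single dictionary in stages: in-function defaults, then ENV, then options.json or the Supervisor API, then secrets_emhass.yaml, then explicit arguments. A compact sketch of the cascade for one key; all values here are fake:

import os

params_secrets = {"hass_url": "https://myhass.duckdns.org/"}                      # default
params_secrets["hass_url"] = os.getenv("EMHASS_URL", params_secrets["hass_url"])  # ENV
options = {"hass_url": "http://homeassistant.local:8123"}                         # options.json stand-in
if options.get("hass_url", "empty") not in ("empty", ""):
    params_secrets["hass_url"] = options["hass_url"]
argument = {"url": "http://192.168.1.10:8123"}                                    # CLI argument wins last
if argument.get("url") is not None:
    params_secrets["hass_url"] = argument["url"]
print(params_secrets["hass_url"])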
@@ -1110,104 +1285,203 @@ def build_params(emhass_conf: dict, params_secrets: dict, config: dict,
     """
     if type(params_secrets) is not dict:
         params_secrets = {}
-
+
     params = {}
-    #Start with blank config catagories
-    params[
-    params[
-    params[
-    params[
-
-    # Obtain associations to categorize parameters to their corresponding config catagories
-    if emhass_conf.get(
-
+    # Start with blank config catagories
+    params["retrieve_hass_conf"] = {}
+    params["params_secrets"] = {}
+    params["optim_conf"] = {}
+    params["plant_conf"] = {}
+
+    # Obtain associations to categorize parameters to their corresponding config catagories
+    if emhass_conf.get(
+        "associations_path", get_root(__file__, num_parent=2) / "data/associations.csv"
+    ).exists():
+        with emhass_conf["associations_path"].open("r") as data:
             associations = list(csv.reader(data, delimiter=","))
     else:
-        logger.error(
+        logger.error(
+            "Unable to obtain the associations file (associations.csv) in: "
+            + str(emhass_conf["associations_path"])
+        )
         return False
 
     # Association file key reference
     # association[0] = config catagories
     # association[1] = legacy parameter name
     # association[2] = parameter (config.json/config_defaults.json)
-    # association[3] = parameter list name if exists (not used, from legacy options.json)
-
+    # association[3] = parameter list name if exists (not used, from legacy options.json)
     # Use association list to append parameters from config into params (with corresponding config catagories)
     for association in associations:
-        # If parameter has list_ name and parameter in config is presented with its list name
+        # If parameter has list_ name and parameter in config is presented with its list name
         # (ie, config parameter is in legacy options.json format)
-        if len(association) == 4 and config.get(association[3],None) is not None:
+        if len(association) == 4 and config.get(association[3], None) is not None:
             # Extract lists of dictionaries
             if config[association[3]] and type(config[association[3]][0]) is dict:
-                params[association[0]][association[2]] = [
+                params[association[0]][association[2]] = [
+                    i[association[2]] for i in config[association[3]]
+                ]
             else:
                 params[association[0]][association[2]] = config[association[3]]
-        # Else, directly set value of config parameter to param
-        elif config.get(association[2],None) is not None:
+        # Else, directly set value of config parameter to param
+        elif config.get(association[2], None) is not None:
             params[association[0]][association[2]] = config[association[2]]
 
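Each association row dispatches one flat config key into one of the four category dictionaries initialised above. A small self-contained sketch of that dispatch (the CSV row shown is illustrative; the real rows live in emhass/data/associations.csv):

```python
import csv
from io import StringIO

# Illustrative association row: category, legacy name, parameter name.
data = StringIO("optim_conf,num_def_loads,number_of_deferrable_loads\n")
associations = list(csv.reader(data, delimiter=","))

config = {"number_of_deferrable_loads": 2}  # hypothetical flat config
params = {
    "retrieve_hass_conf": {},
    "params_secrets": {},
    "optim_conf": {},
    "plant_conf": {},
}
for association in associations:
    # Direct (non-list) case from the hunk above.
    if config.get(association[2], None) is not None:
        params[association[0]][association[2]] = config[association[2]]

print(params["optim_conf"])  # {'number_of_deferrable_loads': 2}
```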
     # Check if we need to create `list_hp_periods` from config (ie. legacy options.json format)
-    if
-
-
-
-
-
+    if (
+        params.get("optim_conf", None) is not None
+        and config.get("list_peak_hours_periods_start_hours", None) is not None
+        and config.get("list_peak_hours_periods_end_hours", None) is not None
+    ):
+        start_hours_list = [
+            i["peak_hours_periods_start_hours"]
+            for i in config["list_peak_hours_periods_start_hours"]
+        ]
+        end_hours_list = [
+            i["peak_hours_periods_end_hours"]
+            for i in config["list_peak_hours_periods_end_hours"]
+        ]
+        num_peak_hours = len(start_hours_list)
+        list_hp_periods_list = {
+            "period_hp_" + str(i + 1): [
+                {"start": start_hours_list[i]},
+                {"end": end_hours_list[i]},
+            ]
+            for i in range(num_peak_hours)
+        }
+        params["optim_conf"]["load_peak_hour_periods"] = list_hp_periods_list
     else:
         # Else, check param already contains load_peak_hour_periods from config
-        if params[
-            logger.warning(
+        if params["optim_conf"].get("load_peak_hour_periods", None) is None:
+            logger.warning(
+                "Unable to detect or create load_peak_hour_periods parameter"
+            )
 
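The hunk above converts the two legacy peak-hour lists into the keyed `load_peak_hour_periods` structure. A worked example with hypothetical times:

```python
# Hypothetical legacy options.json-style lists.
config = {
    "list_peak_hours_periods_start_hours": [
        {"peak_hours_periods_start_hours": "02:54"},
        {"peak_hours_periods_start_hours": "17:24"},
    ],
    "list_peak_hours_periods_end_hours": [
        {"peak_hours_periods_end_hours": "15:24"},
        {"peak_hours_periods_end_hours": "20:24"},
    ],
}
start = [i["peak_hours_periods_start_hours"] for i in config["list_peak_hours_periods_start_hours"]]
end = [i["peak_hours_periods_end_hours"] for i in config["list_peak_hours_periods_end_hours"]]
periods = {
    "period_hp_" + str(i + 1): [{"start": start[i]}, {"end": end[i]}]
    for i in range(len(start))
}
print(periods)
# {'period_hp_1': [{'start': '02:54'}, {'end': '15:24'}],
#  'period_hp_2': [{'start': '17:24'}, {'end': '20:24'}]}
```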
     # Format load_peak_hour_periods list to dict if necessary
-    if params[
-
+    if params["optim_conf"].get(
+        "load_peak_hour_periods", None
+    ) is not None and isinstance(params["optim_conf"]["load_peak_hour_periods"], list):
+        params["optim_conf"]["load_peak_hour_periods"] = dict(
+            (key, d[key])
+            for d in params["optim_conf"]["load_peak_hour_periods"]
+            for key in d
+        )
 
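When the periods arrive as a list of single-key dicts rather than one mapping, the generator expression above flattens them. A toy run:

```python
# Hypothetical list-form input, as a runtime parameter might deliver it.
load_peak_hour_periods = [
    {"period_hp_1": [{"start": "02:54"}, {"end": "15:24"}]},
    {"period_hp_2": [{"start": "17:24"}, {"end": "20:24"}]},
]
# Same flattening as in the hunk above.
flattened = dict((key, d[key]) for d in load_peak_hour_periods for key in d)
print(list(flattened.keys()))  # ['period_hp_1', 'period_hp_2']
```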
     # Call function to check parameter lists that require the same length as deferrable loads
     # If not, set defaults it fill in gaps
-    if params[
-        num_def_loads = params[
-        params[
-
-
-
-
-
-
+    if params["optim_conf"].get("number_of_deferrable_loads", None) is not None:
+        num_def_loads = params["optim_conf"]["number_of_deferrable_loads"]
+        params["optim_conf"]["start_timesteps_of_each_deferrable_load"] = (
+            check_def_loads(
+                num_def_loads,
+                params["optim_conf"],
+                0,
+                "start_timesteps_of_each_deferrable_load",
+                logger,
+            )
+        )
+        params["optim_conf"]["end_timesteps_of_each_deferrable_load"] = check_def_loads(
+            num_def_loads,
+            params["optim_conf"],
+            0,
+            "end_timesteps_of_each_deferrable_load",
+            logger,
+        )
+        params["optim_conf"]["set_deferrable_load_single_constant"] = check_def_loads(
+            num_def_loads,
+            params["optim_conf"],
+            False,
+            "set_deferrable_load_single_constant",
+            logger,
+        )
+        params["optim_conf"]["treat_deferrable_load_as_semi_cont"] = check_def_loads(
+            num_def_loads,
+            params["optim_conf"],
+            True,
+            "treat_deferrable_load_as_semi_cont",
+            logger,
+        )
+        params["optim_conf"]["set_deferrable_startup_penalty"] = check_def_loads(
+            num_def_loads,
+            params["optim_conf"],
+            0.0,
+            "set_deferrable_startup_penalty",
+            logger,
+        )
+        params["optim_conf"]["operating_hours_of_each_deferrable_load"] = (
+            check_def_loads(
+                num_def_loads,
+                params["optim_conf"],
+                0,
+                "operating_hours_of_each_deferrable_load",
+                logger,
+            )
+        )
+        params["optim_conf"]["nominal_power_of_deferrable_loads"] = check_def_loads(
+            num_def_loads,
+            params["optim_conf"],
+            0,
+            "nominal_power_of_deferrable_loads",
+            logger,
+        )
     else:
         logger.warning("unable to obtain parameter: number_of_deferrable_loads")
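Each per-load list above is forced to `number_of_deferrable_loads` entries, padded with a type-appropriate default (0, False, True or 0.0). A toy equivalent of the padding that `check_def_loads` (defined further down) performs, with hypothetical values:

```python
# Hypothetical optim_conf: three loads declared, only one nominal power given.
optim_conf = {
    "number_of_deferrable_loads": 3,
    "nominal_power_of_deferrable_loads": [3000.0],
}
num = optim_conf["number_of_deferrable_loads"]
lst = optim_conf["nominal_power_of_deferrable_loads"]
for _ in range(len(lst), num):
    lst.append(0)  # pad with this parameter's default
print(lst)  # [3000.0, 0, 0]
```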
     # historic_days_to_retrieve should be no less then 2
-    if params["retrieve_hass_conf"].get(
-        if params["retrieve_hass_conf"][
-            params["retrieve_hass_conf"][
-            logger.warning(
+    if params["retrieve_hass_conf"].get("historic_days_to_retrieve", None) is not None:
+        if params["retrieve_hass_conf"]["historic_days_to_retrieve"] < 2:
+            params["retrieve_hass_conf"]["historic_days_to_retrieve"] = 2
+            logger.warning(
+                "days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. Make sure your sensors also have at least 2 days of history"
+            )
     else:
         logger.warning("unable to obtain parameter: historic_days_to_retrieve")
 
     # Configure secrets, set params to correct config categorie
     # retrieve_hass_conf
-    params[
-    params[
-
-
-    params[
-    params[
+    params["retrieve_hass_conf"]["hass_url"] = params_secrets.get("hass_url", None)
+    params["retrieve_hass_conf"]["long_lived_token"] = params_secrets.get(
+        "long_lived_token", None
+    )
+    params["retrieve_hass_conf"]["time_zone"] = params_secrets.get("time_zone", None)
+    params["retrieve_hass_conf"]["Latitude"] = params_secrets.get("Latitude", None)
+    params["retrieve_hass_conf"]["Longitude"] = params_secrets.get("Longitude", None)
+    params["retrieve_hass_conf"]["Altitude"] = params_secrets.get("Altitude", None)
     # Update optional param secrets
-    if params["optim_conf"].get(
-        if params["optim_conf"][
-            params["retrieve_hass_conf"]["solcast_api_key"] = params_secrets.get(
-
-
-            params["params_secrets"]["
-
-
-            params["
+    if params["optim_conf"].get("weather_forecast_method", None) is not None:
+        if params["optim_conf"]["weather_forecast_method"] == "solcast":
+            params["retrieve_hass_conf"]["solcast_api_key"] = params_secrets.get(
+                "solcast_api_key", "123456"
+            )
+            params["params_secrets"]["solcast_api_key"] = params_secrets.get(
+                "solcast_api_key", "123456"
+            )
+            params["retrieve_hass_conf"]["solcast_rooftop_id"] = params_secrets.get(
+                "solcast_rooftop_id", "123456"
+            )
+            params["params_secrets"]["solcast_rooftop_id"] = params_secrets.get(
+                "solcast_rooftop_id", "123456"
+            )
+        elif params["optim_conf"]["weather_forecast_method"] == "solar.forecast":
+            params["retrieve_hass_conf"]["solar_forecast_kwp"] = params_secrets.get(
+                "solar_forecast_kwp", 5
+            )
+            params["params_secrets"]["solar_forecast_kwp"] = params_secrets.get(
+                "solar_forecast_kwp", 5
+            )
     else:
-        logger.warning("Unable to detect weather_forecast_method parameter")
+        logger.warning("Unable to detect weather_forecast_method parameter")
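Depending on `weather_forecast_method`, the matching provider credentials are mirrored into both `retrieve_hass_conf` and `params_secrets`, with placeholder fallbacks when a secret is missing. A condensed sketch of the fallback behaviour (the key value is hypothetical):

```python
# Hypothetical secrets: an API key is present, the rooftop id is not.
params_secrets = {"solcast_api_key": "abc123"}

solcast_api_key = params_secrets.get("solcast_api_key", "123456")
solcast_rooftop_id = params_secrets.get("solcast_rooftop_id", "123456")
print(solcast_api_key, solcast_rooftop_id)  # abc123 123456
```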
     # Check if secrets parameters still defaults values
-    secret_params = [
-
-
-
+    secret_params = [
+        "https://myhass.duckdns.org/",
+        "thatverylongtokenhere",
+        45.83,
+        6.86,
+        4807.8,
+    ]
+    if any(x in secret_params for x in params["retrieve_hass_conf"].values()):
+        logger.warning(
+            "Some secret parameters values are still matching their defaults"
+        )
 
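The `any(...)` membership test above flags installs that never replaced the shipped example secrets. A standalone illustration with a hypothetical config:

```python
secret_params = [
    "https://myhass.duckdns.org/",
    "thatverylongtokenhere",
    45.83,
    6.86,
    4807.8,
]
# Hypothetical retrieve_hass_conf: the URL was customised, the latitude was not.
retrieve_hass_conf = {"hass_url": "http://192.168.1.10:8123", "Latitude": 45.83}
if any(x in secret_params for x in retrieve_hass_conf.values()):
    print("Some secret parameters values are still matching their defaults")
```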
     # Set empty dict objects for params passed_data
     # To be latter populated with runtime parameters (treat_runtimeparams)
@@ -1219,16 +1493,19 @@ def build_params(emhass_conf: dict, params_secrets: dict, config: dict,
         "prediction_horizon": None,
         "soc_init": None,
         "soc_final": None,
-
-
-
+        "operating_hours_of_each_deferrable_load": None,
+        "start_timesteps_of_each_deferrable_load": None,
+        "end_timesteps_of_each_deferrable_load": None,
         "alpha": None,
         "beta": None,
     }
 
     return params
 
-
+
+def check_def_loads(
+    num_def_loads: int, parameter: list[dict], default, parameter_name: str, logger
+):
     """
     Check parameter lists with deferrable loads number, if they do not match, enlarge to fit.
 
@@ -1242,12 +1519,21 @@ def check_def_loads(num_def_loads: int, parameter: list[dict], default, paramete
     :type logger: str
     :param logger: The logger object
     :type logger: logging.Logger
-    return: parameter list
+    return: parameter list
     :rtype: list[dict]
 
     """
-    if
-
+    if (
+        parameter.get(parameter_name, None) is not None
+        and type(parameter[parameter_name]) is list
+        and num_def_loads > len(parameter[parameter_name])
+    ):
+        logger.warning(
+            parameter_name
+            + " does not match number in num_def_loads, adding default values ("
+            + str(default)
+            + ") to parameter"
+        )
         for x in range(len(parameter[parameter_name]), num_def_loads):
             parameter[parameter_name].append(default)
     return parameter[parameter_name]
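Taken together, `check_def_loads` pads a too-short per-load list in place and returns it. A usage sketch against the signature above (run with the function in scope, e.g. `from emhass.utils import check_def_loads`; the values are hypothetical):

```python
import logging

from emhass.utils import check_def_loads

logger = logging.getLogger(__name__)

# Three loads declared, but only one semi-continuous flag supplied.
optim_conf = {"treat_deferrable_load_as_semi_cont": [True]}
result = check_def_loads(
    3, optim_conf, True, "treat_deferrable_load_as_semi_cont", logger
)
print(result)  # [True, True, True] — padded in place with the default
```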