emhass-0.11.2-py3-none-any.whl → emhass-0.11.4-py3-none-any.whl
- emhass/command_line.py +702 -373
- emhass/data/associations.csv +1 -1
- emhass/forecast.py +671 -346
- emhass/machine_learning_forecaster.py +204 -105
- emhass/machine_learning_regressor.py +26 -7
- emhass/optimization.py +1017 -471
- emhass/retrieve_hass.py +226 -79
- emhass/static/data/param_definitions.json +5 -4
- emhass/utils.py +687 -443
- emhass/web_server.py +339 -232
- {emhass-0.11.2.dist-info → emhass-0.11.4.dist-info}/METADATA +17 -8
- {emhass-0.11.2.dist-info → emhass-0.11.4.dist-info}/RECORD +16 -16
- {emhass-0.11.2.dist-info → emhass-0.11.4.dist-info}/WHEEL +1 -1
- {emhass-0.11.2.dist-info → emhass-0.11.4.dist-info}/LICENSE +0 -0
- {emhass-0.11.2.dist-info → emhass-0.11.4.dist-info}/entry_points.txt +0 -0
- {emhass-0.11.2.dist-info → emhass-0.11.4.dist-info}/top_level.txt +0 -0
emhass/utils.py
CHANGED
```diff
@@ -1,26 +1,26 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-
+import ast
+import copy
 import csv
-import os
-from typing import Tuple, Optional
-from datetime import datetime, timedelta, timezone
+import json
 import logging
+import os
 import pathlib
-import json
-import copy
+from datetime import datetime, timedelta, timezone
+from typing import Optional, Tuple
+
 import numpy as np
 import pandas as pd
-
-import yaml
+import plotly.express as px
 import pytz
-import ast
+import yaml
+from requests import get
 
-
+from emhass.machine_learning_forecaster import MLForecaster
 
 pd.options.plotting.backend = "plotly"
 
-from emhass.machine_learning_forecaster import MLForecaster
 
 def get_root(file: str, num_parent: Optional[int] = 3) -> str:
     """
```
```diff
@@ -44,8 +44,12 @@ def get_root(file: str, num_parent: Optional[int] = 3) -> str:
     return root
 
 
-def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] = True,
-               logging_level: Optional[str] = "DEBUG") -> Tuple[logging.Logger, logging.StreamHandler]:
+def get_logger(
+    fun_name: str,
+    emhass_conf: dict,
+    save_to_file: Optional[bool] = True,
+    logging_level: Optional[str] = "DEBUG",
+) -> Tuple[logging.Logger, logging.StreamHandler]:
     """
     Create a simple logger object.
 
@@ -64,10 +68,10 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] =
     logger.propagate = True
     logger.fileSetting = save_to_file
     if save_to_file:
-        if os.path.isdir(emhass_conf['data_path']):
-            ch = logging.FileHandler(emhass_conf['data_path'] / 'logger_emhass.log')
+        if os.path.isdir(emhass_conf["data_path"]):
+            ch = logging.FileHandler(emhass_conf["data_path"] / "logger_emhass.log")
         else:
-            raise Exception("Unable to access data_path: "+emhass_conf['data_path'])
+            raise Exception("Unable to access data_path: " + emhass_conf["data_path"])
     else:
         ch = logging.StreamHandler()
     if logging_level == "DEBUG":
@@ -94,8 +98,12 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] =
     return logger, ch
 
 
-def get_forecast_dates(freq: int, delta_forecast: int, time_zone: datetime.tzinfo,
-                       timedelta_days: Optional[int] = 0) -> pd.core.indexes.datetimes.DatetimeIndex:
+def get_forecast_dates(
+    freq: int,
+    delta_forecast: int,
+    time_zone: datetime.tzinfo,
+    timedelta_days: Optional[int] = 0,
+) -> pd.core.indexes.datetimes.DatetimeIndex:
     """
     Get the date_range list of the needed future dates using the delta_forecast parameter.
 
```
```diff
@@ -110,17 +118,36 @@ def get_forecast_dates(freq: int, delta_forecast: int, time_zone: datetime.tzinf
 
     """
     freq = pd.to_timedelta(freq, "minutes")
-    start_forecast = pd.Timestamp(datetime.now()).replace(hour=0, minute=0, second=0, microsecond=0)
-    end_forecast = (start_forecast + pd.Timedelta(days=delta_forecast)).replace(microsecond=0)
-    forecast_dates = pd.date_range(start=start_forecast,
-                                   end=end_forecast + timedelta(days=timedelta_days) - freq,
-                                   freq=freq, tz=time_zone).tz_convert('utc').round(freq, ambiguous='infer', nonexistent='shift_forward').tz_convert(time_zone)
+    start_forecast = pd.Timestamp(datetime.now()).replace(
+        hour=0, minute=0, second=0, microsecond=0
+    )
+    end_forecast = (start_forecast + pd.Timedelta(days=delta_forecast)).replace(
+        microsecond=0
+    )
+    forecast_dates = (
+        pd.date_range(
+            start=start_forecast,
+            end=end_forecast + timedelta(days=timedelta_days) - freq,
+            freq=freq,
+            tz=time_zone,
+        )
+        .tz_convert("utc")
+        .round(freq, ambiguous="infer", nonexistent="shift_forward")
+        .tz_convert(time_zone)
+    )
     return forecast_dates
 
 
-def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
-                        optim_conf: dict, plant_conf: dict, set_type: str,
-                        logger: logging.Logger) -> Tuple[str, dict]:
+def treat_runtimeparams(
+    runtimeparams: str,
+    params: str,
+    retrieve_hass_conf: dict,
+    optim_conf: dict,
+    plant_conf: dict,
+    set_type: str,
+    logger: logging.Logger,
+    emhass_conf: dict,
+) -> Tuple[str, dict]:
     """
     Treat the passed optimization runtime parameters.
 
```
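The rewritten body of `get_forecast_dates` is behaviorally unchanged: it still anchors the range at local midnight, extends it by `delta_forecast` days, and round-trips through UTC so DST gaps and overlaps are normalized. A minimal standalone sketch of the same logic; the 30-minute step, one-day horizon, and `Europe/Paris` zone are illustrative values, not package defaults:

```python
from datetime import datetime

import pandas as pd

# Re-creation of the logic in the hunk above, for illustration only.
freq = pd.to_timedelta(30, "minutes")  # optimization_time_step in minutes
start = pd.Timestamp(datetime.now()).replace(hour=0, minute=0, second=0, microsecond=0)
end = (start + pd.Timedelta(days=1)).replace(microsecond=0)  # delta_forecast = 1 day
forecast_dates = (
    pd.date_range(start=start, end=end - freq, freq=freq, tz="Europe/Paris")
    .tz_convert("utc")
    .round(freq, ambiguous="infer", nonexistent="shift_forward")
    .tz_convert("Europe/Paris")
)
print(len(forecast_dates))  # 48 half-hour steps covering one day
```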
```diff
@@ -128,31 +155,38 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
     :type runtimeparams: str
     :param params: Built configuration parameters
     :type params: str
-    :param retrieve_hass_conf:
+    :param retrieve_hass_conf: Config dictionary for data retrieving parameters.
     :type retrieve_hass_conf: dict
-    :param optim_conf:
+    :param optim_conf: Config dictionary for optimization parameters.
     :type optim_conf: dict
-    :param plant_conf:
+    :param plant_conf: Config dictionary for technical plant parameters.
     :type plant_conf: dict
     :param set_type: The type of action to be performed.
     :type set_type: str
     :param logger: The logger object.
     :type logger: logging.Logger
+    :param emhass_conf: Dictionary containing the needed emhass paths
+    :type emhass_conf: dict
     :return: Returning the params and optimization parameter container.
     :rtype: Tuple[str, dict]
 
     """
-    #
+    # Check if passed params is a dict
     if (params != None) and (params != "null"):
         if type(params) is str:
             params = json.loads(params)
     else:
         params = {}
 
+    # Merge current config categories to params
+    params["retrieve_hass_conf"].update(retrieve_hass_conf)
+    params["optim_conf"].update(optim_conf)
+    params["plant_conf"].update(plant_conf)
+
     # Some default data needed
     custom_deferrable_forecast_id = []
     custom_predicted_temperature_id = []
-    for k in range(optim_conf["number_of_deferrable_loads"]):
+    for k in range(params["optim_conf"]["number_of_deferrable_loads"]):
         custom_deferrable_forecast_id.append(
             {
                 "entity_id": "sensor.p_deferrable{}".format(k),
```
```diff
@@ -233,18 +267,79 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
     else:
         params["passed_data"] = default_passed_dict
 
+    # If any runtime parameters where passed in action call
    if runtimeparams is not None:
         if type(runtimeparams) is str:
             runtimeparams = json.loads(runtimeparams)
-        optimization_time_step = int(
-            retrieve_hass_conf['optimization_time_step'].seconds / 60.0)
-        delta_forecast = int(optim_conf['delta_forecast_daily'].days)
-        time_zone = retrieve_hass_conf["time_zone"]
-
+
+        # Loop though parameters stored in association file, Check to see if any stored in runtime
+        # If true, set runtime parameter to params
+        if emhass_conf["associations_path"].exists():
+            with emhass_conf["associations_path"].open("r") as data:
+                associations = list(csv.reader(data, delimiter=","))
+                # Association file key reference
+                # association[0] = config categories
+                # association[1] = legacy parameter name
+                # association[2] = parameter (config.json/config_defaults.json)
+                # association[3] = parameter list name if exists (not used, from legacy options.json)
+                for association in associations:
+                    # Check parameter name exists in runtime
+                    if runtimeparams.get(association[2], None) is not None:
+                        params[association[0]][association[2]] = runtimeparams[
+                            association[2]
+                        ]
+                    # Check Legacy parameter name runtime
+                    elif runtimeparams.get(association[1], None) is not None:
+                        params[association[0]][association[2]] = runtimeparams[
+                            association[1]
+                        ]
+        else:
+            logger.warning(
+                "Cant find associations file (associations.csv) in: "
+                + str(emhass_conf["associations_path"])
+            )
+
+        # Generate forecast_dates
+        if (
+            "optimization_time_step" in runtimeparams.keys()
+            or "freq" in runtimeparams.keys()
+        ):
+            optimization_time_step = int(
+                runtimeparams.get("optimization_time_step", runtimeparams.get("freq"))
+            )
+            params["retrieve_hass_conf"]["optimization_time_step"] = pd.to_timedelta(
+                optimization_time_step, "minutes"
+            )
+        else:
+            optimization_time_step = int(
+                params["retrieve_hass_conf"]["optimization_time_step"].seconds / 60.0
+            )
+        if (
+            runtimeparams.get("delta_forecast_daily", None) is not None
+            or runtimeparams.get("delta_forecast", None) is not None
+        ):
+            delta_forecast = int(
+                runtimeparams.get(
+                    "delta_forecast_daily", runtimeparams["delta_forecast"]
+                )
+            )
+            params["optim_conf"]["delta_forecast_daily"] = pd.Timedelta(
+                days=optim_conf["delta_forecast_daily"]
+            )
+        else:
+            delta_forecast = int(params["optim_conf"]["delta_forecast_daily"].days)
+        if runtimeparams.get("time_zone", None) is not None:
+            time_zone = pytz.timezone(params["retrieve_hass_conf"]["time_zone"])
+            params["retrieve_hass_conf"]["time_zone"] = time_zone
+        else:
+            time_zone = params["retrieve_hass_conf"]["time_zone"]
+
         forecast_dates = get_forecast_dates(
-            optimization_time_step, delta_forecast, time_zone)
-
-
+            optimization_time_step, delta_forecast, time_zone
+        )
+
+        # Add runtime exclusive (not in config) parameters to params
+        # regressor-model-fit
         if set_type == "regressor-model-fit":
             if "csv_file" in runtimeparams:
                 csv_file = runtimeparams["csv_file"]
```
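This is the structural change of the release: instead of a hand-written `if` per parameter, runtime parameters are matched against `emhass/data/associations.csv`, whose columns are described by the key-reference comments in the hunk. A hedged sketch of that lookup; the two sample rows follow the file's layout but are written here from the legacy pairs visible in this diff (`freq`/`optimization_time_step`, `num_def_loads`/`number_of_deferrable_loads`), not quoted from the shipped file:

```python
import csv
import io

# Illustrative rows in the associations.csv layout:
# config category, legacy parameter name, current parameter name
sample = io.StringIO(
    "retrieve_hass_conf,freq,optimization_time_step\n"
    "optim_conf,num_def_loads,number_of_deferrable_loads\n"
)
associations = list(csv.reader(sample, delimiter=","))

params = {"retrieve_hass_conf": {}, "optim_conf": {}, "plant_conf": {}}
runtimeparams = {"num_def_loads": 2}  # the caller used the legacy name

for association in associations:
    if runtimeparams.get(association[2]) is not None:    # current name wins
        params[association[0]][association[2]] = runtimeparams[association[2]]
    elif runtimeparams.get(association[1]) is not None:  # fall back to the legacy name
        params[association[0]][association[2]] = runtimeparams[association[1]]

print(params["optim_conf"])  # {'number_of_deferrable_loads': 2}
```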
```diff
@@ -265,7 +360,7 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
             else:
                 date_features = runtimeparams["date_features"]
                 params["passed_data"]["date_features"] = date_features
-
+
         # regressor-model-predict
         if set_type == "regressor-model-predict":
             if "new_values" in runtimeparams:
```
```diff
@@ -280,101 +375,143 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
         if "target" in runtimeparams:
             target = runtimeparams["target"]
             params["passed_data"]["target"] = target
-
-        #
+
+        # MPC control case
         if set_type == "naive-mpc-optim":
             if "prediction_horizon" not in runtimeparams.keys():
                 prediction_horizon = 10  # 10 time steps by default
             else:
                 prediction_horizon = runtimeparams["prediction_horizon"]
             params["passed_data"]["prediction_horizon"] = prediction_horizon
-            if 'soc_init' not in runtimeparams.keys():
-                soc_init = plant_conf['battery_target_state_of_charge']
+            if "soc_init" not in runtimeparams.keys():
+                soc_init = params["plant_conf"]["battery_target_state_of_charge"]
             else:
                 soc_init = runtimeparams["soc_init"]
             params["passed_data"]["soc_init"] = soc_init
             if "soc_final" not in runtimeparams.keys():
-                soc_final = plant_conf['battery_target_state_of_charge']
+                soc_final = params["plant_conf"]["battery_target_state_of_charge"]
             else:
                 soc_final = runtimeparams["soc_final"]
             params["passed_data"]["soc_final"] = soc_final
-            if 'operating_hours_of_each_deferrable_load' not in runtimeparams.keys() and 'def_total_hours' not in runtimeparams.keys():
-                def_total_hours = optim_conf.get('operating_hours_of_each_deferrable_load')
-            else:
-                def_total_hours = runtimeparams.get(
-                    'operating_hours_of_each_deferrable_load', runtimeparams.get('def_total_hours'))
-            params["passed_data"]['operating_hours_of_each_deferrable_load'] = def_total_hours
-            if 'start_timesteps_of_each_deferrable_load' not in runtimeparams.keys() and 'def_start_timestep' not in runtimeparams.keys():
-                def_start_timestep = optim_conf.get('start_timesteps_of_each_deferrable_load')
-            else:
-                def_start_timestep = runtimeparams.get(
-                    'start_timesteps_of_each_deferrable_load', runtimeparams.get('def_start_timestep'))
-            params["passed_data"]['start_timesteps_of_each_deferrable_load'] = def_start_timestep
-            if 'end_timesteps_of_each_deferrable_load' not in runtimeparams.keys() and 'def_end_timestep' not in runtimeparams.keys():
-                def_end_timestep = optim_conf.get('end_timesteps_of_each_deferrable_load')
-            else:
-                def_end_timestep = runtimeparams.get(
-                    'end_timesteps_of_each_deferrable_load', runtimeparams.get('def_end_timestep'))
-            params["passed_data"]['end_timesteps_of_each_deferrable_load'] = def_end_timestep
+
+            params["passed_data"]["operating_hours_of_each_deferrable_load"] = params[
+                "optim_conf"
+            ].get("operating_hours_of_each_deferrable_load", None)
+            params["passed_data"]["start_timesteps_of_each_deferrable_load"] = params[
+                "optim_conf"
+            ].get("start_timesteps_of_each_deferrable_load", None)
+            params["passed_data"]["end_timesteps_of_each_deferrable_load"] = params[
+                "optim_conf"
+            ].get("end_timesteps_of_each_deferrable_load", None)
+
             forecast_dates = copy.deepcopy(forecast_dates)[0:prediction_horizon]
+
             # Load the default config
-            if "def_load_config" in optim_conf:
-                for k in range(len(optim_conf["def_load_config"])):
-                    if "thermal_config" in optim_conf["def_load_config"][k]:
-                        if "heater_desired_temperatures" in runtimeparams and len(runtimeparams["heater_desired_temperatures"]) > k:
-                            optim_conf["def_load_config"][k]["thermal_config"]["desired_temperatures"] = runtimeparams["heater_desired_temperatures"][k]
-                        if "heater_start_temperatures" in runtimeparams and len(runtimeparams["heater_start_temperatures"]) > k:
-                            optim_conf["def_load_config"][k]["thermal_config"]["start_temperature"] = runtimeparams["heater_start_temperatures"][k]
+            if "def_load_config" in runtimeparams:
+                params["optim_conf"]["def_load_config"] = runtimeparams[
+                    "def_load_config"
+                ]
+            if "def_load_config" in params["optim_conf"]:
+                for k in range(len(params["optim_conf"]["def_load_config"])):
+                    if "thermal_config" in params["optim_conf"]["def_load_config"][k]:
+                        if (
+                            "heater_desired_temperatures" in runtimeparams
+                            and len(runtimeparams["heater_desired_temperatures"]) > k
+                        ):
+                            params["optim_conf"]["def_load_config"][k][
+                                "thermal_config"
+                            ]["desired_temperatures"] = runtimeparams[
+                                "heater_desired_temperatures"
+                            ][k]
+                        if (
+                            "heater_start_temperatures" in runtimeparams
+                            and len(runtimeparams["heater_start_temperatures"]) > k
+                        ):
+                            params["optim_conf"]["def_load_config"][k][
+                                "thermal_config"
+                            ]["start_temperature"] = runtimeparams[
+                                "heater_start_temperatures"
+                            ][k]
         else:
             params["passed_data"]["prediction_horizon"] = None
             params["passed_data"]["soc_init"] = None
             params["passed_data"]["soc_final"] = None
-            params["passed_data"]['operating_hours_of_each_deferrable_load'] = None
-            params["passed_data"]['start_timesteps_of_each_deferrable_load'] = None
-            params["passed_data"]['end_timesteps_of_each_deferrable_load'] = None
+
         # Treat passed forecast data lists
-        list_forecast_key = ['pv_power_forecast', 'load_power_forecast',
-                             'load_cost_forecast', 'prod_price_forecast',
-                             'outdoor_temperature_forecast']
-        forecast_methods = ['weather_forecast_method', 'load_forecast_method', 'load_cost_forecast_method',
-                            'production_price_forecast_method', 'outdoor_temperature_forecast_method']
+        list_forecast_key = [
+            "pv_power_forecast",
+            "load_power_forecast",
+            "load_cost_forecast",
+            "prod_price_forecast",
+            "outdoor_temperature_forecast",
+        ]
+        forecast_methods = [
+            "weather_forecast_method",
+            "load_forecast_method",
+            "load_cost_forecast_method",
+            "production_price_forecast_method",
+            "outdoor_temperature_forecast_method",
+        ]
+
         # Loop forecasts, check if value is a list and greater than or equal to forecast_dates
         for method, forecast_key in enumerate(list_forecast_key):
             if forecast_key in runtimeparams.keys():
-                if isinstance(runtimeparams[forecast_key], list) and len(runtimeparams[forecast_key]) >= len(forecast_dates):
-                    params['passed_data'][forecast_key] = runtimeparams[forecast_key]
-                    optim_conf[forecast_methods[method]] = 'list'
+                if isinstance(runtimeparams[forecast_key], list) and len(
+                    runtimeparams[forecast_key]
+                ) >= len(forecast_dates):
+                    params["passed_data"][forecast_key] = runtimeparams[forecast_key]
+                    params["optim_conf"][forecast_methods[method]] = "list"
                 else:
                     logger.error(
-                        f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}")
+                        f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}"
+                    )
                     logger.error(
-                        f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}")
+                        f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}"
+                    )
                 # Check if string contains list, if so extract
-                if isinstance(runtimeparams[forecast_key], str):
-                    if isinstance(ast.literal_eval(runtimeparams[forecast_key]), list):
-                        runtimeparams[forecast_key] = ast.literal_eval(runtimeparams[forecast_key])
-                        list_non_digits = [x for x in runtimeparams[forecast_key]
-                                           if not (isinstance(x, int) or isinstance(x, float))]
+                if isinstance(runtimeparams[forecast_key], str):
+                    if isinstance(ast.literal_eval(runtimeparams[forecast_key]), list):
+                        runtimeparams[forecast_key] = ast.literal_eval(
+                            runtimeparams[forecast_key]
+                        )
+                        list_non_digits = [
+                            x
+                            for x in runtimeparams[forecast_key]
+                            if not (isinstance(x, int) or isinstance(x, float))
+                        ]
                         if len(list_non_digits) > 0:
                             logger.warning(
-                                f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)")
+                                f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)"
+                            )
                             for x in list_non_digits:
                                 logger.warning(
-                                    f"This value in {forecast_key} was detected as non digits: {str(x)}")
+                                    f"This value in {forecast_key} was detected as non digits: {str(x)}"
+                                )
             else:
-                params[
-
+                params["passed_data"][forecast_key] = None
+
         # Treat passed data for forecast model fit/predict/tune at runtime
-        if params["passed_data"].get('historic_days_to_retrieve', None) is not None and params["passed_data"]['historic_days_to_retrieve'] < 9:
-            params["passed_data"]['historic_days_to_retrieve'] = 9
+        if (
+            params["passed_data"].get("historic_days_to_retrieve", None) is not None
+            and params["passed_data"]["historic_days_to_retrieve"] < 9
+        ):
+            logger.warning(
+                "warning `days_to_retrieve` is set to a value less than 9, this could cause an error with the fit"
+            )
+            logger.warning(
+                "setting`passed_data:days_to_retrieve` to 9 for fit/predict/tune"
+            )
+            params["passed_data"]["historic_days_to_retrieve"] = 9
         else:
-            if retrieve_hass_conf.get('historic_days_to_retrieve', 0) < 9:
-                logger.debug(
-                    "setting`passed_data:days_to_retrieve` to 9 for fit/predict/tune")
-                params["passed_data"]['historic_days_to_retrieve'] = 9
-            else:
-                params["passed_data"]['historic_days_to_retrieve'] = retrieve_hass_conf[
-                    'historic_days_to_retrieve']
+            if params["retrieve_hass_conf"].get("historic_days_to_retrieve", 0) < 9:
+                logger.debug(
+                    "setting`passed_data:days_to_retrieve` to 9 for fit/predict/tune"
+                )
+                params["passed_data"]["historic_days_to_retrieve"] = 9
+            else:
+                params["passed_data"]["historic_days_to_retrieve"] = params[
+                    "retrieve_hass_conf"
+                ]["historic_days_to_retrieve"]
        if "model_type" not in runtimeparams.keys():
            model_type = "load_forecast"
        else:
```
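The forecast-list handling above accepts a runtime list only when it is at least as long as `forecast_dates`, and it will first unpack a list that arrives JSON-encoded as a string. A standalone sketch of that validation path, with illustrative values:

```python
import ast

forecast_dates = list(range(48))       # stand-in for the real DatetimeIndex
pv_power_forecast = "[100, 200, 300]"  # runtime lists may arrive as strings

# Extract a list hiding inside a string, as the hunk does with ast.literal_eval.
if isinstance(pv_power_forecast, str):
    parsed = ast.literal_eval(pv_power_forecast)
    if isinstance(parsed, list):
        pv_power_forecast = parsed

# Accept only lists that cover every forecast timestep.
if isinstance(pv_power_forecast, list) and len(pv_power_forecast) >= len(forecast_dates):
    print("accepted: forecast method switches to 'list'")
else:
    print(f"rejected: length should be {len(forecast_dates)}")  # this branch runs here
```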
```diff
@@ -409,13 +546,15 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
             perform_backtest = False
         else:
             perform_backtest = ast.literal_eval(
-                str(runtimeparams["perform_backtest"]).capitalize())
+                str(runtimeparams["perform_backtest"]).capitalize()
+            )
         params["passed_data"]["perform_backtest"] = perform_backtest
         if "model_predict_publish" not in runtimeparams.keys():
             model_predict_publish = False
         else:
             model_predict_publish = ast.literal_eval(
-                str(runtimeparams["model_predict_publish"]).capitalize())
+                str(runtimeparams["model_predict_publish"]).capitalize()
+            )
         params["passed_data"]["model_predict_publish"] = model_predict_publish
         if "model_predict_entity_id" not in runtimeparams.keys():
             model_predict_entity_id = "sensor.p_load_forecast_custom_model"
```
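The `ast.literal_eval(str(...).capitalize())` idiom kept in this hunk is how flags that arrive as JSON strings ("true", "False") are coerced into real Python booleans. A small sketch:

```python
import ast

def to_bool(value) -> bool:
    # "true" -> "True" -> True; real booleans pass through unchanged.
    return ast.literal_eval(str(value).capitalize())

print(to_bool("true"), to_bool("False"), to_bool(True))  # True False True
```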
```diff
@@ -425,13 +564,19 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
         if "model_predict_unit_of_measurement" not in runtimeparams.keys():
             model_predict_unit_of_measurement = "W"
         else:
-            model_predict_unit_of_measurement = runtimeparams["model_predict_unit_of_measurement"]
-        params["passed_data"]["model_predict_unit_of_measurement"] = model_predict_unit_of_measurement
+            model_predict_unit_of_measurement = runtimeparams[
+                "model_predict_unit_of_measurement"
+            ]
+        params["passed_data"]["model_predict_unit_of_measurement"] = (
+            model_predict_unit_of_measurement
+        )
         if "model_predict_friendly_name" not in runtimeparams.keys():
             model_predict_friendly_name = "Load Power Forecast custom ML model"
         else:
             model_predict_friendly_name = runtimeparams["model_predict_friendly_name"]
-        params["passed_data"]["model_predict_friendly_name"] = model_predict_friendly_name
+        params["passed_data"]["model_predict_friendly_name"] = (
+            model_predict_friendly_name
+        )
         if "mlr_predict_entity_id" not in runtimeparams.keys():
             mlr_predict_entity_id = "sensor.mlr_predict"
         else:
```
```diff
@@ -440,14 +585,18 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
         if "mlr_predict_unit_of_measurement" not in runtimeparams.keys():
             mlr_predict_unit_of_measurement = None
         else:
-            mlr_predict_unit_of_measurement = runtimeparams["mlr_predict_unit_of_measurement"]
-        params["passed_data"]["mlr_predict_unit_of_measurement"] = mlr_predict_unit_of_measurement
+            mlr_predict_unit_of_measurement = runtimeparams[
+                "mlr_predict_unit_of_measurement"
+            ]
+        params["passed_data"]["mlr_predict_unit_of_measurement"] = (
+            mlr_predict_unit_of_measurement
+        )
         if "mlr_predict_friendly_name" not in runtimeparams.keys():
             mlr_predict_friendly_name = "mlr predictor"
         else:
             mlr_predict_friendly_name = runtimeparams["mlr_predict_friendly_name"]
         params["passed_data"]["mlr_predict_friendly_name"] = mlr_predict_friendly_name
-
+
         # Treat passed data for other parameters
         if "alpha" not in runtimeparams.keys():
             alpha = 0.5
```
```diff
@@ -459,24 +608,30 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
         else:
             beta = runtimeparams["beta"]
         params["passed_data"]["beta"] = beta
+
         # Param to save forecast cache (i.e. Solcast)
         if "weather_forecast_cache" not in runtimeparams.keys():
             weather_forecast_cache = False
         else:
             weather_forecast_cache = runtimeparams["weather_forecast_cache"]
         params["passed_data"]["weather_forecast_cache"] = weather_forecast_cache
+
         # Param to make sure optimization only uses cached data. (else produce error)
         if "weather_forecast_cache_only" not in runtimeparams.keys():
             weather_forecast_cache_only = False
         else:
             weather_forecast_cache_only = runtimeparams["weather_forecast_cache_only"]
-        params["passed_data"]["weather_forecast_cache_only"] = weather_forecast_cache_only
+        params["passed_data"]["weather_forecast_cache_only"] = (
+            weather_forecast_cache_only
+        )
+
         # A condition to manually save entity data under data_path/entities after optimization
         if "entity_save" not in runtimeparams.keys():
             entity_save = ""
         else:
             entity_save = runtimeparams["entity_save"]
         params["passed_data"]["entity_save"] = entity_save
+
         # A condition to put a prefix on all published data, or check for saved data under prefix name
         if "publish_prefix" not in runtimeparams.keys():
             publish_prefix = ""
```
```diff
@@ -485,83 +640,25 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
         params["passed_data"]["publish_prefix"] = publish_prefix
 
         # Treat optimization (optim_conf) configuration parameters passed at runtime
-        if 'number_of_deferrable_loads' in runtimeparams.keys() or 'num_def_loads' in runtimeparams.keys():
-            optim_conf['number_of_deferrable_loads'] = runtimeparams.get(
-                'number_of_deferrable_loads', runtimeparams.get('num_def_loads'))
-        if 'nominal_power_of_deferrable_loads' in runtimeparams.keys() or 'P_deferrable_nom' in runtimeparams.keys():
-            optim_conf['nominal_power_of_deferrable_loads'] = runtimeparams.get(
-                'nominal_power_of_deferrable_loads', runtimeparams.get('P_deferrable_nom'))
-        if 'operating_hours_of_each_deferrable_load' in runtimeparams.keys() or 'def_total_hours' in runtimeparams.keys():
-            optim_conf['operating_hours_of_each_deferrable_load'] = runtimeparams.get(
-                'operating_hours_of_each_deferrable_load', runtimeparams.get('def_total_hours'))
-        if 'start_timesteps_of_each_deferrable_load' in runtimeparams.keys() or 'def_start_timestep' in runtimeparams.keys():
-            optim_conf['start_timesteps_of_each_deferrable_load'] = runtimeparams.get(
-                'start_timesteps_of_each_deferrable_load', runtimeparams.get('def_start_timestep'))
-        if 'end_timesteps_of_each_deferrable_load' in runtimeparams.keys() or 'def_end_timestep' in runtimeparams.keys():
-            optim_conf['end_timesteps_of_each_deferrable_load'] = runtimeparams.get(
-                'end_timesteps_of_each_deferrable_load', runtimeparams.get('def_end_timestep'))
         if "def_current_state" in runtimeparams.keys():
-            optim_conf["def_current_state"] = [
-                bool(s) for s in runtimeparams["def_current_state"]]
-        if 'treat_deferrable_load_as_semi_cont' in runtimeparams.keys() or 'treat_def_as_semi_cont' in runtimeparams.keys():
-            optim_conf['treat_deferrable_load_as_semi_cont'] = [
-                ast.literal_eval(str(k).capitalize()) for k in runtimeparams.get('treat_deferrable_load_as_semi_cont',runtimeparams.get('treat_def_as_semi_cont'))
-            ]
-        if 'set_deferrable_load_single_constant' in runtimeparams.keys() or 'set_def_constant' in runtimeparams.keys():
-            optim_conf['set_deferrable_load_single_constant'] = [
-                ast.literal_eval(str(k).capitalize()) for k in runtimeparams.get('set_deferrable_load_single_constant',runtimeparams.get('set_def_constant'))
-            ]
-        if 'set_deferrable_startup_penalty' in runtimeparams.keys() or 'def_start_penalty' in runtimeparams.keys():
-            optim_conf['set_deferrable_startup_penalty'] = [
-                ast.literal_eval(str(k).capitalize()) for k in runtimeparams.get('set_deferrable_startup_penalty',runtimeparams.get('def_start_penalty'))
+            params["optim_conf"]["def_current_state"] = [
+                bool(s) for s in runtimeparams["def_current_state"]
             ]
-        if 'def_load_config' in runtimeparams:
-            optim_conf["def_load_config"] = runtimeparams['def_load_config']
-        if 'weight_battery_discharge' in runtimeparams.keys():
-            optim_conf['weight_battery_discharge'] = runtimeparams[
-                'weight_battery_discharge'
-            ]
-        if 'weight_battery_charge' in runtimeparams.keys():
-            optim_conf['weight_battery_charge'] = runtimeparams['weight_battery_charge']
 
         # Treat retrieve data from Home Assistant (retrieve_hass_conf) configuration parameters passed at runtime
-        if 'optimization_time_step' in runtimeparams.keys() or 'freq' in runtimeparams.keys():
-            retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(runtimeparams.get(
-                'optimization_time_step', runtimeparams.get('freq')), "minutes")
-        if 'continual_publish' in runtimeparams.keys():
-            retrieve_hass_conf['continual_publish'] = bool(
-                runtimeparams['continual_publish'])
+        # Secrets passed at runtime
         if "solcast_api_key" in runtimeparams.keys():
-            retrieve_hass_conf["solcast_api_key"] = runtimeparams[
-                "solcast_api_key"]
+            params["retrieve_hass_conf"]["solcast_api_key"] = runtimeparams[
+                "solcast_api_key"
+            ]
         if "solcast_rooftop_id" in runtimeparams.keys():
-            retrieve_hass_conf["solcast_rooftop_id"] = runtimeparams[
+            params["retrieve_hass_conf"]["solcast_rooftop_id"] = runtimeparams[
                 "solcast_rooftop_id"
             ]
-            optim_conf['weather_forecast_method'] = "solcast"
         if "solar_forecast_kwp" in runtimeparams.keys():
-            retrieve_hass_conf["solar_forecast_kwp"] = runtimeparams[
+            params["retrieve_hass_conf"]["solar_forecast_kwp"] = runtimeparams[
                 "solar_forecast_kwp"
             ]
-            optim_conf['weather_forecast_method'] = "solar.forecast"
-
-        # Treat system model parameters (plant) configuration parameters passed at runtime
-        if 'battery_minimum_state_of_charge' in runtimeparams.keys() or 'SOCmin' in runtimeparams.keys():
-            plant_conf['battery_minimum_state_of_charge'] = runtimeparams.get(
-                'battery_minimum_state_of_charge', runtimeparams.get('SOCmin'))
-        if 'battery_maximum_state_of_charge' in runtimeparams.keys() or 'SOCmax' in runtimeparams.keys():
-            plant_conf['battery_maximum_state_of_charge'] = runtimeparams.get(
-                'battery_maximum_state_of_charge', runtimeparams.get('SOCmax'))
-        if 'battery_target_state_of_charge' in runtimeparams.keys() or 'SOCtarget' in runtimeparams.keys():
-            plant_conf['battery_target_state_of_charge'] = runtimeparams.get(
-                'battery_target_state_of_charge', runtimeparams.get('SOCtarget'))
-        if 'battery_discharge_power_max' in runtimeparams.keys() or 'Pd_max' in runtimeparams.keys():
-            plant_conf['battery_discharge_power_max'] = runtimeparams.get(
-                'battery_discharge_power_max', runtimeparams.get('Pd_max'))
-        if 'battery_charge_power_max' in runtimeparams.keys() or 'Pc_max' in runtimeparams.keys():
-            plant_conf['battery_charge_power_max'] = runtimeparams.get(
-                'battery_charge_power_max', runtimeparams.get('Pc_max'))
-
         # Treat custom entities id's and friendly names for variables
         if "custom_pv_forecast_id" in runtimeparams.keys():
             params["passed_data"]["custom_pv_forecast_id"] = runtimeparams[
```
```diff
@@ -615,7 +712,12 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
         params["passed_data"]["custom_predicted_temperature_id"] = runtimeparams[
             "custom_predicted_temperature_id"
         ]
-
+
+    # split config categories from params
+    retrieve_hass_conf = params["retrieve_hass_conf"]
+    optim_conf = params["optim_conf"]
+    plant_conf = params["plant_conf"]
+
     # Serialize the final params
     params = json.dumps(params, default=str)
     return params, retrieve_hass_conf, optim_conf, plant_conf
```
```diff
@@ -623,8 +725,8 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
 
 def get_yaml_parse(params: str, logger: logging.Logger) -> Tuple[dict, dict, dict]:
     """
-    Perform parsing of the params into the configuration catagories
-
+    Perform parsing of the params into the configuration catagories
+
     :param params: Built configuration parameters
     :type params: str
     :param logger: The logger object
```
```diff
@@ -648,76 +750,16 @@ def get_yaml_parse(params: str, logger: logging.Logger) -> Tuple[dict, dict, dic
     plant_conf = input_conf.get("plant_conf", {})
 
     # Format time parameters
-    if optim_conf.get('delta_forecast_daily', None) is not None:
-        optim_conf['delta_forecast_daily'] = pd.Timedelta(days=optim_conf['delta_forecast_daily'])
-    if retrieve_hass_conf.get('optimization_time_step', None) is not None:
-        retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(retrieve_hass_conf['optimization_time_step'], "minutes")
-    if retrieve_hass_conf.get('time_zone',None) is not None:
-        retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])
-
-    return retrieve_hass_conf, optim_conf, plant_conf
-
-def get_legacy_yaml_parse(emhass_conf: dict, use_secrets: Optional[bool] = True,
-                          params: Optional[str] = None) -> Tuple[dict, dict, dict]:
-    """
-    Perform parsing of the config.yaml file.
-
-    :param emhass_conf: Dictionary containing the needed emhass paths
-    :type emhass_conf: dict
-    :param use_secrets: Indicate if we should use a secrets file or not.
-        Set to False for unit tests.
-    :type use_secrets: bool, optional
-    :param params: Configuration parameters passed from data/options.json
-    :type params: str
-    :return: A tuple with the dictionaries containing the parsed data
-    :rtype: tuple(dict)
-
-    """
-    if params is None:
-        with open(emhass_conf["config_path"], 'r') as file:
-            input_conf = yaml.load(file, Loader=yaml.FullLoader)
-    else:
-        input_conf = json.loads(params)
-    if use_secrets:
-        if params is None:
-            with open(emhass_conf["config_path"].parent / 'secrets_emhass.yaml', 'r') as file:  # Assume secrets and config file paths are the same
-                input_secrets = yaml.load(file, Loader=yaml.FullLoader)
-        else:
-            input_secrets = input_conf.pop("params_secrets", None)
-
-    if type(input_conf["retrieve_hass_conf"]) == list:  # if using old config version
-        retrieve_hass_conf = dict(
-            {key: d[key] for d in input_conf["retrieve_hass_conf"] for key in d}
+    if optim_conf.get("delta_forecast_daily", None) is not None:
+        optim_conf["delta_forecast_daily"] = pd.Timedelta(
+            days=optim_conf["delta_forecast_daily"]
         )
-    else:
-        retrieve_hass_conf = input_conf.get("retrieve_hass_conf", {})
-
-    if use_secrets:
-        retrieve_hass_conf.update(input_secrets)
-    else:
-        retrieve_hass_conf["hass_url"] = "http://supervisor/core/api"
-        retrieve_hass_conf["long_lived_token"] = "${SUPERVISOR_TOKEN}"
-        retrieve_hass_conf["time_zone"] = "Europe/Paris"
-        retrieve_hass_conf["lat"] = 45.83
-        retrieve_hass_conf["lon"] = 6.86
-        retrieve_hass_conf["alt"] = 4807.8
-    retrieve_hass_conf["freq"] = pd.to_timedelta(retrieve_hass_conf["freq"], "minutes")
-    retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])
-
-    if type(input_conf["optim_conf"]) == list:
-        optim_conf = dict({key: d[key] for d in input_conf["optim_conf"] for key in d})
-    else:
-        optim_conf = input_conf.get("optim_conf", {})
-
-    optim_conf["list_hp_periods"] = dict(
-        (key, d[key]) for d in optim_conf["list_hp_periods"] for key in d
-    )
-    optim_conf["delta_forecast"] = pd.Timedelta(days=optim_conf["delta_forecast"])
-
-    if type(input_conf["plant_conf"]) == list:
-        plant_conf = dict({key: d[key] for d in input_conf["plant_conf"] for key in d})
-    else:
-        plant_conf = input_conf.get("plant_conf", {})
+    if retrieve_hass_conf.get("optimization_time_step", None) is not None:
+        retrieve_hass_conf["optimization_time_step"] = pd.to_timedelta(
+            retrieve_hass_conf["optimization_time_step"], "minutes"
+        )
+    if retrieve_hass_conf.get("time_zone", None) is not None:
+        retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"])
 
     return retrieve_hass_conf, optim_conf, plant_conf
 
```
```diff
@@ -809,7 +851,9 @@ def get_injection_dict(df: pd.DataFrame, plot_size: Optional[int] = 1366) -> dic
     return injection_dict
 
 
-def get_injection_dict_forecast_model_fit(df_fit_pred: pd.DataFrame, mlf: MLForecaster) -> dict:
+def get_injection_dict_forecast_model_fit(
+    df_fit_pred: pd.DataFrame, mlf: MLForecaster
+) -> dict:
     """
     Build a dictionary with graphs and tables for the webui for special MLF fit case.
 
```
```diff
@@ -838,7 +882,9 @@ def get_injection_dict_forecast_model_fit(df_fit_pred: pd.DataFrame, mlf: MLFore
     return injection_dict
 
 
-def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLForecaster) -> dict:
+def get_injection_dict_forecast_model_tune(
+    df_pred_optim: pd.DataFrame, mlf: MLForecaster
+) -> dict:
     """
     Build a dictionary with graphs and tables for the webui for special MLF tune case.
 
```
```diff
@@ -868,10 +914,16 @@ def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLF
     injection_dict["figure_0"] = image_path_0
     return injection_dict
 
-def build_config(emhass_conf: dict, logger: logging.Logger, defaults_path: str,
-                 config_path: Optional[str] = None, legacy_config_path: Optional[str] = None) -> dict:
+
+def build_config(
+    emhass_conf: dict,
+    logger: logging.Logger,
+    defaults_path: str,
+    config_path: Optional[str] = None,
+    legacy_config_path: Optional[str] = None,
+) -> dict:
     """
-    Retrieve parameters from configuration files.
+    Retrieve parameters from configuration files.
     priority order (low - high) = defaults_path, config_path legacy_config_path
 
     :param emhass_conf: Dictionary containing the needed emhass paths
```
```diff
@@ -890,39 +942,48 @@ def build_config(emhass_conf: dict, logger: logging.Logger, defaults_path: str,
 
     # Read default parameters (default root_path/data/config_defaults.json)
     if defaults_path and pathlib.Path(defaults_path).is_file():
-        with defaults_path.open('r') as data:
+        with defaults_path.open("r") as data:
             config = json.load(data)
     else:
         logger.error("config_defaults.json. does not exist ")
         return False
-
+
     # Read user config parameters if provided (default /share/config.json)
     if config_path and pathlib.Path(config_path).is_file():
-        with config_path.open('r') as data:
+        with config_path.open("r") as data:
             # Set override default parameters (config_defaults) with user given parameters (config.json)
             logger.info("Obtaining parameters from config.json:")
             config.update(json.load(data))
     else:
-        logger.info("config.json does not exist, or has not been passed. config parameters may default to config_defaults.json")
-        logger.info("you may like to generate the config.json file on the configuration page")
+        logger.info(
+            "config.json does not exist, or has not been passed. config parameters may default to config_defaults.json"
+        )
+        logger.info(
+            "you may like to generate the config.json file on the configuration page"
+        )
 
     # Check to see if legacy config_emhass.yaml was provided (default /app/config_emhass.yaml)
     # Convert legacy parameter definitions/format to match config.json
     if legacy_config_path and pathlib.Path(legacy_config_path).is_file():
-        with open(legacy_config_path, 'r') as data:
+        with open(legacy_config_path, "r") as data:
             legacy_config = yaml.load(data, Loader=yaml.FullLoader)
-            legacy_config_parameters = build_legacy_config_params(emhass_conf, legacy_config, logger)
+            legacy_config_parameters = build_legacy_config_params(
+                emhass_conf, legacy_config, logger
+            )
             if type(legacy_config_parameters) is not bool:
-                logger.info("Obtaining parameters from config_emhass.yaml: (will overwrite config parameters)")
-                config.update(legacy_config_parameters)
+                logger.info(
+                    "Obtaining parameters from config_emhass.yaml: (will overwrite config parameters)"
+                )
+                config.update(legacy_config_parameters)
 
     return config
 
 
-def build_legacy_config_params(emhass_conf: dict, legacy_config: dict,
-                               logger: logging.Logger) -> dict:
+def build_legacy_config_params(
+    emhass_conf: dict, legacy_config: dict, logger: logging.Logger
+) -> dict:
     """
-    Build a config dictionary with legacy config_emhass.yaml file.
+    Build a config dictionary with legacy config_emhass.yaml file.
     Uses the associations file to convert parameter naming conventions (to config.json/config_defaults.json).
     Extracts the parameter values and formats to match config.json.
 
```
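The control flow above implements the priority order stated in the docstring (low to high: `defaults_path`, `config_path`, `legacy_config_path`) simply by applying each later source with `dict.update`. A minimal sketch of that layering; the keys and values here are illustrative, not the package defaults:

```python
# Later sources override earlier ones, exactly like the successive
# config.update(...) calls in build_config above.
config = {"optimization_time_step": 30, "costfun": "profit"}  # config_defaults.json
config.update({"optimization_time_step": 15})                 # config.json (user)
config.update({"costfun": "self-consumption"})                # legacy config_emhass.yaml
print(config)  # {'optimization_time_step': 15, 'costfun': 'self-consumption'}
```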
```diff
@@ -936,83 +997,111 @@ def build_legacy_config_params(emhass_conf: dict, legacy_config: dict,
     :rtype: dict
     """
 
-
     # Association file key reference
     # association[0] = config catagories
     # association[1] = legacy parameter name
     # association[2] = parameter (config.json/config_defaults.json)
-    # association[3] = parameter list name if exists (not used, from legacy options.json)
+    # association[3] = parameter list name if exists (not used, from legacy options.json)
 
     # Check each config catagories exists, else create blank dict for categories (avoid errors)
-    legacy_config['retrieve_hass_conf'] = legacy_config.get('retrieve_hass_conf', {})
-    legacy_config['optim_conf'] = legacy_config.get('optim_conf', {})
-    legacy_config['plant_conf'] = legacy_config.get('plant_conf', {})
+    legacy_config["retrieve_hass_conf"] = legacy_config.get("retrieve_hass_conf", {})
+    legacy_config["optim_conf"] = legacy_config.get("optim_conf", {})
+    legacy_config["plant_conf"] = legacy_config.get("plant_conf", {})
     config = {}
 
     # Use associations list to map legacy parameter name with config.json parameter name
-    if emhass_conf['associations_path'].exists():
-        with emhass_conf['associations_path'].open('r') as data:
-            associations = list(csv.reader(data, delimiter=","))
+    if emhass_conf["associations_path"].exists():
+        with emhass_conf["associations_path"].open("r") as data:
+            associations = list(csv.reader(data, delimiter=","))
     else:
-        logger.error("Cant find associations file (associations.csv) in: " + str(emhass_conf['associations_path']))
+        logger.error(
+            "Cant find associations file (associations.csv) in: "
+            + str(emhass_conf["associations_path"])
+        )
         return False
-
+
     # Loop through all parameters in association file
     # Append config with existing legacy config parameters (converting alternative parameter naming conventions with associations list)
     for association in associations:
         # if legacy config catagories exists and if legacy parameter exists in config catagories
-        if legacy_config.get(association[0], None) is not None and legacy_config[association[0]].get(association[1], None) is not None:
+        if (
+            legacy_config.get(association[0], None) is not None
+            and legacy_config[association[0]].get(association[1], None) is not None
+        ):
             config[association[2]] = legacy_config[association[0]][association[1]]
-
+
             # If config now has load_peak_hour_periods, extract from list of dict
-            if association[2] == "load_peak_hour_periods" and type(config[association[2]]) is list:
-                config[association[2]] = dict(
-                    (key, d[key]) for d in config[association[2]] for key in d)
+            if (
+                association[2] == "load_peak_hour_periods"
+                and type(config[association[2]]) is list
+            ):
+                config[association[2]] = dict(
+                    (key, d[key]) for d in config[association[2]] for key in d
+                )
+
     return config
     # params['associations_dict'] = associations_dict
 
+
 def param_to_config(param: dict, logger: logging.Logger) -> dict:
     """
     A function that extracts the parameters from param back to the config.json format.
     Extracts parameters from config catagories.
     Attempts to exclude secrets hosed in retrieve_hass_conf.
-
+
     :param params: Built configuration parameters
     :type param: dict
     :param logger: The logger object
     :type logger: logging.Logger
     :return: The built config dictionary
     :rtype: dict
-    """
+    """
     logger.debug("Converting param to config")
 
     return_config = {}
 
-    config_catagories = ["retrieve_hass_conf","optim_conf","plant_conf"]
-    secret_params = ["hass_url", "time_zone", "Latitude", "Longitude", "Altitude", "long_lived_token",
-                     "solcast_api_key", "solcast_rooftop_id", "solar_forecast_kwp"]
+    config_catagories = ["retrieve_hass_conf", "optim_conf", "plant_conf"]
+    secret_params = [
+        "hass_url",
+        "time_zone",
+        "Latitude",
+        "Longitude",
+        "Altitude",
+        "long_lived_token",
+        "solcast_api_key",
+        "solcast_rooftop_id",
+        "solar_forecast_kwp",
+    ]
+
     # Loop through config catagories that contain config params, and extract
     for config in config_catagories:
         for parameter in param[config]:
-            # If parameter is not a secret, append to return_config
-            if parameter not in secret_params:
-                return_config[str(parameter)] = param[config][parameter]
-
+            # If parameter is not a secret, append to return_config
+            if parameter not in secret_params:
+                return_config[str(parameter)] = param[config][parameter]
+
     return return_config
 
-def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[dict] = {}, options_path: Optional[str] = None,
-                  secrets_path: Optional[str] = None, no_response: Optional[bool] = False) -> Tuple[dict, dict]:
+
+def build_secrets(
+    emhass_conf: dict,
+    logger: logging.Logger,
+    argument: Optional[dict] = {},
+    options_path: Optional[str] = None,
+    secrets_path: Optional[str] = None,
+    no_response: Optional[bool] = False,
+) -> Tuple[dict, dict]:
     """
     Retrieve and build parameters from secrets locations (ENV, ARG, Secrets file (secrets_emhass.yaml/options.json) and/or Home Assistant (via API))
     priority order (lwo to high) = Defaults (written in function), ENV, Options json file, Home Assistant API, Secrets yaml file, Arguments
-
+
     :param emhass_conf: Dictionary containing the needed emhass paths
     :type emhass_conf: dict
     :param logger: The logger object
     :type logger: logging.Logger
     :param argument: dictionary of secrets arguments passed (url,key)
     :type argument: dict
-    :param options_path: path to the options file (options.json) (usually provided
+    :param options_path: path to the options file (options.json) (usually provided by EMHASS-Add-on)
     :type options_path: str
     :param secrets_path: path to secrets file (secrets_emhass.yaml)
     :type secrets_path: str
```
```diff
@@ -1022,7 +1111,7 @@ def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[
     :rtype: Tuple[dict, dict]:
     """
 
-    #Set defaults to be overwritten
+    # Set defaults to be overwritten
     params_secrets = {
         "hass_url": "https://myhass.duckdns.org/",
         "long_lived_token": "thatverylongtokenhere",
```
@@ -1032,128 +1121,172 @@ def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[
|
|
1032
1121
|
"Altitude": 4807.8,
|
1033
1122
|
"solcast_api_key": "yoursecretsolcastapikey",
|
1034
1123
|
"solcast_rooftop_id": "yourrooftopid",
|
1035
|
-
"solar_forecast_kwp": 5
|
1124
|
+
"solar_forecast_kwp": 5,
|
1036
1125
|
}
|
1037
1126
|
|
1038
1127
|
# Obtain Secrets from ENV?
|
1039
|
-
params_secrets[
|
1040
|
-
params_secrets[
|
1041
|
-
|
1042
|
-
|
1043
|
-
params_secrets[
|
1044
|
-
params_secrets[
|
1128
|
+
params_secrets["hass_url"] = os.getenv("EMHASS_URL", params_secrets["hass_url"])
|
1129
|
+
params_secrets["long_lived_token"] = os.getenv(
|
1130
|
+
"SUPERVISOR_TOKEN", params_secrets["long_lived_token"]
|
1131
|
+
)
|
1132
|
+
params_secrets["time_zone"] = os.getenv("TIME_ZONE", params_secrets["time_zone"])
|
1133
|
+
params_secrets["Latitude"] = float(os.getenv("LAT", params_secrets["Latitude"]))
|
1134
|
+
params_secrets["Longitude"] = float(os.getenv("LON", params_secrets["Longitude"]))
|
1135
|
+
params_secrets["Altitude"] = float(os.getenv("ALT", params_secrets["Altitude"]))
|
1045
1136
|
|
1046
1137
|
# Obtain secrets from options.json (Generated from EMHASS-Add-on, Home Assistant addon Configuration page) or Home Assistant API (from local Supervisor API)?
|
1047
1138
|
# Use local supervisor API to obtain secrets from Home Assistant if hass_url in options.json is empty and SUPERVISOR_TOKEN ENV exists (provided by Home Assistant when running the container as addon)
|
1048
1139
|
options = {}
|
1049
1140
|
if options_path and pathlib.Path(options_path).is_file():
|
1050
|
-
with options_path.open(
|
1141
|
+
with options_path.open("r") as data:
|
1051
1142
|
options = json.load(data)
|
1052
|
-
|
1143
|
+
|
1053
1144
|
# Obtain secrets from Home Assistant?
|
1054
|
-
url_from_options = options.get(
|
1055
|
-
key_from_options = options.get(
|
1145
|
+
url_from_options = options.get("hass_url", "empty")
|
1146
|
+
key_from_options = options.get("long_lived_token", "empty")
|
1056
1147
|
|
1057
1148
|
# If data path specified by options.json, overwrite emhass_conf['data_path']
|
1058
|
-
if
|
1059
|
-
|
1060
|
-
|
1149
|
+
if (
|
1150
|
+
options.get("data_path", None) != None
|
1151
|
+
and pathlib.Path(options["data_path"]).exists()
|
1152
|
+
):
|
1153
|
+
emhass_conf["data_path"] = pathlib.Path(options["data_path"])
|
1154
|
+
|
1061
1155
|
# Check to use Home Assistant local API
|
1062
|
-
if
|
1063
|
-
|
1064
|
-
|
1065
|
-
|
1066
|
-
|
1067
|
-
|
1156
|
+
if (
|
1157
|
+
not no_response
|
1158
|
+
and (
|
1159
|
+
url_from_options == "empty"
|
1160
|
+
or url_from_options == ""
|
1161
|
+
or url_from_options == "http://supervisor/core/api"
|
1162
|
+
)
|
1163
|
+
and os.getenv("SUPERVISOR_TOKEN", None) is not None
|
1164
|
+
):
|
1165
|
+
params_secrets["long_lived_token"] = os.getenv("SUPERVISOR_TOKEN", None)
|
1166
|
+
params_secrets["hass_url"] = "http://supervisor/core/api"
|
1068
1167
|
headers = {
|
1069
|
-
|
1070
|
-
|
1168
|
+
"Authorization": "Bearer " + params_secrets["long_lived_token"],
|
1169
|
+
"content-type": "application/json",
|
1071
1170
|
}
|
1072
1171
|
# Obtain secrets from Home Assistant via API
|
1073
1172
|
logger.debug("Obtaining secrets from Home Assistant Supervisor API")
|
1074
|
-
response = get(
|
1173
|
+
response = get(
|
1174
|
+
(params_secrets["hass_url"] + "/config"), headers=headers
|
1175
|
+
)
|
1075
1176
|
if response.status_code < 400:
|
1076
1177
|
config_hass = response.json()
|
1077
1178
|
params_secrets = {
|
1078
|
-
|
1079
|
-
|
1080
|
-
|
1081
|
-
|
1082
|
-
|
1083
|
-
|
1179
|
+
"hass_url": params_secrets["hass_url"],
|
1180
|
+
"long_lived_token": params_secrets["long_lived_token"],
|
1181
|
+
"time_zone": config_hass["time_zone"],
|
1182
|
+
"Latitude": config_hass["latitude"],
|
1183
|
+
"Longitude": config_hass["longitude"],
|
1184
|
+
"Altitude": config_hass["elevation"],
|
1084
1185
|
}
|
1085
|
-
else:
|
1186
|
+
else:
|
1086
1187
|
# Obtain the url and key secrets if any from options.json (default /app/options.json)
|
1087
|
-
logger.warning(
|
1188
|
+
logger.warning(
|
1189
|
+
"Error obtaining secrets from Home Assistant Supervisor API"
|
1190
|
+
)
|
1088
1191
|
logger.debug("Obtaining url and key secrets from options.json")
|
1089
|
-
if url_from_options !=
|
1090
|
-
params_secrets[
|
1091
|
-
if key_from_options !=
|
1092
|
-
params_secrets[
|
1093
|
-
if
|
1094
|
-
|
1095
|
-
|
1096
|
-
|
1097
|
-
|
1098
|
-
|
1099
|
-
|
1100
|
-
|
1192
|
+
if url_from_options != "empty" and url_from_options != "":
|
1193
|
+
params_secrets["hass_url"] = url_from_options
|
1194
|
+
if key_from_options != "empty" and key_from_options != "":
|
1195
|
+
params_secrets["long_lived_token"] = key_from_options
|
1196
|
+
if (
|
1197
|
+
options.get("time_zone", "empty") != "empty"
|
1198
|
+
and options["time_zone"] != ""
|
1199
|
+
):
|
1200
|
+
params_secrets["time_zone"] = options["time_zone"]
|
1201
|
+
if options.get("Latitude", None) is not None and bool(
|
1202
|
+
options["Latitude"]
|
1203
|
+
):
|
1204
|
+
params_secrets["Latitude"] = options["Latitude"]
|
1205
|
+
if options.get("Longitude", None) is not None and bool(
|
1206
|
+
options["Longitude"]
|
1207
|
+
):
|
1208
|
+
params_secrets["Longitude"] = options["Longitude"]
|
1209
|
+
if options.get("Altitude", None) is not None and bool(
|
1210
|
+
options["Altitude"]
|
1211
|
+
):
|
1212
|
+
params_secrets["Altitude"] = options["Altitude"]
|
1101
1213
|
else:
|
1102
1214
|
# Obtain the url and key secrets if any from options.json (default /app/options.json)
|
1103
1215
|
logger.debug("Obtaining url and key secrets from options.json")
|
1104
|
-
if url_from_options !=
|
1105
|
-
params_secrets[
|
1106
|
-
if key_from_options !=
|
1107
|
-
params_secrets[
|
1108
|
-
if
|
1109
|
-
|
1110
|
-
|
1111
|
-
|
1112
|
-
|
1113
|
-
|
1114
|
-
|
1115
|
-
|
1116
|
-
|
1216
|
+
if url_from_options != "empty" and url_from_options != "":
|
1217
|
+
params_secrets["hass_url"] = url_from_options
|
1218
|
+
if key_from_options != "empty" and key_from_options != "":
|
1219
|
+
params_secrets["long_lived_token"] = key_from_options
|
1220
|
+
if (
|
1221
|
+
options.get("time_zone", "empty") != "empty"
|
1222
|
+
and options["time_zone"] != ""
|
1223
|
+
):
|
1224
|
+
params_secrets["time_zone"] = options["time_zone"]
|
1225
|
+
if options.get("Latitude", None) is not None and bool(
|
1226
|
+
options["Latitude"]
|
1227
|
+
):
|
1228
|
+
params_secrets["Latitude"] = options["Latitude"]
|
1229
|
+
if options.get("Longitude", None) is not None and bool(
|
1230
|
+
options["Longitude"]
|
1231
|
+
):
|
1232
|
+
params_secrets["Longitude"] = options["Longitude"]
|
1233
|
+
if options.get("Altitude", None) is not None and bool(
|
1234
|
+
options["Altitude"]
|
1235
|
+
):
|
1236
|
+
params_secrets["Altitude"] = options["Altitude"]
|
1237
|
+
|
1117
1238
|
# Obtain the forecast secrets (if any) from options.json (default /app/options.json)
|
1118
|
-
forecast_secrets = [
|
1239
|
+
forecast_secrets = [
|
1240
|
+
"solcast_api_key",
|
1241
|
+
"solcast_rooftop_id",
|
1242
|
+
"solar_forecast_kwp",
|
1243
|
+
]
|
1119
1244
|
if any(x in forecast_secrets for x in list(options.keys())):
|
1120
1245
|
logger.debug("Obtaining forecast secrets from options.json")
|
1121
|
-
if
|
1122
|
-
|
1123
|
-
|
1124
|
-
|
1125
|
-
|
1126
|
-
|
1127
|
-
|
1246
|
+
if (
|
1247
|
+
options.get("solcast_api_key", "empty") != "empty"
|
1248
|
+
and options["solcast_api_key"] != ""
|
1249
|
+
):
|
1250
|
+
params_secrets["solcast_api_key"] = options["solcast_api_key"]
|
1251
|
+
if (
|
1252
|
+
options.get("solcast_rooftop_id", "empty") != "empty"
|
1253
|
+
and options["solcast_rooftop_id"] != ""
|
1254
|
+
):
|
1255
|
+
params_secrets["solcast_rooftop_id"] = options["solcast_rooftop_id"]
|
1256
|
+
if options.get("solar_forecast_kwp", None) and bool(
|
1257
|
+
options["solar_forecast_kwp"]
|
1258
|
+
):
|
1259
|
+
params_secrets["solar_forecast_kwp"] = options["solar_forecast_kwp"]
|
1260
|
+
|
1128
1261
|
# Obtain secrets from secrets_emhass.yaml? (default /app/secrets_emhass.yaml)
|
1129
1262
|
if secrets_path and pathlib.Path(secrets_path).is_file():
|
1130
1263
|
logger.debug("Obtaining secrets from secrets file")
|
1131
|
-
with open(pathlib.Path(secrets_path),
|
1264
|
+
with open(pathlib.Path(secrets_path), "r") as file:
|
1132
1265
|
params_secrets.update(yaml.load(file, Loader=yaml.FullLoader))
|
1133
1266
|
|
1134
|
-
# Receive key and url from ARG/arguments?
|
1135
|
-
if argument.get(
|
1136
|
-
|
1137
|
-
|
1138
|
-
if argument.get(
|
1139
|
-
params_secrets[
|
1140
|
-
logger.debug("Obtaining long_lived_token from passed argument")
|
1141
|
-
|
1142
|
-
return emhass_conf, params_secrets
|
1143
|
-
|
1144
|
-
|
1145
|
-
|
1146
|
-
|
1147
|
-
|
1267
|
+
# Receive key and url from ARG/arguments?
|
1268
|
+
if argument.get("url", None) is not None:
|
1269
|
+
params_secrets["hass_url"] = argument["url"]
|
1270
|
+
logger.debug("Obtaining url from passed argument")
|
1271
|
+
if argument.get("key", None) is not None:
|
1272
|
+
params_secrets["long_lived_token"] = argument["key"]
|
1273
|
+
logger.debug("Obtaining long_lived_token from passed argument")
|
1274
|
+
|
1275
|
+
return emhass_conf, params_secrets
|
1276
|
+
|
1277
|
+
|
1278
|
+
def build_params(
|
1279
|
+
emhass_conf: dict, params_secrets: dict, config: dict, logger: logging.Logger
|
1280
|
+
) -> dict:
|
1148
1281
|
"""
|
1149
1282
|
Build the main params dictionary from the config and secrets
|
1150
1283
|
Appends configuration catagories used by emhass to the parameters. (with use of the associations file as a reference)
|
1151
|
-
|
1284
|
+
|
1152
1285
|
:param emhass_conf: Dictionary containing the needed emhass paths
|
1153
1286
|
:type emhass_conf: dict
|
1154
1287
|
:param params_secrets: The dictionary containing the built secret variables
|
1155
1288
|
:type params_secrets: dict
|
1156
|
-
:param config: The dictionary of built config parameters
|
1289
|
+
:param config: The dictionary of built config parameters
|
1157
1290
|
:type config: dict
|
1158
1291
|
:param logger: The logger object
|
1159
1292
|
:type logger: logging.Logger
|
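The rewritten tail of `build_secrets` resolves values in a fixed order: the add-on `options.json` is read first, anything found in `secrets_emhass.yaml` is merged over it, and a URL or key passed as a command-line argument is applied last and therefore wins. A minimal sketch of that precedence, using invented placeholder values rather than real EMHASS configuration:

```python
# Minimal sketch of the secrets precedence above; all values are invented placeholders.
params_secrets = {"hass_url": "http://from-options.local"}     # read from options.json
params_secrets.update({"hass_url": "http://from-yaml.local"})  # secrets_emhass.yaml overrides
argument = {"url": "http://from-cli.local", "key": None}       # parsed CLI arguments
if argument.get("url", None) is not None:
    params_secrets["hass_url"] = argument["url"]               # CLI value wins last
if argument.get("key", None) is not None:
    params_secrets["long_lived_token"] = argument["key"]
print(params_secrets["hass_url"])  # -> http://from-cli.local
```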
```diff
@@ -1162,104 +1295,203 @@ def build_params(emhass_conf: dict, params_secrets: dict, config: dict,
     """
     if type(params_secrets) is not dict:
         params_secrets = {}
-
+
     params = {}
-    #Start with blank config catagories
-    params['retrieve_hass_conf'] = {}
-    params['params_secrets'] = {}
-    params['optim_conf'] = {}
-    params['plant_conf'] = {}
-
-    # Obtain associations to categorize parameters to their corresponding config catagories
-    if emhass_conf.get('associations_path', get_root(__file__, num_parent=2) / 'data/associations.csv').exists():
-        with emhass_conf['associations_path'].open('r') as data:
+    # Start with blank config catagories
+    params["retrieve_hass_conf"] = {}
+    params["params_secrets"] = {}
+    params["optim_conf"] = {}
+    params["plant_conf"] = {}
+
+    # Obtain associations to categorize parameters to their corresponding config catagories
+    if emhass_conf.get(
+        "associations_path", get_root(__file__, num_parent=2) / "data/associations.csv"
+    ).exists():
+        with emhass_conf["associations_path"].open("r") as data:
             associations = list(csv.reader(data, delimiter=","))
     else:
-        logger.error("Unable to obtain the associations file (associations.csv) in: " + str(emhass_conf['associations_path']))
+        logger.error(
+            "Unable to obtain the associations file (associations.csv) in: "
+            + str(emhass_conf["associations_path"])
+        )
         return False
 
     # Association file key reference
     # association[0] = config catagories
     # association[1] = legacy parameter name
     # association[2] = parameter (config.json/config_defaults.json)
-    # association[3] = parameter list name if exists (not used, from legacy options.json)
-
+    # association[3] = parameter list name if exists (not used, from legacy options.json)
     # Use association list to append parameters from config into params (with corresponding config catagories)
     for association in associations:
-        # If parameter has list_ name and parameter in config is presented with its list name
+        # If parameter has list_ name and parameter in config is presented with its list name
         # (ie, config parameter is in legacy options.json format)
-        if len(association) == 4 and config.get(association[3],None) is not None:
+        if len(association) == 4 and config.get(association[3], None) is not None:
             # Extract lists of dictionaries
             if config[association[3]] and type(config[association[3]][0]) is dict:
-                params[association[0]][association[2]] = [i[association[2]] for i in config[association[3]]]
+                params[association[0]][association[2]] = [
+                    i[association[2]] for i in config[association[3]]
+                ]
             else:
                 params[association[0]][association[2]] = config[association[3]]
-        # Else, directly set value of config parameter to param
-        elif config.get(association[2],None) is not None:
+        # Else, directly set value of config parameter to param
+        elif config.get(association[2], None) is not None:
             params[association[0]][association[2]] = config[association[2]]
 
     # Check if we need to create `list_hp_periods` from config (ie. legacy options.json format)
-    if params.get('optim_conf',None) is not None and config.get('list_peak_hours_periods_start_hours', None) is not None and config.get('list_peak_hours_periods_end_hours', None) is not None:
-        start_hours_list = [i['peak_hours_periods_start_hours'] for i in config['list_peak_hours_periods_start_hours']]
-        end_hours_list = [i['peak_hours_periods_end_hours'] for i in config['list_peak_hours_periods_end_hours']]
-        num_peak_hours = len(start_hours_list)
-        list_hp_periods_list = {'period_hp_' + str(i + 1): [{'start': start_hours_list[i]}, {'end': end_hours_list[i]}] for i in range(num_peak_hours)}
-        params['optim_conf']['load_peak_hour_periods'] = list_hp_periods_list
+    if (
+        params.get("optim_conf", None) is not None
+        and config.get("list_peak_hours_periods_start_hours", None) is not None
+        and config.get("list_peak_hours_periods_end_hours", None) is not None
+    ):
+        start_hours_list = [
+            i["peak_hours_periods_start_hours"]
+            for i in config["list_peak_hours_periods_start_hours"]
+        ]
+        end_hours_list = [
+            i["peak_hours_periods_end_hours"]
+            for i in config["list_peak_hours_periods_end_hours"]
+        ]
+        num_peak_hours = len(start_hours_list)
+        list_hp_periods_list = {
+            "period_hp_" + str(i + 1): [
+                {"start": start_hours_list[i]},
+                {"end": end_hours_list[i]},
+            ]
+            for i in range(num_peak_hours)
+        }
+        params["optim_conf"]["load_peak_hour_periods"] = list_hp_periods_list
     else:
         # Else, check param already contains load_peak_hour_periods from config
-        if params['optim_conf'].get('load_peak_hour_periods',None) is None:
-            logger.warning("Unable to detect or create load_peak_hour_periods parameter")
+        if params["optim_conf"].get("load_peak_hour_periods", None) is None:
+            logger.warning(
+                "Unable to detect or create load_peak_hour_periods parameter"
+            )
 
     # Format load_peak_hour_periods list to dict if necessary
-    if params['optim_conf'].get('load_peak_hour_periods',None) is not None and isinstance(params['optim_conf']['load_peak_hour_periods'], list):
-        params['optim_conf']['load_peak_hour_periods'] = dict((key, d[key]) for d in params['optim_conf']['load_peak_hour_periods'] for key in d)
+    if params["optim_conf"].get(
+        "load_peak_hour_periods", None
+    ) is not None and isinstance(params["optim_conf"]["load_peak_hour_periods"], list):
+        params["optim_conf"]["load_peak_hour_periods"] = dict(
+            (key, d[key])
+            for d in params["optim_conf"]["load_peak_hour_periods"]
+            for key in d
+        )
 
     # Call function to check parameter lists that require the same length as deferrable loads
     # If not, set defaults it fill in gaps
-    if params['optim_conf'].get('number_of_deferrable_loads',None) is not None:
-        num_def_loads = params['optim_conf']['number_of_deferrable_loads']
-        params['optim_conf']['start_timesteps_of_each_deferrable_load'] = check_def_loads(num_def_loads, params['optim_conf'], 0, 'start_timesteps_of_each_deferrable_load', logger)
-        params['optim_conf']['end_timesteps_of_each_deferrable_load'] = check_def_loads(num_def_loads, params['optim_conf'], 0, 'end_timesteps_of_each_deferrable_load', logger)
-        params['optim_conf']['set_deferrable_load_single_constant'] = check_def_loads(num_def_loads, params['optim_conf'], False, 'set_deferrable_load_single_constant', logger)
-        params['optim_conf']['treat_deferrable_load_as_semi_cont'] = check_def_loads(num_def_loads, params['optim_conf'], True, 'treat_deferrable_load_as_semi_cont', logger)
-        params['optim_conf']['set_deferrable_startup_penalty'] = check_def_loads(num_def_loads, params['optim_conf'], 0.0, 'set_deferrable_startup_penalty', logger)
-        params['optim_conf']['operating_hours_of_each_deferrable_load'] = check_def_loads(num_def_loads, params['optim_conf'], 0, 'operating_hours_of_each_deferrable_load', logger)
-        params['optim_conf']['nominal_power_of_deferrable_loads'] = check_def_loads(num_def_loads, params['optim_conf'], 0, 'nominal_power_of_deferrable_loads', logger)
+    if params["optim_conf"].get("number_of_deferrable_loads", None) is not None:
+        num_def_loads = params["optim_conf"]["number_of_deferrable_loads"]
+        params["optim_conf"]["start_timesteps_of_each_deferrable_load"] = (
+            check_def_loads(
+                num_def_loads,
+                params["optim_conf"],
+                0,
+                "start_timesteps_of_each_deferrable_load",
+                logger,
+            )
+        )
+        params["optim_conf"]["end_timesteps_of_each_deferrable_load"] = check_def_loads(
+            num_def_loads,
+            params["optim_conf"],
+            0,
+            "end_timesteps_of_each_deferrable_load",
+            logger,
+        )
+        params["optim_conf"]["set_deferrable_load_single_constant"] = check_def_loads(
+            num_def_loads,
+            params["optim_conf"],
+            False,
+            "set_deferrable_load_single_constant",
+            logger,
+        )
+        params["optim_conf"]["treat_deferrable_load_as_semi_cont"] = check_def_loads(
+            num_def_loads,
+            params["optim_conf"],
+            True,
+            "treat_deferrable_load_as_semi_cont",
+            logger,
+        )
+        params["optim_conf"]["set_deferrable_startup_penalty"] = check_def_loads(
+            num_def_loads,
+            params["optim_conf"],
+            0.0,
+            "set_deferrable_startup_penalty",
+            logger,
+        )
+        params["optim_conf"]["operating_hours_of_each_deferrable_load"] = (
+            check_def_loads(
+                num_def_loads,
+                params["optim_conf"],
+                0,
+                "operating_hours_of_each_deferrable_load",
+                logger,
+            )
+        )
+        params["optim_conf"]["nominal_power_of_deferrable_loads"] = check_def_loads(
+            num_def_loads,
+            params["optim_conf"],
+            0,
+            "nominal_power_of_deferrable_loads",
+            logger,
+        )
     else:
         logger.warning("unable to obtain parameter: number_of_deferrable_loads")
     # historic_days_to_retrieve should be no less then 2
-    if params["retrieve_hass_conf"].get('historic_days_to_retrieve',None) is not None:
-        if params["retrieve_hass_conf"]['historic_days_to_retrieve'] < 2:
-            params["retrieve_hass_conf"]['historic_days_to_retrieve'] = 2
-            logger.warning("days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. Make sure your sensors also have at least 2 days of history")
+    if params["retrieve_hass_conf"].get("historic_days_to_retrieve", None) is not None:
+        if params["retrieve_hass_conf"]["historic_days_to_retrieve"] < 2:
+            params["retrieve_hass_conf"]["historic_days_to_retrieve"] = 2
+            logger.warning(
+                "days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. Make sure your sensors also have at least 2 days of history"
+            )
     else:
         logger.warning("unable to obtain parameter: historic_days_to_retrieve")
 
     # Configure secrets, set params to correct config categorie
     # retrieve_hass_conf
-    params['retrieve_hass_conf']['hass_url'] = params_secrets.get('hass_url', None)
-    params['retrieve_hass_conf']['long_lived_token'] = params_secrets.get('long_lived_token', None)
-    params['retrieve_hass_conf']['time_zone'] = params_secrets.get('time_zone', None)
-    params['retrieve_hass_conf']['Latitude'] = params_secrets.get('Latitude', None)
-    params['retrieve_hass_conf']['Longitude'] = params_secrets.get('Longitude', None)
-    params['retrieve_hass_conf']['Altitude'] = params_secrets.get('Altitude', None)
+    params["retrieve_hass_conf"]["hass_url"] = params_secrets.get("hass_url", None)
+    params["retrieve_hass_conf"]["long_lived_token"] = params_secrets.get(
+        "long_lived_token", None
+    )
+    params["retrieve_hass_conf"]["time_zone"] = params_secrets.get("time_zone", None)
+    params["retrieve_hass_conf"]["Latitude"] = params_secrets.get("Latitude", None)
+    params["retrieve_hass_conf"]["Longitude"] = params_secrets.get("Longitude", None)
+    params["retrieve_hass_conf"]["Altitude"] = params_secrets.get("Altitude", None)
     # Update optional param secrets
-    if params["optim_conf"].get('weather_forecast_method',None) is not None:
-        if params["optim_conf"]['weather_forecast_method'] == "solcast":
-            params["retrieve_hass_conf"]["solcast_api_key"] = params_secrets.get("solcast_api_key", "123456")
-            params["params_secrets"]["solcast_api_key"] = params_secrets.get("solcast_api_key", "123456")
-            params["retrieve_hass_conf"]["solcast_rooftop_id"] = params_secrets.get("solcast_rooftop_id", "123456")
-            params["params_secrets"]["solcast_rooftop_id"] = params_secrets.get("solcast_rooftop_id", "123456")
-        elif params["optim_conf"]['weather_forecast_method'] == "solar.forecast":
-            params["retrieve_hass_conf"]["solar_forecast_kwp"] = params_secrets.get("solar_forecast_kwp", 5)
-            params["params_secrets"]["solar_forecast_kwp"] = params_secrets.get("solar_forecast_kwp", 5)
+    if params["optim_conf"].get("weather_forecast_method", None) is not None:
+        if params["optim_conf"]["weather_forecast_method"] == "solcast":
+            params["retrieve_hass_conf"]["solcast_api_key"] = params_secrets.get(
+                "solcast_api_key", "123456"
+            )
+            params["params_secrets"]["solcast_api_key"] = params_secrets.get(
+                "solcast_api_key", "123456"
+            )
+            params["retrieve_hass_conf"]["solcast_rooftop_id"] = params_secrets.get(
+                "solcast_rooftop_id", "123456"
+            )
+            params["params_secrets"]["solcast_rooftop_id"] = params_secrets.get(
+                "solcast_rooftop_id", "123456"
+            )
+        elif params["optim_conf"]["weather_forecast_method"] == "solar.forecast":
+            params["retrieve_hass_conf"]["solar_forecast_kwp"] = params_secrets.get(
+                "solar_forecast_kwp", 5
+            )
+            params["params_secrets"]["solar_forecast_kwp"] = params_secrets.get(
+                "solar_forecast_kwp", 5
+            )
     else:
-        logger.warning("Unable to detect weather_forecast_method parameter")
+        logger.warning("Unable to detect weather_forecast_method parameter")
     # Check if secrets parameters still defaults values
-    secret_params = ["https://myhass.duckdns.org/", "thatverylongtokenhere",
-                     45.83, 6.86, 4807.8]
-    if any(x in secret_params for x in params["retrieve_hass_conf"].values()):
-        logger.warning("Some secret parameters values are still matching their defaults")
+    secret_params = [
+        "https://myhass.duckdns.org/",
+        "thatverylongtokenhere",
+        45.83,
+        6.86,
+        4807.8,
+    ]
+    if any(x in secret_params for x in params["retrieve_hass_conf"].values()):
+        logger.warning(
+            "Some secret parameters values are still matching their defaults"
+        )
 
     # Set empty dict objects for params passed_data
     # To be latter populated with runtime parameters (treat_runtimeparams)
```
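The trickiest part of the hunk above is the conversion of the legacy peak-hours lists into the `load_peak_hour_periods` dictionary. A self-contained sketch with invented sample hours shows the resulting shape:

```python
# Sketch of the list_hp_periods construction from the hunk above; sample hours are invented.
config = {
    "list_peak_hours_periods_start_hours": [
        {"peak_hours_periods_start_hours": "05:54"},
        {"peak_hours_periods_start_hours": "17:24"},
    ],
    "list_peak_hours_periods_end_hours": [
        {"peak_hours_periods_end_hours": "09:24"},
        {"peak_hours_periods_end_hours": "20:24"},
    ],
}
start_hours_list = [
    i["peak_hours_periods_start_hours"]
    for i in config["list_peak_hours_periods_start_hours"]
]
end_hours_list = [
    i["peak_hours_periods_end_hours"]
    for i in config["list_peak_hours_periods_end_hours"]
]
list_hp_periods = {
    "period_hp_" + str(i + 1): [{"start": start_hours_list[i]}, {"end": end_hours_list[i]}]
    for i in range(len(start_hours_list))
}
print(list_hp_periods)
# {'period_hp_1': [{'start': '05:54'}, {'end': '09:24'}],
#  'period_hp_2': [{'start': '17:24'}, {'end': '20:24'}]}
```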
```diff
@@ -1271,16 +1503,19 @@ def build_params(emhass_conf: dict, params_secrets: dict, config: dict,
         "prediction_horizon": None,
         "soc_init": None,
         "soc_final": None,
-        'operating_hours_of_each_deferrable_load': None,
-        'start_timesteps_of_each_deferrable_load': None,
-        'end_timesteps_of_each_deferrable_load': None,
+        "operating_hours_of_each_deferrable_load": None,
+        "start_timesteps_of_each_deferrable_load": None,
+        "end_timesteps_of_each_deferrable_load": None,
         "alpha": None,
         "beta": None,
     }
 
     return params
 
-def check_def_loads(num_def_loads: int, parameter: list[dict], default, parameter_name: str, logger):
+
+def check_def_loads(
+    num_def_loads: int, parameter: list[dict], default, parameter_name: str, logger
+):
     """
     Check parameter lists with deferrable loads number, if they do not match, enlarge to fit.
 
```
```diff
@@ -1294,12 +1529,21 @@ def check_def_loads(num_def_loads: int, parameter: list[dict], default, paramete
     :type logger: str
     :param logger: The logger object
     :type logger: logging.Logger
-    return: parameter list
+    return: parameter list
     :rtype: list[dict]
 
     """
-    if parameter.get(parameter_name, None) is not None and type(parameter[parameter_name]) is list and num_def_loads > len(parameter[parameter_name]):
-        logger.warning(parameter_name + " does not match number in num_def_loads, adding default values (" + str(default) + ") to parameter")
+    if (
+        parameter.get(parameter_name, None) is not None
+        and type(parameter[parameter_name]) is list
+        and num_def_loads > len(parameter[parameter_name])
+    ):
+        logger.warning(
+            parameter_name
+            + " does not match number in num_def_loads, adding default values ("
+            + str(default)
+            + ") to parameter"
+        )
     for x in range(len(parameter[parameter_name]), num_def_loads):
         parameter[parameter_name].append(default)
     return parameter[parameter_name]
```
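`check_def_loads` simply pads a per-load list in place whenever it is shorter than the number of deferrable loads. A standalone sketch of that behaviour, with invented configuration values:

```python
import logging

# Standalone sketch of the padding done by check_def_loads; values are invented.
logging.basicConfig()
logger = logging.getLogger(__name__)

optim_conf = {"nominal_power_of_deferrable_loads": [3000.0]}  # only one load configured
num_def_loads = 3                                             # but three deferrable loads
values = optim_conf["nominal_power_of_deferrable_loads"]
if num_def_loads > len(values):
    logger.warning("nominal_power_of_deferrable_loads padded to %s entries", num_def_loads)
    for _ in range(len(values), num_def_loads):
        values.append(0)  # append the default until lengths match
print(values)  # -> [3000.0, 0, 0]
```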