emhass 0.10.5__py3-none-any.whl → 0.11.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emhass/command_line.py +179 -86
- emhass/data/associations.csv +61 -0
- emhass/data/config_defaults.json +117 -0
- emhass/forecast.py +38 -36
- emhass/machine_learning_forecaster.py +2 -1
- emhass/machine_learning_regressor.py +7 -2
- emhass/optimization.py +62 -62
- emhass/retrieve_hass.py +9 -4
- emhass/static/advanced.html +2 -1
- emhass/static/basic.html +4 -2
- emhass/static/configuration_list.html +44 -0
- emhass/static/configuration_script.js +871 -0
- emhass/static/data/param_definitions.json +424 -0
- emhass/static/script.js +345 -322
- emhass/static/style.css +267 -8
- emhass/templates/configuration.html +75 -0
- emhass/templates/index.html +15 -8
- emhass/utils.py +620 -303
- emhass/web_server.py +323 -213
- {emhass-0.10.5.dist-info → emhass-0.11.0.dist-info}/METADATA +207 -169
- emhass-0.11.0.dist-info/RECORD +32 -0
- {emhass-0.10.5.dist-info → emhass-0.11.0.dist-info}/WHEEL +1 -1
- emhass-0.10.5.dist-info/RECORD +0 -26
- {emhass-0.10.5.dist-info → emhass-0.11.0.dist-info}/LICENSE +0 -0
- {emhass-0.10.5.dist-info → emhass-0.11.0.dist-info}/entry_points.txt +0 -0
- {emhass-0.10.5.dist-info → emhass-0.11.0.dist-info}/top_level.txt +0 -0
emhass/command_line.py
CHANGED
@@ -3,6 +3,7 @@
 
 import argparse
 import os
+import re
 import time
 import pathlib
 import logging
@@ -50,15 +51,25 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
 
     """
     logger.info("Setting up needed data")
+
+    # check if passed params is a dict
+    if (params != None) and (params != "null"):
+        if type(params) is str:
+            params = json.loads(params)
+    else:
+        params = {}
+
     # Parsing yaml
-    retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(
-        …
+    retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params,logger)
+    if type(retrieve_hass_conf) is bool:
+        return False
+
     # Treat runtimeparams
     params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
         runtimeparams, params, retrieve_hass_conf, optim_conf, plant_conf, set_type, logger)
     # Define main objects
     rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
-                      retrieve_hass_conf['…
+                      retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'],
                       params, emhass_conf, logger, get_data_from_file=get_data_from_file)
     fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
                     params, emhass_conf, logger, get_data_from_file=get_data_from_file)
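The guard added at the top of set_input_data_dict means params may now arrive either as a JSON string or as an already-parsed dict, and a boolean return from utils.get_yaml_parse is treated as a failure signal. A minimal standalone sketch of the same normalize-then-validate pattern (not the emhass API itself):

    import json

    def normalize_params(params):
        # Accept a JSON string, a dict, or nothing at all (mirrors the guard above).
        if params is not None and params != "null":
            if isinstance(params, str):
                params = json.loads(params)
        else:
            params = {}
        return params

    print(normalize_params('{"passed_data": {}}'))  # {'passed_data': {}}
    print(normalize_params(None))                   # {}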
@@ -71,24 +82,24 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
 
         if get_data_from_file:
             with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp:
                 rh.df_final, days_list, var_list = pickle.load(inp)
-                retrieve_hass_conf['…
-                retrieve_hass_conf['…
-                retrieve_hass_conf['…
-                retrieve_hass_conf['…
-                retrieve_hass_conf['…
-                retrieve_hass_conf['…
+                retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(var_list[0])
+                retrieve_hass_conf['sensor_power_photovoltaics'] = str(var_list[1])
+                retrieve_hass_conf['sensor_linear_interp'] = [
+                    retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']]
+                retrieve_hass_conf['sensor_replace_zero'] = [
+                    retrieve_hass_conf['sensor_power_photovoltaics']]
         else:
             days_list = utils.get_days_list(
-                retrieve_hass_conf[…
-            var_list = [retrieve_hass_conf[…
-                        retrieve_hass_conf[…
+                retrieve_hass_conf['historic_days_to_retrieve'])
+            var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'],
+                        retrieve_hass_conf['sensor_power_photovoltaics']]
         if not rh.get_data(days_list, var_list, minimal_response=False, significant_changes_only=False):
             return False
-        if not rh.prepare_data(retrieve_hass_conf[…
-                               load_negative=retrieve_hass_conf[…
-                               set_zero_min=retrieve_hass_conf[…
-                               var_replace_zero=retrieve_hass_conf[…
-                               var_interp=retrieve_hass_conf[…
+        if not rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'],
+                               load_negative=retrieve_hass_conf['load_negative'],
+                               set_zero_min=retrieve_hass_conf['set_zero_min'],
+                               var_replace_zero=retrieve_hass_conf['sensor_replace_zero'],
+                               var_interp=retrieve_hass_conf['sensor_linear_interp']):
             return False
         df_input_data = rh.df_final.copy()
         # What we don't need for this type of action
@@ -96,7 +107,7 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
     elif set_type == "dayahead-optim":
         # Get PV and load forecasts
         df_weather = fcst.get_weather_forecast(
-            method=optim_conf[…
+            method=optim_conf['weather_forecast_method'])
         if isinstance(df_weather, bool) and not df_weather:
             return False
         P_PV_forecast = fcst.get_power_from_weather(df_weather)
@@ -109,7 +120,14 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
         df_input_data_dayahead = pd.DataFrame(np.transpose(np.vstack(
             [P_PV_forecast.values, P_load_forecast.values])), index=P_PV_forecast.index,
             columns=["P_PV_forecast", "P_load_forecast"])
-        …
+        if "optimization_time_step" in retrieve_hass_conf and retrieve_hass_conf["optimization_time_step"]:
+            if not isinstance(retrieve_hass_conf["optimization_time_step"], pd._libs.tslibs.timedeltas.Timedelta):
+                optimization_time_step = pd.to_timedelta(retrieve_hass_conf["optimization_time_step"], "minute")
+            else:
+                optimization_time_step = retrieve_hass_conf["optimization_time_step"]
+            df_input_data_dayahead = df_input_data_dayahead.asfreq(optimization_time_step)
+        else:
+            df_input_data_dayahead = utils.set_df_index_freq(df_input_data_dayahead)
         params = json.loads(params)
         if ("prediction_horizon" in params["passed_data"] and params["passed_data"]["prediction_horizon"] is not None):
             prediction_horizon = params["passed_data"]["prediction_horizon"]
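The added branch normalizes optimization_time_step to a pandas Timedelta before pinning the day-ahead frame to that frequency, so the value may come from the config as a plain number of minutes or as a ready-made Timedelta. A small self-contained illustration of the normalization and the asfreq call (data values are made up):

    import pandas as pd

    def to_time_step(value):
        # Plain numbers are interpreted as minutes; Timedeltas pass through.
        if not isinstance(value, pd.Timedelta):
            return pd.to_timedelta(value, "minute")
        return value

    idx = pd.date_range("2024-01-01", periods=4, freq="30min")
    df = pd.DataFrame({"P_PV_forecast": [0.0, 100.0, 250.0, 180.0]}, index=idx)
    df = df.asfreq(to_time_step(30))  # pin the index frequency to 30 minutes
    print(df.index.freq)              # <30 * Minutes>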
@@ -122,23 +140,23 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
         if get_data_from_file:
             with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp:
                 rh.df_final, days_list, var_list = pickle.load(inp)
-                retrieve_hass_conf['…
-                retrieve_hass_conf['…
-                retrieve_hass_conf['…
-                retrieve_hass_conf['…
-                retrieve_hass_conf['…
-                retrieve_hass_conf['…
+                retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(var_list[0])
+                retrieve_hass_conf['sensor_power_photovoltaics'] = str(var_list[1])
+                retrieve_hass_conf['sensor_linear_interp'] = [
+                    retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']]
+                retrieve_hass_conf['sensor_replace_zero'] = [
+                    retrieve_hass_conf['sensor_power_photovoltaics']]
         else:
             days_list = utils.get_days_list(1)
-            var_list = [retrieve_hass_conf[…
-                        retrieve_hass_conf[…
+            var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'],
+                        retrieve_hass_conf['sensor_power_photovoltaics']]
         if not rh.get_data(days_list, var_list, minimal_response=False, significant_changes_only=False):
             return False
-        if not rh.prepare_data(retrieve_hass_conf[…
-                               load_negative=retrieve_hass_conf[…
-                               set_zero_min=retrieve_hass_conf[…
-                               var_replace_zero=retrieve_hass_conf[…
-                               var_interp=retrieve_hass_conf[…
+        if not rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'],
+                               load_negative=retrieve_hass_conf['load_negative'],
+                               set_zero_min=retrieve_hass_conf['set_zero_min'],
+                               var_replace_zero=retrieve_hass_conf['sensor_replace_zero'],
+                               var_interp=retrieve_hass_conf['sensor_linear_interp']):
             return False
         df_input_data = rh.df_final.copy()
         # Get PV and load forecasts
@@ -155,7 +173,14 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
                 "Unable to get sensor power photovoltaics, or sensor power load no var loads. Check HA sensors and their daily data")
             return False
         df_input_data_dayahead = pd.concat([P_PV_forecast, P_load_forecast], axis=1)
-        …
+        if "optimization_time_step" in retrieve_hass_conf and retrieve_hass_conf["optimization_time_step"]:
+            if not isinstance(retrieve_hass_conf["optimization_time_step"], pd._libs.tslibs.timedeltas.Timedelta):
+                optimization_time_step = pd.to_timedelta(retrieve_hass_conf["optimization_time_step"], "minute")
+            else:
+                optimization_time_step = retrieve_hass_conf["optimization_time_step"]
+            df_input_data_dayahead = df_input_data_dayahead.asfreq(optimization_time_step)
+        else:
+            df_input_data_dayahead = utils.set_df_index_freq(df_input_data_dayahead)
         df_input_data_dayahead.columns = ["P_PV_forecast", "P_load_forecast"]
         params = json.loads(params)
         if ("prediction_horizon" in params["passed_data"] and params["passed_data"]["prediction_horizon"] is not None):
@@ -167,7 +192,7 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
         P_PV_forecast, P_load_forecast = None, None
         params = json.loads(params)
         # Retrieve data from hass
-        days_to_retrieve = params["passed_data"][…
+        days_to_retrieve = params["passed_data"]['historic_days_to_retrieve']
         model_type = params["passed_data"]["model_type"]
         var_model = params["passed_data"]["var_model"]
         if get_data_from_file:
@@ -266,8 +291,7 @@ def weather_forecast_cache(emhass_conf: dict, params: str,
     """
 
     # Parsing yaml
-    retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(
-        emhass_conf, use_secrets=True, params=params)
+    retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params, logger)
 
     # Treat runtimeparams
     params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
@@ -285,7 +309,7 @@ def weather_forecast_cache(emhass_conf: dict, params: str,
     fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
                     params, emhass_conf, logger)
 
-    result = fcst.get_weather_forecast(optim_conf[…
+    result = fcst.get_weather_forecast(optim_conf['weather_forecast_method'])
     if isinstance(result, bool) and not result:
         return False
 
@@ -318,7 +342,7 @@ def perfect_forecast_optim(input_data_dict: dict, logger: logging.Logger,
     if isinstance(df_input_data, bool) and not df_input_data:
         return False
     df_input_data = input_data_dict['fcst'].get_prod_price_forecast(
-        df_input_data, method=input_data_dict['fcst'].optim_conf['…
+        df_input_data, method=input_data_dict['fcst'].optim_conf['production_price_forecast_method'],
         list_and_perfect=True)
     if isinstance(df_input_data, bool) and not df_input_data:
         return False
@@ -339,7 +363,7 @@ def perfect_forecast_optim(input_data_dict: dict, logger: logging.Logger,
     params = input_data_dict["params"]
 
     # if continual_publish, save perfect results to data_path/entities json
-    if input_data_dict["retrieve_hass_conf"].get(…
+    if input_data_dict["retrieve_hass_conf"].get('continual_publish',False) or params["passed_data"].get("entity_save",False):
         #Trigger the publish function, save entity data and not post to HA
         publish_data(input_data_dict, logger, entity_save=True, dont_post=True)
 
@@ -372,7 +396,7 @@ def dayahead_forecast_optim(input_data_dict: dict, logger: logging.Logger,
         return False
     df_input_data_dayahead = input_data_dict['fcst'].get_prod_price_forecast(
         df_input_data_dayahead,
-        method=input_data_dict['fcst'].optim_conf['…
+        method=input_data_dict['fcst'].optim_conf['production_price_forecast_method'])
     if isinstance(df_input_data_dayahead, bool) and not df_input_data_dayahead:
         return False
     if "outdoor_temperature_forecast" in input_data_dict["params"]["passed_data"]:
@@ -398,7 +422,7 @@ def dayahead_forecast_optim(input_data_dict: dict, logger: logging.Logger,
     params = input_data_dict["params"]
 
     # if continual_publish, save day_ahead results to data_path/entities json
-    if input_data_dict["retrieve_hass_conf"].get(…
+    if input_data_dict["retrieve_hass_conf"].get('continual_publish',False) or params["passed_data"].get("entity_save",False):
         #Trigger the publish function, save entity data and not post to HA
         publish_data(input_data_dict, logger, entity_save=True, dont_post=True)
 
@@ -430,7 +454,7 @@ def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger,
     if isinstance(df_input_data_dayahead, bool) and not df_input_data_dayahead:
         return False
     df_input_data_dayahead = input_data_dict['fcst'].get_prod_price_forecast(
-        df_input_data_dayahead, method=input_data_dict['fcst'].optim_conf['…
+        df_input_data_dayahead, method=input_data_dict['fcst'].optim_conf['production_price_forecast_method'])
    if isinstance(df_input_data_dayahead, bool) and not df_input_data_dayahead:
        return False
    if "outdoor_temperature_forecast" in input_data_dict["params"]["passed_data"]:
@@ -440,9 +464,9 @@ def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger,
    prediction_horizon = input_data_dict["params"]["passed_data"]["prediction_horizon"]
    soc_init = input_data_dict["params"]["passed_data"]["soc_init"]
    soc_final = input_data_dict["params"]["passed_data"]["soc_final"]
-    def_total_hours = input_data_dict["params"]["passed_data"][…
-    def_start_timestep = input_data_dict["params"]["passed_data"][…
-    def_end_timestep = input_data_dict["params"]["passed_data"][…
+    def_total_hours = input_data_dict["params"]["passed_data"]['operating_hours_of_each_deferrable_load']
+    def_start_timestep = input_data_dict["params"]["passed_data"]['start_timesteps_of_each_deferrable_load']
+    def_end_timestep = input_data_dict["params"]["passed_data"]['end_timesteps_of_each_deferrable_load']
    opt_res_naive_mpc = input_data_dict["opt"].perform_naive_mpc_optim(
        df_input_data_dayahead, input_data_dict["P_PV_forecast"], input_data_dict["P_load_forecast"],
        prediction_horizon, soc_init, soc_final, def_total_hours,
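Since naive_mpc_optim now reads the long-form keys from passed_data, runtime parameters for an MPC call should use the 0.11.0 names. A hedged example payload (all values purely illustrative):

    import json

    runtimeparams = json.dumps({
        "prediction_horizon": 10,
        "soc_init": 0.5,
        "soc_final": 0.6,
        "operating_hours_of_each_deferrable_load": [1, 3],
        "start_timesteps_of_each_deferrable_load": [0, 0],
        "end_timesteps_of_each_deferrable_load": [0, 0],
    })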
@@ -465,7 +489,7 @@ def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger,
     params = input_data_dict["params"]
 
     # if continual_publish, save mpc results to data_path/entities json
-    if input_data_dict["retrieve_hass_conf"].get(…
+    if input_data_dict["retrieve_hass_conf"].get('continual_publish',False) or params["passed_data"].get("entity_save",False):
         #Trigger the publish function, save entity data and not post to HA
         publish_data(input_data_dict, logger, entity_save=True, dont_post=True)
 
@@ -568,11 +592,11 @@ def forecast_model_predict(input_data_dict: dict, logger: logging.Logger,
     now_precise = datetime.now(
         input_data_dict["retrieve_hass_conf"]["time_zone"]
     ).replace(second=0, microsecond=0)
-    if input_data_dict["retrieve_hass_conf"][…
+    if input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "nearest":
         idx_closest = predictions.index.get_indexer([now_precise], method="nearest")[0]
-    elif input_data_dict["retrieve_hass_conf"][…
+    elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "first":
         idx_closest = predictions.index.get_indexer([now_precise], method="ffill")[0]
-    elif input_data_dict["retrieve_hass_conf"][…
+    elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "last":
         idx_closest = predictions.index.get_indexer([now_precise], method="bfill")[0]
     if idx_closest == -1:
         idx_closest = predictions.index.get_indexer([now_precise], method="nearest")[0]
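The three method_ts_round options map one-to-one onto pandas Index.get_indexer lookup modes: "nearest" takes the closest timestamp, "first" (ffill) the last timestamp at or before now, and "last" (bfill) the next timestamp at or after now. A compact demonstration:

    import pandas as pd

    idx = pd.date_range("2024-01-01 00:00", periods=4, freq="30min")
    now = pd.Timestamp("2024-01-01 00:40")

    # method_ts_round -> get_indexer method, as used above
    for ts_round, method in {"nearest": "nearest", "first": "ffill", "last": "bfill"}.items():
        pos = idx.get_indexer([now], method=method)[0]
        print(ts_round, idx[pos])
    # nearest -> 00:30, first -> 00:30, last -> 01:00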
@@ -668,7 +692,9 @@ def regressor_model_fit(input_data_dict: dict, logger: logging.Logger,
     # The MLRegressor object
     mlr = MLRegressor(data, model_type, regression_model, features, target, timestamp, logger)
     # Fit the ML model
-    mlr.fit(date_features=date_features)
+    fit = mlr.fit(date_features=date_features)
+    if not fit:
+        return False
     # Save model
     if not debug:
         filename = model_type + "_mlr.pkl"
@@ -749,10 +775,13 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
 
     """
     logger.info("Publishing data to HASS instance")
-    if …
-    …
-    …
-    …
+    if input_data_dict:
+        if not isinstance(input_data_dict.get("params",{}),dict):
+            params = json.loads(input_data_dict["params"])
+        else:
+            params = input_data_dict.get("params",{})
+
+
     # Check if a day ahead optimization has been performed (read CSV file)
     if save_data_to_file:
         today = datetime.now(timezone.utc).replace(
@@ -799,17 +828,17 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
     opt_res_latest = pd.read_csv(
         input_data_dict['emhass_conf']['data_path'] / filename, index_col='timestamp')
     opt_res_latest.index = pd.to_datetime(opt_res_latest.index)
-    opt_res_latest.index.freq = input_data_dict["retrieve_hass_conf"][…
+    opt_res_latest.index.freq = input_data_dict["retrieve_hass_conf"]['optimization_time_step']
     # Estimate the current index
     now_precise = datetime.now(
         input_data_dict["retrieve_hass_conf"]["time_zone"]
     ).replace(second=0, microsecond=0)
-    if input_data_dict["retrieve_hass_conf"][…
+    if input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "nearest":
         idx_closest = opt_res_latest.index.get_indexer([now_precise], method="nearest")[0]
-    elif input_data_dict["retrieve_hass_conf"][…
+    elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "first":
         idx_closest = opt_res_latest.index.get_indexer(
             [now_precise], method="ffill")[0]
-    elif input_data_dict["retrieve_hass_conf"][…
+    elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "last":
         idx_closest = opt_res_latest.index.get_indexer(
             [now_precise], method="bfill")[0]
     if idx_closest == -1:
@@ -877,7 +906,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         custom_deferrable_forecast_id = params["passed_data"][
             "custom_deferrable_forecast_id"
         ]
-        for k in range(input_data_dict["opt"].optim_conf[…
+        for k in range(input_data_dict["opt"].optim_conf['number_of_deferrable_loads']):
             if "P_deferrable{}".format(k) not in opt_res_latest.columns:
                 logger.error(
                     "P_deferrable{}".format(k)
@@ -900,11 +929,11 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         custom_predicted_temperature_id = params["passed_data"][
             "custom_predicted_temperature_id"
         ]
-        for k in range(input_data_dict["opt"].optim_conf[…
+        for k in range(input_data_dict["opt"].optim_conf['number_of_deferrable_loads']):
             if "def_load_config" in input_data_dict["opt"].optim_conf.keys():
                 if "thermal_config" in input_data_dict["opt"].optim_conf["def_load_config"][k]:
                     input_data_dict["rh"].post_data(
-                        opt_res_latest["…
+                        opt_res_latest["predicted_temp_heater{}".format(k)],
                         idx_closest,
                         custom_predicted_temperature_id[k]["entity_id"],
                         custom_predicted_temperature_id[k]["unit_of_measurement"],
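The indexing above implies that custom_predicted_temperature_id is a list with one entry per deferrable load, each carrying at least an entity_id and a unit_of_measurement. A hypothetical passed-data fragment with that shape (entity names invented for illustration):

    custom_predicted_temperature_id = [
        {"entity_id": "sensor.temp_predicted_heater0", "unit_of_measurement": "°C"},
        {"entity_id": "sensor.temp_predicted_heater1", "unit_of_measurement": "°C"},
    ]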
@@ -916,7 +945,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
                     )
                 cols_published = cols_published + ["predicted_temp_heater{}".format(k)]
     # Publish battery power
-    if input_data_dict["opt"].optim_conf[…
+    if input_data_dict["opt"].optim_conf['set_use_battery']:
         if "P_batt" not in opt_res_latest.columns:
             logger.error(
                 "P_batt was not found in results DataFrame. Optimization task may need to be relaunched or it did not converge to a solution.",
@@ -1045,7 +1074,7 @@ def continual_publish(input_data_dict: dict, entity_path: pathlib.Path, logger:
 
     """
     logger.info("Continual publish thread service started")
-    freq = input_data_dict['retrieve_hass_conf'].get(…
+    freq = input_data_dict['retrieve_hass_conf'].get('optimization_time_step', pd.to_timedelta(1, "minutes"))
     entity_path_contents = []
     while True:
         # Sleep for x seconds (using current time as a reference for time left)
@@ -1056,7 +1085,14 @@ def continual_publish(input_data_dict: dict, entity_path: pathlib.Path, logger:
         for entity in entity_path_contents:
             if entity != "metadata.json":
                 # Call publish_json with entity file, build entity, and publish
-                publish_json(entity, input_data_dict, entity_path, logger, …
+                publish_json(entity, input_data_dict, entity_path, logger, 'continual_publish')
+        # Retrieve entity metadata from file
+        if os.path.isfile(entity_path / "metadata.json"):
+            with open(entity_path / "metadata.json", "r") as file:
+                metadata = json.load(file)
+                # Check if freq should be shorter
+                if not metadata.get("lowest_time_step",None) == None:
+                    freq = pd.to_timedelta(metadata["lowest_time_step"], "minutes")
         pass
     # This function should never return
     return False
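With this change the publish loop itself re-reads metadata.json on every pass and shrinks its sleep interval to the smallest optimization_time_step any saved entity was published with (now recorded as lowest_time_step, previously lowest_freq inside publish_json). A sketch of that refresh step, assuming the metadata layout used above:

    import json
    import pathlib
    import pandas as pd

    def refresh_freq(entity_path: pathlib.Path, freq: pd.Timedelta) -> pd.Timedelta:
        # Mirror the loop body: prefer the recorded lowest_time_step when present.
        meta_file = entity_path / "metadata.json"
        if meta_file.is_file():
            metadata = json.loads(meta_file.read_text())
            if metadata.get("lowest_time_step") is not None:
                freq = pd.to_timedelta(metadata["lowest_time_step"], "minutes")
        return freq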
@@ -1081,9 +1117,7 @@ def publish_json(entity: dict, input_data_dict: dict, entity_path: pathlib.Path,
     # Retrieve entity metadata from file
     if os.path.isfile(entity_path / "metadata.json"):
         with open(entity_path / "metadata.json", "r") as file:
-            metadata = json.load(file)
-            if not metadata.get("lowest_freq",None) == None:
-                freq = pd.to_timedelta(metadata["lowest_freq"], "minutes")
+            metadata = json.load(file)
     else:
         logger.error("unable to located metadata.json in:" + entity_path)
         return False
@@ -1097,18 +1131,18 @@ def publish_json(entity: dict, input_data_dict: dict, entity_path: pathlib.Path,
     entity_data.columns = [metadata[entity_id]["name"]]
     entity_data.index.name = "timestamp"
     entity_data.index = pd.to_datetime(entity_data.index).tz_convert(input_data_dict["retrieve_hass_conf"]["time_zone"])
-    entity_data.index.freq = pd.to_timedelta(int(metadata[entity_id][…
+    entity_data.index.freq = pd.to_timedelta(int(metadata[entity_id]['optimization_time_step']), "minutes")
     # Calculate the current state value
-    if input_data_dict["retrieve_hass_conf"][…
+    if input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "nearest":
         idx_closest = entity_data.index.get_indexer([now_precise], method="nearest")[0]
-    elif input_data_dict["retrieve_hass_conf"][…
+    elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "first":
         idx_closest = entity_data.index.get_indexer([now_precise], method="ffill")[0]
-    elif input_data_dict["retrieve_hass_conf"][…
+    elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "last":
         idx_closest = entity_data.index.get_indexer([now_precise], method="bfill")[0]
     if idx_closest == -1:
         idx_closest = entity_data.index.get_indexer([now_precise], method="nearest")[0]
     # Call post data
-    if reference == …
+    if reference == 'continual_publish':
         logger.debug("Auto Published sensor:")
         logger_levels = "DEBUG"
     else:
@@ -1153,7 +1187,9 @@ def main():
     parser.add_argument('--action', type=str, help='Set the desired action, options are: perfect-optim, dayahead-optim,\
         naive-mpc-optim, publish-data, forecast-model-fit, forecast-model-predict, forecast-model-tune')
     parser.add_argument('--config', type=str,
-                        help='Define path to the config.…
+                        help='Define path to the config.json/defaults.json file')
+    parser.add_argument('--params', type=str, default=None,
+                        help='String of configuration parameters passed')
     parser.add_argument('--data', type=str,
                         help='Define path to the Data files (.csv & .pkl)')
     parser.add_argument('--root', type=str, help='Define path emhass root')
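As the final hunk of main() below shows, the new --params string is applied with params.update(json.loads(args.params)), so it replaces matching top-level sections wholesale rather than deep-merging them. A small demonstration of that semantics:

    import json

    params = {"optim_conf": {"number_of_deferrable_loads": 2, "lp_solver": "COIN_CMD"}}
    args_params = '{"optim_conf": {"number_of_deferrable_loads": 3}}'
    params.update(json.loads(args_params))
    print(params["optim_conf"])  # {'number_of_deferrable_loads': 3} - lp_solver is gone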
@@ -1161,19 +1197,19 @@ def main():
                         help='Define the type of cost function, options are: profit, cost, self-consumption')
     parser.add_argument('--log2file', type=strtobool, default='False',
                         help='Define if we should log to a file or not')
-    parser.add_argument('--…
-                        help='…
+    parser.add_argument('--secrets', type=str, default=None,
+                        help='Define secret parameter file (secrets_emhass.yaml) path')
     parser.add_argument('--runtimeparams', type=str, default=None,
                         help='Pass runtime optimization parameters as dictionnary')
     parser.add_argument('--debug', type=strtobool,
                         default='False', help='Use True for testing purposes')
     args = parser.parse_args()
+
     # The path to the configuration files
     if args.config is not None:
         config_path = pathlib.Path(args.config)
     else:
-        config_path = pathlib.Path(
-            str(utils.get_root(__file__, num_parent=2) / 'config_emhass.yaml'))
+        config_path = pathlib.Path(str(utils.get_root(__file__, num_parent=3) / 'config.json'))
     if args.data is not None:
         data_path = pathlib.Path(args.data)
     else:
@@ -1181,30 +1217,50 @@ def main():
     if args.root is not None:
         root_path = pathlib.Path(args.root)
     else:
-        root_path = …
+        root_path = utils.get_root(__file__, num_parent=1)
+    if args.secrets is not None:
+        secrets_path = pathlib.Path(args.secrets)
+    else:
+        secrets_path = pathlib.Path(config_path.parent / 'secrets_emhass.yaml')
+
+    associations_path = root_path / 'data/associations.csv'
+    defaults_path = root_path / 'data/config_defaults.json'
+
     emhass_conf = {}
     emhass_conf['config_path'] = config_path
     emhass_conf['data_path'] = data_path
     emhass_conf['root_path'] = root_path
+    emhass_conf['associations_path'] = associations_path
+    emhass_conf['defaults_path'] = defaults_path
     # create logger
     logger, ch = utils.get_logger(
         __name__, emhass_conf, save_to_file=bool(args.log2file))
+
+    # Check paths
     logger.debug("config path: " + str(config_path))
     logger.debug("data path: " + str(data_path))
     logger.debug("root path: " + str(root_path))
-    if not …
+    if not associations_path.exists():
         logger.error(
-            "Could not find …
-        logger.error("Try setting config file path with --…
+            "Could not find associations.csv file in: " + str(associations_path))
+        logger.error("Try setting config file path with --associations")
         return False
+    if not config_path.exists():
+        logger.warning(
+            "Could not find config.json file in: " + str(config_path))
+        logger.warning("Try setting config file path with --config")
+    if not secrets_path.exists():
+        logger.warning("Could not find secrets file in: " + str(secrets_path))
+        logger.warning("Try setting secrets file path with --secrets")
     if not os.path.isdir(data_path):
-        logger.error("Could not find data …
+        logger.error("Could not find data folder in: " + str(data_path))
         logger.error("Try setting data path with --data")
         return False
-    if not os.path.isdir(root_path…
-        logger.error("Could not find emhass/src …
+    if not os.path.isdir(root_path):
+        logger.error("Could not find emhass/src folder in: " + str(root_path))
         logger.error("Try setting emhass root path with --root")
         return False
+
     # Additional argument
     try:
         parser.add_argument(
@@ -1217,10 +1273,47 @@ def main():
     logger.info(
         "Version not found for emhass package. Or importlib exited with PackageNotFoundError.",
     )
-    …
+
+    # Setup config
+    config = {}
+    # Check if passed config file is yaml of json, build config accordingly
+    if config_path.exists():
+        config_file_ending = re.findall("(?<=\.).*$", str(config_path))
+        if len(config_file_ending) > 0:
+            match(config_file_ending[0]):
+                case "json":
+                    config = utils.build_config(emhass_conf,logger,defaults_path,config_path)
+                case "yaml":
+                    config = utils.build_config(emhass_conf,logger,defaults_path,config_path=config_path)
+                case "yml":
+                    config = utils.build_config(emhass_conf,logger,defaults_path,config_path=config_path)
+    # If unable to find config file, use only defaults_config.json
+    else:
+        logger.warning("Unable to obtain config.json file, building parameters with only defaults")
+        config = utils.build_config(emhass_conf,logger,defaults_path)
+    if type(config) is bool and not config:
+        raise Exception("Failed to find default config")
+
+
+    # Obtain secrets from secrets_emhass.yaml?
+    params_secrets = {}
+    emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger,secrets_path=secrets_path)
+    params_secrets.update(built_secrets)
+
+    # Build params
+    params = utils.build_params(emhass_conf, params_secrets, config, logger)
+    if type(params) is bool:
+        raise Exception("A error has occurred while building parameters")
+    # Add any passed params from args to params
+    if args.params:
+        params.update(json.loads(args.params))
+
     input_data_dict = set_input_data_dict(emhass_conf,
-                                          args.costfun, …
+                                          args.costfun, json.dumps(params), args.runtimeparams, args.action,
                                           logger, args.debug)
+    if type(input_data_dict) is bool:
+        raise Exception("A error has occurred while creating action objects")
+
     # Perform selected action
     if args.action == "perfect-optim":
         opt_res = perfect_forecast_optim(
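Taken together, main() now assembles its parameters in three steps: utils.build_config merges config_defaults.json with an optional config.json or legacy YAML, utils.build_secrets layers in secrets_emhass.yaml, and utils.build_params produces the final dict that is serialized into set_input_data_dict. A condensed restatement of that flow, not standalone, with the error handling from above elided:

    # Condensed from main(); assumes emhass_conf, logger, the paths and args exist as above.
    config = utils.build_config(emhass_conf, logger, defaults_path, config_path=config_path)
    emhass_conf, built_secrets = utils.build_secrets(emhass_conf, logger, secrets_path=secrets_path)
    params = utils.build_params(emhass_conf, built_secrets, config, logger)
    input_data_dict = set_input_data_dict(emhass_conf, args.costfun, json.dumps(params),
                                          args.runtimeparams, args.action, logger, args.debug)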
emhass/data/associations.csv
ADDED
@@ -0,0 +1,61 @@
+config_categorie,legacy_parameter_name,parameter,list_name
+retrieve_hass_conf,freq,optimization_time_step
+retrieve_hass_conf,days_to_retrieve,historic_days_to_retrieve
+retrieve_hass_conf,var_PV,sensor_power_photovoltaics
+retrieve_hass_conf,var_load,sensor_power_load_no_var_loads
+retrieve_hass_conf,load_negative,load_negative
+retrieve_hass_conf,set_zero_min,set_zero_min
+retrieve_hass_conf,var_replace_zero,sensor_replace_zero,list_sensor_replace_zero
+retrieve_hass_conf,var_interp,sensor_linear_interp,list_sensor_linear_interp
+retrieve_hass_conf,method_ts_round,method_ts_round
+retrieve_hass_conf,continual_publish,continual_publish
+params_secrets,time_zone,time_zone
+params_secrets,lat,Latitude
+params_secrets,lon,Longitude
+params_secrets,alt,Altitude
+optim_conf,set_use_battery,set_use_battery
+optim_conf,num_def_loads,number_of_deferrable_loads
+optim_conf,P_deferrable_nom,nominal_power_of_deferrable_loads,list_nominal_power_of_deferrable_loads
+optim_conf,def_total_hours,operating_hours_of_each_deferrable_load,list_operating_hours_of_each_deferrable_load
+optim_conf,treat_def_as_semi_cont,treat_deferrable_load_as_semi_cont,list_treat_deferrable_load_as_semi_cont
+optim_conf,set_def_constant,set_deferrable_load_single_constant,list_set_deferrable_load_single_constant
+optim_conf,def_start_penalty,set_deferrable_startup_penalty,list_set_deferrable_startup_penalty
+optim_conf,delta_forecast,delta_forecast_daily
+optim_conf,load_forecast_method,load_forecast_method
+optim_conf,load_cost_forecast_method,load_cost_forecast_method
+optim_conf,load_cost_hp,load_peak_hours_cost
+optim_conf,load_cost_hc,load_offpeak_hours_cost
+optim_conf,prod_price_forecast_method,production_price_forecast_method
+optim_conf,prod_sell_price,photovoltaic_production_sell_price
+optim_conf,set_total_pv_sell,set_total_pv_sell
+optim_conf,lp_solver,lp_solver
+optim_conf,lp_solver_path,lp_solver_path
+optim_conf,set_nocharge_from_grid,set_nocharge_from_grid
+optim_conf,set_nodischarge_to_grid,set_nodischarge_to_grid
+optim_conf,set_battery_dynamic,set_battery_dynamic
+optim_conf,battery_dynamic_max,battery_dynamic_max
+optim_conf,battery_dynamic_min,battery_dynamic_min
+optim_conf,weight_battery_discharge,weight_battery_discharge
+optim_conf,weight_battery_charge,weight_battery_charge
+optim_conf,weather_forecast_method,weather_forecast_method
+optim_conf,def_start_timestep,start_timesteps_of_each_deferrable_load,list_start_timesteps_of_each_deferrable_load
+optim_conf,def_end_timestep,end_timesteps_of_each_deferrable_load,list_end_timesteps_of_each_deferrable_load
+optim_conf,list_hp_periods,load_peak_hour_periods
+plant_conf,P_from_grid_max,maximum_power_from_grid
+plant_conf,P_to_grid_max,maximum_power_to_grid
+plant_conf,module_model,pv_module_model,list_pv_module_model
+plant_conf,inverter_model,pv_inverter_model,list_pv_inverter_model
+plant_conf,surface_tilt,surface_tilt,list_surface_tilt
+plant_conf,surface_azimuth,surface_azimuth,list_surface_azimuth
+plant_conf,modules_per_string,modules_per_string,list_modules_per_string
+plant_conf,strings_per_inverter,strings_per_inverter,list_strings_per_inverter
+plant_conf,inverter_is_hybrid,inverter_is_hybrid
+plant_conf,compute_curtailment,compute_curtailment
+plant_conf,Pd_max,battery_discharge_power_max
+plant_conf,Pc_max,battery_charge_power_max
+plant_conf,eta_disch,battery_discharge_efficiency
+plant_conf,eta_ch,battery_charge_efficiency
+plant_conf,Enom,battery_nominal_energy_capacity
+plant_conf,SOCmin,battery_minimum_state_of_charge
+plant_conf,SOCmax,battery_maximum_state_of_charge
+plant_conf,SOCtarget,battery_target_state_of_charge
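This table is the bridge between pre-0.11.0 parameter names and the new descriptive ones; the optional fourth column gives the list-valued name used by the configuration UI. A small sketch of how such a table can translate a legacy config dict (the loader here is illustrative, not the emhass implementation):

    import csv
    import pathlib

    def load_associations(path: pathlib.Path) -> dict:
        # legacy_parameter_name -> new parameter name
        with open(path, newline="") as f:
            return {row["legacy_parameter_name"]: row["parameter"]
                    for row in csv.DictReader(f)}

    def translate(legacy_conf: dict, assoc: dict) -> dict:
        # Rename legacy keys; unknown keys pass through untouched.
        return {assoc.get(key, key): value for key, value in legacy_conf.items()}

    assoc = load_associations(pathlib.Path("emhass/data/associations.csv"))
    print(translate({"freq": 30, "days_to_retrieve": 2}, assoc))
    # {'optimization_time_step': 30, 'historic_days_to_retrieve': 2}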