emhass 0.11.4__tar.gz → 0.12.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {emhass-0.11.4 → emhass-0.12.0}/CHANGELOG.md +7 -0
- {emhass-0.11.4 → emhass-0.12.0}/PKG-INFO +11 -6
- {emhass-0.11.4 → emhass-0.12.0}/README.md +10 -5
- {emhass-0.11.4 → emhass-0.12.0}/data/test_df_final.pkl +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/pyproject.toml +1 -1
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/command_line.py +48 -23
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/data/associations.csv +1 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/data/config_defaults.json +1 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/forecast.py +98 -4
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/optimization.py +1 -1
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/configuration_list.html +4 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/configuration_script.js +15 -2
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/data/param_definitions.json +8 -1
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/templates/configuration.html +1 -1
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/templates/index.html +1 -1
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/utils.py +72 -4
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass.egg-info/PKG-INFO +11 -6
- {emhass-0.11.4 → emhass-0.12.0}/tests/test_command_line_utils.py +1 -0
- {emhass-0.11.4 → emhass-0.12.0}/tests/test_forecast.py +17 -4
- {emhass-0.11.4 → emhass-0.12.0}/tests/test_machine_learning_forecaster.py +1 -1
- {emhass-0.11.4 → emhass-0.12.0}/tests/test_optimization.py +1 -1
- {emhass-0.11.4 → emhass-0.12.0}/tests/test_retrieve_hass.py +21 -2
- {emhass-0.11.4 → emhass-0.12.0}/tests/test_utils.py +31 -0
- {emhass-0.11.4 → emhass-0.12.0}/CODE_OF_CONDUCT.md +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/CONTRIBUTING.md +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/LICENSE +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/MANIFEST.in +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/data_load_cost_forecast.csv +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/data_load_forecast.csv +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/data_prod_price_forecast.csv +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/data_train_load_clustering.pkl +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/data_train_load_forecast.pkl +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/data_weather_forecast.csv +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/heating_prediction.csv +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/opt_res_latest.csv +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/opt_res_perfect_optim_cost.csv +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/opt_res_perfect_optim_profit.csv +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/opt_res_perfect_optim_self-consumption.csv +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/test_response_get_data_get_method.pbz2 +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/test_response_scrapper_get_method.pbz2 +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/test_response_solarforecast_get_method.pbz2 +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/data/test_response_solcast_get_method.pbz2 +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/setup.cfg +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/__init__.py +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/data/cec_inverters.pbz2 +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/data/cec_modules.pbz2 +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/machine_learning_forecaster.py +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/machine_learning_regressor.py +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/retrieve_hass.py +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/advanced.html +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/basic.html +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/img/emhass_icon.png +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/img/emhass_logo_short.svg +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/img/feather-sprite.svg +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/script.js +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/style.css +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/templates/template.html +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass/web_server.py +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass.egg-info/SOURCES.txt +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass.egg-info/dependency_links.txt +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass.egg-info/entry_points.txt +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass.egg-info/requires.txt +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/src/emhass.egg-info/top_level.txt +0 -0
- {emhass-0.11.4 → emhass-0.12.0}/tests/test_machine_learning_regressor.py +0 -0

{emhass-0.11.4 → emhass-0.12.0}/CHANGELOG.md

```diff
@@ -1,5 +1,12 @@
 # Changelog

+## 0.12.0 - 2024-12-29
+### BREAKING CHANGE
+- The solar PV system is now optional. If you have a PV system then use the switch on the webui or set the `set_use_pv` parameter to `True`.
+### Improvement
+- A new load power forecast method based on statistic and typical household load power consumption. This and the optional PV, will make it easier for new users to on-board EMHASS because this new default behavior does not require any data retrieval from Home Assistant or any other external source.
+- Updated documentation.
+
 ## 0.11.4 - 2024-12-24
 ### Fix
 - Fix bug when treating runtime params, fix optimization_time_step timedelta
```
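
The no-retrieval default behavior described in the changelog can be exercised end to end over the REST API. The following is a minimal sketch, not part of the package: it assumes EMHASS is reachable at `http://localhost:5000` (as in the `curl` example in the README further down) and that the `publish-data` action is available (that action name is not shown in this diff).

```python
import requests

# Minimal sketch: with the 0.12.0 defaults (no PV, 'typical' load forecast),
# an optimization can be launched right after install with an empty payload.
BASE_URL = "http://localhost:5000/action"  # assumed EMHASS address

# Launch a day-ahead optimization.
resp = requests.post(f"{BASE_URL}/dayahead-optim", json={}, timeout=60)
resp.raise_for_status()

# Publish the optimization results back to Home Assistant entities.
resp = requests.post(f"{BASE_URL}/publish-data", json={}, timeout=60)
resp.raise_for_status()
```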

{emhass-0.11.4 → emhass-0.12.0}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: emhass
-Version: 0.11.4
+Version: 0.12.0
 Summary: An Energy Management System for Home Assistant
 Author-email: David HERNANDEZ <davidusb@gmail.com>
 License: MIT
```

The two remaining PKG-INFO hunks (`@@ -132,11 +132,16 @@` and `@@ -144,10 +149,10 @@`) belong to the embedded long description and are identical, apart from line offsets, to the README.md changes shown next.

{emhass-0.11.4 → emhass-0.12.0}/README.md

```diff
@@ -88,11 +88,16 @@ Installation instructions and example Home Assistant automation configurations a
 You must follow these steps to make EMHASS work properly:

 1) Install and run EMHASS.
-- There are multiple methods of installing and Running EMHASS. See [Installation Method](
+- There are multiple methods of installing and Running EMHASS. See [Installation Method](#Installation-Methods) below to pick a method that best suits your use case.

-2) Define all the parameters in the configuration file *(`config.json`)* or configuration page *(`YOURIP:5000/configuration`)*.
+2) Define all the parameters in the configuration file *(`config.json`)* or configuration page *(`YOURIP:5000/configuration`)*.
+```{note}
+
+New in EMHASS v0.12.0: the default configuration does not need to retrieve any data from Home Assistant! After installing and running the add-on, EMHASS should start and it will be ready to launch an optimization.
+```
 - See the description for each parameter in the [configuration](https://emhass.readthedocs.io/en/latest/config.html) docs.
-
+- EMHASS has a default configuration with 2 deferrable loads, no solar PV, no batteries and a basic load power forecasting method.
+- If you want to consider solar PV and more advanced load power forecast methods, you will need to define the main data entering EMHASS. This will be the Home Assistant sensor/variable `sensor.power_load_no_var_loads`, for the load power of your household excluding the power of the deferrable loads that you want to optimize, and the sensor/variable `sensor.power_photovoltaics` for the name of your Home Assistant variable containing the PV produced power (if solar PV is activated).
 - If you have a PV installation then this dedicated web app can be useful for finding your inverter and solar panel models: [https://emhass-pvlib-database.streamlit.app/](https://emhass-pvlib-database.streamlit.app/)

 4) Launch the optimization and check the results.
@@ -100,10 +105,10 @@ You must follow these steps to make EMHASS work properly:
 - Or with a `curl` command like this: `curl -i -H 'Content-Type:application/json' -X POST -d '{}' http://localhost:5000/action/dayahead-optim`.

 5) If you’re satisfied with the optimization results then you can set the optimization and data publish task commands in an automation.
-- You can read more about this in the [usage](
+- You can read more about this in the [usage](#usage) section below.

 6) The final step is to link the deferrable loads variables to real switches on your installation.
-- An example code for this using automations and the shell command integration is presented below in the [usage](
+- An example code for this using automations and the shell command integration is presented below in the [usage](#usage) section.

 A more detailed workflow is given below:

```

{emhass-0.11.4 → emhass-0.12.0}/data/test_df_final.pkl

Binary file (contents changed; not shown).

{emhass-0.11.4 → emhass-0.12.0}/src/emhass/command_line.py

```diff
@@ -69,7 +69,7 @@ def set_input_data_dict(
     retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params, logger)
     if type(retrieve_hass_conf) is bool:
         return False
-
+
     # Treat runtimeparams
     params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
         runtimeparams,
@@ -81,7 +81,8 @@ def set_input_data_dict(
         logger,
         emhass_conf,
     )
-
+
+    # Define the data retrieve object
     rh = RetrieveHass(
         retrieve_hass_conf["hass_url"],
         retrieve_hass_conf["long_lived_token"],
@@ -92,6 +93,21 @@ def set_input_data_dict(
         logger,
         get_data_from_file=get_data_from_file,
     )
+
+    # Retrieve basic configuration data from hass
+    if get_data_from_file:
+        with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
+            _, _, _, rh.ha_config = pickle.load(inp)
+    else:
+        rh.get_ha_config()
+
+    # Update the params dict using data from the HA configuration
+    params = utils.update_params_with_ha_config(
+        params,
+        rh.ha_config,
+    )
+
+    # Define the forecast and optimization objects
     fcst = Forecast(
         retrieve_hass_conf,
         optim_conf,
@@ -111,12 +127,13 @@ def set_input_data_dict(
         emhass_conf,
         logger,
     )
+
     # Perform setup based on type of action
     if set_type == "perfect-optim":
         # Retrieve data from hass
         if get_data_from_file:
             with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
-                rh.df_final, days_list, var_list = pickle.load(inp)
+                rh.df_final, days_list, var_list, rh.ha_config = pickle.load(inp)
             retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(var_list[0])
             retrieve_hass_conf["sensor_power_photovoltaics"] = str(var_list[1])
             retrieve_hass_conf["sensor_linear_interp"] = [
@@ -154,12 +171,18 @@ def set_input_data_dict(
         P_PV_forecast, P_load_forecast, df_input_data_dayahead = None, None, None
     elif set_type == "dayahead-optim":
         # Get PV and load forecasts
-        df_weather = fcst.get_weather_forecast(
-            method=optim_conf["weather_forecast_method"]
-        )
-        if isinstance(df_weather, bool) and not df_weather:
-            return False
-        P_PV_forecast = fcst.get_power_from_weather(df_weather)
+        if (
+            optim_conf["set_use_pv"]
+            or optim_conf.get("weather_forecast_method", None) == "list"
+        ):
+            df_weather = fcst.get_weather_forecast(
+                method=optim_conf["weather_forecast_method"]
+            )
+            if isinstance(df_weather, bool) and not df_weather:
+                return False
+            P_PV_forecast = fcst.get_power_from_weather(df_weather)
+        else:
+            P_PV_forecast = pd.Series(0, index=fcst.forecast_dates)
         P_load_forecast = fcst.get_load_forecast(
             method=optim_conf["load_forecast_method"]
         )
@@ -208,7 +231,7 @@ def set_input_data_dict(
         # Retrieve data from hass
         if get_data_from_file:
             with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
-                rh.df_final, days_list, var_list = pickle.load(inp)
+                rh.df_final, days_list, var_list, rh.ha_config = pickle.load(inp)
             retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(var_list[0])
             retrieve_hass_conf["sensor_power_photovoltaics"] = str(var_list[1])
             retrieve_hass_conf["sensor_linear_interp"] = [
@@ -241,14 +264,20 @@ def set_input_data_dict(
             return False
         df_input_data = rh.df_final.copy()
         # Get PV and load forecasts
-        df_weather = fcst.get_weather_forecast(
-            method=optim_conf["weather_forecast_method"]
-        )
-        if isinstance(df_weather, bool) and not df_weather:
-            return False
-        P_PV_forecast = fcst.get_power_from_weather(
-            df_weather, set_mix_forecast=True, df_now=df_input_data
-        )
+        if (
+            optim_conf["set_use_pv"]
+            or optim_conf.get("weather_forecast_method", None) == "list"
+        ):
+            df_weather = fcst.get_weather_forecast(
+                method=optim_conf["weather_forecast_method"]
+            )
+            if isinstance(df_weather, bool) and not df_weather:
+                return False
+            P_PV_forecast = fcst.get_power_from_weather(
+                df_weather, set_mix_forecast=True, df_now=df_input_data
+            )
+        else:
+            P_PV_forecast = pd.Series(0, index=fcst.forecast_dates)
         P_load_forecast = fcst.get_load_forecast(
             method=optim_conf["load_forecast_method"],
             set_mix_forecast=True,
@@ -403,10 +432,8 @@ def weather_forecast_cache(
     :rtype: bool

    """
-
     # Parsing yaml
     retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params, logger)
-
     # Treat runtimeparams
     params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
         runtimeparams,
@@ -417,8 +444,8 @@ def weather_forecast_cache(
         "forecast",
         logger,
         emhass_conf,
+        {},
     )
-
     # Make sure weather_forecast_cache is true
     if (params != None) and (params != "null"):
         params = json.loads(params)
@@ -426,12 +453,10 @@ def weather_forecast_cache(
         params = {}
     params["passed_data"]["weather_forecast_cache"] = True
     params = json.dumps(params)
-
     # Create Forecast object
     fcst = Forecast(
         retrieve_hass_conf, optim_conf, plant_conf, params, emhass_conf, logger
     )
-
     result = fcst.get_weather_forecast(optim_conf["weather_forecast_method"])
     if isinstance(result, bool) and not result:
         return False
```
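
The new conditional above falls back to an all-zero PV forecast when `set_use_pv` is disabled and no forecast list is passed at runtime. A standalone sketch of that fallback follows; the objects and values below are illustrative stand-ins, not EMHASS internals.

```python
import pandas as pd

# Illustrative stand-ins for the parsed optim_conf and fcst.forecast_dates.
optim_conf = {"set_use_pv": False, "weather_forecast_method": "scrapper"}
forecast_dates = pd.date_range(
    "2024-12-30", periods=48, freq="30min", tz="Europe/Paris"
)

if optim_conf["set_use_pv"] or optim_conf.get("weather_forecast_method") == "list":
    # PV enabled (or a weather/PV list passed at runtime): EMHASS fetches a
    # weather forecast and converts it to PV power, as in the diff above.
    P_PV_forecast = None  # placeholder for fcst.get_power_from_weather(...)
else:
    # PV disabled: the PV forecast is simply zero over the whole horizon.
    P_PV_forecast = pd.Series(0, index=forecast_dates)

print(P_PV_forecast.head())
```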

{emhass-0.11.4 → emhass-0.12.0}/src/emhass/data/associations.csv

```diff
@@ -15,6 +15,7 @@ params_secrets,lon,Longitude
 params_secrets,alt,Altitude
 optim_conf,costfun,costfun
 optim_conf,logging_level,logging_level
+optim_conf,set_use_pv,set_use_pv
 optim_conf,set_use_battery,set_use_battery
 optim_conf,num_def_loads,number_of_deferrable_loads
 optim_conf,P_deferrable_nom,nominal_power_of_deferrable_loads,list_nominal_power_of_deferrable_loads
```

{emhass-0.11.4 → emhass-0.12.0}/src/emhass/forecast.py

```diff
@@ -886,10 +886,73 @@ class Forecast(object):
             forecast_out = pd.concat([forecast_out, forecast_tp], axis=0)
         return forecast_out

+    @staticmethod
+    def resample_data(data, freq, current_freq):
+        r"""
+        Resample a DataFrame with a custom frequency.
+
+        :param data: Original time series data with a DateTimeIndex.
+        :type data: pd.DataFrame
+        :param freq: Desired frequency for resampling (e.g., pd.Timedelta("10min")).
+        :type freq: pd.Timedelta
+        :return: Resampled data at the specified frequency.
+        :rtype: pd.DataFrame
+        """
+        if freq > current_freq:
+            # Downsampling
+            # Use 'mean' to aggregate or choose other options ('sum', 'max', etc.)
+            resampled_data = data.resample(freq).mean()
+        elif freq < current_freq:
+            # Upsampling
+            # Use 'asfreq' to create empty slots, then interpolate
+            resampled_data = data.resample(freq).asfreq()
+            resampled_data = resampled_data.interpolate(method='time')
+        else:
+            # No resampling needed
+            resampled_data = data.copy()
+        return resampled_data
+
+    @staticmethod
+    def get_typical_load_forecast(data, forecast_date):
+        r"""
+        Forecast the load profile for the next day based on historic data.
+
+        :param data: A DataFrame with a DateTimeIndex containing the historic load data.
+            Must include a 'load' column.
+        :type data: pd.DataFrame
+        :param forecast_date: The date for which the forecast will be generated.
+        :type forecast_date: pd.Timestamp
+        :return: A Series with the forecasted load profile for the next day and a list of days used
+            to calculate the forecast.
+        :rtype: tuple (pd.Series, list)
+        """
+        # Ensure the 'load' column exists
+        if 'load' not in data.columns:
+            raise ValueError("Data must have a 'load' column.")
+        # Filter historic data for the same month and day of the week
+        month = forecast_date.month
+        day_of_week = forecast_date.dayofweek
+        historic_data = data[(data.index.month == month) & (data.index.dayofweek == day_of_week)]
+        used_days = np.unique(historic_data.index.date)
+        # Align all historic data to the forecast day
+        aligned_data = []
+        for day in used_days:
+            daily_data = data[data.index.date == pd.Timestamp(day).date()]
+            aligned_daily_data = daily_data.copy()
+            aligned_daily_data.index = aligned_daily_data.index.map(
+                lambda x: x.replace(year=forecast_date.year, month=forecast_date.month, day=forecast_date.day)
+            )
+            aligned_data.append(aligned_daily_data)
+        # Combine all aligned historic data into a single DataFrame
+        combined_data = pd.concat(aligned_data)
+        # Compute the mean load for each timestamp
+        forecast = combined_data.groupby(combined_data.index).mean()
+        return forecast, used_days
+
     def get_load_forecast(
         self,
         days_min_load_forecast: Optional[int] = 3,
-        method: Optional[str] = "
+        method: Optional[str] = "typical",
         csv_path: Optional[str] = "data_load_forecast.csv",
         set_mix_forecast: Optional[bool] = False,
         df_now: Optional[pd.DataFrame] = pd.DataFrame(),
@@ -904,10 +967,11 @@ class Forecast(object):
            will be used to generate a naive forecast, defaults to 3
        :type days_min_load_forecast: int, optional
        :param method: The method to be used to generate load forecast, the options \
+            are 'typical' for a typical household load consumption curve, \
            are 'naive' for a persistance model, 'mlforecaster' for using a custom \
            previously fitted machine learning model, 'csv' to read the forecast from \
            a CSV file and 'list' to use data directly passed at runtime as a list of \
-            values. Defaults to '
+            values. Defaults to 'typical'.
        :type method: str, optional
        :param csv_path: The path to the CSV file used when method = 'csv', \
        defaults to "/data/data_load_forecast.csv"
@@ -956,7 +1020,7 @@ class Forecast(object):
         if self.get_data_from_file:
             filename_path = self.emhass_conf["data_path"] / "test_df_final.pkl"
             with open(filename_path, "rb") as inp:
-                rh.df_final, days_list, var_list = pickle.load(inp)
+                rh.df_final, days_list, var_list, rh.ha_config = pickle.load(inp)
             self.var_load = var_list[0]
             self.retrieve_hass_conf["sensor_power_load_no_var_loads"] = (
                 self.var_load
@@ -977,7 +1041,37 @@ class Forecast(object):
            ):
                return False
            df = rh.df_final.copy()[[self.var_load_new]]
-        if method == "
+        if method == "typical":  # using typical statistical data from a household power consumption
+            # Loading data from history file
+            model_type = "load_clustering"
+            data_path = self.emhass_conf["data_path"] / str("data_train_" + model_type + ".pkl")
+            with open(data_path, "rb") as fid:
+                data, _ = pickle.load(fid)
+            # Resample the data if needed
+            current_freq = pd.Timedelta('30min')
+            if self.freq != current_freq:
+                data = Forecast.resample_data(data, self.freq, current_freq)
+            # Generate forecast
+            data_list = []
+            dates_list = np.unique(self.forecast_dates.date).tolist()
+            forecast = pd.DataFrame()
+            for date in dates_list:
+                forecast_date = pd.Timestamp(date)
+                data.columns = ['load']
+                forecast_tmp, used_days = Forecast.get_typical_load_forecast(data, forecast_date)
+                self.logger.debug(f"Using {len(used_days)} days of data to generate the forecast.")
+                # Normalize the forecast
+                forecast_tmp = forecast_tmp*self.plant_conf['maximum_power_from_grid']/9000
+                data_list.extend(forecast_tmp.values.ravel().tolist())
+                if len(forecast) == 0:
+                    forecast = forecast_tmp
+                else:
+                    forecast = pd.concat([forecast, forecast_tmp], axis=0)
+            forecast.index = forecast.index.tz_convert(self.time_zone)
+            forecast_out = forecast.loc[forecast.index.intersection(self.forecast_dates)]
+            forecast_out.index.name = 'ts'
+            forecast_out = forecast_out.rename(columns={'load': 'yhat'})
+        elif method == "naive":  # using a naive approach
             mask_forecast_out = (
                 df.index > days_list[-1] - self.optim_conf["delta_forecast_daily"]
             )
```
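
The two static helpers added above can be exercised on their own. A small sketch with synthetic data follows; it assumes emhass 0.12.0 is installed, and the index range and load values are made up for illustration.

```python
import numpy as np
import pandas as pd

from emhass.forecast import Forecast

# Synthetic 30-minute load history spanning two months.
idx = pd.date_range("2024-11-01", "2024-12-29 23:30", freq="30min", tz="UTC")
rng = np.random.default_rng(0)
data = pd.DataFrame({"load": rng.uniform(200.0, 1500.0, len(idx))}, index=idx)

# Downsample the 30-minute history to a 1-hour step.
hourly = Forecast.resample_data(data, pd.Timedelta("1h"), pd.Timedelta("30min"))
print(hourly.shape)

# Build a typical profile for a given date by averaging all historic days
# sharing its month and weekday (here, the December Mondays in the data).
profile, used_days = Forecast.get_typical_load_forecast(
    data, pd.Timestamp("2024-12-30")
)
print(len(used_days), profile.head())
```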

{emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/configuration_list.html

```diff
@@ -29,6 +29,10 @@
   <div id="Solar System (PV)" class="section-card">
     <div class="section-card-header">
       <h4>Solar System (PV)</h4>
+      <label class="switch"> <!-- switch connected to set_use_pv -->
+        <input id="set_use_pv" type="checkbox">
+        <span class="slider"></span>
+      </label>
     </div>
     <div class="section-body"> </div> <!-- parameters will get generated here -->
   </div>
```

{emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/configuration_script.js

```diff
@@ -121,7 +121,7 @@ function loadConfigurationListView(param_definitions, config, list_html) {
  }

  //list parameters used in the section headers
-  header_input_list = ["set_use_battery", "number_of_deferrable_loads"];
+  header_input_list = ["set_use_battery", "set_use_pv", "number_of_deferrable_loads"];

  //get the main container and append list template html
  document.getElementById("configuration-container").innerHTML = list_html;
@@ -265,7 +265,7 @@ function buildParamContainers(
  });

  //check initial checkbox state, check "value" of input and match to "checked" value
-  let checkbox =
+  let checkbox = SectionContainer.querySelectorAll("input[type='checkbox']");
  checkbox.forEach(function (answer) {
    let value = answer.value === "true";
    answer.checked = value;
@@ -559,6 +559,19 @@ function headerElement(element, param_definitions, config) {
      }
      break;

+    //if set_use_pv, add or remove PV section (inc. related params)
+    case "set_use_pv":
+      if (element.checked) {
+        param_container.innerHTML = "";
+        buildParamContainers("Solar System (PV)", param_definitions["Solar System (PV)"], config, [
+          "set_use_pv",
+        ]);
+        element.checked = true;
+      } else {
+        param_container.innerHTML = "";
+      }
+      break;
+
    //if number_of_deferrable_loads, the number of inputs in the "Deferrable Loads" section should add up to number_of_deferrable_loads value in header
    case "number_of_deferrable_loads":
      //get a list of param in section
```

{emhass-0.11.4 → emhass-0.12.0}/src/emhass/static/data/param_definitions.json

```diff
@@ -101,11 +101,12 @@
       "Description": "The load forecast method that will be used. The options are ‘csv’ to load a CSV file or ‘naive’ for a simple 1-day persistence model.",
       "input": "select",
       "select_options": [
+        "typical",
         "naive",
         "mlforecaster",
         "csv"
       ],
-      "default_value": "
+      "default_value": "typical"
     },
     "set_total_pv_sell": {
       "friendly_name": "PV straight to grid",
@@ -229,6 +230,12 @@
       }
     },
     "Solar System (PV)": {
+      "set_use_pv": {
+        "friendly_name": "Enable PV system",
+        "Description": "Set to True if we should consider an solar PV system. Defaults to False",
+        "input": "boolean",
+        "default_value": false
+      },
       "pv_module_model": {
         "friendly_name": "PV module model name",
         "Description": "The PV module model. This parameter can be a list of items to enable the simulation of mixed orientation systems.",
```

{emhass-0.11.4 → emhass-0.12.0}/src/emhass/templates/configuration.html

```diff
@@ -66,7 +66,7 @@
     </div>
   </div>
   <footer class="footer">
-    <p style="margin-top:10px; text-align:center;">© MIT License | Copyright (c) 2021-
+    <p style="margin-top:10px; text-align:center;">© MIT License | Copyright (c) 2021-2025 David
       HERNANDEZ</p>
   </footer>
 </div>
```

{emhass-0.11.4 → emhass-0.12.0}/src/emhass/templates/index.html

```diff
@@ -69,7 +69,7 @@
   <div>

   <footer class="footer">
-    <p style="margin-top:10px; text-align:center;">© MIT License | Copyright (c) 2021-
+    <p style="margin-top:10px; text-align:center;">© MIT License | Copyright (c) 2021-2025 David
       HERNANDEZ</p>
   </footer>
 </body>
```

{emhass-0.11.4 → emhass-0.12.0}/src/emhass/utils.py

```diff
@@ -138,6 +138,70 @@ def get_forecast_dates(
     return forecast_dates


+def update_params_with_ha_config(
+    params: str,
+    ha_config: dict,
+) -> dict:
+    """
+    Update the params with the Home Assistant configuration.
+
+    Parameters
+    ----------
+    params : str
+        The serialized params.
+    ha_config : dict
+        The Home Assistant configuration.
+
+    Returns
+    -------
+    dict
+        The updated params.
+    """
+    # Load serialized params
+    params = json.loads(params)
+    # Update params
+    currency_to_symbol = {
+        'EUR': '€',
+        'USD': '$',
+        'GBP': '£',
+        'YEN': '¥',
+        'JPY': '¥',
+        'AUD': 'A$',
+        'CAD': 'C$',
+        'CHF': 'CHF',  # Swiss Franc has no special symbol
+        'CNY': '¥',
+        'INR': '₹',
+        # Add more as needed
+    }
+    if 'currency' in ha_config.keys():
+        ha_config['currency'] = currency_to_symbol.get(ha_config['currency'], 'Unknown')
+    else:
+        ha_config['currency'] = '€'
+    if 'unit_system' not in ha_config.keys():
+        ha_config['unit_system'] = {'temperature': '°C'}
+
+    for k in range(params["optim_conf"]["number_of_deferrable_loads"]):
+        params['passed_data']['custom_predicted_temperature_id'][k].update(
+            {"unit_of_measurement": ha_config['unit_system']['temperature']}
+        )
+    updated_passed_dict = {
+        "custom_cost_fun_id": {
+            "unit_of_measurement": ha_config['currency'],
+        },
+        "custom_unit_load_cost_id": {
+            "unit_of_measurement": f"{ha_config['currency']}/kWh",
+        },
+        "custom_unit_prod_price_id": {
+            "unit_of_measurement": f"{ha_config['currency']}/kWh",
+        },
+    }
+    for key, value in updated_passed_dict.items():
+        params["passed_data"][key]["unit_of_measurement"] = value["unit_of_measurement"]
+    # Serialize the final params
+    params = json.dumps(params, default=str)
+    return params
+
+
 def treat_runtimeparams(
     runtimeparams: str,
     params: str,
@@ -183,6 +247,10 @@ def treat_runtimeparams(
     params["optim_conf"].update(optim_conf)
     params["plant_conf"].update(plant_conf)

+    # Check defaults on HA retrieved config
+    default_currency_unit = '€'
+    default_temperature_unit = '°C'
+
     # Some default data needed
     custom_deferrable_forecast_id = []
     custom_predicted_temperature_id = []
@@ -197,7 +265,7 @@ def treat_runtimeparams(
         custom_predicted_temperature_id.append(
             {
                 "entity_id": "sensor.temp_predicted{}".format(k),
-                "unit_of_measurement":
+                "unit_of_measurement": default_temperature_unit,
                 "friendly_name": "Predicted temperature {}".format(k),
             }
         )
@@ -239,7 +307,7 @@ def treat_runtimeparams(
        },
        "custom_cost_fun_id": {
            "entity_id": "sensor.total_cost_fun_value",
-            "unit_of_measurement":
+            "unit_of_measurement": default_currency_unit,
            "friendly_name": "Total cost function value",
        },
        "custom_optim_status_id": {
@@ -249,12 +317,12 @@ def treat_runtimeparams(
        },
        "custom_unit_load_cost_id": {
            "entity_id": "sensor.unit_load_cost",
-            "unit_of_measurement": "
+            "unit_of_measurement": f"{default_currency_unit}/kWh",
            "friendly_name": "Unit Load Cost",
        },
        "custom_unit_prod_price_id": {
            "entity_id": "sensor.unit_prod_price",
-            "unit_of_measurement": "
+            "unit_of_measurement": f"{default_currency_unit}/kWh",
            "friendly_name": "Unit Prod Price",
        },
        "custom_deferrable_forecast_id": custom_deferrable_forecast_id,
```
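
The new helper above only touches a handful of keys inside the serialized `params`, so it can be tried in isolation. The sketch below mirrors the new unit test in `tests/test_utils.py` but with a hand-built minimal payload; it assumes emhass 0.12.0 is installed, and the payload is a reduced stand-in for what `treat_runtimeparams` normally produces.

```python
import json

from emhass import utils

# Minimal serialized params containing only the keys the helper updates.
params = json.dumps({
    "optim_conf": {"number_of_deferrable_loads": 1},
    "passed_data": {
        "custom_predicted_temperature_id": [
            {"entity_id": "sensor.temp_predicted0", "unit_of_measurement": "°C"}
        ],
        "custom_cost_fun_id": {"unit_of_measurement": "€"},
        "custom_unit_load_cost_id": {"unit_of_measurement": "€/kWh"},
        "custom_unit_prod_price_id": {"unit_of_measurement": "€/kWh"},
    },
})
ha_config = {"currency": "USD", "unit_system": {"temperature": "°F"}}

updated = json.loads(utils.update_params_with_ha_config(params, ha_config))
# The currency and temperature units now follow the Home Assistant config.
assert updated["passed_data"]["custom_cost_fun_id"]["unit_of_measurement"] == "$"
assert updated["passed_data"]["custom_unit_load_cost_id"]["unit_of_measurement"] == "$/kWh"
assert updated["passed_data"]["custom_predicted_temperature_id"][0]["unit_of_measurement"] == "°F"
```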

{emhass-0.11.4 → emhass-0.12.0}/src/emhass.egg-info/PKG-INFO

Identical to the PKG-INFO diff above: the version is bumped from 0.11.4 to 0.12.0 and the embedded long description picks up the same README.md changes (hunks `@@ -1,6 +1,6 @@`, `@@ -132,11 +132,16 @@` and `@@ -144,10 +149,10 @@`).

{emhass-0.11.4 → emhass-0.12.0}/tests/test_command_line_utils.py

```diff
@@ -149,6 +149,7 @@ class TestCommandLineUtils(unittest.TestCase):
             "load_power_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
             "load_cost_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
             "prod_price_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
+            "prediction_horizon": 10,
         }
         runtimeparams_json = json.dumps(runtimeparams)
         params = copy.deepcopy(json.loads(self.params_json))
```
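
Runtime parameters like the `prediction_horizon` added to the test fixture above are normally passed in the POST body of an action call. A sketch follows; the `naive-mpc-optim` action name and the local address are assumptions (they are not shown in this diff), and the forecast lists are illustrative only.

```python
import requests

# Sketch: pass runtime parameters, including a prediction horizon, in the
# POST body of an action call against a local EMHASS instance (assumed URL).
runtimeparams = {
    "load_power_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
    "load_cost_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
    "prod_price_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
    "prediction_horizon": 10,
}
resp = requests.post(
    "http://localhost:5000/action/naive-mpc-optim",  # assumed action name
    json=runtimeparams,
    timeout=60,
)
resp.raise_for_status()
```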

{emhass-0.11.4 → emhass-0.12.0}/tests/test_forecast.py

```diff
@@ -76,7 +76,7 @@ class TestForecast(unittest.TestCase):
         # Obtain sensor values from saved file
         if self.get_data_from_file:
             with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
-                self.rh.df_final, self.days_list, self.var_list = pickle.load(inp)
+                self.rh.df_final, self.days_list, self.var_list, self.rh.ha_config = pickle.load(inp)
             self.retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(
                 self.var_list[0]
             )
@@ -429,8 +429,8 @@ class TestForecast(unittest.TestCase):
         )
         # Obtain sensor values from saved file
         if self.get_data_from_file:
-            with open(
-                rh.df_final, days_list, var_list = pickle.load(inp)
+            with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
+                rh.df_final, days_list, var_list, rh.ha_config = pickle.load(inp)
             retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(self.var_list[0])
             retrieve_hass_conf["sensor_power_photovoltaics"] = str(self.var_list[1])
             retrieve_hass_conf["sensor_linear_interp"] = [
@@ -667,7 +667,7 @@ class TestForecast(unittest.TestCase):
         # Obtain sensor values from saved file
         if self.get_data_from_file:
             with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
-                rh.df_final, days_list, var_list = pickle.load(inp)
+                rh.df_final, days_list, var_list, rh.ha_config = pickle.load(inp)
             retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(self.var_list[0])
             retrieve_hass_conf["sensor_power_photovoltaics"] = str(self.var_list[1])
             retrieve_hass_conf["sensor_linear_interp"] = [
@@ -901,6 +901,19 @@ class TestForecast(unittest.TestCase):
         self.assertTrue((P_load_forecast.index == self.fcst.forecast_dates).all())
         self.assertEqual(len(self.P_PV_forecast), len(P_load_forecast))

+    # Test load forecast with typical statistics method
+    def test_get_load_forecast_typical(self):
+        P_load_forecast = self.fcst.get_load_forecast(method='typical')
+        self.assertIsInstance(P_load_forecast, pd.core.series.Series)
+        self.assertIsInstance(
+            P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex
+        )
+        self.assertIsInstance(
+            P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype
+        )
+        self.assertEqual(P_load_forecast.index.tz, self.fcst.time_zone)
+        self.assertEqual(len(self.P_PV_forecast), len(P_load_forecast))
+
     # Test load cost forecast dataframe output using saved csv referece file
     def test_get_load_cost_forecast(self):
         df_input_data = self.fcst.get_load_cost_forecast(self.df_input_data)
```

{emhass-0.11.4 → emhass-0.12.0}/tests/test_machine_learning_forecaster.py

```diff
@@ -100,7 +100,7 @@ class TestMLForecaster(unittest.TestCase):
         )
         # Open and extract saved sensor data to test against
         with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
-            self.rh.df_final, self.days_list, self.var_list = pickle.load(inp)
+            self.rh.df_final, self.days_list, self.var_list, self.rh.ha_config = pickle.load(inp)

     def test_fit(self):
         df_pred, df_pred_backtest = self.mlf.fit()
```

{emhass-0.11.4 → emhass-0.12.0}/tests/test_optimization.py

```diff
@@ -73,7 +73,7 @@ class TestOptimization(unittest.TestCase):
         # Obtain sensor values from saved file
         if get_data_from_file:
             with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
-                self.rh.df_final, self.days_list, self.var_list = pickle.load(inp)
+                self.rh.df_final, self.days_list, self.var_list, self.rh.ha_config = pickle.load(inp)
             self.retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(
                 self.var_list[0]
             )
```

{emhass-0.11.4 → emhass-0.12.0}/tests/test_retrieve_hass.py

```diff
@@ -92,7 +92,7 @@ class TestRetrieveHass(unittest.TestCase):
         # Obtain sensor values from saved file
         if self.get_data_from_file:
             with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
-                self.rh.df_final, self.days_list, self.var_list = pickle.load(inp)
+                self.rh.df_final, self.days_list, self.var_list, self.rh.ha_config = pickle.load(inp)
         # Else obtain sensor values from HA
         else:
             self.days_list = get_days_list(
@@ -108,11 +108,30 @@ class TestRetrieveHass(unittest.TestCase):
                 minimal_response=False,
                 significant_changes_only=False,
             )
+            # Mocking retrieve of ha_config using: self.rh.get_ha_config()
+            self.rh.ha_config = {
+                'country': 'FR',
+                'currency': 'EUR',
+                'elevation': 4807,
+                'latitude': 48.83,
+                'longitude': 6.86,
+                'time_zone': 'Europe/Paris',
+                'unit_system': {
+                    'length': 'km',
+                    'accumulated_precipitation': 'mm',
+                    'area': 'm²',
+                    'mass': 'g',
+                    'pressure': 'Pa',
+                    'temperature': '°C',
+                    'volume': 'L',
+                    'wind_speed': 'm/s'
+                }
+            }
         # Check to save updated data to file
         if save_data_to_file:
             with open(emhass_conf["data_path"] / "test_df_final.pkl", "wb") as outp:
                 pickle.dump(
-                    (self.rh.df_final, self.days_list, self.var_list),
+                    (self.rh.df_final, self.days_list, self.var_list, self.rh.ha_config),
                     outp,
                     pickle.HIGHEST_PROTOCOL,
                 )
```

{emhass-0.11.4 → emhass-0.12.0}/tests/test_utils.py

```diff
@@ -439,6 +439,37 @@ class TestCommandLineUtils(unittest.TestCase):
         self.assertIsInstance(runtimeparams["load_cost_forecast"], str)
         self.assertIsInstance(runtimeparams["prod_price_forecast"], str)

+    def test_update_params_with_ha_config(self):
+        # Test dayahead runtime params
+        retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(
+            self.params_json, logger
+        )
+        set_type = "dayahead-optim"
+        params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
+            self.runtimeparams_json,
+            self.params_json,
+            retrieve_hass_conf,
+            optim_conf,
+            plant_conf,
+            set_type,
+            logger,
+            emhass_conf,
+        )
+        ha_config = {
+            'currency': 'USD',
+            'unit_system': {'temperature': '°F'}
+        }
+        params_json = utils.update_params_with_ha_config(
+            params,
+            ha_config,
+        )
+        params = json.loads(params_json)
+        self.assertTrue(params["passed_data"]["custom_predicted_temperature_id"][0]["unit_of_measurement"] == "°F")
+        self.assertTrue(params["passed_data"]["custom_predicted_temperature_id"][1]["unit_of_measurement"] == "°F")
+        self.assertTrue(params["passed_data"]["custom_cost_fun_id"]["unit_of_measurement"] == '$')
+        self.assertTrue(params["passed_data"]["custom_unit_load_cost_id"]["unit_of_measurement"] == '$/kWh')
+        self.assertTrue(params["passed_data"]["custom_unit_prod_price_id"]["unit_of_measurement"] == '$/kWh')
+
     def test_build_secrets(self):
         # Test the build_secrets defaults from get_test_params()
         params = TestCommandLineUtils.get_test_params()
```