emhass 0.10.1__tar.gz → 0.10.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. {emhass-0.10.1 → emhass-0.10.3}/CHANGELOG.md +14 -0
  2. {emhass-0.10.1 → emhass-0.10.3}/PKG-INFO +17 -11
  3. {emhass-0.10.1 → emhass-0.10.3}/README.md +15 -9
  4. emhass-0.10.3/data/test_df_final.pkl +0 -0
  5. {emhass-0.10.1 → emhass-0.10.3}/setup.py +2 -2
  6. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/command_line.py +78 -14
  7. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/forecast.py +94 -37
  8. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/optimization.py +46 -50
  9. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/utils.py +55 -36
  10. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/web_server.py +14 -4
  11. {emhass-0.10.1 → emhass-0.10.3}/src/emhass.egg-info/PKG-INFO +17 -11
  12. {emhass-0.10.1 → emhass-0.10.3}/src/emhass.egg-info/requires.txt +1 -1
  13. {emhass-0.10.1 → emhass-0.10.3}/tests/test_forecast.py +8 -0
  14. {emhass-0.10.1 → emhass-0.10.3}/tests/test_retrieve_hass.py +1 -1
  15. emhass-0.10.1/data/test_df_final.pkl +0 -0
  16. {emhass-0.10.1 → emhass-0.10.3}/CODE_OF_CONDUCT.md +0 -0
  17. {emhass-0.10.1 → emhass-0.10.3}/CONTRIBUTING.md +0 -0
  18. {emhass-0.10.1 → emhass-0.10.3}/LICENSE +0 -0
  19. {emhass-0.10.1 → emhass-0.10.3}/MANIFEST.in +0 -0
  20. {emhass-0.10.1 → emhass-0.10.3}/data/data_load_cost_forecast.csv +0 -0
  21. {emhass-0.10.1 → emhass-0.10.3}/data/data_load_forecast.csv +0 -0
  22. {emhass-0.10.1 → emhass-0.10.3}/data/data_prod_price_forecast.csv +0 -0
  23. {emhass-0.10.1 → emhass-0.10.3}/data/data_train_load_clustering.pkl +0 -0
  24. {emhass-0.10.1 → emhass-0.10.3}/data/data_train_load_forecast.pkl +0 -0
  25. {emhass-0.10.1 → emhass-0.10.3}/data/data_weather_forecast.csv +0 -0
  26. {emhass-0.10.1 → emhass-0.10.3}/data/heating_prediction.csv +0 -0
  27. {emhass-0.10.1 → emhass-0.10.3}/data/opt_res_latest.csv +0 -0
  28. {emhass-0.10.1 → emhass-0.10.3}/data/opt_res_perfect_optim_cost.csv +0 -0
  29. {emhass-0.10.1 → emhass-0.10.3}/data/opt_res_perfect_optim_profit.csv +0 -0
  30. {emhass-0.10.1 → emhass-0.10.3}/data/opt_res_perfect_optim_self-consumption.csv +0 -0
  31. {emhass-0.10.1 → emhass-0.10.3}/data/test_response_get_data_get_method.pbz2 +0 -0
  32. {emhass-0.10.1 → emhass-0.10.3}/data/test_response_scrapper_get_method.pbz2 +0 -0
  33. {emhass-0.10.1 → emhass-0.10.3}/data/test_response_solarforecast_get_method.pbz2 +0 -0
  34. {emhass-0.10.1 → emhass-0.10.3}/data/test_response_solcast_get_method.pbz2 +0 -0
  35. {emhass-0.10.1 → emhass-0.10.3}/pyproject.toml +0 -0
  36. {emhass-0.10.1 → emhass-0.10.3}/setup.cfg +0 -0
  37. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/__init__.py +0 -0
  38. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/data/cec_inverters.pbz2 +0 -0
  39. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/data/cec_modules.pbz2 +0 -0
  40. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/machine_learning_forecaster.py +0 -0
  41. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/machine_learning_regressor.py +0 -0
  42. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/retrieve_hass.py +0 -0
  43. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/static/advanced.html +0 -0
  44. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/static/basic.html +0 -0
  45. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/static/img/emhass_icon.png +0 -0
  46. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/static/img/emhass_logo_short.svg +0 -0
  47. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/static/img/feather-sprite.svg +0 -0
  48. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/static/script.js +0 -0
  49. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/static/style.css +0 -0
  50. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/templates/index.html +0 -0
  51. {emhass-0.10.1 → emhass-0.10.3}/src/emhass/templates/template.html +0 -0
  52. {emhass-0.10.1 → emhass-0.10.3}/src/emhass.egg-info/SOURCES.txt +0 -0
  53. {emhass-0.10.1 → emhass-0.10.3}/src/emhass.egg-info/dependency_links.txt +0 -0
  54. {emhass-0.10.1 → emhass-0.10.3}/src/emhass.egg-info/entry_points.txt +0 -0
  55. {emhass-0.10.1 → emhass-0.10.3}/src/emhass.egg-info/top_level.txt +0 -0
  56. {emhass-0.10.1 → emhass-0.10.3}/tests/test_command_line_utils.py +0 -0
  57. {emhass-0.10.1 → emhass-0.10.3}/tests/test_machine_learning_forecaster.py +0 -0
  58. {emhass-0.10.1 → emhass-0.10.3}/tests/test_machine_learning_regressor.py +0 -0
  59. {emhass-0.10.1 → emhass-0.10.3}/tests/test_optimization.py +0 -0
  60. {emhass-0.10.1 → emhass-0.10.3}/tests/test_utils.py +0 -0
@@ -1,5 +1,19 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.10.3 - 2024-07-06
4
+ ### Improvement
5
+ - Added improved support for `def_start_penalty` option
6
+ - Improved documentation
7
+
8
+ ## 0.10.2 - 2024-07-06
9
+ ### Improvement
10
+ - Weather forecast caching and Solcast method fix by @GeoDerp
11
+ - Added a new configuration parameter to control whether we compute PV curtailment or not
12
+ - Added hybrid inverter to data publish
13
+ - It is now possible to pass these battery parameters at runtime: `SOCmin`, `SOCmax`, `Pd_max` and `Pc_max`
14
+ ### Fix
15
+ - Fixed problem with negative PV forecast values in optimization.py, by @GeoDerp
16
+
3
17
  ## 0.10.1 - 2024-06-03
4
18
  ### Fix
5
19
  - Fixed PV curtailment maximum possible value constraint
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: emhass
3
- Version: 0.10.1
3
+ Version: 0.10.3
4
4
  Summary: An Energy Management System for Home Assistant
5
5
  Home-page: https://github.com/davidusb-geek/emhass
6
6
  Author: David HERNANDEZ
@@ -28,7 +28,7 @@ Requires-Dist: h5py==3.11.0
28
28
  Requires-Dist: pulp>=2.4
29
29
  Requires-Dist: pyyaml>=5.4.1
30
30
  Requires-Dist: tables<=3.9.1
31
- Requires-Dist: skforecast==0.12.0
31
+ Requires-Dist: skforecast==0.12.1
32
32
  Requires-Dist: flask>=2.0.3
33
33
  Requires-Dist: waitress>=2.1.1
34
34
  Requires-Dist: plotly>=5.6.0
@@ -122,7 +122,7 @@ Installation instructions and example Home Assistant automation configurations a
122
122
 
123
123
  You must follow these steps to make EMHASS work properly:
124
124
 
125
- 1) Define all the parameters in the configuration file according to your installation. See the description for each parameter in the **configuration** section.
125
+ 1) Define all the parameters in the configuration file according to your installation method. For the add-on method you need to use the configuration pane directly on the add-on page. For other installation methods you will need to set the variables using the `config_emhass.yaml` file. See below for details on the installation methods. See the description for each parameter in the **configuration** section. If you have a PV installation then this dedicated webapp can be useful to find your inverter and solar panel models: [https://emhass-pvlib-database.streamlit.app/](https://emhass-pvlib-database.streamlit.app/)
126
126
 
127
127
  2) You most notably will need to define the main data entering EMHASS. This will be the `sensor.power_photovoltaics` for the name of your hass variable containing the PV produced power and the variable `sensor.power_load_no_var_loads` for the load power of your household excluding the power of the deferrable loads that you want to optimize.
128
128
 
@@ -179,13 +179,9 @@ docker run -it --restart always -p 5000:5000 -e TZ="Europe/Paris" -e LOCAL_COS
179
179
  ### Method 3) Legacy method using a Python virtual environment
180
180
 
181
181
  With this method it is recommended to install on a virtual environment.
182
- For this you will need `virtualenv`, install it using:
182
+ Create and activate a virtual environment:
183
183
  ```bash
184
- sudo apt install python3-virtualenv
185
- ```
186
- Then create and activate the virtual environment:
187
- ```bash
188
- virtualenv -p /usr/bin/python3 emhassenv
184
+ python3 -m venv emhassenv
189
185
  cd emhassenv
190
186
  source bin/activate
191
187
  ```
@@ -496,7 +492,7 @@ curl -i -H 'Content-Type:application/json' -X POST -d '{"publish_prefix":"all"}'
496
492
  ```
497
493
  This action will publish the dayahead (_dh) and MPC (_mpc) optimization results from the optimizations above.
498
494
 
499
- ### Forecast data
495
+ ### Forecast data at runtime
500
496
 
501
497
  It is possible to provide EMHASS with your own forecast data. For this just add the data as list of values to a data dictionary during the call to `emhass` using the `runtimeparams` option.
502
498
 
@@ -519,7 +515,7 @@ The possible dictionary keys to pass data are:
519
515
 
520
516
  - `prod_price_forecast` for the PV production selling price forecast.
521
517
 
522
- ### Passing other data
518
+ ### Passing other data at runtime
523
519
 
524
520
  It is possible to also pass other data during runtime in order to automate the energy management. For example, it could be useful to dynamically update the total number of hours for each deferrable load (`def_total_hours`) using for instance a correlation with the outdoor temperature (useful for water heater for example).
525
521
 
@@ -535,6 +531,8 @@ Here is the list of the other additional dictionary keys that can be passed at r
535
531
 
536
532
  - `def_end_timestep` for the timestep before which each deferrable load should operate (if you don't want the deferrable load to use the whole optimization timewindow).
537
533
 
534
+ - `def_current_state` Pass this as a list of booleans (True/False) to indicate the current deferrable load state. This is used internally to avoid incorrectly penalizing a deferrable load start if a forecast is run when that load is already running.
535
+
538
536
  - `treat_def_as_semi_cont` to define if we should treat each deferrable load as a semi-continuous variable.
539
537
 
540
538
  - `set_def_constant` to define if we should set each deferrable load as a constant fixed value variable with just one startup for each optimization task.
@@ -545,8 +543,16 @@ Here is the list of the other additional dictionary keys that can be passed at r
545
543
 
546
544
  - `solar_forecast_kwp` for the PV peak installed power in kW used for the solar.forecast API call.
547
545
 
546
+ - `SOCmin` the minimum possible SOC.
547
+
548
+ - `SOCmax` the maximum possible SOC.
549
+
548
550
  - `SOCtarget` for the desired target value of initial and final SOC.
549
551
 
552
+ - `Pd_max` for the maximum battery discharge power.
553
+
554
+ - `Pc_max` for the maximum battery charge power.
555
+
550
556
  - `publish_prefix` use this key to pass a common prefix to all published data. This will add a prefix to the sensor name but also to the forecasts attributes keys within the sensor.
551
557
 
552
558
  ## A naive Model Predictive Controller
@@ -87,7 +87,7 @@ Installation instructions and example Home Assistant automation configurations a
87
87
 
88
88
  You must follow these steps to make EMHASS work properly:
89
89
 
90
- 1) Define all the parameters in the configuration file according to your installation. See the description for each parameter in the **configuration** section.
90
+ 1) Define all the parameters in the configuration file according to your installation method. For the add-on method you need to use the configuration pane directly on the add-on page. For other installation methods you will need to set the variables using the `config_emhass.yaml` file. See below for details on the installation methods. See the description for each parameter in the **configuration** section. If you have a PV installation then this dedicated webapp can be useful to find your inverter and solar panel models: [https://emhass-pvlib-database.streamlit.app/](https://emhass-pvlib-database.streamlit.app/)
91
91
 
92
92
  2) You most notably will need to define the main data entering EMHASS. This will be the `sensor.power_photovoltaics` for the name of your hass variable containing the PV produced power and the variable `sensor.power_load_no_var_loads` for the load power of your household excluding the power of the deferrable loads that you want to optimize.
93
93
 
@@ -144,13 +144,9 @@ docker run -it --restart always -p 5000:5000 -e TZ="Europe/Paris" -e LOCAL_COS
144
144
  ### Method 3) Legacy method using a Python virtual environment
145
145
 
146
146
  With this method it is recommended to install on a virtual environment.
147
- For this you will need `virtualenv`, install it using:
147
+ Create and activate a virtual environment:
148
148
  ```bash
149
- sudo apt install python3-virtualenv
150
- ```
151
- Then create and activate the virtual environment:
152
- ```bash
153
- virtualenv -p /usr/bin/python3 emhassenv
149
+ python3 -m venv emhassenv
154
150
  cd emhassenv
155
151
  source bin/activate
156
152
  ```
@@ -461,7 +457,7 @@ curl -i -H 'Content-Type:application/json' -X POST -d '{"publish_prefix":"all"}'
461
457
  ```
462
458
  This action will publish the dayahead (_dh) and MPC (_mpc) optimization results from the optimizations above.
463
459
 
464
- ### Forecast data
460
+ ### Forecast data at runtime
465
461
 
466
462
  It is possible to provide EMHASS with your own forecast data. For this just add the data as list of values to a data dictionary during the call to `emhass` using the `runtimeparams` option.
467
463
 
@@ -484,7 +480,7 @@ The possible dictionary keys to pass data are:
484
480
 
485
481
  - `prod_price_forecast` for the PV production selling price forecast.
486
482
 
487
- ### Passing other data
483
+ ### Passing other data at runtime
488
484
 
489
485
  It is possible to also pass other data during runtime in order to automate the energy management. For example, it could be useful to dynamically update the total number of hours for each deferrable load (`def_total_hours`) using for instance a correlation with the outdoor temperature (useful for water heater for example).
490
486
 
@@ -500,6 +496,8 @@ Here is the list of the other additional dictionary keys that can be passed at r
500
496
 
501
497
  - `def_end_timestep` for the timestep before which each deferrable load should operate (if you don't want the deferrable load to use the whole optimization timewindow).
502
498
 
499
+ - `def_current_state` Pass this as a list of booleans (True/False) to indicate the current deferrable load state. This is used internally to avoid incorrectly penalizing a deferrable load start if a forecast is run when that load is already running.
500
+
503
501
  - `treat_def_as_semi_cont` to define if we should treat each deferrable load as a semi-continuous variable.
504
502
 
505
503
  - `set_def_constant` to define if we should set each deferrable load as a constant fixed value variable with just one startup for each optimization task.
@@ -510,8 +508,16 @@ Here is the list of the other additional dictionary keys that can be passed at r
510
508
 
511
509
  - `solar_forecast_kwp` for the PV peak installed power in kW used for the solar.forecast API call.
512
510
 
511
+ - `SOCmin` the minimum possible SOC.
512
+
513
+ - `SOCmax` the maximum possible SOC.
514
+
513
515
  - `SOCtarget` for the desired target value of initial and final SOC.
514
516
 
517
+ - `Pd_max` for the maximum battery discharge power.
518
+
519
+ - `Pc_max` for the maximum battery charge power.
520
+
515
521
  - `publish_prefix` use this key to pass a common prefix to all published data. This will add a prefix to the sensor name but also to the forecasts attributes keys within the sensor.
516
522
 
517
523
  ## A naive Model Predictive Controller
Binary file
@@ -19,7 +19,7 @@ long_description = (here / 'README.md').read_text(encoding='utf-8')
19
19
 
20
20
  setup(
21
21
  name='emhass', # Required
22
- version='0.10.1', # Required
22
+ version='0.10.3', # Required
23
23
  description='An Energy Management System for Home Assistant', # Optional
24
24
  long_description=long_description, # Optional
25
25
  long_description_content_type='text/markdown', # Optional (see note above)
@@ -52,7 +52,7 @@ setup(
52
52
  'pulp>=2.4',
53
53
  'pyyaml>=5.4.1',
54
54
  'tables<=3.9.1',
55
- 'skforecast==0.12.0',
55
+ 'skforecast==0.12.1',
56
56
  'flask>=2.0.3',
57
57
  'waitress>=2.1.1',
58
58
  'plotly>=5.6.0'
@@ -97,6 +97,8 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
97
97
  # Get PV and load forecasts
98
98
  df_weather = fcst.get_weather_forecast(
99
99
  method=optim_conf["weather_forecast_method"])
100
+ if isinstance(df_weather, bool) and not df_weather:
101
+ return False
100
102
  P_PV_forecast = fcst.get_power_from_weather(df_weather)
101
103
  P_load_forecast = fcst.get_load_forecast(
102
104
  method=optim_conf['load_forecast_method'])
@@ -142,6 +144,8 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
142
144
  # Get PV and load forecasts
143
145
  df_weather = fcst.get_weather_forecast(
144
146
  method=optim_conf['weather_forecast_method'])
147
+ if isinstance(df_weather, bool) and not df_weather:
148
+ return False
145
149
  P_PV_forecast = fcst.get_power_from_weather(
146
150
  df_weather, set_mix_forecast=True, df_now=df_input_data)
147
151
  P_load_forecast = fcst.get_load_forecast(
@@ -243,6 +247,50 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
243
247
  }
244
248
  return input_data_dict
245
249
 
250
+ def weather_forecast_cache(emhass_conf: dict, params: str,
251
+ runtimeparams: str, logger: logging.Logger) -> bool:
252
+ """
253
+ Perform a call to get forecast function, intend to save results to cache.
254
+
255
+ :param emhass_conf: Dictionary containing the needed emhass paths
256
+ :type emhass_conf: dict
257
+ :param params: Configuration parameters passed from data/options.json
258
+ :type params: str
259
+ :param runtimeparams: Runtime optimization parameters passed as a dictionary
260
+ :type runtimeparams: str
261
+ :param logger: The passed logger object
262
+ :type logger: logging object
263
+ :return: A bool for function completion
264
+ :rtype: bool
265
+
266
+ """
267
+
268
+ # Parsing yaml
269
+ retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(
270
+ emhass_conf, use_secrets=True, params=params)
271
+
272
+ # Treat runtimeparams
273
+ params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
274
+ runtimeparams, params, retrieve_hass_conf, optim_conf, plant_conf, "forecast", logger)
275
+
276
+ # Make sure weather_forecast_cache is true
277
+ if (params != None) and (params != "null"):
278
+ params = json.loads(params)
279
+ else:
280
+ params = {}
281
+ params["passed_data"]["weather_forecast_cache"] = True
282
+ params = json.dumps(params)
283
+
284
+ # Create Forecast object
285
+ fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
286
+ params, emhass_conf, logger)
287
+
288
+ result = fcst.get_weather_forecast(optim_conf["weather_forecast_method"])
289
+ if isinstance(result, bool) and not result:
290
+ return False
291
+
292
+ return True
293
+
246
294
 
247
295
  def perfect_forecast_optim(input_data_dict: dict, logger: logging.Logger,
248
296
  save_data_to_file: Optional[bool] = True,
@@ -801,19 +849,35 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
801
849
  )
802
850
  cols_published = ["P_PV", "P_Load"]
803
851
  # Publish PV curtailment
804
- custom_pv_curtailment_id = params["passed_data"]["custom_pv_curtailment_id"]
805
- input_data_dict["rh"].post_data(
806
- opt_res_latest["P_PV_curtailment"],
807
- idx_closest,
808
- custom_pv_curtailment_id["entity_id"],
809
- custom_pv_curtailment_id["unit_of_measurement"],
810
- custom_pv_curtailment_id["friendly_name"],
811
- type_var="power",
812
- publish_prefix=publish_prefix,
813
- save_entities=entity_save,
814
- dont_post=dont_post
815
- )
816
- cols_published = cols_published + ["P_PV_curtailment"]
852
+ if input_data_dict["fcst"].plant_conf['compute_curtailment']:
853
+ custom_pv_curtailment_id = params["passed_data"]["custom_pv_curtailment_id"]
854
+ input_data_dict["rh"].post_data(
855
+ opt_res_latest["P_PV_curtailment"],
856
+ idx_closest,
857
+ custom_pv_curtailment_id["entity_id"],
858
+ custom_pv_curtailment_id["unit_of_measurement"],
859
+ custom_pv_curtailment_id["friendly_name"],
860
+ type_var="power",
861
+ publish_prefix=publish_prefix,
862
+ save_entities=entity_save,
863
+ dont_post=dont_post
864
+ )
865
+ cols_published = cols_published + ["P_PV_curtailment"]
866
+ # Publish P_hybrid_inverter
867
+ if input_data_dict["fcst"].plant_conf['inverter_is_hybrid']:
868
+ custom_hybrid_inverter_id = params["passed_data"]["custom_hybrid_inverter_id"]
869
+ input_data_dict["rh"].post_data(
870
+ opt_res_latest["P_hybrid_inverter"],
871
+ idx_closest,
872
+ custom_hybrid_inverter_id["entity_id"],
873
+ custom_hybrid_inverter_id["unit_of_measurement"],
874
+ custom_hybrid_inverter_id["friendly_name"],
875
+ type_var="power",
876
+ publish_prefix=publish_prefix,
877
+ save_entities=entity_save,
878
+ dont_post=dont_post
879
+ )
880
+ cols_published = cols_published + ["P_hybrid_inverter"]
817
881
  # Publish deferrable loads
818
882
  custom_deferrable_forecast_id = params["passed_data"][
819
883
  "custom_deferrable_forecast_id"
@@ -1126,7 +1190,7 @@ def main():
1126
1190
  logger.error("Could not find emhass/src foulder in: " + str(root_path))
1127
1191
  logger.error("Try setting emhass root path with --root")
1128
1192
  return False
1129
- # Additionnal argument
1193
+ # Additional argument
1130
1194
  try:
1131
1195
  parser.add_argument(
1132
1196
  "--version",
@@ -182,6 +182,7 @@ class Forecast(object):
182
182
 
183
183
  """
184
184
  csv_path = self.emhass_conf['data_path'] / csv_path
185
+ w_forecast_cache_path = os.path.abspath(self.emhass_conf['data_path'] / "weather_forecast_data.pkl")
185
186
 
186
187
  self.logger.info("Retrieving weather forecast data using method = "+method)
187
188
  self.weather_forecast_method = method # Saving this attribute for later use to identify csv method usage
@@ -223,40 +224,97 @@ class Forecast(object):
223
224
  data['relative_humidity'] = raw_data['Relative Humidity (%)']
224
225
  data['precipitable_water'] = pvlib.atmosphere.gueymard94_pw(
225
226
  data['temp_air'], data['relative_humidity'])
226
- elif method == 'solcast': # using solcast API
227
- # Retrieve data from the solcast API
228
- if 'solcast_api_key' not in self.retrieve_hass_conf:
229
- self.logger.warning("The solcast_api_key parameter was not defined, using dummy values for testing")
230
- self.retrieve_hass_conf['solcast_api_key'] = "123456"
231
- if 'solcast_rooftop_id' not in self.retrieve_hass_conf:
232
- self.logger.warning("The solcast_rooftop_id parameter was not defined, using dummy values for testing")
233
- self.retrieve_hass_conf['solcast_rooftop_id'] = "123456"
234
- headers = {
235
- "Authorization": "Bearer " + self.retrieve_hass_conf['solcast_api_key'],
236
- "content-type": "application/json",
237
- }
238
- days_solcast = int(len(self.forecast_dates)*self.freq.seconds/3600)
239
- url = "https://api.solcast.com.au/rooftop_sites/"+self.retrieve_hass_conf['solcast_rooftop_id']+"/forecasts?hours="+str(days_solcast)
240
- response = get(url, headers=headers)
241
- '''import bz2 # Uncomment to save a serialized data for tests
242
- import _pickle as cPickle
243
- with bz2.BZ2File("data/test_response_solcast_get_method.pbz2", "w") as f:
244
- cPickle.dump(response, f)'''
245
- data = response.json()
246
- data_list = []
247
- for elm in data['forecasts']:
248
- data_list.append(elm['pv_estimate']*1000) # Converting kW to W
249
- # Check if the retrieved data has the correct length
250
- if len(data_list) < len(self.forecast_dates):
251
- self.logger.error("Not enough data retrived from SolCast service, try increasing the time step or use MPC")
227
+ elif method == 'solcast': # using Solcast API
228
+ # Check if weather_forecast_cache is true or if forecast_data file does not exist
229
+ if self.params["passed_data"]["weather_forecast_cache"] or not os.path.isfile(w_forecast_cache_path):
230
+ # Check if weather_forecast_cache_only is true, if so produce error for not finding cache file
231
+ if not self.params["passed_data"]["weather_forecast_cache_only"]:
232
+ # Retrieve data from the Solcast API
233
+ if 'solcast_api_key' not in self.retrieve_hass_conf:
234
+ self.logger.error("The solcast_api_key parameter was not defined")
235
+ return False
236
+ if 'solcast_rooftop_id' not in self.retrieve_hass_conf:
237
+ self.logger.error("The solcast_rooftop_id parameter was not defined")
238
+ return False
239
+ headers = {
240
+ 'User-Agent': 'EMHASS',
241
+ "Authorization": "Bearer " + self.retrieve_hass_conf['solcast_api_key'],
242
+ "content-type": "application/json",
243
+ }
244
+ days_solcast = int(len(self.forecast_dates)*self.freq.seconds/3600)
245
+ # If weather_forecast_cache, set request days as twice as long to avoid length issues (add a buffer)
246
+ if self.params["passed_data"]["weather_forecast_cache"]:
247
+ days_solcast = min((days_solcast * 2), 336)
248
+ url = "https://api.solcast.com.au/rooftop_sites/"+self.retrieve_hass_conf['solcast_rooftop_id']+"/forecasts?hours="+str(days_solcast)
249
+ response = get(url, headers=headers)
250
+ '''import bz2 # Uncomment to save a serialized data for tests
251
+ import _pickle as cPickle
252
+ with bz2.BZ2File("data/test_response_solcast_get_method.pbz2", "w") as f:
253
+ cPickle.dump(response, f)'''
254
+ # Verify the request passed
255
+ if int(response.status_code) == 200:
256
+ data = response.json()
257
+ elif int(response.status_code) == 402 or int(response.status_code) == 429:
258
+ self.logger.error("Solcast error: May have exceeded your subscription limit.")
259
+ return False
260
+ elif int(response.status_code) >= 400 or int(response.status_code) >= 202:
261
+ self.logger.error("Solcast error: There was a issue with the solcast request, check solcast API key and rooftop ID.")
262
+ self.logger.error("Solcast error: Check that your subscription is valid and your network can connect to Solcast.")
263
+ return False
264
+ data_list = []
265
+ for elm in data['forecasts']:
266
+ data_list.append(elm['pv_estimate']*1000) # Converting kW to W
267
+ # Check if the retrieved data has the correct length
268
+ if len(data_list) < len(self.forecast_dates):
269
+ self.logger.error("Not enough data retried from Solcast service, try increasing the time step or use MPC.")
270
+ else:
271
+ # If runtime weather_forecast_cache is true save forecast result to file as cache
272
+ if self.params["passed_data"]["weather_forecast_cache"]:
273
+ # Add x2 forecast periods for cached results. This adds a extra delta_forecast amount of days for a buffer
274
+ cached_forecast_dates = self.forecast_dates.union(pd.date_range(self.forecast_dates[-1], periods=(len(self.forecast_dates) +1), freq=self.freq)[1:])
275
+ cache_data_list = data_list[0:len(cached_forecast_dates)]
276
+ cache_data_dict = {'ts':cached_forecast_dates, 'yhat':cache_data_list}
277
+ data_cache = pd.DataFrame.from_dict(cache_data_dict)
278
+ data_cache.set_index('ts', inplace=True)
279
+ with open(w_forecast_cache_path, "wb") as file:
280
+ cPickle.dump(data_cache, file)
281
+ if not os.path.isfile(w_forecast_cache_path):
282
+ self.logger.warning("Solcast forecast data could not be saved to file.")
283
+ else:
284
+ self.logger.info("Saved the Solcast results to cache, for later reference.")
285
+ # Trim request results to forecast_dates
286
+ data_list = data_list[0:len(self.forecast_dates)]
287
+ data_dict = {'ts':self.forecast_dates, 'yhat':data_list}
288
+ # Define DataFrame
289
+ data = pd.DataFrame.from_dict(data_dict)
290
+ # Define index
291
+ data.set_index('ts', inplace=True)
292
+ # Else, notify user to update cache
293
+ else:
294
+ self.logger.error("Unable to obtain Solcast cache file.")
295
+ self.logger.error("Try running optimization again with 'weather_forecast_cache_only': false")
296
+ self.logger.error("Optionally, obtain new Solcast cache with runtime parameter 'weather_forecast_cache': true in an optimization, or run the `forecast-cache` action, to pull new data from Solcast and cache.")
297
+ return False
298
+ # Else, open stored weather_forecast_data.pkl file for previous forecast data (cached data)
252
299
  else:
253
- # Ensure correct length
254
- data_list = data_list[0:len(self.forecast_dates)]
255
- # Define DataFrame
256
- data_dict = {'ts':self.forecast_dates, 'yhat':data_list}
257
- data = pd.DataFrame.from_dict(data_dict)
258
- # Define index
259
- data.set_index('ts', inplace=True)
300
+ with open(w_forecast_cache_path, "rb") as file:
301
+ data = cPickle.load(file)
302
+ if not isinstance(data, pd.DataFrame) or len(data) < len(self.forecast_dates):
303
+ self.logger.error("There has been a error obtaining cached Solcast forecast data.")
304
+ self.logger.error("Try running optimization again with 'weather_forecast_cache': true, or run action `forecast-cache`, to pull new data from Solcast and cache.")
305
+ self.logger.warning("Removing old Solcast cache file. Next optimization will pull data from Solcast, unless 'weather_forecast_cache_only': true")
306
+ os.remove(w_forecast_cache_path)
307
+ return False
308
+ # Filter cached forecast data to match current forecast_dates start-end range (reduce forecast Dataframe size to appropriate length)
309
+ if self.forecast_dates[0] in data.index and self.forecast_dates[-1] in data.index:
310
+ data = data.loc[self.forecast_dates[0]:self.forecast_dates[-1]]
311
+ self.logger.info("Retrieved Solcast data from the previously saved cache.")
312
+ else:
313
+ self.logger.error("Unable to obtain cached Solcast forecast data within the requested timeframe range.")
314
+ self.logger.error("Try running optimization again (not using cache). Optionally, add runtime parameter 'weather_forecast_cache': true to pull new data from Solcast and cache.")
315
+ self.logger.warning("Removing old Solcast cache file. Next optimization will pull data from Solcast, unless 'weather_forecast_cache_only': true")
316
+ os.remove(w_forecast_cache_path)
317
+ return False
260
318
  elif method == 'solar.forecast': # using the solar.forecast API
261
319
  # Retrieve data from the solar.forecast API
262
320
  if 'solar_forecast_kwp' not in self.retrieve_hass_conf:
@@ -423,9 +481,9 @@ class Forecast(object):
423
481
  # Setting the main parameters of the PV plant
424
482
  location = Location(latitude=self.lat, longitude=self.lon)
425
483
  temp_params = TEMPERATURE_MODEL_PARAMETERS['sapm']['close_mount_glass_glass']
426
- cec_modules = bz2.BZ2File(self.emhass_conf['root_path'] / 'data/cec_modules.pbz2', "rb")
484
+ cec_modules = bz2.BZ2File(self.emhass_conf['root_path'] / 'data' / 'cec_modules.pbz2', "rb")
427
485
  cec_modules = cPickle.load(cec_modules)
428
- cec_inverters = bz2.BZ2File(self.emhass_conf['root_path'] / 'data/cec_inverters.pbz2', "rb")
486
+ cec_inverters = bz2.BZ2File(self.emhass_conf['root_path'] / 'data' / 'cec_inverters.pbz2', "rb")
429
487
  cec_inverters = cPickle.load(cec_inverters)
430
488
  if type(self.plant_conf['module_model']) == list:
431
489
  P_PV_forecast = pd.Series(0, index=df_weather.index)
@@ -838,5 +896,4 @@ class Forecast(object):
838
896
  else:
839
897
  self.logger.error("Passed method is not valid")
840
898
  return False
841
- return df_final
842
-
899
+ return df_final
@@ -272,12 +272,20 @@ class Optimization:
272
272
  rhs = 0)
273
273
  for i in set_I}
274
274
  else:
275
- constraints = {"constraint_main1_{}".format(i) :
276
- plp.LpConstraint(
277
- e = P_PV[i] - P_PV_curtailment[i] - P_def_sum[i] - P_load[i] + P_grid_neg[i] + P_grid_pos[i] + P_sto_pos[i] + P_sto_neg[i],
278
- sense = plp.LpConstraintEQ,
279
- rhs = 0)
280
- for i in set_I}
275
+ if self.plant_conf['compute_curtailment']:
276
+ constraints = {"constraint_main2_{}".format(i) :
277
+ plp.LpConstraint(
278
+ e = P_PV[i] - P_PV_curtailment[i] - P_def_sum[i] - P_load[i] + P_grid_neg[i] + P_grid_pos[i] + P_sto_pos[i] + P_sto_neg[i],
279
+ sense = plp.LpConstraintEQ,
280
+ rhs = 0)
281
+ for i in set_I}
282
+ else:
283
+ constraints = {"constraint_main3_{}".format(i) :
284
+ plp.LpConstraint(
285
+ e = P_PV[i] - P_def_sum[i] - P_load[i] + P_grid_neg[i] + P_grid_pos[i] + P_sto_pos[i] + P_sto_neg[i],
286
+ sense = plp.LpConstraintEQ,
287
+ rhs = 0)
288
+ for i in set_I}
281
289
 
282
290
  # Constraint for hybrid inverter and curtailment cases
283
291
  if type(self.plant_conf['module_model']) == list:
@@ -312,12 +320,13 @@ class Optimization:
312
320
  rhs = 0)
313
321
  for i in set_I})
314
322
  else:
315
- constraints.update({"constraint_curtailment_{}".format(i) :
316
- plp.LpConstraint(
317
- e = P_PV_curtailment[i] - P_PV[i],
318
- sense = plp.LpConstraintLE,
319
- rhs = 0)
320
- for i in set_I})
323
+ if self.plant_conf['compute_curtailment']:
324
+ constraints.update({"constraint_curtailment_{}".format(i) :
325
+ plp.LpConstraint(
326
+ e = P_PV_curtailment[i] - max(P_PV[i],0),
327
+ sense = plp.LpConstraintLE,
328
+ rhs = 0)
329
+ for i in set_I})
321
330
 
322
331
  # Constraint for sequence of deferrable
323
332
  # WARNING: This is experimental, formulation seems correct but feasibility problems.
@@ -363,13 +372,13 @@ class Optimization:
363
372
  # Two special constraints just for a self-consumption cost function
364
373
  if self.costfun == 'self-consumption':
365
374
  if type_self_conso == 'maxmin': # maxmin linear problem
366
- constraints.update({"constraint_selfcons_PV_{}".format(i) :
375
+ constraints.update({"constraint_selfcons_PV1_{}".format(i) :
367
376
  plp.LpConstraint(
368
377
  e = SC[i] - P_PV[i],
369
378
  sense = plp.LpConstraintLE,
370
379
  rhs = 0)
371
380
  for i in set_I})
372
- constraints.update({"constraint_selfcons_PV_{}".format(i) :
381
+ constraints.update({"constraint_selfcons_PV2_{}".format(i) :
373
382
  plp.LpConstraint(
374
383
  e = SC[i] - P_load[i] - P_def_sum[i],
375
384
  sense = plp.LpConstraintLE,
@@ -439,41 +448,27 @@ class Optimization:
439
448
  sense=plp.LpConstraintLE,
440
449
  rhs=0)
441
450
  for i in set_I})
442
- # Treat the number of starts for a deferrable load
443
- if self.optim_conf['set_def_constant'][k]:
444
- constraints.update({"constraint_pdef{}_start1_{}".format(k, i) :
445
- plp.LpConstraint(
446
- e=P_deferrable[k][i] - P_def_bin2[k][i]*M,
447
- sense=plp.LpConstraintLE,
448
- rhs=0)
449
- for i in set_I})
450
- constraints.update({"constraint_pdef{}_start2_{}".format(k, i):
451
- plp.LpConstraint(
452
- e=P_def_start[k][i] - P_def_bin2[k][i] + (P_def_bin2[k][i-1] if i-1 >= 0 else 0),
453
- sense=plp.LpConstraintGE,
454
- rhs=0)
455
- for i in set_I})
456
- constraints.update({"constraint_pdef{}_start3".format(k) :
457
- plp.LpConstraint(
458
- e = plp.lpSum(P_def_start[k][i] for i in set_I),
459
- sense = plp.LpConstraintEQ,
460
- rhs = 1)
461
- })
462
- # Treat deferrable load as a semi-continuous variable
463
- if self.optim_conf['treat_def_as_semi_cont'][k]:
464
- constraints.update({"constraint_pdef{}_semicont1_{}".format(k, i) :
465
- plp.LpConstraint(
466
- e=P_deferrable[k][i] - self.optim_conf['P_deferrable_nom'][k]*P_def_bin1[k][i],
467
- sense=plp.LpConstraintGE,
468
- rhs=0)
469
- for i in set_I})
470
- constraints.update({"constraint_pdef{}_semicont2_{}".format(k, i) :
471
- plp.LpConstraint(
472
- e=P_deferrable[k][i] - self.optim_conf['P_deferrable_nom'][k]*P_def_bin1[k][i],
473
- sense=plp.LpConstraintLE,
474
- rhs=0)
475
- for i in set_I})
476
- # Treat the number of starts for a deferrable load
451
+ # Treat the number of starts for a deferrable load (old method, kept here just in case)
452
+ # if self.optim_conf['set_def_constant'][k]:
453
+ # constraints.update({"constraint_pdef{}_start1_{}".format(k, i) :
454
+ # plp.LpConstraint(
455
+ # e=P_deferrable[k][i] - P_def_bin2[k][i]*M,
456
+ # sense=plp.LpConstraintLE,
457
+ # rhs=0)
458
+ # for i in set_I})
459
+ # constraints.update({"constraint_pdef{}_start2_{}".format(k, i):
460
+ # plp.LpConstraint(
461
+ # e=P_def_start[k][i] - P_def_bin2[k][i] + (P_def_bin2[k][i-1] if i-1 >= 0 else 0),
462
+ # sense=plp.LpConstraintGE,
463
+ # rhs=0)
464
+ # for i in set_I})
465
+ # constraints.update({"constraint_pdef{}_start3".format(k) :
466
+ # plp.LpConstraint(
467
+ # e = plp.lpSum(P_def_start[k][i] for i in set_I),
468
+ # sense = plp.LpConstraintEQ,
469
+ # rhs = 1)
470
+ # })
471
+ # Treat the number of starts for a deferrable load (new method considering current state)
477
472
  current_state = 0
478
473
  if ("def_current_state" in self.optim_conf and len(self.optim_conf["def_current_state"]) > k):
479
474
  current_state = 1 if self.optim_conf["def_current_state"][k] else 0
@@ -644,7 +639,8 @@ class Optimization:
644
639
  opt_tp["SOC_opt"] = SOC_opt
645
640
  if self.plant_conf['inverter_is_hybrid']:
646
641
  opt_tp["P_hybrid_inverter"] = [P_hybrid_inverter[i].varValue for i in set_I]
647
- opt_tp["P_PV_curtailment"] = [P_PV_curtailment[i].varValue for i in set_I]
642
+ if self.plant_conf['compute_curtailment']:
643
+ opt_tp["P_PV_curtailment"] = [P_PV_curtailment[i].varValue for i in set_I]
648
644
  opt_tp.index = data_opt.index
649
645
 
650
646
  # Lets compute the optimal cost function
@@ -11,6 +11,7 @@ import numpy as np
11
11
  import pandas as pd
12
12
  import yaml
13
13
  import pytz
14
+ import ast
14
15
 
15
16
  import plotly.express as px
16
17
 
@@ -166,6 +167,11 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
166
167
  "unit_of_measurement": "W",
167
168
  "friendly_name": "PV Power Curtailment",
168
169
  },
170
+ "custom_hybrid_inverter_id": {
171
+ "entity_id": "sensor.p_hybrid_inverter",
172
+ "unit_of_measurement": "W",
173
+ "friendly_name": "PV Hybrid Inverter",
174
+ },
169
175
  "custom_batt_forecast_id": {
170
176
  "entity_id": "sensor.p_batt_forecast",
171
177
  "unit_of_measurement": "W",
@@ -247,7 +253,6 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
247
253
  if "target" in runtimeparams:
248
254
  target = runtimeparams["target"]
249
255
  params["passed_data"]["target"] = target
250
-
251
256
  # Treating special data passed for MPC control case
252
257
  if set_type == "naive-mpc-optim":
253
258
  if "prediction_horizon" not in runtimeparams.keys():
@@ -303,6 +308,18 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
303
308
  # Treat passed forecast data lists
304
309
  list_forecast_key = ['pv_power_forecast', 'load_power_forecast', 'load_cost_forecast', 'prod_price_forecast']
305
310
  forecast_methods = ['weather_forecast_method', 'load_forecast_method', 'load_cost_forecast_method', 'prod_price_forecast_method']
311
+ # Param to save forecast cache (i.e. Solcast)
312
+ if "weather_forecast_cache" not in runtimeparams.keys():
313
+ weather_forecast_cache = False
314
+ else:
315
+ weather_forecast_cache = runtimeparams["weather_forecast_cache"]
316
+ params["passed_data"]["weather_forecast_cache"] = weather_forecast_cache
317
+ # Param to make sure optimization only uses cached data. (else produce error)
318
+ if "weather_forecast_cache_only" not in runtimeparams.keys():
319
+ weather_forecast_cache_only = False
320
+ else:
321
+ weather_forecast_cache_only = runtimeparams["weather_forecast_cache_only"]
322
+ params["passed_data"]["weather_forecast_cache_only"] = weather_forecast_cache_only
306
323
  for method, forecast_key in enumerate(list_forecast_key):
307
324
  if forecast_key in runtimeparams.keys():
308
325
  if type(runtimeparams[forecast_key]) == list and len(runtimeparams[forecast_key]) >= len(forecast_dates):
@@ -357,14 +374,12 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
357
374
  if "perform_backtest" not in runtimeparams.keys():
358
375
  perform_backtest = False
359
376
  else:
360
- perform_backtest = eval(str(runtimeparams["perform_backtest"]).capitalize())
377
+ perform_backtest = ast.literal_eval(str(runtimeparams["perform_backtest"]).capitalize())
361
378
  params["passed_data"]["perform_backtest"] = perform_backtest
362
379
  if "model_predict_publish" not in runtimeparams.keys():
363
380
  model_predict_publish = False
364
381
  else:
365
- model_predict_publish = eval(
366
- str(runtimeparams["model_predict_publish"]).capitalize()
367
- )
382
+ model_predict_publish = ast.literal_eval(str(runtimeparams["model_predict_publish"]).capitalize())
368
383
  params["passed_data"]["model_predict_publish"] = model_predict_publish
369
384
  if "model_predict_entity_id" not in runtimeparams.keys():
370
385
  model_predict_entity_id = "sensor.p_load_forecast_custom_model"
@@ -421,12 +436,16 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
421
436
  optim_conf["def_current_state"] = [bool(s) for s in runtimeparams["def_current_state"]]
422
437
  if "treat_def_as_semi_cont" in runtimeparams.keys():
423
438
  optim_conf["treat_def_as_semi_cont"] = [
424
- eval(str(k).capitalize())
439
+ ast.literal_eval(str(k).capitalize())
425
440
  for k in runtimeparams["treat_def_as_semi_cont"]
426
441
  ]
427
442
  if "set_def_constant" in runtimeparams.keys():
428
443
  optim_conf["set_def_constant"] = [
429
- eval(str(k).capitalize()) for k in runtimeparams["set_def_constant"]
444
+ ast.literal_eval(str(k).capitalize()) for k in runtimeparams["set_def_constant"]
445
+ ]
446
+ if "def_start_penalty" in runtimeparams.keys():
447
+ optim_conf["def_start_penalty"] = [
448
+ ast.literal_eval(str(k).capitalize()) for k in runtimeparams["def_start_penalty"]
430
449
  ]
431
450
  if "solcast_api_key" in runtimeparams.keys():
432
451
  retrieve_hass_conf["solcast_api_key"] = runtimeparams["solcast_api_key"]
@@ -452,8 +471,16 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
452
471
  if 'continual_publish' in runtimeparams.keys():
453
472
  retrieve_hass_conf['continual_publish'] = bool(runtimeparams['continual_publish'])
454
473
  # Treat plant configuration parameters passed at runtime
474
+ if "SOCmin" in runtimeparams.keys():
475
+ plant_conf["SOCmin"] = runtimeparams["SOCmin"]
476
+ if "SOCmax" in runtimeparams.keys():
477
+ plant_conf["SOCmax"] = runtimeparams["SOCmax"]
455
478
  if "SOCtarget" in runtimeparams.keys():
456
479
  plant_conf["SOCtarget"] = runtimeparams["SOCtarget"]
480
+ if "Pd_max" in runtimeparams.keys():
481
+ plant_conf["Pd_max"] = runtimeparams["Pd_max"]
482
+ if "Pc_max" in runtimeparams.keys():
483
+ plant_conf["Pc_max"] = runtimeparams["Pc_max"]
457
484
  # Treat custom entities id's and friendly names for variables
458
485
  if "custom_pv_forecast_id" in runtimeparams.keys():
459
486
  params["passed_data"]["custom_pv_forecast_id"] = runtimeparams[
@@ -467,6 +494,10 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
467
494
  params["passed_data"]["custom_pv_curtailment_id"] = runtimeparams[
468
495
  "custom_pv_curtailment_id"
469
496
  ]
497
+ if "custom_hybrid_inverter_id" in runtimeparams.keys():
498
+ params["passed_data"]["custom_hybrid_inverter_id"] = runtimeparams[
499
+ "custom_hybrid_inverter_id"
500
+ ]
470
501
  if "custom_batt_forecast_id" in runtimeparams.keys():
471
502
  params["passed_data"]["custom_batt_forecast_id"] = runtimeparams[
472
503
  "custom_batt_forecast_id"
@@ -754,9 +785,7 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
754
785
  params["retrieve_hass_conf"]["var_load"] = options.get("sensor_power_load_no_var_loads", params["retrieve_hass_conf"]["var_load"])
755
786
  params["retrieve_hass_conf"]["load_negative"] = options.get("load_negative", params["retrieve_hass_conf"]["load_negative"])
756
787
  params["retrieve_hass_conf"]["set_zero_min"] = options.get("set_zero_min", params["retrieve_hass_conf"]["set_zero_min"])
757
- params["retrieve_hass_conf"]["var_replace_zero"] = [
758
- options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_replace_zero"])
759
- ]
788
+ params["retrieve_hass_conf"]["var_replace_zero"] = [options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_replace_zero"])]
760
789
  params["retrieve_hass_conf"]["var_interp"] = [
761
790
  options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_PV"]),
762
791
  options.get("sensor_power_load_no_var_loads", params["retrieve_hass_conf"]["var_load"])
@@ -773,20 +802,15 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
773
802
  params["optim_conf"]["set_use_battery"] = options.get("set_use_battery", params["optim_conf"]["set_use_battery"])
774
803
  params["optim_conf"]["num_def_loads"] = options.get("number_of_deferrable_loads", params["optim_conf"]["num_def_loads"])
775
804
  if options.get("list_nominal_power_of_deferrable_loads", None) != None:
776
- params["optim_conf"]["P_deferrable_nom"] = [
777
- i["nominal_power_of_deferrable_loads"]
778
- for i in options.get("list_nominal_power_of_deferrable_loads")
779
- ]
805
+ params["optim_conf"]["P_deferrable_nom"] = [i["nominal_power_of_deferrable_loads"] for i in options.get("list_nominal_power_of_deferrable_loads")]
780
806
  if options.get("list_operating_hours_of_each_deferrable_load", None) != None:
781
- params["optim_conf"]["def_total_hours"] = [
782
- i["operating_hours_of_each_deferrable_load"]
783
- for i in options.get("list_operating_hours_of_each_deferrable_load")
784
- ]
807
+ params["optim_conf"]["def_total_hours"] = [i["operating_hours_of_each_deferrable_load"] for i in options.get("list_operating_hours_of_each_deferrable_load")]
785
808
  if options.get("list_treat_deferrable_load_as_semi_cont", None) != None:
786
- params["optim_conf"]["treat_def_as_semi_cont"] = [
787
- i["treat_deferrable_load_as_semi_cont"]
788
- for i in options.get("list_treat_deferrable_load_as_semi_cont")
789
- ]
809
+ params["optim_conf"]["treat_def_as_semi_cont"] = [i["treat_deferrable_load_as_semi_cont"] for i in options.get("list_treat_deferrable_load_as_semi_cont")]
810
+ if options.get("list_set_deferrable_load_single_constant", None) != None:
811
+ params["optim_conf"]["set_def_constant"] = [i["set_deferrable_load_single_constant"] for i in options.get("list_set_deferrable_load_single_constant")]
812
+ if options.get("list_set_deferrable_startup_penalty", None) != None:
813
+ params["optim_conf"]["def_start_penalty"] = [i["set_deferrable_startup_penalty"] for i in options.get("list_set_deferrable_startup_penalty")]
790
814
  params["optim_conf"]["weather_forecast_method"] = options.get("weather_forecast_method", params["optim_conf"]["weather_forecast_method"])
791
815
  # Update optional param secrets
792
816
  if params["optim_conf"]["weather_forecast_method"] == "solcast":
@@ -797,20 +821,9 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
797
821
  params["optim_conf"]["load_forecast_method"] = options.get("load_forecast_method", params["optim_conf"]["load_forecast_method"])
798
822
  params["optim_conf"]["delta_forecast"] = options.get("delta_forecast_daily", params["optim_conf"]["delta_forecast"])
799
823
  params["optim_conf"]["load_cost_forecast_method"] = options.get("load_cost_forecast_method", params["optim_conf"]["load_cost_forecast_method"])
800
- if options.get("list_set_deferrable_load_single_constant", None) != None:
801
- params["optim_conf"]["set_def_constant"] = [
802
- i["set_deferrable_load_single_constant"]
803
- for i in options.get("list_set_deferrable_load_single_constant")
804
- ]
805
824
  if (options.get("list_peak_hours_periods_start_hours", None) != None and options.get("list_peak_hours_periods_end_hours", None) != None):
806
- start_hours_list = [
807
- i["peak_hours_periods_start_hours"]
808
- for i in options["list_peak_hours_periods_start_hours"]
809
- ]
810
- end_hours_list = [
811
- i["peak_hours_periods_end_hours"]
812
- for i in options["list_peak_hours_periods_end_hours"]
813
- ]
825
+ start_hours_list = [i["peak_hours_periods_start_hours"] for i in options["list_peak_hours_periods_start_hours"]]
826
+ end_hours_list = [i["peak_hours_periods_end_hours"] for i in options["list_peak_hours_periods_end_hours"]]
814
827
  num_peak_hours = len(start_hours_list)
815
828
  list_hp_periods_list = [{'period_hp_'+str(i+1):[{'start':start_hours_list[i]},{'end':end_hours_list[i]}]} for i in range(num_peak_hours)]
816
829
  params['optim_conf']['list_hp_periods'] = list_hp_periods_list
@@ -848,6 +861,7 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
848
861
  if options.get('list_strings_per_inverter',None) != None:
849
862
  params['plant_conf']['strings_per_inverter'] = [i['strings_per_inverter'] for i in options.get('list_strings_per_inverter')]
850
863
  params["plant_conf"]["inverter_is_hybrid"] = options.get("inverter_is_hybrid", params["plant_conf"]["inverter_is_hybrid"])
864
+ params["plant_conf"]["compute_curtailment"] = options.get("compute_curtailment", params["plant_conf"]["compute_curtailment"])
851
865
  params['plant_conf']['Pd_max'] = options.get('battery_discharge_power_max', params['plant_conf']['Pd_max'])
852
866
  params['plant_conf']['Pc_max'] = options.get('battery_charge_power_max', params['plant_conf']['Pc_max'])
853
867
  params['plant_conf']['eta_disch'] = options.get('battery_discharge_efficiency', params['plant_conf']['eta_disch'])
@@ -873,7 +887,12 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
873
887
  if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['treat_def_as_semi_cont']):
874
888
  logger.warning("treat_def_as_semi_cont / list_treat_deferrable_load_as_semi_cont does not match number in num_def_loads, adding default values to parameter")
875
889
  for x in range(len(params['optim_conf']['treat_def_as_semi_cont']), params['optim_conf']['num_def_loads']):
876
- params['optim_conf']['treat_def_as_semi_cont'].append(True)
890
+ params['optim_conf']['treat_def_as_semi_cont'].append(True)
891
+ if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_start_penalty']):
892
+ logger.warning("def_start_penalty / list_set_deferrable_startup_penalty does not match number in num_def_loads, adding default values to parameter")
893
+ for x in range(len(params['optim_conf']['def_start_penalty']), params['optim_conf']['num_def_loads']):
894
+ params['optim_conf']['def_start_penalty'].append(0.0)
895
+ # days_to_retrieve should be no less then 2
877
896
  if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_total_hours']):
878
897
  logger.warning("def_total_hours / list_operating_hours_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter")
879
898
  for x in range(len(params['optim_conf']['def_total_hours']), params['optim_conf']['num_def_loads']):
@@ -12,7 +12,7 @@ from distutils.util import strtobool
12
12
 
13
13
  from emhass.command_line import set_input_data_dict
14
14
  from emhass.command_line import perfect_forecast_optim, dayahead_forecast_optim, naive_mpc_optim
15
- from emhass.command_line import forecast_model_fit, forecast_model_predict, forecast_model_tune
15
+ from emhass.command_line import forecast_model_fit, forecast_model_predict, forecast_model_tune, weather_forecast_cache
16
16
  from emhass.command_line import regressor_model_fit, regressor_model_predict
17
17
  from emhass.command_line import publish_data, continual_publish
18
18
  from emhass.utils import get_injection_dict, get_injection_dict_forecast_model_fit, \
@@ -106,6 +106,17 @@ def action_call(action_name):
106
106
  if runtimeparams is not None and runtimeparams != '{}':
107
107
  app.logger.info("Passed runtime parameters: " + str(runtimeparams))
108
108
  runtimeparams = json.dumps(runtimeparams)
109
+
110
+ # Run action if weather_forecast_cache
111
+ if action_name == 'weather-forecast-cache':
112
+ ActionStr = " >> Performing weather forecast, try to caching result"
113
+ app.logger.info(ActionStr)
114
+ weather_forecast_cache(emhass_conf, params, runtimeparams, app.logger)
115
+ msg = f'EMHASS >> Weather Forecast has run and results possibly cached... \n'
116
+ if not checkFileLog(ActionStr):
117
+ return make_response(msg, 201)
118
+ return make_response(grabLog(ActionStr), 400)
119
+
109
120
  ActionStr = " >> Setting input data dict"
110
121
  app.logger.info(ActionStr)
111
122
  input_data_dict = set_input_data_dict(emhass_conf, costfun,
@@ -459,15 +470,14 @@ if __name__ == "__main__":
459
470
  app.logger.addHandler(fileLogger)
460
471
  clearFileLog() #Clear Action File logger file, ready for new instance
461
472
 
462
-
463
- #If entity_path exists, remove any entity/metadata files
473
+ # If entity_path exists, remove any entity/metadata files
464
474
  entity_path = emhass_conf['data_path'] / "entities"
465
475
  if os.path.exists(entity_path):
466
476
  entity_pathContents = os.listdir(entity_path)
467
477
  if len(entity_pathContents) > 0:
468
478
  for entity in entity_pathContents:
469
479
  os.remove(entity_path / entity)
470
-
480
+
471
481
  # Initialise continual publish thread list
472
482
  continual_publish_thread = []
473
483
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: emhass
3
- Version: 0.10.1
3
+ Version: 0.10.3
4
4
  Summary: An Energy Management System for Home Assistant
5
5
  Home-page: https://github.com/davidusb-geek/emhass
6
6
  Author: David HERNANDEZ
@@ -28,7 +28,7 @@ Requires-Dist: h5py==3.11.0
28
28
  Requires-Dist: pulp>=2.4
29
29
  Requires-Dist: pyyaml>=5.4.1
30
30
  Requires-Dist: tables<=3.9.1
31
- Requires-Dist: skforecast==0.12.0
31
+ Requires-Dist: skforecast==0.12.1
32
32
  Requires-Dist: flask>=2.0.3
33
33
  Requires-Dist: waitress>=2.1.1
34
34
  Requires-Dist: plotly>=5.6.0
@@ -122,7 +122,7 @@ Installation instructions and example Home Assistant automation configurations a
122
122
 
123
123
  You must follow these steps to make EMHASS work properly:
124
124
 
125
- 1) Define all the parameters in the configuration file according to your installation. See the description for each parameter in the **configuration** section.
125
+ 1) Define all the parameters in the configuration file according to your installation method. For the add-on method you need to use the configuration pane directly on the add-on page. For other installation methods it should be needed to set the variables using the `config_emhass.yaml` file. See below for details on the installation methods. See the description for each parameter in the **configuration** section. If you have a PV installation then this dedicated webapp can be useful to find your inverter and solar panel models: [https://emhass-pvlib-database.streamlit.app/](https://emhass-pvlib-database.streamlit.app/)
126
126
 
127
127
  2) You most notably will need to define the main data entering EMHASS. This will be the `sensor.power_photovoltaics` for the name of the your hass variable containing the PV produced power and the variable `sensor.power_load_no_var_loads` for the load power of your household excluding the power of the deferrable loads that you want to optimize.
128
128
 
@@ -179,13 +179,9 @@ docker run -it --restart always -p 5000:5000 -e TZ="Europe/Paris" -e LOCAL_COS
179
179
  ### Method 3) Legacy method using a Python virtual environment
180
180
 
181
181
  With this method it is recommended to install on a virtual environment.
182
- For this you will need `virtualenv`, install it using:
182
+ Create and activate a virtual environment:
183
183
  ```bash
184
- sudo apt install python3-virtualenv
185
- ```
186
- Then create and activate the virtual environment:
187
- ```bash
188
- virtualenv -p /usr/bin/python3 emhassenv
184
+ python3 -m venv emhassenv
189
185
  cd emhassenv
190
186
  source bin/activate
191
187
  ```
@@ -496,7 +492,7 @@ curl -i -H 'Content-Type:application/json' -X POST -d '{"publish_prefix":"all"}'
496
492
  ```
497
493
  This action will publish the dayahead (_dh) and MPC (_mpc) optimization results from the optimizations above.
498
494
 
499
- ### Forecast data
495
+ ### Forecast data at runtime
500
496
 
501
497
  It is possible to provide EMHASS with your own forecast data. For this just add the data as list of values to a data dictionary during the call to `emhass` using the `runtimeparams` option.
502
498
 
@@ -519,7 +515,7 @@ The possible dictionary keys to pass data are:
519
515
 
520
516
  - `prod_price_forecast` for the PV production selling price forecast.
521
517
 
522
- ### Passing other data
518
+ ### Passing other data at runtime
523
519
 
524
520
  It is possible to also pass other data during runtime in order to automate the energy management. For example, it could be useful to dynamically update the total number of hours for each deferrable load (`def_total_hours`) using for instance a correlation with the outdoor temperature (useful for water heater for example).
525
521
 
@@ -535,6 +531,8 @@ Here is the list of the other additional dictionary keys that can be passed at r
535
531
 
536
532
  - `def_end_timestep` for the timestep before which each deferrable load should operate (if you don't want the deferrable load to use the whole optimization timewindow).
537
533
 
534
+ - `def_current_state` Pass this as a list of booleans (True/False) to indicate the current deferrable load state. This is used internally to avoid incorrectly penalizing a deferrable load start if a forecast is run when that load is already running.
535
+
538
536
  - `treat_def_as_semi_cont` to define if we should treat each deferrable load as a semi-continuous variable.
539
537
 
540
538
  - `set_def_constant` to define if we should set each deferrable load as a constant fixed value variable with just one startup for each optimization task.
@@ -545,8 +543,16 @@ Here is the list of the other additional dictionary keys that can be passed at r
545
543
 
546
544
  - `solar_forecast_kwp` for the PV peak installed power in kW used for the solar.forecast API call.
547
545
 
546
+ - `SOCmin` the minimum possible SOC.
547
+
548
+ - `SOCmax` the maximum possible SOC.
549
+
548
550
  - `SOCtarget` for the desired target value of initial and final SOC.
549
551
 
552
+ - `Pd_max` for the maximum battery discharge power.
553
+
554
+ - `Pc_max` for the maximum battery charge power.
555
+
550
556
  - `publish_prefix` use this key to pass a common prefix to all published data. This will add a prefix to the sensor name but also to the forecasts attributes keys within the sensor.
551
557
 
552
558
  ## A naive Model Predictive Controller
@@ -11,7 +11,7 @@ h5py==3.11.0
11
11
  pulp>=2.4
12
12
  pyyaml>=5.4.1
13
13
  tables<=3.9.1
14
- skforecast==0.12.0
14
+ skforecast==0.12.1
15
15
  flask>=2.0.3
16
16
  waitress>=2.1.1
17
17
  plotly>=5.6.0
@@ -2,6 +2,7 @@
2
2
  # -*- coding: utf-8 -*-
3
3
 
4
4
  import unittest
5
+ import os
5
6
  import requests_mock
6
7
  import pandas as pd
7
8
  import pathlib, pickle, json, copy, yaml
@@ -147,6 +148,11 @@ class TestForecast(unittest.TestCase):
147
148
  self.assertEqual(len(df_weather_scrap), len(P_PV_forecast))
148
149
 
149
150
  def test_get_weather_forecast_solcast_method_mock(self):
151
+ self.fcst.params = {'passed_data': {'weather_forecast_cache': False, 'weather_forecast_cache_only': False}}
152
+ self.fcst.retrieve_hass_conf['solcast_api_key'] = "123456"
153
+ self.fcst.retrieve_hass_conf['solcast_rooftop_id'] = "123456"
154
+ if os.path.isfile(emhass_conf['data_path'] / "weather_forecast_data.pkl"):
155
+ os.rename(emhass_conf['data_path'] / "weather_forecast_data.pkl", emhass_conf['data_path'] / "temp_weather_forecast_data.pkl")
150
156
  with requests_mock.mock() as m:
151
157
  data = bz2.BZ2File(str(emhass_conf['data_path'] / 'test_response_solcast_get_method.pbz2'), "rb")
152
158
  data = cPickle.load(data)
@@ -160,6 +166,8 @@ class TestForecast(unittest.TestCase):
160
166
  self.assertTrue(self.fcst.start_forecast < ts for ts in df_weather_scrap.index)
161
167
  self.assertEqual(len(df_weather_scrap),
162
168
  int(self.optim_conf['delta_forecast'].total_seconds()/3600/self.fcst.timeStep))
169
+ if os.path.isfile(emhass_conf['data_path'] / "temp_weather_forecast_data.pkl"):
170
+ os.rename(emhass_conf['data_path'] / "temp_weather_forecast_data.pkl", emhass_conf['data_path'] / "weather_forecast_data.pkl")
163
171
 
164
172
  def test_get_weather_forecast_solarforecast_method_mock(self):
165
173
  with requests_mock.mock() as m:
@@ -122,7 +122,7 @@ class TestRetrieveHass(unittest.TestCase):
122
122
  self.assertEqual(len(self.rh.df_final.columns), len(self.var_list))
123
123
  self.assertEqual(self.rh.df_final.index.isin(self.days_list).sum(), len(self.days_list))
124
124
  self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['freq'])
125
- self.assertEqual(self.rh.df_final.index.tz, pytz.UTC)
125
+ self.assertEqual(self.rh.df_final.index.tz, datetime.timezone.utc)
126
126
  self.rh.prepare_data(self.retrieve_hass_conf['var_load'],
127
127
  load_negative = self.retrieve_hass_conf['load_negative'],
128
128
  set_zero_min = self.retrieve_hass_conf['set_zero_min'],
Binary file
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes