emhass 0.10.0__tar.gz → 0.10.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. {emhass-0.10.0 → emhass-0.10.1}/CHANGELOG.md +9 -1
  2. {emhass-0.10.0 → emhass-0.10.1}/PKG-INFO +1 -1
  3. emhass-0.10.1/data/opt_res_latest.csv +49 -0
  4. {emhass-0.10.0 → emhass-0.10.1}/setup.py +1 -1
  5. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/command_line.py +21 -20
  6. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/machine_learning_forecaster.py +20 -20
  7. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/optimization.py +1 -1
  8. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/utils.py +9 -0
  9. {emhass-0.10.0 → emhass-0.10.1}/src/emhass.egg-info/PKG-INFO +1 -1
  10. emhass-0.10.0/data/opt_res_latest.csv +0 -11
  11. {emhass-0.10.0 → emhass-0.10.1}/CODE_OF_CONDUCT.md +0 -0
  12. {emhass-0.10.0 → emhass-0.10.1}/CONTRIBUTING.md +0 -0
  13. {emhass-0.10.0 → emhass-0.10.1}/LICENSE +0 -0
  14. {emhass-0.10.0 → emhass-0.10.1}/MANIFEST.in +0 -0
  15. {emhass-0.10.0 → emhass-0.10.1}/README.md +0 -0
  16. {emhass-0.10.0 → emhass-0.10.1}/data/data_load_cost_forecast.csv +0 -0
  17. {emhass-0.10.0 → emhass-0.10.1}/data/data_load_forecast.csv +0 -0
  18. {emhass-0.10.0 → emhass-0.10.1}/data/data_prod_price_forecast.csv +0 -0
  19. {emhass-0.10.0 → emhass-0.10.1}/data/data_train_load_clustering.pkl +0 -0
  20. {emhass-0.10.0 → emhass-0.10.1}/data/data_train_load_forecast.pkl +0 -0
  21. {emhass-0.10.0 → emhass-0.10.1}/data/data_weather_forecast.csv +0 -0
  22. {emhass-0.10.0 → emhass-0.10.1}/data/heating_prediction.csv +0 -0
  23. {emhass-0.10.0 → emhass-0.10.1}/data/opt_res_perfect_optim_cost.csv +0 -0
  24. {emhass-0.10.0 → emhass-0.10.1}/data/opt_res_perfect_optim_profit.csv +0 -0
  25. {emhass-0.10.0 → emhass-0.10.1}/data/opt_res_perfect_optim_self-consumption.csv +0 -0
  26. {emhass-0.10.0 → emhass-0.10.1}/data/test_df_final.pkl +0 -0
  27. {emhass-0.10.0 → emhass-0.10.1}/data/test_response_get_data_get_method.pbz2 +0 -0
  28. {emhass-0.10.0 → emhass-0.10.1}/data/test_response_scrapper_get_method.pbz2 +0 -0
  29. {emhass-0.10.0 → emhass-0.10.1}/data/test_response_solarforecast_get_method.pbz2 +0 -0
  30. {emhass-0.10.0 → emhass-0.10.1}/data/test_response_solcast_get_method.pbz2 +0 -0
  31. {emhass-0.10.0 → emhass-0.10.1}/pyproject.toml +0 -0
  32. {emhass-0.10.0 → emhass-0.10.1}/setup.cfg +0 -0
  33. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/__init__.py +0 -0
  34. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/data/cec_inverters.pbz2 +0 -0
  35. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/data/cec_modules.pbz2 +0 -0
  36. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/forecast.py +0 -0
  37. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/machine_learning_regressor.py +0 -0
  38. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/retrieve_hass.py +0 -0
  39. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/static/advanced.html +0 -0
  40. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/static/basic.html +0 -0
  41. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/static/img/emhass_icon.png +0 -0
  42. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/static/img/emhass_logo_short.svg +0 -0
  43. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/static/img/feather-sprite.svg +0 -0
  44. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/static/script.js +0 -0
  45. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/static/style.css +0 -0
  46. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/templates/index.html +0 -0
  47. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/templates/template.html +0 -0
  48. {emhass-0.10.0 → emhass-0.10.1}/src/emhass/web_server.py +0 -0
  49. {emhass-0.10.0 → emhass-0.10.1}/src/emhass.egg-info/SOURCES.txt +0 -0
  50. {emhass-0.10.0 → emhass-0.10.1}/src/emhass.egg-info/dependency_links.txt +0 -0
  51. {emhass-0.10.0 → emhass-0.10.1}/src/emhass.egg-info/entry_points.txt +0 -0
  52. {emhass-0.10.0 → emhass-0.10.1}/src/emhass.egg-info/requires.txt +0 -0
  53. {emhass-0.10.0 → emhass-0.10.1}/src/emhass.egg-info/top_level.txt +0 -0
  54. {emhass-0.10.0 → emhass-0.10.1}/tests/test_command_line_utils.py +0 -0
  55. {emhass-0.10.0 → emhass-0.10.1}/tests/test_forecast.py +0 -0
  56. {emhass-0.10.0 → emhass-0.10.1}/tests/test_machine_learning_forecaster.py +0 -0
  57. {emhass-0.10.0 → emhass-0.10.1}/tests/test_machine_learning_regressor.py +0 -0
  58. {emhass-0.10.0 → emhass-0.10.1}/tests/test_optimization.py +0 -0
  59. {emhass-0.10.0 → emhass-0.10.1}/tests/test_retrieve_hass.py +0 -0
  60. {emhass-0.10.0 → emhass-0.10.1}/tests/test_utils.py +0 -0
{emhass-0.10.0 → emhass-0.10.1}/CHANGELOG.md
@@ -1,8 +1,16 @@
  # Changelog

+ ## 0.10.1 - 2024-06-03
+ ### Fix
+ - Fixed the PV curtailment maximum possible value constraint
+ - Added PV curtailment to the variables published to HA
+
  ## 0.10.0 - 2024-06-02
+ ### BREAKING CHANGE
+ - This version adds support for PV curtailment computation, which requires the nominal PV peak power. The easiest way to provide this information is directly through the `inverter_model` defined in the configuration. As it is used in the optimization to correctly compute PV curtailment, this parameter needs to be properly defined for your installation. Before this change it was only needed when using the PV forecast method `scrapper`, but it is now mandatory because it is used directly in the optimization.
+ Use the dedicated webapp to find the correct model for your inverter; if you cannot find your exact brand/model, just pick an inverter with the same nominal power as yours: [https://emhass-pvlib-database.streamlit.app/](https://emhass-pvlib-database.streamlit.app/)
  ### Improvement
- - Added support for hybrid inverters
+ - Added support for hybrid inverters and PV curtailment computation
  - Implemented a new `continual_publish` service that avoids the need to set up a special automation for data publishing. Thanks to @GeoDerp
  - Implement a deferrable load start penalty functionality. Thanks to @werdnum
  - This feature also implements a `def_current_state` that can be passed at runtime to let the optimization consider that a deferrable load is currently scheduled or in operation when launching the optimization task
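Note on the breaking change above: a quick way to check the nominal AC power of a candidate `inverter_model` is the CEC inverter database (EMHASS ships a copy as `cec_inverters.pbz2`, and pvlib bundles the same SAM data). The snippet below is a minimal, illustrative sketch, not EMHASS code; the Fronius search string is only an example filter.

```python
# Minimal sketch (not part of EMHASS): look up the nominal AC power ("Paco") of
# candidate models in pvlib's bundled CEC inverter database and compare it with
# your own inverter's rating before setting `inverter_model`.
import pvlib

inverters = pvlib.pvsystem.retrieve_sam('CECInverter')  # one column per inverter model
candidates = [name for name in inverters.columns if 'Fronius' in name and 'Primo' in name]
for name in candidates:
    print(name, float(inverters[name]['Paco']), 'W')
```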
{emhass-0.10.0 → emhass-0.10.1}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: emhass
- Version: 0.10.0
+ Version: 0.10.1
  Summary: An Energy Management System for Home Assistant
  Home-page: https://github.com/davidusb-geek/emhass
  Author: David HERNANDEZ
emhass-0.10.1/data/opt_res_latest.csv
@@ -0,0 +1,49 @@
+ timestamp,P_PV,P_Load,P_deferrable0,P_deferrable1,P_grid_pos,P_grid_neg,P_grid,P_PV_curtailment,unit_load_cost,unit_prod_price,cost_profit,cost_fun_profit,optim_status
+ 2024-06-03 23:00:00+02:00,0.0,2124.8911797752808,0.0,750.0,2874.8912,0.0,2874.8912,0.0,0.1419,0.065,-0.20397353064,-0.20397353064,Optimal
+ 2024-06-03 23:30:00+02:00,0.0,393.7693220338983,3000.0,750.0,4143.7693,0.0,4143.7693,0.0,0.1419,0.065,-0.29400043183499996,-0.29400043183499996,Optimal
+ 2024-06-04 00:00:00+02:00,0.0,329.5656571428571,0.0,750.0,1079.5657,0.0,1079.5657,0.0,0.1419,0.065,-0.076595186415,-0.076595186415,Optimal
+ 2024-06-04 00:30:00+02:00,0.0,214.95473988439306,0.0,750.0,964.95474,0.0,964.95474,0.0,0.1419,0.065,-0.068463538803,-0.068463538803,Optimal
+ 2024-06-04 01:00:00+02:00,0.0,254.92180790960455,3000.0,0.0,3254.9218,0.0,3254.9218,0.0,0.1419,0.065,-0.23093670171,-0.23093670171,Optimal
+ 2024-06-04 01:30:00+02:00,0.0,653.0385393258427,0.0,0.0,653.03854,0.0,653.03854,0.0,0.1419,0.065,-0.046333084413000006,-0.046333084413000006,Optimal
+ 2024-06-04 02:00:00+02:00,0.0,694.8668181818182,0.0,0.0,694.86682,0.0,694.86682,0.0,0.1419,0.065,-0.049300800879,-0.049300800879,Optimal
+ 2024-06-04 02:30:00+02:00,0.0,856.8446739130435,0.0,0.0,856.84467,0.0,856.84467,0.0,0.1419,0.065,-0.060793129336499996,-0.060793129336499996,Optimal
+ 2024-06-04 03:00:00+02:00,0.0,914.380597826087,0.0,0.0,914.3806,0.0,914.3806,0.0,0.1907,0.065,-0.08718619021000001,-0.08718619021000001,Optimal
+ 2024-06-04 03:30:00+02:00,0.0,599.8399421965318,0.0,0.0,599.83994,0.0,599.83994,0.0,0.1907,0.065,-0.057194738279,-0.057194738279,Optimal
+ 2024-06-04 04:00:00+02:00,0.0,703.5027607361963,0.0,0.0,703.50276,0.0,703.50276,0.0,0.1907,0.065,-0.067078988166,-0.067078988166,Optimal
+ 2024-06-04 04:30:00+02:00,0.0,646.7419879518072,0.0,0.0,646.74199,0.0,646.74199,0.0,0.1907,0.065,-0.061666848746500004,-0.061666848746500004,Optimal
+ 2024-06-04 05:00:00+02:00,0.0,1009.152816091954,0.0,0.0,1009.1528,0.0,1009.1528,0.0,0.1907,0.065,-0.09622271948,-0.09622271948,Optimal
+ 2024-06-04 05:30:00+02:00,0.0,967.1363841807911,0.0,0.0,967.13638,0.0,967.13638,0.0,0.1907,0.065,-0.092216453833,-0.092216453833,Optimal
+ 2024-06-04 06:00:00+02:00,0.0,935.1571508379889,0.0,0.0,935.15715,0.0,935.15715,0.0,0.1907,0.065,-0.0891672342525,-0.0891672342525,Optimal
+ 2024-06-04 06:30:00+02:00,60.0,3267.5106703910615,0.0,0.0,3207.5107,0.0,3207.5107,0.0,0.1907,0.065,-0.305836145245,-0.305836145245,Optimal
+ 2024-06-04 07:00:00+02:00,840.0,3286.2027777777776,0.0,0.0,2446.2028,0.0,2446.2028,0.0,0.1907,0.065,-0.23324543698000003,-0.23324543698000003,Optimal
+ 2024-06-04 07:30:00+02:00,660.0,1496.1914772727273,0.0,0.0,836.19148,0.0,836.19148,0.0,0.1907,0.065,-0.079730857618,-0.079730857618,Optimal
+ 2024-06-04 08:00:00+02:00,620.0,794.2991620111732,0.0,0.0,174.29916,0.0,174.29916,0.0,0.1907,0.065,-0.016619424906,-0.016619424906,Optimal
+ 2024-06-04 08:30:00+02:00,620.0,832.2424719101124,0.0,0.0,212.24247,0.0,212.24247,0.0,0.1907,0.065,-0.020237319514500002,-0.020237319514500002,Optimal
+ 2024-06-04 09:00:00+02:00,6380.0,788.9761235955057,3000.0,750.0,0.0,-1841.0239,-1841.0239,0.0,0.1907,0.065,0.05983327675,0.05983327675,Optimal
+ 2024-06-04 09:30:00+02:00,1095.5620000000001,781.2152298850575,0.0,750.0,435.65323,0.0,435.65323,0.0,0.1907,0.065,-0.0415395354805,-0.0415395354805,Optimal
+ 2024-06-04 10:00:00+02:00,811.4380000000002,664.0545197740113,0.0,0.0,0.0,-147.38348,-147.38348,0.0,0.1907,0.065,0.0047899631,0.0047899631,Optimal
+ 2024-06-04 10:30:00+02:00,681.0759999999999,666.1989265536723,0.0,0.0,0.0,-14.877073,-14.877073,0.0,0.1907,0.065,0.00048350487250000003,0.00048350487250000003,Optimal
+ 2024-06-04 11:00:00+02:00,671.9846666666667,669.4183146067417,0.0,0.0,0.0,-2.5663521,-2.5663521,0.0,0.1907,0.065,8.340644325000001e-05,8.340644325000001e-05,Optimal
+ 2024-06-04 11:30:00+02:00,6469.634666666666,579.2235294117647,3000.0,750.0,0.0,0.0,0.0,2140.4111,0.1907,-0.07,-0.0,-0.0,Optimal
+ 2024-06-04 12:00:00+02:00,2992.012,642.7344318181817,3000.0,0.0,650.72243,0.0,650.72243,0.0,0.1907,-0.07,-0.06204638370050001,-0.06204638370050001,Optimal
+ 2024-06-04 12:30:00+02:00,1867.9053333333331,637.1688636363637,0.0,750.0,0.0,0.0,0.0,480.73647,0.1907,-0.07,-0.0,-0.0,Optimal
+ 2024-06-04 13:00:00+02:00,2067.554666666667,649.3890173410405,3000.0,0.0,1581.8344,0.0,1581.8344,0.0,0.1907,-0.07,-0.15082791004,-0.15082791004,Optimal
+ 2024-06-04 13:30:00+02:00,622.756,509.79664739884396,0.0,0.0,0.0,0.0,0.0,112.95935,0.1907,-0.07,-0.0,-0.0,Optimal
+ 2024-06-04 14:00:00+02:00,1518.7553333333335,500.53686046511626,0.0,750.0,0.0,-268.21847,-268.21847,0.0,0.1907,0.065,0.008717100275000002,0.008717100275000002,Optimal
+ 2024-06-04 14:30:00+02:00,2551.502,520.944,3000.0,0.0,969.442,0.0,969.442,0.0,0.1907,0.065,-0.0924362947,-0.0924362947,Optimal
+ 2024-06-04 15:00:00+02:00,4160.0,415.32341040462427,3000.0,750.0,5.3234104,0.0,5.3234104,0.0,0.1907,0.065,-0.00050758718164,-0.00050758718164,Optimal
+ 2024-06-04 15:30:00+02:00,4240.0,321.6410285714286,3000.0,750.0,0.0,-168.35897,-168.35897,0.0,0.1419,0.065,0.005471666525,0.005471666525,Optimal
+ 2024-06-04 16:00:00+02:00,560.0,3778.0933888888894,3000.0,750.0,6968.0934,0.0,6968.0934,0.0,0.1419,0.065,-0.49438622673,-0.49438622673,Optimal
+ 2024-06-04 16:30:00+02:00,380.0,3990.816179775281,0.0,750.0,4360.8162,0.0,4360.8162,0.0,0.1419,0.065,-0.30939990939,-0.30939990939,Optimal
+ 2024-06-04 17:00:00+02:00,0.0,3909.8039890710384,0.0,750.0,4659.804,0.0,4659.804,0.0,0.1419,0.065,-0.33061309380000004,-0.33061309380000004,Optimal
+ 2024-06-04 17:30:00+02:00,0.0,4206.869447513812,0.0,0.0,4206.8694,0.0,4206.8694,0.0,0.1907,0.065,-0.40112499728999995,-0.40112499728999995,Optimal
+ 2024-06-04 18:00:00+02:00,0.0,486.26584269662925,0.0,0.0,486.26584,0.0,486.26584,0.0,0.1907,0.065,-0.046365447844000006,-0.046365447844000006,Optimal
+ 2024-06-04 18:30:00+02:00,0.0,402.43446927374305,0.0,0.0,402.43447,0.0,402.43447,0.0,0.1907,0.065,-0.0383721267145,-0.0383721267145,Optimal
+ 2024-06-04 19:00:00+02:00,0.0,316.17875,0.0,0.0,316.17875,0.0,316.17875,0.0,0.1907,0.065,-0.0301476438125,-0.0301476438125,Optimal
+ 2024-06-04 19:30:00+02:00,0.0,867.4,0.0,0.0,867.4,0.0,867.4,0.0,0.1907,0.065,-0.08270659000000001,-0.08270659000000001,Optimal
+ 2024-06-04 20:00:00+02:00,0.0,340.8070760233918,0.0,0.0,340.80708,0.0,340.80708,0.0,0.1907,0.065,-0.032495955078,-0.032495955078,Optimal
+ 2024-06-04 20:30:00+02:00,0.0,349.07406779661017,0.0,750.0,1099.0741,0.0,1099.0741,0.0,0.1419,0.065,-0.077979307395,-0.077979307395,Optimal
+ 2024-06-04 21:00:00+02:00,0.0,1790.5224581005587,0.0,750.0,2540.5225,0.0,2540.5225,0.0,0.1419,0.065,-0.18025007137500001,-0.18025007137500001,Optimal
+ 2024-06-04 21:30:00+02:00,0.0,2612.0882857142856,0.0,0.0,2612.0883,0.0,2612.0883,0.0,0.1419,0.065,-0.185327664885,-0.185327664885,Optimal
+ 2024-06-04 22:00:00+02:00,0.0,2617.098882681564,0.0,0.0,2617.0989,0.0,2617.0989,0.0,0.1419,0.065,-0.185683166955,-0.185683166955,Optimal
+ 2024-06-04 22:30:00+02:00,0.0,2254.2344375000002,0.0,0.0,2254.2344,0.0,2254.2344,0.0,0.1419,0.065,-0.15993793068,-0.15993793068,Optimal
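The regenerated fixture adds the `P_PV_curtailment` and `optim_status` columns. A minimal pandas sketch to load it and list the timesteps with scheduled curtailment (the path is assumed relative to the repository root):

```python
# Minimal sketch: inspect the new P_PV_curtailment column of the results fixture.
import pandas as pd

opt_res = pd.read_csv("data/opt_res_latest.csv", index_col="timestamp", parse_dates=True)
curtailed = opt_res[opt_res["P_PV_curtailment"] > 0]
print(curtailed[["P_PV", "P_PV_curtailment", "optim_status"]])
```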
{emhass-0.10.0 → emhass-0.10.1}/setup.py
@@ -19,7 +19,7 @@ long_description = (here / 'README.md').read_text(encoding='utf-8')

  setup(
  name='emhass', # Required
- version='0.10.0', # Required
+ version='0.10.1', # Required
  description='An Energy Management System for Home Assistant', # Optional
  long_description=long_description, # Optional
  long_description_content_type='text/markdown', # Optional (see note above)
{emhass-0.10.0 → emhass-0.10.1}/src/emhass/command_line.py
@@ -52,7 +52,7 @@ def set_input_data_dict(emhass_conf: dict, costfun: str,
  logger.info("Setting up needed data")
  # Parsing yaml
  retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(
- emhass_conf, use_secrets=not (get_data_from_file), params=params)
+ emhass_conf, use_secrets=not(get_data_from_file), params=params)
  # Treat runtimeparams
  params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
  runtimeparams, params, retrieve_hass_conf, optim_conf, plant_conf, set_type, logger)
@@ -683,8 +683,6 @@ def regressor_model_predict(input_data_dict: dict, logger: logging.Logger,
  type_var="mlregressor")
  return prediction

-
-
  def publish_data(input_data_dict: dict, logger: logging.Logger,
  save_data_to_file: Optional[bool] = False,
  opt_res_latest: Optional[pd.DataFrame] = None,
@@ -708,12 +706,10 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,

  """
  logger.info("Publishing data to HASS instance")
-
  if not isinstance(input_data_dict["params"],dict):
  params = json.loads(input_data_dict["params"])
  else:
  params = input_data_dict["params"]
-
  # Check if a day ahead optimization has been performed (read CSV file)
  if save_data_to_file:
  today = datetime.now(timezone.utc).replace(
@@ -726,7 +722,6 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
  opt_res_list_names = []
  publish_prefix = params["passed_data"]["publish_prefix"]
  entity_path = input_data_dict['emhass_conf']['data_path'] / "entities"
-
  # Check if items in entity_path
  if os.path.exists(entity_path) and len(os.listdir(entity_path)) > 0:
  # Obtain all files in entity_path
@@ -805,6 +800,20 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
  dont_post=dont_post
  )
  cols_published = ["P_PV", "P_Load"]
+ # Publish PV curtailment
+ custom_pv_curtailment_id = params["passed_data"]["custom_pv_curtailment_id"]
+ input_data_dict["rh"].post_data(
+ opt_res_latest["P_PV_curtailment"],
+ idx_closest,
+ custom_pv_curtailment_id["entity_id"],
+ custom_pv_curtailment_id["unit_of_measurement"],
+ custom_pv_curtailment_id["friendly_name"],
+ type_var="power",
+ publish_prefix=publish_prefix,
+ save_entities=entity_save,
+ dont_post=dont_post
+ )
+ cols_published = cols_published + ["P_PV_curtailment"]
  # Publish deferrable loads
  custom_deferrable_forecast_id = params["passed_data"][
  "custom_deferrable_forecast_id"
@@ -944,7 +953,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
  opt_res_latest.index[idx_closest]]]
  return opt_res

- def continual_publish(input_data_dict,entity_path,logger):
+ def continual_publish(input_data_dict: dict, entity_path: pathlib.Path, logger: logging.Logger):
  """
  If continual_publish is true and a entity file saved in /data_path/entities, continually publish sensor on freq rate, updating entity current state value based on timestamp

@@ -959,23 +968,22 @@ def continual_publish(input_data_dict,entity_path,logger):
  logger.info("Continual publish thread service started")
  freq = input_data_dict['retrieve_hass_conf'].get("freq", pd.to_timedelta(1, "minutes"))
  entity_path_contents = []
-
  while True:
  # Sleep for x seconds (using current time as a reference for time left)
  time.sleep(max(0,freq.total_seconds() - (datetime.now(input_data_dict["retrieve_hass_conf"]["time_zone"]).timestamp() % 60)))
-
  # Loop through all saved entity files
  if os.path.exists(entity_path) and len(os.listdir(entity_path)) > 0:
  entity_path_contents = os.listdir(entity_path)
  for entity in entity_path_contents:
  if entity != "metadata.json":
  # Call publish_json with entity file, build entity, and publish
- publish_json(entity,input_data_dict,entity_path,logger,"continual_publish")
+ publish_json(entity, input_data_dict, entity_path, logger, "continual_publish")
  pass
  # This function should never return
  return False

- def publish_json(entity,input_data_dict,entity_path,logger,reference: Optional[str] = ""):
+ def publish_json(entity: dict, input_data_dict: dict, entity_path: pathlib.Path,
+ logger: logging.Logger, reference: Optional[str] = ""):
  """
  Extract saved entity data from .json (in data_path/entities), build entity, post results to post_data

@@ -989,9 +997,8 @@ def publish_json(entity,input_data_dict,entity_path,logger,reference: Optional[s
  :type logger: logging.Logger
  :param reference: String for identifying who ran the function
  :type reference: str, optional
-
+
  """
-
  # Retrieve entity metadata from file
  if os.path.isfile(entity_path / "metadata.json"):
  with open(entity_path / "metadata.json", "r") as file:
@@ -1001,16 +1008,12 @@ def publish_json(entity,input_data_dict,entity_path,logger,reference: Optional[s
  else:
  logger.error("unable to located metadata.json in:" + entity_path)
  return False
-
  # Round current timecode (now)
  now_precise = datetime.now(input_data_dict["retrieve_hass_conf"]["time_zone"]).replace(second=0, microsecond=0)
-
  # Retrieve entity data from file
  entity_data = pd.read_json(entity_path / entity , orient='index')
-
  # Remove ".json" from string for entity_id
  entity_id = entity.replace(".json", "")
-
  # Adjust Dataframe from received entity json file
  entity_data.columns = [metadata[entity_id]["name"]]
  entity_data.index.name = "timestamp"
@@ -1025,15 +1028,13 @@ def publish_json(entity,input_data_dict,entity_path,logger,reference: Optional[s
  idx_closest = entity_data.index.get_indexer([now_precise], method="bfill")[0]
  if idx_closest == -1:
  idx_closest = entity_data.index.get_indexer([now_precise], method="nearest")[0]
-
  # Call post data
  if reference == "continual_publish":
  logger.debug("Auto Published sensor:")
  logger_levels = "DEBUG"
  else:
  logger_levels = "INFO"
-
- #post/save entity
+ # post/save entity
  input_data_dict["rh"].post_data(
  data_df=entity_data[metadata[entity_id]["name"]],
  idx=idx_closest,
{emhass-0.10.0 → emhass-0.10.1}/src/emhass/machine_learning_forecaster.py
@@ -221,51 +221,52 @@ class MLForecaster:
  :return: The DataFrame with the forecasts using the optimized model.
  :rtype: pd.DataFrame
  """
- # Bayesian search hyperparameter and lags with skforecast/optuna
- # Lags used as predictors
- if debug:
- lags_grid = [3]
- refit = False
- num_lags = 3
- else:
- lags_grid = [6, 12, 24, 36, 48, 60, 72]
- refit = True
- num_lags = self.num_lags
  # Regressor hyperparameters search space
  if self.sklearn_model == 'LinearRegression':
  if debug:
  def search_space(trial):
- search_space = {'fit_intercept': trial.suggest_categorical('fit_intercept', [True])}
+ search_space = {'fit_intercept': trial.suggest_categorical('fit_intercept', [True]),
+ 'lags': trial.suggest_categorical('lags', [3])}
  return search_space
  else:
  def search_space(trial):
- search_space = {'fit_intercept': trial.suggest_categorical('fit_intercept', [True, False])}
+ search_space = {'fit_intercept': trial.suggest_categorical('fit_intercept', [True, False]),
+ 'lags': trial.suggest_categorical('lags', [6, 12, 24, 36, 48, 60, 72])}
  return search_space
  elif self.sklearn_model == 'ElasticNet':
  if debug:
  def search_space(trial):
- search_space = {'selection': trial.suggest_categorical('selection', ['random'])}
+ search_space = {'selection': trial.suggest_categorical('selection', ['random']),
+ 'lags': trial.suggest_categorical('lags', [3])}
  return search_space
  else:
  def search_space(trial):
  search_space = {'alpha': trial.suggest_float('alpha', 0.0, 2.0),
  'l1_ratio': trial.suggest_float('l1_ratio', 0.0, 1.0),
- 'selection': trial.suggest_categorical('selection', ['cyclic', 'random'])
- }
+ 'selection': trial.suggest_categorical('selection', ['cyclic', 'random']),
+ 'lags': trial.suggest_categorical('lags', [6, 12, 24, 36, 48, 60, 72])}
  return search_space
  elif self.sklearn_model == 'KNeighborsRegressor':
  if debug:
  def search_space(trial):
- search_space = {'weights': trial.suggest_categorical('weights', ['uniform'])}
+ search_space = {'weights': trial.suggest_categorical('weights', ['uniform']),
+ 'lags': trial.suggest_categorical('lags', [3])}
  return search_space
  else:
  def search_space(trial):
  search_space = {'n_neighbors': trial.suggest_int('n_neighbors', 2, 20),
  'leaf_size': trial.suggest_int('leaf_size', 20, 40),
- 'weights': trial.suggest_categorical('weights', ['uniform', 'distance'])
- }
+ 'weights': trial.suggest_categorical('weights', ['uniform', 'distance']),
+ 'lags': trial.suggest_categorical('lags', [6, 12, 24, 36, 48, 60, 72])}
  return search_space
-
+ # Bayesian search hyperparameter and lags with skforecast/optuna
+ # Lags used as predictors
+ if debug:
+ refit = False
+ num_lags = 3
+ else:
+ refit = True
+ num_lags = self.num_lags
  # The optimization routine call
  self.logger.info("Bayesian hyperparameter optimization with backtesting")
  start_time = time.time()
@@ -273,7 +274,6 @@ class MLForecaster:
  forecaster = self.forecaster,
  y = self.data_train[self.var_model],
  exog = self.data_train.drop(self.var_model, axis=1),
- lags_grid = lags_grid,
  search_space = search_space,
  steps = num_lags,
  metric = MLForecaster.neg_r2_score,
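This follows newer skforecast releases, where candidate lags are tuned inside the Optuna `search_space` rather than passed through a separate `lags_grid` argument. A minimal sketch of the pattern with illustrative values; skforecast is expected to pick up the sampled `lags` entry and rebuild the forecaster accordingly:

```python
# Minimal sketch: the lag sets become one more categorical entry of the Optuna
# search space instead of a separate lags_grid argument. Values are illustrative.
import optuna

def search_space(trial: optuna.trial.Trial) -> dict:
    return {
        'n_neighbors': trial.suggest_int('n_neighbors', 2, 20),
        'leaf_size': trial.suggest_int('leaf_size', 20, 40),
        'weights': trial.suggest_categorical('weights', ['uniform', 'distance']),
        'lags': trial.suggest_categorical('lags', [6, 12, 24, 36, 48, 60, 72]),
    }

study = optuna.create_study(direction='maximize')
print(search_space(study.ask()))  # one sampled candidate, including its lag set
```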
{emhass-0.10.0 → emhass-0.10.1}/src/emhass/optimization.py
@@ -314,7 +314,7 @@ class Optimization:
  else:
  constraints.update({"constraint_curtailment_{}".format(i) :
  plp.LpConstraint(
- e = P_PV[i] - P_PV_curtailment[i] - P_nom_inverter,
+ e = P_PV_curtailment[i] - P_PV[i],
  sense = plp.LpConstraintLE,
  rhs = 0)
  for i in set_I})
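The corrected constraint bounds the curtailed power by the PV power actually available at each timestep, instead of mixing the inverter nominal power into the same expression. A minimal PuLP sketch of just this constraint, with illustrative data (not the full EMHASS optimization model):

```python
# Minimal PuLP sketch of the corrected bound: P_PV_curtailment[i] <= P_PV[i].
# Data values are illustrative; the real model adds many more variables and constraints.
import pulp as plp

P_PV = [0.0, 840.0, 6380.0]  # forecasted PV power per timestep [W]
set_I = range(len(P_PV))

opt_model = plp.LpProblem("curtailment_sketch", plp.LpMinimize)
P_PV_curtailment = {i: plp.LpVariable(f"P_PV_curtailment_{i}", lowBound=0) for i in set_I}

constraints = {"constraint_curtailment_{}".format(i):
               plp.LpConstraint(e=P_PV_curtailment[i] - P_PV[i],
                                sense=plp.LpConstraintLE,
                                rhs=0)
               for i in set_I}
for name, constraint in constraints.items():
    opt_model.addConstraint(constraint, name)
```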
{emhass-0.10.0 → emhass-0.10.1}/src/emhass/utils.py
@@ -161,6 +161,11 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  "unit_of_measurement": "W",
  "friendly_name": "Load Power Forecast",
  },
+ "custom_pv_curtailment_id": {
+ "entity_id": "sensor.p_pv_curtailment",
+ "unit_of_measurement": "W",
+ "friendly_name": "PV Power Curtailment",
+ },
  "custom_batt_forecast_id": {
  "entity_id": "sensor.p_batt_forecast",
  "unit_of_measurement": "W",
@@ -458,6 +463,10 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
  params["passed_data"]["custom_load_forecast_id"] = runtimeparams[
  "custom_load_forecast_id"
  ]
+ if "custom_pv_curtailment_id" in runtimeparams.keys():
+ params["passed_data"]["custom_pv_curtailment_id"] = runtimeparams[
+ "custom_pv_curtailment_id"
+ ]
  if "custom_batt_forecast_id" in runtimeparams.keys():
  params["passed_data"]["custom_batt_forecast_id"] = runtimeparams[
  "custom_batt_forecast_id"
{emhass-0.10.0 → emhass-0.10.1}/src/emhass.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: emhass
- Version: 0.10.0
+ Version: 0.10.1
  Summary: An Energy Management System for Home Assistant
  Home-page: https://github.com/davidusb-geek/emhass
  Author: David HERNANDEZ
emhass-0.10.0/data/opt_res_latest.csv
@@ -1,11 +0,0 @@
- timestamp,P_PV,P_Load,P_deferrable0,P_deferrable1,P_grid_pos,P_grid_neg,P_grid,unit_load_cost,unit_prod_price,cost_profit,cost_fun_profit
- 2023-01-27 10:30:00+01:00,0.5,1127.6172187500001,3000.0,750.0,4877.1172,0.0,4877.1172,1,1,-2.4385586,-2.4385586
- 2023-01-27 11:00:00+01:00,2.0,2.0,3000.0,750.0,3750.0,0.0,3750.0,2,2,-3.75,-3.75
- 2023-01-27 11:30:00+01:00,3.0,3.0,3000.0,750.0,3750.0,0.0,3750.0,3,3,-5.625,-5.625
- 2023-01-27 12:00:00+01:00,4.0,4.0,3000.0,750.0,3750.0,0.0,3750.0,4,4,-7.5,-7.5
- 2023-01-27 12:30:00+01:00,5.0,5.0,3000.0,750.0,3750.0,0.0,3750.0,5,5,-9.375,-9.375
- 2023-01-27 13:00:00+01:00,6.0,6.0,3000.0,750.0,3750.0,0.0,3750.0,6,6,-11.25,-11.25
- 2023-01-27 13:30:00+01:00,7.0,7.0,3000.0,750.0,3750.0,0.0,3750.0,7,7,-13.125,-13.125
- 2023-01-27 14:00:00+01:00,8.0,8.0,3000.0,750.0,3750.0,0.0,3750.0,8,8,-15.0,-15.0
- 2023-01-27 14:30:00+01:00,9.0,9.0,6000.0,750.0,6750.0,0.0,6750.0,9,9,-30.375,-30.375
- 2023-01-27 15:00:00+01:00,10.0,10.0,0.0,5250.0,5250.0,0.0,5250.0,10,10,-26.25,-26.25