emhass 0.8.4__tar.gz → 0.8.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. {emhass-0.8.4 → emhass-0.8.6}/CHANGELOG.md +18 -0
  2. {emhass-0.8.4 → emhass-0.8.6}/PKG-INFO +2 -4
  3. {emhass-0.8.4 → emhass-0.8.6}/README.md +0 -2
  4. {emhass-0.8.4 → emhass-0.8.6}/setup.py +4 -3
  5. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/command_line.py +14 -12
  6. emhass-0.8.6/src/emhass/data/cec_inverters.pbz2 +0 -0
  7. emhass-0.8.6/src/emhass/data/cec_modules.pbz2 +0 -0
  8. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/forecast.py +71 -31
  9. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/optimization.py +4 -4
  10. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/retrieve_hass.py +3 -3
  11. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/static/script.js +3 -34
  12. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/static/style.css +74 -39
  13. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/templates/index.html +7 -7
  14. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/utils.py +21 -64
  15. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/web_server.py +8 -4
  16. {emhass-0.8.4 → emhass-0.8.6}/src/emhass.egg-info/PKG-INFO +2 -4
  17. {emhass-0.8.4 → emhass-0.8.6}/src/emhass.egg-info/SOURCES.txt +2 -0
  18. {emhass-0.8.4 → emhass-0.8.6}/tests/test_command_line_utils.py +8 -8
  19. {emhass-0.8.4 → emhass-0.8.6}/tests/test_forecast.py +81 -27
  20. {emhass-0.8.4 → emhass-0.8.6}/CODE_OF_CONDUCT.md +0 -0
  21. {emhass-0.8.4 → emhass-0.8.6}/CONTRIBUTING.md +0 -0
  22. {emhass-0.8.4 → emhass-0.8.6}/LICENSE +0 -0
  23. {emhass-0.8.4 → emhass-0.8.6}/MANIFEST.in +0 -0
  24. {emhass-0.8.4 → emhass-0.8.6}/data/data_load_cost_forecast.csv +0 -0
  25. {emhass-0.8.4 → emhass-0.8.6}/data/data_load_forecast.csv +0 -0
  26. {emhass-0.8.4 → emhass-0.8.6}/data/data_prod_price_forecast.csv +0 -0
  27. {emhass-0.8.4 → emhass-0.8.6}/data/data_train_load_clustering.pkl +0 -0
  28. {emhass-0.8.4 → emhass-0.8.6}/data/data_train_load_forecast.pkl +0 -0
  29. {emhass-0.8.4 → emhass-0.8.6}/data/data_weather_forecast.csv +0 -0
  30. {emhass-0.8.4 → emhass-0.8.6}/data/opt_res_latest.csv +0 -0
  31. {emhass-0.8.4 → emhass-0.8.6}/data/opt_res_perfect_optim_cost.csv +0 -0
  32. {emhass-0.8.4 → emhass-0.8.6}/data/opt_res_perfect_optim_profit.csv +0 -0
  33. {emhass-0.8.4 → emhass-0.8.6}/data/opt_res_perfect_optim_self-consumption.csv +0 -0
  34. {emhass-0.8.4 → emhass-0.8.6}/data/test_df_final.pkl +0 -0
  35. {emhass-0.8.4 → emhass-0.8.6}/data/test_response_get_data_get_method.pbz2 +0 -0
  36. {emhass-0.8.4 → emhass-0.8.6}/data/test_response_scrapper_get_method.pbz2 +0 -0
  37. {emhass-0.8.4 → emhass-0.8.6}/data/test_response_solarforecast_get_method.pbz2 +0 -0
  38. {emhass-0.8.4 → emhass-0.8.6}/data/test_response_solcast_get_method.pbz2 +0 -0
  39. {emhass-0.8.4 → emhass-0.8.6}/pyproject.toml +0 -0
  40. {emhass-0.8.4 → emhass-0.8.6}/setup.cfg +0 -0
  41. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/__init__.py +0 -0
  42. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/machine_learning_forecaster.py +0 -0
  43. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/static/advanced.html +0 -0
  44. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/static/basic.html +0 -0
  45. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/static/img/emhass_icon.png +0 -0
  46. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/static/img/emhass_logo_short.svg +0 -0
  47. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/static/img/feather-sprite.svg +0 -0
  48. {emhass-0.8.4 → emhass-0.8.6}/src/emhass/templates/template.html +0 -0
  49. {emhass-0.8.4 → emhass-0.8.6}/src/emhass.egg-info/dependency_links.txt +0 -0
  50. {emhass-0.8.4 → emhass-0.8.6}/src/emhass.egg-info/entry_points.txt +0 -0
  51. {emhass-0.8.4 → emhass-0.8.6}/src/emhass.egg-info/requires.txt +0 -0
  52. {emhass-0.8.4 → emhass-0.8.6}/src/emhass.egg-info/top_level.txt +0 -0
  53. {emhass-0.8.4 → emhass-0.8.6}/tests/test_machine_learning_forecaster.py +0 -0
  54. {emhass-0.8.4 → emhass-0.8.6}/tests/test_optimization.py +0 -0
  55. {emhass-0.8.4 → emhass-0.8.6}/tests/test_retrieve_hass.py +0 -0
  56. {emhass-0.8.4 → emhass-0.8.6}/tests/test_utils.py +0 -0

{emhass-0.8.4 → emhass-0.8.6}/CHANGELOG.md

@@ -1,5 +1,23 @@
  # Changelog

+ ## 0.8.6 - 2024-04-07
+ ### Fix
+ - Fixed bug from forecast out method related to issue 240
+ - Fix patch for some issues with package file paths
+
+ ## 0.8.5 - 2024-04-01
+ ### Improvement
+ - Simplified fetch urls to relatives
+ - Improved code for passed forecast data error handling in utils.py
+ - Added new tests for forecast longer than 24h by changing parameter `delta_forecast`
+ - Added new files for updated PV modules and inverters database for use with PVLib
+ - Added a new webapp to help configuring modules and inverters: [https://emhass-pvlib-database.streamlit.app/](https://emhass-pvlib-database.streamlit.app/)
+ - Added a new `P_to_grid_max` variable, different from the current `P_from_grid_max` option
+ ### Fix
+ - style.css auto format and adjusted table styling
+ - Changed pandas datetime rounding to nonexistent='shift_forward' to help survive DST change
+ - Dropped support for Python 3.9
+
  ## 0.8.4 - 2024-03-13
  ### Improvement
  - Improved documentation
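
The `nonexistent='shift_forward'` entry above refers to how pandas rounds timezone-aware timestamps that land in a daylight-saving gap. A minimal sketch of the failure mode the fix avoids (illustrative only, not EMHASS code); the same option appears in the forecast.py and retrieve_hass.py hunks below:

```python
import pandas as pd

# 2024-03-31 02:00-03:00 does not exist in Europe/Paris (spring-forward gap).
ts = pd.DatetimeIndex([pd.Timestamp("2024-03-31 01:50", tz="Europe/Paris")])

# Rounding to 30 min would land on the nonexistent 02:00 and raise by default;
# nonexistent='shift_forward' moves the result to the first valid instant instead.
print(ts.round("30min", nonexistent="shift_forward"))
```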

{emhass-0.8.4 → emhass-0.8.6}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: emhass
- Version: 0.8.4
+ Version: 0.8.6
  Summary: An Energy Management System for Home Assistant
  Home-page: https://github.com/davidusb-geek/emhass
  Author: David HERNANDEZ
@@ -12,7 +12,7 @@ Classifier: Topic :: Software Development :: Build Tools
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Operating System :: OS Independent
- Requires-Python: >=3.9, <3.12
+ Requires-Python: >=3.10, <3.12
  Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: wheel
@@ -466,8 +466,6 @@ curl -i -H 'Content-Type:application/json' -X POST -d '{"pv_power_forecast":[0,
  curl -i -H 'Content-Type:application/json' -X POST -d '{"pv_power_forecast":[0, 70, 141.22, 246.18, 513.5, 753.27, 1049.89, 1797.93, 1697.3, 3078.93], "prediction_horizon":10, "soc_init":0.5,"soc_final":0.6,"def_total_hours":[1,3],"def_start_timestep":[0,3],"def_end_timestep":[0,6]}' http://localhost:5000/action/naive-mpc-optim
  ```

-
-
  ## A machine learning forecaster

  Starting in v0.4.0 a new machine learning forecaster class was introduced.

{emhass-0.8.4 → emhass-0.8.6}/README.md

@@ -431,8 +431,6 @@ curl -i -H 'Content-Type:application/json' -X POST -d '{"pv_power_forecast":[0,
  curl -i -H 'Content-Type:application/json' -X POST -d '{"pv_power_forecast":[0, 70, 141.22, 246.18, 513.5, 753.27, 1049.89, 1797.93, 1697.3, 3078.93], "prediction_horizon":10, "soc_init":0.5,"soc_final":0.6,"def_total_hours":[1,3],"def_start_timestep":[0,3],"def_end_timestep":[0,6]}' http://localhost:5000/action/naive-mpc-optim
  ```

-
-
  ## A machine learning forecaster

  Starting in v0.4.0 a new machine learning forecaster class was introduced.

{emhass-0.8.4 → emhass-0.8.6}/setup.py

@@ -19,7 +19,7 @@ long_description = (here / 'README.md').read_text(encoding='utf-8')

  setup(
  name='emhass', # Required
- version='0.8.4', # Required
+ version='0.8.6', # Required
  description='An Energy Management System for Home Assistant', # Optional
  long_description=long_description, # Optional
  long_description_content_type='text/markdown', # Optional (see note above)
@@ -37,7 +37,7 @@ setup(
  keywords='energy, management, optimization, hass', # Optional
  package_dir={'': 'src'}, # Optional
  packages=find_packages(where='src'), # Required
- python_requires='>=3.9, <3.12',
+ python_requires='>=3.10, <3.12',
  install_requires=[
  'wheel',
  'numpy==1.26.4',
@@ -63,5 +63,6 @@ setup(
  ],
  },
  package_data={'emhass': ['templates/index.html','templates/template.html','static/advanced.html','static/basic.html', 'static/script.js',
- 'static/style.css','static/img/emhass_icon.png','static/img/emhass_logo_short.svg', 'static/img/feather-sprite.svg']},
+ 'static/style.css','static/img/emhass_icon.png','static/img/emhass_logo_short.svg', 'static/img/feather-sprite.svg',
+ 'data/cec_modules.pbz2', 'data/cec_inverters.pbz2']},
  )

{emhass-0.8.4 → emhass-0.8.6}/src/emhass/command_line.py

@@ -59,8 +59,8 @@ def set_input_data_dict(config_path: pathlib.Path, base_path: str, costfun: str,
  optim_conf, plant_conf, set_type, logger)
  # Define main objects
  rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
- retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
- params, base_path, logger, get_data_from_file=get_data_from_file)
+ retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
+ params, base_path, logger, get_data_from_file=get_data_from_file)
  fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
  params, base_path, logger, get_data_from_file=get_data_from_file)
  opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf,
@@ -76,12 +76,12 @@ def set_input_data_dict(config_path: pathlib.Path, base_path: str, costfun: str,
  days_list = utils.get_days_list(retrieve_hass_conf['days_to_retrieve'])
  var_list = [retrieve_hass_conf['var_load'], retrieve_hass_conf['var_PV']]
  if not rh.get_data(days_list, var_list,
- minimal_response=False, significant_changes_only=False):
+ minimal_response=False, significant_changes_only=False):
  return False
  if not rh.prepare_data(retrieve_hass_conf['var_load'], load_negative = retrieve_hass_conf['load_negative'],
- set_zero_min = retrieve_hass_conf['set_zero_min'],
- var_replace_zero = retrieve_hass_conf['var_replace_zero'],
- var_interp = retrieve_hass_conf['var_interp']):
+ set_zero_min = retrieve_hass_conf['set_zero_min'],
+ var_replace_zero = retrieve_hass_conf['var_replace_zero'],
+ var_interp = retrieve_hass_conf['var_interp']):
  return False
  df_input_data = rh.df_final.copy()
  # What we don't need for this type of action
@@ -113,12 +113,12 @@ def set_input_data_dict(config_path: pathlib.Path, base_path: str, costfun: str,
  days_list = utils.get_days_list(1)
  var_list = [retrieve_hass_conf['var_load'], retrieve_hass_conf['var_PV']]
  if not rh.get_data(days_list, var_list,
- minimal_response=False, significant_changes_only=False):
+ minimal_response=False, significant_changes_only=False):
  return False
  if not rh.prepare_data(retrieve_hass_conf['var_load'], load_negative = retrieve_hass_conf['load_negative'],
- set_zero_min = retrieve_hass_conf['set_zero_min'],
- var_replace_zero = retrieve_hass_conf['var_replace_zero'],
- var_interp = retrieve_hass_conf['var_interp']):
+ set_zero_min = retrieve_hass_conf['set_zero_min'],
+ var_replace_zero = retrieve_hass_conf['var_replace_zero'],
+ var_interp = retrieve_hass_conf['var_interp']):
  return False
  df_input_data = rh.df_final.copy()
  # Get PV and load forecasts
@@ -201,9 +201,11 @@ def perfect_forecast_optim(input_data_dict: dict, logger: logging.Logger,
  # Load cost and prod price forecast
  df_input_data = input_data_dict['fcst'].get_load_cost_forecast(
  input_data_dict['df_input_data'],
- method=input_data_dict['fcst'].optim_conf['load_cost_forecast_method'])
+ method=input_data_dict['fcst'].optim_conf['load_cost_forecast_method'],
+ list_and_perfect=True)
  df_input_data = input_data_dict['fcst'].get_prod_price_forecast(
- df_input_data, method=input_data_dict['fcst'].optim_conf['prod_price_forecast_method'])
+ df_input_data, method=input_data_dict['fcst'].optim_conf['prod_price_forecast_method'],
+ list_and_perfect=True)
  opt_res = input_data_dict['opt'].perform_perfect_forecast_optim(df_input_data, input_data_dict['days_list'])
  # Save CSV file for analysis
  if save_data_to_file:

{emhass-0.8.4 → emhass-0.8.6}/src/emhass/forecast.py

@@ -7,6 +7,8 @@ import copy
  import logging
  import json
  from typing import Optional
+ import bz2
+ import pickle as cPickle
  import pandas as pd
  import numpy as np
  from datetime import datetime, timedelta
@@ -21,7 +23,7 @@ from pvlib.irradiance import disc

  from emhass.retrieve_hass import RetrieveHass
  from emhass.machine_learning_forecaster import MLForecaster
- from emhass.utils import get_days_list
+ from emhass.utils import get_days_list, get_root, set_df_index_freq


  class Forecast(object):
@@ -133,7 +135,7 @@
  self.time_zone = self.retrieve_hass_conf['time_zone']
  self.method_ts_round = self.retrieve_hass_conf['method_ts_round']
  self.timeStep = self.freq.seconds/3600 # in hours
- self.time_delta = pd.to_timedelta(opt_time_delta, "hours") # The period of optimization
+ self.time_delta = pd.to_timedelta(opt_time_delta, "hours")
  self.var_PV = self.retrieve_hass_conf['var_PV']
  self.var_load = self.retrieve_hass_conf['var_load']
  self.var_load_new = self.var_load+'_positive'
@@ -159,7 +161,7 @@
  self.end_forecast = (self.start_forecast + self.optim_conf['delta_forecast']).replace(microsecond=0)
  self.forecast_dates = pd.date_range(start=self.start_forecast,
  end=self.end_forecast-self.freq,
- freq=self.freq).round(self.freq, ambiguous='infer', nonexistent=self.freq)
+ freq=self.freq).round(self.freq, ambiguous='infer', nonexistent='shift_forward')
  if params is not None:
  if 'prediction_horizon' in list(self.params['passed_data'].keys()):
  if self.params['passed_data']['prediction_horizon'] is not None:
@@ -184,7 +186,7 @@
  freq_scrap = pd.to_timedelta(60, "minutes") # The scrapping time step is 60min
  forecast_dates_scrap = pd.date_range(start=self.start_forecast,
  end=self.end_forecast-freq_scrap,
- freq=freq_scrap).round(freq_scrap, ambiguous='infer', nonexistent=freq_scrap)
+ freq=freq_scrap).round(freq_scrap, ambiguous='infer', nonexistent='shift_forward')
  # Using the clearoutside webpage
  response = get("https://clearoutside.com/forecast/"+str(round(self.lat, 2))+"/"+str(round(self.lon, 2))+"?desktop=true")
  '''import bz2 # Uncomment to save a serialized data for tests
@@ -412,8 +414,10 @@
  # Setting the main parameters of the PV plant
  location = Location(latitude=self.lat, longitude=self.lon)
  temp_params = TEMPERATURE_MODEL_PARAMETERS['sapm']['close_mount_glass_glass']
- cec_modules = pvlib.pvsystem.retrieve_sam('CECMod')
- cec_inverters = pvlib.pvsystem.retrieve_sam('cecinverter')
+ cec_modules = bz2.BZ2File(get_root(__file__, num_parent=2) / 'emhass/data/cec_modules.pbz2', "rb")
+ cec_modules = cPickle.load(cec_modules)
+ cec_inverters = bz2.BZ2File(get_root(__file__, num_parent=2) / 'emhass/data/cec_inverters.pbz2', "rb")
+ cec_inverters = cPickle.load(cec_inverters)
  if type(self.plant_conf['module_model']) == list:
  P_PV_forecast = pd.Series(0, index=df_weather.index)
  for i in range(len(self.plant_conf['module_model'])):
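
The hunk above replaces the live `pvlib.pvsystem.retrieve_sam()` lookups with databases bundled in the new `data/cec_modules.pbz2` and `data/cec_inverters.pbz2` files. A hedged sketch of reading one of them back outside EMHASS, assuming the same bz2-compressed pickle format and a hypothetical checkout-relative path:

```python
import bz2
import pickle
from pathlib import Path

# Hypothetical path to a source checkout; adjust to wherever the package data lives.
data_dir = Path("src/emhass/data")

with bz2.BZ2File(data_dir / "cec_modules.pbz2", "rb") as fh:
    cec_modules = pickle.load(fh)

# Expected to mirror pvlib.pvsystem.retrieve_sam('CECMod'): a DataFrame keyed by module name.
print(cec_modules.shape)
```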

@@ -476,15 +480,16 @@
  end_forecast_csv = (start_forecast_csv + self.optim_conf['delta_forecast']).replace(microsecond=0)
  forecast_dates_csv = pd.date_range(start=start_forecast_csv,
  end=end_forecast_csv+timedelta(days=timedelta_days)-self.freq,
- freq=self.freq).round(self.freq, ambiguous='infer', nonexistent=self.freq)
+ freq=self.freq).round(self.freq, ambiguous='infer', nonexistent='shift_forward')
  if self.params is not None:
  if 'prediction_horizon' in list(self.params['passed_data'].keys()):
  if self.params['passed_data']['prediction_horizon'] is not None:
  forecast_dates_csv = forecast_dates_csv[0:self.params['passed_data']['prediction_horizon']]
  return forecast_dates_csv

- def get_forecast_out_from_csv(self, df_final: pd.DataFrame, forecast_dates_csv: pd.date_range,
- csv_path: str, data_list: Optional[list] = None) -> pd.DataFrame:
+ def get_forecast_out_from_csv_or_list(self, df_final: pd.DataFrame, forecast_dates_csv: pd.date_range,
+ csv_path: str, data_list: Optional[list] = None,
+ list_and_perfect: Optional[bool] = False) -> pd.DataFrame:
  r"""
  Get the forecast data as a DataFrame from a CSV file.

@@ -502,34 +507,67 @@
  :rtype: pd.DataFrame

  """
- days_list = df_final.index.day.unique().tolist()
  if csv_path is None:
  data_dict = {'ts':forecast_dates_csv, 'yhat':data_list}
  df_csv = pd.DataFrame.from_dict(data_dict)
  df_csv.index = forecast_dates_csv
  df_csv.drop(['ts'], axis=1, inplace=True)
+ df_csv = set_df_index_freq(df_csv)
+ if list_and_perfect:
+ days_list = df_final.index.day.unique().tolist()
+ else:
+ days_list = df_csv.index.day.unique().tolist()
  else:
  load_csv_file_path = self.root + csv_path
  df_csv = pd.read_csv(load_csv_file_path, header=None, names=['ts', 'yhat'])
  df_csv.index = forecast_dates_csv
  df_csv.drop(['ts'], axis=1, inplace=True)
+ df_csv = set_df_index_freq(df_csv)
+ days_list = df_final.index.day.unique().tolist()
  forecast_out = pd.DataFrame()
  for day in days_list:
- first_elm_index = [i for i, x in enumerate(df_final.index.day == day) if x][0]
- last_elm_index = [i for i, x in enumerate(df_final.index.day == day) if x][-1]
- fcst_index = pd.date_range(start=df_final.index[first_elm_index],
- end=df_final.index[last_elm_index],
- freq=df_final.index.freq)
- first_hour = str(df_final.index[first_elm_index].hour)+":"+str(df_final.index[first_elm_index].minute)
- last_hour = str(df_final.index[last_elm_index].hour)+":"+str(df_final.index[last_elm_index].minute)
+ if csv_path is None:
+ if list_and_perfect:
+ df_tmp = copy.deepcopy(df_final)
+ else:
+ df_tmp = copy.deepcopy(df_csv)
+ else:
+ df_tmp = copy.deepcopy(df_final)
+ first_elm_index = [i for i, x in enumerate(df_tmp.index.day == day) if x][0]
+ last_elm_index = [i for i, x in enumerate(df_tmp.index.day == day) if x][-1]
+ fcst_index = pd.date_range(start=df_tmp.index[first_elm_index],
+ end=df_tmp.index[last_elm_index],
+ freq=df_tmp.index.freq)
+ first_hour = str(df_tmp.index[first_elm_index].hour)+":"+str(df_tmp.index[first_elm_index].minute)
+ last_hour = str(df_tmp.index[last_elm_index].hour)+":"+str(df_tmp.index[last_elm_index].minute)
  if len(forecast_out) == 0:
- forecast_out = pd.DataFrame(
- df_csv.between_time(first_hour, last_hour).values,
- index=fcst_index)
+ if csv_path is None:
+ if list_and_perfect:
+ forecast_out = pd.DataFrame(
+ df_csv.between_time(first_hour, last_hour).values,
+ index=fcst_index)
+ else:
+ forecast_out = pd.DataFrame(
+ df_csv.loc[fcst_index,:].between_time(first_hour, last_hour).values,
+ index=fcst_index)
+ else:
+ forecast_out = pd.DataFrame(
+ df_csv.between_time(first_hour, last_hour).values,
+ index=fcst_index)
  else:
- forecast_tp = pd.DataFrame(
- df_csv.between_time(first_hour, last_hour).values,
- index=fcst_index)
+ if csv_path is None:
+ if list_and_perfect:
+ forecast_tp = pd.DataFrame(
+ df_csv.between_time(first_hour, last_hour).values,
+ index=fcst_index)
+ else:
+ forecast_tp = pd.DataFrame(
+ df_csv.loc[fcst_index,:].between_time(first_hour, last_hour).values,
+ index=fcst_index)
+ else:
+ forecast_tp = pd.DataFrame(
+ df_csv.between_time(first_hour, last_hour).values,
+ index=fcst_index)
  forecast_out = pd.concat([forecast_out, forecast_tp], axis=0)
  return forecast_out
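
The reworked `get_forecast_out_from_csv_or_list` above still builds its output day by day with `between_time`. A small standalone pandas sketch of that slicing pattern, with illustrative values rather than the EMHASS implementation:

```python
import pandas as pd

freq = pd.Timedelta("30min")
forecast_dates = pd.date_range("2024-04-07 00:00", periods=8, freq=freq, tz="Europe/Paris")
data_list = [0.0, 0.0, 10.5, 42.0, 80.0, 55.0, 12.0, 0.0]  # hypothetical passed forecast values

# Map the flat list onto timestamped rows, as done when csv_path is None
df_csv = pd.DataFrame({"yhat": data_list}, index=forecast_dates)

# Slice one day by wall-clock time, which is what the per-day loop does
print(df_csv.between_time("00:00", "03:30"))
```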

@@ -664,7 +702,8 @@
  return P_Load_forecast

  def get_load_cost_forecast(self, df_final: pd.DataFrame, method: Optional[str] = 'hp_hc_periods',
- csv_path: Optional[str] = "data_load_cost_forecast.csv") -> pd.DataFrame:
+ csv_path: Optional[str] = "data_load_cost_forecast.csv",
+ list_and_perfect: Optional[bool] = False) -> pd.DataFrame:
  r"""
  Get the unit cost for the load consumption based on multiple tariff \
  periods. This is the cost of the energy from the utility in a vector \
@@ -694,7 +733,7 @@
  df_final.loc[df_hp.index, self.var_load_cost] = self.optim_conf['load_cost_hp']
  elif method == 'csv':
  forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0)
- forecast_out = self.get_forecast_out_from_csv(
+ forecast_out = self.get_forecast_out_from_csv_or_list(
  df_final, forecast_dates_csv, csv_path)
  df_final[self.var_load_cost] = forecast_out
  elif method == 'list': # reading a list of values
@@ -708,8 +747,8 @@
  data_list = data_list[0:len(self.forecast_dates)]
  # Define the correct dates
  forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0)
- forecast_out = self.get_forecast_out_from_csv(
- df_final, forecast_dates_csv, None, data_list=data_list)
+ forecast_out = self.get_forecast_out_from_csv_or_list(
+ df_final, forecast_dates_csv, None, data_list=data_list, list_and_perfect=list_and_perfect)
  # Fill the final DF
  df_final[self.var_load_cost] = forecast_out
  else:
@@ -718,7 +757,8 @@
  return df_final

  def get_prod_price_forecast(self, df_final: pd.DataFrame, method: Optional[str] = 'constant',
- csv_path: Optional[str] = "/data/data_prod_price_forecast.csv") -> pd.DataFrame:
+ csv_path: Optional[str] = "/data/data_prod_price_forecast.csv",
+ list_and_perfect: Optional[bool] = False) -> pd.DataFrame:
  r"""
  Get the unit power production price for the energy injected to the grid.\
  This is the price of the energy injected to the utility in a vector \
@@ -743,7 +783,7 @@
  df_final[self.var_prod_price] = self.optim_conf['prod_sell_price']
  elif method == 'csv':
  forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0)
- forecast_out = self.get_forecast_out_from_csv(df_final,
+ forecast_out = self.get_forecast_out_from_csv_or_list(df_final,
  forecast_dates_csv,
  csv_path)
  df_final[self.var_prod_price] = forecast_out
@@ -758,8 +798,8 @@
  data_list = data_list[0:len(self.forecast_dates)]
  # Define the correct dates
  forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0)
- forecast_out = self.get_forecast_out_from_csv(
- df_final, forecast_dates_csv, None, data_list=data_list)
+ forecast_out = self.get_forecast_out_from_csv_or_list(
+ df_final, forecast_dates_csv, None, data_list=data_list, list_and_perfect=list_and_perfect)
  # Fill the final DF
  df_final[self.var_prod_price] = forecast_out
  else:

{emhass-0.8.4 → emhass-0.8.6}/src/emhass/optimization.py

@@ -162,10 +162,10 @@

  ## Add decision variables
  P_grid_neg = {(i):plp.LpVariable(cat='Continuous',
- lowBound=-self.plant_conf['P_grid_max'], upBound=0,
+ lowBound=-self.plant_conf['P_to_grid_max'], upBound=0,
  name="P_grid_neg{}".format(i)) for i in set_I}
  P_grid_pos = {(i):plp.LpVariable(cat='Continuous',
- lowBound=0, upBound=self.plant_conf['P_grid_max'],
+ lowBound=0, upBound=self.plant_conf['P_from_grid_max'],
  name="P_grid_pos{}".format(i)) for i in set_I}
  P_deferrable = []
  P_def_bin1 = []
@@ -267,13 +267,13 @@
  # Avoid injecting and consuming from grid at the same time
  constraints.update({"constraint_pgridpos_{}".format(i) :
  plp.LpConstraint(
- e = P_grid_pos[i] - self.plant_conf['P_grid_max']*D[i],
+ e = P_grid_pos[i] - self.plant_conf['P_from_grid_max']*D[i],
  sense = plp.LpConstraintLE,
  rhs = 0)
  for i in set_I})
  constraints.update({"constraint_pgridneg_{}".format(i) :
  plp.LpConstraint(
- e = -P_grid_neg[i] - self.plant_conf['P_grid_max']*(1-D[i]),
+ e = -P_grid_neg[i] - self.plant_conf['P_to_grid_max']*(1-D[i]),
  sense = plp.LpConstraintLE,
  rhs = 0)
  for i in set_I})
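
The change above splits the single `P_grid_max` bound into `P_from_grid_max` (import) and `P_to_grid_max` (export). A minimal PuLP sketch, not the EMHASS model, of how the two limits bound mutually exclusive import/export variables:

```python
import pulp as plp

P_from_grid_max = 9000  # hypothetical import limit, W
P_to_grid_max = 5000    # hypothetical export limit, W

P_grid_pos = plp.LpVariable("P_grid_pos", lowBound=0, upBound=P_from_grid_max)
P_grid_neg = plp.LpVariable("P_grid_neg", lowBound=-P_to_grid_max, upBound=0)
D = plp.LpVariable("D", cat="Binary")  # 1 while importing, 0 while exporting

prob = plp.LpProblem("grid_bounds_demo", plp.LpMinimize)
prob += P_grid_pos - P_from_grid_max * D <= 0        # import only allowed when D == 1
prob += -P_grid_neg - P_to_grid_max * (1 - D) <= 0   # export only allowed when D == 0
```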

{emhass-0.8.4 → emhass-0.8.6}/src/emhass/retrieve_hass.py

@@ -133,14 +133,14 @@
  try: # Sometimes when there are connection problems we need to catch empty retrieved json
  data = response.json()[0]
  except IndexError:
- if x is 0:
+ if x == 0:
  self.logger.error("The retrieved JSON is empty, A sensor:" + var + " may have 0 days of history or passed sensor may not be correct")
  else:
  self.logger.error("The retrieved JSON is empty for day:"+ str(day) +", days_to_retrieve may be larger than the recorded history of sensor:" + var + " (check your recorder settings)")
  return False
  df_raw = pd.DataFrame.from_dict(data)
  if len(df_raw) == 0:
- if x is 0:
+ if x == 0:
  self.logger.error("The retrieved Dataframe is empty, A sensor:" + var + " may have 0 days of history or passed sensor may not be correct")
  else:
  self.logger.error("Retrieved empty Dataframe for day:"+ str(day) +", days_to_retrieve may be larger than the recorded history of sensor:" + var + " (check your recorder settings)")
@@ -149,7 +149,7 @@
  from_date = pd.to_datetime(df_raw['last_changed'], format="ISO8601").min()
  to_date = pd.to_datetime(df_raw['last_changed'], format="ISO8601").max()
  ts = pd.to_datetime(pd.date_range(start=from_date, end=to_date, freq=self.freq),
- format='%Y-%d-%m %H:%M').round(self.freq, ambiguous='infer', nonexistent=self.freq)
+ format='%Y-%d-%m %H:%M').round(self.freq, ambiguous='infer', nonexistent='shift_forward')
  df_day = pd.DataFrame(index = ts)
  # Caution with undefined string data: unknown, unavailable, etc.
  df_tp = df_raw.copy()[['state']].replace(
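
The `x is 0` → `x == 0` change above is more than style: `is` tests object identity rather than value, which CPython only guarantees for a small cache of integers (and flags with a SyntaxWarning for literals since Python 3.8). A quick illustration:

```python
x = int("1000")
y = int("1000")
print(x == y)  # True: == compares values
print(x is y)  # typically False in CPython: `is` compares object identity
```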

{emhass-0.8.4 → emhass-0.8.6}/src/emhass/static/script.js

@@ -1,8 +1,3 @@
- //before page load check for stylesheet
- document.onreadystatechange = async function() {
- checkStyleSheets()
- }
-
  //on page reload get saved data
  window.onload = async function () {

@@ -12,23 +7,6 @@ window.onload = async function () {
  document.getElementById("basicOrAdvanced").addEventListener("click", () => SwitchBasicOrAdvanced());
  };

- //check style sheet is loaded
- async function checkStyleSheets() {
- var styleHREF = getHTMLURL() + `static/style.css`
- var styles = document.styleSheets;
- for (var i = 0; i < styles.length; i++) {
- if (styles[i].href.match("style")["input"] == styleHREF) {
- return true
- }
- }
- //if could not find file
- var style = document.createElement("link");
- style.rel = "stylesheet";
- style.href = styleHREF;
- style.type = "text/css";
- document.getElementsByTagName("head")[0].appendChild(style);
- }
-
  //add listeners to buttons (based on page)
  function loadButtons(page) {
  switch (page) {
@@ -120,19 +98,10 @@ function SwitchBasicOrAdvanced() {
  }
  }

- //set current url
- function getHTMLURL() {
- var currentUrl
- if (window.location) {
- currentUrl = window.location.href; //get current url to append
- }
- else { currentUrl = "" }
- return currentUrl
- }

  //get html data from basic.html or advanced.html
  async function getHTMLData(htmlFile) {
- const response = await fetch(getHTMLURL() + `static/` + htmlFile);
+ const response = await fetch(`static/` + htmlFile);
  blob = await response.blob(); //get data blob
  htmlTemplateData = await new Response(blob).text(); //obtain html from blob
  return await htmlTemplateData;
@@ -148,7 +117,7 @@ async function formAction(action, page) {

  if (data !== 0) { //don't run if there is an error in the input (box/list) Json data
  showChangeStatus("loading", {}); // show loading div for status
- const response = await fetch(getHTMLURL() + `action/${action}`, {
+ const response = await fetch(`action/` + action, {
  //fetch data from webserver.py
  method: "POST",
  headers: {
@@ -206,7 +175,7 @@ async function showChangeStatus(status, logJson) {
  async function getTemplate() {
  //fetch data from webserver.py
  let htmlTemplateData = "";
- response = await fetch(getHTMLURL() + `template/table-template`, {
+ response = await fetch(`template/table-template`, {
  method: "GET",
  });
  blob = await response.blob(); //get data blob