emhass 0.9.1__py3-none-any.whl → 0.10.0__py3-none-any.whl

emhass/utils.py CHANGED
@@ -12,7 +12,6 @@ import pandas as pd
12
12
  import yaml
13
13
  import pytz
14
14
 
15
-
16
15
  import plotly.express as px
17
16
 
18
17
  pd.options.plotting.backend = "plotly"
@@ -413,6 +412,8 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
413
412
  optim_conf["def_start_timestep"] = runtimeparams["def_start_timestep"]
414
413
  if "def_end_timestep" in runtimeparams.keys():
415
414
  optim_conf["def_end_timestep"] = runtimeparams["def_end_timestep"]
415
+ if "def_current_state" in runtimeparams.keys():
416
+ optim_conf["def_current_state"] = [bool(s) for s in runtimeparams["def_current_state"]]
416
417
  if "treat_def_as_semi_cont" in runtimeparams.keys():
417
418
  optim_conf["treat_def_as_semi_cont"] = [
418
419
  eval(str(k).capitalize())
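As a usage sketch (not part of the diff), the new `def_current_state` runtime parameter can be passed in the JSON payload of an optimization action; the endpoint is one of the existing EMHASS actions, while the host, port and the example states below are illustrative only:

```python
import requests

# Illustrative payload: one current on/off state per configured deferrable load.
# "def_current_state" is the runtime parameter handled above; the values are examples.
runtimeparams = {"def_current_state": [True, False]}

response = requests.post(
    "http://localhost:5000/action/naive-mpc-optim",  # any optimization action accepts runtime params
    json=runtimeparams,
    timeout=300,
)
print(response.status_code)
```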
@@ -443,6 +444,8 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
443
444
  optim_conf["weight_battery_charge"] = runtimeparams["weight_battery_charge"]
444
445
  if 'freq' in runtimeparams.keys():
445
446
  retrieve_hass_conf['freq'] = pd.to_timedelta(runtimeparams['freq'], "minutes")
447
+ if 'continual_publish' in runtimeparams.keys():
448
+ retrieve_hass_conf['continual_publish'] = bool(runtimeparams['continual_publish'])
446
449
  # Treat plant configuration parameters passed at runtime
447
450
  if "SOCtarget" in runtimeparams.keys():
448
451
  plant_conf["SOCtarget"] = runtimeparams["SOCtarget"]
@@ -487,12 +490,18 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
487
490
  params["passed_data"]["custom_deferrable_forecast_id"] = runtimeparams[
488
491
  "custom_deferrable_forecast_id"
489
492
  ]
490
- # A condition to put a prefix on all published data
493
+ # A condition to put a prefix on all published data, or check for saved data under prefix name
491
494
  if "publish_prefix" not in runtimeparams.keys():
492
495
  publish_prefix = ""
493
496
  else:
494
497
  publish_prefix = runtimeparams["publish_prefix"]
495
498
  params["passed_data"]["publish_prefix"] = publish_prefix
499
+ # A condition to manually save entity data under data_path/entities after optimization
500
+ if "entity_save" not in runtimeparams.keys():
501
+ entity_save = ""
502
+ else:
503
+ entity_save = runtimeparams["entity_save"]
504
+ params["passed_data"]["entity_save"] = entity_save
496
505
  # Serialize the final params
497
506
  params = json.dumps(params)
498
507
  return params, retrieve_hass_conf, optim_conf, plant_conf
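The two new blocks above follow the same default-or-passed pattern as `publish_prefix`; a compact equivalent using `dict.get` (a sketch, not the code in the release):

```python
# Same behaviour as the if/else blocks above, written with dict.get:
runtimeparams = {"publish_prefix": "mpc_"}  # example payload; "entity_save" not passed
params = {"passed_data": {}}

params["passed_data"]["publish_prefix"] = runtimeparams.get("publish_prefix", "")
params["passed_data"]["entity_save"] = runtimeparams.get("entity_save", "")

print(params["passed_data"])  # {'publish_prefix': 'mpc_', 'entity_save': ''}
```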
@@ -521,7 +530,7 @@ def get_yaml_parse(emhass_conf: dict, use_secrets: Optional[bool] = True,
521
530
  input_conf = json.loads(params)
522
531
  if use_secrets:
523
532
  if params is None:
524
- with open(emhass_conf["root_path"] / 'secrets_emhass.yaml', 'r') as file: #assume secrets file is in root path
533
+ with open(emhass_conf["config_path"].parent / 'secrets_emhass.yaml', 'r') as file: # Assume secrets and config file paths are the same
525
534
  input_secrets = yaml.load(file, Loader=yaml.FullLoader)
526
535
  else:
527
536
  input_secrets = input_conf.pop("params_secrets", None)
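With this change the secrets file is resolved relative to the configuration file rather than the package root; a small pathlib sketch with a placeholder path:

```python
from pathlib import Path
import yaml

# Placeholder path for illustration; in EMHASS this comes from emhass_conf.
emhass_conf = {"config_path": Path("/app/config_emhass.yaml")}

# secrets_emhass.yaml is now expected alongside the config file:
secrets_path = emhass_conf["config_path"].parent / "secrets_emhass.yaml"
if secrets_path.exists():
    with open(secrets_path, "r") as file:
        input_secrets = yaml.load(file, Loader=yaml.FullLoader)
```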
@@ -730,60 +739,30 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
730
739
  """
731
740
  if addon == 1:
732
741
  # Updating variables in retrieve_hass_conf
733
- params["retrieve_hass_conf"]["freq"] = options.get(
734
- "optimization_time_step", params["retrieve_hass_conf"]["freq"]
735
- )
736
- params["retrieve_hass_conf"]["days_to_retrieve"] = options.get(
737
- "historic_days_to_retrieve",
738
- params["retrieve_hass_conf"]["days_to_retrieve"],
739
- )
740
- params["retrieve_hass_conf"]["var_PV"] = options.get(
741
- "sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_PV"]
742
- )
743
- params["retrieve_hass_conf"]["var_load"] = options.get(
744
- "sensor_power_load_no_var_loads", params["retrieve_hass_conf"]["var_load"]
745
- )
746
- params["retrieve_hass_conf"]["load_negative"] = options.get(
747
- "load_negative", params["retrieve_hass_conf"]["load_negative"]
748
- )
749
- params["retrieve_hass_conf"]["set_zero_min"] = options.get(
750
- "set_zero_min", params["retrieve_hass_conf"]["set_zero_min"]
751
- )
742
+ params["retrieve_hass_conf"]["freq"] = options.get("optimization_time_step", params["retrieve_hass_conf"]["freq"])
743
+ params["retrieve_hass_conf"]["days_to_retrieve"] = options.get("historic_days_to_retrieve", params["retrieve_hass_conf"]["days_to_retrieve"])
744
+ params["retrieve_hass_conf"]["var_PV"] = options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_PV"])
745
+ params["retrieve_hass_conf"]["var_load"] = options.get("sensor_power_load_no_var_loads", params["retrieve_hass_conf"]["var_load"])
746
+ params["retrieve_hass_conf"]["load_negative"] = options.get("load_negative", params["retrieve_hass_conf"]["load_negative"])
747
+ params["retrieve_hass_conf"]["set_zero_min"] = options.get("set_zero_min", params["retrieve_hass_conf"]["set_zero_min"])
752
748
  params["retrieve_hass_conf"]["var_replace_zero"] = [
753
- options.get(
754
- "sensor_power_photovoltaics",
755
- params["retrieve_hass_conf"]["var_replace_zero"],
756
- )
749
+ options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_replace_zero"])
757
750
  ]
758
751
  params["retrieve_hass_conf"]["var_interp"] = [
759
- options.get(
760
- "sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_PV"]
761
- ),
762
- options.get(
763
- "sensor_power_load_no_var_loads",
764
- params["retrieve_hass_conf"]["var_load"],
765
- ),
752
+ options.get("sensor_power_photovoltaics", params["retrieve_hass_conf"]["var_PV"]),
753
+ options.get("sensor_power_load_no_var_loads", params["retrieve_hass_conf"]["var_load"])
766
754
  ]
767
- params["retrieve_hass_conf"]["method_ts_round"] = options.get(
768
- "method_ts_round", params["retrieve_hass_conf"]["method_ts_round"]
769
- )
755
+ params["retrieve_hass_conf"]["method_ts_round"] = options.get("method_ts_round", params["retrieve_hass_conf"]["method_ts_round"])
756
+ params["retrieve_hass_conf"]["continual_publish"] = options.get("continual_publish", params["retrieve_hass_conf"]["continual_publish"])
770
757
  # Update params Secrets if specified
771
758
  params["params_secrets"] = params_secrets
772
- params["params_secrets"]["time_zone"] = options.get(
773
- "time_zone", params_secrets["time_zone"]
774
- )
759
+ params["params_secrets"]["time_zone"] = options.get("time_zone", params_secrets["time_zone"])
775
760
  params["params_secrets"]["lat"] = options.get("Latitude", params_secrets["lat"])
776
- params["params_secrets"]["lon"] = options.get(
777
- "Longitude", params_secrets["lon"]
778
- )
761
+ params["params_secrets"]["lon"] = options.get("Longitude", params_secrets["lon"])
779
762
  params["params_secrets"]["alt"] = options.get("Altitude", params_secrets["alt"])
780
763
  # Updating variables in optim_conf
781
- params["optim_conf"]["set_use_battery"] = options.get(
782
- "set_use_battery", params["optim_conf"]["set_use_battery"]
783
- )
784
- params["optim_conf"]["num_def_loads"] = options.get(
785
- "number_of_deferrable_loads", params["optim_conf"]["num_def_loads"]
786
- )
764
+ params["optim_conf"]["set_use_battery"] = options.get("set_use_battery", params["optim_conf"]["set_use_battery"])
765
+ params["optim_conf"]["num_def_loads"] = options.get("number_of_deferrable_loads", params["optim_conf"]["num_def_loads"])
787
766
  if options.get("list_nominal_power_of_deferrable_loads", None) != None:
788
767
  params["optim_conf"]["P_deferrable_nom"] = [
789
768
  i["nominal_power_of_deferrable_loads"]
@@ -799,43 +778,22 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
799
778
  i["treat_deferrable_load_as_semi_cont"]
800
779
  for i in options.get("list_treat_deferrable_load_as_semi_cont")
801
780
  ]
802
- params["optim_conf"]["weather_forecast_method"] = options.get(
803
- "weather_forecast_method", params["optim_conf"]["weather_forecast_method"]
804
- )
781
+ params["optim_conf"]["weather_forecast_method"] = options.get("weather_forecast_method", params["optim_conf"]["weather_forecast_method"])
805
782
  # Update optional param secrets
806
783
  if params["optim_conf"]["weather_forecast_method"] == "solcast":
807
- params["params_secrets"]["solcast_api_key"] = options.get(
808
- "optional_solcast_api_key",
809
- params_secrets.get("solcast_api_key", "123456"),
810
- )
811
- params["params_secrets"]["solcast_rooftop_id"] = options.get(
812
- "optional_solcast_rooftop_id",
813
- params_secrets.get("solcast_rooftop_id", "123456"),
814
- )
784
+ params["params_secrets"]["solcast_api_key"] = options.get("optional_solcast_api_key", params_secrets.get("solcast_api_key", "123456"))
785
+ params["params_secrets"]["solcast_rooftop_id"] = options.get("optional_solcast_rooftop_id", params_secrets.get("solcast_rooftop_id", "123456"))
815
786
  elif params["optim_conf"]["weather_forecast_method"] == "solar.forecast":
816
- params["params_secrets"]["solar_forecast_kwp"] = options.get(
817
- "optional_solar_forecast_kwp",
818
- params_secrets.get("solar_forecast_kwp", 5),
819
- )
820
- params["optim_conf"]["load_forecast_method"] = options.get(
821
- "load_forecast_method", params["optim_conf"]["load_forecast_method"]
822
- )
823
- params["optim_conf"]["delta_forecast"] = options.get(
824
- "delta_forecast_daily", params["optim_conf"]["delta_forecast"]
825
- )
826
- params["optim_conf"]["load_cost_forecast_method"] = options.get(
827
- "load_cost_forecast_method",
828
- params["optim_conf"]["load_cost_forecast_method"],
829
- )
787
+ params["params_secrets"]["solar_forecast_kwp"] = options.get("optional_solar_forecast_kwp", params_secrets.get("solar_forecast_kwp", 5))
788
+ params["optim_conf"]["load_forecast_method"] = options.get("load_forecast_method", params["optim_conf"]["load_forecast_method"])
789
+ params["optim_conf"]["delta_forecast"] = options.get("delta_forecast_daily", params["optim_conf"]["delta_forecast"])
790
+ params["optim_conf"]["load_cost_forecast_method"] = options.get("load_cost_forecast_method", params["optim_conf"]["load_cost_forecast_method"])
830
791
  if options.get("list_set_deferrable_load_single_constant", None) != None:
831
792
  params["optim_conf"]["set_def_constant"] = [
832
793
  i["set_deferrable_load_single_constant"]
833
794
  for i in options.get("list_set_deferrable_load_single_constant")
834
795
  ]
835
- if (
836
- options.get("list_peak_hours_periods_start_hours", None) != None
837
- and options.get("list_peak_hours_periods_end_hours", None) != None
838
- ):
796
+ if (options.get("list_peak_hours_periods_start_hours", None) != None and options.get("list_peak_hours_periods_end_hours", None) != None):
839
797
  start_hours_list = [
840
798
  i["peak_hours_periods_start_hours"]
841
799
  for i in options["list_peak_hours_periods_start_hours"]
@@ -847,27 +805,27 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
847
805
  num_peak_hours = len(start_hours_list)
848
806
  list_hp_periods_list = [{'period_hp_'+str(i+1):[{'start':start_hours_list[i]},{'end':end_hours_list[i]}]} for i in range(num_peak_hours)]
849
807
  params['optim_conf']['list_hp_periods'] = list_hp_periods_list
850
- params['optim_conf']['load_cost_hp'] = options.get('load_peak_hours_cost',params['optim_conf']['load_cost_hp'])
808
+ params['optim_conf']['load_cost_hp'] = options.get('load_peak_hours_cost', params['optim_conf']['load_cost_hp'])
851
809
  params['optim_conf']['load_cost_hc'] = options.get('load_offpeak_hours_cost', params['optim_conf']['load_cost_hc'])
852
810
  params['optim_conf']['prod_price_forecast_method'] = options.get('production_price_forecast_method', params['optim_conf']['prod_price_forecast_method'])
853
- params['optim_conf']['prod_sell_price'] = options.get('photovoltaic_production_sell_price',params['optim_conf']['prod_sell_price'])
854
- params['optim_conf']['set_total_pv_sell'] = options.get('set_total_pv_sell',params['optim_conf']['set_total_pv_sell'])
855
- params['optim_conf']['lp_solver'] = options.get('lp_solver',params['optim_conf']['lp_solver'])
856
- params['optim_conf']['lp_solver_path'] = options.get('lp_solver_path',params['optim_conf']['lp_solver_path'])
857
- params['optim_conf']['set_nocharge_from_grid'] = options.get('set_nocharge_from_grid',params['optim_conf']['set_nocharge_from_grid'])
858
- params['optim_conf']['set_nodischarge_to_grid'] = options.get('set_nodischarge_to_grid',params['optim_conf']['set_nodischarge_to_grid'])
859
- params['optim_conf']['set_battery_dynamic'] = options.get('set_battery_dynamic',params['optim_conf']['set_battery_dynamic'])
860
- params['optim_conf']['battery_dynamic_max'] = options.get('battery_dynamic_max',params['optim_conf']['battery_dynamic_max'])
861
- params['optim_conf']['battery_dynamic_min'] = options.get('battery_dynamic_min',params['optim_conf']['battery_dynamic_min'])
862
- params['optim_conf']['weight_battery_discharge'] = options.get('weight_battery_discharge',params['optim_conf']['weight_battery_discharge'])
863
- params['optim_conf']['weight_battery_charge'] = options.get('weight_battery_charge',params['optim_conf']['weight_battery_charge'])
811
+ params['optim_conf']['prod_sell_price'] = options.get('photovoltaic_production_sell_price', params['optim_conf']['prod_sell_price'])
812
+ params['optim_conf']['set_total_pv_sell'] = options.get('set_total_pv_sell', params['optim_conf']['set_total_pv_sell'])
813
+ params['optim_conf']['lp_solver'] = options.get('lp_solver', params['optim_conf']['lp_solver'])
814
+ params['optim_conf']['lp_solver_path'] = options.get('lp_solver_path', params['optim_conf']['lp_solver_path'])
815
+ params['optim_conf']['set_nocharge_from_grid'] = options.get('set_nocharge_from_grid', params['optim_conf']['set_nocharge_from_grid'])
816
+ params['optim_conf']['set_nodischarge_to_grid'] = options.get('set_nodischarge_to_grid', params['optim_conf']['set_nodischarge_to_grid'])
817
+ params['optim_conf']['set_battery_dynamic'] = options.get('set_battery_dynamic', params['optim_conf']['set_battery_dynamic'])
818
+ params['optim_conf']['battery_dynamic_max'] = options.get('battery_dynamic_max', params['optim_conf']['battery_dynamic_max'])
819
+ params['optim_conf']['battery_dynamic_min'] = options.get('battery_dynamic_min', params['optim_conf']['battery_dynamic_min'])
820
+ params['optim_conf']['weight_battery_discharge'] = options.get('weight_battery_discharge', params['optim_conf']['weight_battery_discharge'])
821
+ params['optim_conf']['weight_battery_charge'] = options.get('weight_battery_charge', params['optim_conf']['weight_battery_charge'])
864
822
  if options.get('list_start_timesteps_of_each_deferrable_load',None) != None:
865
823
  params['optim_conf']['def_start_timestep'] = [i['start_timesteps_of_each_deferrable_load'] for i in options.get('list_start_timesteps_of_each_deferrable_load')]
866
824
  if options.get('list_end_timesteps_of_each_deferrable_load',None) != None:
867
825
  params['optim_conf']['def_end_timestep'] = [i['end_timesteps_of_each_deferrable_load'] for i in options.get('list_end_timesteps_of_each_deferrable_load')]
868
826
  # Updating variables in plant_conf
869
- params['plant_conf']['P_from_grid_max'] = options.get('maximum_power_from_grid',params['plant_conf']['P_from_grid_max'])
870
- params['plant_conf']['P_to_grid_max'] = options.get('maximum_power_to_grid',params['plant_conf']['P_to_grid_max'])
827
+ params['plant_conf']['P_from_grid_max'] = options.get('maximum_power_from_grid', params['plant_conf']['P_from_grid_max'])
828
+ params['plant_conf']['P_to_grid_max'] = options.get('maximum_power_to_grid', params['plant_conf']['P_to_grid_max'])
871
829
  if options.get('list_pv_module_model',None) != None:
872
830
  params['plant_conf']['module_model'] = [i['pv_module_model'] for i in options.get('list_pv_module_model')]
873
831
  if options.get('list_pv_inverter_model',None) != None:
@@ -880,14 +838,15 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
880
838
  params['plant_conf']['modules_per_string'] = [i['modules_per_string'] for i in options.get('list_modules_per_string')]
881
839
  if options.get('list_strings_per_inverter',None) != None:
882
840
  params['plant_conf']['strings_per_inverter'] = [i['strings_per_inverter'] for i in options.get('list_strings_per_inverter')]
883
- params['plant_conf']['Pd_max'] = options.get('battery_discharge_power_max',params['plant_conf']['Pd_max'])
884
- params['plant_conf']['Pc_max'] = options.get('battery_charge_power_max',params['plant_conf']['Pc_max'])
885
- params['plant_conf']['eta_disch'] = options.get('battery_discharge_efficiency',params['plant_conf']['eta_disch'])
886
- params['plant_conf']['eta_ch'] = options.get('battery_charge_efficiency',params['plant_conf']['eta_ch'])
887
- params['plant_conf']['Enom'] = options.get('battery_nominal_energy_capacity',params['plant_conf']['Enom'])
888
- params['plant_conf']['SOCmin'] = options.get('battery_minimum_state_of_charge',params['plant_conf']['SOCmin'])
889
- params['plant_conf']['SOCmax'] = options.get('battery_maximum_state_of_charge',params['plant_conf']['SOCmax'])
890
- params['plant_conf']['SOCtarget'] = options.get('battery_target_state_of_charge',params['plant_conf']['SOCtarget'])
841
+ params["plant_conf"]["inverter_is_hybrid"] = options.get("inverter_is_hybrid", params["plant_conf"]["inverter_is_hybrid"])
842
+ params['plant_conf']['Pd_max'] = options.get('battery_discharge_power_max', params['plant_conf']['Pd_max'])
843
+ params['plant_conf']['Pc_max'] = options.get('battery_charge_power_max', params['plant_conf']['Pc_max'])
844
+ params['plant_conf']['eta_disch'] = options.get('battery_discharge_efficiency', params['plant_conf']['eta_disch'])
845
+ params['plant_conf']['eta_ch'] = options.get('battery_charge_efficiency', params['plant_conf']['eta_ch'])
846
+ params['plant_conf']['Enom'] = options.get('battery_nominal_energy_capacity', params['plant_conf']['Enom'])
847
+ params['plant_conf']['SOCmin'] = options.get('battery_minimum_state_of_charge', params['plant_conf']['SOCmin'])
848
+ params['plant_conf']['SOCmax'] = options.get('battery_maximum_state_of_charge', params['plant_conf']['SOCmax'])
849
+ params['plant_conf']['SOCtarget'] = options.get('battery_target_state_of_charge', params['plant_conf']['SOCtarget'])
891
850
  # Check parameter lists have the same amounts as deferrable loads
892
851
  # If not, set defaults to fill in gaps
893
852
  if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_start_timestep']):
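The block that follows (not fully shown in this hunk) pads the per-deferrable-load lists when their length does not match `num_def_loads`; a rough sketch of that idea, assuming missing entries fall back to a default value:

```python
def pad_to_num_def_loads(values, num_def_loads, default):
    """Sketch only: pad (or truncate) a per-deferrable-load list to the expected length."""
    values = list(values)[:num_def_loads]
    return values + [default] * (num_def_loads - len(values))

# Example: two start timesteps configured but three deferrable loads declared.
print(pad_to_num_def_loads([0, 2], num_def_loads=3, default=0))  # [0, 2, 0]
```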
@@ -917,9 +876,7 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
917
876
  # days_to_retrieve should be no less then 2
918
877
  if params["retrieve_hass_conf"]["days_to_retrieve"] < 2:
919
878
  params["retrieve_hass_conf"]["days_to_retrieve"] = 2
920
- logger.warning(
921
- "days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. Make sure your sensors also have at least 2 days of history"
922
- )
879
+ logger.warning("days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. Make sure your sensors also have at least 2 days of history")
923
880
  else:
924
881
  params["params_secrets"] = params_secrets
925
882
  # The params dict
emhass/web_server.py CHANGED
@@ -7,21 +7,21 @@ from requests import get
7
7
  from waitress import serve
8
8
  from importlib.metadata import version, PackageNotFoundError
9
9
  from pathlib import Path
10
- import os, json, argparse, pickle, yaml, logging, re
10
+ import os, json, argparse, pickle, yaml, logging, re, threading
11
11
  from distutils.util import strtobool
12
12
 
13
13
  from emhass.command_line import set_input_data_dict
14
14
  from emhass.command_line import perfect_forecast_optim, dayahead_forecast_optim, naive_mpc_optim
15
15
  from emhass.command_line import forecast_model_fit, forecast_model_predict, forecast_model_tune
16
16
  from emhass.command_line import regressor_model_fit, regressor_model_predict
17
- from emhass.command_line import publish_data
17
+ from emhass.command_line import publish_data, continual_publish
18
18
  from emhass.utils import get_injection_dict, get_injection_dict_forecast_model_fit, \
19
19
  get_injection_dict_forecast_model_tune, build_params
20
20
 
21
21
  # Define the Flask instance
22
22
  app = Flask(__name__)
23
23
 
24
- #check logfile for error, anything after string match if provided
24
+ # Check logfile for error, anything after string match if provided
25
25
  def checkFileLog(refString=None):
26
26
  if (refString is not None):
27
27
  logArray = grabLog(refString) #grab reduced log array
@@ -34,7 +34,7 @@ def checkFileLog(refString=None):
34
34
  return True
35
35
  return False
36
36
 
37
- #find string in logs, append all lines after to return
37
+ # Find string in logs, append all lines after to return
38
38
  def grabLog(refString):
39
39
  isFound = []
40
40
  output = []
@@ -49,13 +49,14 @@ def grabLog(refString):
49
49
  output.append(logArray[x])
50
50
  return output
51
51
 
52
- #clear the log file
52
+ # Clear the log file
53
53
  def clearFileLog():
54
54
  if ((emhass_conf['data_path'] / 'actionLogs.txt')).exists():
55
55
  with open(str(emhass_conf['data_path'] / 'actionLogs.txt'), "w") as fp:
56
56
  fp.truncate()
57
57
 
58
- #initial index page render
58
+
59
+ # Initial index page render
59
60
  @app.route('/')
60
61
  def index():
61
62
  app.logger.info("EMHASS server online, serving index.html...")
@@ -97,6 +98,7 @@ def template_action(action_name):
97
98
  #post actions
98
99
  @app.route('/action/<action_name>', methods=['POST'])
99
100
  def action_call(action_name):
101
+ # Setting up parameters
100
102
  with open(str(emhass_conf['data_path'] / 'params.pkl'), "rb") as fid:
101
103
  emhass_conf['config_path'], params = pickle.load(fid)
102
104
  runtimeparams = request.get_json(force=True)
@@ -110,6 +112,15 @@ def action_call(action_name):
110
112
  params, runtimeparams, action_name, app.logger)
111
113
  if not input_data_dict:
112
114
  return make_response(grabLog(ActionStr), 400)
115
+
116
+ # If continual_publish is True, start thread with loop function
117
+ if len(continual_publish_thread) == 0 and input_data_dict['retrieve_hass_conf'].get("continual_publish",False):
118
+ # Start Thread
119
+ continualLoop = threading.Thread(name="continual_publish",target=continual_publish,args=[input_data_dict,entity_path,app.logger])
120
+ continualLoop.start()
121
+ continual_publish_thread.append(continualLoop)
122
+
123
+ # run action based on POST request
113
124
  if action_name == 'publish-data':
114
125
  ActionStr = " >> Publishing data..."
115
126
  app.logger.info(ActionStr)
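The `continual_publish` function imported from `emhass.command_line` is not shown in this diff; a rough sketch of what such a background publishing loop could look like (names, arguments and sleep logic are illustrative, not the actual implementation):

```python
import threading
import time

def continual_publish_sketch(freq_minutes, publish_once, stop_event):
    """Illustrative loop: republish the stored entities every freq_minutes."""
    while not stop_event.is_set():
        publish_once()                 # e.g. re-run the publish-data logic
        time.sleep(freq_minutes * 60)  # wait one optimization time step

stop_event = threading.Event()
thread = threading.Thread(
    name="continual_publish",
    target=continual_publish_sketch,
    args=(30, lambda: print("publishing stored entities..."), stop_event),
    daemon=True,
)
thread.start()
```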
@@ -238,6 +249,7 @@ if __name__ == "__main__":
238
249
  CONFIG_PATH = os.getenv("CONFIG_PATH", default="/app/config_emhass.yaml")
239
250
  OPTIONS_PATH = os.getenv('OPTIONS_PATH', default="/app/options.json")
240
251
  DATA_PATH = os.getenv("DATA_PATH", default="/app/data/")
252
+ ROOT_PATH = os.getenv("ROOT_PATH", default=str(Path(__file__).parent))
241
253
 
242
254
  #options None by default
243
255
  options = None
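The new `ROOT_PATH` environment variable defaults to the installed package directory instead of being derived from the config path; a short sketch of how the path dictionary ends up being built (defaults copied from this file):

```python
import os
from pathlib import Path

emhass_conf = {
    "config_path": Path(os.getenv("CONFIG_PATH", default="/app/config_emhass.yaml")),
    "data_path": Path(os.getenv("DATA_PATH", default="/app/data/")),
    "root_path": Path(os.getenv("ROOT_PATH", default=str(Path(__file__).parent))),
}
print(emhass_conf)
```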
@@ -273,10 +285,11 @@ if __name__ == "__main__":
273
285
  #save paths to dictionary
274
286
  config_path = Path(CONFIG_PATH)
275
287
  data_path = Path(DATA_PATH)
288
+ root_path = Path(ROOT_PATH)
276
289
  emhass_conf = {}
277
290
  emhass_conf['config_path'] = config_path
278
291
  emhass_conf['data_path'] = data_path
279
- emhass_conf['root_path'] = Path(config_path).parent #assume root is parent of config_path
292
+ emhass_conf['root_path'] = root_path
280
293
 
281
294
  # Read the example default config file
282
295
  if config_path.exists():
@@ -446,6 +459,18 @@ if __name__ == "__main__":
446
459
  app.logger.addHandler(fileLogger)
447
460
  clearFileLog() #Clear Action File logger file, ready for new instance
448
461
 
462
+
463
+ #If entity_path exists, remove any entity/metadata files
464
+ entity_path = emhass_conf['data_path'] / "entities"
465
+ if os.path.exists(entity_path):
466
+ entity_pathContents = os.listdir(entity_path)
467
+ if len(entity_pathContents) > 0:
468
+ for entity in entity_pathContents:
469
+ os.remove(entity_path / entity)
470
+
471
+ # Initialise continual publish thread list
472
+ continual_publish_thread = []
473
+
449
474
  # Launch server
450
475
  port = int(os.environ.get('PORT', 5000))
451
476
  app.logger.info("Launching the emhass webserver at: http://"+web_ui_url+":"+str(port))
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: emhass
3
- Version: 0.9.1
3
+ Version: 0.10.0
4
4
  Summary: An Energy Management System for Home Assistant
5
5
  Home-page: https://github.com/davidusb-geek/emhass
6
6
  Author: David HERNANDEZ
@@ -285,6 +285,8 @@ sudo chmod +x /home/user/emhass/scripts/publish_data.sh
285
285
  ```
286
286
  ### Common for any installation method
287
287
 
288
+ #### Option 1, Home Assistant automate publish
289
+
288
290
  In `automations.yaml`:
289
291
  ```yaml
290
292
  - alias: EMHASS day-ahead optimization
@@ -300,9 +302,36 @@ In `automations.yaml`:
300
302
  action:
301
303
  - service: shell_command.publish_data
302
304
  ```
303
- In these automations the day-ahead optimization is performed everyday at 5:30am and the data is published every 5 minutes.
305
+ In these automations the day-ahead optimization is performed once a day, every day at 5:30 am, and the data *(the output of the optimization)* is published every 5 minutes.
306
+
307
+ #### Option 2, EMHASS automate publish
304
308
 
305
- The final action will be to link a sensor value in Home Assistant to control the switch of a desired controllable load. For example imagine that I want to control my water heater and that the `publish-data` action is publishing the optimized value of a deferrable load that I want to be linked to my water heater desired behavior. In this case we could use an automation like this one below to control the desired real switch:
309
+ In `automations.yaml`:
310
+ ```yaml
311
+ - alias: EMHASS day-ahead optimization
312
+ trigger:
313
+ platform: time
314
+ at: '05:30:00'
315
+ action:
316
+ - service: shell_command.dayahead_optim
317
+ - service: shell_command.publish_data
318
+ ```
319
+ In the configuration page/`config_emhass.yaml`:
320
+ ```json
321
+ "method_ts_round": "first"
322
+ "continual_publish": true
323
+ ```
324
+ In this automation the day-ahead optimization is performed once a day, every day at 5:30 am.
325
+ If the `freq` parameter is set to `30` *(the default)* in the configuration, the day-ahead optimization will generate 48 values *(for each entity)*, one value for every 30 minutes of the day *(i.e. 24 hrs x 2)*.
326
+
327
+ Setting the parameter `continual_publish` to `true` in the configuration page allows EMHASS to store the optimization results as entities/sensors in separate .json files. `continual_publish` will then periodically (every `freq` minutes) run a publish and push the optimization results of each generated entity/sensor to Home Assistant. Each time a publish runs, the current state of the sensor/entity is updated by comparing the stored values' timestamps, the current timestamp and [`"method_ts_round": "first"`](#the-publish-data-specificities) to select the optimal stored value (one of the 48) for the current state.
328
+
329
+ Options 1 and 2 are very similar; however, option 2 (`continual_publish`) requires a CPU thread to run constantly inside EMHASS, lowering efficiency. The reasons why you may pick one over the other are explained in more detail below in [continual_publish](#continual_publish-emhass-automation).
330
+
331
+ Lastly, we can link an EMHASS-published entity/sensor's current state to a Home Assistant on/off switch, controlling a desired controllable load.
332
+ For example, imagine that I want to control my water heater. I can use a published `deferrable` EMHASS entity to control my water heater's desired behavior. In this case, we could use an automation like the one below to turn the water heater on and off:
333
+
334
+ Turning the water heater on:
306
335
  ```yaml
307
336
  automation:
308
337
  - alias: Water Heater Optimized ON
@@ -317,7 +346,7 @@ automation:
317
346
  - service: homeassistant.turn_on
318
347
  entity_id: switch.water_heater_switch
319
348
  ```
320
- A second automation should be used to turn off the switch:
349
+ Turning the water heater off:
321
350
  ```yaml
322
351
  automation:
323
352
  - alias: Water Heater Optimized OFF
@@ -332,14 +361,15 @@ automation:
332
361
  - service: homeassistant.turn_off
333
362
  entity_id: switch.water_heater_switch
334
363
  ```
364
+ The result of these automations is to turn the Home Assistant entity `switch.water_heater_switch` on and off using the current state of the EMHASS entity `sensor.p_deferrable0`. `sensor.p_deferrable0` is the entity generated by the EMHASS day-ahead optimization and published in the examples above. Its current state is updated every 30 minutes (or every `freq` minutes) via automated publish option 1 or 2 *(selecting one of the 48 stored data values)*.
335
365
 
336
366
  ## The publish-data specificities
337
367
 
338
- The `publish-data` command will push to Home Assistant the optimization results for each deferrable load defined in the configuration. For example if you have defined two deferrable loads, then the command will publish `sensor.p_deferrable0` and `sensor.p_deferrable1` to Home Assistant. When the `dayahead-optim` is launched, after the optimization, a csv file will be saved on disk. The `publish-data` command will load the latest csv file and look for the closest timestamp that match the current time using the `datetime.now()` method in Python. This means that if EMHASS is configured for 30min time step optimizations, the csv will be saved with timestamps 00:00, 00:30, 01:00, 01:30, ... and so on. If the current time is 00:05, then the closest timestamp of the optimization results that will be published is 00:00. If the current time is 00:25, then the closest timestamp of the optimization results that will be published is 00:30.
368
+ `publish-data` (which is run either manually, or automatically via `continual_publish` or a Home Assistant automation) will push the optimization results to Home Assistant for each deferrable load defined in the configuration. For example, if you have defined two deferrable loads, then the command will publish `sensor.p_deferrable0` and `sensor.p_deferrable1` to Home Assistant. When `dayahead-optim` is launched, after the optimization, either entity .json files or a csv file will be saved on disk. The `publish-data` command will load the latest csv/.json files to look for the closest timestamp that matches the current time, using the `datetime.now()` method in Python. This means that if EMHASS is configured for 30-minute time step optimizations, the csv/.json will be saved with timestamps 00:00, 00:30, 01:00, 01:30, ... and so on. If the current time is 00:05, and the parameter `method_ts_round` is set to `nearest` in the configuration, then the closest timestamp of the optimization results that will be published is 00:00. If the current time is 00:25, then the closest timestamp published is 00:30.
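To make the timestamp matching concrete, here is a small sketch (not the EMHASS implementation) of selecting which stored value becomes the current state, assuming `method_ts_round: first` floors to the start of the current time step and `nearest` picks the closest timestamp:

```python
from datetime import datetime
import pandas as pd

# Illustrative 30-minute optimization result index for today (48 timestamps).
index = pd.date_range(start=pd.Timestamp.now().normalize(), periods=48, freq="30min")
now = pd.Timestamp(datetime.now())

idx_first = index.get_indexer([now], method="ffill")[0]      # "first": floor to the current step
idx_nearest = index.get_indexer([now], method="nearest")[0]  # "nearest": closest timestamp

print("first   ->", index[idx_first])
print("nearest ->", index[idx_nearest])
```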
339
369
 
340
370
  The `publish-data` command will also publish PV and load forecast data on sensors `p_pv_forecast` and `p_load_forecast`. If using a battery, then the battery optimized power and the SOC will be published on sensors `p_batt_forecast` and `soc_batt_forecast`. On these sensors the future values are passed as nested attributes.
341
371
 
342
- It is possible to provide custm sensor names for all the data exported by the `publish-data` command. For this, when using the `publish-data` endpoint just add some runtime parameters as dictionaries like this:
372
+ If you run publish manually *(or via a Home Assistant automation)*, it is possible to provide custom sensor names for all the data exported by the `publish-data` command. For this, when using the `publish-data` endpoint, just add some runtime parameters as dictionaries like this:
343
373
  ```yaml
344
374
  shell_command:
345
375
  publish_data: "curl -i -H \"Content-Type:application/json\" -X POST -d '{\"custom_load_forecast_id\": {\"entity_id\": \"sensor.p_load_forecast\", \"unit_of_measurement\": \"W\", \"friendly_name\": \"Load Power Forecast\"}}' http://localhost:5000/action/publish-data"
@@ -387,12 +417,85 @@ In EMHASS we have basically 4 forecasts to deal with:
387
417
 
388
418
  - PV production selling price forecast: at what price are you selling your excess PV production on the next 24h. This is given in EUR/kWh.
389
419
 
390
- The sensor containing the load data should be specified in parameter `var_load` in the configuration file. As we want to optimize the household energies, when need to forecast the load power conumption. The default method for this is a naive approach using 1-day persistence. The load data variable should not contain the data from the deferrable loads themselves. For example, lets say that you set your deferrable load to be the washing machine. The variable that you should enter in EMHASS will be: `var_load: 'sensor.power_load_no_var_loads'` and `sensor.power_load_no_var_loads = sensor.power_load - sensor.power_washing_machine`. This is supposing that the overall load of your house is contained in variable: `sensor.power_load`. The sensor `sensor.power_load_no_var_loads` can be easily created with a new template sensor in Home Assistant.
420
+ The sensor containing the load data should be specified in the parameter `var_load` in the configuration file. As we want to optimize the household energy, we need to forecast the load power consumption. The default method for this is a naive approach using 1-day persistence. The load data variable should not contain the data from the deferrable loads themselves. For example, let's say that you set your deferrable load to be the washing machine. The variable that you should enter in EMHASS will be: `var_load: 'sensor.power_load_no_var_loads'` and `sensor.power_load_no_var_loads = sensor.power_load - sensor.power_washing_machine`. This is supposing that the overall load of your house is contained in the variable `sensor.power_load`. The sensor `sensor.power_load_no_var_loads` can be easily created with a new template sensor in Home Assistant.
391
421
 
392
422
  If you are implementing an MPC controller, then you will also need to provide some data at the optimization runtime using the key `runtimeparams`.
393
423
 
394
424
  The valid values to pass for both forecast data and MPC related data are explained below.
395
425
 
426
+ ### Alternative publish methods
427
+ Due to the flexibility of EMHASS, multiple different approaches to publishing the optimization results have been created. Select the option that best meets your use case:
428
+
429
+ #### Publish last optimization *(manual)*
430
+ By default, running an optimization in EMHASS will output the results into the csv file `data_path/opt_res_latest.csv` *(overwriting the existing data in that file)*. Run the publish command to publish the last optimization saved in `opt_res_latest.csv`:
431
+ ```bash
432
+ # RUN dayahead
433
+ curl -i -H 'Content-Type:application/json' -X POST -d {} http://localhost:5000/action/dayahead-optim
434
+ # Then publish the results of dayahead
435
+ curl -i -H 'Content-Type:application/json' -X POST -d {} http://localhost:5000/action/publish-data
436
+ ```
437
+ *Note: the published entities from the publish-data action will not automatically update the entities' current state (the current state being what Home Assistant automations check to turn appliances on and off). To update the EMHASS entities' state, another publish has to be run later, when the current time matches the next value's timestamp (e.g. every 30 minutes). See the examples below for methods to automate the publish action.*
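The same two calls can be scripted from Python instead of curl, for example when testing outside Home Assistant (host and port are the defaults used in the examples above):

```python
import requests

base = "http://localhost:5000/action"

# Run the day-ahead optimization, then publish the freshly saved results.
requests.post(f"{base}/dayahead-optim", json={}, timeout=300).raise_for_status()
requests.post(f"{base}/publish-data", json={}, timeout=60).raise_for_status()
```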
438
+
439
+ #### continual_publish *(EMHASS Automation)*
440
+ As discussed in [Common for any installation method - option 2](#option-2-emhass-automate-publish), setting `continual_publish` to `true` in the configuration saves the output of the optimization into the `data_path/entities` folder *(a .json file for each sensor/entity)*. A constant loop (every `freq` minutes) will run, observe the .json files in that folder, and publish the saved files periodically (updating the current state of each entity by comparing `datetime.now()` with the saved data value timestamps).
441
+
442
+ If you wish to run multiple different optimizations, you can set the runtime parameter `publish_prefix` to something like `"mpc_"` or `"dh_"`. This will generate unique entity_id names per optimization and save these unique entities as separate files in the folder. All the entity files will then be updated when the next loop iteration runs. If a different `freq` integer was passed as a runtime parameter in an optimization, the `continual_publish` loop will be based on the lowest `freq` saved. An example:
443
+
444
+ ```bash
445
+ # RUN dayahead, with freq=30 (default), prefix=dh_
446
+ curl -i -H 'Content-Type:application/json' -X POST -d '{"publish_prefix":"dh_"}' http://localhost:5000/action/dayahead-optim
447
+ # RUN MPC, with freq=5, prefix=mpc_
448
+ curl -i -H 'Content-Type:application/json' -X POST -d '{"freq":5,"publish_prefix":"mpc_"}' http://localhost:5000/action/naive-mpc-optim
449
+ ```
450
+ This will tell `continual_publish` to loop every 5 minutes, based on the `freq` passed in the MPC call. All entities from the output of the day-ahead ("dh_") and MPC ("mpc_") optimizations will be published every 5 minutes.
451
+
452
+ </br>
453
+
454
+ *It is recommended to use the two other options below once you have a more advanced understanding of EMHASS and/or Home Assistant.*
455
+
456
+ #### Mixture of continual_publish and manual *(Home Assistant Automation for Publish)*
457
+
458
+ You can choose to save one optimization for continual_publish and bypass another optimization by setting the `"continual_publish": false` runtime parameter:
459
+ ```bash
460
+ # RUN dayahead, with freq=30 (default), prefix=dh_, included into continual_publish
461
+ curl -i -H 'Content-Type:application/json' -X POST -d '{"publish_prefix":"dh_"}' http://localhost:5000/action/dayahead-optim
462
+
463
+ # RUN MPC, with freq=5, prefix=mpc_, Manually publish, excluded from continual_publish loop
464
+ curl -i -H 'Content-Type:application/json' -X POST -d '{"continual_publish":false,"freq":5,"publish_prefix":"mpc_"}' http://localhost:5000/action/naive-mpc-optim
465
+ # Publish MPC output
466
+ curl -i -H 'Content-Type:application/json' -X POST -d {} http://localhost:5000/action/publish-data
467
+ ```
468
+ This example saves the day-ahead optimization into `data_path/entities` as .json files, which are included in the `continual_publish` loop (publishing every 30 minutes). The MPC optimization is not saved in `data_path/entities`, only in `data_path/opt_res_latest.csv`, so a publish-data action has to be run manually (or via a Home Assistant automation) for the MPC results.
469
+
470
+ #### Manual *(Home Assistant Automation for Publish)*
471
+
472
+ For users who wish to have full control over exactly when to run a publish, and who want to save multiple different optimizations, the `entity_save` runtime parameter has been created. It saves the optimization output entities to .json files while `continual_publish` is set to `false` in the configuration, allowing the user to reference the saved .json files manually via a publish:
473
+
474
+ In the configuration page/`config_emhass.yaml`:
475
+ ```json
476
+ "continual_publish": false
477
+ ```
478
+ POST action:
479
+ ```bash
480
+ # RUN dayahead, with freq=30 (default), prefix=dh_, save entity
481
+ curl -i -H 'Content-Type:application/json' -X POST -d '{"entity_save": true, "publish_prefix":"dh_"}' http://localhost:5000/action/dayahead-optim
482
+ # RUN MPC, with freq=5, prefix=mpc_, save entity
483
+ curl -i -H 'Content-Type:application/json' -X POST -d '{"entity_save": true", "freq":5,"publish_prefix":"mpc_"}' http://localhost:5000/action/naive-mpc-optim
484
+ ```
485
+ You can then reference these saved .json entities via their `publish_prefix`. Include the same `publish_prefix` in the `publish-data` action:
486
+ ```bash
487
+ #Publish the MPC optimization ran above
488
+ curl -i -H 'Content-Type:application/json' -X POST -d '{"publish_prefix":"mpc_"}' http://localhost:5000/action/publish-data
489
+ ```
490
+ This will publish all entities from the MPC (mpc_) optimization above.
491
+ </br>
492
+ Alternatively, you can choose to publish all the saved .json files by setting `publish_prefix` to `all`:
493
+ ```bash
494
+ #Publish all saved entities
495
+ curl -i -H 'Content-Type:application/json' -X POST -d '{"publish_prefix":"all"}' http://localhost:5000/action/publish-data
496
+ ```
497
+ This action will publish the day-ahead (dh_) and MPC (mpc_) optimization results from the optimizations above.
498
+
396
499
  ### Forecast data
397
500
 
398
501
  It is possible to provide EMHASS with your own forecast data. For this just add the data as list of values to a data dictionary during the call to `emhass` using the `runtimeparams` option.
@@ -491,7 +594,7 @@ Check the dedicated section in the documentation here: [https://emhass.readthedo
491
594
 
492
595
  ## Development
493
596
 
494
- Pull request are very much accepted on this project. For development you can find some instructions here [Development](https://emhass.readthedocs.io/en/latest/develop.html)
597
+ Pull requests are very much accepted on this project. For development you can find some instructions here: [Development](https://emhass.readthedocs.io/en/latest/develop.html).
495
598
 
496
599
  ## Troubleshooting
497
600
 
@@ -1,12 +1,12 @@
1
1
  emhass/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- emhass/command_line.py,sha256=mxMvxqN5rp30AZ_QR4uWWxWu5NxLMNesgf44pJ-sVWk,47908
3
- emhass/forecast.py,sha256=lZZ7B8CUj-r9aXk1YDVr3bF7vgH-KUM1MoA99F0IBmA,47076
2
+ emhass/command_line.py,sha256=RPMFS86McukgzoGmeq-DEMwyCZrpFx6Ko27Ndj_A0Fc,57211
3
+ emhass/forecast.py,sha256=VdB8O6V4LYXdfIg7fHNRIVdvj2Y8Ow8Z7r1Ck3hvvh8,47631
4
4
  emhass/machine_learning_forecaster.py,sha256=az8cYRCckmR-WEdXyigbe8udtbj82yfahPmow4gue4s,15655
5
5
  emhass/machine_learning_regressor.py,sha256=WmR9ODWkY64RAniqLowwf5tZWzPTVp5ftCTKNtzcd6I,10407
6
- emhass/optimization.py,sha256=ijiSBKdU0fS6TBpeoBo-CoPz6lBMU4nnsi6aiZi1J0I,37252
7
- emhass/retrieve_hass.py,sha256=Xz3dYfQri-6irltbPr4QDDI7GGLJPwW3WEzRyHeC62Q,20391
8
- emhass/utils.py,sha256=4sm8QMp2rU1DZVM7XYT4FK5O7z_GEZTXbZcfn8nyBgc,47820
9
- emhass/web_server.py,sha256=UfPUBA-ct1Su8cQFyufnW0Bb4BBlpGHF3yXN47sXkig,23055
6
+ emhass/optimization.py,sha256=bNn3txHurUNrnW-FFmY9MRokn1X1V50pjK9TNH3z-Es,47305
7
+ emhass/retrieve_hass.py,sha256=k-BPZMqW-uQ95Q7Gzz93nPkLHStDCkI7-047GVYBGC0,22983
8
+ emhass/utils.py,sha256=uc2yrcvA-GwJPsGOQIc_tEf3MxtGu_tMNf1PyyEx3Aw,48112
9
+ emhass/web_server.py,sha256=8hzVjDTCSMdPwpgzstGiH11IArIYiaYGSYkAiMq4S78,24095
10
10
  emhass/data/cec_inverters.pbz2,sha256=tK8FvAUDW0uYez8EPttdCJwHhpPofclYV6GhhNZL0Pk,168272
11
11
  emhass/data/cec_modules.pbz2,sha256=8vEaysgYffXg3KUl8XSF36Mdywzi3LpEtUN_qenjO9s,1655747
12
12
  emhass/static/advanced.html,sha256=15tYNw9ck_ds1zxQk0XXj7wmS9px_0x0GDx57cFx3vA,1970
@@ -18,9 +18,9 @@ emhass/static/img/emhass_logo_short.svg,sha256=yzMcqtBRCV8rH84-MwnigZh45_f9Eoqwh
18
18
  emhass/static/img/feather-sprite.svg,sha256=VHjMJQg88wXa9CaeYrKGhNtyK0xdd47zCqwSIa-hxo8,60319
19
19
  emhass/templates/index.html,sha256=_BsvUJ981uSQkx5H9tq_3es__x4WdPiOy7FjNoNYU9w,2744
20
20
  emhass/templates/template.html,sha256=TkGgMecQEbFUZA4ymPwMUzNjKHsENvCgroUWbPt7G4Y,158
21
- emhass-0.9.1.dist-info/LICENSE,sha256=1X3-S1yvOCBDBeox1aK3dq00m7dA8NDtcPrpKPISzbE,1077
22
- emhass-0.9.1.dist-info/METADATA,sha256=jc55aJnUsaxY7_yKqCux0BSmVdySw2ftms7fqnJfMCc,35940
23
- emhass-0.9.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
24
- emhass-0.9.1.dist-info/entry_points.txt,sha256=6Bp1NFOGNv_fSTxYl1ke3K3h3aqAcBxI-bgq5yq-i1M,52
25
- emhass-0.9.1.dist-info/top_level.txt,sha256=L7fIX4awfmxQbAePtSdVg2e6x_HhghfReHfsKSpKr9I,7
26
- emhass-0.9.1.dist-info/RECORD,,
21
+ emhass-0.10.0.dist-info/LICENSE,sha256=1X3-S1yvOCBDBeox1aK3dq00m7dA8NDtcPrpKPISzbE,1077
22
+ emhass-0.10.0.dist-info/METADATA,sha256=602w53NAKX6TNa7sQcTVpyZlUrlgcJn1yxX2oz9Hn_M,44352
23
+ emhass-0.10.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
24
+ emhass-0.10.0.dist-info/entry_points.txt,sha256=6Bp1NFOGNv_fSTxYl1ke3K3h3aqAcBxI-bgq5yq-i1M,52
25
+ emhass-0.10.0.dist-info/top_level.txt,sha256=L7fIX4awfmxQbAePtSdVg2e6x_HhghfReHfsKSpKr9I,7
26
+ emhass-0.10.0.dist-info/RECORD,,