emhass 0.9.1__py3-none-any.whl → 0.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
emhass/command_line.py CHANGED
@@ -3,6 +3,7 @@
 
 import argparse
 import os
+import time
 import pathlib
 import logging
 import json
@@ -285,6 +286,18 @@ def perfect_forecast_optim(input_data_dict: dict, logger: logging.Logger,
     if not debug:
         opt_res.to_csv(
             input_data_dict['emhass_conf']['data_path'] / filename, index_label='timestamp')
+
+    if not isinstance(input_data_dict["params"], dict):
+        params = json.loads(input_data_dict["params"])
+    else:
+        params = input_data_dict["params"]
+
+    # If continual_publish is enabled, save the perfect optimization results to data_path/entities as json
+    if input_data_dict["retrieve_hass_conf"].get("continual_publish", False) or params["passed_data"].get("entity_save", False):
+        # Trigger the publish function, save entity data and do not post to HA
+        publish_data(input_data_dict, logger, entity_save=True, dont_post=True)
+
     return opt_res
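
The "params" normalization above recurs in each action below: params may arrive as a JSON string (e.g. from the web server) or as an already-parsed dict. A minimal sketch of the pattern, with a hypothetical helper name:

    import json

    def _params_as_dict(params):
        # Mirrors the isinstance check used by perfect_forecast_optim and
        # the other actions: accept either a dict or a JSON string.
        return params if isinstance(params, dict) else json.loads(params)
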
@@ -330,7 +343,19 @@ def dayahead_forecast_optim(input_data_dict: dict, logger: logging.Logger,
     filename = "opt_res_latest.csv"
     if not debug:
         opt_res_dayahead.to_csv(
-            input_data_dict['emhass_conf']['data_path'] / filename, index_label='timestamp')
+            input_data_dict['emhass_conf']['data_path'] / filename, index_label='timestamp')
+
+    if not isinstance(input_data_dict["params"], dict):
+        params = json.loads(input_data_dict["params"])
+    else:
+        params = input_data_dict["params"]
+
+    # If continual_publish is enabled, save the day-ahead optimization results to data_path/entities as json
+    if input_data_dict["retrieve_hass_conf"].get("continual_publish", False) or params["passed_data"].get("entity_save", False):
+        # Trigger the publish function, save entity data and do not post to HA
+        publish_data(input_data_dict, logger, entity_save=True, dont_post=True)
+
     return opt_res_dayahead
 
 
@@ -384,7 +409,18 @@ def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger,
     filename = "opt_res_latest.csv"
     if not debug:
         opt_res_naive_mpc.to_csv(
-            input_data_dict['emhass_conf']['data_path'] / filename, index_label='timestamp')
+            input_data_dict['emhass_conf']['data_path'] / filename, index_label='timestamp')
+
+    if not isinstance(input_data_dict["params"], dict):
+        params = json.loads(input_data_dict["params"])
+    else:
+        params = input_data_dict["params"]
+
+    # If continual_publish is enabled, save the MPC results to data_path/entities as json
+    if input_data_dict["retrieve_hass_conf"].get("continual_publish", False) or params["passed_data"].get("entity_save", False):
+        # Trigger the publish function, save entity data and do not post to HA
+        publish_data(input_data_dict, logger, entity_save=True, dont_post=True)
+
     return opt_res_naive_mpc
 
 
@@ -648,9 +684,12 @@ def regressor_model_predict(input_data_dict: dict, logger: logging.Logger,
     return prediction
 
 
+
 def publish_data(input_data_dict: dict, logger: logging.Logger,
                  save_data_to_file: Optional[bool] = False,
-                 opt_res_latest: Optional[pd.DataFrame] = None) -> pd.DataFrame:
+                 opt_res_latest: Optional[pd.DataFrame] = None,
+                 entity_save: Optional[bool] = False,
+                 dont_post: Optional[bool] = False) -> pd.DataFrame:
     """
     Publish the data obtained from the optimization results.
 
@@ -662,15 +701,55 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
     :type save_data_to_file: bool, optional
     :return: The output data of the optimization read from a CSV file in the data folder
     :rtype: pd.DataFrame
+    :param entity_save: Save built entities to data_path/entities
+    :type entity_save: bool, optional
+    :param dont_post: Do not post to Home Assistant. Works with entity_save
+    :type dont_post: bool, optional
 
     """
     logger.info("Publishing data to HASS instance")
+
+    if not isinstance(input_data_dict["params"], dict):
+        params = json.loads(input_data_dict["params"])
+    else:
+        params = input_data_dict["params"]
+
     # Check if a day ahead optimization has been performed (read CSV file)
     if save_data_to_file:
         today = datetime.now(timezone.utc).replace(
             hour=0, minute=0, second=0, microsecond=0
         )
         filename = "opt_res_dayahead_" + today.strftime("%Y_%m_%d") + ".csv"
+    # If publish_prefix is passed, check whether there are saved entities in data_path/entities with that prefix and publish them to the results
+    elif params["passed_data"].get("publish_prefix", "") != "" and not dont_post:
+        opt_res_list = []
+        opt_res_list_names = []
+        publish_prefix = params["passed_data"]["publish_prefix"]
+        entity_path = input_data_dict['emhass_conf']['data_path'] / "entities"
+
+        # Check if there are items in entity_path
+        if os.path.exists(entity_path) and len(os.listdir(entity_path)) > 0:
+            # Obtain all files in entity_path
+            entity_path_contents = os.listdir(entity_path)
+            for entity in entity_path_contents:
+                if entity != "metadata.json":
+                    # If publish_prefix is "all", publish all saved entities to Home Assistant
+                    # If publish_prefix matches the prefix of a saved entity, publish it to Home Assistant
+                    if publish_prefix in entity or publish_prefix == "all":
+                        entity_data = publish_json(entity, input_data_dict, entity_path, logger)
+                        if not isinstance(entity_data, bool):
+                            opt_res_list.append(entity_data)
+                            opt_res_list_names.append(entity.replace(".json", ""))
+                        else:
+                            return False
+            # Build a DataFrame with the published entities
+            opt_res = pd.concat(opt_res_list, axis=1)
+            opt_res.columns = opt_res_list_names
+            return opt_res
+        else:
+            logger.warning("no saved entity json files in path: " + str(entity_path))
+            logger.warning("falling back to opt_res_latest")
+            filename = "opt_res_latest.csv"
     else:
         filename = "opt_res_latest.csv"
     if opt_res_latest is None:
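
Taken together, the new flags and the publish_prefix branch give publish_data two complementary call patterns. A hedged sketch of how a caller might drive them (flag values are illustrative):

    # Save the built entities to data_path/entities without posting to Home Assistant
    publish_data(input_data_dict, logger, entity_save=True, dont_post=True)

    # With params["passed_data"]["publish_prefix"] set (e.g. "dayahead_" or "all"),
    # the same call rebuilds and posts every matching saved entity instead of
    # reading opt_res_latest.csv; with no prefix it falls back to the CSV file.
    opt_res = publish_data(input_data_dict, logger)
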
@@ -698,7 +777,6 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
     if idx_closest == -1:
         idx_closest = opt_res_latest.index.get_indexer([now_precise], method="nearest")[0]
     # Publish the data
-    params = json.loads(input_data_dict["params"])
     publish_prefix = params["passed_data"]["publish_prefix"]
     # Publish PV forecast
     custom_pv_forecast_id = params["passed_data"]["custom_pv_forecast_id"]
@@ -710,6 +788,8 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         custom_pv_forecast_id["friendly_name"],
         type_var="power",
         publish_prefix=publish_prefix,
+        save_entities=entity_save,
+        dont_post=dont_post
     )
     # Publish Load forecast
     custom_load_forecast_id = params["passed_data"]["custom_load_forecast_id"]
@@ -721,6 +801,8 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         custom_load_forecast_id["friendly_name"],
         type_var="power",
         publish_prefix=publish_prefix,
+        save_entities=entity_save,
+        dont_post=dont_post
     )
     cols_published = ["P_PV", "P_Load"]
     # Publish deferrable loads
@@ -742,6 +824,8 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
             custom_deferrable_forecast_id[k]["friendly_name"],
             type_var="deferrable",
             publish_prefix=publish_prefix,
+            save_entities=entity_save,
+            dont_post=dont_post
         )
         cols_published = cols_published + ["P_deferrable{}".format(k)]
     # Publish battery power
@@ -760,6 +844,8 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
             custom_batt_forecast_id["friendly_name"],
             type_var="batt",
             publish_prefix=publish_prefix,
+            save_entities=entity_save,
+            dont_post=dont_post
         )
         cols_published = cols_published + ["P_batt"]
         custom_batt_soc_forecast_id = params["passed_data"][
@@ -773,6 +859,8 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
             custom_batt_soc_forecast_id["friendly_name"],
             type_var="SOC",
             publish_prefix=publish_prefix,
+            save_entities=entity_save,
+            dont_post=dont_post
         )
         cols_published = cols_published + ["SOC_opt"]
     # Publish grid power
@@ -785,6 +873,8 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         custom_grid_forecast_id["friendly_name"],
         type_var="power",
         publish_prefix=publish_prefix,
+        save_entities=entity_save,
+        dont_post=dont_post
     )
     cols_published = cols_published + ["P_grid"]
     # Publish total value of cost function
@@ -798,7 +888,10 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         custom_cost_fun_id["friendly_name"],
         type_var="cost_fun",
         publish_prefix=publish_prefix,
+        save_entities=entity_save,
+        dont_post=dont_post
     )
+    # cols_published = cols_published + col_cost_fun
     # Publish the optimization status
     custom_cost_fun_id = params["passed_data"]["custom_optim_status_id"]
     if "optim_status" not in opt_res_latest:
@@ -814,6 +907,8 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         custom_cost_fun_id["friendly_name"],
         type_var="optim_status",
         publish_prefix=publish_prefix,
+        save_entities=entity_save,
+        dont_post=dont_post
     )
     cols_published = cols_published + ["optim_status"]
     # Publish unit_load_cost
@@ -826,6 +921,8 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         custom_unit_load_cost_id["friendly_name"],
         type_var="unit_load_cost",
         publish_prefix=publish_prefix,
+        save_entities=entity_save,
+        dont_post=dont_post
     )
     cols_published = cols_published + ["unit_load_cost"]
     # Publish unit_prod_price
@@ -838,6 +935,8 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         custom_unit_prod_price_id["friendly_name"],
         type_var="unit_prod_price",
         publish_prefix=publish_prefix,
+        save_entities=entity_save,
+        dont_post=dont_post
     )
     cols_published = cols_published + ["unit_prod_price"]
     # Create a DF summarizing what has been published
@@ -845,6 +944,108 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         opt_res_latest.index[idx_closest]]]
     return opt_res
 
+def continual_publish(input_data_dict, entity_path, logger):
+    """
+    If continual_publish is true and an entity file is saved in data_path/entities, continually publish the sensors at the freq rate, updating each entity's current state value based on the timestamp
+
+    :param input_data_dict: A dictionary with multiple data used by the action functions
+    :type input_data_dict: dict
+    :param entity_path: Path for entities folder in data_path
+    :type entity_path: Path
+    :param logger: The passed logger object
+    :type logger: logging.Logger
+
+    """
+    logger.info("Continual publish thread service started")
+    freq = input_data_dict['retrieve_hass_conf'].get("freq", pd.to_timedelta(1, "minutes"))
+    entity_path_contents = []
+
+    while True:
+        # Sleep for freq seconds (using the current time as a reference for the time left)
+        time.sleep(max(0, freq.total_seconds() - (datetime.now(input_data_dict["retrieve_hass_conf"]["time_zone"]).timestamp() % 60)))
+
+        # Loop through all saved entity files
+        if os.path.exists(entity_path) and len(os.listdir(entity_path)) > 0:
+            entity_path_contents = os.listdir(entity_path)
+            for entity in entity_path_contents:
+                if entity != "metadata.json":
+                    # Call publish_json with the entity file, build the entity, and publish
+                    publish_json(entity, input_data_dict, entity_path, logger, "continual_publish")
+    # This function should never return
+    return False
+
+def publish_json(entity, input_data_dict, entity_path, logger, reference: Optional[str] = ""):
+    """
+    Extract saved entity data from a .json file (in data_path/entities), build the entity, and post the results via post_data
+
+    :param entity: Name of the json file containing the entity data
+    :type entity: str
+    :param input_data_dict: A dictionary with multiple data used by the action functions
+    :type input_data_dict: dict
+    :param entity_path: Path for entities folder in data_path
+    :type entity_path: Path
+    :param logger: The passed logger object
+    :type logger: logging.Logger
+    :param reference: String identifying who ran the function
+    :type reference: str, optional
+
+    """
+    # Retrieve entity metadata from file
+    if os.path.isfile(entity_path / "metadata.json"):
+        with open(entity_path / "metadata.json", "r") as file:
+            metadata = json.load(file)
+            if metadata.get("lowest_freq", None) is not None:
+                freq = pd.to_timedelta(metadata["lowest_freq"], "minutes")
+    else:
+        logger.error("unable to locate metadata.json in: " + str(entity_path))
+        return False
+
+    # Round the current timecode (now)
+    now_precise = datetime.now(input_data_dict["retrieve_hass_conf"]["time_zone"]).replace(second=0, microsecond=0)
+
+    # Retrieve entity data from file
+    entity_data = pd.read_json(entity_path / entity, orient='index')
+
+    # Remove ".json" from the string for the entity_id
+    entity_id = entity.replace(".json", "")
+
+    # Adjust the DataFrame built from the received entity json file
+    entity_data.columns = [metadata[entity_id]["name"]]
+    entity_data.index.name = "timestamp"
+    entity_data.index = pd.to_datetime(entity_data.index).tz_convert(input_data_dict["retrieve_hass_conf"]["time_zone"])
+    entity_data.index.freq = pd.to_timedelta(int(metadata[entity_id]["freq"]), "minutes")
+    # Calculate the current state value
+    if input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "nearest":
+        idx_closest = entity_data.index.get_indexer([now_precise], method="nearest")[0]
+    elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "first":
+        idx_closest = entity_data.index.get_indexer([now_precise], method="ffill")[0]
+    elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "last":
+        idx_closest = entity_data.index.get_indexer([now_precise], method="bfill")[0]
+    if idx_closest == -1:
+        idx_closest = entity_data.index.get_indexer([now_precise], method="nearest")[0]
+
+    # Set the log level for the post
+    if reference == "continual_publish":
+        logger.debug("Auto Published sensor:")
+        logger_levels = "DEBUG"
+    else:
+        logger_levels = "INFO"
+
+    # Post/save the entity
+    input_data_dict["rh"].post_data(
+        data_df=entity_data[metadata[entity_id]["name"]],
+        idx=idx_closest,
+        entity_id=entity_id,
+        unit_of_measurement=metadata[entity_id]["unit_of_measurement"],
+        friendly_name=metadata[entity_id]["friendly_name"],
+        type_var=metadata[entity_id].get("type_var", ""),
+        save_entities=False,
+        logger_levels=logger_levels
+    )
+    return entity_data[metadata[entity_id]["name"]]
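
The loop above is meant to run as a long-lived background service, and publish_json reads a metadata.json that this diff only references. A hedged sketch of both, assuming a daemon thread and an illustrative entity id (neither is spelled out in this diff):

    import threading

    # Launch continual_publish as a daemon thread so it never blocks process
    # exit; the while True loop itself never returns.
    threading.Thread(target=continual_publish,
                     args=(input_data_dict, entity_path, logger),
                     daemon=True).start()

    # Hypothetical data_path/entities/metadata.json, consistent with the keys
    # read above ("lowest_freq" plus per-entity name/freq/unit_of_measurement/
    # friendly_name/type_var):
    # {
    #   "lowest_freq": 30,
    #   "sensor.p_pv_forecast": {
    #     "name": "P_PV",
    #     "freq": 30,
    #     "unit_of_measurement": "W",
    #     "friendly_name": "PV Power Forecast",
    #     "type_var": "power"
    #   }
    # }
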
 
 def main():
     r"""Define the main command line entry function.
@@ -979,7 +1180,7 @@ def main():
         prediction = regressor_model_predict(input_data_dict, logger, debug=args.debug, mlr=mlr)
         opt_res = None
     elif args.action == "publish-data":
-        opt_res = publish_data(input_data_dict, logger)
+        opt_res = publish_data(input_data_dict,logger)
     else:
         logger.error("The passed action argument is not valid")
         logger.error("Try setting --action: perfect-optim, dayahead-optim, naive-mpc-optim, forecast-model-fit, forecast-model-predict, forecast-model-tune or publish-data")
emhass/forecast.py CHANGED
@@ -186,7 +186,7 @@ class Forecast(object):
         self.logger.info("Retrieving weather forecast data using method = "+method)
         self.weather_forecast_method = method # Saving this attribute for later use to identify csv method usage
         if method == 'scrapper':
-            freq_scrap = pd.to_timedelta(60, "minutes") # The scraping time step is 60 min
+            freq_scrap = pd.to_timedelta(60, "minutes") # The scraping time step is 60 min on clearoutside
             forecast_dates_scrap = pd.date_range(start=self.start_forecast,
                                                  end=self.end_forecast-freq_scrap,
                                                  freq=freq_scrap).round(freq_scrap, ambiguous='infer', nonexistent='shift_forward')
@@ -204,7 +204,7 @@ class Forecast(object):
             col_names = [list_names[i].get_text() for i in selected_cols]
             list_tables = [list_tables[i] for i in selected_cols]
             # Building the raw DF container
-            raw_data = pd.DataFrame(index=range(24), columns=col_names, dtype=float)
+            raw_data = pd.DataFrame(index=range(len(forecast_dates_scrap)), columns=col_names, dtype=float)
             for count_col, col in enumerate(col_names):
                 list_rows = list_tables[count_col].find_all('li')
                 for count_row, row in enumerate(list_rows):
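
The resized container matters once delta_forecast spans more than one day. A quick check of the sizing rule with assumed dates (illustrative values only):

    import pandas as pd

    freq_scrap = pd.to_timedelta(60, "minutes")
    start = pd.Timestamp("2024-01-01 00:00")
    end = start + pd.Timedelta(days=2) - freq_scrap  # two-day horizon
    # 48 hourly rows instead of the previously hard-coded 24
    assert len(pd.date_range(start=start, end=end, freq=freq_scrap)) == 48
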
@@ -235,7 +235,8 @@ class Forecast(object):
                 "Authorization": "Bearer " + self.retrieve_hass_conf['solcast_api_key'],
                 "content-type": "application/json",
             }
-            url = "https://api.solcast.com.au/rooftop_sites/"+self.retrieve_hass_conf['solcast_rooftop_id']+"/forecasts?hours=24"
+            days_solcast = int(len(self.forecast_dates)*self.freq.seconds/3600)
+            url = "https://api.solcast.com.au/rooftop_sites/"+self.retrieve_hass_conf['solcast_rooftop_id']+"/forecasts?hours="+str(days_solcast)
             response = get(url, headers=headers)
             '''import bz2 # Uncomment to save serialized data for tests
             import _pickle as cPickle
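
Despite its name, days_solcast holds a number of hours. A worked check of the arithmetic, assuming a 30-minute optimization step over a 24-hour horizon:

    import pandas as pd

    freq = pd.to_timedelta(30, "minutes")  # optimization time step
    n_periods = 48                         # len(self.forecast_dates) over 24 h
    hours = int(n_periods * freq.seconds / 3600)
    assert hours == 24                     # requested as forecasts?hours=24
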
@@ -263,7 +264,11 @@ class Forecast(object):
                 self.retrieve_hass_conf['solar_forecast_kwp'] = 5
             if self.retrieve_hass_conf['solar_forecast_kwp'] == 0:
                 self.logger.warning("The solar_forecast_kwp parameter is set to zero, setting to default 5")
-                self.retrieve_hass_conf['solar_forecast_kwp'] = 5
+                self.retrieve_hass_conf['solar_forecast_kwp'] = 5
+            if self.optim_conf['delta_forecast'].days > 1:
+                self.logger.warning("The free public tier for solar.forecast only provides one day of forecasts")
+                self.logger.warning("Continuing with just the first day of data; the remaining days are filled with 0.0")
+                self.logger.warning("Use one of the other available methods for delta_forecast > 1")
             headers = {
                 "Accept": "application/json"
             }
@@ -289,7 +294,8 @@ class Forecast(object):
                 mask_down_data_df = data_tmp.copy(deep=True).fillna(method = "bfill").isnull()
                 data_tmp.loc[data_tmp.index[mask_up_data_df['yhat']==True],:] = 0.0
                 data_tmp.loc[data_tmp.index[mask_down_data_df['yhat']==True],:] = 0.0
-                data_tmp.interpolate(inplace=True)
+                data_tmp.interpolate(inplace=True, limit=1)
+                data_tmp = data_tmp.fillna(0.0)
                 if len(data) == 0:
                     data = copy.deepcopy(data_tmp)
                 else:
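
The limit=1 argument bounds how far interpolation reaches into a gap; anything it cannot fill is zeroed rather than extrapolated. A small standalone illustration of that behavior:

    import numpy as np
    import pandas as pd

    s = pd.Series([1.0, np.nan, np.nan, 4.0])
    s = s.interpolate(limit=1)  # fills one NaN per gap: [1.0, 2.0, NaN, 4.0]
    s = s.fillna(0.0)           # remaining gaps to zero: [1.0, 2.0, 0.0, 4.0]
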
@@ -417,9 +423,9 @@ class Forecast(object):
         # Setting the main parameters of the PV plant
         location = Location(latitude=self.lat, longitude=self.lon)
         temp_params = TEMPERATURE_MODEL_PARAMETERS['sapm']['close_mount_glass_glass']
-        cec_modules = bz2.BZ2File(pathlib.Path(__file__).parent / 'data/cec_modules.pbz2', "rb")
+        cec_modules = bz2.BZ2File(self.emhass_conf['root_path'] / 'data/cec_modules.pbz2', "rb")
         cec_modules = cPickle.load(cec_modules)
-        cec_inverters = bz2.BZ2File(pathlib.Path(__file__).parent / 'data/cec_inverters.pbz2', "rb")
+        cec_inverters = bz2.BZ2File(self.emhass_conf['root_path'] / 'data/cec_inverters.pbz2', "rb")
         cec_inverters = cPickle.load(cec_inverters)
         if type(self.plant_conf['module_model']) == list:
             P_PV_forecast = pd.Series(0, index=df_weather.index)
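
Switching from pathlib.Path(__file__).parent to self.emhass_conf['root_path'] makes the data lookup location configurable instead of tied to the installed module. A sketch of the emhass_conf mapping this assumes (paths are illustrative, not package defaults):

    import pathlib

    emhass_conf = {
        # package root containing the bundled data/ folder (*.pbz2 files)
        'root_path': pathlib.Path("/app/src/emhass"),
        # runtime folder holding opt_res_latest.csv and the entities/ directory
        'data_path': pathlib.Path("/app/data"),
    }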