emhass 0.12.1__py3-none-any.whl → 0.12.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
emhass/command_line.py CHANGED
@@ -11,7 +11,6 @@ import pickle
 import re
 import time
 from datetime import datetime, timezone
-from distutils.util import strtobool
 from importlib.metadata import version
 from typing import Optional, Tuple
 
@@ -446,7 +445,6 @@ def weather_forecast_cache(
         "forecast",
         logger,
         emhass_conf,
-        {},
     )
     # Make sure weather_forecast_cache is true
     if (params != None) and (params != "null"):
@@ -647,9 +645,12 @@ def naive_mpc_optim(
     prediction_horizon = input_data_dict["params"]["passed_data"]["prediction_horizon"]
     soc_init = input_data_dict["params"]["passed_data"]["soc_init"]
     soc_final = input_data_dict["params"]["passed_data"]["soc_final"]
-    def_total_hours = input_data_dict["params"]["optim_conf"][
-        "operating_hours_of_each_deferrable_load"
-    ]
+    def_total_hours = input_data_dict["params"]["optim_conf"].get(
+        "operating_hours_of_each_deferrable_load", None
+    )
+    def_total_timestep = input_data_dict["params"]["optim_conf"].get(
+        "operating_timesteps_of_each_deferrable_load", None
+    )
     def_start_timestep = input_data_dict["params"]["optim_conf"][
         "start_timesteps_of_each_deferrable_load"
     ]
@@ -664,6 +665,7 @@ def naive_mpc_optim(
         soc_init,
         soc_final,
         def_total_hours,
+        def_total_timestep,
         def_start_timestep,
         def_end_timestep,
     )
@@ -1516,8 +1518,8 @@ def main():
     )
     parser.add_argument(
         "--log2file",
-        type=strtobool,
-        default="False",
+        type=bool,
+        default=False,
         help="Define if we should log to a file or not",
     )
     parser.add_argument(
@@ -1533,7 +1535,10 @@ def main():
         help="Pass runtime optimization parameters as dictionnary",
    )
    parser.add_argument(
-        "--debug", type=strtobool, default="False", help="Use True for testing purposes"
+        "--debug",
+        type=bool,
+        default=False,
+        help="Use True for testing purposes",
    )
    args = parser.parse_args()
 
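A note on the `type=bool` flags introduced above: argparse applies `type` to the raw command-line string, and `bool()` of any non-empty string is `True`. A minimal standalone sketch of that behavior (illustration only, not emhass code):

```python
# Standalone sketch: argparse calls type() on the raw string argument,
# and bool() is truthy for every non-empty string, including "False".
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--debug", type=bool, default=False)

print(parser.parse_args([]).debug)                    # False (default used)
print(parser.parse_args(["--debug", "True"]).debug)   # True
print(parser.parse_args(["--debug", "False"]).debug)  # True: bool("False") is truthy
```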
emhass/data/cec_inverters.pbz2 CHANGED
Binary file
emhass/data/cec_modules.pbz2 CHANGED
Binary file
emhass/data/emhass_inverters.csv ADDED
@@ -0,0 +1,8 @@
+Name,Vac,Pso,Paco,Pdco,Vdco,C0,C1,C2,C3,Pnt,Vdcmax,Idcmax,Mppt_low,Mppt_high,CEC_Date,CEC_hybrid,CEC_Type
+Units,V,W,W,W,V,1/W,1/V,1/V,1/V,W,V,A,V,V,,,
+[0],inv_snl_ac_voltage,inv_snl_pso,inv_snl_paco,inv_snl_pdco,inv_snl_vdco,inv_snl_c0,inv_snl_c1,inv_snl_c2,inv_snl_c3,inv_snl_pnt,inv_snl_vdcmax,inv_snl_idcmax,inv_snl_mppt_low,inv_snl_mppt_hi,inv_cec_date,inv_cec_hybrid,inv_cec_type
+Sungrow: SH25T,400,250,27500,26000,600,-0.0000828,-0.000759,-0.0001722,-0.0000414,25,1100,80,200,950,01/01/2025,Y,Hybrid
+Sungrow: SH20T,400,200,22000,21000,600,-0.00007584,-0.0007245,-0.0001587,-0.00003834,25,1100,80,200,950,01/01/2025,Y,Hybrid
+Sungrow: SH15T,400,150,16500,15700,600,-0.000069,-0.00069,-0.000138,-0.0000345,25,1100,80,200,950,01/01/2025,Y,Hybrid
+Sungrow: SH10RT,400,100,11000,10467,600,-0.00005892,-0.0006555,-0.0001173,-0.00002934,25,1100,80,200,950,01/01/2025,Y,Hybrid
+Sungrow: SH10RS,400,100,10000,9500,600,-0.000065,-0.00075,-0.00013,-0.000032,25,1000,25,250,850,01/01/2025,N,Grid-Tie
emhass/data/emhass_modules.csv ADDED
@@ -0,0 +1,6 @@
+Name,Manufacturer,Technology,Bifacial,STC,PTC,A_c,Length,Width,N_s,I_sc_ref,V_oc_ref,I_mp_ref,V_mp_ref,alpha_sc,beta_oc,T_NOCT,a_ref,I_L_ref,I_o_ref,R_s,R_sh_ref,Adjust,gamma_pmp,BIPV,Version,Date
+Units,,,,,,m2,m,m,,A,V,A,V,A/K,V/K,C,V,A,A,Ohm,Ohm,%,%/K,,,
+[0],lib_manufacturer,cec_material,lib_is_bifacial,,,cec_area,lib_length,lib_width,cec_n_s,cec_i_sc_ref,cec_v_oc_ref,cec_i_mp_ref,cec_v_mp_ref,cec_alpha_sc,cec_beta_oc,cec_t_noct,cec_a_ref,cec_i_l_ref,cec_i_o_ref,cec_r_s,cec_r_sh_ref,cec_adjust,cec_gamma_pmp,,,
+Jinko Solar JKM475N-60HL4-V,Jinko Solar,Mono-c-Si,0,475.0,432.0,2.16,1.903,1.134,120,14.23,42.54,13.49,35.21,0.046,-0.17,45.0,1.3,14.23,2e-7,0.2,300.0,1,-0.003,N,2021.08.01,01/03/2021
+Hanwha Q CELLS Q.PEAK L-G5 300W,Hanwha Q CELLS,Mono-c-Si,N,300.0,273.0,1.67,1.67,1.0,72,9.57,39.58,9.01,33.29,0.0006,-0.33,45.0,1.2,9.58,1e-10,0.5,1500,1,-0.38,N,2019.08.01,01/03/2019
+Risen RSM40-8-390M,Risen,Mono-c-Si,0,390.0,355.0,1.82,1.755,1.038,80,13.98,40.70,13.19,31.8,0.048,-0.15,45.0,1.25,13.98,1.9e-7,0.38,445.0,1,-0.35,N,2024.01.01,01/04/2024
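The two added CSV tables appear to follow the SAM/CEC-style layout used with pvlib: a header row, a units row, a `[0]` variable-mapping row, then one row per device. A minimal sketch of loading one table with pandas (not emhass code; the relative path is an assumption):

```python
# Minimal sketch: skip the Units and [0] mapping rows so only device rows
# remain, indexed by the inverter name.
import pandas as pd

inverters = pd.read_csv(
    "emhass/data/emhass_inverters.csv",  # assumed relative path
    skiprows=[1, 2],                     # drop the Units and [0] mapping rows
    index_col="Name",
)
print(inverters.loc["Sungrow: SH10RT", "Paco"])  # rated AC power in W: 11000
```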
emhass/forecast.py CHANGED
@@ -9,6 +9,8 @@ import os
 import pickle
 import pickle as cPickle
 from datetime import datetime, timedelta
+import re
+from itertools import zip_longest
 from typing import Optional
 
 import numpy as np
@@ -319,86 +321,94 @@ class Forecast(object):
         # If weather_forecast_cache, set request days as twice as long to avoid length issues (add a buffer)
         if self.params["passed_data"].get("weather_forecast_cache", False):
             days_solcast = min((days_solcast * 2), 336)
-        url = (
-            "https://api.solcast.com.au/rooftop_sites/"
-            + self.retrieve_hass_conf["solcast_rooftop_id"]
-            + "/forecasts?hours="
-            + str(days_solcast)
-        )
-        response = get(url, headers=headers)
-        """import bz2 # Uncomment to save a serialized data for tests
-        import _pickle as cPickle
-        with bz2.BZ2File("data/test_response_solcast_get_method.pbz2", "w") as f:
-        cPickle.dump(response, f)"""
-        # Verify the request passed
-        if int(response.status_code) == 200:
-            data = response.json()
-        elif (
-            int(response.status_code) == 402
-            or int(response.status_code) == 429
-        ):
-            self.logger.error(
-                "Solcast error: May have exceeded your subscription limit."
-            )
-            return False
-        elif (
-            int(response.status_code) >= 400
-            or int(response.status_code) >= 202
+        # Split `roof_id` into a list (support comma or space as separator)
+        roof_ids = re.split(r"[,\s]+", self.retrieve_hass_conf["solcast_rooftop_id"].strip())
+        # Summary list of data
+        total_data_list = [0] * len(self.forecast_dates)
+        # Iteration over individual `roof_id`
+        for roof_id in roof_ids:
+            url = (
+                f"https://api.solcast.com.au/rooftop_sites/{roof_id}/forecasts?hours={days_solcast}"
+            )
+            response = get(url, headers=headers)
+            """import bz2 # Uncomment to save a serialized data for tests
+            import _pickle as cPickle
+            with bz2.BZ2File("data/test_response_solcast_get_method.pbz2", "w") as f:
+            cPickle.dump(response, f)"""
+            # Verify the request passed
+            if int(response.status_code) == 200:
+                data = response.json()
+            elif (
+                int(response.status_code) == 402
+                or int(response.status_code) == 429
+            ):
+                self.logger.error(
+                    "Solcast error: May have exceeded your subscription limit."
+                )
+                return False
+            elif (
+                int(response.status_code) >= 400
+                or (int(response.status_code) >= 202 and int(response.status_code) <= 299)
+            ):
+                self.logger.error(
+                    "Solcast error: There was a issue with the solcast request, check solcast API key and rooftop ID."
+                )
+                self.logger.error(
+                    "Solcast error: Check that your subscription is valid and your network can connect to Solcast."
+                )
+                return False
+            # Data processing for the current `roof_id`
+            data_list = []
+            for elm in data["forecasts"]:
+                data_list.append(
+                    elm["pv_estimate"] * 1000
+                )  # Converting kW to W
+            # Check if the retrieved data has the correct length
+            if len(data_list) < len(self.forecast_dates):
+                self.logger.error(
+                    "Not enough data retrieved from Solcast service, try increasing the time step or use MPC."
+                )
+                return False
+            # Adding the data of the current `roof_id` to the total
+            total_data_list = [
+                total + current for total, current in zip_longest(total_data_list, data_list, fillvalue=0)
+            ]
+        # If runtime weather_forecast_cache is true save forecast result to file as cache
+        if self.params["passed_data"].get(
+            "weather_forecast_cache", False
         ):
-            self.logger.error(
-                "Solcast error: There was a issue with the solcast request, check solcast API key and rooftop ID."
-            )
-            self.logger.error(
-                "Solcast error: Check that your subscription is valid and your network can connect to Solcast."
-            )
-            return False
-        data_list = []
-        for elm in data["forecasts"]:
-            data_list.append(
-                elm["pv_estimate"] * 1000
-            )  # Converting kW to W
-        # Check if the retrieved data has the correct length
-        if len(data_list) < len(self.forecast_dates):
-            self.logger.error(
-                "Not enough data retried from Solcast service, try increasing the time step or use MPC."
+            # Add x2 forecast periods for cached results. This adds a extra delta_forecast amount of days for a buffer
+            cached_forecast_dates = self.forecast_dates.union(
+                pd.date_range(
+                    self.forecast_dates[-1],
+                    periods=(len(self.forecast_dates) + 1),
+                    freq=self.freq,
+                )[1:]
             )
-        else:
-            # If runtime weather_forecast_cache is true save forecast result to file as cache
-            if self.params["passed_data"].get(
-                "weather_forecast_cache", False
-            ):
-                # Add x2 forecast periods for cached results. This adds a extra delta_forecast amount of days for a buffer
-                cached_forecast_dates = self.forecast_dates.union(
-                    pd.date_range(
-                        self.forecast_dates[-1],
-                        periods=(len(self.forecast_dates) + 1),
-                        freq=self.freq,
-                    )[1:]
+            cache_data_list = total_data_list[0 : len(cached_forecast_dates)]
+            cache_data_dict = {
+                "ts": cached_forecast_dates,
+                "yhat": cache_data_list,
+            }
+            data_cache = pd.DataFrame.from_dict(cache_data_dict)
+            data_cache.set_index("ts", inplace=True)
+            with open(w_forecast_cache_path, "wb") as file:
+                cPickle.dump(data_cache, file)
+            if not os.path.isfile(w_forecast_cache_path):
+                self.logger.warning(
+                    "Solcast forecast data could not be saved to file."
                 )
-                cache_data_list = data_list[0 : len(cached_forecast_dates)]
-                cache_data_dict = {
-                    "ts": cached_forecast_dates,
-                    "yhat": cache_data_list,
-                }
-                data_cache = pd.DataFrame.from_dict(cache_data_dict)
-                data_cache.set_index("ts", inplace=True)
-                with open(w_forecast_cache_path, "wb") as file:
-                    cPickle.dump(data_cache, file)
-                if not os.path.isfile(w_forecast_cache_path):
-                    self.logger.warning(
-                        "Solcast forecast data could not be saved to file."
-                    )
-                else:
-                    self.logger.info(
-                        "Saved the Solcast results to cache, for later reference."
-                    )
-            # Trim request results to forecast_dates
-            data_list = data_list[0 : len(self.forecast_dates)]
-            data_dict = {"ts": self.forecast_dates, "yhat": data_list}
-            # Define DataFrame
-            data = pd.DataFrame.from_dict(data_dict)
-            # Define index
-            data.set_index("ts", inplace=True)
+            else:
+                self.logger.info(
+                    "Saved the Solcast results to cache, for later reference."
+                )
+        # Trim request results to forecast_dates
+        total_data_list = total_data_list[0 : len(self.forecast_dates)]
+        data_dict = {"ts": self.forecast_dates, "yhat": total_data_list}
+        # Define DataFrame
+        data = pd.DataFrame.from_dict(data_dict)
+        # Define index
+        data.set_index("ts", inplace=True)
     # Else, notify user to update cache
     else:
         self.logger.error("Unable to obtain Solcast cache file.")
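The rewritten branch above supports several Solcast rooftop IDs at once: each roof's forecast is fetched separately and accumulated into `total_data_list`. A standalone sketch of the accumulation step:

```python
# Standalone sketch: element-wise sum of per-roof forecasts; fillvalue=0
# pads a roof that returned fewer periods instead of truncating the total.
from itertools import zip_longest

total = [0] * 4
for roof_forecast in ([100, 200, 300, 400], [50, 60, 70]):  # W per period
    total = [t + c for t, c in zip_longest(total, roof_forecast, fillvalue=0)]
print(total)  # [150, 260, 370, 400]
```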
@@ -906,33 +916,35 @@ class Forecast(object):
             # Upsampling
             # Use 'asfreq' to create empty slots, then interpolate
             resampled_data = data.resample(freq).asfreq()
-            resampled_data = resampled_data.interpolate(method='time')
+            resampled_data = resampled_data.interpolate(method="time")
         else:
             # No resampling needed
             resampled_data = data.copy()
         return resampled_data
-
+
     @staticmethod
     def get_typical_load_forecast(data, forecast_date):
         r"""
         Forecast the load profile for the next day based on historic data.
 
-        :param data: A DataFrame with a DateTimeIndex containing the historic load data.
+        :param data: A DataFrame with a DateTimeIndex containing the historic load data.
             Must include a 'load' column.
         :type data: pd.DataFrame
         :param forecast_date: The date for which the forecast will be generated.
         :type forecast_date: pd.Timestamp
-        :return: A Series with the forecasted load profile for the next day and a list of days used
+        :return: A Series with the forecasted load profile for the next day and a list of days used
             to calculate the forecast.
         :rtype: tuple (pd.Series, list)
         """
         # Ensure the 'load' column exists
-        if 'load' not in data.columns:
+        if "load" not in data.columns:
             raise ValueError("Data must have a 'load' column.")
         # Filter historic data for the same month and day of the week
         month = forecast_date.month
         day_of_week = forecast_date.dayofweek
-        historic_data = data[(data.index.month == month) & (data.index.dayofweek == day_of_week)]
+        historic_data = data[
+            (data.index.month == month) & (data.index.dayofweek == day_of_week)
+        ]
         used_days = np.unique(historic_data.index.date)
         # Align all historic data to the forecast day
         aligned_data = []
@@ -940,7 +952,11 @@ class Forecast(object):
             daily_data = data[data.index.date == pd.Timestamp(day).date()]
             aligned_daily_data = daily_data.copy()
             aligned_daily_data.index = aligned_daily_data.index.map(
-                lambda x: x.replace(year=forecast_date.year, month=forecast_date.month, day=forecast_date.day)
+                lambda x: x.replace(
+                    year=forecast_date.year,
+                    month=forecast_date.month,
+                    day=forecast_date.day,
+                )
             )
             aligned_data.append(aligned_daily_data)
         # Combine all aligned historic data into a single DataFrame
@@ -948,7 +964,7 @@ class Forecast(object):
         # Compute the mean load for each timestamp
         forecast = combined_data.groupby(combined_data.index).mean()
         return forecast, used_days
-
+
     def get_load_forecast(
         self,
         days_min_load_forecast: Optional[int] = 3,
@@ -1041,14 +1057,18 @@ class Forecast(object):
         ):
             return False
         df = rh.df_final.copy()[[self.var_load_new]]
-        if method == "typical": # using typical statistical data from a household power consumption
+        if (
+            method == "typical"
+        ): # using typical statistical data from a household power consumption
             # Loading data from history file
             model_type = "load_clustering"
-            data_path = self.emhass_conf["data_path"] / str("data_train_" + model_type + ".pkl")
+            data_path = self.emhass_conf["data_path"] / str(
+                "data_train_" + model_type + ".pkl"
+            )
             with open(data_path, "rb") as fid:
                 data, _ = pickle.load(fid)
             # Resample the data if needed
-            current_freq = pd.Timedelta('30min')
+            current_freq = pd.Timedelta("30min")
             if self.freq != current_freq:
                 data = Forecast.resample_data(data, self.freq, current_freq)
             # Generate forecast
@@ -1057,20 +1077,28 @@ class Forecast(object):
             forecast = pd.DataFrame()
             for date in dates_list:
                 forecast_date = pd.Timestamp(date)
-                data.columns = ['load']
-                forecast_tmp, used_days = Forecast.get_typical_load_forecast(data, forecast_date)
-                self.logger.debug(f"Using {len(used_days)} days of data to generate the forecast.")
+                data.columns = ["load"]
+                forecast_tmp, used_days = Forecast.get_typical_load_forecast(
+                    data, forecast_date
+                )
+                self.logger.debug(
+                    f"Using {len(used_days)} days of data to generate the forecast."
+                )
                 # Normalize the forecast
-                forecast_tmp = forecast_tmp*self.plant_conf['maximum_power_from_grid']/9000
+                forecast_tmp = (
+                    forecast_tmp * self.plant_conf["maximum_power_from_grid"] / 9000
+                )
                 data_list.extend(forecast_tmp.values.ravel().tolist())
                 if len(forecast) == 0:
                     forecast = forecast_tmp
                 else:
                     forecast = pd.concat([forecast, forecast_tmp], axis=0)
             forecast.index = forecast.index.tz_convert(self.time_zone)
-            forecast_out = forecast.loc[forecast.index.intersection(self.forecast_dates)]
-            forecast_out.index.name = 'ts'
-            forecast_out = forecast_out.rename(columns={'load': 'yhat'})
+            forecast_out = forecast.loc[
+                forecast.index.intersection(self.forecast_dates)
+            ]
+            forecast_out.index.name = "ts"
+            forecast_out = forecast_out.rename(columns={"load": "yhat"})
         elif method == "naive": # using a naive approach
             mask_forecast_out = (
                 df.index > days_list[-1] - self.optim_conf["delta_forecast_daily"]
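`get_typical_load_forecast` builds its "typical day" by keeping only history from the same month and weekday, re-dating those days onto the forecast date, and averaging per timestamp. A minimal sketch of the idea on synthetic data (not emhass code):

```python
# Minimal sketch: filter history to the target's month and weekday, align
# each matching day onto the target date, then take the mean per timestamp.
import numpy as np
import pandas as pd

idx = pd.date_range("2024-01-01", "2024-03-31 23:30", freq="30min")
data = pd.DataFrame(
    {"load": np.random.default_rng(0).uniform(0, 3000, len(idx))}, index=idx
)

target = pd.Timestamp("2024-03-04")  # a Monday
hist = data[(data.index.month == target.month) & (data.index.dayofweek == target.dayofweek)]
aligned = hist.copy()
aligned.index = aligned.index.map(
    lambda x: x.replace(year=target.year, month=target.month, day=target.day)
)
profile = aligned.groupby(aligned.index).mean()  # averaged 30-min profile
print(profile.shape)  # (48, 1): one mean value per half-hour slot
```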
emhass/img/emhass_icon.png ADDED
Binary file
emhass/machine_learning_forecaster.py CHANGED
@@ -1,22 +1,22 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
-import logging
 import copy
+import logging
 import time
+import warnings
 from typing import Optional, Tuple
-import pandas as pd
-import numpy as np
 
-from sklearn.linear_model import LinearRegression
-from sklearn.linear_model import ElasticNet
-from sklearn.neighbors import KNeighborsRegressor
+import numpy as np
+import pandas as pd
+from skforecast.ForecasterAutoreg import ForecasterAutoreg
+from skforecast.model_selection import (
+    backtesting_forecaster,
+    bayesian_search_forecaster,
+)
+from sklearn.linear_model import ElasticNet, LinearRegression
 from sklearn.metrics import r2_score
-
-from skforecast.recursive import ForecasterRecursive
-from skforecast.model_selection import bayesian_search_forecaster, backtesting_forecaster, TimeSeriesFold
-
-import warnings
+from sklearn.neighbors import KNeighborsRegressor
 
 warnings.filterwarnings("ignore", category=DeprecationWarning)
 
@@ -169,10 +169,7 @@ class MLForecaster:
             )
             base_model = KNeighborsRegressor()
         # Define the forecaster object
-        self.forecaster = ForecasterRecursive(
-            regressor = base_model,
-            lags = self.num_lags
-        )
+        self.forecaster = ForecasterAutoreg(regressor=base_model, lags=self.num_lags)
         # Fit and time it
         self.logger.info("Training a " + self.sklearn_model + " model")
         start_time = time.time()
@@ -201,22 +198,18 @@ class MLForecaster:
         # Using backtesting tool to evaluate the model
         self.logger.info("Performing simple backtesting of fitted model")
         start_time = time.time()
-        cv = TimeSeriesFold(
-            steps = self.num_lags,
-            initial_train_size = None,
-            fixed_train_size = False,
-            gap = 0,
-            allow_incomplete_fold = True,
-            refit = False
-        )
         metric, predictions_backtest = backtesting_forecaster(
-            forecaster = self.forecaster,
-            y = self.data_train[self.var_model],
-            exog = self.data_train.drop(self.var_model, axis=1),
-            cv = cv,
-            metric = MLForecaster.neg_r2_score,
-            verbose = False,
-            show_progress = True
+            forecaster=self.forecaster,
+            y=self.data_train[self.var_model],
+            exog=self.data_train.drop(self.var_model, axis=1),
+            steps=self.num_lags,
+            initial_train_size=None,
+            allow_incomplete_fold=True,
+            gap=0,
+            metric=MLForecaster.neg_r2_score,
+            verbose=False,
+            refit=False,
+            show_progress=True,
        )
        self.logger.info(f"Elapsed backtesting time: {time.time() - start_time}")
        self.logger.info(f"Backtest R2 score: {-metric}")
@@ -356,25 +349,24 @@ class MLForecaster:
         # The optimization routine call
         self.logger.info("Bayesian hyperparameter optimization with backtesting")
         start_time = time.time()
-        cv = TimeSeriesFold(
-            steps = num_lags,
-            initial_train_size = len(self.data_exo.loc[:self.date_train]),
-            fixed_train_size = True,
-            gap = 0,
-            skip_folds = None,
-            allow_incomplete_fold = True,
-            refit = refit
-        )
-        self.optimize_results, self.optimize_results_object = bayesian_search_forecaster(
-            forecaster = self.forecaster,
-            y = self.data_train[self.var_model],
-            exog = self.data_train.drop(self.var_model, axis=1),
-            cv = cv,
-            search_space = search_space,
-            metric = MLForecaster.neg_r2_score,
-            n_trials = 10,
-            random_state = 123,
-            return_best = True
+        self.optimize_results, self.optimize_results_object = (
+            bayesian_search_forecaster(
+                forecaster=self.forecaster,
+                y=self.data_train[self.var_model],
+                exog=self.data_train.drop(self.var_model, axis=1),
+                search_space=search_space,
+                metric=MLForecaster.neg_r2_score,
+                n_trials=10,
+                random_state=123,
+                steps=num_lags,
+                initial_train_size=len(self.data_exo.loc[: self.date_train]),
+                return_best=True,
+                fixed_train_size=True,
+                gap=0,
+                allow_incomplete_fold=True,
+                skip_folds=None,
+                refit=refit,
+            )
        )
        self.logger.info(f"Elapsed time: {time.time() - start_time}")
        self.is_tuned = True
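These changes revert the module from the skforecast 0.14 interface (`ForecasterRecursive`, `TimeSeriesFold`) to the pre-0.14 interface (`ForecasterAutoreg`, with the fold options passed directly to the backtesting and search functions), which matches the relaxed `skforecast>=0.9.0` pin in the new METADATA. A minimal sketch of the older API, assuming a skforecast version below 0.14 is installed:

```python
# Minimal sketch of the pre-0.14 skforecast API assumed by this diff
# (with skforecast>=0.14 this import path no longer exists).
import numpy as np
import pandas as pd
from skforecast.ForecasterAutoreg import ForecasterAutoreg
from sklearn.neighbors import KNeighborsRegressor

y = pd.Series(
    np.sin(np.linspace(0, 20, 200)),
    index=pd.date_range("2024-01-01", periods=200, freq="30min"),
)
forecaster = ForecasterAutoreg(regressor=KNeighborsRegressor(), lags=48)
forecaster.fit(y=y)
print(forecaster.predict(steps=6))  # next 6 half-hour steps
```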
emhass/optimization.py CHANGED
@@ -4,7 +4,6 @@
 import bz2
 import copy
 import logging
-import pathlib
 import pickle as cPickle
 from math import ceil
 from typing import Optional, Tuple
@@ -85,7 +84,7 @@ class Optimization:
         self.var_load = self.retrieve_hass_conf["sensor_power_load_no_var_loads"]
         self.var_load_new = self.var_load + "_positive"
         self.costfun = costfun
-        # self.emhass_conf = emhass_conf
+        self.emhass_conf = emhass_conf
         self.logger = logger
         self.var_load_cost = var_load_cost
         self.var_prod_price = var_prod_price
@@ -120,6 +119,7 @@ class Optimization:
         soc_init: Optional[float] = None,
         soc_final: Optional[float] = None,
         def_total_hours: Optional[list] = None,
+        def_total_timestep: Optional[list] = None,
         def_start_timestep: Optional[list] = None,
         def_end_timestep: Optional[list] = None,
         debug: Optional[bool] = False,
@@ -152,6 +152,9 @@ class Optimization:
         :param def_total_hours: The functioning hours for this iteration for each deferrable load. \
             (For continuous deferrable loads: functioning hours at nominal power)
         :type def_total_hours: list
+        :param def_total_timestep: The functioning timesteps for this iteration for each deferrable load. \
+            (For continuous deferrable loads: functioning timesteps at nominal power)
+        :type def_total_timestep: list
         :param def_start_timestep: The timestep as from which each deferrable load is allowed to operate.
         :type def_start_timestep: list
         :param def_end_timestep: The timestep before which each deferrable load should operate.
@@ -173,8 +176,13 @@ class Optimization:
             soc_final = soc_init
         else:
             soc_final = self.plant_conf["battery_target_state_of_charge"]
-        if def_total_hours is None:
+
+        # If def_total_timestep os set, bypass def_total_hours
+        if def_total_timestep is not None:
+            def_total_hours = [0 if x != 0 else x for x in def_total_hours]
+        elif def_total_hours is None:
             def_total_hours = self.optim_conf["operating_hours_of_each_deferrable_load"]
+
         if def_start_timestep is None:
             def_start_timestep = self.optim_conf[
                 "start_timesteps_of_each_deferrable_load"
@@ -381,7 +389,7 @@ class Optimization:
             * self.timeStep
             * (
                 self.optim_conf["weight_battery_discharge"] * P_sto_pos[i]
-                + self.optim_conf["weight_battery_charge"] * P_sto_neg[i]
+                - self.optim_conf["weight_battery_charge"] * P_sto_neg[i]
             )
             for i in set_I
         )
@@ -462,7 +470,8 @@ class Optimization:
             for i in range(len(self.plant_conf["pv_inverter_model"])):
                 if type(self.plant_conf["pv_inverter_model"][i]) == str:
                     cec_inverters = bz2.BZ2File(
-                        pathlib.Path(__file__).parent / "data/cec_inverters.pbz2", "rb"
+                        self.emhass_conf["root_path"] / "data" / "cec_inverters.pbz2",
+                        "rb",
                     )
                     cec_inverters = cPickle.load(cec_inverters)
                     inverter = cec_inverters[self.plant_conf["pv_inverter_model"][i]]
@@ -472,7 +481,7 @@ class Optimization:
             else:
                 if type(self.plant_conf["pv_inverter_model"][i]) == str:
                     cec_inverters = bz2.BZ2File(
-                        pathlib.Path(__file__).parent / "data/cec_inverters.pbz2", "rb"
+                        self.emhass_conf["root_path"] / "data" / "cec_inverters.pbz2", "rb"
                    )
                    cec_inverters = cPickle.load(cec_inverters)
                    inverter = cec_inverters[self.plant_conf["pv_inverter_model"]]
@@ -699,8 +708,7 @@ class Optimization:
                     predicted_temps[k] = predicted_temp
 
                 else:
-                    if def_total_hours[k] > 0:
-                        # Total time of deferrable load
+                    if def_total_timestep and def_total_timestep[k] > 0:
                         constraints.update(
                             {
                                 "constraint_defload{}_energy".format(k): plp.LpConstraint(
@@ -708,13 +716,33 @@ class Optimization:
                                         P_deferrable[k][i] * self.timeStep for i in set_I
                                     ),
                                     sense=plp.LpConstraintEQ,
-                                    rhs=def_total_hours[k]
+                                    rhs=(self.timeStep * def_total_timestep[k])
                                     * self.optim_conf["nominal_power_of_deferrable_loads"][
                                         k
                                     ],
                                 )
                             }
                         )
+                    else:
+                        if def_total_hours[k] > 0:
+                            # Total time of deferrable load
+                            constraints.update(
+                                {
+                                    "constraint_defload{}_energy".format(
+                                        k
+                                    ): plp.LpConstraint(
+                                        e=plp.lpSum(
+                                            P_deferrable[k][i] * self.timeStep
+                                            for i in set_I
+                                        ),
+                                        sense=plp.LpConstraintEQ,
+                                        rhs=def_total_hours[k]
+                                        * self.optim_conf[
+                                            "nominal_power_of_deferrable_loads"
+                                        ][k],
+                                    )
+                                }
+                            )
 
             # Ensure deferrable loads consume energy between def_start_timestep & def_end_timestep
             self.logger.debug(
@@ -722,12 +750,23 @@ class Optimization:
                     k, def_start_timestep[k], def_end_timestep[k]
                 )
            )
-            def_start, def_end, warning = Optimization.validate_def_timewindow(
-                def_start_timestep[k],
-                def_end_timestep[k],
-                ceil(def_total_hours[k] / self.timeStep),
-                n,
-            )
+            if def_total_timestep and def_total_timestep[k] > 0:
+                def_start, def_end, warning = Optimization.validate_def_timewindow(
+                    def_start_timestep[k],
+                    def_end_timestep[k],
+                    ceil(
+                        (60 / ((self.freq.seconds / 60) * def_total_timestep[k]))
+                        / self.timeStep
+                    ),
+                    n,
+                )
+            else:
+                def_start, def_end, warning = Optimization.validate_def_timewindow(
+                    def_start_timestep[k],
+                    def_end_timestep[k],
+                    ceil(def_total_hours[k] / self.timeStep),
+                    n,
+                )
             if warning is not None:
                 self.logger.warning("Deferrable load {} : {}".format(k, warning))
             self.logger.debug(
@@ -837,15 +876,35 @@ class Optimization:
                     }
                 )
                 # P_def_bin2 must be 1 for exactly the correct number of timesteps.
-                constraints.update(
-                    {
-                        "constraint_pdef{}_start5".format(k): plp.LpConstraint(
-                            e=plp.lpSum(P_def_bin2[k][i] for i in set_I),
-                            sense=plp.LpConstraintEQ,
-                            rhs=def_total_hours[k] / self.timeStep,
-                        )
-                    }
-                )
+                if def_total_timestep and def_total_timestep[k] > 0:
+                    constraints.update(
+                        {
+                            "constraint_pdef{}_start5".format(k): plp.LpConstraint(
+                                e=plp.lpSum(P_def_bin2[k][i] for i in set_I),
+                                sense=plp.LpConstraintEQ,
+                                rhs=(
+                                    (
+                                        60
+                                        / (
+                                            (self.freq.seconds / 60)
+                                            * def_total_timestep[k]
+                                        )
+                                    )
+                                    / self.timeStep
+                                ),
+                            )
+                        }
+                    )
+                else:
+                    constraints.update(
+                        {
+                            "constraint_pdef{}_start5".format(k): plp.LpConstraint(
+                                e=plp.lpSum(P_def_bin2[k][i] for i in set_I),
+                                sense=plp.LpConstraintEQ,
+                                rhs=def_total_hours[k] / self.timeStep,
+                            )
+                        }
+                    )
 
                 # Treat deferrable load as a semi-continuous variable
                 if self.optim_conf["treat_deferrable_load_as_semi_cont"][k]:
@@ -1336,6 +1395,7 @@ class Optimization:
         soc_init: Optional[float] = None,
         soc_final: Optional[float] = None,
         def_total_hours: Optional[list] = None,
+        def_total_timestep: Optional[list] = None,
         def_start_timestep: Optional[list] = None,
         def_end_timestep: Optional[list] = None,
     ) -> pd.DataFrame:
@@ -1362,6 +1422,9 @@ class Optimization:
         :param soc_final: The final battery SOC for the optimization. This parameter \
             is optional, if not given soc_init = soc_final = soc_target from the configuration file.
         :type soc_final:
+        :param def_total_timestep: The functioning timesteps for this iteration for each deferrable load. \
+            (For continuous deferrable loads: functioning timesteps at nominal power)
+        :type def_total_timestep: list
         :param def_total_hours: The functioning hours for this iteration for each deferrable load. \
             (For continuous deferrable loads: functioning hours at nominal power)
         :type def_total_hours: list
@@ -1395,6 +1458,7 @@ class Optimization:
             soc_init=soc_init,
             soc_final=soc_final,
             def_total_hours=def_total_hours,
+            def_total_timestep=def_total_timestep,
             def_start_timestep=def_start_timestep,
             def_end_timestep=def_end_timestep,
         )
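In the new energy constraint above, the requested number of timesteps is converted to an energy target: rhs = timeStep × def_total_timestep[k] × nominal power. A worked example of that arithmetic with assumed values:

```python
# Worked example of the rhs in the new constraint (values assumed): with a
# 30-minute optimization step, timeStep = 0.5 h, so 3 requested timesteps
# of a 3 kW deferrable load must deliver 0.5 * 3 * 3000 = 4500 Wh.
timeStep = 0.5            # hours per timestep (30-minute freq)
def_total_timestep_k = 3  # requested operating timesteps for load k
nominal_power_k = 3000.0  # W
rhs = (timeStep * def_total_timestep_k) * nominal_power_k
print(rhs)  # 4500.0 Wh over the optimization horizon
```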
emhass/retrieve_hass.py CHANGED
@@ -80,6 +80,7 @@ class RetrieveHass:
         self.emhass_conf = emhass_conf
         self.logger = logger
         self.get_data_from_file = get_data_from_file
+        self.var_list = []
 
     def get_ha_config(self):
         """
@@ -286,6 +287,7 @@ class RetrieveHass:
                 + str(self.freq)
             )
             return False
+        self.var_list = var_list
         return True
 
     def prepare_data(
@@ -336,7 +338,21 @@ class RetrieveHass:
                 "sensor.power_photovoltaics and sensor.power_load_no_var_loads should not be the same"
             )
             return False
-        if set_zero_min: # Apply minimum values
+        # Confirm var_replace_zero & var_interp contain only sensors contained in var_list
+        if isinstance(var_replace_zero, list) and all(
+            item in var_replace_zero for item in self.var_list
+        ):
+            pass
+        else:
+            var_replace_zero = []
+        if isinstance(var_interp, list) and all(
+            item in var_interp for item in self.var_list
+        ):
+            pass
+        else:
+            var_interp = []
+        # Apply minimum values
+        if set_zero_min:
             self.df_final.clip(lower=0.0, inplace=True, axis=1)
             self.df_final.replace(to_replace=0.0, value=np.nan, inplace=True)
             new_var_replace_zero = []
@@ -347,6 +363,12 @@ class RetrieveHass:
                 new_string = string.replace(var_load, var_load + "_positive")
                 new_var_replace_zero.append(new_string)
         else:
+            self.logger.warning(
+                "Unable to find all the sensors in sensor_replace_zero parameter"
+            )
+            self.logger.warning(
+                "Confirm sure all sensors in sensor_replace_zero are sensor_power_photovoltaics and/or ensor_power_load_no_var_loads "
+            )
             new_var_replace_zero = None
         if var_interp is not None:
             for string in var_interp:
@@ -354,6 +376,12 @@ class RetrieveHass:
                 new_var_interp.append(new_string)
         else:
             new_var_interp = None
+            self.logger.warning(
+                "Unable to find all the sensors in sensor_linear_interp parameter"
+            )
+            self.logger.warning(
+                "Confirm all sensors in sensor_linear_interp are sensor_power_photovoltaics and/or ensor_power_load_no_var_loads "
+            )
         # Treating NaN replacement: either by zeros or by linear interpolation
         if new_var_replace_zero is not None:
             self.df_final[new_var_replace_zero] = self.df_final[
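The new guard above clears `var_replace_zero` / `var_interp` unless every retrieved sensor appears in them. A condensed standalone sketch of that check, which amounts to a subset test on `self.var_list`:

```python
# Condensed sketch of the guard: the parameter list survives only if every
# sensor in var_list is contained in it (a subset test); otherwise it is
# reset to an empty list, which later triggers the warnings.
var_list = ["sensor.power_photovoltaics", "sensor.power_load_no_var_loads"]
var_replace_zero = ["sensor.power_photovoltaics"]

if not (isinstance(var_replace_zero, list) and set(var_list).issubset(var_replace_zero)):
    var_replace_zero = []
print(var_replace_zero)  # [] -> one sensor was missing from the parameter
```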
emhass/utils.py CHANGED
@@ -161,32 +161,52 @@ def update_params_with_ha_config(
         params = json.loads(params)
     # Update params
     currency_to_symbol = {
-        'EUR': '€',
-        'USD': '$',
-        'GBP': '£',
-        'YEN': '¥',
-        'JPY': '¥',
-        'AUD': 'A$',
-        'CAD': 'C$',
-        'CHF': 'CHF', # Swiss Franc has no special symbol
-        'CNY': '¥',
-        'INR': '₹',
+        "EUR": "€",
+        "USD": "$",
+        "GBP": "£",
+        "YEN": "¥",
+        "JPY": "¥",
+        "AUD": "A$",
+        "CAD": "C$",
+        "CHF": "CHF", # Swiss Franc has no special symbol
+        "CNY": "¥",
+        "INR": "₹",
+        "CZK": "Kč",
+        "BGN": "лв",
+        "DKK": "kr",
+        "HUF": "Ft",
+        "PLN": "zł",
+        "RON": "Leu",
+        "SEK": "kr",
+        "TRY": "Lira",
+        "VEF": "Bolivar",
+        "VND": "Dong",
+        "THB": "Baht",
+        "SGD": "S$",
+        "IDR": "Roepia",
+        "ZAR": "Rand",
         # Add more as needed
     }
-    if 'currency' in ha_config.keys():
-        ha_config['currency'] = currency_to_symbol.get(ha_config['currency'], 'Unknown')
+    if "currency" in ha_config.keys():
+        ha_config["currency"] = currency_to_symbol.get(ha_config["currency"], "Unknown")
     else:
-        ha_config['currency'] = '€'
-    if 'unit_system' not in ha_config.keys():
-        ha_config['unit_system'] = {'temperature': '°C'}
-
-    for k in range(params["optim_conf"]["number_of_deferrable_loads"]):
-        params['passed_data']['custom_predicted_temperature_id'][k].update(
-            {"unit_of_measurement": ha_config['unit_system']['temperature']}
+        ha_config["currency"] = "€"
+    if "unit_system" not in ha_config.keys():
+        ha_config["unit_system"] = {"temperature": "°C"}
+
+    number_of_deferrable_loads = params["optim_conf"]["number_of_deferrable_loads"]
+    if "num_def_loads" in params["passed_data"].keys():
+        number_of_deferrable_loads = params["passed_data"]["num_def_loads"]
+    if "number_of_deferrable_loads" in params["passed_data"].keys():
+        number_of_deferrable_loads = params["passed_data"]["number_of_deferrable_loads"]
+
+    for k in range(number_of_deferrable_loads):
+        params["passed_data"]["custom_predicted_temperature_id"][k].update(
+            {"unit_of_measurement": ha_config["unit_system"]["temperature"]}
         )
     updated_passed_dict = {
         "custom_cost_fun_id": {
-            "unit_of_measurement": ha_config['currency'],
+            "unit_of_measurement": ha_config["currency"],
         },
         "custom_unit_load_cost_id": {
             "unit_of_measurement": f"{ha_config['currency']}/kWh",
@@ -248,9 +268,9 @@ def treat_runtimeparams(
     params["plant_conf"].update(plant_conf)
 
     # Check defaults on HA retrieved config
-    default_currency_unit = '€'
-    default_temperature_unit = '°C'
-
+    default_currency_unit = "€"
+    default_temperature_unit = "°C"
+
     # Some default data needed
     custom_deferrable_forecast_id = []
     custom_predicted_temperature_id = []
@@ -461,10 +481,17 @@ def treat_runtimeparams(
         else:
             soc_final = runtimeparams["soc_final"]
         params["passed_data"]["soc_final"] = soc_final
-
-    params["passed_data"]["operating_hours_of_each_deferrable_load"] = params[
-        "optim_conf"
-    ].get("operating_hours_of_each_deferrable_load", None)
+    if "operating_timesteps_of_each_deferrable_load" in runtimeparams.keys():
+        params["passed_data"]["operating_timesteps_of_each_deferrable_load"] = (
+            runtimeparams["operating_timesteps_of_each_deferrable_load"]
+        )
+        params["optim_conf"]["operating_timesteps_of_each_deferrable_load"] = (
+            runtimeparams["operating_timesteps_of_each_deferrable_load"]
+        )
+    if "operating_hours_of_each_deferrable_load" in params["optim_conf"].keys():
+        params["passed_data"]["operating_hours_of_each_deferrable_load"] = (
+            params["optim_conf"]["operating_hours_of_each_deferrable_load"]
+        )
     params["passed_data"]["start_timesteps_of_each_deferrable_load"] = params[
         "optim_conf"
     ].get("start_timesteps_of_each_deferrable_load", None)
emhass/web_server.py CHANGED
@@ -8,9 +8,9 @@ import os
 import pickle
 import re
 import threading
-from distutils.util import strtobool
 from importlib.metadata import PackageNotFoundError, version
 from pathlib import Path
+from typing import Optional
 
 import yaml
 from flask import Flask, make_response, request
@@ -46,6 +46,21 @@ from emhass.utils import (
 # Define the Flask instance
 app = Flask(__name__)
 emhass_conf = {}
+entity_path = Path
+params_secrets = {}
+continual_publish_thread = []
+injection_dict = {}
+
+
+def create_app(settings_override=None):
+    """
+    Create a Flask application.
+    :param settings_override: Override settings
+    :return: Flask app
+    """
+    global app
+    main()
+    return app
 
 
 def checkFileLog(refString=None) -> bool:
@@ -153,6 +168,10 @@ def configuration():
 
     """
     app.logger.info("serving configuration.html...")
+    # get params
+    if (emhass_conf["data_path"] / "params.pkl").exists():
+        with open(str(emhass_conf["data_path"] / "params.pkl"), "rb") as fid:
+            emhass_conf["config_path"], params = pickle.load(fid)
     # Load HTML template
     file_loader = PackageLoader("emhass", "templates")
     env = Environment(loader=file_loader)
@@ -328,7 +347,7 @@ def parameter_set():
     with open(str(emhass_conf["data_path"] / "params.pkl"), "wb") as fid:
         pickle.dump(
             (
-                config_path,
+                emhass_conf["config_path"],
                 build_params(emhass_conf, params_secrets, config, app.logger),
             ),
             fid,
@@ -349,6 +368,9 @@ def action_call(action_name):
     :type action_name: String
 
     """
+    global continual_publish_thread
+    global injection_dict
+
     # Setting up parameters
     # Params
     ActionStr = " >> Obtaining params: "
@@ -524,31 +546,17 @@
         return make_response(msg, 400)
 
 
-if __name__ == "__main__":
-    # Parsing arguments
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--url",
-        type=str,
-        help="The URL to your Home Assistant instance, ex the external_url in your hass configuration",
-    )
-    parser.add_argument(
-        "--key",
-        type=str,
-        help="Your access key. If using EMHASS in standalone this should be a Long-Lived Access Token",
-    )
-    parser.add_argument(
-        "--no_response",
-        type=strtobool,
-        default="False",
-        help="This is set if json response errors occur",
-    )
-    args = parser.parse_args()
-
+def main(
+    args: Optional[dict] = None,
+):
+    global continual_publish_thread
+    global emhass_conf
+    global entity_path
+    global injection_dict
     # Pre formatted config parameters
     config = {}
     # Secrets
-    params_secrets = {}
+    global params_secrets
     # Built parameters (formatted config + secrets)
     params = None
 
@@ -601,11 +609,16 @@
         app.logger.setLevel(logging.DEBUG)
 
     ## Secrets
+    # Argument
     argument = {}
-    if args.url:
-        argument["url"] = args.url
-    if args.key:
-        argument["key"] = args.key
+    no_response = False
+    if args is not None:
+        if args.get("url", None):
+            argument["url"] = args["url"]
+        if args.get("key", None):
+            argument["key"] = args["key"]
+        if args.get("no_response", None):
+            no_response = args["no_response"]
     # Combine secrets from ENV, Arguments/ARG, Secrets file (secrets_emhass.yaml), options (options.json from addon configuration file) and/or Home Assistant Standalone API (if exist)
     emhass_conf, secrets = build_secrets(
         emhass_conf,
@@ -613,7 +626,7 @@
         argument,
         options_path,
         os.getenv("SECRETS_PATH", default="/app/secrets_emhass.yaml"),
-        bool(args.no_response),
+        bool(no_response),
     )
     params_secrets.update(secrets)
 
@@ -706,4 +719,33 @@
         app.logger.info("Using core emhass version: " + version("emhass"))
     except PackageNotFoundError:
         app.logger.info("Using development emhass version")
+
+    return server_ip, port
+
+
+if __name__ == "__main__":
+    # Parsing arguments
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--url",
+        type=str,
+        help="The URL to your Home Assistant instance, ex the external_url in your hass configuration",
+    )
+    parser.add_argument(
+        "--key",
+        type=str,
+        help="Your access key. If using EMHASS in standalone this should be a Long-Lived Access Token",
+    )
+    parser.add_argument(
+        "--no_response",
+        type=bool,
+        default=False,
+        help="This is set if json response errors occur",
+    )
+    args = parser.parse_args()
+
+    server_ip, port = main(vars(args))
+    os.environ["IP"] = str(server_ip)
+    os.environ["PORT"] = str(port)
+
     serve(app, host=server_ip, port=port, threads=8)
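The restructuring above (a module-level `main()` plus a `create_app()` factory, with gunicorn added as a dependency in the new METADATA) suggests the server can now be started by a WSGI server rather than only via the `__main__` block. A hypothetical sketch of that usage; the factory invocation is an assumption, not documented in this diff:

```python
# Hypothetical sketch (assumed usage, not from the diff): the new factory
# builds the app programmatically instead of running the __main__ block.
from emhass.web_server import create_app

app = create_app()  # internally calls main() to assemble config and secrets
# The resulting app could then be handed to a WSGI server, e.g. gunicorn's
# factory syntax:  gunicorn "emhass.web_server:create_app()"
```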
emhass-0.12.1.dist-info/METADATA → emhass-0.12.3.dist-info/METADATA CHANGED
@@ -1,46 +1,48 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: emhass
-Version: 0.12.1
+Version: 0.12.3
 Summary: An Energy Management System for Home Assistant
+Project-URL: Homepage, https://github.com/davidusb-geek/emhass
 Author-email: David HERNANDEZ <davidusb@gmail.com>
 License: MIT
-Project-URL: Homepage, https://github.com/davidusb-geek/emhass
-Keywords: energy,management,optimization,hass
+License-File: LICENSE
+Keywords: energy,hass,management,optimization
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
-Classifier: Topic :: Software Development :: Build Tools
 Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Topic :: Software Development :: Build Tools
 Requires-Python: <3.12,>=3.10
-Description-Content-Type: text/markdown
-License-File: LICENSE
-Requires-Dist: numpy==1.26.4
-Requires-Dist: scipy==1.12.0
-Requires-Dist: pandas<=2.0.3
-Requires-Dist: pvlib>=0.10.2
-Requires-Dist: protobuf>=3.0.0
-Requires-Dist: pytz>=2021.1
+Requires-Dist: beautifulsoup4>=4.12.3
+Requires-Dist: flask>=3.1.0
+Requires-Dist: gunicorn>=23.0.0
+Requires-Dist: h5py>=3.12.1
+Requires-Dist: numpy<2.3.0,>=2.0.0
+Requires-Dist: pandas>=2.1.1
+Requires-Dist: plotly>=6.0.0rc0
+Requires-Dist: protobuf>=5.29.1
+Requires-Dist: pulp>=2.8.0
+Requires-Dist: pvlib>=0.10.3
+Requires-Dist: pytz>=2023.4
+Requires-Dist: pyyaml>=6.0.1
 Requires-Dist: requests>=2.25.1
-Requires-Dist: beautifulsoup4>=4.9.3
-Requires-Dist: h5py==3.12.1
-Requires-Dist: pulp>=2.4
-Requires-Dist: pyyaml>=5.4.1
-Requires-Dist: tables<=3.9.1
-Requires-Dist: skforecast==0.14.0
-Requires-Dist: flask>=2.0.3
-Requires-Dist: waitress>=2.1.1
-Requires-Dist: plotly>=5.6.0
+Requires-Dist: scipy>=1.14.0
+Requires-Dist: skforecast>=0.9.0
+Requires-Dist: tables>=3.9.1
+Requires-Dist: waitress>=3.0.2
 Provides-Extra: docs
-Requires-Dist: sphinx; extra == "docs"
-Requires-Dist: sphinx-rtd-theme; extra == "docs"
-Requires-Dist: myst-parser; extra == "docs"
+Requires-Dist: myst-parser; extra == 'docs'
+Requires-Dist: sphinx; extra == 'docs'
+Requires-Dist: sphinx-rtd-theme; extra == 'docs'
 Provides-Extra: test
-Requires-Dist: requests_mock; extra == "test"
-Requires-Dist: pytest; extra == "test"
-Requires-Dist: coverage; extra == "test"
-Requires-Dist: snakeviz; extra == "test"
-Requires-Dist: ruff; extra == "test"
+Requires-Dist: coverage; extra == 'test'
+Requires-Dist: pytest; extra == 'test'
+Requires-Dist: requests-mock; extra == 'test'
+Requires-Dist: ruff; extra == 'test'
+Requires-Dist: snakeviz; extra == 'test'
+Requires-Dist: tabulate; extra == 'test'
+Description-Content-Type: text/markdown
 
 <div align="center">
 <br>
@@ -572,6 +574,7 @@ Here is the list of the other additional dictionary keys that can be passed at r
 - `nominal_power_of_deferrable_loads` for the nominal power for each deferrable load in Watts.
 
 - `operating_hours_of_each_deferrable_load` for the total number of hours that each deferrable load should operate.
+  - Alteratively, you can pass `operating_timesteps_of_each_deferrable_load` to set the total number of timesteps for each deferrable load. *(better parameter to use for setting under 1 hr)*
 
 - `start_timesteps_of_each_deferrable_load` for the timestep from which each deferrable load is allowed to operate (if you don't want the deferrable load to use the whole optimization timewindow).
 
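The README excerpt above documents the new `operating_timesteps_of_each_deferrable_load` runtime key, which `utils.treat_runtimeparams` copies into both `passed_data` and `optim_conf`. A hypothetical runtime call; the host, port, endpoint name, and values are assumptions for illustration:

```python
# Hypothetical runtime call (assumed endpoint and values): request 6
# half-hour timesteps for the first deferrable load and 2 for the second,
# instead of specifying whole operating hours.
import requests

runtimeparams = {"operating_timesteps_of_each_deferrable_load": [6, 2]}
requests.post(
    "http://localhost:5000/action/dayahead-optim",  # assumed local endpoint
    json=runtimeparams,
)
```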
emhass-0.12.1.dist-info/RECORD → emhass-0.12.3.dist-info/RECORD CHANGED
@@ -1,16 +1,19 @@
 emhass/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-emhass/command_line.py,sha256=XCqeXWJR2YvsCjN6tXg5LGc_ucKIHQqbT2SYQqw6DKA,70739
-emhass/forecast.py,sha256=_Gc8k6_8Nz87WHXKyUH6iXK956Z2TGzhL8L6t-tO_sk,63496
-emhass/machine_learning_forecaster.py,sha256=JErz50i_D59J5wXdbf_EUPb_FG45qRflv51iBA7ARXU,17417
+emhass/command_line.py,sha256=n9mQB-aQvxoyRWMj3zgafiITp-VJ8Owdj0q9jiAgKL4,70875
+emhass/forecast.py,sha256=62jwTz2RMDWQXT959w23KW7jGEtwyW7Idkdy3of8c7o,64574
+emhass/machine_learning_forecaster.py,sha256=4DVROJ8Y60hgSXCOJ5cIKvYRbxHoZYNu0hfuBtEsyx0,16898
 emhass/machine_learning_regressor.py,sha256=yFwMvVEmlgDJUsHhBT-HpNE3j2TC24e8Gmbcn9MPfeU,10690
-emhass/optimization.py,sha256=izMgRJFEP_9LHvZeX6FM1lxWyDWX3Tq2hFvu8ZP9FN4,61457
-emhass/retrieve_hass.py,sha256=RIzRiGzo09TO6zfDKqIClnLzQ5n2YkV97pinXPPXWsQ,26135
-emhass/utils.py,sha256=esxcCoA38VhhugOHRvu69hJ8_V_zJkAqu0jUHW26rck,68969
-emhass/web_server.py,sha256=QsqT51AdlAgNCG3NV1zbm4YkBSq_0BaC3cIEzPeZvl8,28023
+emhass/optimization.py,sha256=DVUEtYrXFYNUIGpOPFX87TvpM1yqlmxjpERzZ7TNUIc,64680
+emhass/retrieve_hass.py,sha256=t51D1D_HjpV6h4IA0xitV5wQ39Wr3Lh2A-bJPEWdZJc,27282
+emhass/utils.py,sha256=7kuQfEQrYFh78Q5M2bQB0_x_4F-MvZ69v1c8JNFO1eU,70181
+emhass/web_server.py,sha256=2N0nGVKYUyKB_GpRUOBrYj6ksAPhXuV-yc8BNUf91_8,29090
 emhass/data/associations.csv,sha256=IpEZIIWYdFjkRoC5xa1pRHjwnVs_VH8G8ogbGFxLfGI,3679
-emhass/data/cec_inverters.pbz2,sha256=tK8FvAUDW0uYez8EPttdCJwHhpPofclYV6GhhNZL0Pk,168272
-emhass/data/cec_modules.pbz2,sha256=8vEaysgYffXg3KUl8XSF36Mdywzi3LpEtUN_qenjO9s,1655747
+emhass/data/cec_inverters.pbz2,sha256=P1JkVuAF1VKD8HmA9Y-Pvb45dv7E-bIwAVkGXDftZLQ,189425
+emhass/data/cec_modules.pbz2,sha256=h14aS6aC1x1NKsCeTctsVZdFgAl5WiBZFAdnFZR_vE4,1885712
 emhass/data/config_defaults.json,sha256=-mQHahDv6Z5wYgClOs4VVr5KVCP51olb3f2mEj3Beic,2777
+emhass/data/emhass_inverters.csv,sha256=V5yN2B6TQpgT7ob4vQDvFW-Lzt8g3l5MODSDdlMOl9I,991
+emhass/data/emhass_modules.csv,sha256=IX2QfcyL8X5rpu9GYBBxBeeflZFckHAP8_L2bb602Yk,1056
+emhass/img/emhass_icon.png,sha256=Kyx6hXQ1huJLHAq2CaBfjYXR25H9j99PSWHI0lShkaQ,19030
 emhass/static/advanced.html,sha256=gAhsd14elDwh1Ts4lf9wn_ZkczzzObq5qOimi_la3Ic,2067
 emhass/static/basic.html,sha256=ro2WwWgJyoUhqx_nJFzKCEG8FA8863vSHLmrjGYcEgs,677
 emhass/static/configuration_list.html,sha256=i4v83RVduWjdjkjPhA74e-j8NSUpFzqMGU3ixOaJLfI,1740
@@ -24,9 +27,8 @@ emhass/static/img/feather-sprite.svg,sha256=VHjMJQg88wXa9CaeYrKGhNtyK0xdd47zCqwS
 emhass/templates/configuration.html,sha256=M-_L__juYzcdGDaryGrz6LG2mguW2f1Sx6k01YfG7Dc,2885
 emhass/templates/index.html,sha256=1V44c0yyliu_z8inl0K-zmmmkhQumH3Bqk8Jj1YJPzY,3076
 emhass/templates/template.html,sha256=TkGgMecQEbFUZA4ymPwMUzNjKHsENvCgroUWbPt7G4Y,158
-emhass-0.12.1.dist-info/LICENSE,sha256=1X3-S1yvOCBDBeox1aK3dq00m7dA8NDtcPrpKPISzbE,1077
-emhass-0.12.1.dist-info/METADATA,sha256=FMOeeklV9inxmpy26IwnnZNfZkJiyR0rhUR0J7YgPmY,49387
-emhass-0.12.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-emhass-0.12.1.dist-info/entry_points.txt,sha256=6Bp1NFOGNv_fSTxYl1ke3K3h3aqAcBxI-bgq5yq-i1M,52
-emhass-0.12.1.dist-info/top_level.txt,sha256=L7fIX4awfmxQbAePtSdVg2e6x_HhghfReHfsKSpKr9I,7
-emhass-0.12.1.dist-info/RECORD,,
+emhass-0.12.3.dist-info/METADATA,sha256=Z-dKp0aCJECTLYXCf29ocXI69R6cWAwXNr2_wAd_T54,49664
+emhass-0.12.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+emhass-0.12.3.dist-info/entry_points.txt,sha256=6Bp1NFOGNv_fSTxYl1ke3K3h3aqAcBxI-bgq5yq-i1M,52
+emhass-0.12.3.dist-info/licenses/LICENSE,sha256=1X3-S1yvOCBDBeox1aK3dq00m7dA8NDtcPrpKPISzbE,1077
+emhass-0.12.3.dist-info/RECORD,,
emhass-0.12.1.dist-info/WHEEL → emhass-0.12.3.dist-info/WHEEL CHANGED
@@ -1,5 +1,4 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.6.0)
+Generator: hatchling 1.27.0
 Root-Is-Purelib: true
 Tag: py3-none-any
-
emhass-0.12.1.dist-info/top_level.txt DELETED
@@ -1 +0,0 @@
-emhass