emhass 0.12.2__py3-none-any.whl → 0.12.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emhass/command_line.py +13 -7
- emhass/data/cec_inverters.pbz2 +0 -0
- emhass/data/cec_modules.pbz2 +0 -0
- emhass/data/emhass_inverters.csv +8 -0
- emhass/data/emhass_modules.csv +6 -0
- emhass/forecast.py +123 -95
- emhass/img/emhass_icon.png +0 -0
- emhass/machine_learning_forecaster.py +41 -49
- emhass/optimization.py +88 -24
- emhass/retrieve_hass.py +29 -1
- emhass/utils.py +54 -28
- emhass/web_server.py +76 -29
- {emhass-0.12.2.dist-info → emhass-0.12.4.dist-info}/METADATA +34 -31
- {emhass-0.12.2.dist-info → emhass-0.12.4.dist-info}/RECORD +17 -15
- {emhass-0.12.2.dist-info → emhass-0.12.4.dist-info}/WHEEL +1 -2
- emhass-0.12.2.dist-info/top_level.txt +0 -1
- {emhass-0.12.2.dist-info → emhass-0.12.4.dist-info}/entry_points.txt +0 -0
- {emhass-0.12.2.dist-info → emhass-0.12.4.dist-info/licenses}/LICENSE +0 -0
emhass/command_line.py
CHANGED
@@ -11,7 +11,6 @@ import pickle
 import re
 import time
 from datetime import datetime, timezone
-from distutils.util import strtobool
 from importlib.metadata import version
 from typing import Optional, Tuple
 
@@ -646,9 +645,12 @@ def naive_mpc_optim(
     prediction_horizon = input_data_dict["params"]["passed_data"]["prediction_horizon"]
     soc_init = input_data_dict["params"]["passed_data"]["soc_init"]
     soc_final = input_data_dict["params"]["passed_data"]["soc_final"]
-    def_total_hours = input_data_dict["params"]["optim_conf"][
-        "operating_hours_of_each_deferrable_load"
-    ]
+    def_total_hours = input_data_dict["params"]["optim_conf"].get(
+        "operating_hours_of_each_deferrable_load", None
+    )
+    def_total_timestep = input_data_dict["params"]["optim_conf"].get(
+        "operating_timesteps_of_each_deferrable_load", None
+    )
     def_start_timestep = input_data_dict["params"]["optim_conf"][
         "start_timesteps_of_each_deferrable_load"
     ]
@@ -663,6 +665,7 @@ def naive_mpc_optim(
         soc_init,
         soc_final,
         def_total_hours,
+        def_total_timestep,
         def_start_timestep,
         def_end_timestep,
     )
@@ -1515,8 +1518,8 @@ def main():
     )
     parser.add_argument(
         "--log2file",
-        type=strtobool,
-        default="False",
+        type=bool,
+        default=False,
         help="Define if we should log to a file or not",
     )
     parser.add_argument(
@@ -1532,7 +1535,10 @@ def main():
         help="Pass runtime optimization parameters as dictionnary",
     )
     parser.add_argument(
-        "--debug",
+        "--debug",
+        type=bool,
+        default=False,
+        help="Use True for testing purposes",
     )
     args = parser.parse_args()
 
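A side note on the `type=bool` arguments introduced above (an observation about Python's argparse, not part of the diff): argparse calls `bool(...)` on the raw command-line string, and any non-empty string is truthy, so these flags only behave as expected when left at their defaults. A minimal sketch:

```python
import argparse

parser = argparse.ArgumentParser()
# argparse applies bool("...") to the raw string: any non-empty value is truthy
parser.add_argument("--log2file", type=bool, default=False)

print(parser.parse_args([]).log2file)                       # False (the default)
print(parser.parse_args(["--log2file", "False"]).log2file)  # True, because bool("False") is True
```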
emhass/data/cec_inverters.pbz2
CHANGED
Binary file
emhass/data/cec_modules.pbz2
CHANGED
Binary file
emhass/data/emhass_inverters.csv
ADDED
@@ -0,0 +1,8 @@
+Name,Vac,Pso,Paco,Pdco,Vdco,C0,C1,C2,C3,Pnt,Vdcmax,Idcmax,Mppt_low,Mppt_high,CEC_Date,CEC_hybrid,CEC_Type
+Units,V,W,W,W,V,1/W,1/V,1/V,1/V,W,V,A,V,V,,,
+[0],inv_snl_ac_voltage,inv_snl_pso,inv_snl_paco,inv_snl_pdco,inv_snl_vdco,inv_snl_c0,inv_snl_c1,inv_snl_c2,inv_snl_c3,inv_snl_pnt,inv_snl_vdcmax,inv_snl_idcmax,inv_snl_mppt_low,inv_snl_mppt_hi,inv_cec_date,inv_cec_hybrid,inv_cec_type
+Sungrow: SH25T,400,250,27500,26000,600,-0.0000828,-0.000759,-0.0001722,-0.0000414,25,1100,80,200,950,01/01/2025,Y,Hybrid
+Sungrow: SH20T,400,200,22000,21000,600,-0.00007584,-0.0007245,-0.0001587,-0.00003834,25,1100,80,200,950,01/01/2025,Y,Hybrid
+Sungrow: SH15T,400,150,16500,15700,600,-0.000069,-0.00069,-0.000138,-0.0000345,25,1100,80,200,950,01/01/2025,Y,Hybrid
+Sungrow: SH10RT,400,100,11000,10467,600,-0.00005892,-0.0006555,-0.0001173,-0.00002934,25,1100,80,200,950,01/01/2025,Y,Hybrid
+Sungrow: SH10RS,400,100,10000,9500,600,-0.000065,-0.00075,-0.00013,-0.000032,25,1000,25,250,850,01/01/2025,N,Grid-Tie
emhass/data/emhass_modules.csv
ADDED
@@ -0,0 +1,6 @@
+Name,Manufacturer,Technology,Bifacial,STC,PTC,A_c,Length,Width,N_s,I_sc_ref,V_oc_ref,I_mp_ref,V_mp_ref,alpha_sc,beta_oc,T_NOCT,a_ref,I_L_ref,I_o_ref,R_s,R_sh_ref,Adjust,gamma_pmp,BIPV,Version,Date
+Units,,,,,,m2,m,m,,A,V,A,V,A/K,V/K,C,V,A,A,Ohm,Ohm,%,%/K,,,
+[0],lib_manufacturer,cec_material,lib_is_bifacial,,,cec_area,lib_length,lib_width,cec_n_s,cec_i_sc_ref,cec_v_oc_ref,cec_i_mp_ref,cec_v_mp_ref,cec_alpha_sc,cec_beta_oc,cec_t_noct,cec_a_ref,cec_i_l_ref,cec_i_o_ref,cec_r_s,cec_r_sh_ref,cec_adjust,cec_gamma_pmp,,,
+Jinko Solar JKM475N-60HL4-V,Jinko Solar,Mono-c-Si,0,475.0,432.0,2.16,1.903,1.134,120,14.23,42.54,13.49,35.21,0.046,-0.17,45.0,1.3,14.23,2e-7,0.2,300.0,1,-0.003,N,2021.08.01,01/03/2021
+Hanwha Q CELLS Q.PEAK L-G5 300W,Hanwha Q CELLS,Mono-c-Si,N,300.0,273.0,1.67,1.67,1.0,72,9.57,39.58,9.01,33.29,0.0006,-0.33,45.0,1.2,9.58,1e-10,0.5,1500,1,-0.38,N,2019.08.01,01/03/2019
+Risen RSM40-8-390M,Risen,Mono-c-Si,0,390.0,355.0,1.82,1.755,1.038,80,13.98,40.70,13.19,31.8,0.048,-0.15,45.0,1.25,13.98,1.9e-7,0.38,445.0,1,-0.35,N,2024.01.01,01/04/2024
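Both new CSV files use the SAM-style table layout that pvlib consumes: a `Name` header row, then a `Units` row and a `[0]` field-mapping row before the data rows. A hedged sketch of reading such a file with pandas; skipping the two metadata rows is an assumption about how a consumer would load it, not something shown in the diff:

```python
import pandas as pd

# Rows 1 and 2 after the header are the Units and [0] mapping rows; drop them
# so only device records remain, indexed by the inverter name.
inverters = pd.read_csv("emhass_inverters.csv", skiprows=[1, 2], index_col="Name")
print(inverters.loc["Sungrow: SH25T", "Paco"])  # 27500 (rated AC power in W, per the data above)
```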
emhass/forecast.py
CHANGED
@@ -9,6 +9,8 @@ import os
 import pickle
 import pickle as cPickle
 from datetime import datetime, timedelta
+import re
+from itertools import zip_longest
 from typing import Optional
 
 import numpy as np
@@ -319,86 +321,94 @@ class Forecast(object):
             # If weather_forecast_cache, set request days as twice as long to avoid length issues (add a buffer)
             if self.params["passed_data"].get("weather_forecast_cache", False):
                 days_solcast = min((days_solcast * 2), 336)
-            int(response.status_code) ==
-            )
+            # Split `roof_id` into a list (support comma or space as separator)
+            roof_ids = re.split(r"[,\s]+", self.retrieve_hass_conf["solcast_rooftop_id"].strip())
+            # Summary list of data
+            total_data_list = [0] * len(self.forecast_dates)
+            # Iteration over individual `roof_id`
+            for roof_id in roof_ids:
+                url = (
+                    f"https://api.solcast.com.au/rooftop_sites/{roof_id}/forecasts?hours={days_solcast}"
+                )
+                response = get(url, headers=headers)
+                """import bz2  # Uncomment to save a serialized data for tests
+                import _pickle as cPickle
+                with bz2.BZ2File("data/test_response_solcast_get_method.pbz2", "w") as f:
+                    cPickle.dump(response, f)"""
+                # Verify the request passed
+                if int(response.status_code) == 200:
+                    data = response.json()
+                elif (
+                    int(response.status_code) == 402
+                    or int(response.status_code) == 429
+                ):
+                    self.logger.error(
+                        "Solcast error: May have exceeded your subscription limit."
+                    )
+                    return False
+                elif (
+                    int(response.status_code) >= 400
+                    or (int(response.status_code) >= 202 and int(response.status_code) <= 299)
+                ):
+                    self.logger.error(
+                        "Solcast error: There was a issue with the solcast request, check solcast API key and rooftop ID."
+                    )
+                    self.logger.error(
+                        "Solcast error: Check that your subscription is valid and your network can connect to Solcast."
+                    )
+                    return False
+                # Data processing for the current `roof_id`
+                data_list = []
+                for elm in data["forecasts"]:
+                    data_list.append(
+                        elm["pv_estimate"] * 1000
+                    )  # Converting kW to W
+                # Check if the retrieved data has the correct length
+                if len(data_list) < len(self.forecast_dates):
+                    self.logger.error(
+                        "Not enough data retrieved from Solcast service, try increasing the time step or use MPC."
+                    )
+                    return False
+                # Adding the data of the current `roof_id` to the total
+                total_data_list = [
+                    total + current
+                    for total, current in zip_longest(total_data_list, data_list, fillvalue=0)
+                ]
+            # If runtime weather_forecast_cache is true save forecast result to file as cache
+            if self.params["passed_data"].get(
+                "weather_forecast_cache", False
             ):
-                data_list = []
-                for elm in data["forecasts"]:
-                    data_list.append(
-                        elm["pv_estimate"] * 1000
-                    )  # Converting kW to W
-                # Check if the retrieved data has the correct length
-                if len(data_list) < len(self.forecast_dates):
-                    self.logger.error(
-                        "Not enough data retried from Solcast service, try increasing the time step or use MPC."
+                # Add x2 forecast periods for cached results. This adds a extra delta_forecast amount of days for a buffer
+                cached_forecast_dates = self.forecast_dates.union(
+                    pd.date_range(
+                        self.forecast_dates[-1],
+                        periods=(len(self.forecast_dates) + 1),
+                        freq=self.freq,
+                    )[1:]
                 )
+                cache_data_list = total_data_list[0 : len(cached_forecast_dates)]
+                cache_data_dict = {
+                    "ts": cached_forecast_dates,
+                    "yhat": cache_data_list,
+                }
+                data_cache = pd.DataFrame.from_dict(cache_data_dict)
+                data_cache.set_index("ts", inplace=True)
+                with open(w_forecast_cache_path, "wb") as file:
+                    cPickle.dump(data_cache, file)
+                if not os.path.isfile(w_forecast_cache_path):
+                    self.logger.warning(
+                        "Solcast forecast data could not be saved to file."
                     )
-                    "Solcast forecast data could not be saved to file."
-                )
-                else:
-                    self.logger.info(
-                        "Saved the Solcast results to cache, for later reference."
-                    )
-                # Trim request results to forecast_dates
-                data_list = data_list[0 : len(self.forecast_dates)]
-                data_dict = {"ts": self.forecast_dates, "yhat": data_list}
-                # Define DataFrame
-                data = pd.DataFrame.from_dict(data_dict)
-                # Define index
-                data.set_index("ts", inplace=True)
+                else:
+                    self.logger.info(
+                        "Saved the Solcast results to cache, for later reference."
+                    )
+            # Trim request results to forecast_dates
+            total_data_list = total_data_list[0 : len(self.forecast_dates)]
+            data_dict = {"ts": self.forecast_dates, "yhat": total_data_list}
+            # Define DataFrame
+            data = pd.DataFrame.from_dict(data_dict)
+            # Define index
+            data.set_index("ts", inplace=True)
             # Else, notify user to update cache
             else:
                 self.logger.error("Unable to obtain Solcast cache file.")
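The accumulation across rooftops above relies on `itertools.zip_longest` with `fillvalue=0`, so a rooftop that returns fewer periods than the running total still sums element-wise instead of truncating. A standalone sketch of that pattern:

```python
from itertools import zip_longest

total = [0] * 4
for roof_watts in ([100, 200, 300, 400], [50, 60, 70]):  # second rooftop one period short
    total = [t + w for t, w in zip_longest(total, roof_watts, fillvalue=0)]

print(total)  # [150, 260, 370, 400]
```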
@@ -906,33 +916,35 @@ class Forecast(object):
             # Upsampling
             # Use 'asfreq' to create empty slots, then interpolate
             resampled_data = data.resample(freq).asfreq()
-            resampled_data = resampled_data.interpolate(method=
+            resampled_data = resampled_data.interpolate(method="time")
         else:
             # No resampling needed
             resampled_data = data.copy()
         return resampled_data
-
+
     @staticmethod
     def get_typical_load_forecast(data, forecast_date):
         r"""
         Forecast the load profile for the next day based on historic data.
 
-        :param data: A DataFrame with a DateTimeIndex containing the historic load data.
+        :param data: A DataFrame with a DateTimeIndex containing the historic load data.
             Must include a 'load' column.
         :type data: pd.DataFrame
         :param forecast_date: The date for which the forecast will be generated.
         :type forecast_date: pd.Timestamp
-        :return: A Series with the forecasted load profile for the next day and a list of days used
+        :return: A Series with the forecasted load profile for the next day and a list of days used
            to calculate the forecast.
         :rtype: tuple (pd.Series, list)
         """
         # Ensure the 'load' column exists
-        if
+        if "load" not in data.columns:
             raise ValueError("Data must have a 'load' column.")
         # Filter historic data for the same month and day of the week
         month = forecast_date.month
         day_of_week = forecast_date.dayofweek
-        historic_data = data[
+        historic_data = data[
+            (data.index.month == month) & (data.index.dayofweek == day_of_week)
+        ]
         used_days = np.unique(historic_data.index.date)
         # Align all historic data to the forecast day
         aligned_data = []
@@ -940,7 +952,11 @@ class Forecast(object):
             daily_data = data[data.index.date == pd.Timestamp(day).date()]
             aligned_daily_data = daily_data.copy()
             aligned_daily_data.index = aligned_daily_data.index.map(
-                lambda x: x.replace(
+                lambda x: x.replace(
+                    year=forecast_date.year,
+                    month=forecast_date.month,
+                    day=forecast_date.day,
+                )
             )
             aligned_data.append(aligned_daily_data)
         # Combine all aligned historic data into a single DataFrame
@@ -948,7 +964,7 @@ class Forecast(object):
         # Compute the mean load for each timestamp
         forecast = combined_data.groupby(combined_data.index).mean()
         return forecast, used_days
-
+
     def get_load_forecast(
         self,
         days_min_load_forecast: Optional[int] = 3,
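The `typical` forecast above keeps only history from the same calendar month and weekday as the target date, re-dates each matching day onto the forecast day, and averages per timestamp. A small self-contained sketch of the same filter-and-average idea on synthetic data (column name and frequency are illustrative):

```python
import numpy as np
import pandas as pd

idx = pd.date_range("2024-01-01", periods=24 * 60, freq="h")  # 60 days of hourly samples
data = pd.DataFrame({"load": np.random.default_rng(0).uniform(100.0, 500.0, len(idx))}, index=idx)

forecast_date = pd.Timestamp("2024-02-05")  # a Monday in February
historic = data[
    (data.index.month == forecast_date.month)
    & (data.index.dayofweek == forecast_date.dayofweek)
]
# Average the matching days per time-of-day to get one typical daily profile
typical = historic.groupby(historic.index.time).mean()
print(typical.head())
```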
@@ -1041,14 +1057,18 @@ class Forecast(object):
         ):
             return False
         df = rh.df_final.copy()[[self.var_load_new]]
-        if
+        if (
+            method == "typical"
+        ):  # using typical statistical data from a household power consumption
             # Loading data from history file
             model_type = "load_clustering"
-            data_path = self.emhass_conf["data_path"] / str(
+            data_path = self.emhass_conf["data_path"] / str(
+                "data_train_" + model_type + ".pkl"
+            )
             with open(data_path, "rb") as fid:
                 data, _ = pickle.load(fid)
             # Resample the data if needed
-            current_freq = pd.Timedelta(
+            current_freq = pd.Timedelta("30min")
             if self.freq != current_freq:
                 data = Forecast.resample_data(data, self.freq, current_freq)
             # Generate forecast
@@ -1057,20 +1077,28 @@ class Forecast(object):
             forecast = pd.DataFrame()
             for date in dates_list:
                 forecast_date = pd.Timestamp(date)
-                data.columns = [
-                forecast_tmp, used_days = Forecast.get_typical_load_forecast(
+                data.columns = ["load"]
+                forecast_tmp, used_days = Forecast.get_typical_load_forecast(
+                    data, forecast_date
+                )
+                self.logger.debug(
+                    f"Using {len(used_days)} days of data to generate the forecast."
+                )
                 # Normalize the forecast
-                forecast_tmp =
+                forecast_tmp = (
+                    forecast_tmp * self.plant_conf["maximum_power_from_grid"] / 9000
+                )
                 data_list.extend(forecast_tmp.values.ravel().tolist())
                 if len(forecast) == 0:
                     forecast = forecast_tmp
                 else:
                     forecast = pd.concat([forecast, forecast_tmp], axis=0)
             forecast.index = forecast.index.tz_convert(self.time_zone)
-            forecast_out = forecast.loc[
+            forecast_out = forecast.loc[
+                forecast.index.intersection(self.forecast_dates)
+            ]
+            forecast_out.index.name = "ts"
+            forecast_out = forecast_out.rename(columns={"load": "yhat"})
         elif method == "naive":  # using a naive approach
             mask_forecast_out = (
                 df.index > days_list[-1] - self.optim_conf["delta_forecast_daily"]
emhass/img/emhass_icon.png
CHANGED
Binary file
emhass/machine_learning_forecaster.py
CHANGED
@@ -1,22 +1,22 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
-import logging
 import copy
+import logging
 import time
+import warnings
 from typing import Optional, Tuple
-import pandas as pd
-import numpy as np
 
-
-
-from
+import numpy as np
+import pandas as pd
+from skforecast.ForecasterAutoreg import ForecasterAutoreg
+from skforecast.model_selection import (
+    backtesting_forecaster,
+    bayesian_search_forecaster,
+)
+from sklearn.linear_model import ElasticNet, LinearRegression
 from sklearn.metrics import r2_score
-
-from skforecast.recursive import ForecasterRecursive
-from skforecast.model_selection import bayesian_search_forecaster, backtesting_forecaster, TimeSeriesFold
-
-import warnings
+from sklearn.neighbors import KNeighborsRegressor
 
 warnings.filterwarnings("ignore", category=DeprecationWarning)
 
@@ -169,10 +169,7 @@ class MLForecaster:
         )
         base_model = KNeighborsRegressor()
         # Define the forecaster object
-        self.forecaster = ForecasterRecursive(
-            regressor = base_model,
-            lags = self.num_lags
-        )
+        self.forecaster = ForecasterAutoreg(regressor=base_model, lags=self.num_lags)
         # Fit and time it
         self.logger.info("Training a " + self.sklearn_model + " model")
         start_time = time.time()
@@ -201,22 +198,18 @@ class MLForecaster:
         # Using backtesting tool to evaluate the model
         self.logger.info("Performing simple backtesting of fitted model")
         start_time = time.time()
-        cv = TimeSeriesFold(
-            steps = self.num_lags,
-            initial_train_size = None,
-            fixed_train_size = False,
-            gap = 0,
-            allow_incomplete_fold = True,
-            refit = False
-        )
         metric, predictions_backtest = backtesting_forecaster(
-            forecaster
-            y
-            exog
+            forecaster=self.forecaster,
+            y=self.data_train[self.var_model],
+            exog=self.data_train.drop(self.var_model, axis=1),
+            steps=self.num_lags,
+            initial_train_size=None,
+            allow_incomplete_fold=True,
+            gap=0,
+            metric=MLForecaster.neg_r2_score,
+            verbose=False,
+            refit=False,
+            show_progress=True,
         )
         self.logger.info(f"Elapsed backtesting time: {time.time() - start_time}")
         self.logger.info(f"Backtest R2 score: {-metric}")
@@ -356,25 +349,24 @@ class MLForecaster:
         # The optimization routine call
         self.logger.info("Bayesian hyperparameter optimization with backtesting")
         start_time = time.time()
-            return_best = True
+        self.optimize_results, self.optimize_results_object = (
+            bayesian_search_forecaster(
+                forecaster=self.forecaster,
+                y=self.data_train[self.var_model],
+                exog=self.data_train.drop(self.var_model, axis=1),
+                search_space=search_space,
+                metric=MLForecaster.neg_r2_score,
+                n_trials=10,
+                random_state=123,
+                steps=num_lags,
+                initial_train_size=len(self.data_exo.loc[: self.date_train]),
+                return_best=True,
+                fixed_train_size=True,
+                gap=0,
+                allow_incomplete_fold=True,
+                skip_folds=None,
+                refit=refit,
+            )
         )
         self.logger.info(f"Elapsed time: {time.time() - start_time}")
         self.is_tuned = True
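The rewritten imports and calls above target the pre-0.14 skforecast API (`ForecasterAutoreg` instead of `ForecasterRecursive`, fold arguments passed directly to `backtesting_forecaster` rather than via `TimeSeriesFold`). A minimal fit/predict sketch against that older API, assuming a skforecast version in the `>=0.9.0`, pre-0.14 range pinned in the METADATA below:

```python
import numpy as np
import pandas as pd
from skforecast.ForecasterAutoreg import ForecasterAutoreg
from sklearn.neighbors import KNeighborsRegressor

# A toy target series with a fixed-frequency DatetimeIndex, similar to what
# MLForecaster builds from Home Assistant sensor history
y = pd.Series(
    np.sin(np.linspace(0.0, 20.0, 200)),
    index=pd.date_range("2024-01-01", periods=200, freq="30min"),
)
forecaster = ForecasterAutoreg(regressor=KNeighborsRegressor(), lags=48)
forecaster.fit(y=y)
print(forecaster.predict(steps=10))  # the next 10 half-hour steps
```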
emhass/optimization.py
CHANGED
@@ -4,7 +4,6 @@
 import bz2
 import copy
 import logging
-import pathlib
 import pickle as cPickle
 from math import ceil
 from typing import Optional, Tuple
@@ -85,7 +84,7 @@ class Optimization:
         self.var_load = self.retrieve_hass_conf["sensor_power_load_no_var_loads"]
         self.var_load_new = self.var_load + "_positive"
         self.costfun = costfun
-
+        self.emhass_conf = emhass_conf
         self.logger = logger
         self.var_load_cost = var_load_cost
         self.var_prod_price = var_prod_price
@@ -120,6 +119,7 @@ class Optimization:
         soc_init: Optional[float] = None,
         soc_final: Optional[float] = None,
         def_total_hours: Optional[list] = None,
+        def_total_timestep: Optional[list] = None,
         def_start_timestep: Optional[list] = None,
         def_end_timestep: Optional[list] = None,
         debug: Optional[bool] = False,
@@ -152,6 +152,9 @@ class Optimization:
         :param def_total_hours: The functioning hours for this iteration for each deferrable load. \
             (For continuous deferrable loads: functioning hours at nominal power)
         :type def_total_hours: list
+        :param def_total_timestep: The functioning timesteps for this iteration for each deferrable load. \
+            (For continuous deferrable loads: functioning timesteps at nominal power)
+        :type def_total_timestep: list
         :param def_start_timestep: The timestep as from which each deferrable load is allowed to operate.
         :type def_start_timestep: list
         :param def_end_timestep: The timestep before which each deferrable load should operate.
@@ -173,8 +176,13 @@ class Optimization:
             soc_final = soc_init
         else:
             soc_final = self.plant_conf["battery_target_state_of_charge"]
-
+
+        # If def_total_timestep os set, bypass def_total_hours
+        if def_total_timestep is not None:
+            def_total_hours = [0 if x != 0 else x for x in def_total_hours]
+        elif def_total_hours is None:
             def_total_hours = self.optim_conf["operating_hours_of_each_deferrable_load"]
+
         if def_start_timestep is None:
             def_start_timestep = self.optim_conf[
                 "start_timesteps_of_each_deferrable_load"
@@ -381,7 +389,7 @@ class Optimization:
                     * self.timeStep
                     * (
                         self.optim_conf["weight_battery_discharge"] * P_sto_pos[i]
-
+                        - self.optim_conf["weight_battery_charge"] * P_sto_neg[i]
                     )
                     for i in set_I
                 )
@@ -462,7 +470,8 @@ class Optimization:
             for i in range(len(self.plant_conf["pv_inverter_model"])):
                 if type(self.plant_conf["pv_inverter_model"][i]) == str:
                     cec_inverters = bz2.BZ2File(
-
+                        self.emhass_conf["root_path"] / "data" / "cec_inverters.pbz2",
+                        "rb",
                     )
                     cec_inverters = cPickle.load(cec_inverters)
                     inverter = cec_inverters[self.plant_conf["pv_inverter_model"][i]]
@@ -472,7 +481,7 @@ class Optimization:
             else:
                 if type(self.plant_conf["pv_inverter_model"][i]) == str:
                     cec_inverters = bz2.BZ2File(
-
+                        self.emhass_conf["root_path"] / "data" / "cec_inverters.pbz2", "rb"
                     )
                     cec_inverters = cPickle.load(cec_inverters)
                     inverter = cec_inverters[self.plant_conf["pv_inverter_model"]]
@@ -699,8 +708,7 @@ class Optimization:
                     predicted_temps[k] = predicted_temp
 
                 else:
-                    if
-                    # Total time of deferrable load
+                    if def_total_timestep and def_total_timestep[k] > 0:
                         constraints.update(
                             {
                                 "constraint_defload{}_energy".format(k): plp.LpConstraint(
@@ -708,13 +716,33 @@ class Optimization:
                                     P_deferrable[k][i] * self.timeStep for i in set_I
                                 ),
                                 sense=plp.LpConstraintEQ,
-                                rhs=
+                                rhs=(self.timeStep * def_total_timestep[k])
                                 * self.optim_conf["nominal_power_of_deferrable_loads"][
                                     k
                                 ],
                             )
                         }
                     )
+                    else:
+                        if def_total_hours[k] > 0:
+                            # Total time of deferrable load
+                            constraints.update(
+                                {
+                                    "constraint_defload{}_energy".format(
+                                        k
+                                    ): plp.LpConstraint(
+                                        e=plp.lpSum(
+                                            P_deferrable[k][i] * self.timeStep
+                                            for i in set_I
+                                        ),
+                                        sense=plp.LpConstraintEQ,
+                                        rhs=def_total_hours[k]
+                                        * self.optim_conf[
+                                            "nominal_power_of_deferrable_loads"
+                                        ][k],
+                                    )
+                                }
+                            )
 
                 # Ensure deferrable loads consume energy between def_start_timestep & def_end_timestep
                 self.logger.debug(
@@ -722,12 +750,23 @@ class Optimization:
                         k, def_start_timestep[k], def_end_timestep[k]
                     )
                 )
-
+                if def_total_timestep and def_total_timestep[k] > 0:
+                    def_start, def_end, warning = Optimization.validate_def_timewindow(
+                        def_start_timestep[k],
+                        def_end_timestep[k],
+                        ceil(
+                            (60 / ((self.freq.seconds / 60) * def_total_timestep[k]))
+                            / self.timeStep
+                        ),
+                        n,
+                    )
+                else:
+                    def_start, def_end, warning = Optimization.validate_def_timewindow(
+                        def_start_timestep[k],
+                        def_end_timestep[k],
+                        ceil(def_total_hours[k] / self.timeStep),
+                        n,
+                    )
                 if warning is not None:
                     self.logger.warning("Deferrable load {} : {}".format(k, warning))
                 self.logger.debug(
@@ -837,15 +876,35 @@ class Optimization:
                         }
                     )
                 # P_def_bin2 must be 1 for exactly the correct number of timesteps.
-
+                if def_total_timestep and def_total_timestep[k] > 0:
+                    constraints.update(
+                        {
+                            "constraint_pdef{}_start5".format(k): plp.LpConstraint(
+                                e=plp.lpSum(P_def_bin2[k][i] for i in set_I),
+                                sense=plp.LpConstraintEQ,
+                                rhs=(
+                                    (
+                                        60
+                                        / (
+                                            (self.freq.seconds / 60)
+                                            * def_total_timestep[k]
+                                        )
+                                    )
+                                    / self.timeStep
+                                ),
+                            )
+                        }
+                    )
+                else:
+                    constraints.update(
+                        {
+                            "constraint_pdef{}_start5".format(k): plp.LpConstraint(
+                                e=plp.lpSum(P_def_bin2[k][i] for i in set_I),
+                                sense=plp.LpConstraintEQ,
+                                rhs=def_total_hours[k] / self.timeStep,
+                            )
+                        }
+                    )
 
                 # Treat deferrable load as a semi-continuous variable
                 if self.optim_conf["treat_deferrable_load_as_semi_cont"][k]:
@@ -1336,6 +1395,7 @@ class Optimization:
         soc_init: Optional[float] = None,
         soc_final: Optional[float] = None,
         def_total_hours: Optional[list] = None,
+        def_total_timestep: Optional[list] = None,
         def_start_timestep: Optional[list] = None,
         def_end_timestep: Optional[list] = None,
     ) -> pd.DataFrame:
@@ -1362,6 +1422,9 @@ class Optimization:
         :param soc_final: The final battery SOC for the optimization. This parameter \
             is optional, if not given soc_init = soc_final = soc_target from the configuration file.
         :type soc_final:
+        :param def_total_timestep: The functioning timesteps for this iteration for each deferrable load. \
+            (For continuous deferrable loads: functioning timesteps at nominal power)
+        :type def_total_timestep: list
         :param def_total_hours: The functioning hours for this iteration for each deferrable load. \
             (For continuous deferrable loads: functioning hours at nominal power)
         :type def_total_hours: list
@@ -1395,6 +1458,7 @@ class Optimization:
             soc_init=soc_init,
             soc_final=soc_final,
             def_total_hours=def_total_hours,
+            def_total_timestep=def_total_timestep,
             def_start_timestep=def_start_timestep,
             def_end_timestep=def_end_timestep,
         )
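Both constraint branches above pin the same physical quantity, the total energy each deferrable load must consume over the window: either operating hours times nominal power directly, or a timestep count converted through the timestep length. A back-of-the-envelope consistency check (values are illustrative, not from the diff):

```python
# 30-minute optimization timestep, expressed in hours (self.timeStep in the code)
timestep_h = 0.5
nominal_power_w = 2000.0  # nominal power of one deferrable load

# Hours-based constraint: 3 operating hours at nominal power
energy_from_hours = 3 * nominal_power_w                     # 6000.0 Wh
# Timestep-based constraint: 6 timesteps of 0.5 h each at nominal power
energy_from_timesteps = (timestep_h * 6) * nominal_power_w  # 6000.0 Wh

assert energy_from_hours == energy_from_timesteps
```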
emhass/retrieve_hass.py
CHANGED
@@ -80,6 +80,7 @@ class RetrieveHass:
         self.emhass_conf = emhass_conf
         self.logger = logger
         self.get_data_from_file = get_data_from_file
+        self.var_list = []
 
     def get_ha_config(self):
         """
@@ -286,6 +287,7 @@ class RetrieveHass:
                 + str(self.freq)
             )
             return False
+        self.var_list = var_list
         return True
 
     def prepare_data(
@@ -336,7 +338,21 @@ class RetrieveHass:
                 "sensor.power_photovoltaics and sensor.power_load_no_var_loads should not be the same"
             )
             return False
-
+        # Confirm var_replace_zero & var_interp contain only sensors contained in var_list
+        if isinstance(var_replace_zero, list) and all(
+            item in var_replace_zero for item in self.var_list
+        ):
+            pass
+        else:
+            var_replace_zero = []
+        if isinstance(var_interp, list) and all(
+            item in var_interp for item in self.var_list
+        ):
+            pass
+        else:
+            var_interp = []
+        # Apply minimum values
+        if set_zero_min:
             self.df_final.clip(lower=0.0, inplace=True, axis=1)
             self.df_final.replace(to_replace=0.0, value=np.nan, inplace=True)
             new_var_replace_zero = []
@@ -347,6 +363,12 @@ class RetrieveHass:
                     new_string = string.replace(var_load, var_load + "_positive")
                     new_var_replace_zero.append(new_string)
             else:
+                self.logger.warning(
+                    "Unable to find all the sensors in sensor_replace_zero parameter"
+                )
+                self.logger.warning(
+                    "Confirm sure all sensors in sensor_replace_zero are sensor_power_photovoltaics and/or ensor_power_load_no_var_loads "
+                )
                 new_var_replace_zero = None
             if var_interp is not None:
                 for string in var_interp:
@@ -354,6 +376,12 @@ class RetrieveHass:
                     new_var_interp.append(new_string)
             else:
                 new_var_interp = None
+                self.logger.warning(
+                    "Unable to find all the sensors in sensor_linear_interp parameter"
+                )
+                self.logger.warning(
+                    "Confirm all sensors in sensor_linear_interp are sensor_power_photovoltaics and/or ensor_power_load_no_var_loads "
+                )
         # Treating NaN replacement: either by zeros or by linear interpolation
         if new_var_replace_zero is not None:
             self.df_final[new_var_replace_zero] = self.df_final[
emhass/utils.py
CHANGED
@@ -161,33 +161,52 @@ def update_params_with_ha_config(
         params = json.loads(params)
     # Update params
     currency_to_symbol = {
+        "EUR": "€",
+        "USD": "$",
+        "GBP": "£",
+        "YEN": "¥",
+        "JPY": "¥",
+        "AUD": "A$",
+        "CAD": "C$",
+        "CHF": "CHF",  # Swiss Franc has no special symbol
+        "CNY": "¥",
+        "INR": "₹",
+        "CZK": "Kč",
+        "BGN": "лв",
+        "DKK": "kr",
+        "HUF": "Ft",
+        "PLN": "zł",
+        "RON": "Leu",
+        "SEK": "kr",
+        "TRY": "Lira",
+        "VEF": "Bolivar",
+        "VND": "Dong",
+        "THB": "Baht",
+        "SGD": "S$",
+        "IDR": "Roepia",
+        "ZAR": "Rand",
         # Add more as needed
     }
-    if
-        ha_config[
+    if "currency" in ha_config.keys():
+        ha_config["currency"] = currency_to_symbol.get(ha_config["currency"], "Unknown")
     else:
-        ha_config[
-    if
-        ha_config[
+        ha_config["currency"] = "€"
+    if "unit_system" not in ha_config.keys():
+        ha_config["unit_system"] = {"temperature": "°C"}
+
+    number_of_deferrable_loads = params["optim_conf"]["number_of_deferrable_loads"]
+    if "num_def_loads" in params["passed_data"].keys():
+        number_of_deferrable_loads = params["passed_data"]["num_def_loads"]
+    if "number_of_deferrable_loads" in params["passed_data"].keys():
+        number_of_deferrable_loads = params["passed_data"]["number_of_deferrable_loads"]
+
+    for k in range(number_of_deferrable_loads):
+        params["passed_data"]["custom_predicted_temperature_id"][k].update(
+            {"unit_of_measurement": ha_config["unit_system"]["temperature"]}
         )
     updated_passed_dict = {
         "custom_cost_fun_id": {
-            "unit_of_measurement": ha_config[
+            "unit_of_measurement": ha_config["currency"],
         },
         "custom_unit_load_cost_id": {
             "unit_of_measurement": f"{ha_config['currency']}/kWh",
@@ -249,9 +268,9 @@ def treat_runtimeparams(
     params["plant_conf"].update(plant_conf)
 
     # Check defaults on HA retrieved config
-    default_currency_unit =
-    default_temperature_unit =
-
+    default_currency_unit = "€"
+    default_temperature_unit = "°C"
+
     # Some default data needed
     custom_deferrable_forecast_id = []
     custom_predicted_temperature_id = []
@@ -462,10 +481,17 @@ def treat_runtimeparams(
     else:
         soc_final = runtimeparams["soc_final"]
     params["passed_data"]["soc_final"] = soc_final
-
+    if "operating_timesteps_of_each_deferrable_load" in runtimeparams.keys():
+        params["passed_data"]["operating_timesteps_of_each_deferrable_load"] = (
+            runtimeparams["operating_timesteps_of_each_deferrable_load"]
+        )
+        params["optim_conf"]["operating_timesteps_of_each_deferrable_load"] = (
+            runtimeparams["operating_timesteps_of_each_deferrable_load"]
+        )
+    if "operating_hours_of_each_deferrable_load" in params["optim_conf"].keys():
+        params["passed_data"]["operating_hours_of_each_deferrable_load"] = (
+            params["optim_conf"]["operating_hours_of_each_deferrable_load"]
+        )
     params["passed_data"]["start_timesteps_of_each_deferrable_load"] = params[
         "optim_conf"
     ].get("start_timesteps_of_each_deferrable_load", None)
emhass/web_server.py
CHANGED
@@ -8,9 +8,9 @@ import os
 import pickle
 import re
 import threading
-from distutils.util import strtobool
 from importlib.metadata import PackageNotFoundError, version
 from pathlib import Path
+from typing import Optional
 
 import yaml
 from flask import Flask, make_response, request
@@ -45,7 +45,26 @@ from emhass.utils import (
 
 # Define the Flask instance
 app = Flask(__name__)
+
 emhass_conf = {}
+entity_path = Path
+params_secrets = {}
+continual_publish_thread = []
+injection_dict = {}
+
+
+def create_app(settings_override=None):
+    """
+    Create a Flask application.
+    :param settings_override: Override settings
+    :return: Flask app
+    """
+    global app
+    gunicorn_logger = logging.getLogger("gunicorn.error")
+    app.logger.handlers = gunicorn_logger.handlers
+    app.logger.setLevel(logging.INFO)
+    main()
+    return app
 
 
 def checkFileLog(refString=None) -> bool:
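`create_app` follows Flask's application-factory convention, which lets a WSGI server import and call it to obtain the app (gunicorn supports the `module:factory()` form, e.g. `gunicorn "emhass.web_server:create_app()"`; whether the emhass docs recommend that exact invocation is an assumption). A stripped-down sketch of the pattern:

```python
import logging

from flask import Flask

app = Flask(__name__)


def create_app(settings_override=None):
    """Return the module-level Flask app, with its logger wired to gunicorn's."""
    gunicorn_logger = logging.getLogger("gunicorn.error")
    app.logger.handlers = gunicorn_logger.handlers
    app.logger.setLevel(logging.INFO)
    return app
```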
@@ -153,6 +172,10 @@ def configuration():
 
     """
    app.logger.info("serving configuration.html...")
+    # get params
+    if (emhass_conf["data_path"] / "params.pkl").exists():
+        with open(str(emhass_conf["data_path"] / "params.pkl"), "rb") as fid:
+            emhass_conf["config_path"], params = pickle.load(fid)
     # Load HTML template
     file_loader = PackageLoader("emhass", "templates")
     env = Environment(loader=file_loader)
@@ -328,7 +351,7 @@ def parameter_set():
     with open(str(emhass_conf["data_path"] / "params.pkl"), "wb") as fid:
         pickle.dump(
             (
-                config_path,
+                emhass_conf["config_path"],
                 build_params(emhass_conf, params_secrets, config, app.logger),
             ),
             fid,
@@ -349,6 +372,9 @@ def action_call(action_name):
     :type action_name: String
 
     """
+    global continual_publish_thread
+    global injection_dict
+
     # Setting up parameters
     # Params
     ActionStr = " >> Obtaining params: "
@@ -524,31 +550,18 @@ def action_call(action_name):
         return make_response(msg, 400)
 
 
-    parser.add_argument(
-        "--key",
-        type=str,
-        help="Your access key. If using EMHASS in standalone this should be a Long-Lived Access Token",
-    )
-    parser.add_argument(
-        "--no_response",
-        type=strtobool,
-        default="False",
-        help="This is set if json response errors occur",
-    )
-    args = parser.parse_args()
-
+def main(
+    args: Optional[dict] = None,
+):
+    global continual_publish_thread
+    global emhass_conf
+    global entity_path
+    global injection_dict
+    global app
     # Pre formatted config parameters
     config = {}
     # Secrets
-    params_secrets
+    global params_secrets
     # Built parameters (formatted config + secrets)
     params = None
 
@@ -601,11 +614,16 @@ if __name__ == "__main__":
     app.logger.setLevel(logging.DEBUG)
 
     ## Secrets
+    # Argument
     argument = {}
-
+    no_response = False
+    if args is not None:
+        if args.get("url", None):
+            argument["url"] = args["url"]
+        if args.get("key", None):
+            argument["key"] = args["key"]
+        if args.get("no_response", None):
+            no_response = args["no_response"]
     # Combine secrets from ENV, Arguments/ARG, Secrets file (secrets_emhass.yaml), options (options.json from addon configuration file) and/or Home Assistant Standalone API (if exist)
     emhass_conf, secrets = build_secrets(
         emhass_conf,
@@ -613,7 +631,7 @@ if __name__ == "__main__":
         argument,
         options_path,
         os.getenv("SECRETS_PATH", default="/app/secrets_emhass.yaml"),
-        bool(
+        bool(no_response),
     )
     params_secrets.update(secrets)
 
@@ -706,4 +724,33 @@ if __name__ == "__main__":
         app.logger.info("Using core emhass version: " + version("emhass"))
     except PackageNotFoundError:
         app.logger.info("Using development emhass version")
+
+    return server_ip, port
+
+
+if __name__ == "__main__":
+    # Parsing arguments
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--url",
+        type=str,
+        help="The URL to your Home Assistant instance, ex the external_url in your hass configuration",
+    )
+    parser.add_argument(
+        "--key",
+        type=str,
+        help="Your access key. If using EMHASS in standalone this should be a Long-Lived Access Token",
+    )
+    parser.add_argument(
+        "--no_response",
+        type=bool,
+        default=False,
+        help="This is set if json response errors occur",
+    )
+    args = parser.parse_args()
+
+    server_ip, port = main(vars(args))
+    os.environ["IP"] = str(server_ip)
+    os.environ["PORT"] = str(port)
+
     serve(app, host=server_ip, port=port, threads=8)
{emhass-0.12.2.dist-info → emhass-0.12.4.dist-info}/METADATA
CHANGED
@@ -1,46 +1,48 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: emhass
-Version: 0.12.2
+Version: 0.12.4
 Summary: An Energy Management System for Home Assistant
+Project-URL: Homepage, https://github.com/davidusb-geek/emhass
 Author-email: David HERNANDEZ <davidusb@gmail.com>
 License: MIT
-
-Keywords: energy,management,optimization
+License-File: LICENSE
+Keywords: energy,hass,management,optimization
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
-Classifier: Topic :: Software Development :: Build Tools
 Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Topic :: Software Development :: Build Tools
 Requires-Python: <3.12,>=3.10
-
-
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
+Requires-Dist: beautifulsoup4>=4.12.3
+Requires-Dist: flask>=3.1.0
+Requires-Dist: gunicorn>=23.0.0
+Requires-Dist: h5py>=3.12.1
+Requires-Dist: numpy<2.3.0,>=2.0.0
+Requires-Dist: pandas>=2.1.1
+Requires-Dist: plotly>=6.0.0rc0
+Requires-Dist: protobuf>=5.29.1
+Requires-Dist: pulp>=2.8.0
+Requires-Dist: pvlib>=0.10.3
+Requires-Dist: pytz>=2023.4
+Requires-Dist: pyyaml>=6.0.1
 Requires-Dist: requests>=2.25.1
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: tables<=3.9.1
-Requires-Dist: skforecast==0.14.0
-Requires-Dist: flask>=2.0.3
-Requires-Dist: waitress>=2.1.1
-Requires-Dist: plotly>=5.6.0
+Requires-Dist: scipy>=1.14.0
+Requires-Dist: skforecast>=0.9.0
+Requires-Dist: tables>=3.9.1
+Requires-Dist: waitress>=3.0.2
 Provides-Extra: docs
-Requires-Dist:
-Requires-Dist: sphinx
-Requires-Dist:
+Requires-Dist: myst-parser; extra == 'docs'
+Requires-Dist: sphinx; extra == 'docs'
+Requires-Dist: sphinx-rtd-theme; extra == 'docs'
 Provides-Extra: test
-Requires-Dist:
-Requires-Dist: pytest; extra ==
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
+Requires-Dist: coverage; extra == 'test'
+Requires-Dist: pytest; extra == 'test'
+Requires-Dist: requests-mock; extra == 'test'
+Requires-Dist: ruff; extra == 'test'
+Requires-Dist: snakeviz; extra == 'test'
+Requires-Dist: tabulate; extra == 'test'
+Description-Content-Type: text/markdown
 
 <div align="center">
 <br>
@@ -572,6 +574,7 @@ Here is the list of the other additional dictionary keys that can be passed at r
 - `nominal_power_of_deferrable_loads` for the nominal power for each deferrable load in Watts.
 
 - `operating_hours_of_each_deferrable_load` for the total number of hours that each deferrable load should operate.
+  - Alteratively, you can pass `operating_timesteps_of_each_deferrable_load` to set the total number of timesteps for each deferrable load. *(better parameter to use for setting under 1 hr)*
 
 - `start_timesteps_of_each_deferrable_load` for the timestep from which each deferrable load is allowed to operate (if you don't want the deferrable load to use the whole optimization timewindow).
 
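As a hedged illustration of the new runtime key (the endpoint name and payload shape follow the EMHASS REST convention of POSTing runtime parameters to an action URL; the concrete values are made up): at a 30-minute timestep, asking for 6 and 2 timesteps means 3 h and 1 h of operation for two deferrable loads.

```python
import requests

runtimeparams = {"operating_timesteps_of_each_deferrable_load": [6, 2]}
response = requests.post(
    "http://localhost:5000/action/dayahead-optim",
    json=runtimeparams,
)
print(response.status_code)
```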
{emhass-0.12.2.dist-info → emhass-0.12.4.dist-info}/RECORD
CHANGED
@@ -1,16 +1,19 @@
 emhass/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-emhass/command_line.py,sha256=
-emhass/forecast.py,sha256=
-emhass/machine_learning_forecaster.py,sha256=
+emhass/command_line.py,sha256=n9mQB-aQvxoyRWMj3zgafiITp-VJ8Owdj0q9jiAgKL4,70875
+emhass/forecast.py,sha256=62jwTz2RMDWQXT959w23KW7jGEtwyW7Idkdy3of8c7o,64574
+emhass/machine_learning_forecaster.py,sha256=4DVROJ8Y60hgSXCOJ5cIKvYRbxHoZYNu0hfuBtEsyx0,16898
 emhass/machine_learning_regressor.py,sha256=yFwMvVEmlgDJUsHhBT-HpNE3j2TC24e8Gmbcn9MPfeU,10690
-emhass/optimization.py,sha256=
-emhass/retrieve_hass.py,sha256=
-emhass/utils.py,sha256=
-emhass/web_server.py,sha256=
+emhass/optimization.py,sha256=DVUEtYrXFYNUIGpOPFX87TvpM1yqlmxjpERzZ7TNUIc,64680
+emhass/retrieve_hass.py,sha256=t51D1D_HjpV6h4IA0xitV5wQ39Wr3Lh2A-bJPEWdZJc,27282
+emhass/utils.py,sha256=7kuQfEQrYFh78Q5M2bQB0_x_4F-MvZ69v1c8JNFO1eU,70181
+emhass/web_server.py,sha256=qVFzU6ei_T4NhnPyhQ3BJyB_cWJz7VkpooPafZlqE98,29253
 emhass/data/associations.csv,sha256=IpEZIIWYdFjkRoC5xa1pRHjwnVs_VH8G8ogbGFxLfGI,3679
-emhass/data/cec_inverters.pbz2,sha256=
-emhass/data/cec_modules.pbz2,sha256=
+emhass/data/cec_inverters.pbz2,sha256=P1JkVuAF1VKD8HmA9Y-Pvb45dv7E-bIwAVkGXDftZLQ,189425
+emhass/data/cec_modules.pbz2,sha256=h14aS6aC1x1NKsCeTctsVZdFgAl5WiBZFAdnFZR_vE4,1885712
 emhass/data/config_defaults.json,sha256=-mQHahDv6Z5wYgClOs4VVr5KVCP51olb3f2mEj3Beic,2777
+emhass/data/emhass_inverters.csv,sha256=V5yN2B6TQpgT7ob4vQDvFW-Lzt8g3l5MODSDdlMOl9I,991
+emhass/data/emhass_modules.csv,sha256=IX2QfcyL8X5rpu9GYBBxBeeflZFckHAP8_L2bb602Yk,1056
+emhass/img/emhass_icon.png,sha256=Kyx6hXQ1huJLHAq2CaBfjYXR25H9j99PSWHI0lShkaQ,19030
 emhass/static/advanced.html,sha256=gAhsd14elDwh1Ts4lf9wn_ZkczzzObq5qOimi_la3Ic,2067
 emhass/static/basic.html,sha256=ro2WwWgJyoUhqx_nJFzKCEG8FA8863vSHLmrjGYcEgs,677
 emhass/static/configuration_list.html,sha256=i4v83RVduWjdjkjPhA74e-j8NSUpFzqMGU3ixOaJLfI,1740
@@ -24,9 +27,8 @@ emhass/static/img/feather-sprite.svg,sha256=VHjMJQg88wXa9CaeYrKGhNtyK0xdd47zCqwS
 emhass/templates/configuration.html,sha256=M-_L__juYzcdGDaryGrz6LG2mguW2f1Sx6k01YfG7Dc,2885
 emhass/templates/index.html,sha256=1V44c0yyliu_z8inl0K-zmmmkhQumH3Bqk8Jj1YJPzY,3076
 emhass/templates/template.html,sha256=TkGgMecQEbFUZA4ymPwMUzNjKHsENvCgroUWbPt7G4Y,158
-emhass-0.12.
-emhass-0.12.
-emhass-0.12.
-emhass-0.12.
-emhass-0.12.
-emhass-0.12.2.dist-info/RECORD,,
+emhass-0.12.4.dist-info/METADATA,sha256=QSe1zRBt_wjb-wp5ZRrlDmtyJtTBesZco0ARGslI-sk,49664
+emhass-0.12.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+emhass-0.12.4.dist-info/entry_points.txt,sha256=6Bp1NFOGNv_fSTxYl1ke3K3h3aqAcBxI-bgq5yq-i1M,52
+emhass-0.12.4.dist-info/licenses/LICENSE,sha256=1X3-S1yvOCBDBeox1aK3dq00m7dA8NDtcPrpKPISzbE,1077
+emhass-0.12.4.dist-info/RECORD,,
emhass-0.12.2.dist-info/top_level.txt
DELETED
@@ -1 +0,0 @@
-emhass
{emhass-0.12.2.dist-info → emhass-0.12.4.dist-info}/entry_points.txt
File without changes
{emhass-0.12.2.dist-info → emhass-0.12.4.dist-info/licenses}/LICENSE
File without changes