emhass 0.8.4__py3-none-any.whl → 0.8.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emhass/command_line.py +14 -12
- emhass/data/cec_inverters.pbz2 +0 -0
- emhass/data/cec_modules.pbz2 +0 -0
- emhass/forecast.py +71 -31
- emhass/optimization.py +4 -4
- emhass/retrieve_hass.py +3 -3
- emhass/static/script.js +3 -34
- emhass/static/style.css +74 -39
- emhass/templates/index.html +7 -7
- emhass/utils.py +21 -64
- emhass/web_server.py +8 -4
- {emhass-0.8.4.dist-info → emhass-0.8.6.dist-info}/METADATA +2 -4
- emhass-0.8.6.dist-info/RECORD +25 -0
- emhass-0.8.4.dist-info/RECORD +0 -23
- {emhass-0.8.4.dist-info → emhass-0.8.6.dist-info}/LICENSE +0 -0
- {emhass-0.8.4.dist-info → emhass-0.8.6.dist-info}/WHEEL +0 -0
- {emhass-0.8.4.dist-info → emhass-0.8.6.dist-info}/entry_points.txt +0 -0
- {emhass-0.8.4.dist-info → emhass-0.8.6.dist-info}/top_level.txt +0 -0
emhass/command_line.py
CHANGED
@@ -59,8 +59,8 @@ def set_input_data_dict(config_path: pathlib.Path, base_path: str, costfun: str,
  optim_conf, plant_conf, set_type, logger)
  # Define main objects
  rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
-
-
+ retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
+ params, base_path, logger, get_data_from_file=get_data_from_file)
  fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
  params, base_path, logger, get_data_from_file=get_data_from_file)
  opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf,
@@ -76,12 +76,12 @@ def set_input_data_dict(config_path: pathlib.Path, base_path: str, costfun: str,
  days_list = utils.get_days_list(retrieve_hass_conf['days_to_retrieve'])
  var_list = [retrieve_hass_conf['var_load'], retrieve_hass_conf['var_PV']]
  if not rh.get_data(days_list, var_list,
-
+ minimal_response=False, significant_changes_only=False):
  return False
  if not rh.prepare_data(retrieve_hass_conf['var_load'], load_negative = retrieve_hass_conf['load_negative'],
-
-
-
+ set_zero_min = retrieve_hass_conf['set_zero_min'],
+ var_replace_zero = retrieve_hass_conf['var_replace_zero'],
+ var_interp = retrieve_hass_conf['var_interp']):
  return False
  df_input_data = rh.df_final.copy()
  # What we don't need for this type of action
@@ -113,12 +113,12 @@ def set_input_data_dict(config_path: pathlib.Path, base_path: str, costfun: str,
  days_list = utils.get_days_list(1)
  var_list = [retrieve_hass_conf['var_load'], retrieve_hass_conf['var_PV']]
  if not rh.get_data(days_list, var_list,
-
+ minimal_response=False, significant_changes_only=False):
  return False
  if not rh.prepare_data(retrieve_hass_conf['var_load'], load_negative = retrieve_hass_conf['load_negative'],
-
-
-
+ set_zero_min = retrieve_hass_conf['set_zero_min'],
+ var_replace_zero = retrieve_hass_conf['var_replace_zero'],
+ var_interp = retrieve_hass_conf['var_interp']):
  return False
  df_input_data = rh.df_final.copy()
  # Get PV and load forecasts
@@ -201,9 +201,11 @@ def perfect_forecast_optim(input_data_dict: dict, logger: logging.Logger,
  # Load cost and prod price forecast
  df_input_data = input_data_dict['fcst'].get_load_cost_forecast(
  input_data_dict['df_input_data'],
- method=input_data_dict['fcst'].optim_conf['load_cost_forecast_method']
+ method=input_data_dict['fcst'].optim_conf['load_cost_forecast_method'],
+ list_and_perfect=True)
  df_input_data = input_data_dict['fcst'].get_prod_price_forecast(
- df_input_data, method=input_data_dict['fcst'].optim_conf['prod_price_forecast_method']
+ df_input_data, method=input_data_dict['fcst'].optim_conf['prod_price_forecast_method'],
+ list_and_perfect=True)
  opt_res = input_data_dict['opt'].perform_perfect_forecast_optim(df_input_data, input_data_dict['days_list'])
  # Save CSV file for analysis
  if save_data_to_file:
emhass/data/cec_inverters.pbz2
CHANGED
Binary file
emhass/data/cec_modules.pbz2
CHANGED
Binary file
emhass/forecast.py
CHANGED
@@ -7,6 +7,8 @@ import copy
  import logging
  import json
  from typing import Optional
+ import bz2
+ import pickle as cPickle
  import pandas as pd
  import numpy as np
  from datetime import datetime, timedelta
@@ -21,7 +23,7 @@ from pvlib.irradiance import disc

  from emhass.retrieve_hass import RetrieveHass
  from emhass.machine_learning_forecaster import MLForecaster
- from emhass.utils import get_days_list
+ from emhass.utils import get_days_list, get_root, set_df_index_freq


  class Forecast(object):
@@ -133,7 +135,7 @@ class Forecast(object):
  self.time_zone = self.retrieve_hass_conf['time_zone']
  self.method_ts_round = self.retrieve_hass_conf['method_ts_round']
  self.timeStep = self.freq.seconds/3600 # in hours
- self.time_delta = pd.to_timedelta(opt_time_delta, "hours")
+ self.time_delta = pd.to_timedelta(opt_time_delta, "hours")
  self.var_PV = self.retrieve_hass_conf['var_PV']
  self.var_load = self.retrieve_hass_conf['var_load']
  self.var_load_new = self.var_load+'_positive'
@@ -159,7 +161,7 @@ class Forecast(object):
  self.end_forecast = (self.start_forecast + self.optim_conf['delta_forecast']).replace(microsecond=0)
  self.forecast_dates = pd.date_range(start=self.start_forecast,
  end=self.end_forecast-self.freq,
- freq=self.freq).round(self.freq, ambiguous='infer', nonexistent=
+ freq=self.freq).round(self.freq, ambiguous='infer', nonexistent='shift_forward')
  if params is not None:
  if 'prediction_horizon' in list(self.params['passed_data'].keys()):
  if self.params['passed_data']['prediction_horizon'] is not None:
@@ -184,7 +186,7 @@ class Forecast(object):
  freq_scrap = pd.to_timedelta(60, "minutes") # The scrapping time step is 60min
  forecast_dates_scrap = pd.date_range(start=self.start_forecast,
  end=self.end_forecast-freq_scrap,
- freq=freq_scrap).round(freq_scrap, ambiguous='infer', nonexistent=
+ freq=freq_scrap).round(freq_scrap, ambiguous='infer', nonexistent='shift_forward')
  # Using the clearoutside webpage
  response = get("https://clearoutside.com/forecast/"+str(round(self.lat, 2))+"/"+str(round(self.lon, 2))+"?desktop=true")
  '''import bz2 # Uncomment to save a serialized data for tests
@@ -412,8 +414,10 @@ class Forecast(object):
  # Setting the main parameters of the PV plant
  location = Location(latitude=self.lat, longitude=self.lon)
  temp_params = TEMPERATURE_MODEL_PARAMETERS['sapm']['close_mount_glass_glass']
- cec_modules =
-
+ cec_modules = bz2.BZ2File(get_root(__file__, num_parent=2) / 'emhass/data/cec_modules.pbz2', "rb")
+ cec_modules = cPickle.load(cec_modules)
+ cec_inverters = bz2.BZ2File(get_root(__file__, num_parent=2) / 'emhass/data/cec_inverters.pbz2', "rb")
+ cec_inverters = cPickle.load(cec_inverters)
  if type(self.plant_conf['module_model']) == list:
  P_PV_forecast = pd.Series(0, index=df_weather.index)
  for i in range(len(self.plant_conf['module_model'])):
@@ -476,15 +480,16 @@ class Forecast(object):
  end_forecast_csv = (start_forecast_csv + self.optim_conf['delta_forecast']).replace(microsecond=0)
  forecast_dates_csv = pd.date_range(start=start_forecast_csv,
  end=end_forecast_csv+timedelta(days=timedelta_days)-self.freq,
- freq=self.freq).round(self.freq, ambiguous='infer', nonexistent=
+ freq=self.freq).round(self.freq, ambiguous='infer', nonexistent='shift_forward')
  if self.params is not None:
  if 'prediction_horizon' in list(self.params['passed_data'].keys()):
  if self.params['passed_data']['prediction_horizon'] is not None:
  forecast_dates_csv = forecast_dates_csv[0:self.params['passed_data']['prediction_horizon']]
  return forecast_dates_csv

- def
-
+ def get_forecast_out_from_csv_or_list(self, df_final: pd.DataFrame, forecast_dates_csv: pd.date_range,
+ csv_path: str, data_list: Optional[list] = None,
+ list_and_perfect: Optional[bool] = False) -> pd.DataFrame:
  r"""
  Get the forecast data as a DataFrame from a CSV file.

@@ -502,34 +507,67 @@ class Forecast(object):
  :rtype: pd.DataFrame

  """
- days_list = df_final.index.day.unique().tolist()
  if csv_path is None:
  data_dict = {'ts':forecast_dates_csv, 'yhat':data_list}
  df_csv = pd.DataFrame.from_dict(data_dict)
  df_csv.index = forecast_dates_csv
  df_csv.drop(['ts'], axis=1, inplace=True)
+ df_csv = set_df_index_freq(df_csv)
+ if list_and_perfect:
+ days_list = df_final.index.day.unique().tolist()
+ else:
+ days_list = df_csv.index.day.unique().tolist()
  else:
  load_csv_file_path = self.root + csv_path
  df_csv = pd.read_csv(load_csv_file_path, header=None, names=['ts', 'yhat'])
  df_csv.index = forecast_dates_csv
  df_csv.drop(['ts'], axis=1, inplace=True)
+ df_csv = set_df_index_freq(df_csv)
+ days_list = df_final.index.day.unique().tolist()
  forecast_out = pd.DataFrame()
  for day in days_list:
-
-
-
-
-
-
-
+ if csv_path is None:
+ if list_and_perfect:
+ df_tmp = copy.deepcopy(df_final)
+ else:
+ df_tmp = copy.deepcopy(df_csv)
+ else:
+ df_tmp = copy.deepcopy(df_final)
+ first_elm_index = [i for i, x in enumerate(df_tmp.index.day == day) if x][0]
+ last_elm_index = [i for i, x in enumerate(df_tmp.index.day == day) if x][-1]
+ fcst_index = pd.date_range(start=df_tmp.index[first_elm_index],
+ end=df_tmp.index[last_elm_index],
+ freq=df_tmp.index.freq)
+ first_hour = str(df_tmp.index[first_elm_index].hour)+":"+str(df_tmp.index[first_elm_index].minute)
+ last_hour = str(df_tmp.index[last_elm_index].hour)+":"+str(df_tmp.index[last_elm_index].minute)
  if len(forecast_out) == 0:
-
-
-
+ if csv_path is None:
+ if list_and_perfect:
+ forecast_out = pd.DataFrame(
+ df_csv.between_time(first_hour, last_hour).values,
+ index=fcst_index)
+ else:
+ forecast_out = pd.DataFrame(
+ df_csv.loc[fcst_index,:].between_time(first_hour, last_hour).values,
+ index=fcst_index)
+ else:
+ forecast_out = pd.DataFrame(
+ df_csv.between_time(first_hour, last_hour).values,
+ index=fcst_index)
  else:
-
-
-
+ if csv_path is None:
+ if list_and_perfect:
+ forecast_tp = pd.DataFrame(
+ df_csv.between_time(first_hour, last_hour).values,
+ index=fcst_index)
+ else:
+ forecast_tp = pd.DataFrame(
+ df_csv.loc[fcst_index,:].between_time(first_hour, last_hour).values,
+ index=fcst_index)
+ else:
+ forecast_tp = pd.DataFrame(
+ df_csv.between_time(first_hour, last_hour).values,
+ index=fcst_index)
  forecast_out = pd.concat([forecast_out, forecast_tp], axis=0)
  return forecast_out

@@ -664,7 +702,8 @@ class Forecast(object):
  return P_Load_forecast

  def get_load_cost_forecast(self, df_final: pd.DataFrame, method: Optional[str] = 'hp_hc_periods',
- csv_path: Optional[str] = "data_load_cost_forecast.csv"
+ csv_path: Optional[str] = "data_load_cost_forecast.csv",
+ list_and_perfect: Optional[bool] = False) -> pd.DataFrame:
  r"""
  Get the unit cost for the load consumption based on multiple tariff \
  periods. This is the cost of the energy from the utility in a vector \
@@ -694,7 +733,7 @@ class Forecast(object):
  df_final.loc[df_hp.index, self.var_load_cost] = self.optim_conf['load_cost_hp']
  elif method == 'csv':
  forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0)
- forecast_out = self.
+ forecast_out = self.get_forecast_out_from_csv_or_list(
  df_final, forecast_dates_csv, csv_path)
  df_final[self.var_load_cost] = forecast_out
  elif method == 'list': # reading a list of values
@@ -708,8 +747,8 @@ class Forecast(object):
  data_list = data_list[0:len(self.forecast_dates)]
  # Define the correct dates
  forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0)
- forecast_out = self.
- df_final, forecast_dates_csv, None, data_list=data_list)
+ forecast_out = self.get_forecast_out_from_csv_or_list(
+ df_final, forecast_dates_csv, None, data_list=data_list, list_and_perfect=list_and_perfect)
  # Fill the final DF
  df_final[self.var_load_cost] = forecast_out
  else:
@@ -718,7 +757,8 @@ class Forecast(object):
  return df_final

  def get_prod_price_forecast(self, df_final: pd.DataFrame, method: Optional[str] = 'constant',
-
+ csv_path: Optional[str] = "/data/data_prod_price_forecast.csv",
+ list_and_perfect: Optional[bool] = False) -> pd.DataFrame:
  r"""
  Get the unit power production price for the energy injected to the grid.\
  This is the price of the energy injected to the utility in a vector \
@@ -743,7 +783,7 @@ class Forecast(object):
  df_final[self.var_prod_price] = self.optim_conf['prod_sell_price']
  elif method == 'csv':
  forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0)
- forecast_out = self.
+ forecast_out = self.get_forecast_out_from_csv_or_list(df_final,
  forecast_dates_csv,
  csv_path)
  df_final[self.var_prod_price] = forecast_out
@@ -758,8 +798,8 @@ class Forecast(object):
  data_list = data_list[0:len(self.forecast_dates)]
  # Define the correct dates
  forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0)
- forecast_out = self.
- df_final, forecast_dates_csv, None, data_list=data_list)
+ forecast_out = self.get_forecast_out_from_csv_or_list(
+ df_final, forecast_dates_csv, None, data_list=data_list, list_and_perfect=list_and_perfect)
  # Fill the final DF
  df_final[self.var_prod_price] = forecast_out
  else:
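The new bz2/cPickle imports above replace the remote download of the CEC databases with the two .pbz2 files now bundled under emhass/data (hence the binary cec_modules.pbz2 and cec_inverters.pbz2 entries in this release). A minimal standalone sketch of that loading pattern, assuming the pickles hold the CEC module and inverter tables as pandas DataFrames; the path handling and column lookup below are illustrative, not copied from the package:

```python
import bz2
import pickle as cPickle
from pathlib import Path

# Illustrative path; in forecast.py the location is resolved with
# get_root(__file__, num_parent=2) relative to the installed package.
data_dir = Path("emhass") / "data"

# Each .pbz2 file is a bz2-compressed pickle of a CEC database table.
with bz2.BZ2File(data_dir / "cec_modules.pbz2", "rb") as fh:
    cec_modules = cPickle.load(fh)
with bz2.BZ2File(data_dir / "cec_inverters.pbz2", "rb") as fh:
    cec_inverters = cPickle.load(fh)

# Assuming DataFrames keyed by model name, a configured module or inverter
# could then be selected by column, e.g. cec_modules[plant_conf['module_model']].
print(type(cec_modules), type(cec_inverters))
```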
emhass/optimization.py
CHANGED
@@ -162,10 +162,10 @@ class Optimization:

  ## Add decision variables
  P_grid_neg = {(i):plp.LpVariable(cat='Continuous',
- lowBound=-self.plant_conf['
+ lowBound=-self.plant_conf['P_to_grid_max'], upBound=0,
  name="P_grid_neg{}".format(i)) for i in set_I}
  P_grid_pos = {(i):plp.LpVariable(cat='Continuous',
- lowBound=0, upBound=self.plant_conf['
+ lowBound=0, upBound=self.plant_conf['P_from_grid_max'],
  name="P_grid_pos{}".format(i)) for i in set_I}
  P_deferrable = []
  P_def_bin1 = []
@@ -267,13 +267,13 @@ class Optimization:
  # Avoid injecting and consuming from grid at the same time
  constraints.update({"constraint_pgridpos_{}".format(i) :
  plp.LpConstraint(
- e = P_grid_pos[i] - self.plant_conf['
+ e = P_grid_pos[i] - self.plant_conf['P_from_grid_max']*D[i],
  sense = plp.LpConstraintLE,
  rhs = 0)
  for i in set_I})
  constraints.update({"constraint_pgridneg_{}".format(i) :
  plp.LpConstraint(
- e = -P_grid_neg[i] - self.plant_conf['
+ e = -P_grid_neg[i] - self.plant_conf['P_to_grid_max']*(1-D[i]),
  sense = plp.LpConstraintLE,
  rhs = 0)
  for i in set_I})
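The renamed plant_conf keys (P_from_grid_max, P_to_grid_max) bound the grid import and export variables and drive the mutual-exclusion constraint shown above. A self-contained PuLP sketch of that pattern, with made-up limits and a toy horizon; variable and constraint shapes mirror the hunks, everything else is illustrative:

```python
import pulp as plp

# Illustrative limits; in emhass these come from plant_conf.
P_from_grid_max = 9000.0  # maximum power drawn from the grid (W)
P_to_grid_max = 5000.0    # maximum power injected into the grid (W)
set_I = range(4)          # toy optimization horizon

P_grid_neg = {i: plp.LpVariable(cat='Continuous', lowBound=-P_to_grid_max, upBound=0,
                                name="P_grid_neg{}".format(i)) for i in set_I}
P_grid_pos = {i: plp.LpVariable(cat='Continuous', lowBound=0, upBound=P_from_grid_max,
                                name="P_grid_pos{}".format(i)) for i in set_I}
D = {i: plp.LpVariable(cat='Binary', name="D{}".format(i)) for i in set_I}

opt_model = plp.LpProblem("grid_exchange_sketch", plp.LpMinimize)
for i in set_I:
    # D[i] == 1 allows importing only, D[i] == 0 allows exporting only,
    # so the plant never imports and exports in the same timestep.
    opt_model += P_grid_pos[i] - P_from_grid_max * D[i] <= 0
    opt_model += -P_grid_neg[i] - P_to_grid_max * (1 - D[i]) <= 0
```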
emhass/retrieve_hass.py
CHANGED
@@ -133,14 +133,14 @@ class RetrieveHass:
|
|
133
133
|
try: # Sometimes when there are connection problems we need to catch empty retrieved json
|
134
134
|
data = response.json()[0]
|
135
135
|
except IndexError:
|
136
|
-
if x
|
136
|
+
if x == 0:
|
137
137
|
self.logger.error("The retrieved JSON is empty, A sensor:" + var + " may have 0 days of history or passed sensor may not be correct")
|
138
138
|
else:
|
139
139
|
self.logger.error("The retrieved JSON is empty for day:"+ str(day) +", days_to_retrieve may be larger than the recorded history of sensor:" + var + " (check your recorder settings)")
|
140
140
|
return False
|
141
141
|
df_raw = pd.DataFrame.from_dict(data)
|
142
142
|
if len(df_raw) == 0:
|
143
|
-
if x
|
143
|
+
if x == 0:
|
144
144
|
self.logger.error("The retrieved Dataframe is empty, A sensor:" + var + " may have 0 days of history or passed sensor may not be correct")
|
145
145
|
else:
|
146
146
|
self.logger.error("Retrieved empty Dataframe for day:"+ str(day) +", days_to_retrieve may be larger than the recorded history of sensor:" + var + " (check your recorder settings)")
|
@@ -149,7 +149,7 @@ class RetrieveHass:
|
|
149
149
|
from_date = pd.to_datetime(df_raw['last_changed'], format="ISO8601").min()
|
150
150
|
to_date = pd.to_datetime(df_raw['last_changed'], format="ISO8601").max()
|
151
151
|
ts = pd.to_datetime(pd.date_range(start=from_date, end=to_date, freq=self.freq),
|
152
|
-
format='%Y-%d-%m %H:%M').round(self.freq, ambiguous='infer', nonexistent=
|
152
|
+
format='%Y-%d-%m %H:%M').round(self.freq, ambiguous='infer', nonexistent='shift_forward')
|
153
153
|
df_day = pd.DataFrame(index = ts)
|
154
154
|
# Caution with undefined string data: unknown, unavailable, etc.
|
155
155
|
df_tp = df_raw.copy()[['state']].replace(
|
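The recurring nonexistent='shift_forward' argument (also added in forecast.py and utils.py) keeps index rounding from raising when a rounded timestamp lands inside a DST "spring forward" gap. A small pandas illustration, with a date and timezone chosen only for the example:

```python
import pandas as pd

# 2021-03-28 01:50 exists in Europe/Paris, but rounding it to 30 minutes gives
# 02:00, which falls inside the 02:00-03:00 DST gap. With the default
# nonexistent='raise' this throws NonExistentTimeError; 'shift_forward'
# moves the result onto the next valid instant instead.
idx = pd.DatetimeIndex(["2021-03-28 01:50:00"]).tz_localize("Europe/Paris")
rounded = idx.round("30min", ambiguous="infer", nonexistent="shift_forward")
print(rounded)  # DatetimeIndex(['2021-03-28 03:00:00+02:00'], ...)
```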
emhass/static/script.js
CHANGED
@@ -1,8 +1,3 @@
- //before page load check for stylesheet
- document.onreadystatechange = async function() {
- checkStyleSheets()
- }
-
  //on page reload get saved data
  window.onload = async function () {

@@ -12,23 +7,6 @@ window.onload = async function () {
  document.getElementById("basicOrAdvanced").addEventListener("click", () => SwitchBasicOrAdvanced());
  };

- //check style sheet is loaded
- async function checkStyleSheets() {
- var styleHREF = getHTMLURL() + `static/style.css`
- var styles = document.styleSheets;
- for (var i = 0; i < styles.length; i++) {
- if (styles[i].href.match("style")["input"] == styleHREF) {
- return true
- }
- }
- //if could not find file
- var style = document.createElement("link");
- style.rel = "stylesheet";
- style.href = styleHREF;
- style.type = "text/css";
- document.getElementsByTagName("head")[0].appendChild(style);
- }
-
  //add listeners to buttons (based on page)
  function loadButtons(page) {
  switch (page) {
@@ -120,19 +98,10 @@ function SwitchBasicOrAdvanced() {
  }
  }

- //set current url
- function getHTMLURL() {
- var currentUrl
- if (window.location) {
- currentUrl = window.location.href; //get current url to append
- }
- else { currentUrl = "" }
- return currentUrl
- }

  //get html data from basic.html or advanced.html
  async function getHTMLData(htmlFile) {
- const response = await fetch(
+ const response = await fetch(`static/` + htmlFile);
  blob = await response.blob(); //get data blob
  htmlTemplateData = await new Response(blob).text(); //obtain html from blob
  return await htmlTemplateData;
@@ -148,7 +117,7 @@ async function formAction(action, page) {

  if (data !== 0) { //don't run if there is an error in the input (box/list) Json data
  showChangeStatus("loading", {}); // show loading div for status
- const response = await fetch(
+ const response = await fetch(`action/` + action, {
  //fetch data from webserver.py
  method: "POST",
  headers: {
@@ -206,7 +175,7 @@ async function showChangeStatus(status, logJson) {
  async function getTemplate() {
  //fetch data from webserver.py
  let htmlTemplateData = "";
- response = await fetch(
+ response = await fetch(`template/table-template`, {
  method: "GET",
  });
  blob = await response.blob(); //get data blob
emhass/static/style.css
CHANGED
@@ -550,16 +550,16 @@ https://github.com/feathericons/feather */
  stroke-linecap: round !important;
  stroke-linejoin: round !important;
  fill: none !important;
- filter: drop-shadow(
+ filter: drop-shadow(#282928 .2px .2px) !important;
  -webkit-text-size-adjust: none !important;
  -ms-text-size-adjust: none !important;

  }

- /* feather icons no background color */
+ /* feather icons no background color */
  #top-links a {
  background: none !important;
- }
+ }

  /* -------------- */

@@ -586,7 +586,8 @@ select {
  }


- .alert,
+ .alert,
+ .info {
  max-width: 50%;
  }

@@ -600,7 +601,7 @@ h2 {
  margin-bottom: .3em;
  }

- .table_div h4{
+ .table_div h4 {
  margin-top: .5em;
  }

@@ -683,17 +684,49 @@ button {

  th {
  padding: 5px 7.77px;
+ text-align: center;
  }

- .mystyle tr:nth-child(even)
+ .mystyle tr:nth-child(even) td,
+ th {
  background: #e1e1e1;
  }

- .mystyle tr:
- background:
+ .mystyle tr:nth-child(odd) td {
+ background: white;
+ }
+
+ .mystyle tr:hover td {
+ background-color: silver;
  cursor: pointer;
  }

+ th:last-child {
+ border-top-right-radius: 7px;
+ }
+
+ th:first-child {
+ border-top-left-radius: 7px;
+ }
+
+ tr:last-child td:first-child {
+ border-bottom-left-radius: 7px;
+ }
+
+ tr:last-child td:last-child {
+ border-bottom-right-radius: 7px;
+ }
+
+ tr:hover td:first-child {
+ border-top-left-radius: 7px;
+ border-bottom-left-radius: 7px;
+ }
+
+ tr:hover td:last-child {
+ border-top-right-radius: 7px;
+ border-bottom-right-radius: 7px;
+ }
+
  #top-links {
  display: flex;
  position: absolute;
@@ -795,14 +828,20 @@ th {
  }

  /* Basic and Advanced fade transitions */
- .TabSelection,
+ .TabSelection,
+ #advance,
+ #basic,
+ button,
+ select,
+ .info {
  animation-name: fadeInOpacity;
  animation-iteration-count: 1;
  animation-timing-function: ease-in-out;
  animation-duration: .3s;
  }

- .input-list,
+ .input-list,
+ .input-box {
  animation-name: fadeInOpacity;
  animation-iteration-count: 1;
  animation-timing-function: ease-in-out;
@@ -944,7 +983,8 @@ th {
  display: none !important;
  }

- .info,
+ .info,
+ .alert {
  max-width: 100%;
  }
  }
@@ -957,15 +997,17 @@ th {
  }

  img,
- figure,
+ figure,
+ svg.main-svg {
  -webkit-filter: invert(.82);
  filter: invert(.82);
  }
-
- figure,
-
-
-
+
+ figure,
+ svg.main-svg {
+ border-color: #181818;
+ border-style: solid;
+ border-width: 1px;
  }

  button,
@@ -1008,10 +1050,12 @@ th {
  .modebar-btn svg path {
  fill: #111 !important;
  }
+
  .modebar-btn svg {
- filter:invert(100%) sepia(64%) saturate(2%) hue-rotate(294deg) brightness(85%) contrast(93%) !important
+ filter: invert(100%) sepia(64%) saturate(2%) hue-rotate(294deg) brightness(85%) contrast(93%) !important
  }
-
+
+ .modebar-btn--logo svg {
  filter: None !important;
  /* filter: invert(100%) saturate(100%) brightness(87%) contrast(100%) !important */
  }
@@ -1024,37 +1068,28 @@ th {
  color: #e1e1e1;
  }

- tr
-
- background-color: #3d3d3d;
+ .mystyle tr {
+ background: none;
  }

- tr:nth-child(
-
- background
+ .mystyle tr:nth-child(even) td,
+ th {
+ background: #282928;
  }

- tr:nth-child(
-
- background-color: #181818;
+ .mystyle tr:nth-child(odd) td {
+ background: #111111;
  }

- tr:hover td
- th {
+ .mystyle tr:hover td {
  background-color: #3f3f3f;
  }

-
- th:last-child, th:first-child, td:first-child, td:last-child
- {
- border-radius: 7px
- }
-
- .modebar-group{
+ .modebar-group {
  background-color: #0000 !important;
  }

- .modebar-btn{
+ .modebar-btn {
  background: #3f3f3f;
  }

@@ -1076,4 +1111,4 @@ th {
  }


- }
+ }
emhass/templates/index.html
CHANGED
@@ -4,9 +4,9 @@
  <head>
  <title>EMHASS: Energy Management Optimization for Home Assistant</title>
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
- <link rel="stylesheet" type="text/css" href="
- <link rel="icon" type="image/x-icon" href="
- <script src="
+ <link rel="stylesheet" type="text/css" href="static/style.css?version=1"> <!-- change version on stylesheet changes -->
+ <link rel="icon" type="image/x-icon" href="static/img/emhass_logo_short.svg">
+ <script src="static/script.js"></script>
  </head>

  <body style="margin: auto; align-items:center; text-align:center;">
@@ -17,22 +17,22 @@
  <!-- advanced or basic page switch -->
  <a id="basicOrAdvanced" style="margin-right: 24px; cursor: pointer; z-index: 1">
  <svg class="feather">
- <use class="feather" href="
+ <use class="feather" href="static/img/feather-sprite.svg#tool" />
  </svg>
  </a>
  <a href="https://emhass.readthedocs.io/en/latest/">
  <svg class="feather" style="margin-right: 12px;";>
- <use class="feather" href="
+ <use class="feather" href="static/img/feather-sprite.svg#book" />
  </svg>
  </a>
  <a href="https://github.com/davidusb-geek/emhass" target="_blank" rel="noopener noreferrer">
  <svg class="feather" style="margin-right: 0px;" >
- <use class="feather" href="
+ <use class="feather" href="static/img/feather-sprite.svg#git-branch" />
  </svg>
  </a>
  </div>
  <!-- Title -->
- <img src="
+ <img src="static/img/emhass_icon.png" alt="">
  <h2>EMHASS: Energy Management Optimization for Home Assistant</h2>
  </div>

emhass/utils.py
CHANGED
@@ -96,7 +96,7 @@ def get_forecast_dates(freq: int, delta_forecast: int,
  end_forecast = (start_forecast + pd.Timedelta(days=delta_forecast)).replace(microsecond=0)
  forecast_dates = pd.date_range(start=start_forecast,
  end=end_forecast+timedelta(days=timedelta_days)-freq,
- freq=freq).round(freq, ambiguous='infer', nonexistent=
+ freq=freq).round(freq, ambiguous='infer', nonexistent='shift_forward')
  return forecast_dates

  def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict, optim_conf: dict, plant_conf: dict,
@@ -208,62 +208,23 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict,
  params['passed_data']['alpha'] = None
  params['passed_data']['beta'] = None
  # Treat passed forecast data lists
-
-
-
-
+ list_forecast_key = ['pv_power_forecast', 'load_power_forecast', 'load_cost_forecast', 'prod_price_forecast']
+ forecast_methods = ['weather_forecast_method', 'load_forecast_method', 'load_cost_forecast_method', 'prod_price_forecast_method']
+ for method, forecast_key in enumerate(list_forecast_key):
+ if forecast_key in runtimeparams.keys():
+ if type(runtimeparams[forecast_key]) == list and len(runtimeparams[forecast_key]) >= len(forecast_dates):
+ params['passed_data'][forecast_key] = runtimeparams[forecast_key]
+ optim_conf[forecast_methods[method]] = 'list'
+ else:
+ logger.error(f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}")
+ logger.error(f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}")
+ list_non_digits = [x for x in runtimeparams[forecast_key] if not (isinstance(x, int) or isinstance(x, float))]
+ if len(list_non_digits) > 0:
+ logger.warning(f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)")
+ for x in list_non_digits:
+ logger.warning(f"This value in {forecast_key} was detected as non digits: {str(x)}")
  else:
-
- logger.error("Passed type is "+str(type(runtimeparams['pv_power_forecast']))+" and length is "+str(len(runtimeparams['pv_power_forecast'])))
- list_non_digits = [x for x in runtimeparams['pv_power_forecast'] if not (isinstance(x, int) or isinstance(x, float))]
- if len(list_non_digits) > 0:
- logger.warning("There are non numeric values on the passed data for pv_power_forecast, check for missing values (nans, null, etc)")
- for x in list_non_digits:
- logger.warning("This value in pv_power_forecast was detected as non digits: "+str(x))
- else:
- params['passed_data']['pv_power_forecast'] = None
- if 'load_power_forecast' in runtimeparams.keys():
- if type(runtimeparams['load_power_forecast']) == list and len(runtimeparams['load_power_forecast']) >= len(forecast_dates):
- params['passed_data']['load_power_forecast'] = runtimeparams['load_power_forecast']
- optim_conf['load_forecast_method'] = 'list'
- else:
- logger.error("ERROR: The passed data is either not a list or the length is not correct, length should be "+str(len(forecast_dates)))
- logger.error("Passed type is "+str(type(runtimeparams['load_power_forecast']))+" and length is "+str(len(runtimeparams['load_power_forecast'])))
- list_non_digits = [x for x in runtimeparams['load_power_forecast'] if not (isinstance(x, int) or isinstance(x, float))]
- if len(list_non_digits) > 0:
- logger.warning("There are non numeric values on the passed data for load_power_forecast, check for missing values (nans, null, etc)")
- for x in list_non_digits:
- logger.warning("This value in load_power_forecast was detected as non digits: "+str(x))
- else:
- params['passed_data']['load_power_forecast'] = None
- if 'load_cost_forecast' in runtimeparams.keys():
- if type(runtimeparams['load_cost_forecast']) == list and len(runtimeparams['load_cost_forecast']) >= len(forecast_dates):
- params['passed_data']['load_cost_forecast'] = runtimeparams['load_cost_forecast']
- optim_conf['load_cost_forecast_method'] = 'list'
- else:
- logger.error("ERROR: The passed data is either not a list or the length is not correct, length should be "+str(len(forecast_dates)))
- logger.error("Passed type is "+str(type(runtimeparams['load_cost_forecast']))+" and length is "+str(len(runtimeparams['load_cost_forecast'])))
- list_non_digits = [x for x in runtimeparams['load_cost_forecast'] if not (isinstance(x, int) or isinstance(x, float))]
- if len(list_non_digits) > 0:
- logger.warning("There are non numeric values on the passed data or load_cost_forecast, check for missing values (nans, null, etc)")
- for x in list_non_digits:
- logger.warning("This value in load_cost_forecast was detected as non digits: "+str(x))
- else:
- params['passed_data']['load_cost_forecast'] = None
- if 'prod_price_forecast' in runtimeparams.keys():
- if type(runtimeparams['prod_price_forecast']) == list and len(runtimeparams['prod_price_forecast']) >= len(forecast_dates):
- params['passed_data']['prod_price_forecast'] = runtimeparams['prod_price_forecast']
- optim_conf['prod_price_forecast_method'] = 'list'
- else:
- logger.error("ERROR: The passed data is either not a list or the length is not correct, length should be "+str(len(forecast_dates)))
- logger.error("Passed type is "+str(type(runtimeparams['prod_price_forecast']))+" and length is "+str(len(runtimeparams['prod_price_forecast'])))
- list_non_digits = [x for x in runtimeparams['prod_price_forecast'] if not (isinstance(x, int) or isinstance(x, float))]
- if len(list_non_digits) > 0:
- logger.warning("There are non numeric values on the passed data for prod_price_forecast, check for missing values (nans, null, etc)")
- for x in list_non_digits:
- logger.warning("This value in prod_price_forecast was detected as non digits: "+str(x))
- else:
- params['passed_data']['prod_price_forecast'] = None
+ params['passed_data'][forecast_key] = None
  # Treat passed data for forecast model fit/predict/tune at runtime
  if 'days_to_retrieve' not in runtimeparams.keys():
  days_to_retrieve = 9
@@ -634,8 +595,9 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
  params['optim_conf']['def_start_timestep'] = [i['start_timesteps_of_each_deferrable_load'] for i in options.get('list_start_timesteps_of_each_deferrable_load')]
  if options.get('list_end_timesteps_of_each_deferrable_load',None) != None:
  params['optim_conf']['def_end_timestep'] = [i['end_timesteps_of_each_deferrable_load'] for i in options.get('list_end_timesteps_of_each_deferrable_load')]
- # Updating variables in
-
+ # Updating variables in plant_conf
+ params['plant_conf']['P_from_grid_max'] = options.get('maximum_power_from_grid',params['plant_conf']['P_from_grid_max'])
+ params['plant_conf']['P_to_grid_max'] = options.get('maximum_power_to_grid',params['plant_conf']['P_to_grid_max'])
  if options.get('list_pv_module_model',None) != None:
  params['plant_conf']['module_model'] = [i['pv_module_model'] for i in options.get('list_pv_module_model')]
  if options.get('list_pv_inverter_model',None) != None:
@@ -656,8 +618,7 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
  params['plant_conf']['SOCmin'] = options.get('battery_minimum_state_of_charge',params['plant_conf']['SOCmin'])
  params['plant_conf']['SOCmax'] = options.get('battery_maximum_state_of_charge',params['plant_conf']['SOCmax'])
  params['plant_conf']['SOCtarget'] = options.get('battery_target_state_of_charge',params['plant_conf']['SOCtarget'])
-
- # Check parameter lists have the same amounts as deferrable loads
+ # Check parameter lists have the same amounts as deferrable loads
  # If not, set defaults it fill in gaps
  if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['def_start_timestep']):
  logger.warning("def_start_timestep / list_start_timesteps_of_each_deferrable_load does not match number in num_def_loads, adding default values to parameter")
@@ -683,10 +644,6 @@ def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
  logger.warning("P_deferrable_nom / list_nominal_power_of_deferrable_loads does not match number in num_def_loads, adding default values to parameter")
  for x in range(len(params['optim_conf']['P_deferrable_nom']), params['optim_conf']['num_def_loads']):
  params['optim_conf']['P_deferrable_nom'].append(0)
- if params['optim_conf']['num_def_loads'] is not len(params['optim_conf']['list_hp_periods']):
- logger.warning("list_hp_periods / list_peak_hours_periods_(start&end)_hours does not match number in num_def_loads, adding default values to parameter")
- for x in range(len(params['optim_conf']['list_hp_periods']), params['optim_conf']['num_def_loads']):
- params['optim_conf']['list_hp_periods'].append({'period_hp_'+str(x+1):[{'start':'02:54'},{'end':'20:24'}]})
  # days_to_retrieve should be no less then 2
  if params['retrieve_hass_conf']['days_to_retrieve'] < 2:
  params['retrieve_hass_conf']['days_to_retrieve'] = 2
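The four copy-pasted validation blocks for the runtime forecast lists collapse into one loop driven by the paired key/method lists. A trimmed, self-contained sketch of how that loop behaves for a single passed key, using made-up stand-ins in place of the real runtime parameters:

```python
# Stand-ins for the objects handled by treat_runtimeparams (illustrative only).
forecast_dates = list(range(48))
runtimeparams = {"pv_power_forecast": [0.0] * 48}
params = {"passed_data": {}}
optim_conf = {}

list_forecast_key = ['pv_power_forecast', 'load_power_forecast',
                     'load_cost_forecast', 'prod_price_forecast']
forecast_methods = ['weather_forecast_method', 'load_forecast_method',
                    'load_cost_forecast_method', 'prod_price_forecast_method']

for method, forecast_key in enumerate(list_forecast_key):
    if forecast_key in runtimeparams:
        values = runtimeparams[forecast_key]
        if isinstance(values, list) and len(values) >= len(forecast_dates):
            params['passed_data'][forecast_key] = values
            # The index pairing maps pv_power_forecast to weather_forecast_method,
            # load_power_forecast to load_forecast_method, and so on.
            optim_conf[forecast_methods[method]] = 'list'
    else:
        params['passed_data'][forecast_key] = None

print(optim_conf)  # {'weather_forecast_method': 'list'}
```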
emhass/web_server.py
CHANGED
@@ -69,8 +69,13 @@ def index():
  else:
  app.logger.warning("The data container dictionary is empty... Please launch an optimization task")
  injection_dict={}
-
-
+
+ # replace {{basename}} in html template html with path root
+ # basename = request.headers.get("X-Ingress-Path", "")
+ # return make_response(template.render(injection_dict=injection_dict, basename=basename))
+
+ return make_response(template.render(injection_dict=injection_dict))
+

  #get actions
  @app.route('/template/<action_name>', methods=['GET'])
@@ -86,8 +91,7 @@ def template_action(action_name):
  else:
  app.logger.warning("The data container dictionary is empty... Please launch an optimization task")
  injection_dict={}
-
- return make_response(template.render(injection_dict=injection_dict, basename=basename))
+ return make_response(template.render(injection_dict=injection_dict))

  #post actions
  @app.route('/action/<action_name>', methods=['POST'])
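With the basename substitution commented out, index() and template_action() now render the template without a path prefix and rely on the relative asset URLs introduced in index.html and script.js. A minimal Flask sketch of that rendering pattern, assuming a Jinja2 template string as a stand-in for the real templates/index.html:

```python
from flask import Flask, make_response
from jinja2 import Template

app = Flask(__name__)

# Stand-in for the real template; note the relative href, matching the
# index.html change away from a prefixed asset path.
template = Template(
    "<link rel='stylesheet' href='static/style.css'>"
    "<div>{{ injection_dict }}</div>")

@app.route('/')
def index():
    injection_dict = {}  # in emhass this is populated by a previous optimization run
    return make_response(template.render(injection_dict=injection_dict))
```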
{emhass-0.8.4.dist-info → emhass-0.8.6.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: emhass
- Version: 0.8.4
+ Version: 0.8.6
  Summary: An Energy Management System for Home Assistant
  Home-page: https://github.com/davidusb-geek/emhass
  Author: David HERNANDEZ
@@ -12,7 +12,7 @@ Classifier: Topic :: Software Development :: Build Tools
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Operating System :: OS Independent
- Requires-Python: >=3.
+ Requires-Python: >=3.10, <3.12
  Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: wheel
@@ -466,8 +466,6 @@ curl -i -H 'Content-Type:application/json' -X POST -d '{"pv_power_forecast":[0,
  curl -i -H 'Content-Type:application/json' -X POST -d '{"pv_power_forecast":[0, 70, 141.22, 246.18, 513.5, 753.27, 1049.89, 1797.93, 1697.3, 3078.93], "prediction_horizon":10, "soc_init":0.5,"soc_final":0.6,"def_total_hours":[1,3],"def_start_timestep":[0,3],"def_end_timestep":[0,6]}' http://localhost:5000/action/naive-mpc-optim
  ```

-
-
  ## A machine learning forecaster

  Starting in v0.4.0 a new machine learning forecaster class was introduced.
emhass-0.8.6.dist-info/RECORD
ADDED
@@ -0,0 +1,25 @@
+ emhass/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ emhass/command_line.py,sha256=TrNJnP1V94NoGanR7-Ik1ZVWlE6fsbDjWUIuH0l-0bs,37580
+ emhass/forecast.py,sha256=38WF2XkopDOwvSZJU3_m01BXyiENVypEVOeXcHP-5Fo,45704
+ emhass/machine_learning_forecaster.py,sha256=8Rm0-pltsjIYqLv01zCeO_Ij_n2HKC62dv_kCno7UsU,15640
+ emhass/optimization.py,sha256=WcUJSDSBK7wgx0jaX25mhco7ZfqG1g066Ebh6ACyruQ,37197
+ emhass/retrieve_hass.py,sha256=COf8LD6B0arFI-P71PXyLT7snB7_Wg5c3bMhRdVMdI4,18406
+ emhass/utils.py,sha256=zj1rzpzsRpifgDcmeqRZUcM6WL6GGCcUAdStmGoXlJE,42394
+ emhass/web_server.py,sha256=FFdIZio-QGFH3t-p-Le2Q1o6_cqjfBSdoEXdExJ21nY,21541
+ emhass/data/cec_inverters.pbz2,sha256=tK8FvAUDW0uYez8EPttdCJwHhpPofclYV6GhhNZL0Pk,168272
+ emhass/data/cec_modules.pbz2,sha256=8vEaysgYffXg3KUl8XSF36Mdywzi3LpEtUN_qenjO9s,1655747
+ emhass/static/advanced.html,sha256=AsT3lMD0AjvAqzAYvUPmslyOYk2C3LA-VfoSB2PwnYA,1747
+ emhass/static/basic.html,sha256=hJ4EgCXVNHL5nMQWkIHWjsTm_bJb0N_ZN4zFUjhxEzU,608
+ emhass/static/script.js,sha256=bj3Pksm97sM4lUTpp3IkStx8fMwee39GnDWhrWjvV_A,17250
+ emhass/static/style.css,sha256=xSihd06G-AeMKtFGPCJAnTaGVQXKuPM7kvVvDuQDuxU,15557
+ emhass/static/img/emhass_icon.png,sha256=Kyx6hXQ1huJLHAq2CaBfjYXR25H9j99PSWHI0lShkaQ,19030
+ emhass/static/img/emhass_logo_short.svg,sha256=yzMcqtBRCV8rH84-MwnigZh45_f9Eoqwho9P8nCodJA,66736
+ emhass/static/img/feather-sprite.svg,sha256=VHjMJQg88wXa9CaeYrKGhNtyK0xdd47zCqwSIa-hxo8,60319
+ emhass/templates/index.html,sha256=_BsvUJ981uSQkx5H9tq_3es__x4WdPiOy7FjNoNYU9w,2744
+ emhass/templates/template.html,sha256=TkGgMecQEbFUZA4ymPwMUzNjKHsENvCgroUWbPt7G4Y,158
+ emhass-0.8.6.dist-info/LICENSE,sha256=1X3-S1yvOCBDBeox1aK3dq00m7dA8NDtcPrpKPISzbE,1077
+ emhass-0.8.6.dist-info/METADATA,sha256=KR8yrRLmiYFe6ljkjXrQO-De3j-26I1d3WO3pZXMSvs,34758
+ emhass-0.8.6.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ emhass-0.8.6.dist-info/entry_points.txt,sha256=6Bp1NFOGNv_fSTxYl1ke3K3h3aqAcBxI-bgq5yq-i1M,52
+ emhass-0.8.6.dist-info/top_level.txt,sha256=L7fIX4awfmxQbAePtSdVg2e6x_HhghfReHfsKSpKr9I,7
+ emhass-0.8.6.dist-info/RECORD,,
emhass-0.8.4.dist-info/RECORD
DELETED
@@ -1,23 +0,0 @@
- emhass/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- emhass/command_line.py,sha256=Os1W93kSIIiJ_IU48qfkEw691_ywaI0WknRBUDWleYc,37464
- emhass/forecast.py,sha256=2_Dm03_XyEVPYnKnJzA5TABYgt3tCe9_zRDGOOfFtEM,43555
- emhass/machine_learning_forecaster.py,sha256=8Rm0-pltsjIYqLv01zCeO_Ij_n2HKC62dv_kCno7UsU,15640
- emhass/optimization.py,sha256=M9BlbJ4f38APoIsHKLY_pfKszVWA61cPv_QnmtazkRA,37181
- emhass/retrieve_hass.py,sha256=buVvu-PJcphN9rlIhW1lI7oYk5r0fHh3h450eqznIAU,18400
- emhass/utils.py,sha256=tH6a7ofXpa18Gnll08wCHUdLMzHqqM3aEC-9-nyXKd4,46151
- emhass/web_server.py,sha256=U8aqg0udqxChBMxr83b0bE84HZLnuLQyb6SQaJmKtzM,21475
- emhass/static/advanced.html,sha256=AsT3lMD0AjvAqzAYvUPmslyOYk2C3LA-VfoSB2PwnYA,1747
- emhass/static/basic.html,sha256=hJ4EgCXVNHL5nMQWkIHWjsTm_bJb0N_ZN4zFUjhxEzU,608
- emhass/static/script.js,sha256=m6Bxqx1aHWcboXSiAqeEv8XscVC7W9sw8-Z_gPQzp5c,18173
- emhass/static/style.css,sha256=HSMn-URvvTvOXs_uVK3Ln7GM2xAvAlJjXdoNVWAdFk0,15222
- emhass/static/img/emhass_icon.png,sha256=Kyx6hXQ1huJLHAq2CaBfjYXR25H9j99PSWHI0lShkaQ,19030
- emhass/static/img/emhass_logo_short.svg,sha256=yzMcqtBRCV8rH84-MwnigZh45_f9Eoqwho9P8nCodJA,66736
- emhass/static/img/feather-sprite.svg,sha256=VHjMJQg88wXa9CaeYrKGhNtyK0xdd47zCqwSIa-hxo8,60319
- emhass/templates/index.html,sha256=pZCThgtHXEB6p2JlrMFJrbC3o35PcBmdome3OnNH_SE,2792
- emhass/templates/template.html,sha256=TkGgMecQEbFUZA4ymPwMUzNjKHsENvCgroUWbPt7G4Y,158
- emhass-0.8.4.dist-info/LICENSE,sha256=1X3-S1yvOCBDBeox1aK3dq00m7dA8NDtcPrpKPISzbE,1077
- emhass-0.8.4.dist-info/METADATA,sha256=90908Yq-TiZHviiU1JMCErLMOqTJpuA9xzvx1cxCn7M,34759
- emhass-0.8.4.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- emhass-0.8.4.dist-info/entry_points.txt,sha256=6Bp1NFOGNv_fSTxYl1ke3K3h3aqAcBxI-bgq5yq-i1M,52
- emhass-0.8.4.dist-info/top_level.txt,sha256=L7fIX4awfmxQbAePtSdVg2e6x_HhghfReHfsKSpKr9I,7
- emhass-0.8.4.dist-info/RECORD,,
{emhass-0.8.4.dist-info → emhass-0.8.6.dist-info}/LICENSE
File without changes
{emhass-0.8.4.dist-info → emhass-0.8.6.dist-info}/WHEEL
File without changes
{emhass-0.8.4.dist-info → emhass-0.8.6.dist-info}/entry_points.txt
File without changes
{emhass-0.8.4.dist-info → emhass-0.8.6.dist-info}/top_level.txt
File without changes