emhass 0.11.3__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
emhass/command_line.py CHANGED
@@ -69,7 +69,7 @@ def set_input_data_dict(
  retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params, logger)
  if type(retrieve_hass_conf) is bool:
  return False
-
+
  # Treat runtimeparams
  params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
  runtimeparams,
@@ -81,7 +81,8 @@ def set_input_data_dict(
  logger,
  emhass_conf,
  )
- # Define main objects
+
+ # Define the data retrieve object
  rh = RetrieveHass(
  retrieve_hass_conf["hass_url"],
  retrieve_hass_conf["long_lived_token"],
@@ -92,6 +93,21 @@ def set_input_data_dict(
  logger,
  get_data_from_file=get_data_from_file,
  )
+
+ # Retrieve basic configuration data from hass
+ if get_data_from_file:
+ with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
+ _, _, _, rh.ha_config = pickle.load(inp)
+ else:
+ rh.get_ha_config()
+
+ # Update the params dict using data from the HA configuration
+ params = utils.update_params_with_ha_config(
+ params,
+ rh.ha_config,
+ )
+
+ # Define the forecast and optimization objects
  fcst = Forecast(
  retrieve_hass_conf,
  optim_conf,
@@ -111,12 +127,13 @@ def set_input_data_dict(
  emhass_conf,
  logger,
  )
+
  # Perform setup based on type of action
  if set_type == "perfect-optim":
  # Retrieve data from hass
  if get_data_from_file:
  with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
- rh.df_final, days_list, var_list = pickle.load(inp)
+ rh.df_final, days_list, var_list, rh.ha_config = pickle.load(inp)
  retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(var_list[0])
  retrieve_hass_conf["sensor_power_photovoltaics"] = str(var_list[1])
  retrieve_hass_conf["sensor_linear_interp"] = [
@@ -154,12 +171,18 @@ def set_input_data_dict(
  P_PV_forecast, P_load_forecast, df_input_data_dayahead = None, None, None
  elif set_type == "dayahead-optim":
  # Get PV and load forecasts
- df_weather = fcst.get_weather_forecast(
- method=optim_conf["weather_forecast_method"]
- )
- if isinstance(df_weather, bool) and not df_weather:
- return False
- P_PV_forecast = fcst.get_power_from_weather(df_weather)
+ if (
+ optim_conf["set_use_pv"]
+ or optim_conf.get("weather_forecast_method", None) == "list"
+ ):
+ df_weather = fcst.get_weather_forecast(
+ method=optim_conf["weather_forecast_method"]
+ )
+ if isinstance(df_weather, bool) and not df_weather:
+ return False
+ P_PV_forecast = fcst.get_power_from_weather(df_weather)
+ else:
+ P_PV_forecast = pd.Series(0, index=fcst.forecast_dates)
  P_load_forecast = fcst.get_load_forecast(
  method=optim_conf["load_forecast_method"]
  )
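The same guard is repeated below for the MPC path. As an illustration only, here is a minimal sketch of what the new branch yields when `set_use_pv` is false and no forecast list was passed at runtime; the `optim_conf` values and dates are hypothetical stand-ins, not values from the package:

```python
import pandas as pd

# Hypothetical stand-ins for optim_conf and fcst.forecast_dates
optim_conf = {"set_use_pv": False, "weather_forecast_method": "scrapper"}
forecast_dates = pd.date_range("2024-01-01", periods=48, freq="30min", tz="Europe/Paris")

if optim_conf["set_use_pv"] or optim_conf.get("weather_forecast_method", None) == "list":
    pass  # weather retrieval and the PV power model would run here
else:
    # PV disabled: the optimization simply receives a zero PV production series
    P_PV_forecast = pd.Series(0, index=forecast_dates)
    print(P_PV_forecast.sum())  # 0
```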
@@ -208,7 +231,7 @@ def set_input_data_dict(
  # Retrieve data from hass
  if get_data_from_file:
  with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp:
- rh.df_final, days_list, var_list = pickle.load(inp)
+ rh.df_final, days_list, var_list, rh.ha_config = pickle.load(inp)
  retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(var_list[0])
  retrieve_hass_conf["sensor_power_photovoltaics"] = str(var_list[1])
  retrieve_hass_conf["sensor_linear_interp"] = [
@@ -241,14 +264,20 @@ def set_input_data_dict(
  return False
  df_input_data = rh.df_final.copy()
  # Get PV and load forecasts
- df_weather = fcst.get_weather_forecast(
- method=optim_conf["weather_forecast_method"]
- )
- if isinstance(df_weather, bool) and not df_weather:
- return False
- P_PV_forecast = fcst.get_power_from_weather(
- df_weather, set_mix_forecast=True, df_now=df_input_data
- )
+ if (
+ optim_conf["set_use_pv"]
+ or optim_conf.get("weather_forecast_method", None) == "list"
+ ):
+ df_weather = fcst.get_weather_forecast(
+ method=optim_conf["weather_forecast_method"]
+ )
+ if isinstance(df_weather, bool) and not df_weather:
+ return False
+ P_PV_forecast = fcst.get_power_from_weather(
+ df_weather, set_mix_forecast=True, df_now=df_input_data
+ )
+ else:
+ P_PV_forecast = pd.Series(0, index=fcst.forecast_dates)
  P_load_forecast = fcst.get_load_forecast(
  method=optim_conf["load_forecast_method"],
  set_mix_forecast=True,
@@ -403,10 +432,8 @@ def weather_forecast_cache(
  :rtype: bool

  """
-
  # Parsing yaml
  retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params, logger)
-
  # Treat runtimeparams
  params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
  runtimeparams,
@@ -417,8 +444,8 @@ def weather_forecast_cache(
  "forecast",
  logger,
  emhass_conf,
+ {},
  )
-
  # Make sure weather_forecast_cache is true
  if (params != None) and (params != "null"):
  params = json.loads(params)
@@ -426,12 +453,10 @@ def weather_forecast_cache(
  params = {}
  params["passed_data"]["weather_forecast_cache"] = True
  params = json.dumps(params)
-
  # Create Forecast object
  fcst = Forecast(
  retrieve_hass_conf, optim_conf, plant_conf, params, emhass_conf, logger
  )
-
  result = fcst.get_weather_forecast(optim_conf["weather_forecast_method"])
  if isinstance(result, bool) and not result:
  return False
@@ -15,6 +15,7 @@ params_secrets,lon,Longitude
  params_secrets,alt,Altitude
  optim_conf,costfun,costfun
  optim_conf,logging_level,logging_level
+ optim_conf,set_use_pv,set_use_pv
  optim_conf,set_use_battery,set_use_battery
  optim_conf,num_def_loads,number_of_deferrable_loads
  optim_conf,P_deferrable_nom,nominal_power_of_deferrable_loads,list_nominal_power_of_deferrable_loads
@@ -84,6 +84,7 @@
  "photovoltaic_production_sell_price": 0.1419,
  "maximum_power_from_grid": 9000,
  "maximum_power_to_grid": 9000,
+ "set_use_pv": false,
  "pv_module_model": [
  "CSUN_Eurasia_Energy_Systems_Industry_and_Trade_CSUN295_60M"
  ],
emhass/forecast.py CHANGED
@@ -886,10 +886,73 @@ class Forecast(object):
  forecast_out = pd.concat([forecast_out, forecast_tp], axis=0)
  return forecast_out

+ @staticmethod
+ def resample_data(data, freq, current_freq):
+ r"""
+ Resample a DataFrame with a custom frequency.
+
+ :param data: Original time series data with a DateTimeIndex.
+ :type data: pd.DataFrame
+ :param freq: Desired frequency for resampling (e.g., pd.Timedelta("10min")).
+ :type freq: pd.Timedelta
+ :return: Resampled data at the specified frequency.
+ :rtype: pd.DataFrame
+ """
+ if freq > current_freq:
+ # Downsampling
+ # Use 'mean' to aggregate or choose other options ('sum', 'max', etc.)
+ resampled_data = data.resample(freq).mean()
+ elif freq < current_freq:
+ # Upsampling
+ # Use 'asfreq' to create empty slots, then interpolate
+ resampled_data = data.resample(freq).asfreq()
+ resampled_data = resampled_data.interpolate(method='time')
+ else:
+ # No resampling needed
+ resampled_data = data.copy()
+ return resampled_data
+
+ @staticmethod
+ def get_typical_load_forecast(data, forecast_date):
+ r"""
+ Forecast the load profile for the next day based on historic data.
+
+ :param data: A DataFrame with a DateTimeIndex containing the historic load data.
+ Must include a 'load' column.
+ :type data: pd.DataFrame
+ :param forecast_date: The date for which the forecast will be generated.
+ :type forecast_date: pd.Timestamp
+ :return: A Series with the forecasted load profile for the next day and a list of days used
+ to calculate the forecast.
+ :rtype: tuple (pd.Series, list)
+ """
+ # Ensure the 'load' column exists
+ if 'load' not in data.columns:
+ raise ValueError("Data must have a 'load' column.")
+ # Filter historic data for the same month and day of the week
+ month = forecast_date.month
+ day_of_week = forecast_date.dayofweek
+ historic_data = data[(data.index.month == month) & (data.index.dayofweek == day_of_week)]
+ used_days = np.unique(historic_data.index.date)
+ # Align all historic data to the forecast day
+ aligned_data = []
+ for day in used_days:
+ daily_data = data[data.index.date == pd.Timestamp(day).date()]
+ aligned_daily_data = daily_data.copy()
+ aligned_daily_data.index = aligned_daily_data.index.map(
+ lambda x: x.replace(year=forecast_date.year, month=forecast_date.month, day=forecast_date.day)
+ )
+ aligned_data.append(aligned_daily_data)
+ # Combine all aligned historic data into a single DataFrame
+ combined_data = pd.concat(aligned_data)
+ # Compute the mean load for each timestamp
+ forecast = combined_data.groupby(combined_data.index).mean()
+ return forecast, used_days
+
  def get_load_forecast(
  self,
  days_min_load_forecast: Optional[int] = 3,
- method: Optional[str] = "naive",
+ method: Optional[str] = "typical",
  csv_path: Optional[str] = "data_load_forecast.csv",
  set_mix_forecast: Optional[bool] = False,
  df_now: Optional[pd.DataFrame] = pd.DataFrame(),
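Since both helpers are static methods, they can be exercised in isolation. Below is a small usage sketch with synthetic data (assuming emhass >= 0.12.0 is installed; the history values are made up):

```python
import numpy as np
import pandas as pd

from emhass.forecast import Forecast  # assumes emhass >= 0.12.0 is installed

# Synthetic half-hourly load history with the 'load' column the helper expects
idx = pd.date_range("2024-06-01", periods=14 * 48, freq="30min", tz="UTC")
rng = np.random.default_rng(0)
data = pd.DataFrame({"load": rng.uniform(200.0, 1500.0, len(idx))}, index=idx)

# Downsample 30 min -> 1 h (mean) or upsample 30 min -> 10 min (time interpolation)
hourly = Forecast.resample_data(data, pd.Timedelta("1h"), pd.Timedelta("30min"))
fine = Forecast.resample_data(data, pd.Timedelta("10min"), pd.Timedelta("30min"))

# Typical profile for a day sharing month and weekday with the history (a Saturday here)
profile, used_days = Forecast.get_typical_load_forecast(data, pd.Timestamp("2024-06-08", tz="UTC"))
print(len(used_days), profile.shape)  # two June Saturdays feed a 48-slot profile
```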
@@ -904,10 +967,11 @@ class Forecast(object):
  will be used to generate a naive forecast, defaults to 3
  :type days_min_load_forecast: int, optional
  :param method: The method to be used to generate load forecast, the options \
+ are 'typical' for a typical household load consumption curve, \
  are 'naive' for a persistance model, 'mlforecaster' for using a custom \
  previously fitted machine learning model, 'csv' to read the forecast from \
  a CSV file and 'list' to use data directly passed at runtime as a list of \
- values. Defaults to 'naive'.
+ values. Defaults to 'typical'.
  :type method: str, optional
  :param csv_path: The path to the CSV file used when method = 'csv', \
  defaults to "/data/data_load_forecast.csv"
@@ -956,7 +1020,7 @@ class Forecast(object):
  if self.get_data_from_file:
  filename_path = self.emhass_conf["data_path"] / "test_df_final.pkl"
  with open(filename_path, "rb") as inp:
- rh.df_final, days_list, var_list = pickle.load(inp)
+ rh.df_final, days_list, var_list, rh.ha_config = pickle.load(inp)
  self.var_load = var_list[0]
  self.retrieve_hass_conf["sensor_power_load_no_var_loads"] = (
  self.var_load
@@ -977,7 +1041,37 @@ class Forecast(object):
  ):
  return False
  df = rh.df_final.copy()[[self.var_load_new]]
- if method == "naive": # using a naive approach
+ if method == "typical": # using typical statistical data from a household power consumption
+ # Loading data from history file
+ model_type = "load_clustering"
+ data_path = self.emhass_conf["data_path"] / str("data_train_" + model_type + ".pkl")
+ with open(data_path, "rb") as fid:
+ data, _ = pickle.load(fid)
+ # Resample the data if needed
+ current_freq = pd.Timedelta('30min')
+ if self.freq != current_freq:
+ data = Forecast.resample_data(data, self.freq, current_freq)
+ # Generate forecast
+ data_list = []
+ dates_list = np.unique(self.forecast_dates.date).tolist()
+ forecast = pd.DataFrame()
+ for date in dates_list:
+ forecast_date = pd.Timestamp(date)
+ data.columns = ['load']
+ forecast_tmp, used_days = Forecast.get_typical_load_forecast(data, forecast_date)
+ self.logger.debug(f"Using {len(used_days)} days of data to generate the forecast.")
+ # Normalize the forecast
+ forecast_tmp = forecast_tmp*self.plant_conf['maximum_power_from_grid']/9000
+ data_list.extend(forecast_tmp.values.ravel().tolist())
+ if len(forecast) == 0:
+ forecast = forecast_tmp
+ else:
+ forecast = pd.concat([forecast, forecast_tmp], axis=0)
+ forecast.index = forecast.index.tz_convert(self.time_zone)
+ forecast_out = forecast.loc[forecast.index.intersection(self.forecast_dates)]
+ forecast_out.index.name = 'ts'
+ forecast_out = forecast_out.rename(columns={'load': 'yhat'})
+ elif method == "naive": # using a naive approach
  mask_forecast_out = (
  df.index > days_list[-1] - self.optim_conf["delta_forecast_daily"]
  )
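One note on the scaling step above: the bundled typical profile appears to be normalized against the 9000 W default of `maximum_power_from_grid` (the same default visible in `config_defaults.json` earlier in this diff), so the forecast is rescaled linearly to the user's own plant limit. A hedged arithmetic sketch with made-up numbers:

```python
# Hypothetical numbers: one sample of the bundled typical profile and a 6 kW grid limit
profile_value_w = 1200.0
maximum_power_from_grid = 6000.0

# Same linear rescaling as the "typical" branch: ratio to the 9000 W reference
scaled = profile_value_w * maximum_power_from_grid / 9000.0
print(scaled)  # 800.0 W for this household
```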
emhass/optimization.py CHANGED
@@ -661,7 +661,7 @@ class Optimization:
  cooling_constant
  * (
  predicted_temp[I - 1]
- - outdoor_temperature_forecast[I - 1]
+ - outdoor_temperature_forecast.iloc[I - 1]
  )
  )
  )
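This change matters when `outdoor_temperature_forecast` is a pandas Series whose index is not a plain integer range: `series[i]` goes through label lookup (with a deprecated integer-as-position fallback), while `.iloc[i]` is always positional. A small illustration with hypothetical values:

```python
import pandas as pd

# Hypothetical outdoor temperature forecast indexed by timestamps
temps = pd.Series(
    [12.5, 11.8, 11.2],
    index=pd.date_range("2024-01-01", periods=3, freq="30min"),
)

print(temps.iloc[1])  # 11.8 -- positional access, independent of the index type
# temps[1] would rely on the deprecated integer-as-position fallback and, depending
# on the pandas version, warns or raises instead of returning the second value.
```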
@@ -29,6 +29,10 @@
  <div id="Solar System (PV)" class="section-card">
  <div class="section-card-header">
  <h4>Solar System (PV)</h4>
+ <label class="switch"> <!-- switch connected to set_use_pv -->
+ <input id="set_use_pv" type="checkbox">
+ <span class="slider"></span>
+ </label>
  </div>
  <div class="section-body"> </div> <!-- parameters will get generated here -->
  </div>
@@ -121,7 +121,7 @@ function loadConfigurationListView(param_definitions, config, list_html) {
  }

  //list parameters used in the section headers
- header_input_list = ["set_use_battery", "number_of_deferrable_loads"];
+ header_input_list = ["set_use_battery", "set_use_pv", "number_of_deferrable_loads"];

  //get the main container and append list template html
  document.getElementById("configuration-container").innerHTML = list_html;
@@ -265,7 +265,7 @@ function buildParamContainers(
  });

  //check initial checkbox state, check "value" of input and match to "checked" value
- let checkbox = document.querySelectorAll("input[type='checkbox']");
+ let checkbox = SectionContainer.querySelectorAll("input[type='checkbox']");
  checkbox.forEach(function (answer) {
  let value = answer.value === "true";
  answer.checked = value;
@@ -559,6 +559,19 @@ function headerElement(element, param_definitions, config) {
  }
  break;

+ //if set_use_pv, add or remove PV section (inc. related params)
+ case "set_use_pv":
+ if (element.checked) {
+ param_container.innerHTML = "";
+ buildParamContainers("Solar System (PV)", param_definitions["Solar System (PV)"], config, [
+ "set_use_pv",
+ ]);
+ element.checked = true;
+ } else {
+ param_container.innerHTML = "";
+ }
+ break;
+
  //if number_of_deferrable_loads, the number of inputs in the "Deferrable Loads" section should add up to number_of_deferrable_loads value in header
  case "number_of_deferrable_loads":
  //get a list of param in section
@@ -101,11 +101,12 @@
  "Description": "The load forecast method that will be used. The options are ‘csv’ to load a CSV file or ‘naive’ for a simple 1-day persistence model.",
  "input": "select",
  "select_options": [
+ "typical",
  "naive",
  "mlforecaster",
  "csv"
  ],
- "default_value": "naive"
+ "default_value": "typical"
  },
  "set_total_pv_sell": {
  "friendly_name": "PV straight to grid",
@@ -229,6 +230,12 @@
  }
  },
  "Solar System (PV)": {
+ "set_use_pv": {
+ "friendly_name": "Enable PV system",
+ "Description": "Set to True if we should consider an solar PV system. Defaults to False",
+ "input": "boolean",
+ "default_value": false
+ },
  "pv_module_model": {
  "friendly_name": "PV module model name",
  "Description": "The PV module model. This parameter can be a list of items to enable the simulation of mixed orientation systems.",
@@ -66,7 +66,7 @@
  </div>
  </div>
  <footer class="footer">
- <p style="margin-top:10px; text-align:center;">&copy; MIT License | Copyright (c) 2021-2023 David
+ <p style="margin-top:10px; text-align:center;">&copy; MIT License | Copyright (c) 2021-2025 David
  HERNANDEZ</p>
  </footer>
  </div>
@@ -69,7 +69,7 @@
  <div>

  <footer class="footer">
- <p style="margin-top:10px; text-align:center;">&copy; MIT License | Copyright (c) 2021-2023 David
+ <p style="margin-top:10px; text-align:center;">&copy; MIT License | Copyright (c) 2021-2025 David
  HERNANDEZ</p>
  </footer>
  </body>
emhass/utils.py CHANGED
@@ -138,6 +138,70 @@ def get_forecast_dates(
  return forecast_dates


+ def update_params_with_ha_config(
+ params: str,
+ ha_config: dict,
+ ) -> dict:
+ """
+ Update the params with the Home Assistant configuration.
+
+ Parameters
+ ----------
+ params : str
+ The serialized params.
+ ha_config : dict
+ The Home Assistant configuration.
+
+ Returns
+ -------
+ dict
+ The updated params.
+ """
+ # Load serialized params
+ params = json.loads(params)
+ # Update params
+ currency_to_symbol = {
+ 'EUR': '€',
+ 'USD': '$',
+ 'GBP': '£',
+ 'YEN': '¥',
+ 'JPY': '¥',
+ 'AUD': 'A$',
+ 'CAD': 'C$',
+ 'CHF': 'CHF', # Swiss Franc has no special symbol
+ 'CNY': '¥',
+ 'INR': '₹',
+ # Add more as needed
+ }
+ if 'currency' in ha_config.keys():
+ ha_config['currency'] = currency_to_symbol.get(ha_config['currency'], 'Unknown')
+ else:
+ ha_config['currency'] = '€'
+ if 'unit_system' not in ha_config.keys():
+ ha_config['unit_system'] = {'temperature': '°C'}
+
+ for k in range(params["optim_conf"]["number_of_deferrable_loads"]):
+ params['passed_data']['custom_predicted_temperature_id'][k].update(
+ {"unit_of_measurement": ha_config['unit_system']['temperature']}
+ )
+ updated_passed_dict = {
+ "custom_cost_fun_id": {
+ "unit_of_measurement": ha_config['currency'],
+ },
+ "custom_unit_load_cost_id": {
+ "unit_of_measurement": f"{ha_config['currency']}/kWh",
+ },
+ "custom_unit_prod_price_id": {
+ "unit_of_measurement": f"{ha_config['currency']}/kWh",
+ },
+ }
+ for key, value in updated_passed_dict.items():
+ params["passed_data"][key]["unit_of_measurement"] = value["unit_of_measurement"]
+ # Serialize the final params
+ params = json.dumps(params, default=str)
+ return params
+
+
  def treat_runtimeparams(
  runtimeparams: str,
  params: str,
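A usage sketch for the new helper (assuming emhass >= 0.12.0 is installed). The `params` skeleton below is a minimal hypothetical structure covering only the keys the function touches, and note from the body above that it takes and returns JSON-serialized params:

```python
import json

from emhass.utils import update_params_with_ha_config  # assumes emhass >= 0.12.0

# Minimal hypothetical params covering only the keys the helper updates
params = json.dumps({
    "optim_conf": {"number_of_deferrable_loads": 1},
    "passed_data": {
        "custom_predicted_temperature_id": [
            {"entity_id": "sensor.temp_predicted0",
             "unit_of_measurement": "°C",
             "friendly_name": "Predicted temperature 0"}
        ],
        "custom_cost_fun_id": {"unit_of_measurement": ""},
        "custom_unit_load_cost_id": {"unit_of_measurement": "€/kWh"},
        "custom_unit_prod_price_id": {"unit_of_measurement": "€/kWh"},
    },
})
ha_config = {"currency": "USD", "unit_system": {"temperature": "°F"}}

updated = json.loads(update_params_with_ha_config(params, ha_config))
print(updated["passed_data"]["custom_unit_load_cost_id"]["unit_of_measurement"])  # $/kWh
print(updated["passed_data"]["custom_predicted_temperature_id"][0]["unit_of_measurement"])  # °F
```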
@@ -183,6 +247,10 @@ def treat_runtimeparams(
  params["optim_conf"].update(optim_conf)
  params["plant_conf"].update(plant_conf)

+ # Check defaults on HA retrieved config
+ default_currency_unit = '€'
+ default_temperature_unit = '°C'
+
  # Some default data needed
  custom_deferrable_forecast_id = []
  custom_predicted_temperature_id = []
@@ -197,7 +265,7 @@ def treat_runtimeparams(
  custom_predicted_temperature_id.append(
  {
  "entity_id": "sensor.temp_predicted{}".format(k),
- "unit_of_measurement": "°C",
+ "unit_of_measurement": default_temperature_unit,
  "friendly_name": "Predicted temperature {}".format(k),
  }
  )
@@ -239,7 +307,7 @@ def treat_runtimeparams(
  },
  "custom_cost_fun_id": {
  "entity_id": "sensor.total_cost_fun_value",
- "unit_of_measurement": "",
+ "unit_of_measurement": default_currency_unit,
  "friendly_name": "Total cost function value",
  },
  "custom_optim_status_id": {
@@ -249,12 +317,12 @@ def treat_runtimeparams(
  },
  "custom_unit_load_cost_id": {
  "entity_id": "sensor.unit_load_cost",
- "unit_of_measurement": "€/kWh",
+ "unit_of_measurement": f"{default_currency_unit}/kWh",
  "friendly_name": "Unit Load Cost",
  },
  "custom_unit_prod_price_id": {
  "entity_id": "sensor.unit_prod_price",
- "unit_of_measurement": "€/kWh",
+ "unit_of_measurement": f"{default_currency_unit}/kWh",
  "friendly_name": "Unit Prod Price",
  },
  "custom_deferrable_forecast_id": custom_deferrable_forecast_id,
@@ -308,7 +376,7 @@ def treat_runtimeparams(
  runtimeparams.get("optimization_time_step", runtimeparams.get("freq"))
  )
  params["retrieve_hass_conf"]["optimization_time_step"] = pd.to_timedelta(
- optimization_time_step
+ optimization_time_step, "minutes"
  )
  else:
  optimization_time_step = int(
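The added unit is significant: without it, an integer runtime value is interpreted as nanoseconds. A quick check (plain pandas, nothing package-specific):

```python
import pandas as pd

print(pd.to_timedelta(30))             # 0 days 00:00:00.000000030 (nanoseconds!)
print(pd.to_timedelta(30, "minutes"))  # 0 days 00:30:00
```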
@@ -338,59 +406,6 @@ def treat_runtimeparams(
  optimization_time_step, delta_forecast, time_zone
  )

- # Treat passed forecast data lists
- list_forecast_key = [
- "pv_power_forecast",
- "load_power_forecast",
- "load_cost_forecast",
- "prod_price_forecast",
- "outdoor_temperature_forecast",
- ]
- forecast_methods = [
- "weather_forecast_method",
- "load_forecast_method",
- "load_cost_forecast_method",
- "production_price_forecast_method",
- "outdoor_temperature_forecast_method",
- ]
-
- # Loop forecasts, check if value is a list and greater than or equal to forecast_dates
- for method, forecast_key in enumerate(list_forecast_key):
- if forecast_key in runtimeparams.keys():
- if isinstance(runtimeparams[forecast_key], list) and len(
- runtimeparams[forecast_key]
- ) >= len(forecast_dates):
- params["passed_data"][forecast_key] = runtimeparams[forecast_key]
- params["optim_conf"][forecast_methods[method]] = "list"
- else:
- logger.error(
- f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}"
- )
- logger.error(
- f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}"
- )
- # Check if string contains list, if so extract
- if isinstance(runtimeparams[forecast_key], str):
- if isinstance(ast.literal_eval(runtimeparams[forecast_key]), list):
- runtimeparams[forecast_key] = ast.literal_eval(
- runtimeparams[forecast_key]
- )
- list_non_digits = [
- x
- for x in runtimeparams[forecast_key]
- if not (isinstance(x, int) or isinstance(x, float))
- ]
- if len(list_non_digits) > 0:
- logger.warning(
- f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)"
- )
- for x in list_non_digits:
- logger.warning(
- f"This value in {forecast_key} was detected as non digits: {str(x)}"
- )
- else:
- params["passed_data"][forecast_key] = None
-
  # Add runtime exclusive (not in config) parameters to params
  # regressor-model-fit
  if set_type == "regressor-model-fit":
@@ -447,6 +462,16 @@ def treat_runtimeparams(
  soc_final = runtimeparams["soc_final"]
  params["passed_data"]["soc_final"] = soc_final

+ params["passed_data"]["operating_hours_of_each_deferrable_load"] = params[
+ "optim_conf"
+ ].get("operating_hours_of_each_deferrable_load", None)
+ params["passed_data"]["start_timesteps_of_each_deferrable_load"] = params[
+ "optim_conf"
+ ].get("start_timesteps_of_each_deferrable_load", None)
+ params["passed_data"]["end_timesteps_of_each_deferrable_load"] = params[
+ "optim_conf"
+ ].get("end_timesteps_of_each_deferrable_load", None)
+
  forecast_dates = copy.deepcopy(forecast_dates)[0:prediction_horizon]

  # Load the default config
@@ -480,6 +505,59 @@ def treat_runtimeparams(
  params["passed_data"]["soc_init"] = None
  params["passed_data"]["soc_final"] = None

+ # Treat passed forecast data lists
+ list_forecast_key = [
+ "pv_power_forecast",
+ "load_power_forecast",
+ "load_cost_forecast",
+ "prod_price_forecast",
+ "outdoor_temperature_forecast",
+ ]
+ forecast_methods = [
+ "weather_forecast_method",
+ "load_forecast_method",
+ "load_cost_forecast_method",
+ "production_price_forecast_method",
+ "outdoor_temperature_forecast_method",
+ ]
+
+ # Loop forecasts, check if value is a list and greater than or equal to forecast_dates
+ for method, forecast_key in enumerate(list_forecast_key):
+ if forecast_key in runtimeparams.keys():
+ if isinstance(runtimeparams[forecast_key], list) and len(
+ runtimeparams[forecast_key]
+ ) >= len(forecast_dates):
+ params["passed_data"][forecast_key] = runtimeparams[forecast_key]
+ params["optim_conf"][forecast_methods[method]] = "list"
+ else:
+ logger.error(
+ f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}"
+ )
+ logger.error(
+ f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}"
+ )
+ # Check if string contains list, if so extract
+ if isinstance(runtimeparams[forecast_key], str):
+ if isinstance(ast.literal_eval(runtimeparams[forecast_key]), list):
+ runtimeparams[forecast_key] = ast.literal_eval(
+ runtimeparams[forecast_key]
+ )
+ list_non_digits = [
+ x
+ for x in runtimeparams[forecast_key]
+ if not (isinstance(x, int) or isinstance(x, float))
+ ]
+ if len(list_non_digits) > 0:
+ logger.warning(
+ f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)"
+ )
+ for x in list_non_digits:
+ logger.warning(
+ f"This value in {forecast_key} was detected as non digits: {str(x)}"
+ )
+ else:
+ params["passed_data"][forecast_key] = None
+
  # Treat passed data for forecast model fit/predict/tune at runtime
  if (
  params["passed_data"].get("historic_days_to_retrieve", None) is not None
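The relocated block keeps the same contract: a runtime list is only accepted when it covers every forecast slot, in which case the matching `*_forecast_method` is switched to "list". A hypothetical payload for a 30 min step and a 24 h horizon (48 slots), posted the same way as the `curl` examples in the README further down:

```python
import json

# 48 hypothetical values, one per 30 min slot of a 24 h horizon
runtimeparams = json.dumps({
    "pv_power_forecast": [0.0] * 24 + [1500.0] * 12 + [0.0] * 12,  # W
    "load_cost_forecast": [0.20] * 48,                             # currency/kWh
})
# e.g.: curl -i -H 'Content-Type:application/json' -X POST \
#            -d "$RUNTIMEPARAMS" http://localhost:5000/action/dayahead-optim
```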
@@ -1091,7 +1169,7 @@ def build_secrets(
  :type logger: logging.Logger
  :param argument: dictionary of secrets arguments passed (url,key)
  :type argument: dict
- :param options_path: path to the options file (options.json) (usually provided bt EMHASS-Add-on)
+ :param options_path: path to the options file (options.json) (usually provided by EMHASS-Add-on)
  :type options_path: str
  :param secrets_path: path to secrets file (secrets_emhass.yaml)
  :type secrets_path: str
emhass/web_server.py CHANGED
@@ -14,6 +14,7 @@ from pathlib import Path

  import yaml
  from flask import Flask, make_response, request
+ from flask import logging as log
  from jinja2 import Environment, PackageLoader
  from waitress import serve

@@ -384,7 +385,6 @@ def action_call(action_name):

  ActionStr = " >> Setting input data dict"
  app.logger.info(ActionStr)
- app.logger.warning(costfun)
  input_data_dict = set_input_data_dict(
  emhass_conf, costfun, params, runtimeparams, action_name, app.logger
  )
@@ -652,37 +652,30 @@ if __name__ == "__main__":
  raise Exception("missing: " + str(emhass_conf["data_path"]))

  # Define loggers
- ch = logging.StreamHandler()
  formatter = logging.Formatter(
  "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
  )
- ch.setFormatter(formatter)
+ log.default_handler.setFormatter(formatter)
  # Action file logger
  fileLogger = logging.FileHandler(str(emhass_conf["data_path"] / "actionLogs.txt"))
  formatter = logging.Formatter("%(levelname)s - %(name)s - %(message)s")
  fileLogger.setFormatter(formatter) # add format to Handler
  if logging_level == "DEBUG":
  app.logger.setLevel(logging.DEBUG)
- ch.setLevel(logging.DEBUG)
  fileLogger.setLevel(logging.DEBUG)
  elif logging_level == "INFO":
  app.logger.setLevel(logging.INFO)
- ch.setLevel(logging.INFO)
  fileLogger.setLevel(logging.INFO)
  elif logging_level == "WARNING":
  app.logger.setLevel(logging.WARNING)
- ch.setLevel(logging.WARNING)
  fileLogger.setLevel(logging.WARNING)
  elif logging_level == "ERROR":
  app.logger.setLevel(logging.ERROR)
- ch.setLevel(logging.ERROR)
  fileLogger.setLevel(logging.ERROR)
  else:
  app.logger.setLevel(logging.DEBUG)
- ch.setLevel(logging.DEBUG)
  fileLogger.setLevel(logging.DEBUG)
  app.logger.propagate = False
- app.logger.addHandler(ch)
  app.logger.addHandler(fileLogger)
  # Clear Action File logger file, ready for new instance
  clearFileLog()
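The web server now reuses Flask's own `default_handler` (imported above as `from flask import logging as log`) instead of attaching a second `StreamHandler`, which avoids duplicated console lines. A minimal standalone sketch of the same pattern, written with the equivalent `flask.logging.default_handler` import:

```python
import logging

from flask import Flask
from flask.logging import default_handler  # the handler Flask attaches to app.logger

app = Flask(__name__)
app.logger.setLevel(logging.INFO)

# Reformat the existing handler rather than adding a new StreamHandler,
# so each app.logger call is emitted once with the chosen format.
default_handler.setFormatter(
    logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
)
app.logger.info("formatted through Flask's default handler")
```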
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: emhass
- Version: 0.11.3
+ Version: 0.12.0
  Summary: An Energy Management System for Home Assistant
  Author-email: David HERNANDEZ <davidusb@gmail.com>
  License: MIT
@@ -132,11 +132,16 @@ Installation instructions and example Home Assistant automation configurations a
  You must follow these steps to make EMHASS work properly:

  1) Install and run EMHASS.
- - There are multiple methods of installing and Running EMHASS. See [Installation Method](##Installation-Methods) below to pick a method that best suits your use case.
+ - There are multiple methods of installing and Running EMHASS. See [Installation Method](#Installation-Methods) below to pick a method that best suits your use case.

- 2) Define all the parameters in the configuration file *(`config.json`)* or configuration page *(`YOURIP:5000/configuration`)*.
+ 2) Define all the parameters in the configuration file *(`config.json`)* or configuration page *(`YOURIP:5000/configuration`)*.
+ ```{note}
+
+ New in EMHASS v0.12.0: the default configuration does not need to retrieve any data from Home Assistant! After installing and running the add-on, EMHASS should start and it will be ready to launch an optimization.
+ ```
  - See the description for each parameter in the [configuration](https://emhass.readthedocs.io/en/latest/config.html) docs.
- - You will most notably need to define the main data entering EMHASS. This will be the Home Assistant sensor/variable `sensor.power_photovoltaics` for the name of your Home Assistant variable containing the PV produced power, and the sensor/variable `sensor.power_load_no_var_loads`, for the load power of your household excluding the power of the deferrable loads that you want to optimize.
+ - EMHASS has a default configuration with 2 deferrable loads, no solar PV, no batteries and a basic load power forecasting method.
+ - If you want to consider solar PV and more advanced load power forecast methods, you will need to define the main data entering EMHASS. This will be the Home Assistant sensor/variable `sensor.power_load_no_var_loads`, for the load power of your household excluding the power of the deferrable loads that you want to optimize, and the sensor/variable `sensor.power_photovoltaics` for the name of your Home Assistant variable containing the PV produced power (if solar PV is activated).
  - If you have a PV installation then this dedicated web app can be useful for finding your inverter and solar panel models: [https://emhass-pvlib-database.streamlit.app/](https://emhass-pvlib-database.streamlit.app/)

  4) Launch the optimization and check the results.
@@ -144,10 +149,10 @@ You must follow these steps to make EMHASS work properly:
  - Or with a `curl` command like this: `curl -i -H 'Content-Type:application/json' -X POST -d '{}' http://localhost:5000/action/dayahead-optim`.

  5) If you’re satisfied with the optimization results then you can set the optimization and data publish task commands in an automation.
- - You can read more about this in the [usage](##usage) section below.
+ - You can read more about this in the [usage](#usage) section below.

  6) The final step is to link the deferrable loads variables to real switches on your installation.
- - An example code for this using automations and the shell command integration is presented below in the [usage](##usage) section.
+ - An example code for this using automations and the shell command integration is presented below in the [usage](#usage) section.

  A more detailed workflow is given below:

@@ -0,0 +1,32 @@
+ emhass/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ emhass/command_line.py,sha256=xXu0A87fgiv38YbEJZiK3xQZ63wEMp081YqUCNsAWcM,70688
+ emhass/forecast.py,sha256=_Gc8k6_8Nz87WHXKyUH6iXK956Z2TGzhL8L6t-tO_sk,63496
+ emhass/machine_learning_forecaster.py,sha256=JErz50i_D59J5wXdbf_EUPb_FG45qRflv51iBA7ARXU,17417
+ emhass/machine_learning_regressor.py,sha256=yFwMvVEmlgDJUsHhBT-HpNE3j2TC24e8Gmbcn9MPfeU,10690
+ emhass/optimization.py,sha256=izMgRJFEP_9LHvZeX6FM1lxWyDWX3Tq2hFvu8ZP9FN4,61457
+ emhass/retrieve_hass.py,sha256=LLZBoP5Rkyg0_uSvJSG5m-6apVoWZ6MWuXPPA-j-JsI,25617
+ emhass/utils.py,sha256=esxcCoA38VhhugOHRvu69hJ8_V_zJkAqu0jUHW26rck,68969
+ emhass/web_server.py,sha256=QsqT51AdlAgNCG3NV1zbm4YkBSq_0BaC3cIEzPeZvl8,28023
+ emhass/data/associations.csv,sha256=IpEZIIWYdFjkRoC5xa1pRHjwnVs_VH8G8ogbGFxLfGI,3679
+ emhass/data/cec_inverters.pbz2,sha256=tK8FvAUDW0uYez8EPttdCJwHhpPofclYV6GhhNZL0Pk,168272
+ emhass/data/cec_modules.pbz2,sha256=8vEaysgYffXg3KUl8XSF36Mdywzi3LpEtUN_qenjO9s,1655747
+ emhass/data/config_defaults.json,sha256=-mQHahDv6Z5wYgClOs4VVr5KVCP51olb3f2mEj3Beic,2777
+ emhass/static/advanced.html,sha256=gAhsd14elDwh1Ts4lf9wn_ZkczzzObq5qOimi_la3Ic,2067
+ emhass/static/basic.html,sha256=ro2WwWgJyoUhqx_nJFzKCEG8FA8863vSHLmrjGYcEgs,677
+ emhass/static/configuration_list.html,sha256=i4v83RVduWjdjkjPhA74e-j8NSUpFzqMGU3ixOaJLfI,1740
+ emhass/static/configuration_script.js,sha256=CU6CuvnFrAWhnCns8K_AyX8fAdOJMrtR7wX7pXzpnK4,31525
+ emhass/static/script.js,sha256=q3qTqc_pTLTK-0NPKurxFXcJ2vZLz4TctPfUgz09ygo,16291
+ emhass/static/style.css,sha256=a_8YlGubn1zoF5RTLJ_Qkrb8tAjUY9p7oAKxhCvJY2s,19288
+ emhass/static/data/param_definitions.json,sha256=W-vq1Hj5_-YDpfl00cYF7kuLAQpfpsamjKGh7eU20LY,19485
+ emhass/static/img/emhass_icon.png,sha256=Kyx6hXQ1huJLHAq2CaBfjYXR25H9j99PSWHI0lShkaQ,19030
+ emhass/static/img/emhass_logo_short.svg,sha256=yzMcqtBRCV8rH84-MwnigZh45_f9Eoqwho9P8nCodJA,66736
+ emhass/static/img/feather-sprite.svg,sha256=VHjMJQg88wXa9CaeYrKGhNtyK0xdd47zCqwSIa-hxo8,60319
+ emhass/templates/configuration.html,sha256=M-_L__juYzcdGDaryGrz6LG2mguW2f1Sx6k01YfG7Dc,2885
+ emhass/templates/index.html,sha256=1V44c0yyliu_z8inl0K-zmmmkhQumH3Bqk8Jj1YJPzY,3076
+ emhass/templates/template.html,sha256=TkGgMecQEbFUZA4ymPwMUzNjKHsENvCgroUWbPt7G4Y,158
+ emhass-0.12.0.dist-info/LICENSE,sha256=1X3-S1yvOCBDBeox1aK3dq00m7dA8NDtcPrpKPISzbE,1077
+ emhass-0.12.0.dist-info/METADATA,sha256=7e1QOm_rd__64snbbStegC7hFW8q62JHdC2sHlCFhSw,49398
+ emhass-0.12.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+ emhass-0.12.0.dist-info/entry_points.txt,sha256=6Bp1NFOGNv_fSTxYl1ke3K3h3aqAcBxI-bgq5yq-i1M,52
+ emhass-0.12.0.dist-info/top_level.txt,sha256=L7fIX4awfmxQbAePtSdVg2e6x_HhghfReHfsKSpKr9I,7
+ emhass-0.12.0.dist-info/RECORD,,
@@ -1,32 +0,0 @@
- emhass/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- emhass/command_line.py,sha256=3fN0gUrTImxCUbmESgU8j_dXUwvkaCDMSdwcHWY0SQI,69671
- emhass/forecast.py,sha256=Jr1DAobtFoTfCS9RvtPQvYRTYrvyp1Ubq-X1noqh7jA,58763
- emhass/machine_learning_forecaster.py,sha256=JErz50i_D59J5wXdbf_EUPb_FG45qRflv51iBA7ARXU,17417
- emhass/machine_learning_regressor.py,sha256=yFwMvVEmlgDJUsHhBT-HpNE3j2TC24e8Gmbcn9MPfeU,10690
- emhass/optimization.py,sha256=-s3gWblpc85v-q0Ad9M8lXlG8ZZW0iL_s_I_9EUhzwA,61452
- emhass/retrieve_hass.py,sha256=LLZBoP5Rkyg0_uSvJSG5m-6apVoWZ6MWuXPPA-j-JsI,25617
- emhass/utils.py,sha256=U0fH_CbYQJd-izWqjBfJc-ih1y14S5IISIFtBHe1wM0,66330
- emhass/web_server.py,sha256=BdCefS_ipseUOa3sq-HFl1hWrA9NZbfxe3mt0ZqcgIU,28244
- emhass/data/associations.csv,sha256=Wv2845irZDMYm8svg__Ev0c2BgkVX88yAXoqpy1C4RM,3646
- emhass/data/cec_inverters.pbz2,sha256=tK8FvAUDW0uYez8EPttdCJwHhpPofclYV6GhhNZL0Pk,168272
- emhass/data/cec_modules.pbz2,sha256=8vEaysgYffXg3KUl8XSF36Mdywzi3LpEtUN_qenjO9s,1655747
- emhass/data/config_defaults.json,sha256=0jyYfF1ob3QMbjP8h2rd0jlbfe2uYm68NYW0GKM5qfk,2754
- emhass/static/advanced.html,sha256=gAhsd14elDwh1Ts4lf9wn_ZkczzzObq5qOimi_la3Ic,2067
- emhass/static/basic.html,sha256=ro2WwWgJyoUhqx_nJFzKCEG8FA8863vSHLmrjGYcEgs,677
- emhass/static/configuration_list.html,sha256=4ZAL-4YXdofnx17np-v39Yt3qW2TWbSzNBkj86bpvIg,1578
- emhass/static/configuration_script.js,sha256=N95GzyQdLzzOuSNw4L78BdArdqLPYJKhU3baGEsOhZE,31098
- emhass/static/script.js,sha256=q3qTqc_pTLTK-0NPKurxFXcJ2vZLz4TctPfUgz09ygo,16291
- emhass/static/style.css,sha256=a_8YlGubn1zoF5RTLJ_Qkrb8tAjUY9p7oAKxhCvJY2s,19288
- emhass/static/data/param_definitions.json,sha256=2z_nb94wrj-fVORy0F_hoOCHHv7CbemIoKpScCmRcPI,19243
- emhass/static/img/emhass_icon.png,sha256=Kyx6hXQ1huJLHAq2CaBfjYXR25H9j99PSWHI0lShkaQ,19030
- emhass/static/img/emhass_logo_short.svg,sha256=yzMcqtBRCV8rH84-MwnigZh45_f9Eoqwho9P8nCodJA,66736
- emhass/static/img/feather-sprite.svg,sha256=VHjMJQg88wXa9CaeYrKGhNtyK0xdd47zCqwSIa-hxo8,60319
- emhass/templates/configuration.html,sha256=yS9p730GHf99ZYK0NiZjkuaxPjH1ZFo8R6xL5c1ZZ9s,2885
- emhass/templates/index.html,sha256=Ehn-hUdraIwX_5Usb5Liz1ip24NfztmCxsi0J4Tf3-A,3076
- emhass/templates/template.html,sha256=TkGgMecQEbFUZA4ymPwMUzNjKHsENvCgroUWbPt7G4Y,158
- emhass-0.11.3.dist-info/LICENSE,sha256=1X3-S1yvOCBDBeox1aK3dq00m7dA8NDtcPrpKPISzbE,1077
- emhass-0.11.3.dist-info/METADATA,sha256=hkZYpwr5bjDhC9hGaQdAY2braRkT10_LD5pHCFuxQ3A,48945
- emhass-0.11.3.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
- emhass-0.11.3.dist-info/entry_points.txt,sha256=6Bp1NFOGNv_fSTxYl1ke3K3h3aqAcBxI-bgq5yq-i1M,52
- emhass-0.11.3.dist-info/top_level.txt,sha256=L7fIX4awfmxQbAePtSdVg2e6x_HhghfReHfsKSpKr9I,7
- emhass-0.11.3.dist-info/RECORD,,