emhass 0.8.6__py3-none-any.whl → 0.9.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
emhass/retrieve_hass.py CHANGED
@@ -31,11 +31,11 @@ class RetrieveHass:
     """
 
     def __init__(self, hass_url: str, long_lived_token: str, freq: pd.Timedelta,
-                 time_zone: datetime.timezone, params: str, base_path: str, logger: logging.Logger,
+                 time_zone: datetime.timezone, params: str, emhass_conf: dict, logger: logging.Logger,
                  get_data_from_file: Optional[bool] = False) -> None:
         """
         Define constructor for RetrieveHass class.
-
+
         :param hass_url: The URL of the Home Assistant instance
         :type hass_url: str
         :param long_lived_token: The long lived token retrieved from the configuration pane
@@ -46,11 +46,11 @@ class RetrieveHass:
         :type time_zone: datetime.timezone
         :param params: Configuration parameters passed from data/options.json
         :type params: str
-        :param base_path: The path to the yaml configuration file
-        :type base_path: str
+        :param emhass_conf: Dictionary containing the needed emhass paths
+        :type emhass_conf: dict
         :param logger: The passed logger object
         :type logger: logging object
-        :param get_data_from_file: Select if data should be retrieved from a 
+        :param get_data_from_file: Select if data should be retrieved from a
             previously saved pickle useful for testing or directly from connection to
             hass database
         :type get_data_from_file: bool, optional
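
The constructor now takes an emhass_conf dictionary of paths in place of the single base_path string. The dictionary keys are not visible in this diff, so the construction sketch below uses assumed key names (config_path, data_path, root_path) purely for illustration:

import logging
from pathlib import Path

import pandas as pd

from emhass.retrieve_hass import RetrieveHass

# Sketch only: the emhass_conf key names below are assumptions, not confirmed by this diff
emhass_conf = {
    "config_path": Path("/app/config_emhass.yaml"),  # assumed key name
    "data_path": Path("/app/data/"),  # assumed key name
    "root_path": Path("/app/src/emhass/"),  # assumed key name
}
rh = RetrieveHass(
    "http://supervisor/core/api",  # hass_url (add-on case)
    "<long_lived_token>",
    pd.Timedelta("30min"),  # optimization time step
    None,  # time_zone; a real setup would pass a datetime.timezone / pytz zone
    "{}",  # params JSON string
    emhass_conf,
    logging.getLogger(__name__),
)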
@@ -61,13 +61,13 @@ class RetrieveHass:
         self.freq = freq
         self.time_zone = time_zone
         self.params = params
-        self.base_path = base_path
+        # self.emhass_conf = emhass_conf
         self.logger = logger
         self.get_data_from_file = get_data_from_file
 
-    def get_data(self, days_list: pd.date_range, var_list: list, minimal_response: Optional[bool] = False,
-                 significant_changes_only: Optional[bool] = False,
-                 test_url: Optional[str] = 'empty') -> None:
+    def get_data(self, days_list: pd.date_range, var_list: list,
+                 minimal_response: Optional[bool] = False, significant_changes_only: Optional[bool] = False,
+                 test_url: Optional[str] = "empty") -> None:
         r"""
         Retrieve the actual data from hass.
 
@@ -92,20 +92,34 @@ class RetrieveHass:
         """
         self.logger.info("Retrieve hass get data method initiated...")
         self.df_final = pd.DataFrame()
-        x = 0 #iterate based on days
+        x = 0  # iterate based on days
         # Looping on each day from days list
         for day in days_list:
-
             for i, var in enumerate(var_list):
-
-                if test_url == 'empty':
-                    if self.hass_url == "http://supervisor/core/api": # If we are using the supervisor API
-                        url = self.hass_url+"/history/period/"+day.isoformat()+"?filter_entity_id="+var
-                    else: # Otherwise the Home Assistant Core API it is
-                        url = self.hass_url+"api/history/period/"+day.isoformat()+"?filter_entity_id="+var
-                    if minimal_response: # A support for minimal response
+                if test_url == "empty":
+                    if (
+                        self.hass_url == "http://supervisor/core/api"
+                    ):  # If we are using the supervisor API
+                        url = (
+                            self.hass_url
+                            + "/history/period/"
+                            + day.isoformat()
+                            + "?filter_entity_id="
+                            + var
+                        )
+                    else:  # Otherwise the Home Assistant Core API it is
+                        url = (
+                            self.hass_url
+                            + "api/history/period/"
+                            + day.isoformat()
+                            + "?filter_entity_id="
+                            + var
+                        )
+                    if minimal_response:  # A support for minimal response
                         url = url + "?minimal_response"
-                    if significant_changes_only: # And for signicant changes only (check the HASS restful API for more info)
+                    if (
+                        significant_changes_only
+                    ):  # And for signicant changes only (check the HASS restful API for more info)
                         url = url + "?significant_changes_only"
                 else:
                     url = test_url
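
For reference, the supervisor branch above assembles a Home Assistant history API URL of the following shape (date and sensor name are illustrative):

# Illustrative reconstruction of the URL built in the supervisor branch
day_iso = "2024-01-01T00:00:00+00:00"  # day.isoformat()
var = "sensor.power_load_no_var_loads"  # one entry of var_list (assumed name)
hass_url = "http://supervisor/core/api"
url = hass_url + "/history/period/" + day_iso + "?filter_entity_id=" + var
# -> http://supervisor/core/api/history/period/2024-01-01T00:00:00+00:00?filter_entity_id=sensor.power_load_no_var_loads
# The standalone Core API branch instead appends "api/history/period/...",
# which assumes hass_url ends with a trailing slash.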
@@ -116,35 +130,51 @@ class RetrieveHass:
                 try:
                     response = get(url, headers=headers)
                 except Exception:
-                    self.logger.error("Unable to access Home Assistance instance, check URL")
-                    self.logger.error("If using addon, try setting url and token to 'empty'")
+                    self.logger.error(
+                        "Unable to access Home Assistance instance, check URL"
+                    )
+                    self.logger.error(
+                        "If using addon, try setting url and token to 'empty'"
+                    )
                     return False
                 else:
                     if response.status_code == 401:
-                        self.logger.error("Unable to access Home Assistance instance, TOKEN/KEY")
-                        self.logger.error("If using addon, try setting url and token to 'empty'")
+                        self.logger.error(
+                            "Unable to access Home Assistance instance, TOKEN/KEY"
+                        )
+                        self.logger.error(
+                            "If using addon, try setting url and token to 'empty'"
+                        )
                         return False
                     if response.status_code > 299:
                         return f"Request Get Error: {response.status_code}"
-                '''import bz2 # Uncomment to save a serialized data for tests
+                """import bz2 # Uncomment to save a serialized data for tests
                 import _pickle as cPickle
                 with bz2.BZ2File("data/test_response_get_data_get_method.pbz2", "w") as f:
-                cPickle.dump(response, f)'''
-                try: # Sometimes when there are connection problems we need to catch empty retrieved json
+                cPickle.dump(response, f)"""
+                try:  # Sometimes when there are connection problems we need to catch empty retrieved json
                     data = response.json()[0]
                 except IndexError:
                     if x == 0:
-                        self.logger.error("The retrieved JSON is empty, A sensor:" + var + " may have 0 days of history or passed sensor may not be correct")
+                        self.logger.error("The retrieved JSON is empty, A sensor:" + var + " may have 0 days of history, passed sensor may not be correct, or days to retrieve is set too heigh")
                     else:
                         self.logger.error("The retrieved JSON is empty for day:"+ str(day) +", days_to_retrieve may be larger than the recorded history of sensor:" + var + " (check your recorder settings)")
                     return False
                 df_raw = pd.DataFrame.from_dict(data)
+                # self.logger.info(str(df_raw))
                 if len(df_raw) == 0:
                     if x == 0:
-                        self.logger.error("The retrieved Dataframe is empty, A sensor:" + var + " may have 0 days of history or passed sensor may not be correct")
+                        self.logger.error(
+                            "The retrieved Dataframe is empty, A sensor:"
+                            + var
+                            + " may have 0 days of history or passed sensor may not be correct"
+                        )
                     else:
                         self.logger.error("Retrieved empty Dataframe for day:"+ str(day) +", days_to_retrieve may be larger than the recorded history of sensor:" + var + " (check your recorder settings)")
                     return False
+                # self.logger.info(self.freq.seconds)
+                if len(df_raw) < ((60 / (self.freq.seconds / 60)) * 24) and x != len(days_list) -1: #check if there is enough Dataframes for passed frequency per day (not inc current day)
+                    self.logger.debug("sensor:" + var + " retrieved Dataframe count: " + str(len(df_raw)) + ", on day: " + str(day) + ". This is less than freq value passed: " + str(self.freq))
                 if i == 0:  # Defining the DataFrame container
                     from_date = pd.to_datetime(df_raw['last_changed'], format="ISO8601").min()
                     to_date = pd.to_datetime(df_raw['last_changed'], format="ISO8601").max()
@@ -152,20 +182,27 @@ class RetrieveHass:
                                         format='%Y-%d-%m %H:%M').round(self.freq, ambiguous='infer', nonexistent='shift_forward')
                     df_day = pd.DataFrame(index = ts)
                 # Caution with undefined string data: unknown, unavailable, etc.
-                df_tp = df_raw.copy()[['state']].replace(
-                    ['unknown', 'unavailable', ''], np.nan).astype(float).rename(columns={'state': var})
+                df_tp = (
+                    df_raw.copy()[["state"]]
+                    .replace(["unknown", "unavailable", ""], np.nan)
+                    .astype(float)
+                    .rename(columns={"state": var})
+                )
                 # Setting index, resampling and concatenation
-                df_tp.set_index(pd.to_datetime(df_raw['last_changed'], format="ISO8601"), inplace=True)
+                df_tp.set_index(
+                    pd.to_datetime(df_raw["last_changed"], format="ISO8601"),
+                    inplace=True,
+                )
                 df_tp = df_tp.resample(self.freq).mean()
                 df_day = pd.concat([df_day, df_tp], axis=1)
-
-            x += 1
             self.df_final = pd.concat([self.df_final, df_day], axis=0)
+            x += 1
         self.df_final = set_df_index_freq(self.df_final)
         if self.df_final.index.freq != self.freq:
-            self.logger.error("The inferred freq from data is not equal to the defined freq in passed parameters")
+            self.logger.error("The inferred freq:" + str(self.df_final.index.freq) + " from data is not equal to the defined freq in passed:" + str(self.freq))
             return False
         return True
+
 
     def prepare_data(self, var_load: str, load_negative: Optional[bool] = False, set_zero_min: Optional[bool] = True,
                      var_replace_zero: Optional[list] = None, var_interp: Optional[list] = None) -> None:
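
With these changes get_data still returns True or False, now logs a debug message when a day returns fewer rows than the sampling frequency implies, and increments the day counter x once per day rather than once per variable. A minimal end-to-end sketch, continuing the rh instance from the earlier sketch (sensor names and the two-day window are assumptions; emhass normally builds days_list through its own helpers):

# Sketch only: sensor names and retrieval window are assumptions
days_list = pd.date_range(end=pd.Timestamp.now().date(), periods=2, freq="D")
ok = rh.get_data(days_list, ["sensor.power_load_no_var_loads", "sensor.power_photovoltaics"])
if ok:
    rh.prepare_data(
        "sensor.power_load_no_var_loads",
        var_replace_zero=["sensor.power_photovoltaics"],
        var_interp=["sensor.power_photovoltaics", "sensor.power_load_no_var_loads"],
    )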
@@ -192,18 +229,24 @@ class RetrieveHass:
 
         """
         try:
-            if load_negative: # Apply the correct sign to load power
-                self.df_final[var_load+'_positive'] = -self.df_final[var_load]
+            if load_negative:  # Apply the correct sign to load power
+                self.df_final[var_load + "_positive"] = -self.df_final[var_load]
             else:
-                self.df_final[var_load+'_positive'] = self.df_final[var_load]
+                self.df_final[var_load + "_positive"] = self.df_final[var_load]
             self.df_final.drop([var_load], inplace=True, axis=1)
         except KeyError:
-            self.logger.error("Variable "+var_load+" was not found. This is typically because no data could be retrieved from Home Assistant")
+            self.logger.error(
+                "Variable "
+                + var_load
+                + " was not found. This is typically because no data could be retrieved from Home Assistant"
+            )
             return False
         except ValueError:
-            self.logger.error("sensor.power_photovoltaics and sensor.power_load_no_var_loads should not be the same")
-            return False
-        if set_zero_min: # Apply minimum values
+            self.logger.error(
+                "sensor.power_photovoltaics and sensor.power_load_no_var_loads should not be the same"
+            )
+            return False
+        if set_zero_min:  # Apply minimum values
             self.df_final.clip(lower=0.0, inplace=True, axis=1)
             self.df_final.replace(to_replace=0.0, value=np.nan, inplace=True)
         new_var_replace_zero = []
@@ -211,59 +254,59 @@ class RetrieveHass:
         # Just changing the names of variables to contain the fact that they are considered positive
         if var_replace_zero is not None:
             for string in var_replace_zero:
-                new_string = string.replace(var_load, var_load+'_positive')
+                new_string = string.replace(var_load, var_load + "_positive")
                 new_var_replace_zero.append(new_string)
         else:
             new_var_replace_zero = None
         if var_interp is not None:
             for string in var_interp:
-                new_string = string.replace(var_load, var_load+'_positive')
+                new_string = string.replace(var_load, var_load + "_positive")
                 new_var_interp.append(new_string)
         else:
             new_var_interp = None
         # Treating NaN replacement: either by zeros or by linear interpolation
         if new_var_replace_zero is not None:
-            self.df_final[new_var_replace_zero] = self.df_final[new_var_replace_zero].fillna(0.0)
+            self.df_final[new_var_replace_zero] = self.df_final[
+                new_var_replace_zero
+            ].fillna(0.0)
         if new_var_interp is not None:
             self.df_final[new_var_interp] = self.df_final[new_var_interp].interpolate(
-                method='linear', axis=0, limit=None)
+                method="linear", axis=0, limit=None
+            )
             self.df_final[new_var_interp] = self.df_final[new_var_interp].fillna(0.0)
         # Setting the correct time zone on DF index
         if self.time_zone is not None:
             self.df_final.index = self.df_final.index.tz_convert(self.time_zone)
         # Drop datetimeindex duplicates on final DF
-        self.df_final = self.df_final[~self.df_final.index.duplicated(keep='first')]
+        self.df_final = self.df_final[~self.df_final.index.duplicated(keep="first")]
         return True
-
+
     @staticmethod
-    def get_attr_data_dict(data_df: pd.DataFrame, idx: int, entity_id: str,
-                           unit_of_measurement: str, friendly_name: str,
-                           list_name: str, state: float) -> dict:
-        list_df = copy.deepcopy(data_df).loc[data_df.index[idx]:].reset_index()
-        list_df.columns = ['timestamps', entity_id]
-        ts_list = [str(i) for i in list_df['timestamps'].tolist()]
-        vals_list = [str(np.round(i,2)) for i in list_df[entity_id].tolist()]
+    def get_attr_data_dict(data_df: pd.DataFrame, idx: int, entity_id: str, unit_of_measurement: str,
+                           friendly_name: str, list_name: str, state: float) -> dict:
+        list_df = copy.deepcopy(data_df).loc[data_df.index[idx] :].reset_index()
+        list_df.columns = ["timestamps", entity_id]
+        ts_list = [str(i) for i in list_df["timestamps"].tolist()]
+        vals_list = [str(np.round(i, 2)) for i in list_df[entity_id].tolist()]
         forecast_list = []
         for i, ts in enumerate(ts_list):
             datum = {}
             datum["date"] = ts
-            datum[entity_id.split('sensor.')[1]] = vals_list[i]
+            datum[entity_id.split("sensor.")[1]] = vals_list[i]
             forecast_list.append(datum)
         data = {
             "state": "{:.2f}".format(state),
             "attributes": {
                 "unit_of_measurement": unit_of_measurement,
                 "friendly_name": friendly_name,
-                list_name: forecast_list
-            }
+                list_name: forecast_list,
+            },
         }
         return data
-
-    def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str,
-                  unit_of_measurement: str, friendly_name: str,
-                  type_var: str,
-                  from_mlforecaster: Optional[bool]=False,
-                  publish_prefix: Optional[str]="") -> None:
+
+    def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str, unit_of_measurement: str,
+                  friendly_name: str, type_var: str, from_mlforecaster: Optional[bool] = False,
+                  publish_prefix: Optional[str] = "") -> None:
         r"""
         Post passed data to hass.
 
@@ -286,72 +329,88 @@ class RetrieveHass:
 
         """
         # Add a possible prefix to the entity ID
-        entity_id = entity_id.replace('sensor.', 'sensor.'+publish_prefix)
+        entity_id = entity_id.replace("sensor.", "sensor." + publish_prefix)
         # Set the URL
-        if self.hass_url == "http://supervisor/core/api": # If we are using the supervisor API
-            url = self.hass_url+"/states/"+entity_id
-        else: # Otherwise the Home Assistant Core API it is
-            url = self.hass_url+"api/states/"+entity_id
+        if (
+            self.hass_url == "http://supervisor/core/api"
+        ):  # If we are using the supervisor API
+            url = self.hass_url + "/states/" + entity_id
+        else:  # Otherwise the Home Assistant Core API it is
+            url = self.hass_url + "api/states/" + entity_id
         headers = {
             "Authorization": "Bearer " + self.long_lived_token,
             "content-type": "application/json",
         }
         # Preparing the data dict to be published
-        if type_var == 'cost_fun':
-            state = np.round(data_df.sum()[0],2)
-        elif type_var == 'unit_load_cost' or type_var == 'unit_prod_price':
-            state = np.round(data_df.loc[data_df.index[idx]],4)
-        elif type_var == 'optim_status':
+        if type_var == "cost_fun":
+            state = np.round(data_df.sum()[0], 2)
+        elif type_var == "unit_load_cost" or type_var == "unit_prod_price":
+            state = np.round(data_df.loc[data_df.index[idx]], 4)
+        elif type_var == "optim_status":
             state = data_df.loc[data_df.index[idx]]
+        elif type_var == "mlregressor":
+            state = data_df[idx]
         else:
-            state = np.round(data_df.loc[data_df.index[idx]],2)
-        if type_var == 'power':
-            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
-                                                   friendly_name, "forecasts", state)
-        elif type_var == 'deferrable':
-            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
-                                                   friendly_name, "deferrables_schedule", state)
-        elif type_var == 'batt':
+            state = np.round(data_df.loc[data_df.index[idx]], 2)
+        if type_var == "power":
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "forecasts", state)
+        elif type_var == "deferrable":
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "deferrables_schedule", state)
+        elif type_var == "batt":
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "battery_scheduled_power", state)
+        elif type_var == "SOC":
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "battery_scheduled_soc", state)
+        elif type_var == "unit_load_cost":
             data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
-                                                   friendly_name, "battery_scheduled_power", state)
-        elif type_var == 'SOC':
-            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
-                                                   friendly_name, "battery_scheduled_soc", state)
-        elif type_var == 'unit_load_cost':
-            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
-                                                   friendly_name, "unit_load_cost_forecasts", state)
-        elif type_var == 'unit_prod_price':
-            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
-                                                   friendly_name, "unit_prod_price_forecasts", state)
-        elif type_var == 'mlforecaster':
-            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
-                                                   friendly_name, "scheduled_forecast", state)
-        elif type_var == 'optim_status':
+                                                   friendly_name, "unit_load_cost_forecasts", state)
+        elif type_var == "unit_prod_price":
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "unit_prod_price_forecasts", state)
+        elif type_var == "mlforecaster":
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "scheduled_forecast", state)
+        elif type_var == "optim_status":
             data = {
                 "state": state,
                 "attributes": {
                     "unit_of_measurement": unit_of_measurement,
-                    "friendly_name": friendly_name
-                }
+                    "friendly_name": friendly_name,
+                },
+            }
+        elif type_var == "mlregressor":
+            data = {
+                "state": state,
+                "attributes": {
+                    "unit_of_measurement": unit_of_measurement,
+                    "friendly_name": friendly_name,
+                },
             }
         else:
             data = {
                 "state": "{:.2f}".format(state),
                 "attributes": {
                     "unit_of_measurement": unit_of_measurement,
-                    "friendly_name": friendly_name
-                }
+                    "friendly_name": friendly_name,
+                },
             }
         # Actually post the data
         if self.get_data_from_file:
-            class response: pass
+            class response:
+                pass
             response.status_code = 200
             response.ok = True
         else:
             response = post(url, headers=headers, data=json.dumps(data))
         # Treating the response status and posting them on the logger
         if response.ok:
-            self.logger.info("Successfully posted to "+entity_id+" = "+str(state))
+            self.logger.info("Successfully posted to " + entity_id + " = " + str(state))
         else:
-            self.logger.info("The status code for received curl command response is: "+str(response.status_code))
+            self.logger.info(
+                "The status code for received curl command response is: "
+                + str(response.status_code)
+            )
         return response, data
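
The mlregressor branch is the main behavioural addition to post_data: it reads the state positionally (data_df[idx]) instead of through the DataFrame index, and publishes a plain state/attributes payload without a forecast list. A call sketch, continuing the rh instance from earlier (entity id, unit, and value are illustrative):

import pandas as pd

# Illustrative values: entity id and prediction are assumptions
predictions = pd.Series([1523.4])  # e.g. output of a fitted regressor
rh.post_data(predictions, 0, "sensor.mlregressor_predict",
             "W", "ML regressor prediction", "mlregressor")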
@@ -14,6 +14,9 @@
             <button type="button" id="forecast-model-predict" class="button button2">ML forecast model
                 predict</button>
             <button type="button" id="forecast-model-tune" class="button button3">ML forecast model tune</button>
+            </br></br>
+            <button type="button" id="regressor-model-fit" class="button button1">ML regressor model fit</button>
+            <button type="button" id="regressor-model-predict" class="button button2">ML regressor model predict</button>
             <!-- -->
             <!--dynamic input elements section -->
             <h4>Input Runtime Parameters</h4>
emhass/static/script.js CHANGED
@@ -16,6 +16,8 @@ function loadButtons(page) {
         "forecast-model-fit",
         "forecast-model-predict",
         "forecast-model-tune",
+        "regressor-model-fit",
+        "regressor-model-predict",
         "perfect-optim",
         "publish-data",
         "naive-mpc-optim"