emhass 0.8.6__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff shows the content changes between publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
emhass/retrieve_hass.py CHANGED
@@ -31,11 +31,11 @@ class RetrieveHass:
  """

  def __init__(self, hass_url: str, long_lived_token: str, freq: pd.Timedelta,
- time_zone: datetime.timezone, params: str, base_path: str, logger: logging.Logger,
+ time_zone: datetime.timezone, params: str, emhass_conf: dict, logger: logging.Logger,
  get_data_from_file: Optional[bool] = False) -> None:
  """
  Define constructor for RetrieveHass class.
-
+
  :param hass_url: The URL of the Home Assistant instance
  :type hass_url: str
  :param long_lived_token: The long lived token retrieved from the configuration pane
@@ -46,11 +46,11 @@ class RetrieveHass:
  :type time_zone: datetime.timezone
  :param params: Configuration parameters passed from data/options.json
  :type params: str
- :param base_path: The path to the yaml configuration file
- :type base_path: str
+ :param emhass_conf: Dictionary containing the needed emhass paths
+ :type emhass_conf: dict
  :param logger: The passed logger object
  :type logger: logging object
- :param get_data_from_file: Select if data should be retrieved from a
+ :param get_data_from_file: Select if data should be retrieved from a
  previously saved pickle useful for testing or directly from connection to
  hass database
  :type get_data_from_file: bool, optional
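With 0.9.0 the constructor expects a dictionary of paths rather than the old base_path string. Below is a minimal sketch of how a caller might build that dictionary and instantiate the class under the new signature; the dictionary keys, file paths, and parameter values are illustrative assumptions, not values taken from this diff.

import logging
import pathlib

import pandas as pd

from emhass.retrieve_hass import RetrieveHass

# Assumed key names and paths, for illustration only.
emhass_conf = {
    "config_path": pathlib.Path("/app/config_emhass.yaml"),
    "data_path": pathlib.Path("/app/data/"),
    "root_path": pathlib.Path("/app/src/emhass/"),
}

rh = RetrieveHass(
    hass_url="http://supervisor/core/api",
    long_lived_token="empty",
    freq=pd.Timedelta("30min"),
    time_zone=None,  # pass a datetime.timezone in real use
    params="{}",
    emhass_conf=emhass_conf,
    logger=logging.getLogger(__name__),
)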
@@ -61,13 +61,18 @@ class RetrieveHass:
  self.freq = freq
  self.time_zone = time_zone
  self.params = params
- self.base_path = base_path
+ # self.emhass_conf = emhass_conf
  self.logger = logger
  self.get_data_from_file = get_data_from_file

- def get_data(self, days_list: pd.date_range, var_list: list, minimal_response: Optional[bool] = False,
- significant_changes_only: Optional[bool] = False,
- test_url: Optional[str] = 'empty') -> None:
+ def get_data(
+ self,
+ days_list: pd.date_range,
+ var_list: list,
+ minimal_response: Optional[bool] = False,
+ significant_changes_only: Optional[bool] = False,
+ test_url: Optional[str] = "empty",
+ ) -> None:
  r"""
  Retrieve the actual data from hass.

@@ -92,20 +97,36 @@ class RetrieveHass:
  """
  self.logger.info("Retrieve hass get data method initiated...")
  self.df_final = pd.DataFrame()
- x = 0 #iterate based on days
+ x = 0 # iterate based on days
  # Looping on each day from days list
  for day in days_list:
-
+
  for i, var in enumerate(var_list):
-
- if test_url == 'empty':
- if self.hass_url == "http://supervisor/core/api": # If we are using the supervisor API
- url = self.hass_url+"/history/period/"+day.isoformat()+"?filter_entity_id="+var
- else: # Otherwise the Home Assistant Core API it is
- url = self.hass_url+"api/history/period/"+day.isoformat()+"?filter_entity_id="+var
- if minimal_response: # A support for minimal response
+
+ if test_url == "empty":
+ if (
+ self.hass_url == "http://supervisor/core/api"
+ ): # If we are using the supervisor API
+ url = (
+ self.hass_url
+ + "/history/period/"
+ + day.isoformat()
+ + "?filter_entity_id="
+ + var
+ )
+ else: # Otherwise the Home Assistant Core API it is
+ url = (
+ self.hass_url
+ + "api/history/period/"
+ + day.isoformat()
+ + "?filter_entity_id="
+ + var
+ )
+ if minimal_response: # A support for minimal response
  url = url + "?minimal_response"
- if significant_changes_only: # And for signicant changes only (check the HASS restful API for more info)
+ if (
+ significant_changes_only
+ ): # And for signicant changes only (check the HASS restful API for more info)
  url = url + "?significant_changes_only"
  else:
  url = test_url
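For reference, a short sketch of the two history-URL shapes the branch above builds, using an example day and the load sensor name that appears elsewhere in this module; the concrete values are illustrative only.

# Illustrative only: the two URL forms produced by the branch above.
day_iso = "2024-01-01T00:00:00+00:00"
var = "sensor.power_load_no_var_loads"  # example sensor

# Supervisor API (add-on), hass_url == "http://supervisor/core/api":
url_supervisor = "http://supervisor/core/api" + "/history/period/" + day_iso + "?filter_entity_id=" + var

# Home Assistant Core API, hass_url like "http://localhost:8123/":
url_core = "http://localhost:8123/" + "api/history/period/" + day_iso + "?filter_entity_id=" + var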
@@ -116,35 +137,51 @@ class RetrieveHass:
  try:
  response = get(url, headers=headers)
  except Exception:
- self.logger.error("Unable to access Home Assistance instance, check URL")
- self.logger.error("If using addon, try setting url and token to 'empty'")
+ self.logger.error(
+ "Unable to access Home Assistance instance, check URL"
+ )
+ self.logger.error(
+ "If using addon, try setting url and token to 'empty'"
+ )
  return False
  else:
  if response.status_code == 401:
- self.logger.error("Unable to access Home Assistance instance, TOKEN/KEY")
- self.logger.error("If using addon, try setting url and token to 'empty'")
+ self.logger.error(
+ "Unable to access Home Assistance instance, TOKEN/KEY"
+ )
+ self.logger.error(
+ "If using addon, try setting url and token to 'empty'"
+ )
  return False
  if response.status_code > 299:
  return f"Request Get Error: {response.status_code}"
- '''import bz2 # Uncomment to save a serialized data for tests
+ """import bz2 # Uncomment to save a serialized data for tests
  import _pickle as cPickle
  with bz2.BZ2File("data/test_response_get_data_get_method.pbz2", "w") as f:
- cPickle.dump(response, f)'''
- try: # Sometimes when there are connection problems we need to catch empty retrieved json
+ cPickle.dump(response, f)"""
+ try: # Sometimes when there are connection problems we need to catch empty retrieved json
  data = response.json()[0]
  except IndexError:
  if x == 0:
- self.logger.error("The retrieved JSON is empty, A sensor:" + var + " may have 0 days of history or passed sensor may not be correct")
+ self.logger.error("The retrieved JSON is empty, A sensor:" + var + " may have 0 days of history, passed sensor may not be correct, or days to retrieve is set too heigh")
  else:
  self.logger.error("The retrieved JSON is empty for day:"+ str(day) +", days_to_retrieve may be larger than the recorded history of sensor:" + var + " (check your recorder settings)")
  return False
  df_raw = pd.DataFrame.from_dict(data)
+ # self.logger.info(str(df_raw))
  if len(df_raw) == 0:
  if x == 0:
- self.logger.error("The retrieved Dataframe is empty, A sensor:" + var + " may have 0 days of history or passed sensor may not be correct")
+ self.logger.error(
+ "The retrieved Dataframe is empty, A sensor:"
+ + var
+ + " may have 0 days of history or passed sensor may not be correct"
+ )
  else:
  self.logger.error("Retrieved empty Dataframe for day:"+ str(day) +", days_to_retrieve may be larger than the recorded history of sensor:" + var + " (check your recorder settings)")
  return False
+ # self.logger.info(self.freq.seconds)
+ if len(df_raw) < ((60 / (self.freq.seconds / 60)) * 24) and x != len(days_list) -1: #check if there is enough Dataframes for passed frequency per day (not inc current day)
+ self.logger.debug("sensor:" + var + " retrieved Dataframe count: " + str(len(df_raw)) + ", on day: " + str(day) + ". This is less than freq value passed: " + str(self.freq))
  if i == 0: # Defining the DataFrame container
  from_date = pd.to_datetime(df_raw['last_changed'], format="ISO8601").min()
  to_date = pd.to_datetime(df_raw['last_changed'], format="ISO8601").max()
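The new row-count check expects one history sample per freq interval over a full day; here is a short worked example of that arithmetic, assuming a 30-minute freq.

# Worked example of the new per-day row-count threshold, assuming freq = 30 min.
import pandas as pd

freq = pd.Timedelta("30min")
expected_rows_per_day = (60 / (freq.seconds / 60)) * 24  # (60 / 30) * 24 = 48.0

# A day whose history returns fewer than 48 rows (and which is not the last,
# partial day in days_list) now triggers the debug message added above.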
@@ -152,20 +189,27 @@ class RetrieveHass:
  format='%Y-%d-%m %H:%M').round(self.freq, ambiguous='infer', nonexistent='shift_forward')
  df_day = pd.DataFrame(index = ts)
  # Caution with undefined string data: unknown, unavailable, etc.
- df_tp = df_raw.copy()[['state']].replace(
- ['unknown', 'unavailable', ''], np.nan).astype(float).rename(columns={'state': var})
+ df_tp = (
+ df_raw.copy()[["state"]]
+ .replace(["unknown", "unavailable", ""], np.nan)
+ .astype(float)
+ .rename(columns={"state": var})
+ )
  # Setting index, resampling and concatenation
- df_tp.set_index(pd.to_datetime(df_raw['last_changed'], format="ISO8601"), inplace=True)
+ df_tp.set_index(
+ pd.to_datetime(df_raw["last_changed"], format="ISO8601"),
+ inplace=True,
+ )
  df_tp = df_tp.resample(self.freq).mean()
  df_day = pd.concat([df_day, df_tp], axis=1)
-
- x += 1
  self.df_final = pd.concat([self.df_final, df_day], axis=0)
+ x += 1
  self.df_final = set_df_index_freq(self.df_final)
  if self.df_final.index.freq != self.freq:
- self.logger.error("The inferred freq from data is not equal to the defined freq in passed parameters")
+ self.logger.error("The inferred freq:" + str(self.df_final.index.freq) + " from data is not equal to the defined freq in passed:" + str(self.freq))
  return False
  return True
+

  def prepare_data(self, var_load: str, load_negative: Optional[bool] = False, set_zero_min: Optional[bool] = True,
  var_replace_zero: Optional[list] = None, var_interp: Optional[list] = None) -> None:
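A self-contained sketch of the per-sensor cleaning and resampling chain shown above, applied to a toy history frame with made-up values (format="ISO8601" requires pandas 2.x, which this code already uses).

# Toy reproduction of the cleaning/resampling steps above (values are made up).
import numpy as np
import pandas as pd

df_raw = pd.DataFrame({
    "last_changed": ["2024-01-01T00:05:00+00:00",
                     "2024-01-01T00:20:00+00:00",
                     "2024-01-01T00:40:00+00:00"],
    "state": ["100.5", "unavailable", "250.0"],
})
var = "sensor.power_load_no_var_loads"  # example sensor

df_tp = (
    df_raw.copy()[["state"]]
    .replace(["unknown", "unavailable", ""], np.nan)  # undefined states become NaN
    .astype(float)
    .rename(columns={"state": var})
)
df_tp.set_index(pd.to_datetime(df_raw["last_changed"], format="ISO8601"), inplace=True)
df_tp = df_tp.resample(pd.Timedelta("30min")).mean()  # one averaged value per 30-min slot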
@@ -192,18 +236,24 @@ class RetrieveHass:

  """
  try:
- if load_negative: # Apply the correct sign to load power
- self.df_final[var_load+'_positive'] = -self.df_final[var_load]
+ if load_negative: # Apply the correct sign to load power
+ self.df_final[var_load + "_positive"] = -self.df_final[var_load]
  else:
- self.df_final[var_load+'_positive'] = self.df_final[var_load]
+ self.df_final[var_load + "_positive"] = self.df_final[var_load]
  self.df_final.drop([var_load], inplace=True, axis=1)
  except KeyError:
- self.logger.error("Variable "+var_load+" was not found. This is typically because no data could be retrieved from Home Assistant")
+ self.logger.error(
+ "Variable "
+ + var_load
+ + " was not found. This is typically because no data could be retrieved from Home Assistant"
+ )
  return False
  except ValueError:
- self.logger.error("sensor.power_photovoltaics and sensor.power_load_no_var_loads should not be the same")
- return False
- if set_zero_min: # Apply minimum values
+ self.logger.error(
+ "sensor.power_photovoltaics and sensor.power_load_no_var_loads should not be the same"
+ )
+ return False
+ if set_zero_min: # Apply minimum values
  self.df_final.clip(lower=0.0, inplace=True, axis=1)
  self.df_final.replace(to_replace=0.0, value=np.nan, inplace=True)
  new_var_replace_zero = []
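A small sketch of the sign handling and set_zero_min clipping performed here, with made-up values.

# Toy illustration of the load sign handling and zero clipping (values made up).
import numpy as np
import pandas as pd

var_load = "sensor.power_load_no_var_loads"
df_final = pd.DataFrame({var_load: [-500.0, 0.0, 1200.0]})

load_negative = True  # the load sensor reports consumption as negative power
if load_negative:
    df_final[var_load + "_positive"] = -df_final[var_load]
else:
    df_final[var_load + "_positive"] = df_final[var_load]
df_final.drop([var_load], inplace=True, axis=1)

# set_zero_min: clip negatives to zero, then turn exact zeros into NaN
# so they can later be zero-filled or interpolated.
df_final.clip(lower=0.0, inplace=True, axis=1)
df_final.replace(to_replace=0.0, value=np.nan, inplace=True)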
@@ -211,59 +261,74 @@ class RetrieveHass:
  # Just changing the names of variables to contain the fact that they are considered positive
  if var_replace_zero is not None:
  for string in var_replace_zero:
- new_string = string.replace(var_load, var_load+'_positive')
+ new_string = string.replace(var_load, var_load + "_positive")
  new_var_replace_zero.append(new_string)
  else:
  new_var_replace_zero = None
  if var_interp is not None:
  for string in var_interp:
- new_string = string.replace(var_load, var_load+'_positive')
+ new_string = string.replace(var_load, var_load + "_positive")
  new_var_interp.append(new_string)
  else:
  new_var_interp = None
  # Treating NaN replacement: either by zeros or by linear interpolation
  if new_var_replace_zero is not None:
- self.df_final[new_var_replace_zero] = self.df_final[new_var_replace_zero].fillna(0.0)
+ self.df_final[new_var_replace_zero] = self.df_final[
+ new_var_replace_zero
+ ].fillna(0.0)
  if new_var_interp is not None:
  self.df_final[new_var_interp] = self.df_final[new_var_interp].interpolate(
- method='linear', axis=0, limit=None)
+ method="linear", axis=0, limit=None
+ )
  self.df_final[new_var_interp] = self.df_final[new_var_interp].fillna(0.0)
  # Setting the correct time zone on DF index
  if self.time_zone is not None:
  self.df_final.index = self.df_final.index.tz_convert(self.time_zone)
  # Drop datetimeindex duplicates on final DF
- self.df_final = self.df_final[~self.df_final.index.duplicated(keep='first')]
+ self.df_final = self.df_final[~self.df_final.index.duplicated(keep="first")]
  return True
-
+
  @staticmethod
- def get_attr_data_dict(data_df: pd.DataFrame, idx: int, entity_id: str,
- unit_of_measurement: str, friendly_name: str,
- list_name: str, state: float) -> dict:
- list_df = copy.deepcopy(data_df).loc[data_df.index[idx]:].reset_index()
- list_df.columns = ['timestamps', entity_id]
- ts_list = [str(i) for i in list_df['timestamps'].tolist()]
- vals_list = [str(np.round(i,2)) for i in list_df[entity_id].tolist()]
+ def get_attr_data_dict(
+ data_df: pd.DataFrame,
+ idx: int,
+ entity_id: str,
+ unit_of_measurement: str,
+ friendly_name: str,
+ list_name: str,
+ state: float,
+ ) -> dict:
+ list_df = copy.deepcopy(data_df).loc[data_df.index[idx] :].reset_index()
+ list_df.columns = ["timestamps", entity_id]
+ ts_list = [str(i) for i in list_df["timestamps"].tolist()]
+ vals_list = [str(np.round(i, 2)) for i in list_df[entity_id].tolist()]
  forecast_list = []
  for i, ts in enumerate(ts_list):
  datum = {}
  datum["date"] = ts
- datum[entity_id.split('sensor.')[1]] = vals_list[i]
+ datum[entity_id.split("sensor.")[1]] = vals_list[i]
  forecast_list.append(datum)
  data = {
  "state": "{:.2f}".format(state),
  "attributes": {
  "unit_of_measurement": unit_of_measurement,
  "friendly_name": friendly_name,
- list_name: forecast_list
- }
+ list_name: forecast_list,
+ },
  }
  return data
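For orientation, an illustrative example of the attribute payload that get_attr_data_dict builds for a "power"-type forecast entity; the inner key is entity_id.split("sensor.")[1], so an entity named sensor.p_load_forecast yields the key shown here. The entity name, unit, and numbers are made up.

# Hypothetical payload shape produced for type_var == "power" (values made up).
data = {
    "state": "1251.37",
    "attributes": {
        "unit_of_measurement": "W",
        "friendly_name": "Load Power Forecast",
        "forecasts": [
            {"date": "2024-01-01 00:00:00+00:00", "p_load_forecast": "1251.37"},
            {"date": "2024-01-01 00:30:00+00:00", "p_load_forecast": "980.12"},
        ],
    },
}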
-
- def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str,
- unit_of_measurement: str, friendly_name: str,
- type_var: str,
- from_mlforecaster: Optional[bool]=False,
- publish_prefix: Optional[str]="") -> None:
+
+ def post_data(
+ self,
+ data_df: pd.DataFrame,
+ idx: int,
+ entity_id: str,
+ unit_of_measurement: str,
+ friendly_name: str,
+ type_var: str,
+ from_mlforecaster: Optional[bool] = False,
+ publish_prefix: Optional[str] = "",
+ ) -> None:
  r"""
  Post passed data to hass.

@@ -286,72 +351,139 @@ class RetrieveHass:

  """
  # Add a possible prefix to the entity ID
- entity_id = entity_id.replace('sensor.', 'sensor.'+publish_prefix)
+ entity_id = entity_id.replace("sensor.", "sensor." + publish_prefix)
  # Set the URL
- if self.hass_url == "http://supervisor/core/api": # If we are using the supervisor API
- url = self.hass_url+"/states/"+entity_id
- else: # Otherwise the Home Assistant Core API it is
- url = self.hass_url+"api/states/"+entity_id
+ if (
+ self.hass_url == "http://supervisor/core/api"
+ ): # If we are using the supervisor API
+ url = self.hass_url + "/states/" + entity_id
+ else: # Otherwise the Home Assistant Core API it is
+ url = self.hass_url + "api/states/" + entity_id
  headers = {
  "Authorization": "Bearer " + self.long_lived_token,
  "content-type": "application/json",
  }
  # Preparing the data dict to be published
- if type_var == 'cost_fun':
- state = np.round(data_df.sum()[0],2)
- elif type_var == 'unit_load_cost' or type_var == 'unit_prod_price':
- state = np.round(data_df.loc[data_df.index[idx]],4)
- elif type_var == 'optim_status':
+ if type_var == "cost_fun":
+ state = np.round(data_df.sum()[0], 2)
+ elif type_var == "unit_load_cost" or type_var == "unit_prod_price":
+ state = np.round(data_df.loc[data_df.index[idx]], 4)
+ elif type_var == "optim_status":
  state = data_df.loc[data_df.index[idx]]
+ elif type_var == "mlregressor":
+ state = data_df[idx]
  else:
- state = np.round(data_df.loc[data_df.index[idx]],2)
- if type_var == 'power':
- data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
- friendly_name, "forecasts", state)
- elif type_var == 'deferrable':
- data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
- friendly_name, "deferrables_schedule", state)
- elif type_var == 'batt':
- data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
- friendly_name, "battery_scheduled_power", state)
- elif type_var == 'SOC':
- data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
- friendly_name, "battery_scheduled_soc", state)
- elif type_var == 'unit_load_cost':
- data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
- friendly_name, "unit_load_cost_forecasts", state)
- elif type_var == 'unit_prod_price':
- data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
- friendly_name, "unit_prod_price_forecasts", state)
- elif type_var == 'mlforecaster':
- data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
- friendly_name, "scheduled_forecast", state)
- elif type_var == 'optim_status':
+ state = np.round(data_df.loc[data_df.index[idx]], 2)
+ if type_var == "power":
+ data = RetrieveHass.get_attr_data_dict(
+ data_df,
+ idx,
+ entity_id,
+ unit_of_measurement,
+ friendly_name,
+ "forecasts",
+ state,
+ )
+ elif type_var == "deferrable":
+ data = RetrieveHass.get_attr_data_dict(
+ data_df,
+ idx,
+ entity_id,
+ unit_of_measurement,
+ friendly_name,
+ "deferrables_schedule",
+ state,
+ )
+ elif type_var == "batt":
+ data = RetrieveHass.get_attr_data_dict(
+ data_df,
+ idx,
+ entity_id,
+ unit_of_measurement,
+ friendly_name,
+ "battery_scheduled_power",
+ state,
+ )
+ elif type_var == "SOC":
+ data = RetrieveHass.get_attr_data_dict(
+ data_df,
+ idx,
+ entity_id,
+ unit_of_measurement,
+ friendly_name,
+ "battery_scheduled_soc",
+ state,
+ )
+ elif type_var == "unit_load_cost":
+ data = RetrieveHass.get_attr_data_dict(
+ data_df,
+ idx,
+ entity_id,
+ unit_of_measurement,
+ friendly_name,
+ "unit_load_cost_forecasts",
+ state,
+ )
+ elif type_var == "unit_prod_price":
+ data = RetrieveHass.get_attr_data_dict(
+ data_df,
+ idx,
+ entity_id,
+ unit_of_measurement,
+ friendly_name,
+ "unit_prod_price_forecasts",
+ state,
+ )
+ elif type_var == "mlforecaster":
+ data = RetrieveHass.get_attr_data_dict(
+ data_df,
+ idx,
+ entity_id,
+ unit_of_measurement,
+ friendly_name,
+ "scheduled_forecast",
+ state,
+ )
+ elif type_var == "optim_status":
  data = {
  "state": state,
  "attributes": {
  "unit_of_measurement": unit_of_measurement,
- "friendly_name": friendly_name
- }
+ "friendly_name": friendly_name,
+ },
+ }
+ elif type_var == "mlregressor":
+ data = {
+ "state": state,
+ "attributes": {
+ "unit_of_measurement": unit_of_measurement,
+ "friendly_name": friendly_name,
+ },
  }
  else:
  data = {
  "state": "{:.2f}".format(state),
  "attributes": {
  "unit_of_measurement": unit_of_measurement,
- "friendly_name": friendly_name
- }
+ "friendly_name": friendly_name,
+ },
  }
  # Actually post the data
  if self.get_data_from_file:
- class response: pass
+
+ class response:
+ pass
+
  response.status_code = 200
  response.ok = True
  else:
  response = post(url, headers=headers, data=json.dumps(data))
  # Treating the response status and posting them on the logger
  if response.ok:
- self.logger.info("Successfully posted to "+entity_id+" = "+str(state))
+ self.logger.info("Successfully posted to " + entity_id + " = " + str(state))
  else:
- self.logger.info("The status code for received curl command response is: "+str(response.status_code))
+ self.logger.info(
+ "The status code for received curl command response is: "
+ + str(response.status_code)
+ )
  return response, data
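The new mlregressor branch publishes a single predicted value taken directly from data_df[idx], with plain unit and friendly-name attributes and no forecast list. A hedged sketch of a call against this branch follows; the entity id, unit, and friendly name are placeholders, not values defined by this diff.

# Hypothetical usage of the new "mlregressor" type_var (names/units are placeholders).
import pandas as pd

prediction = pd.Series([1240.2])  # a single regression prediction
rh.post_data(                      # rh: a RetrieveHass instance
    data_df=prediction,
    idx=0,
    entity_id="sensor.mlr_predict",
    unit_of_measurement="W",
    friendly_name="mlr predictor",
    type_var="mlregressor",
    publish_prefix="",
)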
@@ -14,6 +14,9 @@
  <button type="button" id="forecast-model-predict" class="button button2">ML forecast model
  predict</button>
  <button type="button" id="forecast-model-tune" class="button button3">ML forecast model tune</button>
+ </br></br>
+ <button type="button" id="regressor-model-fit" class="button button1">ML regressor model fit</button>
+ <button type="button" id="regressor-model-predict" class="button button2">ML regressor model predict</button>
  <!-- -->
  <!--dynamic input elements section -->
  <h4>Input Runtime Parameters</h4>
emhass/static/script.js CHANGED
@@ -16,6 +16,8 @@ function loadButtons(page) {
  "forecast-model-fit",
  "forecast-model-predict",
  "forecast-model-tune",
+ "regressor-model-fit",
+ "regressor-model-predict",
  "perfect-optim",
  "publish-data",
  "naive-mpc-optim"