emhass 0.13.0__py3-none-any.whl → 0.13.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,712 @@
+ #!/usr/bin/env python3
+
+ import copy
+ import datetime
+ import json
+ import logging
+ import os
+ import pathlib
+
+ import numpy as np
+ import pandas as pd
+ from requests import get, post
+
+ from emhass.utils import set_df_index_freq
+
+
+ class RetrieveHass:
+     r"""
+     Retrieve data from Home Assistant using the restful API.
+
+     This class allows the user to retrieve data from a Home Assistant instance \
+     using the provided restful API (https://developers.home-assistant.io/docs/api/rest/)
+
+     The methods of this class are:
+
+     - get_data: to retrieve the actual data from hass
+
+     - prepare_data: to apply some data treatment in preparation for the optimization task
+
+     - post_data: to post passed data to hass
+
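+     Example, a minimal usage sketch (the URL, token and sensor names are
+     illustrative placeholders, not EMHASS defaults)::
+
+         rh = RetrieveHass(
+             "http://localhost:8123/", "MY_TOKEN", pd.Timedelta("30min"),
+             datetime.timezone.utc, None, {}, logging.getLogger(__name__),
+         )
+         days_list = pd.date_range(end=pd.Timestamp.now(tz="UTC"), periods=2, freq="D")
+         if rh.get_data(days_list, ["sensor.home_load", "sensor.home_pv"]):
+             rh.prepare_data("sensor.home_load")
+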
+     """
+
+     def __init__(
+         self,
+         hass_url: str,
+         long_lived_token: str,
+         freq: pd.Timedelta,
+         time_zone: datetime.timezone,
+         params: str,
+         emhass_conf: dict,
+         logger: logging.Logger,
+         get_data_from_file: bool | None = False,
+     ) -> None:
+         """
+         Define constructor for RetrieveHass class.
+
+         :param hass_url: The URL of the Home Assistant instance
+         :type hass_url: str
+         :param long_lived_token: The long-lived token retrieved from the configuration pane
+         :type long_lived_token: str
+         :param freq: The frequency of the data DateTimeIndexes
+         :type freq: pd.Timedelta
+         :param time_zone: The time zone
+         :type time_zone: datetime.timezone
+         :param params: Configuration parameters passed from data/options.json
+         :type params: str
+         :param emhass_conf: Dictionary containing the needed emhass paths
+         :type emhass_conf: dict
+         :param logger: The passed logger object
+         :type logger: logging.Logger
+         :param get_data_from_file: Select if data should be retrieved from a \
+             previously saved pickle (useful for testing) or directly from a \
+             connection to the hass database
+         :type get_data_from_file: bool, optional
+
+         """
+         self.hass_url = hass_url
+         self.long_lived_token = long_lived_token
+         self.freq = freq
+         self.time_zone = time_zone
+         if (params is None) or (params == "null"):
+             self.params = {}
+         elif type(params) is dict:
+             self.params = params
+         else:
+             self.params = json.loads(params)
+         self.emhass_conf = emhass_conf
+         self.logger = logger
+         self.get_data_from_file = get_data_from_file
+         self.var_list = []
+
+     def get_ha_config(self):
+         """
+         Extract some configuration data from HA.
+
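+         Example, a sketch of typical use (the ha_config keys follow the HA \
+         /api/config response)::
+
+             if rh.get_ha_config():
+                 tz_name = rh.ha_config.get("time_zone")
+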
+         """
+         headers = {
+             "Authorization": "Bearer " + self.long_lived_token,
+             "content-type": "application/json",
+         }
+         if self.hass_url == "http://supervisor/core/api":
+             url = self.hass_url + "/config"
+         else:
+             if self.hass_url[-1] != "/":
+                 self.logger.warning(
+                     "Missing slash </> at the end of the defined URL, appending a slash but please fix your URL"
+                 )
+                 self.hass_url = self.hass_url + "/"
+             url = self.hass_url + "api/config"
+
+         try:
+             response_config = get(url, headers=headers)
+         except Exception:
+             self.logger.error("Unable to access Home Assistant instance, check URL")
+             self.logger.error("If using addon, try setting url and token to 'empty'")
+             return False
+
+         try:
+             self.ha_config = response_config.json()
+         except Exception:
+             self.logger.error("EMHASS was unable to obtain configuration data from HA")
+             return False
+         return True
+
+     def get_data(
+         self,
+         days_list: pd.date_range,
+         var_list: list,
+         minimal_response: bool | None = False,
+         significant_changes_only: bool | None = False,
+         test_url: str | None = "empty",
+     ) -> bool:
+         r"""
+         Retrieve the actual data from hass.
+
+         :param days_list: A list of days to retrieve. The ISO format should be used \
+             and the timezone is UTC. The frequency of the date_range should be freq='D'
+         :type days_list: pandas.date_range
+         :param var_list: The list of variables to retrieve from hass. These should \
+             be the exact name of the sensor in Home Assistant. \
+             For example: ['sensor.home_load', 'sensor.home_pv']
+         :type var_list: list
+         :param minimal_response: Retrieve a minimal response using the hass \
+             restful API, defaults to False
+         :type minimal_response: bool, optional
+         :param significant_changes_only: Retrieve significant changes only \
+             using the hass restful API, defaults to False
+         :type significant_changes_only: bool, optional
+         :param test_url: If not "empty", a complete URL used as-is instead of \
+             the built one (used for testing), defaults to "empty"
+         :type test_url: str, optional
+         :return: True upon success, the retrieved data is stored in the \
+             self.df_final DataFrame attribute
+         :rtype: bool
+
+         .. warning:: The minimal_response and significant_changes_only options \
+             are experimental
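+
+         Example, a sketch (sensor names are placeholders)::
+
+             days_list = pd.date_range(end=pd.Timestamp.now(tz="UTC"), periods=3, freq="D")
+             success = rh.get_data(days_list, ["sensor.home_load", "sensor.home_pv"])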
+         """
+         self.logger.info("Retrieve hass get data method initiated...")
+         headers = {
+             "Authorization": "Bearer " + self.long_lived_token,
+             "content-type": "application/json",
+         }
+         # Remove empty strings from var_list
+         var_list = [var for var in var_list if var != ""]
+         # Looping on each day from days list
+         self.df_final = pd.DataFrame()
+         x = 0  # iterate based on days
+         for day in days_list:
+             for i, var in enumerate(var_list):
+                 if test_url == "empty":
+                     if (
+                         self.hass_url == "http://supervisor/core/api"
+                     ):  # If we are using the supervisor API
+                         url = (
+                             self.hass_url
+                             + "/history/period/"
+                             + day.isoformat()
+                             + "?filter_entity_id="
+                             + var
+                         )
+                     else:  # Otherwise the Home Assistant Core API it is
+                         if self.hass_url[-1] != "/":
+                             self.logger.warning(
+                                 "Missing slash </> at the end of the defined URL, appending a slash but please fix your URL"
+                             )
+                             self.hass_url = self.hass_url + "/"
+                         url = (
+                             self.hass_url
+                             + "api/history/period/"
+                             + day.isoformat()
+                             + "?filter_entity_id="
+                             + var
+                         )
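+                         # Resulting URL (illustrative):
+                         # http://localhost:8123/api/history/period/2024-01-01T00:00:00+00:00?filter_entity_id=sensor.home_load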
+                     if minimal_response:  # A support for minimal response
+                         # The URL already carries a query string, so append with &
+                         url = url + "&minimal_response"
+                     if significant_changes_only:  # And for significant changes only (check the HASS restful API for more info)
+                         url = url + "&significant_changes_only"
+                 else:
+                     url = test_url
+                 try:
+                     response = get(url, headers=headers)
+                 except Exception:
+                     self.logger.error(
+                         "Unable to access Home Assistant instance, check URL"
+                     )
+                     self.logger.error(
+                         "If using addon, try setting url and token to 'empty'"
+                     )
+                     return False
+                 else:
+                     if response.status_code == 401:
+                         self.logger.error(
+                             "Unable to access Home Assistant instance, TOKEN/KEY"
+                         )
+                         self.logger.error(
+                             "If using addon, try setting url and token to 'empty'"
+                         )
+                         return False
+                     if response.status_code > 299:
+                         self.logger.error(
+                             f"Home Assistant request GET error: {response.status_code} for var {var}"
+                         )
+                         return False
+                 """import bz2  # Uncomment to save a serialized data for tests
+                 import _pickle as cPickle
+                 with bz2.BZ2File("data/test_response_get_data_get_method.pbz2", "w") as f:
+                     cPickle.dump(response, f)"""
+                 try:  # Sometimes when there are connection problems we need to catch empty retrieved json
+                     data = response.json()[0]
+                 except IndexError:
+                     if x == 0:
+                         self.logger.error(
+                             "The retrieved JSON is empty. A sensor: "
+                             + var
+                             + " may have 0 days of history, the passed sensor may not be correct, or days_to_retrieve may be set too high"
+                         )
+                     else:
+                         self.logger.error(
+                             "The retrieved JSON is empty for day: "
+                             + str(day)
+                             + ", days_to_retrieve may be larger than the recorded history of sensor: "
+                             + var
+                             + " (check your recorder settings)"
+                         )
+                     return False
+                 df_raw = pd.DataFrame.from_dict(data)
+                 if len(df_raw) == 0:
+                     if x == 0:
+                         self.logger.error(
+                             "The retrieved Dataframe is empty. A sensor: "
+                             + var
+                             + " may have 0 days of history or the passed sensor may not be correct"
+                         )
+                     else:
+                         self.logger.error(
+                             "Retrieved empty Dataframe for day: "
+                             + str(day)
+                             + ", days_to_retrieve may be larger than the recorded history of sensor: "
+                             + var
+                             + " (check your recorder settings)"
+                         )
+                     return False
+                 if (
+                     len(df_raw) < ((60 / (self.freq.seconds / 60)) * 24)
+                     and x != len(days_list) - 1
+                 ):  # check if there are enough rows for the passed frequency per day (not inc. current day)
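+                     # e.g. with freq=30min a full day should contain (60 / 30) * 24 = 48 rows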
+                     self.logger.debug(
+                         "sensor: "
+                         + var
+                         + " retrieved Dataframe count: "
+                         + str(len(df_raw))
+                         + ", on day: "
+                         + str(day)
+                         + ". This is less than expected for the freq value passed: "
+                         + str(self.freq)
+                     )
+                 if i == 0:  # Defining the DataFrame container
+                     from_date = pd.to_datetime(
+                         df_raw["last_changed"], format="ISO8601"
+                     ).min()
+                     to_date = pd.to_datetime(
+                         df_raw["last_changed"], format="ISO8601"
+                     ).max()
+                     ts = pd.to_datetime(
+                         pd.date_range(start=from_date, end=to_date, freq=self.freq),
+                         format="%Y-%d-%m %H:%M",
+                     ).round(self.freq, ambiguous="infer", nonexistent="shift_forward")
+                     df_day = pd.DataFrame(index=ts)
+                 # Caution with undefined string data: unknown, unavailable, etc.
+                 df_tp = (
+                     df_raw.copy()[["state"]]
+                     .replace(["unknown", "unavailable", ""], np.nan)
+                     .astype(float)
+                     .rename(columns={"state": var})
+                 )
+                 # Setting index, resampling and concatenation
+                 df_tp.set_index(
+                     pd.to_datetime(df_raw["last_changed"], format="ISO8601"),
+                     inplace=True,
+                 )
+                 df_tp = df_tp.resample(self.freq).mean()
+                 df_day = pd.concat([df_day, df_tp], axis=1)
+             self.df_final = pd.concat([self.df_final, df_day], axis=0)
+             x += 1
+         self.df_final = set_df_index_freq(self.df_final)
+         if self.df_final.index.freq != self.freq:
+             self.logger.error(
+                 "The inferred freq: "
+                 + str(self.df_final.index.freq)
+                 + " from the data is not equal to the defined freq passed: "
+                 + str(self.freq)
+             )
+             return False
+         self.var_list = var_list
+         return True
+
+     def prepare_data(
+         self,
+         var_load: str,
+         load_negative: bool | None = False,
+         set_zero_min: bool | None = True,
+         var_replace_zero: list | None = None,
+         var_interp: list | None = None,
+     ) -> bool:
+         r"""
+         Apply some data treatment in preparation for the optimization task.
+
+         :param var_load: The name of the variable for the household load consumption.
+         :type var_load: str
+         :param load_negative: Set to True if the retrieved load variable is \
+             negative by convention, defaults to False
+         :type load_negative: bool, optional
+         :param set_zero_min: A special treatment for a minimum value saturation \
+             to zero. Values below zero are replaced by nans, defaults to True
+         :type set_zero_min: bool, optional
+         :param var_replace_zero: A list of retrieved variables that we would want \
+             to replace nans with zeros, defaults to None
+         :type var_replace_zero: list, optional
+         :param var_interp: A list of retrieved variables that we would want to \
+             interpolate nan values using linear interpolation, defaults to None
+         :type var_interp: list, optional
+         :return: True upon success, the treated data is stored in the \
+             self.df_final DataFrame attribute
+         :rtype: bool
+
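+         Example, a sketch (sensor names are placeholders)::
+
+             rh.prepare_data(
+                 "sensor.home_load",
+                 load_negative=False,
+                 set_zero_min=True,
+                 var_replace_zero=["sensor.home_pv"],
+                 var_interp=["sensor.home_pv", "sensor.home_load"],
+             )
+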
+         """
+         self.logger.debug("prepare_data self.var_list=%s", self.var_list)
+         self.logger.debug("prepare_data var_load=%s", var_load)
+         self.logger.debug("prepare_data load_negative=%s", load_negative)
+         self.logger.debug("prepare_data set_zero_min=%s", set_zero_min)
+         self.logger.debug("prepare_data var_replace_zero=%s", var_replace_zero)
+         self.logger.debug("prepare_data var_interp=%s", var_interp)
+         try:
+             if load_negative:  # Apply the correct sign to load power
+                 self.df_final[var_load + "_positive"] = -self.df_final[var_load]
+             else:
+                 self.df_final[var_load + "_positive"] = self.df_final[var_load]
+             self.df_final.drop([var_load], inplace=True, axis=1)
+         except KeyError:
+             self.logger.error(
+                 "Variable "
+                 + var_load
+                 + " was not found. This is typically because no data could be retrieved from Home Assistant"
+             )
+             return False
+         except ValueError:
+             self.logger.error(
+                 "sensor.power_photovoltaics and sensor.power_load_no_var_loads should not be the same"
+             )
+             return False
+         # Confirm var_replace_zero & var_interp contain only sensors contained in var_list
+         if isinstance(var_replace_zero, list):
+             original_list = var_replace_zero[:]
+             var_replace_zero = [
+                 item for item in var_replace_zero if item in self.var_list
+             ]
+             removed = set(original_list) - set(var_replace_zero)
+             for item in removed:
+                 self.logger.warning(
+                     f"Sensor '{item}' in var_replace_zero not found in self.var_list and has been removed."
+                 )
+         else:
+             var_replace_zero = []
+         if isinstance(var_interp, list):
+             original_list = var_interp[:]
+             var_interp = [item for item in var_interp if item in self.var_list]
+             removed = set(original_list) - set(var_interp)
+             for item in removed:
+                 self.logger.warning(
+                     f"Sensor '{item}' in var_interp not found in self.var_list and has been removed."
+                 )
+         else:
+             var_interp = []
+         # Apply minimum values
+         if set_zero_min:
+             self.df_final.clip(lower=0.0, inplace=True, axis=1)
+             self.df_final.replace(to_replace=0.0, value=np.nan, inplace=True)
+         new_var_replace_zero = []
+         new_var_interp = []
+         # Just changing the names of variables to contain the fact that they are considered positive
+         if var_replace_zero is not None:
+             for string in var_replace_zero:
+                 new_string = string.replace(var_load, var_load + "_positive")
+                 new_var_replace_zero.append(new_string)
+         else:
+             self.logger.warning(
+                 "Unable to find all the sensors in the sensor_replace_zero parameter"
+             )
+             self.logger.warning(
+                 "Confirm all sensors in sensor_replace_zero are sensor_power_photovoltaics and/or sensor_power_load_no_var_loads"
+             )
+             new_var_replace_zero = None
+         if var_interp is not None:
+             for string in var_interp:
+                 new_string = string.replace(var_load, var_load + "_positive")
+                 new_var_interp.append(new_string)
+         else:
+             new_var_interp = None
+             self.logger.warning(
+                 "Unable to find all the sensors in the sensor_linear_interp parameter"
+             )
+             self.logger.warning(
+                 "Confirm all sensors in sensor_linear_interp are sensor_power_photovoltaics and/or sensor_power_load_no_var_loads"
+             )
+         # Treating NaN replacement: either by zeros or by linear interpolation
+         if new_var_replace_zero is not None:
+             self.df_final[new_var_replace_zero] = self.df_final[
+                 new_var_replace_zero
+             ].fillna(0.0)
+         if new_var_interp is not None:
+             self.df_final[new_var_interp] = self.df_final[new_var_interp].interpolate(
+                 method="linear", axis=0, limit=None
+             )
+             self.df_final[new_var_interp] = self.df_final[new_var_interp].fillna(0.0)
+         # Setting the correct time zone on DF index
+         if self.time_zone is not None:
+             self.df_final.index = self.df_final.index.tz_convert(self.time_zone)
+         # Drop datetimeindex duplicates on final DF
+         self.df_final = self.df_final[~self.df_final.index.duplicated(keep="first")]
+         return True
+
+     @staticmethod
+     def get_attr_data_dict(
+         data_df: pd.DataFrame,
+         idx: int,
+         entity_id: str,
+         device_class: str,
+         unit_of_measurement: str,
+         friendly_name: str,
+         list_name: str,
+         state: float,
+     ) -> dict:
+         """
+         Build the state/attributes dictionary to be posted to hass, with the \
+         values from idx onwards attached as a list of timestamped records \
+         under the list_name attribute.
+         """
+         list_df = copy.deepcopy(data_df).loc[data_df.index[idx] :].reset_index()
+         list_df.columns = ["timestamps", entity_id]
+         ts_list = [str(i) for i in list_df["timestamps"].tolist()]
+         vals_list = [str(np.round(i, 2)) for i in list_df[entity_id].tolist()]
+         forecast_list = []
+         for i, ts in enumerate(ts_list):
+             datum = {}
+             datum["date"] = ts
+             datum[entity_id.split("sensor.")[1]] = vals_list[i]
+             forecast_list.append(datum)
+         data = {
+             "state": f"{state:.2f}",
+             "attributes": {
+                 "device_class": device_class,
+                 "unit_of_measurement": unit_of_measurement,
+                 "friendly_name": friendly_name,
+                 list_name: forecast_list,
+             },
+         }
+         return data
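+
+     # Sketch of the payload shape produced by get_attr_data_dict for a
+     # two-step power forecast (entity name and values are illustrative):
+     # {
+     #     "state": "123.45",
+     #     "attributes": {
+     #         "device_class": "power",
+     #         "unit_of_measurement": "W",
+     #         "friendly_name": "PV Forecast",
+     #         "forecasts": [
+     #             {"date": "2024-01-01 00:00:00+00:00", "p_pv_forecast": "123.45"},
+     #             {"date": "2024-01-01 00:30:00+00:00", "p_pv_forecast": "98.76"},
+     #         ],
+     #     },
+     # }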
+
+     def post_data(
+         self,
+         data_df: pd.DataFrame,
+         idx: int,
+         entity_id: str,
+         device_class: str,
+         unit_of_measurement: str,
+         friendly_name: str,
+         type_var: str,
+         from_mlforecaster: bool | None = False,
+         publish_prefix: str | None = "",
+         save_entities: bool | None = False,
+         logger_levels: str | None = "info",
+         dont_post: bool | None = False,
+     ) -> tuple:
+         r"""
+         Post passed data to hass.
+
+         :param data_df: The DataFrame containing the data that will be posted \
+             to hass. This should be a one-column DF or a series.
+         :type data_df: pd.DataFrame
+         :param idx: The int index of the location of the data within the passed \
+             DataFrame. We will post just one value at a time.
+         :type idx: int
+         :param entity_id: The unique entity_id of the sensor in hass.
+         :type entity_id: str
+         :param device_class: The HASS device class for the sensor.
+         :type device_class: str
+         :param unit_of_measurement: The units of the sensor.
+         :type unit_of_measurement: str
+         :param friendly_name: The friendly name that will be used in the hass frontend.
+         :type friendly_name: str
+         :param type_var: A variable to indicate the type of variable: power, SOC, etc.
+         :type type_var: str
+         :param from_mlforecaster: Set to True if the data comes from the machine \
+             learning forecaster, defaults to False
+         :type from_mlforecaster: bool, optional
+         :param publish_prefix: A common prefix for all published data entity_id.
+         :type publish_prefix: str, optional
+         :param save_entities: If entity data should be saved in data_path/entities
+         :type save_entities: bool, optional
+         :param logger_levels: Set logger level, info or debug, to output
+         :type logger_levels: str, optional
+         :param dont_post: Do not post to HA
+         :type dont_post: bool, optional
+
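+         Example, a sketch posting one value of a power forecast (df_forecast \
+         is a placeholder one-column DataFrame, names are illustrative)::
+
+             response, data = rh.post_data(df_forecast, 0, "sensor.p_pv_forecast",
+                                           "power", "W", "PV Forecast", "power")
+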
+         """
+         # Add a possible prefix to the entity ID
+         entity_id = entity_id.replace("sensor.", "sensor." + publish_prefix)
+         # Set the URL
+         if (
+             self.hass_url == "http://supervisor/core/api"
+         ):  # If we are using the supervisor API
+             url = self.hass_url + "/states/" + entity_id
+         else:  # Otherwise the Home Assistant Core API it is
+             url = self.hass_url + "api/states/" + entity_id
+         headers = {
+             "Authorization": "Bearer " + self.long_lived_token,
+             "content-type": "application/json",
+         }
+         # Preparing the data dict to be published
+         if type_var == "cost_fun":
+             if isinstance(data_df.iloc[0], pd.Series):  # if Series extract
+                 data_df = data_df.iloc[:, 0]
+             state = np.round(data_df.sum(), 2)
+         elif type_var == "unit_load_cost" or type_var == "unit_prod_price":
+             state = np.round(data_df.loc[data_df.index[idx]], 4)
+         elif type_var == "optim_status":
+             state = data_df.loc[data_df.index[idx]]
+         elif type_var == "mlregressor":
+             state = data_df[idx]
+         else:
+             state = np.round(data_df.loc[data_df.index[idx]], 2)
+         # Map each type_var to the name of the attribute list in the posted data
+         attr_list_names = {
+             "power": "forecasts",
+             "deferrable": "deferrables_schedule",
+             "temperature": "predicted_temperatures",
+             "batt": "battery_scheduled_power",
+             "SOC": "battery_scheduled_soc",
+             "unit_load_cost": "unit_load_cost_forecasts",
+             "unit_prod_price": "unit_prod_price_forecasts",
+             "mlforecaster": "scheduled_forecast",
+         }
+         if type_var in attr_list_names:
+             data = RetrieveHass.get_attr_data_dict(
+                 data_df,
+                 idx,
+                 entity_id,
+                 device_class,
+                 unit_of_measurement,
+                 friendly_name,
+                 attr_list_names[type_var],
+                 state,
+             )
+         elif type_var == "optim_status" or type_var == "mlregressor":
+             data = {
+                 "state": state,
+                 "attributes": {
+                     "device_class": device_class,
+                     "unit_of_measurement": unit_of_measurement,
+                     "friendly_name": friendly_name,
+                 },
+             }
+         else:
+             data = {
+                 "state": f"{state:.2f}",
+                 "attributes": {
+                     "device_class": device_class,
+                     "unit_of_measurement": unit_of_measurement,
+                     "friendly_name": friendly_name,
+                 },
+             }
+         # Actually post the data
+         if self.get_data_from_file or dont_post:
+             # Mimic a successful requests response when not actually posting
+             class response:
+                 pass
+
+             response.status_code = 200
+             response.ok = True
+         else:
+             response = post(url, headers=headers, data=json.dumps(data))
+
+         # Treating the response status and posting them on the logger
+         if response.ok:
+             if logger_levels == "DEBUG":
+                 self.logger.debug(
+                     "Successfully posted to " + entity_id + " = " + str(state)
+                 )
+             else:
+                 self.logger.info(
+                     "Successfully posted to " + entity_id + " = " + str(state)
+                 )
+
+             # If save entities is set, save entity data to /data_path/entities
+             if save_entities:
+                 entities_path = self.emhass_conf["data_path"] / "entities"
+
+                 # Ensure the folder exists
+                 pathlib.Path(entities_path).mkdir(parents=True, exist_ok=True)
+
+                 # Save entity data to json file
+                 result = data_df.to_json(
+                     index="timestamp", orient="index", date_unit="s", date_format="iso"
+                 )
+                 parsed = json.loads(result)
+                 with open(entities_path / (entity_id + ".json"), "w") as file:
+                     json.dump(parsed, file, indent=4)
+
+                 # Save the required metadata to json file
+                 if os.path.isfile(entities_path / "metadata.json"):
+                     with open(entities_path / "metadata.json") as file:
+                         metadata = json.load(file)
+                 else:
+                     metadata = {}
+                 with open(entities_path / "metadata.json", "w") as file:
+                     # Save entity metadata, key = entity_id
+                     metadata[entity_id] = {
+                         "name": data_df.name,
+                         "device_class": device_class,
+                         "unit_of_measurement": unit_of_measurement,
+                         "friendly_name": friendly_name,
+                         "type_var": type_var,
+                         "optimization_time_step": int(self.freq.seconds / 60),
+                     }
+
+                     # Find lowest frequency to set for continual loop freq
+                     if metadata.get("lowest_time_step", None) is None or metadata[
+                         "lowest_time_step"
+                     ] > int(self.freq.seconds / 60):
+                         metadata["lowest_time_step"] = int(self.freq.seconds / 60)
+                     json.dump(metadata, file, indent=4)
+
+                 self.logger.debug("Saved " + entity_id + " to json file")
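+                 # Sketch of the resulting metadata.json structure (illustrative values):
+                 # {
+                 #     "sensor.p_pv_forecast": {
+                 #         "name": "p_pv_forecast",
+                 #         "device_class": "power",
+                 #         "unit_of_measurement": "W",
+                 #         "friendly_name": "PV Forecast",
+                 #         "type_var": "power",
+                 #         "optimization_time_step": 30
+                 #     },
+                 #     "lowest_time_step": 30
+                 # }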
+
+         else:
+             self.logger.warning(
+                 "The received response status code is: "
+                 + str(response.status_code)
+             )
+         return response, data