gazpar2haws 0.2.0b1__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
gazpar2haws/gazpar.py CHANGED
@@ -1,12 +1,23 @@
 import logging
 import traceback
-from datetime import datetime, timedelta
-from typing import Any
+from datetime import date, datetime, timedelta
+from typing import Optional

 import pygazpar  # type: ignore
 import pytz
+from pygazpar.datasource import MeterReadings  # type: ignore

+from gazpar2haws.date_array import DateArray
 from gazpar2haws.haws import HomeAssistantWS, HomeAssistantWSException
+from gazpar2haws.model import (
+    ConsumptionQuantityArray,
+    Device,
+    PriceUnit,
+    Pricing,
+    QuantityUnit,
+    TimeUnit,
+)
+from gazpar2haws.pricer import Pricer

 Logger = logging.getLogger(__name__)

@@ -15,35 +26,51 @@ Logger = logging.getLogger(__name__)
 class Gazpar:

     # ----------------------------------
-    def __init__(self, config: dict[str, Any], homeassistant: HomeAssistantWS):
+    def __init__(
+        self,
+        device_config: Device,
+        pricing_config: Optional[Pricing],
+        homeassistant: HomeAssistantWS,
+    ):

         self._homeassistant = homeassistant
+        self._grdf_config = device_config
+        self._pricing_config = pricing_config
+
+        # GrDF configuration: name
+        self._name = device_config.name
+
+        # GrDF configuration: data source
+        self._data_source = device_config.data_source
+
+        # GrDF configuration: username
+        self._username = device_config.username
+
+        # GrDF configuration: password
+        self._password = device_config.password.get_secret_value() if device_config.password is not None else None
+
+        # GrDF configuration: pce_identifier
+        self._pce_identifier = (
+            device_config.pce_identifier.get_secret_value() if device_config.pce_identifier is not None else None
+        )
+
+        # GrDF configuration: tmp_dir
+        self._tmp_dir = device_config.tmp_dir
+
+        # GrDF configuration: last_days
+        self._last_days = device_config.last_days

-        # GrDF configuration
-        self._name = config.get("name")
-        self._data_source = config.get("data_source")
-        self._username = config.get("username")
-        self._password = config.get("password")
-        self._pce_identifier = str(config.get("pce_identifier"))
-        self._tmp_dir = config.get("tmp_dir")
-        self._last_days = int(str(config.get("last_days")))
-        self._timezone = str(config.get("timezone"))
-        self._reset = bool(config.get("reset"))
+        # GrDF configuration: timezone
+        self._timezone = device_config.timezone
+
+        # GrDF configuration: reset
+        self._reset = device_config.reset

         # As of date: YYYY-MM-DD
-        as_of_date = config.get("as_of_date")
-        if self._data_source is not None and str(self._data_source).lower() == "test":
-            self._as_of_date = (
-                datetime.now(tz=pytz.timezone(self._timezone))
-                if as_of_date is None
-                else datetime.strptime(as_of_date, "%Y-%m-%d")
-            )
-        else:
-            self._as_of_date = datetime.now(tz=pytz.timezone(self._timezone))
+        self._as_of_date = device_config.as_of_date

         # Set the timezone
-        timezone = pytz.timezone(self._timezone)
-        self._as_of_date = timezone.localize(self._as_of_date)
+        self._timezone = device_config.timezone

     # ----------------------------------
     def name(self):
@@ -51,42 +78,129 @@ class Gazpar:

     # ----------------------------------
     # Publish Gaspar data to Home Assistant WS
-    async def publish(self):
+    async def publish(self):  # pylint: disable=too-many-branches, too-many-statements

-        # Volume and energy sensor names.
+        # Volume, energy and cost sensor names.
         volume_sensor_name = f"sensor.{self._name}_volume"
         energy_sensor_name = f"sensor.{self._name}_energy"
+        cost_sensor_name = f"sensor.{self._name}_cost"

         # Eventually reset the sensor in Home Assistant
         if self._reset:
             try:
-                await self._homeassistant.clear_statistics(
-                    [volume_sensor_name, energy_sensor_name]
-                )
+                await self._homeassistant.clear_statistics([volume_sensor_name, energy_sensor_name])
             except Exception:
-                Logger.warning(
-                    f"Error while resetting the sensor in Home Assistant: {traceback.format_exc()}"
-                )
+                Logger.warning(f"Error while resetting the sensor in Home Assistant: {traceback.format_exc()}")
                 raise

-        # Publish volume sensor
-        await self._publish_entity(
-            volume_sensor_name, pygazpar.PropertyName.VOLUME.value, "m³"
+        last_date_and_value_by_sensor = dict[str, tuple[date, float]]()
+
+        last_date_and_value_by_sensor[volume_sensor_name] = await self.find_last_date_and_value(volume_sensor_name)
+        last_date_and_value_by_sensor[energy_sensor_name] = await self.find_last_date_and_value(energy_sensor_name)
+        last_date_and_value_by_sensor[cost_sensor_name] = await self.find_last_date_and_value(cost_sensor_name)
+
+        # Compute the start date as the minimum of the last dates plus one day
+        start_date = min(
+            min(v[0] for v in last_date_and_value_by_sensor.values()) + timedelta(days=1), self._as_of_date
         )
-        await self._publish_entity(
-            energy_sensor_name, pygazpar.PropertyName.ENERGY.value, "kWh"
+
+        # Get all start dates
+        energy_start_date = last_date_and_value_by_sensor[energy_sensor_name][0] + timedelta(days=1)
+        volume_start_date = last_date_and_value_by_sensor[volume_sensor_name][0] + timedelta(days=1)
+        cost_start_date = last_date_and_value_by_sensor[cost_sensor_name][0] + timedelta(days=1)
+
+        Logger.debug(f"Min start date for all sensors: {start_date}")
+        Logger.debug(f"Energy start date: {energy_start_date}")
+        Logger.debug(f"Volume start date: {volume_start_date}")
+        Logger.debug(f"Cost start date: {cost_start_date}")
+
+        # Fetch the data from GrDF and publish it to Home Assistant
+        daily_history = self.fetch_daily_gazpar_history(start_date, self._as_of_date)
+
+        # The end date is the last date of the daily history
+        if daily_history is None or len(daily_history) == 0:
+            end_date = start_date
+        else:
+            end_date = datetime.strptime(daily_history[-1][pygazpar.PropertyName.TIME_PERIOD.value], "%d/%m/%Y").date()
+
+        Logger.debug(f"End date: {end_date}")
+
+        # Extract the volume from the daily history
+        volume_array = self.extract_property_from_daily_gazpar_history(
+            daily_history,
+            pygazpar.PropertyName.VOLUME.value,
+            volume_start_date,
+            end_date,
+        )
+
+        # Extract the energy from the daily history
+        energy_array = self.extract_property_from_daily_gazpar_history(
+            daily_history,
+            pygazpar.PropertyName.ENERGY.value,
+            min(energy_start_date, cost_start_date),
+            end_date,
         )

+        # Publish the volume and energy to Home Assistant
+        if volume_array is not None:
+            await self.publish_date_array(
+                volume_sensor_name,
+                "m³",
+                volume_array,
+                last_date_and_value_by_sensor[volume_sensor_name][1],
+            )
+        else:
+            Logger.info("No volume data to publish")
+
+        if energy_array is not None and energy_start_date <= end_date:
+            await self.publish_date_array(
+                energy_sensor_name,
+                "kWh",
+                energy_array[energy_start_date : end_date + timedelta(days=1)],
+                last_date_and_value_by_sensor[energy_sensor_name][1],
+            )
+        else:
+            Logger.info("No energy data to publish")
+
+        if self._pricing_config is None:
+            Logger.info("No pricing configuration provided")
+            return
+
+        # Compute the cost from the energy
+        if energy_array is not None:
+            pricer = Pricer(self._pricing_config)
+
+            quantities = ConsumptionQuantityArray(
+                start_date=cost_start_date,
+                end_date=end_date,
+                value_unit=QuantityUnit.KWH,
+                base_unit=TimeUnit.DAY,
+                value_array=energy_array[cost_start_date : end_date + timedelta(days=1)],
+            )
+
+            cost_array = pricer.compute(quantities, PriceUnit.EURO)
+        else:
+            cost_array = None
+
+        # Publish the cost to Home Assistant
+        if cost_array is not None:
+            cost_initial_value = last_date_and_value_by_sensor[cost_sensor_name][1]
+            await self.publish_date_array(
+                cost_sensor_name,
+                cost_array.value_unit,
+                cost_array.value_array,
+                cost_initial_value,
+            )
+        else:
+            Logger.info("No cost data to publish")
+
     # ----------------------------------
-    # Publish a sensor to Home Assistant
-    async def _publish_entity(
-        self, entity_id: str, property_name: str, unit_of_measurement: str
-    ):
+    # Fetch daily Gazpar history.
+    def fetch_daily_gazpar_history(self, start_date: date, end_date: date) -> MeterReadings:

-        # Find last date, days and value of the entity.
-        last_date, last_days, last_value = await self._find_last_date_days_value(
-            entity_id
-        )
+        if start_date >= end_date:
+            Logger.info("No data to fetch")
+            return []

         # Instantiate the right data source.
         data_source = self._create_data_source()
@@ -95,42 +209,85 @@ class Gazpar:
         client = pygazpar.Client(data_source)

         try:
-            data = client.loadSince(
-                pceIdentifier=self._pce_identifier,
-                lastNDays=last_days,
+            history = client.load_date_range(
+                pce_identifier=self._pce_identifier,
+                start_date=start_date,
+                end_date=end_date,
                 frequencies=[pygazpar.Frequency.DAILY],
             )
+
+            # Filter the daily readings by keeping only dates between start_date and end_date
+            res = []
+            for reading in history[pygazpar.Frequency.DAILY.value]:
+                reading_date = datetime.strptime(reading[pygazpar.PropertyName.TIME_PERIOD.value], "%d/%m/%Y").date()
+                if start_date <= reading_date <= end_date:
+                    res.append(reading)
+
+            Logger.debug(f"Fetched {len(res)} daily readings from start date {start_date} to end date {end_date}")
         except Exception:  # pylint: disable=broad-except
-            Logger.warning(
-                f"Error while fetching data from GrDF: {traceback.format_exc()}"
-            )
-            data = {}
+            Logger.warning(f"Error while fetching data from GrDF: {traceback.format_exc()}")
+            res = MeterReadings()

-        # Timezone
-        timezone = pytz.timezone(self._timezone)
+        return res

-        # Compute and fill statistics.
-        daily = data.get(pygazpar.Frequency.DAILY.value)
-        statistics = []
-        total = last_value
-        for reading in daily:
+    # ----------------------------------
+    # Extract a given property from the daily Gazpar history and return a DateArray.
+    def extract_property_from_daily_gazpar_history(
+        self,
+        readings: MeterReadings,
+        property_name: str,
+        start_date: date,
+        end_date: date,
+    ) -> Optional[DateArray]:
+
+        # Fill the quantity array.
+        res: Optional[DateArray] = None
+
+        for reading in readings:
             # Parse date format DD/MM/YYYY into datetime.
-            date = datetime.strptime(
-                reading[pygazpar.PropertyName.TIME_PERIOD.value], "%d/%m/%Y"
-            )
+            reading_date = datetime.strptime(reading[pygazpar.PropertyName.TIME_PERIOD.value], "%d/%m/%Y").date()

-            # Set the timezone
-            date = timezone.localize(date)
+            # Skip all readings before the start date.
+            if reading_date < start_date:
+                # Logger.debug(f"Skip date: {reading_date} < {start_date}")
+                continue

-            # Skip all readings before the last statistic date.
-            if date <= last_date:
-                Logger.debug(f"Skip date: {date} <= {last_date}")
+            # Skip all readings after the end date.
+            if reading_date > end_date:
+                # Logger.debug(f"Skip date: {reading_date} > {end_date}")
                 continue

-            # Compute the total volume and energy
-            total += reading[property_name]
+            # Fill the quantity array.
+            if reading[property_name] is not None:
+                if res is None:
+                    res = DateArray(name=property_name, start_date=start_date, end_date=end_date)
+                res[reading_date] = reading[property_name]
+
+        return res
+
+    # ----------------------------------
+    # Push a date array to Home Assistant.
+    async def publish_date_array(
+        self,
+        entity_id: str,
+        unit_of_measurement: str,
+        date_array: DateArray,
+        initial_value: float,
+    ):

-            statistics.append({"start": date.isoformat(), "state": total, "sum": total})
+        # Compute the cumulative sum of the values.
+        total_array = date_array.cumsum() + initial_value
+
+        # Timezone
+        timezone = pytz.timezone(self._timezone)
+
+        # Fill the statistics.
+        statistics = []
+        for dt, total in total_array:
+            # Set the timezone
+            date_time = datetime.combine(dt, datetime.min.time())
+            date_time = timezone.localize(date_time)
+            statistics.append({"start": date_time.isoformat(), "state": total, "sum": total})

         # Publish statistics to Home Assistant
         try:
@@ -138,9 +295,7 @@ class Gazpar:
                 entity_id, "recorder", "gazpar2haws", unit_of_measurement, statistics
             )
         except Exception:
-            Logger.warning(
-                f"Error while importing statistics to Home Assistant: {traceback.format_exc()}"
-            )
+            Logger.warning(f"Error while importing statistics to Home Assistant: {traceback.format_exc()}")
             raise

     # ----------------------------------
@@ -158,36 +313,31 @@ class Gazpar:
                     tmpDirectory=self._tmp_dir,
                 )

-        return pygazpar.JsonWebDataSource(
-            username=self._username, password=self._password
-        )
+        return pygazpar.JsonWebDataSource(username=self._username, password=self._password)

     # ----------------------------------
-    # Find last date, days and value of the entity.
-    async def _find_last_date_days_value(
-        self, entity_id: str
-    ) -> tuple[datetime, int, float]:
+    # Find last date, value of the entity.
+    async def find_last_date_and_value(self, entity_id: str) -> tuple[date, float]:

         # Check the existence of the sensor in Home Assistant
         try:
-            exists_statistic_id = await self._homeassistant.exists_statistic_id(
-                entity_id, "sum"
-            )
+            exists_statistic_id = await self._homeassistant.exists_statistic_id(entity_id, "sum")
         except Exception:
             Logger.warning(
-                f"Error while checking the existence of the sensor in Home Assistant: {traceback.format_exc()}"
+                f"Error while checking the existence of the entity '{entity_id}' in Home Assistant: {traceback.format_exc()}"
             )
             raise

         if exists_statistic_id:
             # Get the last statistic from Home Assistant
             try:
-                last_statistic = await self._homeassistant.get_last_statistic(
-                    entity_id, self._as_of_date, self._last_days
-                )
+                as_of_date = datetime.combine(self._as_of_date, datetime.min.time())
+                as_of_date = pytz.timezone(self._timezone).localize(as_of_date)
+
+                last_statistic = await self._homeassistant.get_last_statistic(entity_id, as_of_date, self._last_days)
             except HomeAssistantWSException:
                 Logger.warning(
-                    f"Error while fetching last statistics from Home Assistant: {traceback.format_exc()}"
+                    f"Error while fetching last statistics of the entity '{entity_id}' from Home Assistant: {traceback.format_exc()}"
                 )

             if last_statistic:
@@ -195,35 +345,25 @@ class Gazpar:
                 last_date = datetime.fromtimestamp(
                     int(str(last_statistic.get("start"))) / 1000,
                     tz=pytz.timezone(self._timezone),
-                )
-
-                # Compute the number of days since the last statistics
-                last_days = (self._as_of_date - last_date).days
+                ).date()

                 # Get the last meter value
                 last_value = float(str(last_statistic.get("sum")))

-                Logger.debug(
-                    f"Last date: {last_date}, last days: {last_days}, last value: {last_value}"
-                )
+                Logger.debug(f"Entity '{entity_id}' => Last date: {last_date}, last value: {last_value}")

-                return last_date, last_days, last_value
+                return last_date, last_value

-            Logger.debug(f"No statistics found for the existing sensor {entity_id}.")
+            Logger.debug(f"Entity '{entity_id}' => No statistics found.")
         else:
-            Logger.debug(f"Sensor {entity_id} does not exist in Home Assistant.")
-
-            # If the sensor does not exist in Home Assistant, fetch the last days defined in the configuration
-            last_days = self._last_days
+            Logger.debug(f"Entity '{entity_id}' does not exist in Home Assistant.")

         # Compute the corresponding last_date
-        last_date = self._as_of_date - timedelta(days=last_days)
+        last_date = self._as_of_date - timedelta(days=self._last_days)

         # If no statistic, the last value is initialized to zero
         last_value = 0

-        Logger.debug(
-            f"Last date: {last_date}, last days: {last_days}, last value: {last_value}"
-        )
+        Logger.debug(f"Entity '{entity_id}' => Last date: {last_date}, last value: {last_value}")

-        return last_date, last_days, last_value
+        return last_date, last_value
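
Note on the publish() rework above: instead of reloading a fixed lastNDays window, the new code derives one GrDF fetch window from the last recorded statistic of each sensor (volume, energy, cost) and caps it at as_of_date, then publishes each sensor from the day after its own last statistic. The standalone sketch below reproduces that date bookkeeping with plain library types; the sensor names, dates and values are invented for illustration.

    from datetime import date, timedelta

    as_of_date = date(2025, 1, 31)

    # (last statistic date, last cumulative value) per sensor, in the shape
    # returned by Gazpar.find_last_date_and_value() in the diff above.
    last_date_and_value_by_sensor = {
        "sensor.gazpar_volume": (date(2025, 1, 20), 1234.0),
        "sensor.gazpar_energy": (date(2025, 1, 25), 13800.0),
        "sensor.gazpar_cost": (date(2025, 1, 10), 950.0),
    }

    # Single fetch window: one day after the oldest last date, capped at as_of_date.
    start_date = min(
        min(v[0] for v in last_date_and_value_by_sensor.values()) + timedelta(days=1),
        as_of_date,
    )
    assert start_date == date(2025, 1, 11)

    # Each sensor is then published only from the day after its own last statistic.
    per_sensor_start = {name: v[0] + timedelta(days=1) for name, v in last_date_and_value_by_sensor.items()}
    assert per_sensor_start["sensor.gazpar_energy"] == date(2025, 1, 26)
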
gazpar2haws/haws.py CHANGED
@@ -15,7 +15,7 @@ class HomeAssistantWSException(Exception):
 # ----------------------------------
 class HomeAssistantWS:
     # ----------------------------------
-    def __init__(self, host: str, port: str, endpoint: str, token: str):
+    def __init__(self, host: str, port: int, endpoint: str, token: str):
         self._host = host
         self._port = port
         self._endpoint = endpoint
@@ -92,9 +92,7 @@ class HomeAssistantWS:
             raise HomeAssistantWSException(f"Invalid response message: {response_data}")

         if not response_data.get("success"):
-            raise HomeAssistantWSException(
-                f"Request failed: {response_data.get('error')}"
-            )
+            raise HomeAssistantWSException(f"Request failed: {response_data.get('error')}")

         return response_data.get("result")

@@ -122,17 +120,13 @@ class HomeAssistantWS:
         return response

     # ----------------------------------
-    async def exists_statistic_id(
-        self, entity_id: str, statistic_type: str | None = None
-    ) -> bool:
+    async def exists_statistic_id(self, entity_id: str, statistic_type: str | None = None) -> bool:

         Logger.debug(f"Checking if {entity_id} exists...")

         statistic_ids = await self.list_statistic_ids(statistic_type)

-        entity_ids = [
-            statistic_id.get("statistic_id") for statistic_id in statistic_ids
-        ]
+        entity_ids = [statistic_id.get("statistic_id") for statistic_id in statistic_ids]

         exists_statistic = entity_id in entity_ids

@@ -141,13 +135,9 @@ class HomeAssistantWS:
         return exists_statistic

     # ----------------------------------
-    async def statistics_during_period(
-        self, entity_ids: list[str], start_time: datetime, end_time: datetime
-    ) -> dict:
+    async def statistics_during_period(self, entity_ids: list[str], start_time: datetime, end_time: datetime) -> dict:

-        Logger.debug(
-            f"Getting {entity_ids} statistics during period from {start_time} to {end_time}..."
-        )
+        Logger.debug(f"Getting {entity_ids} statistics during period from {start_time} to {end_time}...")

         # Subscribe to statistics
         statistics_message = {
@@ -166,16 +156,12 @@ class HomeAssistantWS:
                 f"Invalid statistics_during_period response type: got {type(response)} instead of dict"
             )

-        Logger.debug(
-            f"Received {entity_ids} statistics during period from {start_time} to {end_time}"
-        )
+        Logger.debug(f"Received {entity_ids} statistics during period from {start_time} to {end_time}")

         return response

     # ----------------------------------
-    async def get_last_statistic(
-        self, entity_id: str, as_of_date: datetime, depth_days: int
-    ) -> dict:
+    async def get_last_statistic(self, entity_id: str, as_of_date: datetime, depth_days: int) -> dict:

         Logger.debug(f"Getting last statistic for {entity_id}...")

@@ -201,9 +187,7 @@ class HomeAssistantWS:
         statistics: list[dict],
     ):

-        Logger.debug(
-            f"Importing {len(statistics)} statistics for {entity_id} from {source}..."
-        )
+        Logger.debug(f"Importing {len(statistics)} statistics for {entity_id} from {source}...")

         if len(statistics) == 0:
             Logger.debug("No statistics to import")
@@ -225,9 +209,7 @@ class HomeAssistantWS:

         await self.send_message(import_statistics_message)

-        Logger.debug(
-            f"Imported {len(statistics)} statistics for {entity_id} from {source}"
-        )
+        Logger.debug(f"Imported {len(statistics)} statistics for {entity_id} from {source}")

     # ----------------------------------
     async def clear_statistics(self, entity_ids: list[str]):
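
For context on the haws.py signature cleanups above: the statistics that Gazpar.publish_date_array() feeds into HomeAssistantWS.import_statistics(entity_id, "recorder", "gazpar2haws", unit_of_measurement, statistics) are cumulative sums keyed by a timezone-aware midnight timestamp. The sketch below rebuilds such a payload with plain Python, mirroring the cumsum-plus-initial-value logic from this diff; the daily values, dates and timezone are invented for illustration, and connection handling is outside this diff.

    from datetime import date, datetime, timedelta

    import pytz

    timezone = pytz.timezone("Europe/Paris")
    initial_value = 13800.0                     # last "sum" already recorded in Home Assistant
    daily_energy_kwh = [42.5, 38.1, 0.0, 51.3]  # one reading per day, oldest first
    first_day = date(2025, 1, 26)

    statistics = []
    total = initial_value
    for offset, value in enumerate(daily_energy_kwh):
        total += value
        day = first_day + timedelta(days=offset)
        # Midnight local time, localized as in publish_date_array() above.
        start = timezone.localize(datetime.combine(day, datetime.min.time()))
        statistics.append({"start": start.isoformat(), "state": total, "sum": total})

    # statistics[0] == {'start': '2025-01-26T00:00:00+01:00', 'state': 13842.5, 'sum': 13842.5}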