gazpar2haws 0.3.2__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in the public registry. It is provided for informational purposes only.
gazpar2haws/datetime_utils.py ADDED
@@ -0,0 +1,104 @@
+ """Utility functions for datetime conversions."""
+
+ from datetime import date, datetime
+
+ import pytz
+
+
+ def timestamp_ms_to_datetime(timestamp_ms: int | float | str, timezone: str | None = None) -> datetime:
+     """
+     Convert Unix timestamp in milliseconds to a datetime object.
+
+     This is the base conversion function used by other timestamp utilities.
+
+     Args:
+         timestamp_ms: Unix timestamp in milliseconds (as int, float, or string)
+         timezone: Optional timezone string for timezone-aware datetime.
+                   If provided, creates datetime in that timezone.
+                   If not provided, uses UTC (default Home Assistant behavior).
+
+     Returns:
+         datetime object in the specified timezone or UTC
+     """
+     # Convert to seconds
+     timestamp_seconds = int(str(timestamp_ms)) / 1000
+     # Convert to specified timezone or UTC
+     if timezone:
+         return datetime.fromtimestamp(timestamp_seconds, tz=pytz.timezone(timezone))
+     return datetime.fromtimestamp(timestamp_seconds, tz=pytz.UTC)
+
+
+ def timestamp_ms_to_iso_string(timestamp_ms: int | float | str, timezone: str | None = None) -> str:
+     """
+     Convert Unix timestamp in milliseconds to ISO format string.
+
+     This is used for converting Home Assistant statistics timestamps
+     (which are in milliseconds) to ISO format strings expected by
+     the import_statistics API.
+
+     Args:
+         timestamp_ms: Unix timestamp in milliseconds (as int, float, or string)
+         timezone: Optional timezone string for timezone-aware conversion.
+                   If provided, converts to that timezone before returning ISO format.
+                   If not provided, uses UTC (default Home Assistant behavior).
+
+     Returns:
+         ISO format string (e.g., "2020-12-14T00:00:00+00:00")
+     """
+     dt = timestamp_ms_to_datetime(timestamp_ms, timezone)
+     return dt.isoformat()
+
+
+ def timestamp_ms_to_date(timestamp_ms: int | float | str, timezone: str) -> date:
+     """
+     Convert Unix timestamp in milliseconds to a date in the specified timezone.
+
+     This is used for extracting the date portion of Home Assistant statistics
+     timestamps while respecting the local timezone.
+
+     Args:
+         timestamp_ms: Unix timestamp in milliseconds (as int, float, or string)
+         timezone: Timezone string (e.g., "Europe/Paris")
+
+     Returns:
+         Date object in the specified timezone
+     """
+     dt = timestamp_ms_to_datetime(timestamp_ms, timezone)
+     return dt.date()
+
+
+ def convert_statistics_timestamps(statistics: list[dict], timezone: str | None = None) -> list[dict]:
+     """
+     Convert all timestamp fields in statistics from Unix milliseconds to ISO format strings.
+
+     Home Assistant statistics_during_period API returns timestamps as Unix milliseconds,
+     but the import_statistics API expects ISO format strings.
+
+     Converts these fields if they exist:
+     - "start": The start time of the statistics period
+     - "end": The end time of the statistics period (if present)
+
+     Args:
+         statistics: List of statistics dictionaries with timestamp fields
+         timezone: Optional timezone string for timezone-aware conversion.
+                   If provided, converts to that timezone before returning ISO format.
+                   If not provided, uses UTC (default Home Assistant behavior).
+
+     Returns:
+         List of statistics dictionaries with converted timestamps
+     """
+     converted_statistics = []
+     for stat in statistics:
+         converted_stat = stat.copy()
+
+         # Convert start timestamp if present
+         if "start" in converted_stat:
+             converted_stat["start"] = timestamp_ms_to_iso_string(converted_stat["start"], timezone)
+
+         # Convert end timestamp if present (for statistics that have it)
+         if "end" in converted_stat:
+             converted_stat["end"] = timestamp_ms_to_iso_string(converted_stat["end"], timezone)
+
+         converted_statistics.append(converted_stat)
+
+     return converted_statistics
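The new module layers everything on one base conversion: `timestamp_ms_to_iso_string` and `timestamp_ms_to_date` both delegate to `timestamp_ms_to_datetime`, and `convert_statistics_timestamps` maps the string conversion over statistics rows. A minimal usage sketch of the helpers above (the timestamp value is illustrative; the local-time outputs assume the Europe/Paris rules shipped with pytz):

    from gazpar2haws.datetime_utils import (
        convert_statistics_timestamps,
        timestamp_ms_to_date,
        timestamp_ms_to_iso_string,
    )

    # 2020-12-14T00:00:00+00:00 expressed as Unix milliseconds
    ts_ms = 1607904000000

    # Without a timezone, the helpers fall back to UTC
    print(timestamp_ms_to_iso_string(ts_ms))                  # 2020-12-14T00:00:00+00:00

    # With a timezone, the instant is converted before formatting
    print(timestamp_ms_to_iso_string(ts_ms, "Europe/Paris"))  # 2020-12-14T01:00:00+01:00
    print(timestamp_ms_to_date(ts_ms, "Europe/Paris"))        # 2020-12-14

    # Statistics rows as returned by statistics_during_period (millisecond timestamps)
    stats = [{"start": ts_ms, "end": ts_ms + 3600000, "sum": 42.0}]
    print(convert_statistics_timestamps(stats, "Europe/Paris")[0]["start"])
    # 2020-12-14T01:00:00+01:00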
gazpar2haws/gazpar.py CHANGED
@@ -8,6 +8,7 @@ import pytz
  from pygazpar.datasource import MeterReadings  # type: ignore

  from gazpar2haws.date_array import DateArray
+ from gazpar2haws.datetime_utils import timestamp_ms_to_date
  from gazpar2haws.haws import HomeAssistantWS, HomeAssistantWSException
  from gazpar2haws.model import (
      ConsumptionQuantityArray,
@@ -91,12 +92,46 @@ class Gazpar:
          # Volume, energy and cost sensor names.
          volume_sensor_name = f"sensor.{self._name}_volume"
          energy_sensor_name = f"sensor.{self._name}_energy"
-         cost_sensor_name = f"sensor.{self._name}_cost"
+         consumption_cost_sensor_name = f"sensor.{self._name}_consumption_cost"
+         subscription_cost_sensor_name = f"sensor.{self._name}_subscription_cost"
+         transport_cost_sensor_name = f"sensor.{self._name}_transport_cost"
+         energy_taxes_cost_sensor_name = f"sensor.{self._name}_energy_taxes_cost"
+         total_cost_sensor_name = f"sensor.{self._name}_total_cost"
+
+         # Automatic migration from v0.3.x to v0.4.0
+         # Migrate old sensor.{name}_cost to sensor.{name}_total_cost if pricing is enabled
+         if self._pricing_config is not None:
+             try:
+                 old_total_cost_sensor_name = f"sensor.{self._name}_cost"
+                 await self._homeassistant.migrate_statistic(
+                     old_entity_id=old_total_cost_sensor_name,
+                     new_entity_id=total_cost_sensor_name,
+                     new_name="Gazpar2HAWS Total Cost",
+                     unit_of_measurement="€",
+                     timezone=self._timezone,
+                     as_of_date=as_of_date,
+                 )
+             except Exception:  # pylint: disable=broad-except
+                 Logger.warning(
+                     f"Error during automatic sensor migration from "
+                     f"{old_total_cost_sensor_name} to {total_cost_sensor_name}: "
+                     f"{traceback.format_exc()}"
+                 )

          # Eventually reset the sensor in Home Assistant
          if self._reset:
              try:
-                 await self._homeassistant.clear_statistics([volume_sensor_name, energy_sensor_name])
+                 await self._homeassistant.clear_statistics(
+                     [
+                         volume_sensor_name,
+                         energy_sensor_name,
+                         consumption_cost_sensor_name,
+                         subscription_cost_sensor_name,
+                         transport_cost_sensor_name,
+                         energy_taxes_cost_sensor_name,
+                         total_cost_sensor_name,
+                     ]
+                 )
              except Exception:
                  Logger.warning(f"Error while resetting the sensor in Home Assistant: {traceback.format_exc()}")
                  raise
@@ -105,7 +140,21 @@

          last_date_and_value_by_sensor[volume_sensor_name] = await self.find_last_date_and_value(volume_sensor_name)
          last_date_and_value_by_sensor[energy_sensor_name] = await self.find_last_date_and_value(energy_sensor_name)
-         last_date_and_value_by_sensor[cost_sensor_name] = await self.find_last_date_and_value(cost_sensor_name)
+         last_date_and_value_by_sensor[consumption_cost_sensor_name] = await self.find_last_date_and_value(
+             consumption_cost_sensor_name
+         )
+         last_date_and_value_by_sensor[subscription_cost_sensor_name] = await self.find_last_date_and_value(
+             subscription_cost_sensor_name
+         )
+         last_date_and_value_by_sensor[transport_cost_sensor_name] = await self.find_last_date_and_value(
+             transport_cost_sensor_name
+         )
+         last_date_and_value_by_sensor[energy_taxes_cost_sensor_name] = await self.find_last_date_and_value(
+             energy_taxes_cost_sensor_name
+         )
+         last_date_and_value_by_sensor[total_cost_sensor_name] = await self.find_last_date_and_value(
+             total_cost_sensor_name
+         )

          # Compute the start date as the minimum of the last dates plus one day
          start_date = min(min(v[0] for v in last_date_and_value_by_sensor.values()) + timedelta(days=1), as_of_date)
@@ -113,12 +162,34 @@
          # Get all start dates
          energy_start_date = last_date_and_value_by_sensor[energy_sensor_name][0] + timedelta(days=1)
          volume_start_date = last_date_and_value_by_sensor[volume_sensor_name][0] + timedelta(days=1)
-         cost_start_date = last_date_and_value_by_sensor[cost_sensor_name][0] + timedelta(days=1)
+         consumption_cost_start_date = last_date_and_value_by_sensor[consumption_cost_sensor_name][0] + timedelta(days=1)
+         subscription_cost_start_date = last_date_and_value_by_sensor[subscription_cost_sensor_name][0] + timedelta(
+             days=1
+         )
+         transport_cost_start_date = last_date_and_value_by_sensor[transport_cost_sensor_name][0] + timedelta(days=1)
+         energy_taxes_cost_start_date = last_date_and_value_by_sensor[energy_taxes_cost_sensor_name][0] + timedelta(
+             days=1
+         )
+         total_cost_start_date = last_date_and_value_by_sensor[total_cost_sensor_name][0] + timedelta(days=1)
+
+         # Get the minimum cost start date
+         cost_start_date = min(
+             consumption_cost_start_date,
+             subscription_cost_start_date,
+             transport_cost_start_date,
+             energy_taxes_cost_start_date,
+             total_cost_start_date,
+         )

          Logger.debug(f"Min start date for all sensors: {start_date}")
          Logger.debug(f"Energy start date: {energy_start_date}")
          Logger.debug(f"Volume start date: {volume_start_date}")
-         Logger.debug(f"Cost start date: {cost_start_date}")
+         Logger.debug(f"Consumption cost start date: {consumption_cost_start_date}")
+         Logger.debug(f"Subscription cost start date: {subscription_cost_start_date}")
+         Logger.debug(f"Transport cost start date: {transport_cost_start_date}")
+         Logger.debug(f"Energy taxes cost start date: {energy_taxes_cost_start_date}")
+         Logger.debug(f"Total cost start date: {total_cost_start_date}")
+         Logger.debug(f"Min cost start date: {cost_start_date}")

          # Fetch the data from GrDF and publish it to Home Assistant
          daily_history = self.fetch_daily_gazpar_history(start_date, as_of_date)
@@ -151,6 +222,7 @@ class Gazpar:
          if volume_array is not None:
              await self.publish_date_array(
                  volume_sensor_name,
+                 "Gazpar2HAWS Volume",
                  "m³",
                  volume_array,
                  last_date_and_value_by_sensor[volume_sensor_name][1],
@@ -161,6 +233,7 @@ class Gazpar:
          if energy_array is not None and energy_start_date <= end_date:
              await self.publish_date_array(
                  energy_sensor_name,
+                 "Gazpar2HAWS Energy",
                  "kWh",
                  energy_array[energy_start_date : end_date + timedelta(days=1)],
                  last_date_and_value_by_sensor[energy_sensor_name][1],
@@ -184,18 +257,55 @@ class Gazpar:
                  value_array=energy_array[cost_start_date : end_date + timedelta(days=1)],
              )

-             cost_array = pricer.compute(quantities, PriceUnit.EURO)
+             cost_breakdown = pricer.compute(quantities, PriceUnit.EURO)
          else:
-             cost_array = None
+             cost_breakdown = None
+
+         # Publish the cost breakdown to Home Assistant
+         if cost_breakdown is not None:
+             # Publish consumption cost
+             await self.publish_date_array(
+                 consumption_cost_sensor_name,
+                 "Gazpar2HAWS Consumption Cost",
+                 cost_breakdown.consumption.value_unit,
+                 cost_breakdown.consumption.value_array,
+                 last_date_and_value_by_sensor[consumption_cost_sensor_name][1],
+             )
+
+             # Publish subscription cost
+             await self.publish_date_array(
+                 subscription_cost_sensor_name,
+                 "Gazpar2HAWS Subscription Cost",
+                 cost_breakdown.subscription.value_unit,
+                 cost_breakdown.subscription.value_array,
+                 last_date_and_value_by_sensor[subscription_cost_sensor_name][1],
+             )
+
+             # Publish transport cost
+             await self.publish_date_array(
+                 transport_cost_sensor_name,
+                 "Gazpar2HAWS Transport Cost",
+                 cost_breakdown.transport.value_unit,
+                 cost_breakdown.transport.value_array,
+                 last_date_and_value_by_sensor[transport_cost_sensor_name][1],
+             )
+
+             # Publish energy taxes cost
+             await self.publish_date_array(
+                 energy_taxes_cost_sensor_name,
+                 "Gazpar2HAWS Energy Taxes Cost",
+                 cost_breakdown.energy_taxes.value_unit,
+                 cost_breakdown.energy_taxes.value_array,
+                 last_date_and_value_by_sensor[energy_taxes_cost_sensor_name][1],
+             )

-         # Publish the cost to Home Assistant
-         if cost_array is not None:
-             cost_initial_value = last_date_and_value_by_sensor[cost_sensor_name][1]
+             # Publish total cost
              await self.publish_date_array(
-                 cost_sensor_name,
-                 cost_array.value_unit,
-                 cost_array.value_array,
-                 cost_initial_value,
+                 total_cost_sensor_name,
+                 "Gazpar2HAWS Total Cost",
+                 cost_breakdown.total.value_unit,
+                 cost_breakdown.total.value_array,
+                 last_date_and_value_by_sensor[total_cost_sensor_name][1],
              )
          else:
              Logger.info("No cost data to publish")
@@ -276,6 +386,7 @@ class Gazpar:
      async def publish_date_array(
          self,
          entity_id: str,
+         entity_name: str,
          unit_of_measurement: str,
          date_array: DateArray,
          initial_value: float,
@@ -298,7 +409,7 @@ class Gazpar:
          # Publish statistics to Home Assistant
          try:
              await self._homeassistant.import_statistics(
-                 entity_id, "recorder", "gazpar2haws", unit_of_measurement, statistics
+                 entity_id, "recorder", entity_name, unit_of_measurement, statistics
              )
          except Exception:
              Logger.warning(f"Error while importing statistics to Home Assistant: {traceback.format_exc()}")
@@ -353,10 +464,7 @@ class Gazpar:

          if last_statistic:
              # Extract the end date of the last statistics from the unix timestamp
-             last_date = datetime.fromtimestamp(
-                 int(str(last_statistic.get("start"))) / 1000,
-                 tz=pytz.timezone(self._timezone),
-             ).date()
+             last_date = timestamp_ms_to_date(last_statistic.get("start"), self._timezone)  # type: ignore[arg-type]

              # Get the last meter value
              last_value = float(str(last_statistic.get("sum")))
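The publish flow above now tracks five cost sensors instead of one. Each sensor resumes the day after its own last recorded statistic, the GrDF fetch restarts from the earliest start date of any sensor (capped at `as_of_date`), and the cost computation restarts from the earliest start date among the cost sensors. A condensed sketch of that date arithmetic, with hypothetical last dates standing in for `find_last_date_and_value` results:

    from datetime import date, timedelta

    # Hypothetical last dates already recorded per sensor (date part only).
    last_date_by_sensor = {
        "sensor.gazpar_volume": date(2025, 1, 10),
        "sensor.gazpar_energy": date(2025, 1, 10),
        "sensor.gazpar_consumption_cost": date(2025, 1, 8),
        "sensor.gazpar_subscription_cost": date(2025, 1, 9),
        "sensor.gazpar_transport_cost": date(2025, 1, 9),
        "sensor.gazpar_energy_taxes_cost": date(2025, 1, 9),
        "sensor.gazpar_total_cost": date(2025, 1, 9),
    }
    as_of_date = date(2025, 1, 12)

    # Each sensor resumes the day after its own last statistic...
    start_dates = {k: v + timedelta(days=1) for k, v in last_date_by_sensor.items()}

    # ...data is fetched from the earliest of those dates, capped at as_of_date...
    start_date = min(min(start_dates.values()), as_of_date)

    # ...and costs are recomputed from the earliest date needed by any cost sensor.
    cost_start_date = min(v for k, v in start_dates.items() if k.endswith("_cost"))

    print(start_date, cost_start_date)  # 2025-01-09 2025-01-09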
gazpar2haws/haws.py CHANGED
@@ -1,9 +1,12 @@
  import json
  import logging
- from datetime import datetime, timedelta
+ from datetime import date, datetime, timedelta

+ import pytz
  import websockets

+ from gazpar2haws.datetime_utils import convert_statistics_timestamps
+
  Logger = logging.getLogger(__name__)


@@ -225,3 +228,112 @@ class HomeAssistantWS:
          await self.send_message(clear_statistics_message)

          Logger.debug(f"Cleared {entity_ids} statistics")
+
+     # ----------------------------------
+     async def migrate_statistic(
+         self,
+         old_entity_id: str,
+         new_entity_id: str,
+         new_name: str,
+         unit_of_measurement: str,
+         timezone: str,
+         as_of_date: date,
+     ) -> bool:
+         """
+         Migrate statistics from an old sensor to a new sensor.
+
+         This implements smart detection logic:
+         - If old sensor exists but new doesn't: AUTO-MIGRATE data
+         - If both exist: SKIP with warning (prevent data loss)
+         - If only new exists: SKIP (normal operation, no old data)
+         - On error: LOG WARNING and return False (graceful fallback)
+
+         Args:
+             old_entity_id: Source sensor ID (e.g., sensor.gazpar2haws_cost)
+             new_entity_id: Target sensor ID (e.g., sensor.gazpar2haws_total_cost)
+             new_name: Display name for the new sensor
+             unit_of_measurement: Unit for the new sensor (e.g., "€")
+             timezone: Timezone string (e.g., "Europe/Paris")
+             as_of_date: Latest date to query for migration
+
+         Returns:
+             True if migration was successful or skipped safely, False on error
+         """
+         try:
+             Logger.debug(f"Checking for migration opportunity: {old_entity_id} → {new_entity_id}")
+
+             # Create timezone-aware datetime objects
+             tz = pytz.timezone(timezone)
+
+             # Convert as_of_date to datetime at start of day (00:00:00) in target timezone
+             as_of_datetime = tz.localize(datetime.combine(as_of_date, datetime.min.time()))
+
+             # Very old date to capture all historical data (10 years back from as_of_date)
+             very_old_datetime = as_of_datetime - timedelta(days=3650)
+
+             # Check if old and new sensors have data using statistics_during_period
+             # This is more reliable than list_statistic_ids which may have caching delays
+             old_statistics_data = await self.statistics_during_period(
+                 [old_entity_id], very_old_datetime, as_of_datetime
+             )
+
+             old_has_data = old_entity_id in old_statistics_data and old_statistics_data[old_entity_id]
+
+             # Decision logic
+             if not old_has_data:
+                 Logger.debug(f"Old sensor {old_entity_id} does not exist or has no data - no migration needed")
+                 return True
+
+             # Query new sensor to check if it already has data
+             new_statistics_data = await self.statistics_during_period(
+                 [new_entity_id], very_old_datetime, as_of_datetime
+             )
+
+             new_has_data = new_entity_id in new_statistics_data and new_statistics_data[new_entity_id]
+
+             if new_has_data:
+                 Logger.warning(
+                     f"Both old sensor {old_entity_id} and new sensor {new_entity_id} have data. "
+                     f"Skipping migration to prevent data loss. Old sensor can be manually deleted if desired."
+                 )
+                 return True
+
+             # At this point: old has data AND new doesn't → MIGRATE
+             Logger.info(f"Starting automatic migration: {old_entity_id} → {new_entity_id}")
+
+             old_statistics = old_statistics_data[old_entity_id]
+             Logger.debug(f"Found {len(old_statistics)} statistics entries to migrate from {old_entity_id}")
+
+             # Remove 'change' and 'end' fields from old statistics as they are not accepted by import_statistics
+             for stat in old_statistics:
+                 if "change" in stat:
+                     del stat["change"]
+                 if "end" in stat:
+                     del stat["end"]
+
+             # Convert start and end timestamps from Unix milliseconds to ISO format strings
+             # because import_statistics expects ISO format (using timezone for consistency)
+             converted_statistics = convert_statistics_timestamps(old_statistics, timezone)
+
+             # Import the statistics to the new sensor with same metadata
+             await self.import_statistics(
+                 entity_id=new_entity_id,
+                 source="recorder",
+                 name=new_name,
+                 unit_of_measurement=unit_of_measurement,
+                 statistics=converted_statistics,
+             )
+
+             Logger.info(
+                 f"Successfully migrated {len(old_statistics)} statistics entries "
+                 f"from {old_entity_id} to {new_entity_id}. "
+                 f"Old sensor can be deleted manually if desired."
+             )
+             return True
+
+         except Exception as exc:  # pylint: disable=broad-except
+             Logger.warning(
+                 f"Error during statistic migration from {old_entity_id} to {new_entity_id}: {exc}. "
+                 f"Continuing without migration (data is preserved in old sensor)."
+             )
+             return False
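The detection logic in `migrate_statistic` reduces to a small truth table over "does the old sensor have data" and "does the new sensor have data". A pure-function sketch of just that decision, separated from the websocket plumbing (`decide_migration` is a hypothetical helper, not part of the package):

    def decide_migration(old_has_data: bool, new_has_data: bool) -> str:
        """Mirror of migrate_statistic's detection logic as a pure function."""
        if not old_has_data:
            return "skip"  # nothing to migrate: fresh install, or already migrated
        if new_has_data:
            return "skip"  # both sensors populated: do nothing, prevent data loss
        return "migrate"   # old data exists and new sensor is empty: copy statistics

    assert decide_migration(False, False) == "skip"
    assert decide_migration(False, True) == "skip"
    assert decide_migration(True, True) == "skip"
    assert decide_migration(True, False) == "migrate"

Any exception along the way is caught, logged, and turned into a False return, so a failed migration never blocks publishing and the old sensor's data is left untouched.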
gazpar2haws/model.py CHANGED
@@ -1,9 +1,10 @@
+ import tempfile
  from datetime import date
  from enum import Enum
  from pathlib import Path
  from typing import Generic, Optional, TypeVar

- from pydantic import BaseModel, DirectoryPath, EmailStr, SecretStr, model_validator
+ from pydantic import BaseModel, EmailStr, SecretStr, model_validator
  from pydantic_extra_types.timezone_name import TimeZoneName

  from gazpar2haws.date_array import DateArray
@@ -53,7 +54,7 @@ class Logging(BaseModel):
  class Device(BaseModel):
      name: str
      data_source: str = "json"
-     tmp_dir: DirectoryPath = DirectoryPath("/tmp")
+     tmp_dir: Optional[str] = None  # If None, will use system temp directory
      as_of_date: Optional[date] = None
      username: Optional[EmailStr] = None
      password: Optional[SecretStr] = None
@@ -72,8 +73,15 @@
              raise ValueError("Missing password")
          if self.data_source != "test" and self.pce_identifier is None:
              raise ValueError("Missing pce_identifier")
-         if self.data_source == "excel" and self.tmp_dir is None or not Path(self.tmp_dir).is_dir():
+
+         # Set tmp_dir to system temp directory if not specified
+         if self.tmp_dir is None:
+             self.tmp_dir = tempfile.gettempdir()
+
+         # Validate tmp_dir exists for excel data source
+         if self.data_source == "excel" and not Path(self.tmp_dir).is_dir():
              raise ValueError(f"Invalid tmp_dir {self.tmp_dir}")
+
          return self


@@ -148,6 +156,20 @@ class Price(Unit[ValueUnit, BaseUnit]): # pylint: disable=too-few-public-method
      vat_id: Optional[str] = None


+ # ----------------------------------
+ class CompositePriceValue(Period):
+     price_unit: Optional[PriceUnit] = None  # € or ¢ (applies to both components)
+     vat_id: Optional[str] = None
+
+     # Quantity component (€/kWh)
+     quantity_value: Optional[float] = None
+     quantity_unit: Optional[QuantityUnit] = None
+
+     # Time component (€/month)
+     time_value: Optional[float] = None
+     time_unit: Optional[TimeUnit] = None
+
+
  # ----------------------------------
  class PriceValue(Price[ValueUnit, BaseUnit], Value):
      pass
@@ -178,17 +200,51 @@ class EnergyTaxesPriceArray(PriceValueArray[PriceUnit, QuantityUnit]): # pylint
      pass


+ # ----------------------------------
+ class CompositePriceArray(Period):  # pylint: disable=too-few-public-methods
+     name: Optional[str] = None
+     price_unit: Optional[PriceUnit] = None
+     vat_id: Optional[str] = None
+
+     # Quantity component (€/kWh) - vectorized
+     quantity_value_array: Optional[DateArray] = None
+     quantity_unit: Optional[QuantityUnit] = None
+
+     # Time component (€/month) - vectorized
+     time_value_array: Optional[DateArray] = None
+     time_unit: Optional[TimeUnit] = None
+
+     @model_validator(mode="after")
+     def set_value_arrays(self):
+         if self.quantity_value_array is None:
+             self.quantity_value_array = DateArray(
+                 name=f"{self.name}_quantity", start_date=self.start_date, end_date=self.end_date
+             )  # pylint: disable=attribute-defined-outside-init
+         if self.time_value_array is None:
+             self.time_value_array = DateArray(
+                 name=f"{self.name}_time", start_date=self.start_date, end_date=self.end_date
+             )  # pylint: disable=attribute-defined-outside-init
+         return self
+
+
  # ----------------------------------
  class Pricing(BaseModel):
      vat: Optional[list[VatRate]] = None
-     consumption_prices: list[PriceValue[PriceUnit, QuantityUnit]]
-     subscription_prices: Optional[list[PriceValue[PriceUnit, TimeUnit]]] = None
-     transport_prices: Optional[list[PriceValue[PriceUnit, TimeUnit]]] = None
-     energy_taxes: Optional[list[PriceValue[PriceUnit, QuantityUnit]]] = None
+     consumption_prices: list[CompositePriceValue]
+     subscription_prices: Optional[list[CompositePriceValue]] = None
+     transport_prices: Optional[list[CompositePriceValue]] = None
+     energy_taxes: Optional[list[CompositePriceValue]] = None

      @model_validator(mode="before")
      @classmethod
      def propagates_properties(cls, values):
+         # Default units for all price types
+         default_units = {
+             "price_unit": "€",
+             "quantity_unit": "kWh",
+             "time_unit": "month",
+         }
+
          for price_list in [
              "consumption_prices",
              "subscription_prices",
@@ -202,23 +258,19 @@ class Pricing(BaseModel):

              if "start_date" not in prices[0]:
                  raise ValueError(f"Missing start_date in first element of {price_list}")
-             if "value_unit" not in prices[0]:
-                 prices[0]["value_unit"] = "€"
-             if "base_unit" not in prices[0]:
-                 if price_list in ["consumption_prices", "energy_taxes"]:
-                     prices[0]["base_unit"] = "kWh"
-                 else:
-                     raise ValueError(
-                         "Missing base_unit in first element of ['transport_prices', 'subscription_prices']"
-                     )

+             # Apply defaults to first entry
+             for key, default_value in default_units.items():
+                 if key not in prices[0]:
+                     prices[0][key] = default_value
+
+             # Propagate properties through the list
              for i in range(len(prices) - 1):
                  if "end_date" not in prices[i]:
                      prices[i]["end_date"] = prices[i + 1]["start_date"]
-                 if "value_unit" not in prices[i + 1]:
-                     prices[i + 1]["value_unit"] = prices[i]["value_unit"]
-                 if "base_unit" not in prices[i + 1]:
-                     prices[i + 1]["base_unit"] = prices[i]["base_unit"]
+                 for key, default_value in default_units.items():
+                     if key not in prices[i + 1]:
+                         prices[i + 1][key] = prices[i][key]
                  if "vat_id" not in prices[i + 1] and "vat_id" in prices[i]:
                      prices[i + 1]["vat_id"] = prices[i]["vat_id"]

@@ -233,3 +285,14 @@ class ConsumptionQuantityArray(Unit[QuantityUnit, TimeUnit], ValueArray):
  # ----------------------------------
  class CostArray(Unit[PriceUnit, TimeUnit], ValueArray):
      pass
+
+
+ # ----------------------------------
+ class CostBreakdown(BaseModel):
+     """Detailed breakdown of costs with individual components and total."""
+
+     consumption: CostArray
+     subscription: CostArray
+     transport: CostArray
+     energy_taxes: CostArray
+     total: CostArray
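Taken together, the model changes replace the single `value_unit`/`base_unit` pair with composite prices that can carry a quantity component (€/kWh) and a time component (€/month) at once, with `propagates_properties` defaulting the units of the first entry and propagating them (and back-filling `end_date`) through the rest of the list. A sketch of that defaulting behaviour, assuming the models import as shown; the dates and price values are illustrative, not real tariffs:

    from gazpar2haws.model import Pricing

    pricing = Pricing(
        consumption_prices=[
            # First entry: only start_date is mandatory; price_unit, quantity_unit
            # and time_unit default to "€", "kWh" and "month".
            {"start_date": "2024-01-01", "quantity_value": 0.07790},
            # Later entries inherit the (possibly defaulted) units of their
            # predecessor, and the previous entry's end_date is back-filled
            # from this start_date.
            {"start_date": "2024-07-01", "quantity_value": 0.07990},
        ],
        subscription_prices=[
            # A time-based component: so many € per month.
            {"start_date": "2024-01-01", "time_value": 19.83},
        ],
    )

    first = pricing.consumption_prices[0]
    print(first.price_unit, first.quantity_unit, first.time_unit)  # unit values: €, kWh, month
    print(first.end_date)  # 2024-07-01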