meteocat 4.0.4 → 4.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,11 @@
1
+ ## [4.0.5](https://github.com/figorr/meteocat/compare/v4.0.4...v4.0.5) (2026-01-31)
2
+
3
+
4
+ ### Bug Fixes
5
+
6
+ * fix state for UVI, Hourly and Daily File sensors ([af645a6](https://github.com/figorr/meteocat/commit/af645a6a99189b1d0dac3b9b1019a400ae4e92f5))
7
+ * include last update check to avoid continuous API calls when API returns outdated hourly and daily forecast data ([939a8ac](https://github.com/figorr/meteocat/commit/939a8ac66bfb3c3d750fda0cd719f310493f06c0))
8
+
1
9
  ## [4.0.4](https://github.com/figorr/meteocat/compare/v4.0.3...v4.0.4) (2026-01-25)
2
10
 
3
11
 
@@ -24,7 +24,7 @@ from .const import DOMAIN, PLATFORMS
24
24
  _LOGGER = logging.getLogger(__name__)
25
25
 
26
26
  # Versión
27
- __version__ = "4.0.4"
27
+ __version__ = "4.0.5"
28
28
 
29
29
  # Definir el esquema de configuración CONFIG_SCHEMA
30
30
  CONFIG_SCHEMA = vol.Schema(
@@ -61,6 +61,8 @@ DEFAULT_NAME = "METEOCAT"
61
61
  DEFAULT_VALIDITY_DAYS = 1 # Número de días a partir de los cuales se considera que el archivo de información está obsoleto
62
62
  DEFAULT_VALIDITY_HOURS = 6 # Hora a partir de la cual la API tiene la información actualizada de predicciones disponible para descarga
63
63
  DEFAULT_VALIDITY_MINUTES = 0 # Minutos a partir de los cuales la API tiene la información actualizada de predicciones disponible para descarga
64
+ DEFAULT_HOURLY_FORECAST_MIN_HOURS_SINCE_LAST_UPDATE = 15 # Horas mínimas desde la última actualización de predicciones horarias para proceder a una nueva llamada a la API
65
+ DEFAULT_DAILY_FORECAST_MIN_HOURS_SINCE_LAST_UPDATE = 15 # Horas mínimas desde la última actualización de predicciones diarias para proceder a una nueva llamada a la API
64
66
  DEFAULT_UVI_LOW_VALIDITY_HOURS = 5 # Hora a partir de la cual la API tiene la información actualizada de datos UVI disponible para descarga con límite bajo de cuota
65
67
  DEFAULT_UVI_LOW_VALIDITY_MINUTES = 0 # Minutos a partir de los cuales la API tiene la información actualizada de datos UVI disponible para descarga con límite bajo de cuota
66
68
  DEFAULT_UVI_HIGH_VALIDITY_HOURS = 9 # Hora a partir de la cual la API tiene la información actualizada de datos UVI disponible para descarga con límite alto de cuota
@@ -52,6 +52,8 @@ from .const import (
52
52
  DEFAULT_VALIDITY_DAYS,
53
53
  DEFAULT_VALIDITY_HOURS,
54
54
  DEFAULT_VALIDITY_MINUTES,
55
+ DEFAULT_HOURLY_FORECAST_MIN_HOURS_SINCE_LAST_UPDATE,
56
+ DEFAULT_DAILY_FORECAST_MIN_HOURS_SINCE_LAST_UPDATE,
55
57
  DEFAULT_UVI_LOW_VALIDITY_HOURS,
56
58
  DEFAULT_UVI_LOW_VALIDITY_MINUTES,
57
59
  DEFAULT_UVI_HIGH_VALIDITY_HOURS,
@@ -681,81 +683,100 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
681
683
  name=f"{DOMAIN} Entity Coordinator",
682
684
  update_interval=DEFAULT_ENTITY_UPDATE_INTERVAL,
683
685
  )
684
-
686
+
685
687
  # --------------------------------------------------------------------- #
686
688
  # VALIDACIÓN DINÁMICA DE DATOS DE PREDICCIÓN
687
689
  # --------------------------------------------------------------------- #
688
- async def validate_forecast_data(self, file_path: Path) -> dict:
689
- """Valida y retorna datos de predicción si son válidos.
690
-
691
- - Si `limit_prediccio >= 550` → actualiza **el día siguiente** después de las DEFAULT_VALIDITY_HOURS:DEFAULT_VALIDITY_MINUTES.
692
- - Si `limit_prediccio < 550` → actualiza **dos días después** después de las DEFAULT_VALIDITY_HOURS:DEFAULT_VALIDITY_MINUTES.
693
- """
690
+ async def validate_forecast_data(self, file_path: Path) -> Optional[dict]:
691
+ """Valida si los datos de predicción son válidos considerando 3 condiciones."""
694
692
  if not file_path.exists():
695
- _LOGGER.warning("El archivo %s no existe. Se considerará inválido.", file_path)
693
+ _LOGGER.warning("Archivo no existe: %s", file_path)
696
694
  return None
695
+
697
696
  try:
698
- async with aiofiles.open(file_path, "r", encoding="utf-8") as f:
699
- content = await f.read()
700
- data = json.loads(content)
697
+ data = await load_json_from_file(file_path)
701
698
 
702
- # Fecha del primer día de predicción (solo fecha)
703
- first_date_str = data["dies"][0]["data"].rstrip("Z")
704
- first_date = datetime.fromisoformat(first_date_str).date()
705
- today = datetime.now(timezone.utc).date()
699
+ if not isinstance(data, dict) or "dies" not in data or not data["dies"]:
700
+ _LOGGER.warning("Estructura inválida en %s", file_path)
701
+ return None
706
702
 
707
- # Hora actual en zona local (Europe/Madrid)
708
- current_time_local = datetime.now(TIMEZONE).time()
709
- min_update_time = time(DEFAULT_VALIDITY_HOURS, DEFAULT_VALIDITY_MINUTES)
703
+ # ── Condición 1: Antigüedad del primer día de predicción ──
704
+ first_date_str = data["dies"][0]["data"].rstrip("Z")
705
+ try:
706
+ first_date = datetime.fromisoformat(first_date_str).date()
707
+ except Exception as exc:
708
+ _LOGGER.warning("Fecha inválida en %s: %s", file_path, exc)
709
+ return None
710
710
 
711
+ now_local = datetime.now(TIMEZONE)
712
+ today = now_local.date() # Si queremos respetar que los datos del json son UTC quizás mejor usar today = datetime.now(timezone.utc).date()
713
+ current_time_local = now_local.time()
711
714
  days_diff = (today - first_date).days
712
715
 
713
- # -----------------------------------------------------------------
714
- # Lógica según cuota
715
- # -----------------------------------------------------------------
716
+ # ── Condición 2: Lógica de umbrales según cuota para determinar días y horas válidos de actualización ──
716
717
  if self.limit_prediccio >= PREDICCIO_HIGH_QUOTA_LIMIT:
717
- # Cuota alta → actualiza cuando los datos son de ayer (o antes) + hora OK
718
- should_update = days_diff >= DEFAULT_VALIDITY_DAYS and current_time_local >= min_update_time
718
+ min_days = DEFAULT_VALIDITY_DAYS
719
+ min_update_time = time(DEFAULT_VALIDITY_HOURS, DEFAULT_VALIDITY_MINUTES)
720
+ quota_level = "ALTA"
719
721
  else:
720
- # Cuota baja → actualiza solo cuando los datos son de anteayer + hora OK
721
- should_update = days_diff > DEFAULT_VALIDITY_DAYS and current_time_local >= min_update_time
722
+ min_days = DEFAULT_VALIDITY_DAYS + 1
723
+ min_update_time = time(DEFAULT_VALIDITY_HOURS, DEFAULT_VALIDITY_MINUTES)
724
+ quota_level = "BAJA"
725
+
726
+ cond1 = days_diff >= min_days
727
+ cond2 = current_time_local >= min_update_time
728
+
729
+ # ── Condición 3: Más de X horas desde última actualización ──
730
+ cond3 = True # por defecto permite actualizar si no hay timestamp
731
+ last_update_str = None
732
+ hours_threshold = (
733
+ DEFAULT_HOURLY_FORECAST_MIN_HOURS_SINCE_LAST_UPDATE
734
+ if "hourly" in file_path.name.lower()
735
+ else DEFAULT_DAILY_FORECAST_MIN_HOURS_SINCE_LAST_UPDATE
736
+ )
737
+
738
+ if "actualitzat" in data and "dataUpdate" in data["actualitzat"]:
739
+ try:
740
+ last_update = datetime.fromisoformat(data["actualitzat"]["dataUpdate"])
741
+ time_since = now_local - last_update
742
+ cond3 = time_since > timedelta(hours=hours_threshold)
743
+ last_update_str = last_update.strftime("%Y-%m-%d %H:%M:%S %z")
744
+ _LOGGER.debug(
745
+ "%s → tiempo desde última act.: %s (%s %dh)",
746
+ file_path.name, time_since,
747
+ "supera" if cond3 else "NO supera", hours_threshold
748
+ )
749
+ except ValueError:
750
+ _LOGGER.warning("dataUpdate inválido en %s: %s", file_path, data["actualitzat"]["dataUpdate"])
751
+ cond3 = True
752
+
753
+ should_update = cond1 and cond2 and cond3
722
754
 
723
- # -----------------------------------------------------------------
724
- # Logs detallados
725
- # -----------------------------------------------------------------
726
755
  _LOGGER.debug(
727
- "[%s] Validación: primer_día=%s, hoy=%s días=%d, "
728
- "cuota=%d (%s), hora_local=%s %s actualizar=%s",
729
- file_path.name,
730
- first_date,
731
- today,
732
- days_diff,
733
- self.limit_prediccio,
734
- "ALTA" if self.limit_prediccio >= 550 else "BAJA",
735
- current_time_local.strftime("%H:%M"),
736
- min_update_time.strftime("%H:%M"),
737
- should_update,
756
+ "[%s] Validación → cond1(días >=%d)=%s | cond2(hora >=%s)=%s | "
757
+ "cond3(>%dh desde %s)=%s | cuota=%d (%s) | actualizar=%s",
758
+ file_path.name, min_days, cond1,
759
+ min_update_time.strftime("%H:%M"), cond2,
760
+ hours_threshold, last_update_str or "nunca", cond3,
761
+ self.limit_prediccio, quota_level, should_update
738
762
  )
739
763
 
740
764
  if should_update:
741
- _LOGGER.debug(
742
- "Datos obsoletos o actualizables → llamando API (%s, cuota=%d)",
743
- file_path.name, self.limit_prediccio
744
- )
745
- return None # → forzar actualización
765
+ _LOGGER.info("Datos obsoletos → llamando API para %s", file_path.name)
766
+ return None
746
767
 
747
- _LOGGER.debug("Datos válidos en %s → usando caché", file_path.name)
768
+ _LOGGER.debug("Datos válidos → usando caché %s", file_path.name)
748
769
  return data
749
770
 
771
+ except json.JSONDecodeError:
772
+ _LOGGER.error("JSON corrupto en %s", file_path)
773
+ return None
750
774
  except Exception as e:
751
- _LOGGER.warning("Error validando %s: %s", file_path, e)
775
+ _LOGGER.error("Error validando %s: %s", file_path, e)
752
776
  return None
753
777
 
754
- # --------------------------------------------------------------------- #
755
- # OBTENCIÓN Y GUARDADO DE DATOS DESDE LA API
756
- # --------------------------------------------------------------------- #
757
778
  async def _fetch_and_save_data(self, api_method, file_path: Path) -> dict:
758
- """Obtiene datos de la API y los guarda en un archivo JSON."""
779
+ """Obtiene datos de la API, los procesa y guarda con timestamp."""
759
780
  try:
760
781
  data = await asyncio.wait_for(api_method(self.town_id), timeout=30)
761
782
 
@@ -769,16 +790,24 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
769
790
  ):
770
791
  details["valor"] = "0.0"
771
792
 
772
- await save_json_to_file(data, file_path)
793
+ # Añadir timestamp de actualización exitosa
794
+ now_iso = datetime.now(TIMEZONE).isoformat()
795
+ enhanced_data = {
796
+ "actualitzat": {"dataUpdate": now_iso},
797
+ **data
798
+ }
799
+
800
+ await save_json_to_file(enhanced_data, file_path)
801
+ _LOGGER.debug("Guardado %s con dataUpdate: %s", file_path.name, now_iso)
773
802
 
774
- # Actualizar cuotas (dependiendo del tipo de predicción horaria/diaria)
803
+ # Actualizar cuotas
775
804
  if api_method.__name__ in ("get_prediccion_horaria", "get_prediccion_diaria"):
776
805
  await _update_quotes(self.hass, "Prediccio")
777
806
 
778
- return data
807
+ return enhanced_data
779
808
 
780
809
  except Exception as err:
781
- _LOGGER.error(f"Error al obtener datos de la API para {file_path}: {err}")
810
+ _LOGGER.error("Error al obtener/guardar %s: %s", file_path, err)
782
811
  raise
783
812
 
784
813
  # --------------------------------------------------------------------- #
@@ -836,7 +865,7 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
836
865
  # === FALLBACK SEGURO ===
837
866
  hourly_cache = await load_json_from_file(self.hourly_file) or {}
838
867
  daily_cache = await load_json_from_file(self.daily_file) or {}
839
-
868
+
840
869
  # --- Fecha horaria ---
841
870
  h_raw = hourly_cache.get("dies", [{}])[0].get("data", "")
842
871
  try:
@@ -860,7 +889,7 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
860
889
  self.hourly_file.name, h_display,
861
890
  self.daily_file.name, d_display
862
891
  )
863
-
892
+
864
893
  self.async_set_updated_data({"hourly": hourly_cache, "daily": daily_cache})
865
894
  return {"hourly": hourly_cache, "daily": daily_cache}
866
895
 
@@ -21,5 +21,5 @@
21
21
  "packaging>=20.3",
22
22
  "wrapt>=1.14.0"
23
23
  ],
24
- "version": "4.0.4"
24
+ "version": "4.0.5"
25
25
  }
@@ -97,6 +97,10 @@ from .const import (
97
97
  DEFAULT_VALIDITY_DAYS,
98
98
  DEFAULT_VALIDITY_HOURS,
99
99
  DEFAULT_VALIDITY_MINUTES,
100
+ DEFAULT_UVI_LOW_VALIDITY_HOURS,
101
+ DEFAULT_UVI_LOW_VALIDITY_MINUTES,
102
+ DEFAULT_UVI_HIGH_VALIDITY_HOURS,
103
+ DEFAULT_UVI_HIGH_VALIDITY_MINUTES,
100
104
  DEFAULT_ALERT_VALIDITY_TIME,
101
105
  DEFAULT_QUOTES_VALIDITY_TIME,
102
106
  ALERT_VALIDITY_MULTIPLIER_100,
@@ -114,6 +118,10 @@ from .const import (
114
118
  MOON_FILE_STATUS,
115
119
  MOONRISE,
116
120
  MOONSET,
121
+ PREDICCIO_HIGH_QUOTA_LIMIT,
122
+ DEFAULT_HOURLY_FORECAST_MIN_HOURS_SINCE_LAST_UPDATE,
123
+ DEFAULT_DAILY_FORECAST_MIN_HOURS_SINCE_LAST_UPDATE,
124
+ DEFAULT_UVI_MIN_HOURS_SINCE_LAST_UPDATE,
117
125
  )
118
126
 
119
127
  from .coordinator import (
@@ -1181,46 +1189,83 @@ class MeteocatHourlyForecastStatusSensor(CoordinatorEntity[MeteocatEntityCoordin
1181
1189
  self._town_name = entry_data["town_name"]
1182
1190
  self._town_id = entry_data["town_id"]
1183
1191
  self._station_id = entry_data["station_id"]
1192
+ self._limit_prediccio = entry_data["limit_prediccio"]
1184
1193
  self._attr_unique_id = f"sensor.{DOMAIN}_{self._town_id}_hourly_status"
1185
1194
  self._attr_entity_category = getattr(description, "entity_category", None)
1186
1195
 
1196
+ def _get_forecast_data(self, forecast_type: str) -> Optional[dict]:
1197
+ """Devuelve los datos del tipo de forecast (hourly o daily) o None."""
1198
+ if not self.coordinator.data:
1199
+ return None
1200
+ return self.coordinator.data.get(forecast_type)
1201
+
1187
1202
  def _get_first_date(self):
1188
- hourly_data = self.coordinator.data.get("hourly")
1189
- if hourly_data and "dies" in hourly_data:
1190
- return datetime.fromisoformat(hourly_data["dies"][0]["data"].rstrip("Z")).date()
1203
+ hourly_data = self._get_forecast_data("hourly")
1204
+ if hourly_data and "dies" in hourly_data and hourly_data["dies"]:
1205
+ try:
1206
+ first_date_str = hourly_data["dies"][0]["data"].rstrip("Z")
1207
+ return datetime.fromisoformat(first_date_str).date()
1208
+ except (ValueError, TypeError, KeyError, IndexError):
1209
+ _LOGGER.warning("No se pudo parsear primera fecha del forecast horario")
1210
+ return None
1211
+ return None
1212
+
1213
+ def _get_last_api_update(self):
1214
+ hourly_data = self._get_forecast_data("hourly")
1215
+ if hourly_data and "actualitzat" in hourly_data and "dataUpdate" in hourly_data["actualitzat"]:
1216
+ try:
1217
+ return datetime.fromisoformat(hourly_data["actualitzat"]["dataUpdate"])
1218
+ except ValueError:
1219
+ _LOGGER.warning("Formato inválido en dataUpdate del forecast horario")
1220
+ return None
1191
1221
  return None
1192
1222
 
1193
1223
  @property
1194
- def native_value(self):
1224
+ def native_value(self) -> str:
1195
1225
  first_date = self._get_first_date()
1196
- if first_date:
1197
- today = datetime.now(timezone.utc).date()
1198
- current_time = datetime.now(timezone.utc).time()
1199
- days_difference = (today - first_date).days
1200
- _LOGGER.debug(
1201
- f"Diferencia de días para predicciones horarias: {days_difference}."
1202
- f"Hora actual de validación: {current_time}."
1203
- f"Para la validación: "
1204
- f"número de días= {DEFAULT_VALIDITY_DAYS}, "
1205
- f"hora de contacto a la API >= {DEFAULT_VALIDITY_HOURS}, "
1206
- f"minutos de contacto a la API >= {DEFAULT_VALIDITY_MINUTES}."
1207
- )
1208
- if days_difference > DEFAULT_VALIDITY_DAYS and current_time >= time(DEFAULT_VALIDITY_HOURS, DEFAULT_VALIDITY_MINUTES):
1209
- return "obsolete"
1210
- return "updated"
1211
- return "unknown"
1226
+ if not first_date:
1227
+ _LOGGER.debug("Hourly status: no hay datos disponibles aún")
1228
+ return "unknown"
1229
+
1230
+ now_local = datetime.now(TIMEZONE)
1231
+ today = now_local.date()
1232
+ current_time = now_local.time()
1233
+ days_difference = (today - first_date).days
1234
+
1235
+ # Replicar lógica del coordinador
1236
+ min_days = DEFAULT_VALIDITY_DAYS if self._limit_prediccio >= PREDICCIO_HIGH_QUOTA_LIMIT else DEFAULT_VALIDITY_DAYS + 1
1237
+ min_time = time(DEFAULT_VALIDITY_HOURS + 1, DEFAULT_VALIDITY_MINUTES) # Margen adicional de +1 hora sobre la hora mínima configurada.
1238
+
1239
+ cond1 = days_difference >= min_days
1240
+ cond2 = current_time >= min_time
1241
+
1242
+ _LOGGER.debug(
1243
+ "Hourly status → días: %d (≥%d)=%s | hora: %s (≥%s)=%s → %s",
1244
+ days_difference, min_days, cond1,
1245
+ current_time.strftime("%H:%M"), min_time.strftime("%H:%M"), cond2,
1246
+ "obsolete" if cond1 and cond2 else "updated"
1247
+ )
1248
+
1249
+ if cond1 and cond2:
1250
+ return "obsolete"
1251
+ return "updated"
1212
1252
 
1213
1253
  @property
1214
- def extra_state_attributes(self):
1215
- attributes = super().extra_state_attributes or {}
1254
+ def extra_state_attributes(self) -> dict:
1255
+ attributes: dict = {}
1256
+
1216
1257
  first_date = self._get_first_date()
1217
1258
  if first_date:
1218
1259
  attributes["update_date"] = first_date.isoformat()
1260
+
1261
+ last_update = self._get_last_api_update()
1262
+ if last_update:
1263
+ attributes["data_updatetime"] = last_update.isoformat()
1264
+
1219
1265
  return attributes
1220
-
1266
+
1221
1267
  @property
1222
1268
  def device_info(self) -> DeviceInfo:
1223
- """Return the device info."""
1224
1269
  return DeviceInfo(
1225
1270
  identifiers={(DOMAIN, self._town_id)},
1226
1271
  name=f"Meteocat {self._station_id} {self._town_name}",
@@ -1237,46 +1282,83 @@ class MeteocatDailyForecastStatusSensor(CoordinatorEntity[MeteocatEntityCoordina
1237
1282
  self._town_name = entry_data["town_name"]
1238
1283
  self._town_id = entry_data["town_id"]
1239
1284
  self._station_id = entry_data["station_id"]
1285
+ self._limit_prediccio = entry_data["limit_prediccio"]
1240
1286
  self._attr_unique_id = f"sensor.{DOMAIN}_{self._town_id}_daily_status"
1241
1287
  self._attr_entity_category = getattr(description, "entity_category", None)
1242
1288
 
1289
+ def _get_forecast_data(self, forecast_type: str) -> Optional[dict]:
1290
+ """Devuelve los datos del tipo de forecast (hourly o daily) o None."""
1291
+ if not self.coordinator.data:
1292
+ return None
1293
+ return self.coordinator.data.get(forecast_type)
1294
+
1243
1295
  def _get_first_date(self):
1244
- daily_data = self.coordinator.data.get("daily")
1245
- if daily_data and "dies" in daily_data:
1246
- return datetime.fromisoformat(daily_data["dies"][0]["data"].rstrip("Z")).date()
1296
+ daily_data = self._get_forecast_data("daily")
1297
+ if daily_data and "dies" in daily_data and daily_data["dies"]:
1298
+ try:
1299
+ first_date_str = daily_data["dies"][0]["data"].rstrip("Z")
1300
+ return datetime.fromisoformat(first_date_str).date()
1301
+ except (ValueError, TypeError, KeyError, IndexError):
1302
+ _LOGGER.warning("No se pudo parsear primera fecha del forecast diario")
1303
+ return None
1304
+ return None
1305
+
1306
+ def _get_last_api_update(self):
1307
+ daily_data = self._get_forecast_data("daily")
1308
+ if daily_data and "actualitzat" in daily_data and "dataUpdate" in daily_data["actualitzat"]:
1309
+ try:
1310
+ return datetime.fromisoformat(daily_data["actualitzat"]["dataUpdate"])
1311
+ except ValueError:
1312
+ _LOGGER.warning("Formato inválido en dataUpdate del forecast diario")
1313
+ return None
1247
1314
  return None
1248
1315
 
1249
1316
  @property
1250
- def native_value(self):
1317
+ def native_value(self) -> str:
1251
1318
  first_date = self._get_first_date()
1252
- if first_date:
1253
- today = datetime.now(timezone.utc).date()
1254
- current_time = datetime.now(timezone.utc).time()
1255
- days_difference = (today - first_date).days
1256
- _LOGGER.debug(
1257
- f"Diferencia de días para predicciones diarias: {days_difference}."
1258
- f"Hora actual de validación: {current_time}."
1259
- f"Para la validación: "
1260
- f"número de días= {DEFAULT_VALIDITY_DAYS}, "
1261
- f"hora de contacto a la API >= {DEFAULT_VALIDITY_HOURS}, "
1262
- f"minutos de contacto a la API >= {DEFAULT_VALIDITY_MINUTES}."
1263
- )
1264
- if days_difference > DEFAULT_VALIDITY_DAYS and current_time >= time(DEFAULT_VALIDITY_HOURS, DEFAULT_VALIDITY_MINUTES):
1265
- return "obsolete"
1266
- return "updated"
1267
- return "unknown"
1319
+ if not first_date:
1320
+ _LOGGER.debug("Daily status: no hay datos disponibles aún")
1321
+ return "unknown"
1322
+
1323
+ now_local = datetime.now(TIMEZONE)
1324
+ today = now_local.date()
1325
+ current_time = now_local.time()
1326
+ days_difference = (today - first_date).days
1327
+
1328
+ # Replicar lógica del coordinador
1329
+ min_days = DEFAULT_VALIDITY_DAYS if self._limit_prediccio >= PREDICCIO_HIGH_QUOTA_LIMIT else DEFAULT_VALIDITY_DAYS + 1
1330
+ min_time = time(DEFAULT_VALIDITY_HOURS + 1, DEFAULT_VALIDITY_MINUTES) # Margen adicional de +1 hora sobre la hora mínima configurada.
1331
+
1332
+ cond1 = days_difference >= min_days
1333
+ cond2 = current_time >= min_time
1334
+
1335
+ _LOGGER.debug(
1336
+ "Daily status → días: %d (≥%d)=%s | hora: %s (≥%s)=%s → %s",
1337
+ days_difference, min_days, cond1,
1338
+ current_time.strftime("%H:%M"), min_time.strftime("%H:%M"), cond2,
1339
+ "obsolete" if cond1 and cond2 else "updated"
1340
+ )
1341
+
1342
+ if cond1 and cond2:
1343
+ return "obsolete"
1344
+ return "updated"
1268
1345
 
1269
1346
  @property
1270
- def extra_state_attributes(self):
1271
- attributes = super().extra_state_attributes or {}
1347
+ def extra_state_attributes(self) -> dict:
1348
+ attributes: dict = {}
1349
+
1272
1350
  first_date = self._get_first_date()
1273
1351
  if first_date:
1274
1352
  attributes["update_date"] = first_date.isoformat()
1353
+
1354
+ last_update = self._get_last_api_update()
1355
+ if last_update:
1356
+ attributes["data_updatetime"] = last_update.isoformat()
1357
+
1275
1358
  return attributes
1276
-
1359
+
1277
1360
  @property
1278
1361
  def device_info(self) -> DeviceInfo:
1279
- """Return the device info."""
1280
1362
  return DeviceInfo(
1281
1363
  identifiers={(DOMAIN, self._town_id)},
1282
1364
  name=f"Meteocat {self._station_id} {self._town_name}",
@@ -1293,6 +1375,7 @@ class MeteocatUviStatusSensor(CoordinatorEntity[MeteocatUviCoordinator], SensorE
1293
1375
  self._town_name = entry_data["town_name"]
1294
1376
  self._town_id = entry_data["town_id"]
1295
1377
  self._station_id = entry_data["station_id"]
1378
+ self._limit_prediccio = entry_data["limit_prediccio"]
1296
1379
  self._attr_unique_id = f"sensor.{DOMAIN}_{self._town_id}_uvi_status"
1297
1380
  self._attr_entity_category = getattr(description, "entity_category", None)
1298
1381
 
@@ -1326,7 +1409,7 @@ class MeteocatUviStatusSensor(CoordinatorEntity[MeteocatUviCoordinator], SensorE
1326
1409
  def native_value(self) -> str:
1327
1410
  data_dict = self._get_uvi_data_dict()
1328
1411
  if not data_dict:
1329
- _LOGGER.debug("UVI Status: no hay data_dict disponible aún")
1412
+ _LOGGER.debug("UVI Status: no hay datos disponibles aún")
1330
1413
  return "unknown"
1331
1414
 
1332
1415
  first_date = self._get_first_date()
@@ -1339,27 +1422,35 @@ class MeteocatUviStatusSensor(CoordinatorEntity[MeteocatUviCoordinator], SensorE
1339
1422
  current_time = now_local.time()
1340
1423
  days_difference = (today - first_date).days
1341
1424
 
1425
+ # ── Replicar lógica exacta del coordinador ──
1426
+ if self._limit_prediccio >= PREDICCIO_HIGH_QUOTA_LIMIT:
1427
+ min_days = DEFAULT_VALIDITY_DAYS
1428
+ min_time = time(DEFAULT_UVI_HIGH_VALIDITY_HOURS + 1, DEFAULT_UVI_HIGH_VALIDITY_MINUTES) # Margen adicional de +1 hora sobre la hora mínima configurada.
1429
+ quota_level = "ALTA"
1430
+ else:
1431
+ min_days = DEFAULT_VALIDITY_DAYS + 1
1432
+ min_time = time(DEFAULT_UVI_LOW_VALIDITY_HOURS + 1, DEFAULT_UVI_LOW_VALIDITY_MINUTES) # Margen adicional de +1 hora sobre la hora mínima configurada.
1433
+ quota_level = "BAJA"
1434
+
1435
+ cond1 = days_difference >= min_days
1436
+ cond2 = current_time >= min_time
1437
+
1342
1438
  _LOGGER.debug(
1343
- "UVI Status → días diff: %d | hora actual: %s | umbral días: %d | umbral hora: %02d:%02d",
1344
- days_difference,
1345
- current_time.strftime("%H:%M"),
1346
- DEFAULT_VALIDITY_DAYS,
1347
- DEFAULT_VALIDITY_HOURS,
1348
- DEFAULT_VALIDITY_MINUTES,
1439
+ "UVI Status → días: %d (≥%d)=%s | hora: %s (≥%s)=%s | cuota=%d (%s) → %s",
1440
+ days_difference, min_days, cond1,
1441
+ current_time.strftime("%H:%M"), min_time.strftime("%H:%M"), cond2,
1442
+ self._limit_prediccio, quota_level,
1443
+ "obsolete" if cond1 and cond2 else "updated"
1349
1444
  )
1350
1445
 
1351
- if days_difference > DEFAULT_VALIDITY_DAYS and current_time >= time(DEFAULT_VALIDITY_HOURS, DEFAULT_VALIDITY_MINUTES):
1446
+ if cond1 and cond2:
1352
1447
  return "obsolete"
1353
1448
  return "updated"
1354
1449
 
1355
1450
  @property
1356
1451
  def extra_state_attributes(self) -> dict:
1357
- attributes = {}
1358
- data_dict = self._get_uvi_data_dict()
1452
+ attributes: dict = {}
1359
1453
 
1360
- if not data_dict:
1361
- attributes["debug_info"] = "Aún no hay datos en el coordinador"
1362
- return attributes
1363
1454
  # Primera fecha de los datos UVI
1364
1455
  first_date = self._get_first_date()
1365
1456
  if first_date:
@@ -212,6 +212,9 @@
212
212
  "state_attributes": {
213
213
  "update_date": {
214
214
  "name": "Date"
215
+ },
216
+ "data_updatetime": {
217
+ "name": "Updated"
215
218
  }
216
219
  }
217
220
  },
@@ -224,6 +227,9 @@
224
227
  "state_attributes": {
225
228
  "update_date": {
226
229
  "name": "Date"
230
+ },
231
+ "data_updatetime": {
232
+ "name": "Updated"
227
233
  }
228
234
  }
229
235
  },
@@ -212,6 +212,9 @@
212
212
  "state_attributes": {
213
213
  "update_date": {
214
214
  "name": "Data"
215
+ },
216
+ "data_updatetime": {
217
+ "name": "Actualitzat"
215
218
  }
216
219
  }
217
220
  },
@@ -224,6 +227,9 @@
224
227
  "state_attributes": {
225
228
  "update_date": {
226
229
  "name": "Data"
230
+ },
231
+ "data_updatetime": {
232
+ "name": "Actualitzat"
227
233
  }
228
234
  }
229
235
  },
@@ -212,6 +212,9 @@
212
212
  "state_attributes": {
213
213
  "update_date": {
214
214
  "name": "Date"
215
+ },
216
+ "data_updatetime": {
217
+ "name": "Updated"
215
218
  }
216
219
  }
217
220
  },
@@ -224,6 +227,9 @@
224
227
  "state_attributes": {
225
228
  "update_date": {
226
229
  "name": "Date"
230
+ },
231
+ "data_updatetime": {
232
+ "name": "Updated"
227
233
  }
228
234
  }
229
235
  },
@@ -212,6 +212,9 @@
212
212
  "state_attributes": {
213
213
  "update_date": {
214
214
  "name": "Fecha"
215
+ },
216
+ "data_updatetime": {
217
+ "name": "Actualizado"
215
218
  }
216
219
  }
217
220
  },
@@ -224,6 +227,9 @@
224
227
  "state_attributes": {
225
228
  "update_date": {
226
229
  "name": "Fecha"
230
+ },
231
+ "data_updatetime": {
232
+ "name": "Actualizado"
227
233
  }
228
234
  }
229
235
  },
@@ -1 +1 @@
1
- __version__ = "4.0.4"
1
+ __version__ = "4.0.5"
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "meteocat",
3
- "version": "4.0.4",
3
+ "version": "4.0.5",
4
4
  "description": "[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)\r [![Python version compatibility](https://img.shields.io/pypi/pyversions/meteocat)](https://pypi.org/project/meteocat)\r [![pipeline status](https://gitlab.com/figorr/meteocat/badges/master/pipeline.svg)](https://gitlab.com/figorr/meteocat/commits/master)",
5
5
  "main": "index.js",
6
6
  "directories": {
package/pyproject.toml CHANGED
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "meteocat"
3
- version = "4.0.4"
3
+ version = "4.0.5"
4
4
  description = "Script para obtener datos meteorológicos de la API de Meteocat"
5
5
  authors = ["figorr <jdcuartero@yahoo.es>"]
6
6
  license = "Apache-2.0"