meteocat 3.1.0 → 3.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +34 -0
- package/README.md +4 -1
- package/custom_components/meteocat/__init__.py +8 -3
- package/custom_components/meteocat/condition.py +6 -2
- package/custom_components/meteocat/config_flow.py +206 -28
- package/custom_components/meteocat/const.py +14 -2
- package/custom_components/meteocat/coordinator.py +1040 -205
- package/custom_components/meteocat/helpers.py +31 -36
- package/custom_components/meteocat/manifest.json +3 -2
- package/custom_components/meteocat/options_flow.py +15 -5
- package/custom_components/meteocat/sensor.py +366 -4
- package/custom_components/meteocat/strings.json +195 -4
- package/custom_components/meteocat/translations/ca.json +195 -4
- package/custom_components/meteocat/translations/en.json +195 -4
- package/custom_components/meteocat/translations/es.json +195 -4
- package/custom_components/meteocat/version.py +1 -1
- package/filetree.txt +12 -3
- package/hacs.json +1 -1
- package/images/daily_forecast_2_alerts.png +0 -0
- package/images/daily_forecast_no_alerts.png +0 -0
- package/images/diagnostic_sensors.png +0 -0
- package/images/dynamic_sensors.png +0 -0
- package/images/options.png +0 -0
- package/images/regenerate_assets.png +0 -0
- package/images/setup_options.png +0 -0
- package/images/system_options.png +0 -0
- package/package.json +1 -1
- package/pyproject.toml +1 -1
@@ -4,19 +4,31 @@ import json
 import aiofiles
 import logging
 import asyncio
+import random
 import unicodedata
 from pathlib import Path
-from astral.sun import sun
-from astral import LocationInfo
 from datetime import date, datetime, timedelta, timezone, time
 from zoneinfo import ZoneInfo
-from typing import Dict, Any
+from typing import List, Dict, Any, Optional
 
 from homeassistant.core import HomeAssistant, EVENT_HOMEASSISTANT_START
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.components.weather import Forecast
 
+from solarmoonpy.moon import (
+    moon_phase,
+    moon_day,
+    moon_rise_set,
+    illuminated_percentage,
+    moon_distance,
+    moon_angular_diameter,
+    lunation_number,
+    get_moon_phase_name,
+    get_lunation_duration
+)
+from solarmoonpy.location import Location, LocationInfo
+
 from meteocatpy.data import MeteocatStationData
 from meteocatpy.uvi import MeteocatUviData
 from meteocatpy.forecast import MeteocatForecast
@@ -40,6 +52,10 @@ from .const import (
     DEFAULT_VALIDITY_DAYS,
     DEFAULT_VALIDITY_HOURS,
     DEFAULT_VALIDITY_MINUTES,
+    DEFAULT_UVI_LOW_VALIDITY_HOURS,
+    DEFAULT_UVI_LOW_VALIDITY_MINUTES,
+    DEFAULT_UVI_HIGH_VALIDITY_HOURS,
+    DEFAULT_UVI_HIGH_VALIDITY_MINUTES,
     DEFAULT_ALERT_VALIDITY_TIME,
     DEFAULT_QUOTES_VALIDITY_TIME,
     ALERT_VALIDITY_MULTIPLIER_100,
@@ -48,7 +64,8 @@ from .const import (
     ALERT_VALIDITY_MULTIPLIER_DEFAULT,
     DEFAULT_LIGHTNING_VALIDITY_TIME,
     DEFAULT_LIGHTNING_VALIDITY_HOURS,
-    DEFAULT_LIGHTNING_VALIDITY_MINUTES
+    DEFAULT_LIGHTNING_VALIDITY_MINUTES,
+    PREDICCIO_HIGH_QUOTA_LIMIT
 )
 
 _LOGGER = logging.getLogger(__name__)
@@ -71,6 +88,8 @@ DEFAULT_LIGHTNING_UPDATE_INTERVAL = timedelta(minutes=10)
 DEFAULT_LIGHTNING_FILE_UPDATE_INTERVAL = timedelta(minutes=5)
 DEFAULT_SUN_UPDATE_INTERVAL = timedelta(minutes=1)
 DEFAULT_SUN_FILE_UPDATE_INTERVAL = timedelta(seconds=30)
+DEFAULT_MOON_UPDATE_INTERVAL = timedelta(minutes=1)
+DEFAULT_MOON_FILE_UPDATE_INTERVAL = timedelta(seconds=30)
 
 # Definir la zona horaria local
 TIMEZONE = ZoneInfo("Europe/Madrid")
@@ -141,6 +160,50 @@ async def _update_quotes(hass: HomeAssistant, plan_name: str) -> None:
     except Exception as e:
         _LOGGER.exception("Error inesperado al actualizar las cuotas en quotes.json: %s", str(e))
 
+class BaseFileCoordinator(DataUpdateCoordinator):
+    """
+    Coordinador base para leer datos desde archivos JSON.
+
+    Proporciona un pequeño desfase aleatorio antes de cada actualización
+    para evitar colisión entre el coordinador que crea el JSON y el que lo lee.
+
+    Cada coordinador que herede de esta clase debe implementar su propio
+    método `_async_update_data()` para definir la lógica de lectura y validación.
+    """
+
+    def __init__(self, hass, name: str, update_interval: timedelta, min_delay: float = 1.0, max_delay: float = 2.0):
+        """
+        Inicializa el coordinador base.
+
+        Args:
+            hass (HomeAssistant): Instancia de Home Assistant.
+            name (str): Nombre identificativo del coordinador.
+            update_interval (timedelta): Intervalo de actualización.
+            min_delay (float): Límite inferior del desfase aleatorio en segundos (default: 1.0).
+            max_delay (float): Límite superior del desfase aleatorio en segundos (default: 2.0).
+        """
+        super().__init__(hass, _LOGGER, name=name, update_interval=update_interval)
+        self._min_delay = min_delay
+        self._max_delay = max_delay
+        self._first_delay = random.uniform(min_delay, max_delay)
+        self._initialized = False
+
+    async def _apply_random_delay(self):
+        """
+        Aplica un desfase aleatorio leve antes de la lectura.
+
+        - En la primera ejecución: usa un desfase fijo (_first_delay)
+        - En las siguientes: aplica un desfase aleatorio entre 1 y 2 segundos
+        """
+        if not self._initialized:
+            delay = self._first_delay
+            self._initialized = True
+        else:
+            delay = random.uniform(self._min_delay, self._max_delay)
+
+        _LOGGER.debug("%s aplicando desfase aleatorio de %.2fs", self.name, delay)
+        await asyncio.sleep(delay)
+
 class MeteocatSensorCoordinator(DataUpdateCoordinator):
     """Coordinator para manejar la actualización de datos de los sensores."""
 
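
The new `BaseFileCoordinator` exists so that file-reading coordinators never race the coordinators that write the same JSON. A minimal sketch of how a subclass is expected to use it, assuming the class exactly as shown in the hunk above (the subclass name and file path here are hypothetical; `load_json_from_file` is the helper this integration already uses):

```python
from datetime import timedelta

# Hypothetical subclass for illustration; BaseFileCoordinator and
# load_json_from_file are the ones defined in this package.
class ExampleFileCoordinator(BaseFileCoordinator):
    def __init__(self, hass, file_path):
        super().__init__(
            hass,
            name="Example File Coordinator",  # hypothetical name
            update_interval=timedelta(seconds=30),
            min_delay=1.0,
            max_delay=2.0,
        )
        self._file_path = file_path

    async def _async_update_data(self):
        # Stagger the read by 1-2 s so it cannot collide with the
        # coordinator that writes this JSON file.
        await self._apply_random_delay()
        return await load_json_from_file(self._file_path)
```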
@@ -301,6 +364,7 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
     ):
         self.api_key = entry_data["api_key"]
         self.town_id = entry_data["town_id"]
+        self.limit_prediccio = entry_data["limit_prediccio"]
         self.meteocat_uvi_data = MeteocatUviData(self.api_key)
 
         # Ruta persistente en /config/meteocat_files/files
@@ -314,56 +378,83 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
             update_interval=DEFAULT_UVI_UPDATE_INTERVAL,
         )
 
-    async def is_uvi_data_valid(self) -> dict
-        """
-
-
-
-
+    async def is_uvi_data_valid(self) -> Optional[dict]:
+        """Valida datos UVI: misma lógica que predicción, basada en limit_prediccio.
+
+        - Si `limit_prediccio >= 550` → actualiza **el día siguiente** después de las DEFAULT_VALIDITY_HOURS:DEFAULT_VALIDITY_MINUTES.
+        - Si `limit_prediccio < 550` → actualiza **dos días después** después de las DEFAULT_VALIDITY_HOURS:DEFAULT_VALIDITY_MINUTES.
+        """
+        if not self.uvi_file.exists():
+            _LOGGER.debug("Archivo UVI no existe: %s", self.uvi_file)
+            return None
 
-
-
+        try:
+            async with aiofiles.open(self.uvi_file, "r", encoding="utf-8") as f:
+                content = await f.read()
             data = json.loads(content)
 
-            #
+            # Validar estructura básica
             if not isinstance(data, dict) or "uvi" not in data or not isinstance(data["uvi"], list) or not data["uvi"]:
-                _LOGGER.warning("Estructura inválida
+                _LOGGER.warning("Estructura UVI inválida en %s", self.uvi_file)
                 return None
 
-            #
+            # Fecha del primer día
             try:
-
+                first_date_str = data["uvi"][0].get("date")
+                first_date = datetime.strptime(first_date_str, "%Y-%m-%d").date()
             except Exception as exc:
-                _LOGGER.warning("Fecha inválida en %s: %s", self.uvi_file, exc)
+                _LOGGER.warning("Fecha UVI inválida en %s: %s", self.uvi_file, exc)
                 return None
 
-
-
+            # Fecha y hora actual en zona local (Europe/Madrid)
+            now_local = datetime.now(TIMEZONE)
+            today = now_local.date()
+            current_time_local = now_local.time()
+            # Horas para actualización según límite de cuota
+            min_update_time_high = time(DEFAULT_UVI_HIGH_VALIDITY_HOURS, DEFAULT_UVI_HIGH_VALIDITY_MINUTES)  # Hora para cuota alta
+            min_update_time_low = time(DEFAULT_UVI_LOW_VALIDITY_HOURS, DEFAULT_UVI_LOW_VALIDITY_MINUTES)  # Hora para cuota baja
+            # Diferencia en días
+            days_diff = (today - first_date).days
+
+            # === LÓGICA DINÁMICA SEGÚN CUOTA ===
+            if self.limit_prediccio >= PREDICCIO_HIGH_QUOTA_LIMIT:
+                should_update = days_diff >= DEFAULT_VALIDITY_DAYS and current_time_local >= min_update_time_high
+            else:
+                should_update = days_diff > DEFAULT_VALIDITY_DAYS and current_time_local >= min_update_time_low
 
             _LOGGER.debug(
-                "
-
-
+                "[UVI %s] Validación: primer_día=%s, hoy=%s → días=%d, "
+                "cuota=%d (%s), hora=%s ≥ %s → actualizar=%s",
+                self.town_id,
                 first_date,
-
+                today,
+                days_diff,
+                self.limit_prediccio,
+                "ALTA" if self.limit_prediccio >= 550 else "BAJA",
+                current_time_local.strftime("%H:%M"),
+                min_update_time_high.strftime("%H:%M") if self.limit_prediccio >= 550 else min_update_time_low.strftime("%H:%M"),
+                should_update,
             )
 
-            if
-                _LOGGER.info(
+            if should_update:
+                _LOGGER.info(
+                    "Datos UVI obsoletos → llamando API (town=%s, cuota=%d)",
+                    self.town_id, self.limit_prediccio
+                )
                 return None
 
-            _LOGGER.
+            _LOGGER.debug("Datos UVI válidos → usando caché")
             return data
 
         except json.JSONDecodeError:
-            _LOGGER.error("
+            _LOGGER.error("JSON corrupto en %s", self.uvi_file)
             return None
         except Exception as e:
-            _LOGGER.error("Error
+            _LOGGER.error("Error validando UVI: %s", e)
             return None
 
-    async def _async_update_data(self) -> Dict:
-        """Actualiza los datos de UVI desde la API de Meteocat
+    async def _async_update_data(self) -> List[Dict]:
+        """Actualiza los datos de UVI desde la API de Meteocat o caché."""
         try:
             valid_data = await self.is_uvi_data_valid()
             if valid_data:
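
The quota-dependent staleness rule above boils down to a small predicate: high-quota installs refresh data that is at least one day old, low-quota installs wait one extra day. A standalone sketch under the assumption that `DEFAULT_VALIDITY_DAYS` is 1 (the 550 threshold is confirmed by the log message in the hunk; the exact constant values live in const.py):

```python
from datetime import date, time

PREDICCIO_HIGH_QUOTA_LIMIT = 550  # matches the threshold logged above
DEFAULT_VALIDITY_DAYS = 1         # assumed value; defined in const.py

def should_update(first_date: date, today: date, now: time,
                  limit_prediccio: int, min_update: time) -> bool:
    days_diff = (today - first_date).days
    if limit_prediccio >= PREDICCIO_HIGH_QUOTA_LIMIT:
        # High quota: refresh once the cached data is a day old.
        return days_diff >= DEFAULT_VALIDITY_DAYS and now >= min_update
    # Low quota: wait an extra day before spending an API call.
    return days_diff > DEFAULT_VALIDITY_DAYS and now >= min_update

# Cached data from Jan 1, checked Jan 2 at 09:00 with an 08:00 gate:
assert should_update(date(2025, 1, 1), date(2025, 1, 2), time(9, 0), 600, time(8, 0))
assert not should_update(date(2025, 1, 1), date(2025, 1, 2), time(9, 0), 200, time(8, 0))
```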
@@ -406,10 +497,9 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
             _LOGGER.warning("Usando datos en caché para la ciudad %s.", self.town_id)
             return cached_data.get("uvi", [])
         _LOGGER.error("No se pudo obtener datos UVI ni cargar caché.")
-        return
+        return []
 
-
-class MeteocatUviFileCoordinator(DataUpdateCoordinator):
+class MeteocatUviFileCoordinator(BaseFileCoordinator):
     """Coordinator to read and process UV data from a file."""
 
     def __init__(
@@ -421,9 +511,10 @@ class MeteocatUviFileCoordinator(DataUpdateCoordinator):
 
         super().__init__(
             hass,
-            _LOGGER,
             name=f"{DOMAIN} Uvi File Coordinator",
             update_interval=DEFAULT_UVI_SENSOR_UPDATE_INTERVAL,
+            min_delay=1.0,  # Rango predeterminado
+            max_delay=2.0,  # Rango predeterminado
         )
 
         # Ruta persistente en /config/meteocat_files/files
@@ -432,6 +523,9 @@ class MeteocatUviFileCoordinator(DataUpdateCoordinator):
 
     async def _async_update_data(self):
         """Read and process UV data for the current hour from the file asynchronously."""
+        # 🔸 Añadimos un pequeño desfase aleatorio (1 a 2 segundos) basados en el BaseFileCoordinator
+        await self._apply_random_delay()
+
         try:
             async with aiofiles.open(self._file_path, "r", encoding="utf-8") as file:
                 raw = await file.read()
@@ -494,6 +588,7 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
         self.station_id = entry_data["station_id"]
         self.variable_name = entry_data["variable_name"]
         self.variable_id = entry_data["variable_id"]
+        self.limit_prediccio = entry_data["limit_prediccio"]  # Límite de llamada a la API para PREDICCIONES
         self.meteocat_forecast = MeteocatForecast(self.api_key)
 
         # Ruta persistente en /config/meteocat_files/files
@@ -507,52 +602,85 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
             name=f"{DOMAIN} Entity Coordinator",
             update_interval=DEFAULT_ENTITY_UPDATE_INTERVAL,
         )
-
+
+    # --------------------------------------------------------------------- #
+    # VALIDACIÓN DINÁMICA DE DATOS DE PREDICCIÓN
+    # --------------------------------------------------------------------- #
     async def validate_forecast_data(self, file_path: Path) -> dict:
-        """Valida y retorna datos de predicción si son válidos.
+        """Valida y retorna datos de predicción si son válidos.
+
+        - Si `limit_prediccio >= 550` → actualiza **el día siguiente** después de las DEFAULT_VALIDITY_HOURS:DEFAULT_VALIDITY_MINUTES.
+        - Si `limit_prediccio < 550` → actualiza **dos días después** después de las DEFAULT_VALIDITY_HOURS:DEFAULT_VALIDITY_MINUTES.
+        """
         if not file_path.exists():
-            _LOGGER.
+            _LOGGER.warning("El archivo %s no existe. Se considerará inválido.", file_path)
             return None
         try:
             async with aiofiles.open(file_path, "r", encoding="utf-8") as f:
                 content = await f.read()
             data = json.loads(content)
 
-            #
-
+            # Fecha del primer día de predicción (solo fecha)
+            first_date_str = data["dies"][0]["data"].rstrip("Z")
+            first_date = datetime.fromisoformat(first_date_str).date()
             today = datetime.now(timezone.utc).date()
-            current_time = datetime.now(timezone.utc).time()
 
-            #
-
-
-
-
+            # Hora actual en zona local (Europe/Madrid)
+            current_time_local = datetime.now(TIMEZONE).time()
+            min_update_time = time(DEFAULT_VALIDITY_HOURS, DEFAULT_VALIDITY_MINUTES)
+
+            days_diff = (today - first_date).days
+
+            # -----------------------------------------------------------------
+            # Lógica según cuota
+            # -----------------------------------------------------------------
+            if self.limit_prediccio >= PREDICCIO_HIGH_QUOTA_LIMIT:
+                # Cuota alta → actualiza cuando los datos son de ayer (o antes) + hora OK
+                should_update = days_diff >= DEFAULT_VALIDITY_DAYS and current_time_local >= min_update_time
+            else:
+                # Cuota baja → actualiza solo cuando los datos son de anteayer + hora OK
+                should_update = days_diff > DEFAULT_VALIDITY_DAYS and current_time_local >= min_update_time
+
+            # -----------------------------------------------------------------
+            # Logs detallados
+            # -----------------------------------------------------------------
+            _LOGGER.debug(
+                "[%s] Validación: primer_día=%s, hoy=%s → días=%d, "
+                "cuota=%d (%s), hora_local=%s ≥ %s → actualizar=%s",
+                file_path.name,
                 first_date,
-
+                today,
+                days_diff,
+                self.limit_prediccio,
+                "ALTA" if self.limit_prediccio >= 550 else "BAJA",
+                current_time_local.strftime("%H:%M"),
+                min_update_time.strftime("%H:%M"),
+                should_update,
             )
 
-
-
-
-
-                _LOGGER.info(
-                    "Los datos en %s son antiguos. Se procederá a llamar a la API.",
-                    file_path,
+            if should_update:
+                _LOGGER.debug(
+                    "Datos obsoletos o actualizables → llamando API (%s, cuota=%d)",
+                    file_path.name, self.limit_prediccio
                 )
-                return None
-
+                return None  # → forzar actualización
+
+            _LOGGER.debug("Datos válidos en %s → usando caché", file_path.name)
             return data
+
         except Exception as e:
-            _LOGGER.warning("Error validando
+            _LOGGER.warning("Error validando %s: %s", file_path, e)
             return None
 
+    # --------------------------------------------------------------------- #
+    # OBTENCIÓN Y GUARDADO DE DATOS DESDE LA API
+    # --------------------------------------------------------------------- #
     async def _fetch_and_save_data(self, api_method, file_path: Path) -> dict:
         """Obtiene datos de la API y los guarda en un archivo JSON."""
         try:
             data = await asyncio.wait_for(api_method(self.town_id), timeout=30)
 
-            # Procesar
+            # Procesar precipitación negativa antes de guardar los datos
             for day in data.get("dies", []):
                 for var, details in day.get("variables", {}).items():
                     if (
@@ -564,26 +692,30 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
 
             await save_json_to_file(data, file_path)
 
-            # Actualizar cuotas dependiendo del tipo de predicción
+            # Actualizar cuotas (dependiendo del tipo de predicción horaria/diaria)
             if api_method.__name__ in ("get_prediccion_horaria", "get_prediccion_diaria"):
                 await _update_quotes(self.hass, "Prediccio")
 
             return data
+
         except Exception as err:
             _LOGGER.error(f"Error al obtener datos de la API para {file_path}: {err}")
             raise
 
-
+    # --------------------------------------------------------------------- #
+    # ACTUALIZACIÓN PRINCIPAL
+    # --------------------------------------------------------------------- #
+    async def _async_update_data(self) -> Dict[str, Any]:
         """Actualiza los datos de predicción horaria y diaria."""
         try:
-            # Validar o actualizar datos horarios
+            # --- Validar o actualizar datos horarios ---
             hourly_data = await self.validate_forecast_data(self.hourly_file)
             if not hourly_data:
                 hourly_data = await self._fetch_and_save_data(
                     self.meteocat_forecast.get_prediccion_horaria, self.hourly_file
                 )
 
-            # Validar o actualizar datos diarios
+            # --- Validar o actualizar datos diarios ---
             daily_data = await self.validate_forecast_data(self.daily_file)
             if not daily_data:
                 daily_data = await self._fetch_and_save_data(
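
The fetch helper above bounds every Meteocat call with a 30-second timeout before persisting the response. A trimmed sketch of that pattern (`save_json_to_file` is this integration's helper; `fetch` stands in for a meteocatpy method such as `get_prediccion_diaria`):

```python
import asyncio

async def fetch_and_save(fetch, town_id: str, file_path):
    # Bound the API call so a hung request cannot stall the coordinator.
    data = await asyncio.wait_for(fetch(town_id), timeout=30)
    await save_json_to_file(data, file_path)  # helper defined in this package
    return data
```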
@@ -592,6 +724,9 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
 
             return {"hourly": hourly_data, "daily": daily_data}
 
+        # -----------------------------------------------------------------
+        # Manejo de errores de API
+        # -----------------------------------------------------------------
         except asyncio.TimeoutError as err:
             _LOGGER.warning("Tiempo de espera agotado al obtener datos de predicción.")
             raise ConfigEntryNotReady from err
@@ -619,7 +754,9 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
         except Exception as err:
             _LOGGER.exception("Error inesperado al obtener datos de predicción: %s", err)
 
-        #
+        # -----------------------------------------------------------------
+        # Fallback: usar caché local si todo falla
+        # -----------------------------------------------------------------
         hourly_cache = await load_json_from_file(self.hourly_file) or {}
         daily_cache = await load_json_from_file(self.daily_file) or {}
 
@@ -650,6 +787,23 @@ class HourlyForecastCoordinator(DataUpdateCoordinator):
         self.station_name = entry_data["station_name"]
         self.station_id = entry_data["station_id"]
 
+        # === NUEVO: ubicación solar usando solarmoonpy ===
+        latitude = entry_data.get("latitude", hass.config.latitude)
+        longitude = entry_data.get("longitude", hass.config.longitude)
+        altitude = entry_data.get("altitude", hass.config.elevation or 0.0)
+        timezone_str = hass.config.time_zone or "Europe/Madrid"
+
+        self.location = Location(
+            LocationInfo(
+                name=self.town_name,
+                region="Spain",
+                timezone=timezone_str,
+                latitude=latitude,
+                longitude=longitude,
+                elevation=altitude,
+            )
+        )
+
         # Ruta persistente en /config/meteocat_files/files
         files_folder = get_storage_dir(hass, "files")
         self.file_path = files_folder / f"forecast_{self.town_id.lower()}_hourly_data.json"
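
Both forecast coordinators (and `MeteocatConditionCoordinator` below) now build the same solarmoonpy location object in place of the removed astral imports. A minimal sketch of that setup with placeholder coordinates, using only the constructor arguments and the two methods that appear in this diff:

```python
from solarmoonpy.location import Location, LocationInfo

# Placeholder coordinates for illustration only.
location = Location(
    LocationInfo(
        name="Barcelona",
        region="Spain",
        timezone="Europe/Madrid",
        latitude=41.39,
        longitude=2.16,
        elevation=12.0,
    )
)

# The coordinators in this diff call exactly two methods on it:
#   location.sun_events(date=..., local=True)  -> dict of solar events
#   location.sun_position(dt=..., local=True)  -> {"elevation", "azimuth", "rising", ...}
```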
@@ -723,7 +877,7 @@ class HourlyForecastCoordinator(DataUpdateCoordinator):
             condition_data = get_condition_from_statcel(
                 codi_estatcel=condition_code,
                 current_time=forecast_time_local,
-
+                location=self.location,
                 is_hourly=True
             )
             condition = condition_data["condition"]
@@ -780,7 +934,7 @@ class HourlyForecastCoordinator(DataUpdateCoordinator):
                 _LOGGER.warning("Error procesando '%s' para %s: %s", variable_name, valor, e)
                 continue
 
-        _LOGGER.
+        _LOGGER.warning("No se encontró un valor válido para '%s' en %s.", variable_name, target_time)
         return None
 
 class DailyForecastCoordinator(DataUpdateCoordinator):
@@ -926,9 +1080,27 @@ class MeteocatConditionCoordinator(DataUpdateCoordinator):
             hass (HomeAssistant): Instance of Home Assistant.
             entry_data (dict): Configuration data from core.config_entries.
         """
+        self.town_name = entry_data["town_name"]
         self.town_id = entry_data["town_id"]  # Municipality ID
         self.hass = hass
 
+        # === NUEVO: ubicación solar usando solarmoonpy ===
+        latitude = entry_data.get("latitude", hass.config.latitude)
+        longitude = entry_data.get("longitude", hass.config.longitude)
+        altitude = entry_data.get("altitude", hass.config.elevation or 0.0)
+        timezone_str = hass.config.time_zone or "Europe/Madrid"
+
+        self.location = Location(
+            LocationInfo(
+                name=self.town_name,
+                region="Spain",
+                timezone=timezone_str,
+                latitude=latitude,
+                longitude=longitude,
+                elevation=altitude,
+            )
+        )
+
         super().__init__(
             hass,
             _LOGGER,
@@ -970,7 +1142,7 @@ class MeteocatConditionCoordinator(DataUpdateCoordinator):
         condition = get_condition_from_statcel(
             codi_estatcel,
             current_datetime,
-            self.
+            location=self.location,
             is_hourly=True,
         )
         condition.update({
@@ -1415,7 +1587,7 @@ class MeteocatAlertsRegionCoordinator(DataUpdateCoordinator):
     async def _async_update_data(self) -> Dict[str, Any]:
         """Carga y procesa los datos de alertas desde el archivo JSON."""
         data = await load_json_from_file(self._file_path)
-        _LOGGER.
+        _LOGGER.debug("Datos cargados desde %s: %s", self._file_path, data)  # Log de la carga de datos
 
         if not data:
             _LOGGER.error("No se pudo cargar el archivo JSON de alertas en %s.", self._file_path)
@@ -1426,7 +1598,7 @@ class MeteocatAlertsRegionCoordinator(DataUpdateCoordinator):
     def _process_alerts_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
         """Procesa los datos de alertas y devuelve un diccionario filtrado por región."""
         if not data.get("dades"):
-            _LOGGER.
+            _LOGGER.debug("No hay alertas activas para la región %s.", self.region_id)
             return {
                 "estado": "Tancat",
                 "actualizado": data.get("actualitzat", {}).get("dataUpdate", ""),
@@ -1661,7 +1833,7 @@ class MeteocatQuotesCoordinator(DataUpdateCoordinator):
         _LOGGER.error("No se pudo obtener datos actualizados ni cargar datos en caché.")
         return None
 
-class MeteocatQuotesFileCoordinator(
+class MeteocatQuotesFileCoordinator(BaseFileCoordinator):
     """Coordinator para manejar la actualización de las cuotas desde quotes.json."""
 
     def __init__(
@@ -1681,9 +1853,10 @@ class MeteocatQuotesFileCoordinator(DataUpdateCoordinator):
 
         super().__init__(
             hass,
-
-            name="Meteocat Quotes File Coordinator",
+            name=f"{DOMAIN} Quotes File Coordinator",
             update_interval=DEFAULT_QUOTES_FILE_UPDATE_INTERVAL,
+            min_delay=1.0,  # Rango predeterminado
+            max_delay=2.0,  # Rango predeterminado
         )
         # Ruta persistente en /config/meteocat_files/files
         files_folder = get_storage_dir(hass, "files")
@@ -1691,6 +1864,9 @@ class MeteocatQuotesFileCoordinator(DataUpdateCoordinator):
 
     async def _async_update_data(self) -> Dict[str, Any]:
         """Carga los datos de quotes.json y devuelve el estado de las cuotas."""
+        # 🔸 Añadimos un pequeño desfase aleatorio (1 a 2 segundos) basados en el BaseFileCoordinator
+        await self._apply_random_delay()
+
         existing_data = await load_json_from_file(self.quotes_file)
 
         if not existing_data:
@@ -1828,14 +2004,10 @@ class MeteocatLightningCoordinator(DataUpdateCoordinator):
         _LOGGER.error("No se pudo obtener datos actualizados ni cargar datos en caché.")
         return None
 
-class MeteocatLightningFileCoordinator(
+class MeteocatLightningFileCoordinator(BaseFileCoordinator):
     """Coordinator para manejar la actualización de los datos de rayos desde lightning_{region_id}.json."""
 
-    def __init__(
-        self,
-        hass: HomeAssistant,
-        entry_data: dict,
-    ):
+    def __init__(self, hass: HomeAssistant, entry_data: dict):
         """
         Inicializa el coordinador de rayos desde archivo.
 
@@ -1850,64 +2022,89 @@ class MeteocatLightningFileCoordinator(DataUpdateCoordinator):
         files_folder = get_storage_dir(hass, "files")
         self.lightning_file = files_folder / f"lightning_{self.region_id}.json"
 
+        # ✅ Marca interna para recordar si ya se hizo reset con una fecha concreta
+        self._last_reset_date: Optional[date] = None
+
         super().__init__(
             hass,
-
-            name="Meteocat Lightning File Coordinator",
+            name=f"{DOMAIN} Lightning File Coordinator",
             update_interval=DEFAULT_LIGHTNING_FILE_UPDATE_INTERVAL,
+            min_delay=1.0,  # Rango predeterminado
+            max_delay=2.0,  # Rango predeterminado
         )
 
     async def _async_update_data(self) -> Dict[str, Any]:
         """Carga los datos de rayos desde el archivo JSON y procesa la información."""
+        # 🔸 Añadimos un pequeño desfase aleatorio (1 a 2 segundos) basados en el BaseFileCoordinator
+        await self._apply_random_delay()
+
         existing_data = await load_json_from_file(self.lightning_file)
 
         if not existing_data:
             _LOGGER.warning("No se encontraron datos en %s.", self.lightning_file)
-            return
-
-
-
-
+            return self._empty_state()
+
+        # Obtener fecha de actualización del JSON
+        update_date_str = existing_data.get("actualitzat", {}).get("dataUpdate", "")
+        if not update_date_str:
+            _LOGGER.warning("El archivo %s no contiene campo 'dataUpdate'.", self.lightning_file)
+            return self._empty_state()
+
+        try:
+            update_date = datetime.fromisoformat(update_date_str).astimezone(TIMEZONE)
+        except ValueError:
+            _LOGGER.warning("Formato de fecha inválido en %s: %s", self.lightning_file, update_date_str)
+            return self._empty_state()
 
-        # Convertir la cadena de fecha a un objeto datetime y ajustar a la zona horaria local
-        update_date = datetime.fromisoformat(existing_data.get("actualitzat", {}).get("dataUpdate", ""))
-        update_date = update_date.astimezone(TIMEZONE)
         now = datetime.now(TIMEZONE)
 
-
-
-
-
-
-
-
+        # 📆 Si los datos son de otro día:
+        if update_date.date() != now.date():
+            # Si ya hicimos reset para esta fecha, no volver a procesar el JSON
+            if self._last_reset_date == update_date.date():
+                _LOGGER.debug(
+                    "Archivo de rayos aún sin actualizar (última: %s, hoy: %s). Manteniendo datos a cero.",
+                    update_date.date(),
+                    now.date(),
+                )
+                return self._empty_state()
+
+            # Primer reset detectado para esta fecha
+            _LOGGER.debug("Los datos de rayos son de un día diferente. Reiniciando valores a cero.")
+            self._last_reset_date = update_date.date()
+            return self._empty_state()
+
+        # 📅 Si los datos son actuales:
+        self._last_reset_date = None  # borrar marca de reset
+        region_data = self._process_region_data(existing_data.get("dades", []))
+        town_data = self._process_town_data(existing_data.get("dades", []))
 
         return {
             "actualizado": update_date,
             "region": region_data,
-            "town": town_data
+            "town": town_data,
         }
 
     def _process_region_data(self, data_list):
         """Suma los tipos de descargas para toda la región."""
         region_counts = {
-            "cc": 0,
-            "cg-": 0,
+            "cc": 0,
+            "cg-": 0,
             "cg+": 0
         }
         for town in data_list:
             for discharge in town.get("descarregues", []):
                 if discharge["tipus"] in region_counts:
                     region_counts[discharge["tipus"]] += discharge["recompte"]
-
+
         region_counts["total"] = sum(region_counts.values())
         return region_counts
 
     def _process_town_data(self, data_list):
         """Encuentra y suma los tipos de descargas para un municipio específico."""
         town_counts = {
-            "cc": 0,
-            "cg-": 0,
+            "cc": 0,
+            "cg-": 0,
             "cg+": 0
         }
         for town in data_list:
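
The `_last_reset_date` bookkeeping above guarantees the lightning counters are zeroed exactly once per stale date, instead of reprocessing the outdated JSON on every 5-minute cycle. A condensed, pure-Python sketch of that guard (empty dicts stand in for `_empty_state()`):

```python
from datetime import date
from typing import Optional

class DayChangeGuard:
    """Condensed illustration of the _last_reset_date logic above."""

    def __init__(self) -> None:
        self._last_reset_date: Optional[date] = None

    def resolve(self, update_date: date, today: date, data: dict) -> dict:
        if update_date != today:
            if self._last_reset_date == update_date:
                return {}                        # already reset for this date
            self._last_reset_date = update_date  # first reset for this date
            return {}
        self._last_reset_date = None             # data is current again
        return data

guard = DayChangeGuard()
stale = guard.resolve(date(2025, 1, 1), date(2025, 1, 2), {"cc": 3})
fresh = guard.resolve(date(2025, 1, 2), date(2025, 1, 2), {"cc": 3})
assert stale == {} and fresh == {"cc": 3}
```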
@@ -1916,7 +2113,7 @@ class MeteocatLightningFileCoordinator(DataUpdateCoordinator):
                 if discharge["tipus"] in town_counts:
                     town_counts[discharge["tipus"]] += discharge["recompte"]
                 break  # Solo necesitamos datos de un municipio
-
+
         town_counts["total"] = sum(town_counts.values())
         return town_counts
 
@@ -1926,38 +2123,40 @@ class MeteocatLightningFileCoordinator(DataUpdateCoordinator):
             "cc": 0,
             "cg-": 0,
             "cg+": 0,
-            "total": 0
+            "total": 0,
+        }
+
+    def _empty_state(self) -> Dict[str, Any]:
+        """Devuelve un estado vacío (valores a cero) para los sensores."""
+        now_iso = datetime.now(TIMEZONE).isoformat()
+        empty = self._reset_data()
+        return {
+            "actualizado": now_iso,
+            "region": empty,
+            "town": empty,
         }
 
 class MeteocatSunCoordinator(DataUpdateCoordinator):
-    """Coordinator para manejar la actualización de los datos de sol calculados con
+    """Coordinator para manejar la actualización de los datos de sol calculados con sun.py."""
 
-    def __init__(
-
-        hass: HomeAssistant,
-        entry_data: dict,
-    ):
-        """
-        Inicializa el coordinador de sol de Meteocat.
-
-        Args:
-            hass (HomeAssistant): Instancia de Home Assistant.
-            entry_data (dict): Datos de configuración obtenidos de core.config_entries.
-        """
+    def __init__(self, hass: HomeAssistant, entry_data: dict):
+        """Inicializa el coordinador de sol de Meteocat."""
         self.latitude = entry_data.get("latitude")
         self.longitude = entry_data.get("longitude")
+        self.elevation = entry_data.get("altitude", 0.0)
         self.timezone_str = hass.config.time_zone or "Europe/Madrid"
         self.town_id = entry_data.get("town_id")
-
-
+
+        # Crear ubicación para cálculos solares
+        self.location = Location(LocationInfo(
             name=entry_data.get("town_name", "Municipio"),
             region="Spain",
             timezone=self.timezone_str,
             latitude=self.latitude,
             longitude=self.longitude,
-
+            elevation=self.elevation,
+        ))
 
-        # Ruta persistente en /config/meteocat_files/files
         files_folder = get_storage_dir(hass, "files")
         self.sun_file = files_folder / f"sun_{self.town_id.lower()}_data.json"
 
@@ -1965,90 +2164,289 @@ class MeteocatSunCoordinator(DataUpdateCoordinator):
             hass,
             _LOGGER,
             name=f"{DOMAIN} Sun Coordinator",
-            update_interval=DEFAULT_SUN_UPDATE_INTERVAL,
+            update_interval=DEFAULT_SUN_UPDATE_INTERVAL,
         )
 
-    async def _async_update_data(self) ->
-        """
-
-
+    async def _async_update_data(self) -> dict:
+        """Comprueba si es necesario actualizar los datos solares (evitando escrituras innecesarias)."""
+        _LOGGER.debug("☀️ Comprobando si es necesario actualizar los datos solares...")
         now = datetime.now(tz=ZoneInfo(self.timezone_str))
+        today = now.date()
+        tomorrow = today + timedelta(days=1)
+
+        # === 1️⃣ Calcular eventos solares esperados ===
+        events_today = self.location.sun_events(date=today, local=True)
+        events_tomorrow = self.location.sun_events(date=tomorrow, local=True)
+
+        def get_expected_sun_data():
+            """Selecciona si usar los eventos de hoy o mañana según la hora actual."""
+            expected = {}
+            events = [
+                "dawn_astronomical", "dawn_nautical", "dawn_civil",
+                "sunrise", "noon", "sunset",
+                "dusk_civil", "dusk_nautical", "dusk_astronomical",
+                "midnight"
+            ]
+            for event in events:
+                event_time = events_today.get(event)
+                if event_time and now >= event_time:
+                    expected[event] = events_tomorrow.get(event)
+                    _LOGGER.debug("☀️ %s ya pasó (%s), usando valor de mañana: %s",
+                                  event, event_time, expected[event])
+                else:
+                    expected[event] = event_time
+            expected["daylight_duration"] = (
+                events_tomorrow["daylight_duration"]
+                if expected["sunset"] == events_tomorrow["sunset"]
+                else events_today["daylight_duration"]
+            )
+            return expected
 
+        expected = get_expected_sun_data()
+
+        # === 2️⃣ Cargar datos existentes del archivo ===
+        existing_data = await load_json_from_file(self.sun_file) or {}
         if not existing_data or "dades" not in existing_data or not existing_data["dades"]:
-
+            _LOGGER.debug("☀️ No hay datos solares previos. Generando nuevos datos.")
+            return await self._calculate_and_save_new_data(**expected)
 
-
-        if not last_update_str:
-            return await self._calculate_and_save_new_data()
+        dades = existing_data["dades"][0]
 
-
+        try:
+            saved = {k: (datetime.fromisoformat(v) if k != "daylight_duration" else v)
+                     for k, v in dades.items() if k in expected}
+        except Exception as e:
+            _LOGGER.warning("☀️ Error al leer el archivo solar: %s", e)
+            return await self._calculate_and_save_new_data(**expected)
 
-
-
-
+        # === 3️⃣ Detectar cambios en eventos solares ===
+        changed_events = {
+            key: expected[key] for key in expected
+            if saved.get(key) != expected[key]
+        }
 
-        #
-
-
-
-
-
+        # === 4️⃣ Calcular posición solar actual y futura (una sola vez) ===
+        current_pos = self.location.sun_position(dt=now, local=True)
+        future_time = now + timedelta(minutes=10)
+        future_pos = self.location.sun_position(dt=future_time, local=True)
+
+        # === 5️⃣ Función auxiliar: umbral dinámico de elevación ===
+        def get_dynamic_elevation_threshold() -> float:
+            sunrise = saved.get("sunrise")
+            sunset = saved.get("sunset")
+            noon = saved.get("noon")
+            if sunrise and sunset and noon:
+                sunrise_window = (sunrise - timedelta(hours=1), sunrise + timedelta(hours=1))
+                sunset_window = (sunset - timedelta(hours=1), sunset + timedelta(hours=1))
+                noon_window = (noon - timedelta(hours=2), noon + timedelta(hours=2))
+                if sunrise_window[0] <= now <= sunrise_window[1] or sunset_window[0] <= now <= sunset_window[1]:
+                    return 0.3  # Mayor sensibilidad cerca del horizonte
+                elif noon_window[0] <= now <= noon_window[1]:
+                    return 1.0  # Menor sensibilidad cerca del mediodía
+            return 0.5  # Valor base para el resto del día
+
+        # === 6️⃣ Función auxiliar: validez dinámica con límites ===
+        def get_dynamic_validity_interval(current_elev: float, future_elev: float) -> timedelta:
+            elevation_change = abs(future_elev - current_elev)
+            rate_of_change = elevation_change / 10  # °/min
+            _LOGGER.debug("☀️ Tasa de cambio de elevación: %.4f°/min", rate_of_change)
+
+            if rate_of_change > 0.05:  # Amanecer/atardecer: cambio rápido
+                validity = timedelta(minutes=30)
+            elif rate_of_change > 0.02:  # Cambio moderado
+                validity = timedelta(minutes=60)
+            else:  # Noche o mediodía: cambio lento
+                validity = timedelta(minutes=120)
+
+            # Limitar entre 15 y 180 minutos
+            return max(timedelta(minutes=15), min(validity, timedelta(minutes=180)))
+
+        SUN_POSITION_VALIDITY = get_dynamic_validity_interval(
+            current_pos["elevation"], future_pos["elevation"]
+        )
+
+        # === 7️⃣ Evaluar necesidad de actualización ===
+        position_needs_update = False
+        last_pos_update_str = dades.get("sun_position_updated")
+
+        if last_pos_update_str:
+            try:
+                last_pos_update = datetime.fromisoformat(last_pos_update_str)
+                if last_pos_update.tzinfo is None:
+                    last_pos_update = last_pos_update.replace(tzinfo=ZoneInfo(self.timezone_str))
+
+                time_expired = (now - last_pos_update) > SUN_POSITION_VALIDITY
+                elevation_threshold = get_dynamic_elevation_threshold()
+
+                last_elev = dades.get("sun_elevation")
+                if last_elev is not None:
+                    elev_changed = abs(current_pos["elevation"] - float(last_elev)) > elevation_threshold
+                else:
+                    elev_changed = True
+
+                # ✅ Ambas condiciones deben cumplirse
+                position_needs_update = time_expired and elev_changed or bool(changed_events)
 
-
-
+                _LOGGER.debug(
+                    "☀️ Verificación solar -> expirado=%s (validez=%s), elevación_cambió=%s (umbral=%.2f°), eventos_cambiados=%s, actualizar=%s",
+                    time_expired, SUN_POSITION_VALIDITY, elev_changed, elevation_threshold, bool(changed_events), position_needs_update
+                )
+            except Exception as e:
+                _LOGGER.warning("☀️ Error al verificar posición solar previa: %s", e)
+                position_needs_update = True
+        else:
+            position_needs_update = True
+
+        # === 8️⃣ Si nada cambió, no se actualiza ===
+        if not changed_events and not position_needs_update:
+            _LOGGER.debug("☀️ Datos solares actuales coinciden con lo esperado. No se actualiza.")
+            return existing_data
+
+        # === 9️⃣ Actualizar si es necesario ===
+        sun_pos = current_pos if position_needs_update else None
+        if sun_pos:
+            _LOGGER.debug("Posición solar actualizada: elev=%.2f°, azim=%.2f°, rising=%s",
+                          sun_pos["elevation"], sun_pos["azimuth"], sun_pos["rising"])
+
+        updated_data = saved.copy()
+        updated_data.update(changed_events)
+
+        # 🟡 Si hay eventos solares nuevos (por ejemplo, cambio de sunset → mañana),
+        # forzar cálculo inmediato de la posición solar para evitar huecos.
+        if changed_events and sun_pos is None:
+            sun_pos = self.location.sun_position(dt=now, local=True)
+            _LOGGER.debug("☀️ Posición solar recalculada tras cambio de eventos: elev=%.2f°, azim=%.2f°, rising=%s",
+                          sun_pos["elevation"], sun_pos["azimuth"], sun_pos["rising"])
+
+        _LOGGER.debug("☀️ Datos solares han cambiado. Actualizando: %s", changed_events)
+        return await self._calculate_and_save_new_data(
+            **updated_data,
+            sun_pos=sun_pos,
+            now=now
+        )
+
+    async def _calculate_and_save_new_data(
+        self,
+        dawn_civil: Optional[datetime] = None,
+        dawn_nautical: Optional[datetime] = None,
+        dawn_astronomical: Optional[datetime] = None,
+        sunrise: Optional[datetime] = None,
+        noon: Optional[datetime] = None,
+        sunset: Optional[datetime] = None,
+        dusk_civil: Optional[datetime] = None,
+        dusk_nautical: Optional[datetime] = None,
+        dusk_astronomical: Optional[datetime] = None,
+        midnight: Optional[datetime] = None,
+        daylight_duration: Optional[float] = None,
+        sun_pos: Optional[dict] = None,
+        now: Optional[datetime] = None,
+    ) -> dict:
+        """Guarda los datos solares pasados, usando valores existentes si no se proporcionan."""
         try:
             now = datetime.now(tz=ZoneInfo(self.timezone_str))
             today = now.date()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+            # Cargar datos existentes para preservar valores no cambiados
+            existing_data = await load_json_from_file(self.sun_file) or {}
+            existing_dades = existing_data.get("dades", [{}])[0] if existing_data else {}
+
+            # Convertir valores existentes a tipos adecuados
+            try:
+                saved = {
+                    "dawn_civil": datetime.fromisoformat(existing_dades["dawn_civil"]) if existing_dades.get("dawn_civil") else None,
+                    "dawn_nautical": datetime.fromisoformat(existing_dades["dawn_nautical"]) if existing_dades.get("dawn_nautical") else None,
+                    "dawn_astronomical": datetime.fromisoformat(existing_dades["dawn_astronomical"]) if existing_dades.get("dawn_astronomical") else None,
+                    "sunrise": datetime.fromisoformat(existing_dades["sunrise"]) if existing_dades.get("sunrise") else None,
+                    "noon": datetime.fromisoformat(existing_dades["noon"]) if existing_dades.get("noon") else None,
+                    "sunset": datetime.fromisoformat(existing_dades["sunset"]) if existing_dades.get("sunset") else None,
+                    "dusk_civil": datetime.fromisoformat(existing_dades["dusk_civil"]) if existing_dades.get("dusk_civil") else None,
+                    "dusk_nautical": datetime.fromisoformat(existing_dades["dusk_nautical"]) if existing_dades.get("dusk_nautical") else None,
+                    "dusk_astronomical": datetime.fromisoformat(existing_dades["dusk_astronomical"]) if existing_dades.get("dusk_astronomical") else None,
+                    "midnight": datetime.fromisoformat(existing_dades["midnight"]) if existing_dades.get("midnight") else None,
+                    "daylight_duration": existing_dades.get("daylight_duration"),
+                }
+            except Exception as e:
+                _LOGGER.warning("☀️ Error al leer datos existentes, recalculando todo: %s", e)
+                saved = {}
+
+            # Si no se proporcionan valores, usar los existentes o calcularlos
+            if not any([dawn_civil, dawn_nautical, dawn_astronomical, sunrise, noon, sunset, dusk_civil, dusk_nautical, dusk_astronomical, midnight]):
+                events = self.location.sun_events(date=today, local=True)
+                dawn_civil = events["dawn_civil"]
+                dawn_nautical = events["dawn_nautical"]
+                dawn_astronomical = events["dawn_astronomical"]
+                sunrise = events["sunrise"]
+                noon = events["noon"]
+                sunset = events["sunset"]
+                dusk_civil = events["dusk_civil"]
+                dusk_nautical = events["dusk_nautical"]
+                dusk_astronomical = events["dusk_astronomical"]
+                midnight = events["midnight"]
+                daylight_duration = events["daylight_duration"]
+            else:
+                # Usar valores proporcionados, o los existentes si no se proporcionan
+                dawn_civil = dawn_civil if dawn_civil is not None else saved.get("dawn_civil")
+                dawn_nautical = dawn_nautical if dawn_nautical is not None else saved.get("dawn_nautical")
+                dawn_astronomical = dawn_astronomical if dawn_astronomical is not None else saved.get("dawn_astronomical")
+                sunrise = sunrise if sunrise is not None else saved.get("sunrise")
+                noon = noon if noon is not None else saved.get("noon")
+                sunset = sunset if sunset is not None else saved.get("sunset")
+                dusk_civil = dusk_civil if dusk_civil is not None else saved.get("dusk_civil")
+                dusk_nautical = dusk_nautical if dusk_nautical is not None else saved.get("dusk_nautical")
+                dusk_astronomical = dusk_astronomical if dusk_astronomical is not None else saved.get("dusk_astronomical")
+                midnight = midnight if midnight is not None else saved.get("midnight")
+                daylight_duration = daylight_duration if daylight_duration is not None else saved.get("daylight_duration")
+
+            # Recalcular daylight_duration si sunrise o sunset han cambiado
+            if sunrise and sunset and (sunrise != saved.get("sunrise") or sunset != saved.get("sunset")):
+                daylight_duration = (sunset - sunrise).total_seconds() / 3600 if sunrise and sunset else None
+
+            # CONSTRUIR DADES
+            dades_dict = {
+                "dawn_civil": dawn_civil.isoformat() if dawn_civil else None,
+                "dawn_nautical": dawn_nautical.isoformat() if dawn_nautical else None,
+                "dawn_astronomical": dawn_astronomical.isoformat() if dawn_astronomical else None,
+                "sunrise": sunrise.isoformat() if sunrise else None,
+                "noon": noon.isoformat() if noon else None,
+                "sunset": sunset.isoformat() if sunset else None,
+                "dusk_civil": dusk_civil.isoformat() if dusk_civil else None,
+                "dusk_nautical": dusk_nautical.isoformat() if dusk_nautical else None,
+                "dusk_astronomical": dusk_astronomical.isoformat() if dusk_astronomical else None,
+                "midnight": midnight.isoformat() if midnight else None,
+                "daylight_duration": daylight_duration,
+            }
+
+            # AÑADIR POSICIÓN SOLAR
+            if sun_pos:
+                dades_dict.update({
+                    "sun_elevation": round(sun_pos["elevation"], 2),
+                    "sun_azimuth": round(sun_pos["azimuth"], 2),
+                    "sun_horizon_position": sun_pos["horizon_position"],
+                    "sun_rising": sun_pos["rising"],
+                    "sun_position_updated": now.isoformat()
+                })
+
+            # GUARDAR
             data_with_timestamp = {
-                "actualitzat": {
-
-                },
-                "dades": [
-                    {
-                        "sunrise": sunrise.isoformat(),
-                        "sunset": sunset.isoformat()
-                    }
-                ]
+                "actualitzat": {"dataUpdate": now.isoformat()},
+                "dades": [dades_dict],
             }
 
-            # Guardar los datos en un archivo JSON
             await save_json_to_file(data_with_timestamp, self.sun_file)
+            _LOGGER.info("Archivo solar actualizado (eventos: %s, posición: %s)",
+                         bool(dawn_civil is not None), bool(sun_pos))
 
-
-
-            return {"actualizado": data_with_timestamp['actualitzat']['dataUpdate']}
+            return data_with_timestamp
 
         except Exception as err:
-            _LOGGER.exception("Error
-
-
-
-
-
-                return {"actualizado": cached_data['actualitzat']['dataUpdate']}
-
-            _LOGGER.error("No se pudo calcular datos actualizados ni cargar datos en caché.")
-            return None
+            _LOGGER.exception("Error al calcular/guardar los datos solares: %s", err)
+            cached = await load_json_from_file(self.sun_file)
+            if cached:
+                _LOGGER.warning("Usando datos solares en caché por error.")
+                return cached
+            return None
 
-class MeteocatSunFileCoordinator(
+class MeteocatSunFileCoordinator(BaseFileCoordinator):
     """Coordinator para manejar la actualización de los datos de sol desde sun_{town_id}.json."""
 
     def __init__(
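
The dynamic validity interval in `_async_update_data` above is a clamp over the observed elevation rate: the faster the sun moves, the shorter the cached position stays valid. The same arithmetic as a standalone sketch (thresholds and bounds copied from the hunk):

```python
from datetime import timedelta

def dynamic_validity(current_elev: float, future_elev: float) -> timedelta:
    # future_elev is sampled 10 minutes ahead, so this is degrees per minute.
    rate = abs(future_elev - current_elev) / 10
    if rate > 0.05:       # fast change around sunrise/sunset
        validity = timedelta(minutes=30)
    elif rate > 0.02:     # moderate change
        validity = timedelta(minutes=60)
    else:                 # slow change at night or around noon
        validity = timedelta(minutes=120)
    # Clamp to the 15-180 minute window used by the coordinator.
    return max(timedelta(minutes=15), min(validity, timedelta(minutes=180)))

# Elevation 10.0° now vs 11.2° in 10 min -> 0.12°/min -> 30-minute validity.
assert dynamic_validity(10.0, 11.2) == timedelta(minutes=30)
assert dynamic_validity(60.0, 60.1) == timedelta(minutes=120)
```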
@@ -2072,41 +2470,478 @@ class MeteocatSunFileCoordinator(DataUpdateCoordinator):
|
|
|
2072
2470
|
|
|
2073
2471
|
super().__init__(
|
|
2074
2472
|
hass,
|
|
2075
|
-
|
|
2076
|
-
|
|
2077
|
-
|
|
2473
|
+
name=f"{DOMAIN} Sun File Coordinator",
|
|
2474
|
+
update_interval=DEFAULT_SUN_FILE_UPDATE_INTERVAL,
|
|
2475
|
+
min_delay=1.0, # Rango predeterminado
|
|
2476
|
+
max_delay=2.0, # Rango predeterminado
|
|
2078
2477
|
)
|
|
2079
2478
|
|
|
-    async def _async_update_data(self) ->
-        """
-
+    async def _async_update_data(self) -> dict[str, Any]:
+        """Lee el archivo y resetea si el primer evento (dawn_astronomical) es de ayer."""
+        # 🔸 Añadimos un pequeño desfase aleatorio (1 a 2 segundos) basados en el BaseFileCoordinator
+        await self._apply_random_delay()
 
-
-
-
+        try:
+            data = await load_json_from_file(self.sun_file)
+            if not data or "dades" not in data or not data["dades"]:
+                _LOGGER.warning("Archivo solar vacío: %s", self.sun_file)
+                return self._reset_data()
+
+            dades = data["dades"][0]
+            update_str = data.get("actualitzat", {}).get("dataUpdate")
+            update_dt = datetime.fromisoformat(update_str) if update_str else None
+            now = datetime.now(ZoneInfo(self.timezone_str))
+            today = now.date()
 
-
-
-
+            # === PRIMER EVENTO: dawn_astronomical ===
+            dawn_astro_str = dades.get("dawn_astronomical")
+            if not dawn_astro_str:
+                _LOGGER.debug("No hay 'dawn_astronomical'. Forzando reset.")
+                return self._reset_data()
 
-
-
-
+            try:
+                dawn_astro_dt = datetime.fromisoformat(dawn_astro_str)
+                event_date = dawn_astro_dt.date()
+            except ValueError as e:
+                _LOGGER.warning("Formato inválido en dawn_astronomical: %s → %s", dawn_astro_str, e)
+                return self._reset_data()
+
+            # === ¿Es de un día anterior a ayer? ===
+            if event_date < (today - timedelta(days=1)):
+                _LOGGER.info(
+                    "Datos solares muy antiguos: dawn_astronomical es del %s (hoy es %s). Reiniciando.",
+                    event_date, today
+                )
+                return self._reset_data()
 
-
-
-
-
-
-
+            # 🟢 Si el evento es de mañana, mantener datos actuales (no resetear)
+            if event_date > today:
+                _LOGGER.debug(
+                    "Datos solares son de mañana (%s). Manteniendo valores actuales hasta próxima actualización.",
+                    event_date
+                )
+
+            # === DATOS VÁLIDOS DEL DÍA ACTUAL ===
+            result = {
+                "actualizado": update_dt.isoformat() if update_dt else now.isoformat(),
+                "dawn_civil": dades.get("dawn_civil"),
+                "dawn_nautical": dades.get("dawn_nautical"),
+                "dawn_astronomical": dawn_astro_str,
                 "sunrise": dades.get("sunrise"),
-                "
+                "noon": dades.get("noon"),
+                "sunset": dades.get("sunset"),
+                "dusk_civil": dades.get("dusk_civil"),
+                "dusk_nautical": dades.get("dusk_nautical"),
+                "dusk_astronomical": dades.get("dusk_astronomical"),
+                "midnight": dades.get("midnight"),
+                "daylight_duration": dades.get("daylight_duration"),
+                "sun_elevation": dades.get("sun_elevation"),
+                "sun_azimuth": dades.get("sun_azimuth"),
+                "sun_horizon_position": dades.get("sun_horizon_position"),
+                "sun_rising": dades.get("sun_rising"),
+                "sun_position_updated": dades.get("sun_position_updated"),
             }
 
+            _LOGGER.debug("Datos solares válidos para hoy (%s)", today)
+            return result
+
+        except Exception as e:
+            _LOGGER.error("Error crítico en SunFileCoordinator: %s", e)
+            return self._reset_data()
+
     def _reset_data(self):
         """Resetea los datos a valores nulos."""
+        now = datetime.now(ZoneInfo(self.timezone_str)).isoformat()
         return {
-            "actualizado":
+            "actualizado": now,
             "sunrise": None,
-            "sunset": None
+            "sunset": None,
+            "noon": None,
+            "dawn_civil": None,
+            "dusk_civil": None,
+            "dawn_nautical": None,
+            "dusk_nautical": None,
+            "dawn_astronomical": None,
+            "dusk_astronomical": None,
+            "midnight": None,
+            "daylight_duration": None,
+            "sun_elevation": None,
+            "sun_azimuth": None,
+            "sun_horizon_position": None,
+            "sun_rising": None,
+            "sun_position_updated": now,
+        }
+
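For reference, `load_json_from_file(self.sun_file)` is expected to return the `actualitzat`/`dades` structure the method indexes above. An illustrative payload, written here as a Python literal; only the key names are taken from the diff, and every value is invented for a hypothetical town:

```python
# Illustrative shape of the sun_{town_id} JSON file; all values are invented.
example_sun_payload = {
    "actualitzat": {"dataUpdate": "2025-01-15T07:00:03+01:00"},
    "dades": [
        {
            "dawn_astronomical": "2025-01-15T06:31:12+01:00",
            "dawn_nautical": "2025-01-15T07:04:45+01:00",
            "dawn_civil": "2025-01-15T07:38:02+01:00",
            "sunrise": "2025-01-15T08:10:21+01:00",
            "noon": "2025-01-15T13:05:40+01:00",
            "sunset": "2025-01-15T18:01:00+01:00",
            "dusk_civil": "2025-01-15T18:33:18+01:00",
            "dusk_nautical": "2025-01-15T19:06:35+01:00",
            "dusk_astronomical": "2025-01-15T19:40:08+01:00",
            "midnight": "2025-01-15T01:05:40+01:00",
            "daylight_duration": "09:50:39",
            "sun_elevation": 12.4,
            "sun_azimuth": 131.7,
            "sun_horizon_position": "above_horizon",
            "sun_rising": True,
            "sun_position_updated": "2025-01-15T09:15:00+01:00",
        }
    ],
}
```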
+
+class MeteocatMoonCoordinator(DataUpdateCoordinator):
+    """Coordinator para manejar la actualización de los datos de la luna desde moon.py."""
+
+    def __init__(self, hass: HomeAssistant, entry_data: dict):
+        self.latitude = entry_data.get("latitude")
+        self.longitude = entry_data.get("longitude")
+        self.timezone_str = hass.config.time_zone or "Europe/Madrid"
+        self.town_id = entry_data.get("town_id")
+
+        self.location = LocationInfo(
+            name=entry_data.get("town_name", "Municipio"),
+            region="Spain",
+            timezone=self.timezone_str,
+            latitude=self.latitude,
+            longitude=self.longitude,
+        )
+
+        files_folder = get_storage_dir(hass, "files")
+        self.moon_file = files_folder / f"moon_{self.town_id.lower()}_data.json"
+
+        super().__init__(
+            hass,
+            _LOGGER,
+            name=f"{DOMAIN} Moon Coordinator",
+            update_interval=DEFAULT_MOON_UPDATE_INTERVAL,
+        )
+
+    async def _async_update_data(self) -> dict:
+        """Determina si los datos de la luna son válidos o requieren actualización."""
+        _LOGGER.debug("🌙 Iniciando actualización de datos de la luna...")
+        now = datetime.now(tz=ZoneInfo(self.timezone_str))
+        existing_data = await load_json_from_file(self.moon_file) or {}
+
+        # 🟡 Si no hay datos previos o JSON incompleto → calcular todo para hoy
+        if (
+            not existing_data
+            or "dades" not in existing_data
+            or not existing_data["dades"]
+            or "actualitzat" not in existing_data
+            or "dataUpdate" not in existing_data["actualitzat"]
+        ):
+            _LOGGER.warning("🌙 Datos previos incompletos o ausentes: calculando todos los datos para hoy.")
+            return await self._calculate_and_save_new_data(today_only=True, existing_data=existing_data)
+
+        dades = existing_data["dades"][0]
+        last_lunar_update_date_str = existing_data["actualitzat"].get("last_lunar_update_date")
+        last_lunar_update_date = (
+            datetime.fromisoformat(f"{last_lunar_update_date_str}T00:00:00").date()
+            if last_lunar_update_date_str
+            else now.date() - timedelta(days=1)  # Fallback
+        )
+
+        # 🟢 Comprobar si los datos son obsoletos (last_lunar_update_date y eventos antiguos)
+        try:
+            moonrise_str = dades.get("moonrise")
+            moonset_str = dades.get("moonset")
+            moonrise = datetime.fromisoformat(moonrise_str) if moonrise_str else None
+            moonset = datetime.fromisoformat(moonset_str) if moonset_str else None
+
+            # Si last_lunar_update_date es de un día anterior y los eventos (si existen) también lo son
+            events_are_old = (
+                (moonrise is None or moonrise.date() < now.date())
+                and (moonset is None or moonset.date() < now.date())
+            )
+            if last_lunar_update_date < now.date() and events_are_old:
+                _LOGGER.debug(
+                    "🌙 Datos obsoletos: last_lunar_update_date=%s, moonrise=%s, moonset=%s. Calculando para hoy.",
+                    last_lunar_update_date, moonrise, moonset
+                )
+                return await self._calculate_and_save_new_data(today_only=True, existing_data=existing_data)
+        except Exception as e:
+            _LOGGER.warning("🌙 Error interpretando fechas previas: %s", e)
+            return await self._calculate_and_save_new_data(today_only=True, existing_data=existing_data)
+
+        # 🟢 Comprobar si los datos lunares necesitan actualización
+        if now.date() > last_lunar_update_date:
+            _LOGGER.debug("🌙 Fecha actual superior a last_lunar_update_date: actualizando datos lunares.")
+            return await self._calculate_and_save_new_data(
+                update_type="update_lunar_data",
+                existing_data=existing_data
+            )
+
+        _LOGGER.debug(
+            "🌙 Estado actual → now=%s | moonrise=%s | moonset=%s",
+            now.isoformat(), moonrise, moonset
+        )
+
+        # Lógica para eventos moonrise y moonset
+        if moonrise is None and moonset is None:
+            _LOGGER.debug("🌙 Ambos eventos None: verificando si datos son actuales.")
+            if last_lunar_update_date == now.date():
+                _LOGGER.debug("🌙 Datos de hoy sin eventos: no se actualiza.")
+                return {"actualizado": existing_data["actualitzat"]["dataUpdate"]}
+            return await self._calculate_and_save_new_data(today_only=True, existing_data=existing_data)
+
+        elif moonrise is None:
+            _LOGGER.debug("🌙 No moonrise: tratando moonset como único evento.")
+            if now < moonset:
+                _LOGGER.debug("🌙 Antes del moonset: no se actualiza.")
+                return {"actualizado": existing_data["actualitzat"]["dataUpdate"]}
+            else:
+                _LOGGER.debug("🌙 Después del moonset: actualizar moonset para mañana.")
+                return await self._calculate_and_save_new_data(update_type="update_set_tomorrow", existing_data=existing_data)
+
+        elif moonset is None:
+            _LOGGER.debug("🌙 No moonset: tratando moonrise como único evento.")
+            if now < moonrise:
+                _LOGGER.debug("🌙 Antes del moonrise: no se actualiza.")
+                return {"actualizado": existing_data["actualitzat"]["dataUpdate"]}
+            else:
+                _LOGGER.debug("🌙 Después del moonrise: actualizar moonrise para mañana.")
+                return await self._calculate_and_save_new_data(update_type="update_rise_tomorrow", existing_data=existing_data)
+
+        else:
+            min_event = min(moonrise, moonset)
+            max_event = max(moonrise, moonset)
+            first_is_rise = (min_event == moonrise)
+
+            if now < min_event:
+                _LOGGER.debug("🌙 Momento actual antes del primer evento → no se actualiza nada.")
+                return {"actualizado": existing_data["actualitzat"]["dataUpdate"]}
+
+            elif now < max_event:
+                if first_is_rise:
+                    _LOGGER.debug("🌙 Después del moonrise pero antes del moonset → actualizar solo moonrise para mañana.")
+                    return await self._calculate_and_save_new_data(update_type="update_rise_tomorrow", existing_data=existing_data)
+                else:
+                    _LOGGER.debug("🌙 Después del moonset pero antes del moonrise → actualizar solo moonset para mañana.")
+                    return await self._calculate_and_save_new_data(update_type="update_set_tomorrow", existing_data=existing_data)
+
+            else:
+                _LOGGER.debug("🌙 Después de ambos eventos → actualizar moonrise y moonset para mañana.")
+                return await self._calculate_and_save_new_data(update_type="update_both_tomorrow", existing_data=existing_data)
+
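To make the branching above easier to follow, here is a self-contained sketch of the same event-window rule (a simplified re-implementation for illustration only; it omits the `last_lunar_update_date` checks and is not part of the package):

```python
from datetime import datetime
from zoneinfo import ZoneInfo


def moon_update_action(now: datetime, moonrise: datetime | None, moonset: datetime | None) -> str:
    """Which recalculation the coordinator would request (illustrative mirror of the diff)."""
    if moonrise is None and moonset is None:
        return "recalculate_today"
    if moonrise is None:
        return "none" if now < moonset else "update_set_tomorrow"
    if moonset is None:
        return "none" if now < moonrise else "update_rise_tomorrow"
    first, last = min(moonrise, moonset), max(moonrise, moonset)
    if now < first:
        return "none"  # both events still ahead: keep cached data
    if now < last:
        # Only the earlier event has passed: roll just that one to tomorrow.
        return "update_rise_tomorrow" if first == moonrise else "update_set_tomorrow"
    return "update_both_tomorrow"  # both events have passed


tz = ZoneInfo("Europe/Madrid")
rise = datetime(2025, 1, 15, 10, 12, tzinfo=tz)
sets = datetime(2025, 1, 15, 20, 47, tzinfo=tz)
print(moon_update_action(datetime(2025, 1, 15, 15, 0, tzinfo=tz), rise, sets))
# -> update_rise_tomorrow: the moon has risen but not yet set
```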
+    async def _calculate_and_save_new_data(self, today_only: bool = False, update_type: str = None, existing_data: dict = None):
+        """Calcula y guarda nuevos datos de la luna según el tipo de actualización."""
+        try:
+            now = datetime.now(tz=ZoneInfo(self.timezone_str))
+            tz = ZoneInfo(self.timezone_str)
+            today = now.date()
+            next_day = today + timedelta(days=1)
+            next_next_day = today + timedelta(days=2)
+
+            _LOGGER.debug("🌙 Calculando nuevos datos (update_type=%s)...", update_type)
+
+            # 🟣 Calcular fase e iluminación, distancia y diámetro angular
+            moon_phase_value = moon_phase(today)
+            moon_day_today = moon_day(today)
+            lunation = lunation_number(today)
+            illum_percentage = round(illuminated_percentage(today), 2)
+            distance = round(moon_distance(today), 0)
+            angular_diameter = round(moon_angular_diameter(today), 2)
+            moon_phase_name = get_moon_phase_name(today)
+            lunation_duration = get_lunation_duration(today)
+
+            # Inicializar moonrise_final y moonset_final
+            moonrise_final = None
+            moonset_final = None
+
+            # 🟢 Caso: actualizar solo datos lunares
+            if update_type == "update_lunar_data":
+                dades = existing_data.get("dades", [{}])[0]
+                moonrise_str = dades.get("moonrise")
+                moonset_str = dades.get("moonset")
+                moonrise_final = datetime.fromisoformat(moonrise_str) if moonrise_str else None
+                moonset_final = datetime.fromisoformat(moonset_str) if moonset_str else None
+
+                # Si faltan moonrise o moonset, calcular de fallback
+                if moonrise_final is None or moonset_final is None:
+                    _LOGGER.debug("🌙 Falta algún evento lunar, calculando de fallback.")
+                    moonrise_today, moonset_today = moon_rise_set(self.latitude, self.longitude, today)
+                    moonrise_tomorrow, moonset_tomorrow = moon_rise_set(self.latitude, self.longitude, next_day)
+                    moonrise_next_tomorrow, moonset_next_tomorrow = moon_rise_set(self.latitude, self.longitude, next_next_day)
+
+                    # Convertir a zona local
+                    events = {
+                        "moonrise_today": moonrise_today,
+                        "moonset_today": moonset_today,
+                        "moonrise_tomorrow": moonrise_tomorrow,
+                        "moonset_tomorrow": moonset_tomorrow,
+                        "moonrise_next_tomorrow": moonrise_next_tomorrow,
+                        "moonset_next_tomorrow": moonset_next_tomorrow,
+                    }
+                    for key, val in events.items():
+                        if val:
+                            events[key] = val.astimezone(tz)
+                    moonrise_today, moonset_today, moonrise_tomorrow, moonset_tomorrow, moonrise_next_tomorrow, moonset_next_tomorrow = (
+                        events["moonrise_today"],
+                        events["moonset_today"],
+                        events["moonrise_tomorrow"],
+                        events["moonset_tomorrow"],
+                        events["moonrise_next_tomorrow"],
+                        events["moonset_next_tomorrow"],
+                    )
+
+                    # Seleccionar los eventos más próximos disponibles
+                    moonrise_final = moonrise_final or (moonrise_today if moonrise_today else (moonrise_tomorrow if moonrise_tomorrow else moonrise_next_tomorrow))
+                    moonset_final = moonset_final or (moonset_today if moonset_today else (moonset_tomorrow if moonset_tomorrow else moonset_next_tomorrow))
+                    _LOGGER.debug("🌙 Fallback: usando moonrise=%s y moonset=%s", moonrise_final, moonset_final)
+
+            else:
+                # Calcular eventos lunares
+                moonrise_today, moonset_today = moon_rise_set(self.latitude, self.longitude, today)
+                moonrise_tomorrow, moonset_tomorrow = moon_rise_set(self.latitude, self.longitude, next_day)
+                moonrise_next_tomorrow, moonset_next_tomorrow = moon_rise_set(self.latitude, self.longitude, next_next_day)
+
+                # Convertir a zona local
+                events = {
+                    "moonrise_today": moonrise_today,
+                    "moonset_today": moonset_today,
+                    "moonrise_tomorrow": moonrise_tomorrow,
+                    "moonset_tomorrow": moonset_tomorrow,
+                    "moonrise_next_tomorrow": moonrise_next_tomorrow,
+                    "moonset_next_tomorrow": moonset_next_tomorrow,
+                }
+                for key, val in events.items():
+                    if val:
+                        events[key] = val.astimezone(tz)
+                moonrise_today, moonset_today, moonrise_tomorrow, moonset_tomorrow, moonrise_next_tomorrow, moonset_next_tomorrow = (
+                    events["moonrise_today"],
+                    events["moonset_today"],
+                    events["moonrise_tomorrow"],
+                    events["moonset_tomorrow"],
+                    events["moonrise_next_tomorrow"],
+                    events["moonset_next_tomorrow"],
+                )
+
+                # 🧭 Determinar valores finales según el contexto
+                if today_only:
+                    moonrise_final = moonrise_today
+                    moonset_final = moonset_today
+                elif update_type == "update_set_tomorrow":
+                    if existing_data and "dades" in existing_data and existing_data["dades"] and "moonrise" in existing_data["dades"][0]:
+                        moonrise_str = existing_data["dades"][0]["moonrise"]
+                        moonrise_final = datetime.fromisoformat(moonrise_str) if moonrise_str else None
+                    else:
+                        moonrise_final = moonrise_today
+                    moonset_final = moonset_tomorrow if moonset_tomorrow else moonset_next_tomorrow
+                    _LOGGER.debug("🌙 Actualizado moonset para mañana: %s (manteniendo moonrise: %s)", moonset_final, moonrise_final)
+                elif update_type == "update_rise_tomorrow":
+                    if existing_data and "dades" in existing_data and existing_data["dades"] and "moonset" in existing_data["dades"][0]:
+                        moonset_str = existing_data["dades"][0]["moonset"]
+                        moonset_final = datetime.fromisoformat(moonset_str) if moonset_str else None
+                    else:
+                        moonset_final = moonset_today
+                    moonrise_final = moonrise_tomorrow if moonrise_tomorrow else moonrise_next_tomorrow
+                    _LOGGER.debug("🌙 Actualizado moonrise para mañana: %s (manteniendo moonset: %s)", moonrise_final, moonset_final)
+                elif update_type == "update_both_tomorrow":
+                    moonrise_final = moonrise_tomorrow if moonrise_tomorrow else moonrise_next_tomorrow
+                    moonset_final = moonset_tomorrow if moonset_tomorrow else moonset_next_tomorrow
+                    _LOGGER.debug("🌙 Actualizados moonrise y moonset para mañana: %s / %s", moonrise_final, moonset_final)
+                else:
+                    moonrise_final = moonrise_today
+                    moonset_final = moonset_today
+
+                # Si algún evento final es None, intentar con el del día siguiente o el posterior
+                if moonrise_final is None:
+                    moonrise_final = moonrise_tomorrow if moonrise_tomorrow else moonrise_next_tomorrow
+                    if moonrise_final:
+                        _LOGGER.debug("🌙 Moonrise era None: usando el del día siguiente o posterior: %s", moonrise_final)
+                if moonset_final is None:
+                    moonset_final = moonset_tomorrow if moonset_tomorrow else moonset_next_tomorrow
+                    if moonset_final:
+                        _LOGGER.debug("🌙 Moonset era None: usando el del día siguiente o posterior: %s", moonset_final)
+
+            data_with_timestamp = {
+                "actualitzat": {
+                    "dataUpdate": now.isoformat(),
+                    # 🟢 Determinar last_lunar_update_date de forma legible
+                    "last_lunar_update_date": (
+                        today.isoformat()
+                        if update_type in ("update_lunar_data", None) or today_only
+                        else existing_data.get("actualitzat", {}).get("last_lunar_update_date", today.isoformat())
+                    ),
+                },
+                "dades": [
+                    {
+                        "moon_day": moon_day_today,
+                        "moon_phase": round(moon_phase_value, 2),
+                        "moon_phase_name": moon_phase_name,
+                        "illuminated_percentage": illum_percentage,
+                        "moon_distance": distance,
+                        "moon_angular_diameter": angular_diameter,
+                        "lunation": lunation,
+                        "lunation_duration": lunation_duration,
+                        "moonrise": moonrise_final.isoformat() if moonrise_final else None,
+                        "moonset": moonset_final.isoformat() if moonset_final else None,
+                    }
+                ],
+            }
+
+            await save_json_to_file(data_with_timestamp, self.moon_file)
+            _LOGGER.debug("🌙 Datos de luna guardados correctamente → %s", data_with_timestamp)
+            return {"actualizado": data_with_timestamp["actualitzat"]["dataUpdate"]}
+
+        except Exception as err:
+            _LOGGER.exception("🌙 Error al calcular datos de la luna: %s", err)
+            cached_data = await load_json_from_file(self.moon_file)
+            if cached_data:
+                _LOGGER.warning("🌙 Se usaron datos en caché por error de cálculo.")
+                return {"actualizado": cached_data["actualitzat"]["dataUpdate"]}
+            _LOGGER.error("🌙 No se pudo calcular ni cargar datos en caché de luna.")
+            return None
+
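The file written by `save_json_to_file` therefore has the shape below, which `MeteocatMoonFileCoordinator` (next) reads back as-is. The key names come from `data_with_timestamp`; the values are invented for illustration:

```python
# Illustrative moon_{town_id}_data.json contents; all values are invented.
example_moon_payload = {
    "actualitzat": {
        "dataUpdate": "2025-01-15T00:05:12+01:00",
        "last_lunar_update_date": "2025-01-15",
    },
    "dades": [
        {
            "moon_day": 16,
            "moon_phase": 0.54,
            "moon_phase_name": "Luna llena",
            "illuminated_percentage": 99.12,
            "moon_distance": 384400.0,
            "moon_angular_diameter": 0.52,
            "lunation": 1263,
            "lunation_duration": 29.53,
            "moonrise": "2025-01-15T17:42:10+01:00",
            "moonset": "2025-01-15T08:03:55+01:00",
        }
    ],
}
```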
+class MeteocatMoonFileCoordinator(BaseFileCoordinator):
+    """Coordinator para manejar la actualización de los datos de la luna desde moon_{town_id}.json."""
+
+    def __init__(self, hass: HomeAssistant, entry_data: dict):
+        self.town_id = entry_data["town_id"]
+        self.timezone_str = hass.config.time_zone or "Europe/Madrid"
+
+        files_folder = get_storage_dir(hass, "files")
+        self.moon_file = files_folder / f"moon_{self.town_id.lower()}_data.json"
+
+        super().__init__(
+            hass,
+            name=f"{DOMAIN} Moon File Coordinator",
+            update_interval=DEFAULT_MOON_FILE_UPDATE_INTERVAL,
+            min_delay=1.0,  # Rango predeterminado
+            max_delay=2.0,  # Rango predeterminado
+        )
+
+    async def _async_update_data(self) -> Dict[str, Any]:
+        """Carga los datos de la luna desde el archivo JSON y verifica si siguen siendo válidos."""
+        # 🔸 Añadimos un pequeño desfase aleatorio (1 a 2 segundos) basados en el BaseFileCoordinator
+        await self._apply_random_delay()
+
+        existing_data = await load_json_from_file(self.moon_file)
+
+        if not existing_data or "dades" not in existing_data or not existing_data["dades"]:
+            _LOGGER.warning("No se encontraron datos en %s.", self.moon_file)
+            return {
+                "actualizado": datetime.now(ZoneInfo(self.timezone_str)).isoformat(),
+                "last_lunar_update_date": None,
+                "moon_day": None,
+                "moon_phase": None,
+                "moon_phase_name": None,
+                "illuminated_percentage": None,
+                "moon_distance": None,
+                "moon_angular_diameter": None,
+                "lunation": None,
+                "lunation_duration": None,
+                "moonrise": None,
+                "moonset": None,
+            }
+
+        dades = existing_data["dades"][0]
+        moonrise_str = dades.get("moonrise")
+        moonset_str = dades.get("moonset")
+        update_date_str = existing_data.get("actualitzat", {}).get("dataUpdate", "")
+        last_lunar_update_date_str = existing_data.get("actualitzat", {}).get("last_lunar_update_date", "")
+
+        update_date = (
+            datetime.fromisoformat(update_date_str)
+            if update_date_str
+            else datetime.now(ZoneInfo(self.timezone_str))
+        )
+
+        # Simplemente devolvemos los datos cargados, aunque estén desfasados
+        return {
+            "actualizado": update_date.isoformat(),
+            "last_lunar_update_date": last_lunar_update_date_str,
+            "moon_day": dades.get("moon_day"),
+            "moon_phase": dades.get("moon_phase"),
+            "moon_phase_name": dades.get("moon_phase_name"),
+            "illuminated_percentage": dades.get("illuminated_percentage"),
+            "moon_distance": dades.get("moon_distance"),
+            "moon_angular_diameter": dades.get("moon_angular_diameter"),
+            "lunation": dades.get("lunation"),
+            "lunation_duration": dades.get("lunation_duration"),
+            "moonrise": moonrise_str,
+            "moonset": moonset_str,
         }