meteocat 3.0.0 → 3.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/ISSUE_TEMPLATE/bug_report.md +8 -2
- package/.github/ISSUE_TEMPLATE/config.yml +7 -0
- package/.github/ISSUE_TEMPLATE/improvement.md +39 -0
- package/.github/ISSUE_TEMPLATE/new_function.md +41 -0
- package/.github/labels.yml +63 -0
- package/.github/workflows/autocloser.yaml +11 -9
- package/.github/workflows/close-on-label.yml +48 -0
- package/.github/workflows/force-sync-labels.yml +18 -0
- package/.github/workflows/sync-gitlab.yml +15 -4
- package/.github/workflows/sync-labels.yml +21 -0
- package/CHANGELOG.md +80 -11
- package/README.md +16 -4
- package/custom_components/meteocat/__init__.py +57 -42
- package/custom_components/meteocat/condition.py +6 -2
- package/custom_components/meteocat/config_flow.py +231 -4
- package/custom_components/meteocat/const.py +17 -2
- package/custom_components/meteocat/coordinator.py +1122 -101
- package/custom_components/meteocat/helpers.py +31 -36
- package/custom_components/meteocat/manifest.json +3 -2
- package/custom_components/meteocat/options_flow.py +71 -3
- package/custom_components/meteocat/sensor.py +660 -247
- package/custom_components/meteocat/strings.json +252 -15
- package/custom_components/meteocat/translations/ca.json +249 -13
- package/custom_components/meteocat/translations/en.json +252 -15
- package/custom_components/meteocat/translations/es.json +252 -15
- package/custom_components/meteocat/version.py +1 -1
- package/filetree.txt +12 -3
- package/hacs.json +1 -1
- package/images/daily_forecast_2_alerts.png +0 -0
- package/images/daily_forecast_no_alerts.png +0 -0
- package/images/diagnostic_sensors.png +0 -0
- package/images/dynamic_sensors.png +0 -0
- package/images/options.png +0 -0
- package/images/regenerate_assets.png +0 -0
- package/images/setup_options.png +0 -0
- package/images/system_options.png +0 -0
- package/package.json +1 -1
- package/pyproject.toml +1 -1
- package/scripts/update_version.sh +6 -0
- package/.github/workflows/close-duplicates.yml +0 -57
|
@@ -4,17 +4,31 @@ import json
|
|
|
4
4
|
import aiofiles
|
|
5
5
|
import logging
|
|
6
6
|
import asyncio
|
|
7
|
+
import random
|
|
7
8
|
import unicodedata
|
|
8
9
|
from pathlib import Path
|
|
9
|
-
from datetime import datetime, timedelta, timezone, time
|
|
10
|
+
from datetime import date, datetime, timedelta, timezone, time
|
|
10
11
|
from zoneinfo import ZoneInfo
|
|
11
|
-
from typing import Dict, Any
|
|
12
|
+
from typing import List, Dict, Any, Optional
|
|
12
13
|
|
|
13
|
-
from homeassistant.core import HomeAssistant
|
|
14
|
+
from homeassistant.core import HomeAssistant, EVENT_HOMEASSISTANT_START
|
|
14
15
|
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
|
15
16
|
from homeassistant.exceptions import ConfigEntryNotReady
|
|
16
17
|
from homeassistant.components.weather import Forecast
|
|
17
18
|
|
|
19
|
+
from solarmoonpy.moon import (
|
|
20
|
+
moon_phase,
|
|
21
|
+
moon_day,
|
|
22
|
+
moon_rise_set,
|
|
23
|
+
illuminated_percentage,
|
|
24
|
+
moon_distance,
|
|
25
|
+
moon_angular_diameter,
|
|
26
|
+
lunation_number,
|
|
27
|
+
get_moon_phase_name,
|
|
28
|
+
get_lunation_duration
|
|
29
|
+
)
|
|
30
|
+
from solarmoonpy.location import Location, LocationInfo
|
|
31
|
+
|
|
18
32
|
from meteocatpy.data import MeteocatStationData
|
|
19
33
|
from meteocatpy.uvi import MeteocatUviData
|
|
20
34
|
from meteocatpy.forecast import MeteocatForecast
|
|
@@ -38,6 +52,10 @@ from .const import (
|
|
|
38
52
|
DEFAULT_VALIDITY_DAYS,
|
|
39
53
|
DEFAULT_VALIDITY_HOURS,
|
|
40
54
|
DEFAULT_VALIDITY_MINUTES,
|
|
55
|
+
DEFAULT_UVI_LOW_VALIDITY_HOURS,
|
|
56
|
+
DEFAULT_UVI_LOW_VALIDITY_MINUTES,
|
|
57
|
+
DEFAULT_UVI_HIGH_VALIDITY_HOURS,
|
|
58
|
+
DEFAULT_UVI_HIGH_VALIDITY_MINUTES,
|
|
41
59
|
DEFAULT_ALERT_VALIDITY_TIME,
|
|
42
60
|
DEFAULT_QUOTES_VALIDITY_TIME,
|
|
43
61
|
ALERT_VALIDITY_MULTIPLIER_100,
|
|
@@ -46,7 +64,8 @@ from .const import (
|
|
|
46
64
|
ALERT_VALIDITY_MULTIPLIER_DEFAULT,
|
|
47
65
|
DEFAULT_LIGHTNING_VALIDITY_TIME,
|
|
48
66
|
DEFAULT_LIGHTNING_VALIDITY_HOURS,
|
|
49
|
-
DEFAULT_LIGHTNING_VALIDITY_MINUTES
|
|
67
|
+
DEFAULT_LIGHTNING_VALIDITY_MINUTES,
|
|
68
|
+
PREDICCIO_HIGH_QUOTA_LIMIT
|
|
50
69
|
)
|
|
51
70
|
|
|
52
71
|
_LOGGER = logging.getLogger(__name__)
|
|
@@ -67,6 +86,10 @@ DEFAULT_QUOTES_UPDATE_INTERVAL = timedelta(minutes=10)
|
|
|
67
86
|
DEFAULT_QUOTES_FILE_UPDATE_INTERVAL = timedelta(minutes=5)
|
|
68
87
|
DEFAULT_LIGHTNING_UPDATE_INTERVAL = timedelta(minutes=10)
|
|
69
88
|
DEFAULT_LIGHTNING_FILE_UPDATE_INTERVAL = timedelta(minutes=5)
|
|
89
|
+
DEFAULT_SUN_UPDATE_INTERVAL = timedelta(minutes=1)
|
|
90
|
+
DEFAULT_SUN_FILE_UPDATE_INTERVAL = timedelta(seconds=30)
|
|
91
|
+
DEFAULT_MOON_UPDATE_INTERVAL = timedelta(minutes=1)
|
|
92
|
+
DEFAULT_MOON_FILE_UPDATE_INTERVAL = timedelta(seconds=30)
|
|
70
93
|
|
|
71
94
|
# Definir la zona horaria local
|
|
72
95
|
TIMEZONE = ZoneInfo("Europe/Madrid")
|
|
@@ -137,6 +160,50 @@ async def _update_quotes(hass: HomeAssistant, plan_name: str) -> None:
|
|
|
137
160
|
except Exception as e:
|
|
138
161
|
_LOGGER.exception("Error inesperado al actualizar las cuotas en quotes.json: %s", str(e))
|
|
139
162
|
|
|
163
|
+
class BaseFileCoordinator(DataUpdateCoordinator):
|
|
164
|
+
"""
|
|
165
|
+
Coordinador base para leer datos desde archivos JSON.
|
|
166
|
+
|
|
167
|
+
Proporciona un pequeño desfase aleatorio antes de cada actualización
|
|
168
|
+
para evitar colisión entre el coordinador que crea el JSON y el que lo lee.
|
|
169
|
+
|
|
170
|
+
Cada coordinador que herede de esta clase debe implementar su propio
|
|
171
|
+
método `_async_update_data()` para definir la lógica de lectura y validación.
|
|
172
|
+
"""
|
|
173
|
+
|
|
174
|
+
def __init__(self, hass, name: str, update_interval: timedelta, min_delay: float = 1.0, max_delay: float = 2.0):
|
|
175
|
+
"""
|
|
176
|
+
Inicializa el coordinador base.
|
|
177
|
+
|
|
178
|
+
Args:
|
|
179
|
+
hass (HomeAssistant): Instancia de Home Assistant.
|
|
180
|
+
name (str): Nombre identificativo del coordinador.
|
|
181
|
+
update_interval (timedelta): Intervalo de actualización.
|
|
182
|
+
min_delay (float): Límite inferior del desfase aleatorio en segundos (default: 1.0).
|
|
183
|
+
max_delay (float): Límite superior del desfase aleatorio en segundos (default: 2.0).
|
|
184
|
+
"""
|
|
185
|
+
super().__init__(hass, _LOGGER, name=name, update_interval=update_interval)
|
|
186
|
+
self._min_delay = min_delay
|
|
187
|
+
self._max_delay = max_delay
|
|
188
|
+
self._first_delay = random.uniform(min_delay, max_delay)
|
|
189
|
+
self._initialized = False
|
|
190
|
+
|
|
191
|
+
async def _apply_random_delay(self):
|
|
192
|
+
"""
|
|
193
|
+
Aplica un desfase aleatorio leve antes de la lectura.
|
|
194
|
+
|
|
195
|
+
- En la primera ejecución: usa un desfase fijo (_first_delay)
|
|
196
|
+
- En las siguientes: aplica un desfase aleatorio entre 1 y 2 segundos
|
|
197
|
+
"""
|
|
198
|
+
if not self._initialized:
|
|
199
|
+
delay = self._first_delay
|
|
200
|
+
self._initialized = True
|
|
201
|
+
else:
|
|
202
|
+
delay = random.uniform(self._min_delay, self._max_delay)
|
|
203
|
+
|
|
204
|
+
_LOGGER.debug("%s aplicando desfase aleatorio de %.2fs", self.name, delay)
|
|
205
|
+
await asyncio.sleep(delay)
|
|
206
|
+
|
|
140
207
|
class MeteocatSensorCoordinator(DataUpdateCoordinator):
|
|
141
208
|
"""Coordinator para manejar la actualización de datos de los sensores."""
|
|
142
209
|
|
|
@@ -297,6 +364,7 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
|
|
|
297
364
|
):
|
|
298
365
|
self.api_key = entry_data["api_key"]
|
|
299
366
|
self.town_id = entry_data["town_id"]
|
|
367
|
+
self.limit_prediccio = entry_data["limit_prediccio"]
|
|
300
368
|
self.meteocat_uvi_data = MeteocatUviData(self.api_key)
|
|
301
369
|
|
|
302
370
|
# Ruta persistente en /config/meteocat_files/files
|
|
@@ -310,56 +378,83 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
|
|
|
310
378
|
update_interval=DEFAULT_UVI_UPDATE_INTERVAL,
|
|
311
379
|
)
|
|
312
380
|
|
|
313
|
-
async def is_uvi_data_valid(self) -> dict
|
|
314
|
-
"""
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
381
|
+
async def is_uvi_data_valid(self) -> Optional[dict]:
|
|
382
|
+
"""Valida datos UVI: misma lógica que predicción, basada en limit_prediccio.
|
|
383
|
+
|
|
384
|
+
- Si `limit_prediccio >= 550` → actualiza **el día siguiente** después de las DEFAULT_VALIDITY_HOURS:DEFAULT_VALIDITY_MINUTES.
|
|
385
|
+
- Si `limit_prediccio < 550` → actualiza **dos días después** después de las DEFAULT_VALIDITY_HOURS:DEFAULT_VALIDITY_MINUTES.
|
|
386
|
+
"""
|
|
387
|
+
if not self.uvi_file.exists():
|
|
388
|
+
_LOGGER.debug("Archivo UVI no existe: %s", self.uvi_file)
|
|
389
|
+
return None
|
|
319
390
|
|
|
320
|
-
|
|
321
|
-
|
|
391
|
+
try:
|
|
392
|
+
async with aiofiles.open(self.uvi_file, "r", encoding="utf-8") as f:
|
|
393
|
+
content = await f.read()
|
|
322
394
|
data = json.loads(content)
|
|
323
395
|
|
|
324
|
-
#
|
|
396
|
+
# Validar estructura básica
|
|
325
397
|
if not isinstance(data, dict) or "uvi" not in data or not isinstance(data["uvi"], list) or not data["uvi"]:
|
|
326
|
-
_LOGGER.warning("Estructura inválida
|
|
398
|
+
_LOGGER.warning("Estructura UVI inválida en %s", self.uvi_file)
|
|
327
399
|
return None
|
|
328
400
|
|
|
329
|
-
#
|
|
401
|
+
# Fecha del primer día
|
|
330
402
|
try:
|
|
331
|
-
|
|
403
|
+
first_date_str = data["uvi"][0].get("date")
|
|
404
|
+
first_date = datetime.strptime(first_date_str, "%Y-%m-%d").date()
|
|
332
405
|
except Exception as exc:
|
|
333
|
-
_LOGGER.warning("Fecha inválida en %s: %s", self.uvi_file, exc)
|
|
406
|
+
_LOGGER.warning("Fecha UVI inválida en %s: %s", self.uvi_file, exc)
|
|
334
407
|
return None
|
|
335
408
|
|
|
336
|
-
|
|
337
|
-
|
|
409
|
+
# Fecha y hora actual en zona local (Europe/Madrid)
|
|
410
|
+
now_local = datetime.now(TIMEZONE)
|
|
411
|
+
today = now_local.date()
|
|
412
|
+
current_time_local = now_local.time()
|
|
413
|
+
# Horas para actualización según límite de cuota
|
|
414
|
+
min_update_time_high = time(DEFAULT_UVI_HIGH_VALIDITY_HOURS, DEFAULT_UVI_HIGH_VALIDITY_MINUTES) # Hora para cuota alta
|
|
415
|
+
min_update_time_low = time(DEFAULT_UVI_LOW_VALIDITY_HOURS, DEFAULT_UVI_LOW_VALIDITY_MINUTES) # Hora para cuota baja
|
|
416
|
+
# Diferencia en días
|
|
417
|
+
days_diff = (today - first_date).days
|
|
418
|
+
|
|
419
|
+
# === LÓGICA DINÁMICA SEGÚN CUOTA ===
|
|
420
|
+
if self.limit_prediccio >= PREDICCIO_HIGH_QUOTA_LIMIT:
|
|
421
|
+
should_update = days_diff >= DEFAULT_VALIDITY_DAYS and current_time_local >= min_update_time_high
|
|
422
|
+
else:
|
|
423
|
+
should_update = days_diff > DEFAULT_VALIDITY_DAYS and current_time_local >= min_update_time_low
|
|
338
424
|
|
|
339
425
|
_LOGGER.debug(
|
|
340
|
-
"
|
|
341
|
-
|
|
342
|
-
|
|
426
|
+
"[UVI %s] Validación: primer_día=%s, hoy=%s → días=%d, "
|
|
427
|
+
"cuota=%d (%s), hora=%s ≥ %s → actualizar=%s",
|
|
428
|
+
self.town_id,
|
|
343
429
|
first_date,
|
|
344
|
-
|
|
430
|
+
today,
|
|
431
|
+
days_diff,
|
|
432
|
+
self.limit_prediccio,
|
|
433
|
+
"ALTA" if self.limit_prediccio >= 550 else "BAJA",
|
|
434
|
+
current_time_local.strftime("%H:%M"),
|
|
435
|
+
min_update_time_high.strftime("%H:%M") if self.limit_prediccio >= 550 else min_update_time_low.strftime("%H:%M"),
|
|
436
|
+
should_update,
|
|
345
437
|
)
|
|
346
438
|
|
|
347
|
-
if
|
|
348
|
-
_LOGGER.info(
|
|
439
|
+
if should_update:
|
|
440
|
+
_LOGGER.info(
|
|
441
|
+
"Datos UVI obsoletos → llamando API (town=%s, cuota=%d)",
|
|
442
|
+
self.town_id, self.limit_prediccio
|
|
443
|
+
)
|
|
349
444
|
return None
|
|
350
445
|
|
|
351
|
-
_LOGGER.
|
|
446
|
+
_LOGGER.debug("Datos UVI válidos → usando caché")
|
|
352
447
|
return data
|
|
353
448
|
|
|
354
449
|
except json.JSONDecodeError:
|
|
355
|
-
_LOGGER.error("
|
|
450
|
+
_LOGGER.error("JSON corrupto en %s", self.uvi_file)
|
|
356
451
|
return None
|
|
357
452
|
except Exception as e:
|
|
358
|
-
_LOGGER.error("Error
|
|
453
|
+
_LOGGER.error("Error validando UVI: %s", e)
|
|
359
454
|
return None
|
|
360
455
|
|
|
361
|
-
async def _async_update_data(self) -> Dict:
|
|
362
|
-
"""Actualiza los datos de UVI desde la API de Meteocat
|
|
456
|
+
async def _async_update_data(self) -> List[Dict]:
|
|
457
|
+
"""Actualiza los datos de UVI desde la API de Meteocat o caché."""
|
|
363
458
|
try:
|
|
364
459
|
valid_data = await self.is_uvi_data_valid()
|
|
365
460
|
if valid_data:
|
|
@@ -402,10 +497,9 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
|
|
|
402
497
|
_LOGGER.warning("Usando datos en caché para la ciudad %s.", self.town_id)
|
|
403
498
|
return cached_data.get("uvi", [])
|
|
404
499
|
_LOGGER.error("No se pudo obtener datos UVI ni cargar caché.")
|
|
405
|
-
return
|
|
406
|
-
|
|
500
|
+
return []
|
|
407
501
|
|
|
408
|
-
class MeteocatUviFileCoordinator(
|
|
502
|
+
class MeteocatUviFileCoordinator(BaseFileCoordinator):
|
|
409
503
|
"""Coordinator to read and process UV data from a file."""
|
|
410
504
|
|
|
411
505
|
def __init__(
|
|
@@ -417,9 +511,10 @@ class MeteocatUviFileCoordinator(DataUpdateCoordinator):
|
|
|
417
511
|
|
|
418
512
|
super().__init__(
|
|
419
513
|
hass,
|
|
420
|
-
_LOGGER,
|
|
421
514
|
name=f"{DOMAIN} Uvi File Coordinator",
|
|
422
515
|
update_interval=DEFAULT_UVI_SENSOR_UPDATE_INTERVAL,
|
|
516
|
+
min_delay=1.0, # Rango predeterminado
|
|
517
|
+
max_delay=2.0, # Rango predeterminado
|
|
423
518
|
)
|
|
424
519
|
|
|
425
520
|
# Ruta persistente en /config/meteocat_files/files
|
|
@@ -428,6 +523,9 @@ class MeteocatUviFileCoordinator(DataUpdateCoordinator):
|
|
|
428
523
|
|
|
429
524
|
async def _async_update_data(self):
|
|
430
525
|
"""Read and process UV data for the current hour from the file asynchronously."""
|
|
526
|
+
# 🔸 Añadimos un pequeño desfase aleatorio (1 a 2 segundos) basados en el BaseFileCoordinator
|
|
527
|
+
await self._apply_random_delay()
|
|
528
|
+
|
|
431
529
|
try:
|
|
432
530
|
async with aiofiles.open(self._file_path, "r", encoding="utf-8") as file:
|
|
433
531
|
raw = await file.read()
|
|
@@ -490,6 +588,7 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
|
|
|
490
588
|
self.station_id = entry_data["station_id"]
|
|
491
589
|
self.variable_name = entry_data["variable_name"]
|
|
492
590
|
self.variable_id = entry_data["variable_id"]
|
|
591
|
+
self.limit_prediccio = entry_data["limit_prediccio"] # Límite de llamada a la API para PREDICCIONES
|
|
493
592
|
self.meteocat_forecast = MeteocatForecast(self.api_key)
|
|
494
593
|
|
|
495
594
|
# Ruta persistente en /config/meteocat_files/files
|
|
@@ -503,52 +602,85 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
|
|
|
503
602
|
name=f"{DOMAIN} Entity Coordinator",
|
|
504
603
|
update_interval=DEFAULT_ENTITY_UPDATE_INTERVAL,
|
|
505
604
|
)
|
|
506
|
-
|
|
605
|
+
|
|
606
|
+
# --------------------------------------------------------------------- #
|
|
607
|
+
# VALIDACIÓN DINÁMICA DE DATOS DE PREDICCIÓN
|
|
608
|
+
# --------------------------------------------------------------------- #
|
|
507
609
|
async def validate_forecast_data(self, file_path: Path) -> dict:
|
|
508
|
-
"""Valida y retorna datos de predicción si son válidos.
|
|
610
|
+
"""Valida y retorna datos de predicción si son válidos.
|
|
611
|
+
|
|
612
|
+
- Si `limit_prediccio >= 550` → actualiza **el día siguiente** después de las DEFAULT_VALIDITY_HOURS:DEFAULT_VALIDITY_MINUTES.
|
|
613
|
+
- Si `limit_prediccio < 550` → actualiza **dos días después** después de las DEFAULT_VALIDITY_HOURS:DEFAULT_VALIDITY_MINUTES.
|
|
614
|
+
"""
|
|
509
615
|
if not file_path.exists():
|
|
510
|
-
_LOGGER.
|
|
616
|
+
_LOGGER.warning("El archivo %s no existe. Se considerará inválido.", file_path)
|
|
511
617
|
return None
|
|
512
618
|
try:
|
|
513
619
|
async with aiofiles.open(file_path, "r", encoding="utf-8") as f:
|
|
514
620
|
content = await f.read()
|
|
515
621
|
data = json.loads(content)
|
|
516
622
|
|
|
517
|
-
#
|
|
518
|
-
|
|
623
|
+
# Fecha del primer día de predicción (solo fecha)
|
|
624
|
+
first_date_str = data["dies"][0]["data"].rstrip("Z")
|
|
625
|
+
first_date = datetime.fromisoformat(first_date_str).date()
|
|
519
626
|
today = datetime.now(timezone.utc).date()
|
|
520
|
-
current_time = datetime.now(timezone.utc).time()
|
|
521
627
|
|
|
522
|
-
#
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
628
|
+
# Hora actual en zona local (Europe/Madrid)
|
|
629
|
+
current_time_local = datetime.now(TIMEZONE).time()
|
|
630
|
+
min_update_time = time(DEFAULT_VALIDITY_HOURS, DEFAULT_VALIDITY_MINUTES)
|
|
631
|
+
|
|
632
|
+
days_diff = (today - first_date).days
|
|
633
|
+
|
|
634
|
+
# -----------------------------------------------------------------
|
|
635
|
+
# Lógica según cuota
|
|
636
|
+
# -----------------------------------------------------------------
|
|
637
|
+
if self.limit_prediccio >= PREDICCIO_HIGH_QUOTA_LIMIT:
|
|
638
|
+
# Cuota alta → actualiza cuando los datos son de ayer (o antes) + hora OK
|
|
639
|
+
should_update = days_diff >= DEFAULT_VALIDITY_DAYS and current_time_local >= min_update_time
|
|
640
|
+
else:
|
|
641
|
+
# Cuota baja → actualiza solo cuando los datos son de anteayer + hora OK
|
|
642
|
+
should_update = days_diff > DEFAULT_VALIDITY_DAYS and current_time_local >= min_update_time
|
|
643
|
+
|
|
644
|
+
# -----------------------------------------------------------------
|
|
645
|
+
# Logs detallados
|
|
646
|
+
# -----------------------------------------------------------------
|
|
647
|
+
_LOGGER.debug(
|
|
648
|
+
"[%s] Validación: primer_día=%s, hoy=%s → días=%d, "
|
|
649
|
+
"cuota=%d (%s), hora_local=%s ≥ %s → actualizar=%s",
|
|
650
|
+
file_path.name,
|
|
527
651
|
first_date,
|
|
528
|
-
|
|
652
|
+
today,
|
|
653
|
+
days_diff,
|
|
654
|
+
self.limit_prediccio,
|
|
655
|
+
"ALTA" if self.limit_prediccio >= 550 else "BAJA",
|
|
656
|
+
current_time_local.strftime("%H:%M"),
|
|
657
|
+
min_update_time.strftime("%H:%M"),
|
|
658
|
+
should_update,
|
|
529
659
|
)
|
|
530
660
|
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
_LOGGER.info(
|
|
536
|
-
"Los datos en %s son antiguos. Se procederá a llamar a la API.",
|
|
537
|
-
file_path,
|
|
661
|
+
if should_update:
|
|
662
|
+
_LOGGER.debug(
|
|
663
|
+
"Datos obsoletos o actualizables → llamando API (%s, cuota=%d)",
|
|
664
|
+
file_path.name, self.limit_prediccio
|
|
538
665
|
)
|
|
539
|
-
return None
|
|
540
|
-
|
|
666
|
+
return None # → forzar actualización
|
|
667
|
+
|
|
668
|
+
_LOGGER.debug("Datos válidos en %s → usando caché", file_path.name)
|
|
541
669
|
return data
|
|
670
|
+
|
|
542
671
|
except Exception as e:
|
|
543
|
-
_LOGGER.warning("Error validando
|
|
672
|
+
_LOGGER.warning("Error validando %s: %s", file_path, e)
|
|
544
673
|
return None
|
|
545
674
|
|
|
675
|
+
# --------------------------------------------------------------------- #
|
|
676
|
+
# OBTENCIÓN Y GUARDADO DE DATOS DESDE LA API
|
|
677
|
+
# --------------------------------------------------------------------- #
|
|
546
678
|
async def _fetch_and_save_data(self, api_method, file_path: Path) -> dict:
|
|
547
679
|
"""Obtiene datos de la API y los guarda en un archivo JSON."""
|
|
548
680
|
try:
|
|
549
681
|
data = await asyncio.wait_for(api_method(self.town_id), timeout=30)
|
|
550
682
|
|
|
551
|
-
# Procesar
|
|
683
|
+
# Procesar precipitación negativa antes de guardar los datos
|
|
552
684
|
for day in data.get("dies", []):
|
|
553
685
|
for var, details in day.get("variables", {}).items():
|
|
554
686
|
if (
|
|
@@ -560,26 +692,30 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
|
|
|
560
692
|
|
|
561
693
|
await save_json_to_file(data, file_path)
|
|
562
694
|
|
|
563
|
-
# Actualizar cuotas dependiendo del tipo de predicción
|
|
695
|
+
# Actualizar cuotas (dependiendo del tipo de predicción horaria/diaria)
|
|
564
696
|
if api_method.__name__ in ("get_prediccion_horaria", "get_prediccion_diaria"):
|
|
565
697
|
await _update_quotes(self.hass, "Prediccio")
|
|
566
698
|
|
|
567
699
|
return data
|
|
700
|
+
|
|
568
701
|
except Exception as err:
|
|
569
702
|
_LOGGER.error(f"Error al obtener datos de la API para {file_path}: {err}")
|
|
570
703
|
raise
|
|
571
704
|
|
|
572
|
-
|
|
705
|
+
# --------------------------------------------------------------------- #
|
|
706
|
+
# ACTUALIZACIÓN PRINCIPAL
|
|
707
|
+
# --------------------------------------------------------------------- #
|
|
708
|
+
async def _async_update_data(self) -> Dict[str, Any]:
|
|
573
709
|
"""Actualiza los datos de predicción horaria y diaria."""
|
|
574
710
|
try:
|
|
575
|
-
# Validar o actualizar datos horarios
|
|
711
|
+
# --- Validar o actualizar datos horarios ---
|
|
576
712
|
hourly_data = await self.validate_forecast_data(self.hourly_file)
|
|
577
713
|
if not hourly_data:
|
|
578
714
|
hourly_data = await self._fetch_and_save_data(
|
|
579
715
|
self.meteocat_forecast.get_prediccion_horaria, self.hourly_file
|
|
580
716
|
)
|
|
581
717
|
|
|
582
|
-
# Validar o actualizar datos diarios
|
|
718
|
+
# --- Validar o actualizar datos diarios ---
|
|
583
719
|
daily_data = await self.validate_forecast_data(self.daily_file)
|
|
584
720
|
if not daily_data:
|
|
585
721
|
daily_data = await self._fetch_and_save_data(
|
|
@@ -588,6 +724,9 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
|
|
|
588
724
|
|
|
589
725
|
return {"hourly": hourly_data, "daily": daily_data}
|
|
590
726
|
|
|
727
|
+
# -----------------------------------------------------------------
|
|
728
|
+
# Manejo de errores de API
|
|
729
|
+
# -----------------------------------------------------------------
|
|
591
730
|
except asyncio.TimeoutError as err:
|
|
592
731
|
_LOGGER.warning("Tiempo de espera agotado al obtener datos de predicción.")
|
|
593
732
|
raise ConfigEntryNotReady from err
|
|
@@ -615,7 +754,9 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
|
|
|
615
754
|
except Exception as err:
|
|
616
755
|
_LOGGER.exception("Error inesperado al obtener datos de predicción: %s", err)
|
|
617
756
|
|
|
618
|
-
#
|
|
757
|
+
# -----------------------------------------------------------------
|
|
758
|
+
# Fallback: usar caché local si todo falla
|
|
759
|
+
# -----------------------------------------------------------------
|
|
619
760
|
hourly_cache = await load_json_from_file(self.hourly_file) or {}
|
|
620
761
|
daily_cache = await load_json_from_file(self.daily_file) or {}
|
|
621
762
|
|
|
@@ -646,6 +787,23 @@ class HourlyForecastCoordinator(DataUpdateCoordinator):
|
|
|
646
787
|
self.station_name = entry_data["station_name"]
|
|
647
788
|
self.station_id = entry_data["station_id"]
|
|
648
789
|
|
|
790
|
+
# === NUEVO: ubicación solar usando solarmoonpy ===
|
|
791
|
+
latitude = entry_data.get("latitude", hass.config.latitude)
|
|
792
|
+
longitude = entry_data.get("longitude", hass.config.longitude)
|
|
793
|
+
altitude = entry_data.get("altitude", hass.config.elevation or 0.0)
|
|
794
|
+
timezone_str = hass.config.time_zone or "Europe/Madrid"
|
|
795
|
+
|
|
796
|
+
self.location = Location(
|
|
797
|
+
LocationInfo(
|
|
798
|
+
name=self.town_name,
|
|
799
|
+
region="Spain",
|
|
800
|
+
timezone=timezone_str,
|
|
801
|
+
latitude=latitude,
|
|
802
|
+
longitude=longitude,
|
|
803
|
+
elevation=altitude,
|
|
804
|
+
)
|
|
805
|
+
)
|
|
806
|
+
|
|
649
807
|
# Ruta persistente en /config/meteocat_files/files
|
|
650
808
|
files_folder = get_storage_dir(hass, "files")
|
|
651
809
|
self.file_path = files_folder / f"forecast_{self.town_id.lower()}_hourly_data.json"
|
|
@@ -719,7 +877,7 @@ class HourlyForecastCoordinator(DataUpdateCoordinator):
|
|
|
719
877
|
condition_data = get_condition_from_statcel(
|
|
720
878
|
codi_estatcel=condition_code,
|
|
721
879
|
current_time=forecast_time_local,
|
|
722
|
-
|
|
880
|
+
location=self.location,
|
|
723
881
|
is_hourly=True
|
|
724
882
|
)
|
|
725
883
|
condition = condition_data["condition"]
|
|
@@ -776,7 +934,7 @@ class HourlyForecastCoordinator(DataUpdateCoordinator):
|
|
|
776
934
|
_LOGGER.warning("Error procesando '%s' para %s: %s", variable_name, valor, e)
|
|
777
935
|
continue
|
|
778
936
|
|
|
779
|
-
_LOGGER.
|
|
937
|
+
_LOGGER.warning("No se encontró un valor válido para '%s' en %s.", variable_name, target_time)
|
|
780
938
|
return None
|
|
781
939
|
|
|
782
940
|
class DailyForecastCoordinator(DataUpdateCoordinator):
|
|
@@ -922,9 +1080,27 @@ class MeteocatConditionCoordinator(DataUpdateCoordinator):
|
|
|
922
1080
|
hass (HomeAssistant): Instance of Home Assistant.
|
|
923
1081
|
entry_data (dict): Configuration data from core.config_entries.
|
|
924
1082
|
"""
|
|
1083
|
+
self.town_name = entry_data["town_name"]
|
|
925
1084
|
self.town_id = entry_data["town_id"] # Municipality ID
|
|
926
1085
|
self.hass = hass
|
|
927
1086
|
|
|
1087
|
+
# === NUEVO: ubicación solar usando solarmoonpy ===
|
|
1088
|
+
latitude = entry_data.get("latitude", hass.config.latitude)
|
|
1089
|
+
longitude = entry_data.get("longitude", hass.config.longitude)
|
|
1090
|
+
altitude = entry_data.get("altitude", hass.config.elevation or 0.0)
|
|
1091
|
+
timezone_str = hass.config.time_zone or "Europe/Madrid"
|
|
1092
|
+
|
|
1093
|
+
self.location = Location(
|
|
1094
|
+
LocationInfo(
|
|
1095
|
+
name=self.town_name,
|
|
1096
|
+
region="Spain",
|
|
1097
|
+
timezone=timezone_str,
|
|
1098
|
+
latitude=latitude,
|
|
1099
|
+
longitude=longitude,
|
|
1100
|
+
elevation=altitude,
|
|
1101
|
+
)
|
|
1102
|
+
)
|
|
1103
|
+
|
|
928
1104
|
super().__init__(
|
|
929
1105
|
hass,
|
|
930
1106
|
_LOGGER,
|
|
@@ -966,7 +1142,7 @@ class MeteocatConditionCoordinator(DataUpdateCoordinator):
|
|
|
966
1142
|
condition = get_condition_from_statcel(
|
|
967
1143
|
codi_estatcel,
|
|
968
1144
|
current_datetime,
|
|
969
|
-
self.
|
|
1145
|
+
location=self.location,
|
|
970
1146
|
is_hourly=True,
|
|
971
1147
|
)
|
|
972
1148
|
condition.update({
|
|
@@ -1411,7 +1587,7 @@ class MeteocatAlertsRegionCoordinator(DataUpdateCoordinator):
|
|
|
1411
1587
|
async def _async_update_data(self) -> Dict[str, Any]:
|
|
1412
1588
|
"""Carga y procesa los datos de alertas desde el archivo JSON."""
|
|
1413
1589
|
data = await load_json_from_file(self._file_path)
|
|
1414
|
-
_LOGGER.
|
|
1590
|
+
_LOGGER.debug("Datos cargados desde %s: %s", self._file_path, data) # Log de la carga de datos
|
|
1415
1591
|
|
|
1416
1592
|
if not data:
|
|
1417
1593
|
_LOGGER.error("No se pudo cargar el archivo JSON de alertas en %s.", self._file_path)
|
|
@@ -1422,7 +1598,7 @@ class MeteocatAlertsRegionCoordinator(DataUpdateCoordinator):
|
|
|
1422
1598
|
def _process_alerts_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
|
1423
1599
|
"""Procesa los datos de alertas y devuelve un diccionario filtrado por región."""
|
|
1424
1600
|
if not data.get("dades"):
|
|
1425
|
-
_LOGGER.
|
|
1601
|
+
_LOGGER.debug("No hay alertas activas para la región %s.", self.region_id)
|
|
1426
1602
|
return {
|
|
1427
1603
|
"estado": "Tancat",
|
|
1428
1604
|
"actualizado": data.get("actualitzat", {}).get("dataUpdate", ""),
|
|
@@ -1657,7 +1833,7 @@ class MeteocatQuotesCoordinator(DataUpdateCoordinator):
|
|
|
1657
1833
|
_LOGGER.error("No se pudo obtener datos actualizados ni cargar datos en caché.")
|
|
1658
1834
|
return None
|
|
1659
1835
|
|
|
1660
|
-
class MeteocatQuotesFileCoordinator(
|
|
1836
|
+
class MeteocatQuotesFileCoordinator(BaseFileCoordinator):
|
|
1661
1837
|
"""Coordinator para manejar la actualización de las cuotas desde quotes.json."""
|
|
1662
1838
|
|
|
1663
1839
|
def __init__(
|
|
@@ -1677,9 +1853,10 @@ class MeteocatQuotesFileCoordinator(DataUpdateCoordinator):
|
|
|
1677
1853
|
|
|
1678
1854
|
super().__init__(
|
|
1679
1855
|
hass,
|
|
1680
|
-
|
|
1681
|
-
name="Meteocat Quotes File Coordinator",
|
|
1856
|
+
name=f"{DOMAIN} Quotes File Coordinator",
|
|
1682
1857
|
update_interval=DEFAULT_QUOTES_FILE_UPDATE_INTERVAL,
|
|
1858
|
+
min_delay=1.0, # Rango predeterminado
|
|
1859
|
+
max_delay=2.0, # Rango predeterminado
|
|
1683
1860
|
)
|
|
1684
1861
|
# Ruta persistente en /config/meteocat_files/files
|
|
1685
1862
|
files_folder = get_storage_dir(hass, "files")
|
|
@@ -1687,6 +1864,9 @@ class MeteocatQuotesFileCoordinator(DataUpdateCoordinator):
|
|
|
1687
1864
|
|
|
1688
1865
|
async def _async_update_data(self) -> Dict[str, Any]:
|
|
1689
1866
|
"""Carga los datos de quotes.json y devuelve el estado de las cuotas."""
|
|
1867
|
+
# 🔸 Añadimos un pequeño desfase aleatorio (1 a 2 segundos) basados en el BaseFileCoordinator
|
|
1868
|
+
await self._apply_random_delay()
|
|
1869
|
+
|
|
1690
1870
|
existing_data = await load_json_from_file(self.quotes_file)
|
|
1691
1871
|
|
|
1692
1872
|
if not existing_data:
|
|
@@ -1824,14 +2004,10 @@ class MeteocatLightningCoordinator(DataUpdateCoordinator):
|
|
|
1824
2004
|
_LOGGER.error("No se pudo obtener datos actualizados ni cargar datos en caché.")
|
|
1825
2005
|
return None
|
|
1826
2006
|
|
|
1827
|
-
class MeteocatLightningFileCoordinator(
|
|
2007
|
+
class MeteocatLightningFileCoordinator(BaseFileCoordinator):
|
|
1828
2008
|
"""Coordinator para manejar la actualización de los datos de rayos desde lightning_{region_id}.json."""
|
|
1829
2009
|
|
|
1830
|
-
def __init__(
|
|
1831
|
-
self,
|
|
1832
|
-
hass: HomeAssistant,
|
|
1833
|
-
entry_data: dict,
|
|
1834
|
-
):
|
|
2010
|
+
def __init__(self, hass: HomeAssistant, entry_data: dict):
|
|
1835
2011
|
"""
|
|
1836
2012
|
Inicializa el coordinador de rayos desde archivo.
|
|
1837
2013
|
|
|
@@ -1846,64 +2022,89 @@ class MeteocatLightningFileCoordinator(DataUpdateCoordinator):
|
|
|
1846
2022
|
files_folder = get_storage_dir(hass, "files")
|
|
1847
2023
|
self.lightning_file = files_folder / f"lightning_{self.region_id}.json"
|
|
1848
2024
|
|
|
2025
|
+
# ✅ Marca interna para recordar si ya se hizo reset con una fecha concreta
|
|
2026
|
+
self._last_reset_date: Optional[date] = None
|
|
2027
|
+
|
|
1849
2028
|
super().__init__(
|
|
1850
2029
|
hass,
|
|
1851
|
-
|
|
1852
|
-
name="Meteocat Lightning File Coordinator",
|
|
2030
|
+
name=f"{DOMAIN} Lightning File Coordinator",
|
|
1853
2031
|
update_interval=DEFAULT_LIGHTNING_FILE_UPDATE_INTERVAL,
|
|
2032
|
+
min_delay=1.0, # Rango predeterminado
|
|
2033
|
+
max_delay=2.0, # Rango predeterminado
|
|
1854
2034
|
)
|
|
1855
2035
|
|
|
1856
2036
|
async def _async_update_data(self) -> Dict[str, Any]:
    """Read lightning data from the JSON file and build the coordinator state.

    Returns a dict with the file's update timestamp plus aggregated
    discharge counts for the region and the configured town.  Whenever the
    file is missing, malformed or stale (from a previous day), a zeroed
    state is returned instead.
    """
    # Small random offset (1-2 s) inherited from BaseFileCoordinator so
    # several file coordinators do not hit the disk at the same instant.
    await self._apply_random_delay()

    raw = await load_json_from_file(self.lightning_file)
    if not raw:
        _LOGGER.warning("No se encontraron datos en %s.", self.lightning_file)
        return self._empty_state()

    # The file must carry its own update timestamp; without it we cannot
    # decide whether the counts belong to today.
    stamp = raw.get("actualitzat", {}).get("dataUpdate", "")
    if not stamp:
        _LOGGER.warning("El archivo %s no contiene campo 'dataUpdate'.", self.lightning_file)
        return self._empty_state()

    try:
        updated_at = datetime.fromisoformat(stamp).astimezone(TIMEZONE)
    except ValueError:
        _LOGGER.warning("Formato de fecha inválido en %s: %s", self.lightning_file, stamp)
        return self._empty_state()

    local_now = datetime.now(TIMEZONE)

    # Data from another day: report zeros, logging the full reset only the
    # first time we see that particular date.
    if updated_at.date() != local_now.date():
        if self._last_reset_date == updated_at.date():
            _LOGGER.debug(
                "Archivo de rayos aún sin actualizar (última: %s, hoy: %s). Manteniendo datos a cero.",
                updated_at.date(),
                local_now.date(),
            )
            return self._empty_state()

        # First reset detected for this stale date: remember it.
        _LOGGER.debug("Los datos de rayos son de un día diferente. Reiniciando valores a cero.")
        self._last_reset_date = updated_at.date()
        return self._empty_state()

    # Fresh data for today: drop the reset marker and aggregate the counts.
    self._last_reset_date = None
    payload = raw.get("dades", [])
    return {
        "actualizado": updated_at,
        "region": self._process_region_data(payload),
        "town": self._process_town_data(payload),
    }
|
|
1886
2087
|
|
|
1887
2088
|
def _process_region_data(self, data_list):
|
|
1888
2089
|
"""Suma los tipos de descargas para toda la región."""
|
|
1889
2090
|
region_counts = {
|
|
1890
|
-
"cc": 0,
|
|
1891
|
-
"cg-": 0,
|
|
2091
|
+
"cc": 0,
|
|
2092
|
+
"cg-": 0,
|
|
1892
2093
|
"cg+": 0
|
|
1893
2094
|
}
|
|
1894
2095
|
for town in data_list:
|
|
1895
2096
|
for discharge in town.get("descarregues", []):
|
|
1896
2097
|
if discharge["tipus"] in region_counts:
|
|
1897
2098
|
region_counts[discharge["tipus"]] += discharge["recompte"]
|
|
1898
|
-
|
|
2099
|
+
|
|
1899
2100
|
region_counts["total"] = sum(region_counts.values())
|
|
1900
2101
|
return region_counts
|
|
1901
2102
|
|
|
1902
2103
|
def _process_town_data(self, data_list):
|
|
1903
2104
|
"""Encuentra y suma los tipos de descargas para un municipio específico."""
|
|
1904
2105
|
town_counts = {
|
|
1905
|
-
"cc": 0,
|
|
1906
|
-
"cg-": 0,
|
|
2106
|
+
"cc": 0,
|
|
2107
|
+
"cg-": 0,
|
|
1907
2108
|
"cg+": 0
|
|
1908
2109
|
}
|
|
1909
2110
|
for town in data_list:
|
|
@@ -1912,7 +2113,7 @@ class MeteocatLightningFileCoordinator(DataUpdateCoordinator):
|
|
|
1912
2113
|
if discharge["tipus"] in town_counts:
|
|
1913
2114
|
town_counts[discharge["tipus"]] += discharge["recompte"]
|
|
1914
2115
|
break # Solo necesitamos datos de un municipio
|
|
1915
|
-
|
|
2116
|
+
|
|
1916
2117
|
town_counts["total"] = sum(town_counts.values())
|
|
1917
2118
|
return town_counts
|
|
1918
2119
|
|
|
@@ -1922,5 +2123,825 @@ class MeteocatLightningFileCoordinator(DataUpdateCoordinator):
|
|
|
1922
2123
|
"cc": 0,
|
|
1923
2124
|
"cg-": 0,
|
|
1924
2125
|
"cg+": 0,
|
|
1925
|
-
"total": 0
|
|
2126
|
+
"total": 0,
|
|
2127
|
+
}
|
|
2128
|
+
|
|
2129
|
+
def _empty_state(self) -> Dict[str, Any]:
    """Build a zeroed sensor state stamped with the current local time."""
    zeros = self._reset_data()
    return {
        "actualizado": datetime.now(TIMEZONE).isoformat(),
        "region": zeros,
        "town": zeros,
    }
|
|
2138
|
+
|
|
2139
|
+
class MeteocatSunCoordinator(DataUpdateCoordinator):
    """Coordinator that keeps the sun data computed by sun.py up to date."""

    def __init__(self, hass: HomeAssistant, entry_data: dict) -> None:
        """Initialise the Meteocat sun coordinator.

        Args:
            hass: Home Assistant instance.
            entry_data: Config-entry data with coordinates and town info.
        """
        self.town_id = entry_data.get("town_id")
        # Fall back to the service's home timezone when HA has none set.
        self.timezone_str = hass.config.time_zone or "Europe/Madrid"
        self.latitude = entry_data.get("latitude")
        self.longitude = entry_data.get("longitude")
        self.elevation = entry_data.get("altitude", 0.0)

        # Location wrapper used for every solar computation.
        place = LocationInfo(
            name=entry_data.get("town_name", "Municipio"),
            region="Spain",
            timezone=self.timezone_str,
            latitude=self.latitude,
            longitude=self.longitude,
            elevation=self.elevation,
        )
        self.location = Location(place)

        # Persistent JSON cache under the integration's storage folder.
        self.sun_file = get_storage_dir(hass, "files") / f"sun_{self.town_id.lower()}_data.json"

        super().__init__(
            hass,
            _LOGGER,
            name=f"{DOMAIN} Sun Coordinator",
            update_interval=DEFAULT_SUN_UPDATE_INTERVAL,
        )
|
|
2169
|
+
|
|
2170
|
+
async def _async_update_data(self) -> dict:
    """Refresh the solar JSON lazily, skipping writes when nothing changed.

    Compares the expected solar events (today's or tomorrow's, depending on
    which have already passed) and the current sun position against the
    stored file, and rewrites the file only when something differs.
    """
    _LOGGER.debug("☀️ Comprobando si es necesario actualizar los datos solares...")
    local_now = datetime.now(tz=ZoneInfo(self.timezone_str))
    today = local_now.date()

    # === 1) Expected solar events for today and tomorrow ===
    events_today = self.location.sun_events(date=today, local=True)
    events_tomorrow = self.location.sun_events(date=today + timedelta(days=1), local=True)

    event_names = (
        "dawn_astronomical", "dawn_nautical", "dawn_civil",
        "sunrise", "noon", "sunset",
        "dusk_civil", "dusk_nautical", "dusk_astronomical",
        "midnight",
    )

    def _pick_expected() -> dict:
        """Pick today's value per event, or tomorrow's once it has passed."""
        chosen = {}
        for name in event_names:
            moment = events_today.get(name)
            if moment and local_now >= moment:
                chosen[name] = events_tomorrow.get(name)
                _LOGGER.debug("☀️ %s ya pasó (%s), usando valor de mañana: %s",
                              name, moment, chosen[name])
            else:
                chosen[name] = moment
        # Daylight duration follows whichever sunset was selected.
        if chosen["sunset"] == events_tomorrow["sunset"]:
            chosen["daylight_duration"] = events_tomorrow["daylight_duration"]
        else:
            chosen["daylight_duration"] = events_today["daylight_duration"]
        return chosen

    expected = _pick_expected()

    # === 2) Load the stored file ===
    existing_data = await load_json_from_file(self.sun_file) or {}
    if not existing_data.get("dades"):
        _LOGGER.debug("☀️ No hay datos solares previos. Generando nuevos datos.")
        return await self._calculate_and_save_new_data(**expected)

    dades = existing_data["dades"][0]

    # Parse stored ISO strings back into datetimes (duration stays numeric).
    try:
        stored = {}
        for key, value in dades.items():
            if key not in expected:
                continue
            stored[key] = value if key == "daylight_duration" else datetime.fromisoformat(value)
    except Exception as err:
        _LOGGER.warning("☀️ Error al leer el archivo solar: %s", err)
        return await self._calculate_and_save_new_data(**expected)

    # === 3) Which events differ from what we expect? ===
    changed_events = {
        name: expected[name]
        for name in expected
        if stored.get(name) != expected[name]
    }

    # === 4) Current and +10 min sun positions, computed once ===
    current_pos = self.location.sun_position(dt=local_now, local=True)
    future_pos = self.location.sun_position(dt=local_now + timedelta(minutes=10), local=True)

    # === 5) Dynamic elevation threshold ===
    def _elevation_threshold() -> float:
        """Finer threshold near sunrise/sunset, coarser around noon."""
        sunrise = stored.get("sunrise")
        sunset = stored.get("sunset")
        noon = stored.get("noon")
        if sunrise and sunset and noon:
            near_sunrise = sunrise - timedelta(hours=1) <= local_now <= sunrise + timedelta(hours=1)
            near_sunset = sunset - timedelta(hours=1) <= local_now <= sunset + timedelta(hours=1)
            near_noon = noon - timedelta(hours=2) <= local_now <= noon + timedelta(hours=2)
            if near_sunrise or near_sunset:
                return 0.3  # higher sensitivity near the horizon
            if near_noon:
                return 1.0  # lower sensitivity around midday
        return 0.5  # baseline for the rest of the day

    # === 6) Dynamic validity window, clamped to sane bounds ===
    def _validity_window(current_elev: float, future_elev: float) -> timedelta:
        """Size the validity window by how fast the elevation is changing."""
        rate = abs(future_elev - current_elev) / 10  # °/min over the 10-minute lookahead
        _LOGGER.debug("☀️ Tasa de cambio de elevación: %.4f°/min", rate)

        if rate > 0.05:  # fast change: dawn/dusk
            window = timedelta(minutes=30)
        elif rate > 0.02:  # moderate change
            window = timedelta(minutes=60)
        else:  # slow change: night or midday
            window = timedelta(minutes=120)

        # Clamp between 15 and 180 minutes.
        return max(timedelta(minutes=15), min(window, timedelta(minutes=180)))

    sun_position_validity = _validity_window(
        current_pos["elevation"], future_pos["elevation"]
    )

    # === 7) Does the stored position need refreshing? ===
    position_needs_update = False
    last_pos_stamp = dades.get("sun_position_updated")

    if last_pos_stamp:
        try:
            last_pos_update = datetime.fromisoformat(last_pos_stamp)
            if last_pos_update.tzinfo is None:
                last_pos_update = last_pos_update.replace(tzinfo=ZoneInfo(self.timezone_str))

            time_expired = (local_now - last_pos_update) > sun_position_validity
            threshold = _elevation_threshold()

            previous_elev = dades.get("sun_elevation")
            elev_changed = (
                previous_elev is None
                or abs(current_pos["elevation"] - float(previous_elev)) > threshold
            )

            # Time expired AND elevation moved — or any event changed.
            position_needs_update = time_expired and elev_changed or bool(changed_events)

            _LOGGER.debug(
                "☀️ Verificación solar -> expirado=%s (validez=%s), elevación_cambió=%s (umbral=%.2f°), eventos_cambiados=%s, actualizar=%s",
                time_expired, sun_position_validity, elev_changed, threshold, bool(changed_events), position_needs_update
            )
        except Exception as err:
            _LOGGER.warning("☀️ Error al verificar posición solar previa: %s", err)
            position_needs_update = True
    else:
        position_needs_update = True

    # === 8) Nothing changed: keep the file as-is ===
    if not changed_events and not position_needs_update:
        _LOGGER.debug("☀️ Datos solares actuales coinciden con lo esperado. No se actualiza.")
        return existing_data

    # === 9) Something changed: write the update ===
    sun_pos = current_pos if position_needs_update else None
    if sun_pos:
        _LOGGER.debug("Posición solar actualizada: elev=%.2f°, azim=%.2f°, rising=%s",
                      sun_pos["elevation"], sun_pos["azimuth"], sun_pos["rising"])

    updated_data = stored.copy()
    updated_data.update(changed_events)

    # If events changed but the position was deemed fresh, compute it anyway
    # so the saved file never lacks a position after an event rollover.
    if changed_events and sun_pos is None:
        sun_pos = self.location.sun_position(dt=local_now, local=True)
        _LOGGER.debug("☀️ Posición solar recalculada tras cambio de eventos: elev=%.2f°, azim=%.2f°, rising=%s",
                      sun_pos["elevation"], sun_pos["azimuth"], sun_pos["rising"])

    _LOGGER.debug("☀️ Datos solares han cambiado. Actualizando: %s", changed_events)
    return await self._calculate_and_save_new_data(
        **updated_data,
        sun_pos=sun_pos,
        now=local_now
    )
|
|
2327
|
+
|
|
2328
|
+
async def _calculate_and_save_new_data(
    self,
    dawn_civil: Optional[datetime] = None,
    dawn_nautical: Optional[datetime] = None,
    dawn_astronomical: Optional[datetime] = None,
    sunrise: Optional[datetime] = None,
    noon: Optional[datetime] = None,
    sunset: Optional[datetime] = None,
    dusk_civil: Optional[datetime] = None,
    dusk_nautical: Optional[datetime] = None,
    dusk_astronomical: Optional[datetime] = None,
    midnight: Optional[datetime] = None,
    daylight_duration: Optional[float] = None,
    sun_pos: Optional[dict] = None,
    now: Optional[datetime] = None,
) -> dict:
    """Persist the solar data, reusing stored values for anything omitted.

    Args:
        dawn_civil..midnight: Event datetimes; ``None`` means "keep the stored
            value" — unless *all* events are ``None``, in which case today's
            full event set is recomputed from scratch.
        daylight_duration: Hours of daylight matching the chosen sunrise/sunset.
        sun_pos: Current sun position dict (elevation/azimuth/...), if computed.
        now: Reference timestamp from the caller; defaults to the current
            local time when not supplied.

    Returns:
        The JSON structure written to disk, or the cached file (or ``None``)
        on failure.
    """
    try:
        # Bug fix: honour a caller-supplied ``now``.  Previously this was
        # unconditionally overwritten, so ``sun_position_updated`` could
        # disagree with the timestamp the caller based its decisions on.
        if now is None:
            now = datetime.now(tz=ZoneInfo(self.timezone_str))
        today = now.date()

        # Load existing data so unchanged values can be preserved.
        existing_data = await load_json_from_file(self.sun_file) or {}
        existing_dades = existing_data.get("dades", [{}])[0] if existing_data else {}

        event_keys = (
            "dawn_civil", "dawn_nautical", "dawn_astronomical",
            "sunrise", "noon", "sunset",
            "dusk_civil", "dusk_nautical", "dusk_astronomical",
            "midnight",
        )

        # Convert stored ISO strings back to datetimes (duration stays numeric).
        try:
            saved = {
                key: datetime.fromisoformat(existing_dades[key]) if existing_dades.get(key) else None
                for key in event_keys
            }
            saved["daylight_duration"] = existing_dades.get("daylight_duration")
        except Exception as e:
            _LOGGER.warning("☀️ Error al leer datos existentes, recalculando todo: %s", e)
            saved = {}

        if not any([dawn_civil, dawn_nautical, dawn_astronomical, sunrise, noon, sunset,
                    dusk_civil, dusk_nautical, dusk_astronomical, midnight]):
            # Nothing supplied at all: compute today's full event set.
            events = self.location.sun_events(date=today, local=True)
            dawn_civil = events["dawn_civil"]
            dawn_nautical = events["dawn_nautical"]
            dawn_astronomical = events["dawn_astronomical"]
            sunrise = events["sunrise"]
            noon = events["noon"]
            sunset = events["sunset"]
            dusk_civil = events["dusk_civil"]
            dusk_nautical = events["dusk_nautical"]
            dusk_astronomical = events["dusk_astronomical"]
            midnight = events["midnight"]
            daylight_duration = events["daylight_duration"]
        else:
            # Fill every missing event from the stored file.
            dawn_civil = dawn_civil if dawn_civil is not None else saved.get("dawn_civil")
            dawn_nautical = dawn_nautical if dawn_nautical is not None else saved.get("dawn_nautical")
            dawn_astronomical = dawn_astronomical if dawn_astronomical is not None else saved.get("dawn_astronomical")
            sunrise = sunrise if sunrise is not None else saved.get("sunrise")
            noon = noon if noon is not None else saved.get("noon")
            sunset = sunset if sunset is not None else saved.get("sunset")
            dusk_civil = dusk_civil if dusk_civil is not None else saved.get("dusk_civil")
            dusk_nautical = dusk_nautical if dusk_nautical is not None else saved.get("dusk_nautical")
            dusk_astronomical = dusk_astronomical if dusk_astronomical is not None else saved.get("dusk_astronomical")
            midnight = midnight if midnight is not None else saved.get("midnight")
            daylight_duration = daylight_duration if daylight_duration is not None else saved.get("daylight_duration")

        # Keep daylight_duration coherent when sunrise/sunset moved.
        if sunrise and sunset and (sunrise != saved.get("sunrise") or sunset != saved.get("sunset")):
            daylight_duration = (sunset - sunrise).total_seconds() / 3600

        # Build the serialisable payload.
        dades_dict = {
            "dawn_civil": dawn_civil.isoformat() if dawn_civil else None,
            "dawn_nautical": dawn_nautical.isoformat() if dawn_nautical else None,
            "dawn_astronomical": dawn_astronomical.isoformat() if dawn_astronomical else None,
            "sunrise": sunrise.isoformat() if sunrise else None,
            "noon": noon.isoformat() if noon else None,
            "sunset": sunset.isoformat() if sunset else None,
            "dusk_civil": dusk_civil.isoformat() if dusk_civil else None,
            "dusk_nautical": dusk_nautical.isoformat() if dusk_nautical else None,
            "dusk_astronomical": dusk_astronomical.isoformat() if dusk_astronomical else None,
            "midnight": midnight.isoformat() if midnight else None,
            "daylight_duration": daylight_duration,
        }

        # Attach the sun position snapshot when available.
        if sun_pos:
            dades_dict.update({
                "sun_elevation": round(sun_pos["elevation"], 2),
                "sun_azimuth": round(sun_pos["azimuth"], 2),
                "sun_horizon_position": sun_pos["horizon_position"],
                "sun_rising": sun_pos["rising"],
                "sun_position_updated": now.isoformat()
            })

        # Persist to disk.
        data_with_timestamp = {
            "actualitzat": {"dataUpdate": now.isoformat()},
            "dades": [dades_dict],
        }

        await save_json_to_file(data_with_timestamp, self.sun_file)
        _LOGGER.info("Archivo solar actualizado (eventos: %s, posición: %s)",
                     dawn_civil is not None, bool(sun_pos))

        return data_with_timestamp

    except Exception as err:
        _LOGGER.exception("Error al calcular/guardar los datos solares: %s", err)
        # Best effort: fall back to whatever is already cached on disk.
        cached = await load_json_from_file(self.sun_file)
        if cached:
            _LOGGER.warning("Usando datos solares en caché por error.")
            return cached
        return None
|
|
2448
|
+
|
|
2449
|
+
class MeteocatSunFileCoordinator(BaseFileCoordinator):
    """Coordinator that serves sun data read from ``sun_{town_id}_data.json``."""

    def __init__(self, hass: HomeAssistant, entry_data: dict) -> None:
        """Set up the file-backed sun coordinator.

        Args:
            hass: Home Assistant instance.
            entry_data: Config-entry data; must contain ``town_id``.
        """
        self.town_id = entry_data["town_id"]
        self.timezone_str = hass.config.time_zone or "Europe/Madrid"

        # Persistent path under /config/meteocat_files/files.
        self.sun_file = get_storage_dir(hass, "files") / f"sun_{self.town_id.lower()}_data.json"

        super().__init__(
            hass,
            name=f"{DOMAIN} Sun File Coordinator",
            update_interval=DEFAULT_SUN_FILE_UPDATE_INTERVAL,
            min_delay=1.0,  # default jitter range
            max_delay=2.0,  # default jitter range
        )

    async def _async_update_data(self) -> dict[str, Any]:
        """Read the sun file, resetting the state when its first event is stale."""
        # Small random offset (1-2 s) from BaseFileCoordinator so several
        # file coordinators do not read the disk at the same instant.
        await self._apply_random_delay()

        try:
            payload = await load_json_from_file(self.sun_file)
            if not payload or not payload.get("dades"):
                _LOGGER.warning("Archivo solar vacío: %s", self.sun_file)
                return self._reset_data()

            dades = payload["dades"][0]
            update_str = payload.get("actualitzat", {}).get("dataUpdate")
            update_dt = datetime.fromisoformat(update_str) if update_str else None
            now = datetime.now(ZoneInfo(self.timezone_str))
            today = now.date()

            # The first event of the day (dawn_astronomical) tells us which
            # day the file covers.
            dawn_astro_str = dades.get("dawn_astronomical")
            if not dawn_astro_str:
                _LOGGER.debug("No hay 'dawn_astronomical'. Forzando reset.")
                return self._reset_data()

            try:
                event_date = datetime.fromisoformat(dawn_astro_str).date()
            except ValueError as e:
                _LOGGER.warning("Formato inválido en dawn_astronomical: %s → %s", dawn_astro_str, e)
                return self._reset_data()

            # Older than yesterday → stale file, reset to nulls.
            if event_date < (today - timedelta(days=1)):
                _LOGGER.info(
                    "Datos solares muy antiguos: dawn_astronomical es del %s (hoy es %s). Reiniciando.",
                    event_date, today
                )
                return self._reset_data()

            # Tomorrow's data: keep the current values until the next refresh.
            if event_date > today:
                _LOGGER.debug(
                    "Datos solares son de mañana (%s). Manteniendo valores actuales hasta próxima actualización.",
                    event_date
                )

            # Valid data — expose every stored field as-is.
            field_names = (
                "dawn_civil", "dawn_nautical", "dawn_astronomical",
                "sunrise", "noon", "sunset",
                "dusk_civil", "dusk_nautical", "dusk_astronomical",
                "midnight", "daylight_duration",
                "sun_elevation", "sun_azimuth", "sun_horizon_position",
                "sun_rising", "sun_position_updated",
            )
            result = {name: dades.get(name) for name in field_names}
            result["actualizado"] = update_dt.isoformat() if update_dt else now.isoformat()

            _LOGGER.debug("Datos solares válidos para hoy (%s)", today)
            return result

        except Exception as e:
            _LOGGER.error("Error crítico en SunFileCoordinator: %s", e)
            return self._reset_data()

    def _reset_data(self):
        """Return a state with every sun field nulled, stamped with now."""
        stamp = datetime.now(ZoneInfo(self.timezone_str)).isoformat()
        blank = {
            field: None
            for field in (
                "sunrise", "sunset", "noon",
                "dawn_civil", "dusk_civil",
                "dawn_nautical", "dusk_nautical",
                "dawn_astronomical", "dusk_astronomical",
                "midnight", "daylight_duration",
                "sun_elevation", "sun_azimuth",
                "sun_horizon_position", "sun_rising",
            )
        }
        blank["actualizado"] = stamp
        blank["sun_position_updated"] = stamp
        return blank
|
|
2574
|
+
|
|
2575
|
+
class MeteocatMoonCoordinator(DataUpdateCoordinator):
|
|
2576
|
+
"""Coordinator para manejar la actualización de los datos de la luna desde moon.py."""
|
|
2577
|
+
|
|
2578
|
+
def __init__(self, hass: HomeAssistant, entry_data: dict) -> None:
    """Initialise the Meteocat moon coordinator.

    Args:
        hass: Home Assistant instance.
        entry_data: Config-entry data with coordinates and town info.
    """
    self.town_id = entry_data.get("town_id")
    self.timezone_str = hass.config.time_zone or "Europe/Madrid"
    self.latitude = entry_data.get("latitude")
    self.longitude = entry_data.get("longitude")

    # Location descriptor used for the lunar calculations.
    self.location = LocationInfo(
        name=entry_data.get("town_name", "Municipio"),
        region="Spain",
        timezone=self.timezone_str,
        latitude=self.latitude,
        longitude=self.longitude,
    )

    # Persistent JSON cache under the integration's storage folder.
    self.moon_file = get_storage_dir(hass, "files") / f"moon_{self.town_id.lower()}_data.json"

    super().__init__(
        hass,
        _LOGGER,
        name=f"{DOMAIN} Moon Coordinator",
        update_interval=DEFAULT_MOON_UPDATE_INTERVAL,
    )
|
|
2601
|
+
|
|
2602
|
+
async def _async_update_data(self) -> dict:
    """Decide whether the cached moon data is still valid or must be recomputed.

    Returns either a minimal ``{"actualizado": ...}`` marker when nothing
    changed, or the result of ``_calculate_and_save_new_data`` after a
    partial or full refresh.
    """
    _LOGGER.debug("🌙 Iniciando actualización de datos de la luna...")
    now = datetime.now(tz=ZoneInfo(self.timezone_str))
    existing_data = await load_json_from_file(self.moon_file) or {}

    # Missing file or incomplete JSON → recompute everything for today.
    if (
        not existing_data
        or "dades" not in existing_data
        or not existing_data["dades"]
        or "actualitzat" not in existing_data
        or "dataUpdate" not in existing_data["actualitzat"]
    ):
        _LOGGER.warning("🌙 Datos previos incompletos o ausentes: calculando todos los datos para hoy.")
        return await self._calculate_and_save_new_data(today_only=True, existing_data=existing_data)

    dades = existing_data["dades"][0]
    last_lunar_update_date_str = existing_data["actualitzat"].get("last_lunar_update_date")

    # Bug fix: parse the stored date defensively.  The previous code built
    # f"{value}T00:00:00", which raised an uncaught ValueError (outside any
    # try block) whenever the stored value was already a full ISO datetime.
    # Accept both "YYYY-MM-DD" and full datetimes, with yesterday as fallback.
    last_lunar_update_date = now.date() - timedelta(days=1)
    if last_lunar_update_date_str:
        try:
            last_lunar_update_date = date.fromisoformat(str(last_lunar_update_date_str)[:10])
        except ValueError:
            _LOGGER.warning(
                "🌙 Formato inválido en last_lunar_update_date: %s. Usando ayer como referencia.",
                last_lunar_update_date_str,
            )

    # Check whether the stored rise/set events are obsolete.
    try:
        moonrise_str = dades.get("moonrise")
        moonset_str = dades.get("moonset")
        moonrise = datetime.fromisoformat(moonrise_str) if moonrise_str else None
        moonset = datetime.fromisoformat(moonset_str) if moonset_str else None

        # Stale when the recorded date and both events (if any) are from a
        # past day.
        events_are_old = (
            (moonrise is None or moonrise.date() < now.date())
            and (moonset is None or moonset.date() < now.date())
        )
        if last_lunar_update_date < now.date() and events_are_old:
            _LOGGER.debug(
                "🌙 Datos obsoletos: last_lunar_update_date=%s, moonrise=%s, moonset=%s. Calculando para hoy.",
                last_lunar_update_date, moonrise, moonset
            )
            return await self._calculate_and_save_new_data(today_only=True, existing_data=existing_data)
    except Exception as e:
        _LOGGER.warning("🌙 Error interpretando fechas previas: %s", e)
        return await self._calculate_and_save_new_data(today_only=True, existing_data=existing_data)

    # Day rolled over → refresh the phase/illumination block.
    if now.date() > last_lunar_update_date:
        _LOGGER.debug("🌙 Fecha actual superior a last_lunar_update_date: actualizando datos lunares.")
        return await self._calculate_and_save_new_data(
            update_type="update_lunar_data",
            existing_data=existing_data
        )

    _LOGGER.debug(
        "🌙 Estado actual → now=%s | moonrise=%s | moonset=%s",
        now.isoformat(), moonrise, moonset
    )

    # NOTE(review): the comparisons below assume moonrise/moonset are stored
    # as timezone-aware ISO strings; a naive timestamp would raise TypeError.
    if moonrise is None and moonset is None:
        _LOGGER.debug("🌙 Ambos eventos None: verificando si datos son actuales.")
        if last_lunar_update_date == now.date():
            _LOGGER.debug("🌙 Datos de hoy sin eventos: no se actualiza.")
            return {"actualizado": existing_data["actualitzat"]["dataUpdate"]}
        return await self._calculate_and_save_new_data(today_only=True, existing_data=existing_data)

    elif moonrise is None:
        _LOGGER.debug("🌙 No moonrise: tratando moonset como único evento.")
        if now < moonset:
            _LOGGER.debug("🌙 Antes del moonset: no se actualiza.")
            return {"actualizado": existing_data["actualitzat"]["dataUpdate"]}
        _LOGGER.debug("🌙 Después del moonset: actualizar moonset para mañana.")
        return await self._calculate_and_save_new_data(update_type="update_set_tomorrow", existing_data=existing_data)

    elif moonset is None:
        _LOGGER.debug("🌙 No moonset: tratando moonrise como único evento.")
        if now < moonrise:
            _LOGGER.debug("🌙 Antes del moonrise: no se actualiza.")
            return {"actualizado": existing_data["actualitzat"]["dataUpdate"]}
        _LOGGER.debug("🌙 Después del moonrise: actualizar moonrise para mañana.")
        return await self._calculate_and_save_new_data(update_type="update_rise_tomorrow", existing_data=existing_data)

    else:
        first_event = min(moonrise, moonset)
        last_event = max(moonrise, moonset)
        first_is_rise = first_event == moonrise

        if now < first_event:
            _LOGGER.debug("🌙 Momento actual antes del primer evento → no se actualiza nada.")
            return {"actualizado": existing_data["actualitzat"]["dataUpdate"]}

        if now < last_event:
            if first_is_rise:
                _LOGGER.debug("🌙 Después del moonrise pero antes del moonset → actualizar solo moonrise para mañana.")
                return await self._calculate_and_save_new_data(update_type="update_rise_tomorrow", existing_data=existing_data)
            _LOGGER.debug("🌙 Después del moonset pero antes del moonrise → actualizar solo moonset para mañana.")
            return await self._calculate_and_save_new_data(update_type="update_set_tomorrow", existing_data=existing_data)

        _LOGGER.debug("🌙 Después de ambos eventos → actualizar moonrise y moonset para mañana.")
        return await self._calculate_and_save_new_data(update_type="update_both_tomorrow", existing_data=existing_data)
|
|
2708
|
+
|
|
2709
|
+
async def _calculate_and_save_new_data(self, today_only: bool = False, update_type: str = None, existing_data: dict = None):
    """Calculate and persist fresh moon data according to the update type.

    Args:
        today_only: When True, always keep today's rise/set events.
        update_type: Which part of the cached data to refresh. One of
            ``"update_lunar_data"`` (keep stored rise/set, refresh phase data),
            ``"update_set_tomorrow"``, ``"update_rise_tomorrow"``,
            ``"update_both_tomorrow"``, or ``None`` (full refresh with
            today's events).
        existing_data: Previously stored JSON payload; used to preserve the
            event that is not being refreshed.

    Returns:
        ``{"actualizado": <iso timestamp>}`` on success, the cached timestamp
        when calculation fails but a usable cache exists, or ``None``.
    """
    try:
        tz = ZoneInfo(self.timezone_str)
        now = datetime.now(tz=tz)
        today = now.date()
        next_day = today + timedelta(days=1)
        next_next_day = today + timedelta(days=2)

        _LOGGER.debug("🌙 Calculando nuevos datos (update_type=%s)...", update_type)

        # 🟣 Phase, illumination, distance and angular diameter (always for today).
        moon_phase_value = moon_phase(today)
        moon_day_today = moon_day(today)
        lunation = lunation_number(today)
        illum_percentage = round(illuminated_percentage(today), 2)
        distance = round(moon_distance(today), 0)
        angular_diameter = round(moon_angular_diameter(today), 2)
        moon_phase_name = get_moon_phase_name(today)
        lunation_duration = get_lunation_duration(today)

        moonrise_final = None
        moonset_final = None

        # 🟢 Case: refresh only the lunar figures, keeping the stored rise/set events.
        if update_type == "update_lunar_data":
            # Guard against a missing/empty "dades" list (plain indexing raised
            # IndexError when the list existed but was empty, and AttributeError
            # when existing_data was None).
            dades = (existing_data.get("dades") or [{}])[0] if existing_data else {}
            moonrise_str = dades.get("moonrise")
            moonset_str = dades.get("moonset")
            moonrise_final = datetime.fromisoformat(moonrise_str) if moonrise_str else None
            moonset_final = datetime.fromisoformat(moonset_str) if moonset_str else None

            # If either stored event is missing, fall back to freshly computed ones.
            if moonrise_final is None or moonset_final is None:
                _LOGGER.debug("🌙 Falta algún evento lunar, calculando de fallback.")
                (moonrise_today, moonset_today,
                 moonrise_tomorrow, moonset_tomorrow,
                 moonrise_next_tomorrow, moonset_next_tomorrow) = self._local_moon_events(
                    tz, today, next_day, next_next_day
                )

                # Pick the nearest available event (datetimes are always truthy,
                # so chained `or` is equivalent to the explicit conditionals).
                moonrise_final = moonrise_final or moonrise_today or moonrise_tomorrow or moonrise_next_tomorrow
                moonset_final = moonset_final or moonset_today or moonset_tomorrow or moonset_next_tomorrow
                _LOGGER.debug("🌙 Fallback: usando moonrise=%s y moonset=%s", moonrise_final, moonset_final)

        else:
            (moonrise_today, moonset_today,
             moonrise_tomorrow, moonset_tomorrow,
             moonrise_next_tomorrow, moonset_next_tomorrow) = self._local_moon_events(
                tz, today, next_day, next_next_day
            )

            # 🧭 Determine final values depending on the context.
            if today_only:
                moonrise_final = moonrise_today
                moonset_final = moonset_today
            elif update_type == "update_set_tomorrow":
                # Keep the stored moonrise if present; refresh only moonset.
                stored = (existing_data or {}).get("dades") or []
                if stored and "moonrise" in stored[0]:
                    moonrise_str = stored[0]["moonrise"]
                    moonrise_final = datetime.fromisoformat(moonrise_str) if moonrise_str else None
                else:
                    moonrise_final = moonrise_today
                moonset_final = moonset_tomorrow or moonset_next_tomorrow
                _LOGGER.debug("🌙 Actualizado moonset para mañana: %s (manteniendo moonrise: %s)", moonset_final, moonrise_final)
            elif update_type == "update_rise_tomorrow":
                # Keep the stored moonset if present; refresh only moonrise.
                stored = (existing_data or {}).get("dades") or []
                if stored and "moonset" in stored[0]:
                    moonset_str = stored[0]["moonset"]
                    moonset_final = datetime.fromisoformat(moonset_str) if moonset_str else None
                else:
                    moonset_final = moonset_today
                moonrise_final = moonrise_tomorrow or moonrise_next_tomorrow
                _LOGGER.debug("🌙 Actualizado moonrise para mañana: %s (manteniendo moonset: %s)", moonrise_final, moonset_final)
            elif update_type == "update_both_tomorrow":
                moonrise_final = moonrise_tomorrow or moonrise_next_tomorrow
                moonset_final = moonset_tomorrow or moonset_next_tomorrow
                _LOGGER.debug("🌙 Actualizados moonrise y moonset para mañana: %s / %s", moonrise_final, moonset_final)
            else:
                moonrise_final = moonrise_today
                moonset_final = moonset_today

        # If an event is still None (no occurrence that day), try the next days.
        # NOTE: when these checks fire, the six per-day events are guaranteed to
        # be bound (either the fallback above ran, or the else-branch did).
        if moonrise_final is None:
            moonrise_final = moonrise_tomorrow or moonrise_next_tomorrow
            if moonrise_final:
                _LOGGER.debug("🌙 Moonrise era None: usando el del día siguiente o posterior: %s", moonrise_final)
        if moonset_final is None:
            moonset_final = moonset_tomorrow or moonset_next_tomorrow
            if moonset_final:
                _LOGGER.debug("🌙 Moonset era None: usando el del día siguiente o posterior: %s", moonset_final)

        data_with_timestamp = {
            "actualitzat": {
                "dataUpdate": now.isoformat(),
                # 🟢 last_lunar_update_date is reset only when the lunar figures
                # themselves were refreshed for today; otherwise the stored date
                # is carried over (today as a last resort).
                "last_lunar_update_date": (
                    today.isoformat()
                    if update_type in ("update_lunar_data", None) or today_only
                    else (existing_data or {}).get("actualitzat", {}).get("last_lunar_update_date", today.isoformat())
                ),
            },
            "dades": [
                {
                    "moon_day": moon_day_today,
                    "moon_phase": round(moon_phase_value, 2),
                    "moon_phase_name": moon_phase_name,
                    "illuminated_percentage": illum_percentage,
                    "moon_distance": distance,
                    "moon_angular_diameter": angular_diameter,
                    "lunation": lunation,
                    "lunation_duration": lunation_duration,
                    "moonrise": moonrise_final.isoformat() if moonrise_final else None,
                    "moonset": moonset_final.isoformat() if moonset_final else None,
                }
            ],
        }

        await save_json_to_file(data_with_timestamp, self.moon_file)
        _LOGGER.debug("🌙 Datos de luna guardados correctamente → %s", data_with_timestamp)
        return {"actualizado": data_with_timestamp["actualitzat"]["dataUpdate"]}

    except Exception as err:
        _LOGGER.exception("🌙 Error al calcular datos de la luna: %s", err)
        cached_data = await load_json_from_file(self.moon_file)
        # Guard the nested lookup so a corrupt cache cannot raise inside the
        # error handler itself.
        cached_ts = (cached_data or {}).get("actualitzat", {}).get("dataUpdate")
        if cached_ts:
            _LOGGER.warning("🌙 Se usaron datos en caché por error de cálculo.")
            return {"actualizado": cached_ts}
        _LOGGER.error("🌙 No se pudo calcular ni cargar datos en caché de luna.")
        return None

def _local_moon_events(self, tz, today, next_day, next_next_day):
    """Return the six rise/set events for the three days, converted to *tz*.

    Yields (moonrise, moonset) for today, tomorrow and the day after tomorrow,
    flattened into a 6-tuple; an entry is None when the event does not occur
    on that day. Extracted to remove a verbatim ~30-line duplication.
    """
    events = []
    for day in (today, next_day, next_next_day):
        rise, set_ = moon_rise_set(self.latitude, self.longitude, day)
        events.append(rise.astimezone(tz) if rise else None)
        events.append(set_.astimezone(tz) if set_ else None)
    return tuple(events)
|
|
2879
|
+
class MeteocatMoonFileCoordinator(BaseFileCoordinator):
|
|
2880
|
+
"""Coordinator para manejar la actualización de los datos de la luna desde moon_{town_id}.json."""
|
|
2881
|
+
|
|
2882
|
+
def __init__(self, hass: HomeAssistant, entry_data: dict):
    """Initialize the coordinator for the per-town moon JSON file."""
    town_id = entry_data["town_id"]
    self.town_id = town_id
    # Fall back to Madrid when Home Assistant has no timezone configured.
    self.timezone_str = hass.config.time_zone or "Europe/Madrid"
    self.moon_file = get_storage_dir(hass, "files") / f"moon_{town_id.lower()}_data.json"

    super().__init__(
        hass,
        name=f"{DOMAIN} Moon File Coordinator",
        update_interval=DEFAULT_MOON_FILE_UPDATE_INTERVAL,
        min_delay=1.0,  # default jitter range (seconds)
        max_delay=2.0,
    )
|
2896
|
+
|
|
2897
|
+
async def _async_update_data(self) -> Dict[str, Any]:
    """Load the moon data from the JSON file and return it as a flat dict.

    The cached values are returned as-is, even when they are stale; freshness
    is handled elsewhere.
    """
    # 🔸 Small random stagger (1–2 s) inherited from BaseFileCoordinator.
    await self._apply_random_delay()

    existing_data = await load_json_from_file(self.moon_file)

    # Field names shared by both the empty and the populated payloads.
    field_names = (
        "moon_day",
        "moon_phase",
        "moon_phase_name",
        "illuminated_percentage",
        "moon_distance",
        "moon_angular_diameter",
        "lunation",
        "lunation_duration",
        "moonrise",
        "moonset",
    )

    if not existing_data or not existing_data.get("dades"):
        _LOGGER.warning("No se encontraron datos en %s.", self.moon_file)
        return {
            "actualizado": datetime.now(ZoneInfo(self.timezone_str)).isoformat(),
            "last_lunar_update_date": None,
            **{name: None for name in field_names},
        }

    dades = existing_data["dades"][0]
    actualitzat = existing_data.get("actualitzat", {})
    update_date_str = actualitzat.get("dataUpdate", "")

    # Round-trip through datetime to normalize the stored timestamp; fall back
    # to "now" when the file carries no update timestamp.
    if update_date_str:
        update_date = datetime.fromisoformat(update_date_str)
    else:
        update_date = datetime.now(ZoneInfo(self.timezone_str))

    result = {
        "actualizado": update_date.isoformat(),
        "last_lunar_update_date": actualitzat.get("last_lunar_update_date", ""),
    }
    result.update({name: dades.get(name) for name in field_names})
    return result
|