meteocat 2.2.2 → 2.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/README.md +1 -1
- package/custom_components/meteocat/__init__.py +16 -2
- package/custom_components/meteocat/config_flow.py +28 -0
- package/custom_components/meteocat/const.py +6 -0
- package/custom_components/meteocat/coordinator.py +225 -2
- package/custom_components/meteocat/manifest.json +2 -2
- package/custom_components/meteocat/sensor.py +177 -2
- package/custom_components/meteocat/strings.json +46 -0
- package/custom_components/meteocat/translations/ca.json +46 -0
- package/custom_components/meteocat/translations/en.json +46 -0
- package/custom_components/meteocat/translations/es.json +46 -0
- package/custom_components/meteocat/version.py +1 -1
- package/images/api_limits.png +0 -0
- package/images/diagnostic_sensors.png +0 -0
- package/images/dynamic_sensors.png +0 -0
- package/package.json +1 -1
- package/poetry.lock +594 -582
- package/pyproject.toml +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,21 @@
+## [2.2.4](https://github.com/figorr/meteocat/compare/v2.2.3...v2.2.4) (2025-02-09)
+
+
+### Bug Fixes
+
+* 2.2.4 ([59183ea](https://github.com/figorr/meteocat/commit/59183ea082f6d963e46d3f8a51e0867b3f32060d))
+* fix valid lightning data for download from API ([4e4a8ae](https://github.com/figorr/meteocat/commit/4e4a8ae110b72b6e6ff560921f88ea7fb4640a29))
+
+## [2.2.3](https://github.com/figorr/meteocat/compare/v2.2.2...v2.2.3) (2025-02-08)
+
+
+### Bug Fixes
+
+* 2.2.3 ([ce224a0](https://github.com/figorr/meteocat/commit/ce224a097e7c879c46985ff3a16143de1b822006))
+* bump meteocatpy to 1.0.1 ([7119065](https://github.com/figorr/meteocat/commit/711906534fca5c61a9a3ab968a3572e70f05929e))
+* new lightning sensors ([8528f57](https://github.com/figorr/meteocat/commit/8528f57d688f7fc21f66715ffeac086895afd1aa))
+* update README ([10c86e5](https://github.com/figorr/meteocat/commit/10c86e5e373c661cf23524421c756374711d89fe))
+
 ## [2.2.2](https://github.com/figorr/meteocat/compare/v2.2.1...v2.2.2) (2025-02-04)
package/README.md
CHANGED
@@ -56,7 +56,7 @@ Once you pick the town you will be prompted to pick a station from the list. The
 
 [image]
 
-Then you will be asked to set the
+Then you will be asked to set the API limits from your plan.
 
 [image]
 
package/custom_components/meteocat/__init__.py
CHANGED
@@ -24,6 +24,8 @@ from .coordinator import (
     MeteocatAlertsRegionCoordinator,
     MeteocatQuotesCoordinator,
     MeteocatQuotesFileCoordinator,
+    MeteocatLightningCoordinator,
+    MeteocatLightningFileCoordinator,
 )
 
 from .const import DOMAIN, PLATFORMS
@@ -31,7 +33,7 @@ from .const import DOMAIN, PLATFORMS
 _LOGGER = logging.getLogger(__name__)
 
 # Versión
-__version__ = "2.2.2"
+__version__ = "2.2.4"
 
 # Definir el esquema de configuración CONFIG_SCHEMA
 CONFIG_SCHEMA = vol.Schema(
@@ -137,6 +139,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         quotes_file_coordinator = MeteocatQuotesFileCoordinator(hass=hass, entry_data=entry_data)
         await quotes_file_coordinator.async_config_entry_first_refresh()
 
+        lightning_coordinator = MeteocatLightningCoordinator(hass=hass, entry_data=entry_data)
+        await lightning_coordinator.async_config_entry_first_refresh()
+
+        lightning_file_coordinator = MeteocatLightningFileCoordinator(hass=hass, entry_data=entry_data)
+        await lightning_file_coordinator.async_config_entry_first_refresh()
+
     except Exception as err:  # Capturar todos los errores
         _LOGGER.exception(f"Error al inicializar los coordinadores: {err}")
         return False
@@ -157,6 +165,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         "alerts_region_coordinator": alerts_region_coordinator,
         "quotes_coordinator": quotes_coordinator,
         "quotes_file_coordinator": quotes_file_coordinator,
+        "lightning_coordinator": lightning_coordinator,
+        "lightning_file_coordinator": lightning_file_coordinator,
         **entry_data,
     }
 
@@ -223,9 +233,12 @@ async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
     alerts_file = files_folder / "alerts.json"
     alerts_region_file = files_folder / f"alerts_{region_id}.json"
 
-    #
+    # Archivo JSON de cuotas
     quotes_file = files_folder / f"quotes.json"
 
+    # Archivo JSON de rayos
+    lightning_file = files_folder / f"lightning_{region_id}.json"
+
     # Validar la ruta base
     if not custom_components_path.exists():
         _LOGGER.warning(f"La ruta {custom_components_path} no existe. No se realizará la limpieza.")
@@ -241,5 +254,6 @@ async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
     safe_remove(alerts_file)
     safe_remove(quotes_file)
     safe_remove(alerts_region_file)
+    safe_remove(lightning_file)
     safe_remove(assets_folder, is_folder=True)
     safe_remove(files_folder, is_folder=True)
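The two new coordinators are created during `async_setup_entry` and stored alongside the existing ones, so entity platforms can subscribe to them. As an illustration only (the package's own `sensor.py` changes, +177 lines, are not shown in this section), a consumer of `MeteocatLightningFileCoordinator` could look like the sketch below; the `"town"` / `"total"` keys match the data returned by that coordinator in the coordinator.py diff further down.

```python
# Illustrative sketch only - not code from this package. It assumes a
# MeteocatLightningFileCoordinator instance whose data dict has the shape
# {"actualizado": ..., "region": {...}, "town": {"cc": ..., "cg-": ..., "cg+": ..., "total": ...}},
# as returned by _async_update_data() in the coordinator.py diff below.
from homeassistant.components.sensor import SensorEntity
from homeassistant.helpers.update_coordinator import CoordinatorEntity


class TownLightningStrikesSensor(CoordinatorEntity, SensorEntity):
    """Reports the total lightning strikes counted for the configured town."""

    _attr_name = "Lightning strikes (town)"

    @property
    def native_value(self):
        # The file coordinator aggregates today's strike counts; "total" sums
        # the cc / cg- / cg+ discharge types.
        return self.coordinator.data["town"]["total"]
```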
package/custom_components/meteocat/config_flow.py
CHANGED
@@ -75,6 +75,7 @@ class MeteocatConfigFlow(ConfigFlow, domain=DOMAIN):
         self.variable_id: str | None = None
         self.station_id: str | None = None
         self.station_name: str | None = None
+        self.region_id: str | None = None
         self._cache = {}
 
     async def fetch_and_save_quotes(self, api_key):
@@ -159,6 +160,29 @@ class MeteocatConfigFlow(ConfigFlow, domain=DOMAIN):
             await file.write(json.dumps(initial_data, ensure_ascii=False, indent=4))
 
         _LOGGER.info("Archivo alerts.json creado en %s", alerts_file)
+
+    async def create_lightning_file(self):
+        """Crea el archivo lightning_{self.region_id}.json si no existe."""
+        lightning_dir = os.path.join(
+            self.hass.config.path(),
+            "custom_components",
+            "meteocat",
+            "files"
+        )
+        os.makedirs(lightning_dir, exist_ok=True)
+        lightning_file = os.path.join(lightning_dir, f"lightning_{self.region_id}.json")
+
+        if not os.path.exists(lightning_file):
+            initial_data = {
+                "actualitzat": {
+                    "dataUpdate": "1970-01-01T00:00:00+00:00"
+                },
+                "dades": []
+            }
+            async with aiofiles.open(lightning_file, "w", encoding="utf-8") as file:
+                await file.write(json.dumps(initial_data, ensure_ascii=False, indent=4))
+
+            _LOGGER.info("Archivo %s creado", lightning_file)
 
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
@@ -307,6 +331,10 @@ class MeteocatConfigFlow(ConfigFlow, domain=DOMAIN):
             self.province_id = station_metadata.get("provincia", {}).get("codi", "")
             self.province_name = station_metadata.get("provincia", {}).get("nom", "")
             self.station_status = station_metadata.get("estats", [{}])[0].get("codi", "")
+
+            # Crear el archivo lightning después de obtener region_id
+            await self.create_lightning_file()
+
             return await self.async_step_set_api_limits()
         except Exception as ex:
             _LOGGER.error("Error al obtener los metadatos de la estación: %s", ex)
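The seed file written by `create_lightning_file` uses the Unix epoch as its `dataUpdate` timestamp, so the age check in `MeteocatLightningCoordinator` (coordinator.py, below) always treats it as stale; an API fetch additionally requires the daily time-of-day gate from const.py to be satisfied. A minimal sketch of that staleness logic, with constant values mirrored from const.py (illustrative only, not package code):

```python
# Sketch of the staleness check applied to the seeded lightning file.
# Constant values are copied from const.py; the logic mirrors
# MeteocatLightningCoordinator._async_update_data() in coordinator.py below.
from datetime import datetime, time, timedelta, timezone

DEFAULT_LIGHTNING_VALIDITY_TIME = 240    # minutes before cached data is considered stale
DEFAULT_LIGHTNING_VALIDITY_HOURS = 2     # hour (UTC) after which the API has fresh data
DEFAULT_LIGHTNING_VALIDITY_MINUTES = 0

last_update = datetime.fromisoformat("1970-01-01T00:00:00+00:00")  # epoch seed from config_flow
now = datetime.now(timezone.utc)

is_stale = now - last_update >= timedelta(minutes=DEFAULT_LIGHTNING_VALIDITY_TIME)
past_daily_window = now.time() >= time(DEFAULT_LIGHTNING_VALIDITY_HOURS,
                                        DEFAULT_LIGHTNING_VALIDITY_MINUTES)

# The coordinator only calls the API when both conditions hold; otherwise it
# keeps returning the (initially empty) cached file data.
print(is_stale and past_daily_window)
```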
package/custom_components/meteocat/const.py
CHANGED
@@ -41,6 +41,7 @@ QUOTA_PREDICCIO = "quota_prediccio"
 QUOTA_BASIC = "quota_basic"
 QUOTA_XEMA = "quota_xema"
 QUOTA_QUERIES = "quota_queries"
+LIGHTNING_FILE_STATUS = "lightning_file_status"
 
 from homeassistant.const import Platform
 
@@ -54,6 +55,9 @@ DEFAULT_VALIDITY_HOURS = 5  # Hora a partir de la cual la API tiene la informaci
 DEFAULT_VALIDITY_MINUTES = 0  # Minutos a partir de los cuales la API tiene la información actualizada de predicciones disponible para descarga
 DEFAULT_ALERT_VALIDITY_TIME = 120  # Minutos a partir de los cuales las alertas están obsoletas y se debe proceder a una nueva llamada a la API
 DEFAULT_QUOTES_VALIDITY_TIME = 240  # Minutos a partir de los cuales los datos de cuotas están obsoletos y se debe proceder a una nueva llamada a la API
+DEFAULT_LIGHTNING_VALIDITY_TIME = 240  # Minutos a partir de los cuales los datos de rayos están obsoletos y se debe proceder a una nueva llamada a la API
+DEFAULT_LIGHTNING_VALIDITY_HOURS = 2  # Hora a partir de la cual la API tiene la información actualizada de rayos disponible para descarga
+DEFAULT_LIGHTNING_VALIDITY_MINUTES = 0  # Minutos a partir de los cuales la API tiene la información actualizada de rayos disponible para descarga
 
 # Multiplicadores para la duración de validez basada en limit_prediccio
 ALERT_VALIDITY_MULTIPLIER_100 = 12  # para limit_prediccio <= 100
@@ -81,6 +85,8 @@ STATION_TIMESTAMP = "station_timestamp"  # Código de tiempo de la estación
 CONDITION = "condition"  # Estado del cielo
 MAX_TEMPERATURE_FORECAST = "max_temperature_forecast"  # Temperatura máxima prevista
 MIN_TEMPERATURE_FORECAST = "min_temperature_forecast"  # Temperatura mínima prevista
+LIGHTNING_REGION = "lightning_region"  # Rayos de la comarca
+LIGHTNING_TOWN = "lightning_town"  # Rayos de la población
 
 # Definición de códigos para variables
 WIND_SPEED_CODE = 30
package/custom_components/meteocat/coordinator.py
CHANGED
@@ -20,6 +20,7 @@ from meteocatpy.uvi import MeteocatUviData
 from meteocatpy.forecast import MeteocatForecast
 from meteocatpy.alerts import MeteocatAlerts
 from meteocatpy.quotes import MeteocatQuotes
+from meteocatpy.lightning import MeteocatLightning
 
 from meteocatpy.exceptions import (
     BadRequestError,
@@ -41,7 +42,10 @@ from .const import (
     ALERT_VALIDITY_MULTIPLIER_100,
     ALERT_VALIDITY_MULTIPLIER_200,
     ALERT_VALIDITY_MULTIPLIER_500,
-    ALERT_VALIDITY_MULTIPLIER_DEFAULT
+    ALERT_VALIDITY_MULTIPLIER_DEFAULT,
+    DEFAULT_LIGHTNING_VALIDITY_TIME,
+    DEFAULT_LIGHTNING_VALIDITY_HOURS,
+    DEFAULT_LIGHTNING_VALIDITY_MINUTES
 )
 
 _LOGGER = logging.getLogger(__name__)
@@ -60,6 +64,8 @@ DEFAULT_ALERTS_UPDATE_INTERVAL = timedelta(minutes=10)
 DEFAULT_ALERTS_REGION_UPDATE_INTERVAL = timedelta(minutes=5)
 DEFAULT_QUOTES_UPDATE_INTERVAL = timedelta(minutes=10)
 DEFAULT_QUOTES_FILE_UPDATE_INTERVAL = timedelta(minutes=5)
+DEFAULT_LIGHTNING_UPDATE_INTERVAL = timedelta(minutes=10)
+DEFAULT_LIGHTNING_FILE_UPDATE_INTERVAL = timedelta(minutes=5)
 
 # Definir la zona horaria local
 TIMEZONE = ZoneInfo("Europe/Madrid")
@@ -278,6 +284,8 @@ class MeteocatStaticSensorCoordinator(DataUpdateCoordinator):
         self.town_id = entry_data["town_id"]  # ID del municipio
         self.station_name = entry_data["station_name"]  # Nombre de la estación
         self.station_id = entry_data["station_id"]  # ID de la estación
+        self.region_name = entry_data["region_name"]  # Nombre de la región
+        self.region_id = entry_data["region_id"]  # ID de la región
 
         super().__init__(
             hass,
@@ -293,17 +301,21 @@ class MeteocatStaticSensorCoordinator(DataUpdateCoordinator):
         Since static sensors use entry_data, this method simply logs the process.
         """
         _LOGGER.debug(
-            "Updating static sensor data for town: %s (ID: %s), station: %s (ID: %s)",
+            "Updating static sensor data for town: %s (ID: %s), station: %s (ID: %s), region: %s (ID: %s)",
             self.town_name,
             self.town_id,
             self.station_name,
             self.station_id,
+            self.region_name,
+            self.region_id,
         )
         return {
             "town_name": self.town_name,
             "town_id": self.town_id,
             "station_name": self.station_name,
             "station_id": self.station_id,
+            "region_name": self.region_name,
+            "region_id": self.region_id,
         }
 
 class MeteocatUviCoordinator(DataUpdateCoordinator):
@@ -1721,6 +1733,11 @@ class MeteocatQuotesCoordinator(DataUpdateCoordinator):
             normalized_nom = normalize_name(plan["nom"])
             new_name = next((v for k, v in plan_mapping.items() if normalized_nom.startswith(k)), plan["nom"])
 
+            # Si el plan es "Quota", actualizamos las consultas realizadas y restantes
+            if new_name == "Quota":
+                plan["consultesRealitzades"] += 1
+                plan["consultesRestants"] = max(0, plan["consultesRestants"] - 1)
+
             modified_plans.append({
                 "nom": new_name,
                 "periode": plan["periode"],
@@ -1853,3 +1870,209 @@ class MeteocatQuotesFileCoordinator(DataUpdateCoordinator):
             }
         _LOGGER.warning("Plan %s no encontrado en quotes.json.", plan_name)
         return {}
+
+class MeteocatLightningCoordinator(DataUpdateCoordinator):
+    """Coordinator para manejar la actualización de los datos de rayos de la API de Meteocat."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        entry_data: dict,
+    ):
+        """
+        Inicializa el coordinador de rayos de Meteocat.
+
+        Args:
+            hass (HomeAssistant): Instancia de Home Assistant.
+            entry_data (dict): Datos de configuración obtenidos de core.config_entries.
+        """
+        self.api_key = entry_data["api_key"]  # API Key de la configuración
+        self.region_id = entry_data["region_id"]  # Región de la configuración
+        self.meteocat_lightning = MeteocatLightning(self.api_key)
+
+        self.lightning_file = os.path.join(
+            hass.config.path(),
+            "custom_components",
+            "meteocat",
+            "files",
+            f"lightning_{self.region_id}.json",
+        )
+
+        super().__init__(
+            hass,
+            _LOGGER,
+            name=f"{DOMAIN} Lightning Coordinator",
+            update_interval=DEFAULT_LIGHTNING_UPDATE_INTERVAL,
+        )
+
+    async def _async_update_data(self) -> Dict:
+        """Actualiza los datos de rayos desde la API de Meteocat o usa datos en caché según la antigüedad."""
+        existing_data = await load_json_from_file(self.lightning_file) or {}
+
+        # Definir la duración de validez de los datos
+        current_time = datetime.now(timezone.utc).time()
+        validity_start_time = time(DEFAULT_LIGHTNING_VALIDITY_HOURS, DEFAULT_LIGHTNING_VALIDITY_MINUTES)
+        validity_duration = timedelta(minutes=DEFAULT_LIGHTNING_VALIDITY_TIME)
+
+        if not existing_data:
+            return await self._fetch_and_save_new_data()
+        else:
+            last_update = datetime.fromisoformat(existing_data['actualitzat']['dataUpdate'])
+            now = datetime.now(timezone.utc).astimezone(TIMEZONE)
+
+            if now - last_update >= validity_duration and current_time >= validity_start_time:
+                return await self._fetch_and_save_new_data()
+            else:
+                _LOGGER.debug("Usando datos existentes de rayos: %s", existing_data)
+                return {"actualizado": existing_data['actualitzat']['dataUpdate']}
+
+    async def _fetch_and_save_new_data(self):
+        """Obtiene nuevos datos de la API y los guarda en el archivo JSON."""
+        try:
+            data = await asyncio.wait_for(
+                self.meteocat_lightning.get_lightning_data(self.region_id),
+                timeout=30  # Tiempo límite de 30 segundos
+            )
+            _LOGGER.debug("Datos de rayos actualizados exitosamente: %s", data)
+
+            # Verificar que `data` sea una lista (como la API de Meteocat devuelve)
+            if not isinstance(data, list):
+                _LOGGER.error("Formato inválido: Se esperaba una lista, pero se obtuvo %s", type(data).__name__)
+                raise ValueError("Formato de datos inválido")
+
+            # Estructurar los datos en el formato correcto
+            current_time = datetime.now(timezone.utc).astimezone(TIMEZONE).isoformat()
+            data_with_timestamp = {
+                "actualitzat": {
+                    "dataUpdate": current_time
+                },
+                "dades": data  # Siempre será una lista
+            }
+
+            # Guardar los datos en un archivo JSON
+            await save_json_to_file(data_with_timestamp, self.lightning_file)
+
+            # Actualizar cuotas usando la función externa
+            await _update_quotes(self.hass, "XDDE")  # Asegúrate de usar el nombre correcto del plan aquí
+
+            return {"actualizado": data_with_timestamp['actualitzat']['dataUpdate']}
+
+        except asyncio.TimeoutError as err:
+            _LOGGER.warning("Tiempo de espera agotado al obtener los datos de rayos de la API de Meteocat.")
+            raise ConfigEntryNotReady from err
+        except Exception as err:
+            _LOGGER.exception("Error inesperado al obtener los datos de rayos de la API de Meteocat: %s", err)
+
+            # Intentar cargar datos en caché si la API falla
+            cached_data = await load_json_from_file(self.lightning_file)
+            if cached_data:
+                _LOGGER.warning("Usando datos en caché para los datos de rayos de la API de Meteocat.")
+                return cached_data
+
+            _LOGGER.error("No se pudo obtener datos actualizados ni cargar datos en caché.")
+            return None
+
+class MeteocatLightningFileCoordinator(DataUpdateCoordinator):
+    """Coordinator para manejar la actualización de los datos de rayos desde lightning_{region_id}.json."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        entry_data: dict,
+    ):
+        """
+        Inicializa el coordinador de rayos desde archivo.
+
+        Args:
+            hass (HomeAssistant): Instancia de Home Assistant.
+            entry_data (dict): Datos de configuración de la entrada.
+        """
+        self.region_id = entry_data["region_id"]
+        self.town_id = entry_data["town_id"]
+
+        self.lightning_file = os.path.join(
+            hass.config.path(),
+            "custom_components",
+            "meteocat",
+            "files",
+            f"lightning_{self.region_id}.json",
+        )
+
+        super().__init__(
+            hass,
+            _LOGGER,
+            name="Meteocat Lightning File Coordinator",
+            update_interval=DEFAULT_LIGHTNING_FILE_UPDATE_INTERVAL,
+        )
+
+    async def _async_update_data(self) -> Dict[str, Any]:
+        """Carga los datos de rayos desde el archivo JSON y procesa la información."""
+        existing_data = await load_json_from_file(self.lightning_file)
+
+        if not existing_data:
+            _LOGGER.warning("No se encontraron datos en %s.", self.lightning_file)
+            return {
+                "actualizado": datetime.now(ZoneInfo("Europe/Madrid")).isoformat(),
+                "region": self._reset_data(),
+                "town": self._reset_data()
+            }
+
+        # Convertir la cadena de fecha a un objeto datetime y ajustar a la zona horaria local
+        update_date = datetime.fromisoformat(existing_data.get("actualitzat", {}).get("dataUpdate", ""))
+        update_date = update_date.astimezone(ZoneInfo("Europe/Madrid"))
+        now = datetime.now(ZoneInfo("Europe/Madrid"))
+
+        if update_date.date() != now.date():  # Si la fecha no es la de hoy
+            _LOGGER.info("Los datos de rayos son de un día diferente. Reiniciando valores a cero.")
+            region_data = town_data = self._reset_data()
+            update_date = datetime.now(ZoneInfo("Europe/Madrid")).isoformat()  # Usar la fecha actual
+        else:
+            region_data = self._process_region_data(existing_data.get("dades", []))
+            town_data = self._process_town_data(existing_data.get("dades", []))
+
+        return {
+            "actualizado": update_date,
+            "region": region_data,
+            "town": town_data
+        }
+
+    def _process_region_data(self, data_list):
+        """Suma los tipos de descargas para toda la región."""
+        region_counts = {
+            "cc": 0,
+            "cg-": 0,
+            "cg+": 0
+        }
+        for town in data_list:
+            for discharge in town.get("descarregues", []):
+                if discharge["tipus"] in region_counts:
+                    region_counts[discharge["tipus"]] += discharge["recompte"]
+
+        region_counts["total"] = sum(region_counts.values())
+        return region_counts
+
+    def _process_town_data(self, data_list):
+        """Encuentra y suma los tipos de descargas para un municipio específico."""
+        town_counts = {
+            "cc": 0,
+            "cg-": 0,
+            "cg+": 0
+        }
+        for town in data_list:
+            if town["codi"] == self.town_id:
+                for discharge in town.get("descarregues", []):
+                    if discharge["tipus"] in town_counts:
+                        town_counts[discharge["tipus"]] += discharge["recompte"]
+                break  # Solo necesitamos datos de un municipio
+
+        town_counts["total"] = sum(town_counts.values())
+        return town_counts
+
+    def _reset_data(self):
+        """Resetea los datos a cero."""
+        return {
+            "cc": 0,
+            "cg-": 0,
+            "cg+": 0,
+            "total": 0
+        }
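`MeteocatLightningFileCoordinator` reduces the raw `dades` list (one entry per municipality, each carrying `descarregues` counts per discharge type) into per-region and per-town totals. A standalone sketch of that aggregation with hypothetical sample data (illustrative only; the field names `codi`, `descarregues`, `tipus`, and `recompte` follow the lightning file format shown above, while the town codes are made up):

```python
# Standalone sketch of the aggregation done by _process_region_data /
# _process_town_data. The sample payload is hypothetical; field names match
# the lightning_{region_id}.json format shown in the diff above.
dades = [
    {"codi": "080057", "descarregues": [{"tipus": "cc", "recompte": 4},
                                        {"tipus": "cg-", "recompte": 2}]},
    {"codi": "081234", "descarregues": [{"tipus": "cg+", "recompte": 1}]},
]

def count_discharges(town_list, town_id=None):
    """Sum discharge counts by type, optionally restricted to a single town."""
    counts = {"cc": 0, "cg-": 0, "cg+": 0}
    for town in town_list:
        if town_id is not None and town["codi"] != town_id:
            continue
        for discharge in town.get("descarregues", []):
            if discharge["tipus"] in counts:
                counts[discharge["tipus"]] += discharge["recompte"]
    counts["total"] = sum(counts.values())
    return counts

print(count_discharges(dades))                    # region: {'cc': 4, 'cg-': 2, 'cg+': 1, 'total': 7}
print(count_discharges(dades, town_id="080057"))  # town:   {'cc': 4, 'cg-': 2, 'cg+': 0, 'total': 6}
```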
package/custom_components/meteocat/manifest.json
CHANGED
@@ -8,6 +8,6 @@
   "iot_class": "cloud_polling",
   "issue_tracker": "https://github.com/figorr/meteocat/issues",
   "loggers": ["meteocatpy"],
-  "requirements": ["meteocatpy==1.0.
-  "version": "2.2.2"
+  "requirements": ["meteocatpy==1.0.1", "packaging>=20.3", "wrapt>=1.14.0"],
+  "version": "2.2.4"
 }