meteocat 0.1.41 → 0.1.43
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +24 -0
- package/custom_components/meteocat/__init__.py +6 -3
- package/custom_components/meteocat/config_flow.py +53 -6
- package/custom_components/meteocat/const.py +12 -0
- package/custom_components/meteocat/coordinator.py +126 -108
- package/custom_components/meteocat/manifest.json +2 -2
- package/custom_components/meteocat/sensor.py +161 -2
- package/custom_components/meteocat/strings.json +9 -1
- package/custom_components/meteocat/translations/ca.json +9 -0
- package/custom_components/meteocat/translations/en.json +9 -0
- package/custom_components/meteocat/translations/es.json +9 -0
- package/custom_components/meteocat/version.py +1 -1
- package/package.json +1 -1
- package/pyproject.toml +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,27 @@
+## [0.1.43](https://github.com/figorr/meteocat/compare/v0.1.42...v0.1.43) (2024-12-30)
+
+
+### Bug Fixes
+
+* 0.1.43 ([13185b2](https://github.com/figorr/meteocat/commit/13185b2276a5efb02b4fd18d07d30ab03ca7357d))
+* add info station to config_flow ([6c8c652](https://github.com/figorr/meteocat/commit/6c8c652dc54788e2b471984726dc7dd3c0ba30d6))
+* add new hourly, daily and uvi file status sensors ([42aa8b2](https://github.com/figorr/meteocat/commit/42aa8b2abbf6b333287c15d92e19b69d9630e62e))
+* add new hourly, daily and uvi file status sensors translations ([5639489](https://github.com/figorr/meteocat/commit/563948999feb077a5effe9c07df9dda950469d73))
+* add new hourly, daily and uvi sensor constants ([f7a4814](https://github.com/figorr/meteocat/commit/f7a481435abf34926b52312cc5302045c9357e69))
+* fix entity and uvi coordinator ([ee3a557](https://github.com/figorr/meteocat/commit/ee3a5571e5e08737b3393a650509732b1cd4996a))
+* ignore test read json date ([f5ce2ed](https://github.com/figorr/meteocat/commit/f5ce2edfa77d1ec33c4beff8c5c775499c0564fe))
+* include entity and uvi coordinators ([4128d39](https://github.com/figorr/meteocat/commit/4128d3902bd67414eaee6d8d6da4628ecbce3493))
+* set region, province and status to str ([31f58e7](https://github.com/figorr/meteocat/commit/31f58e7d701b2499e4f4d9f385238368703c75ff))
+
+## [0.1.42](https://github.com/figorr/meteocat/compare/v0.1.41...v0.1.42) (2024-12-28)
+
+
+### Bug Fixes
+
+* 0.1.42 ([f180cf1](https://github.com/figorr/meteocat/commit/f180cf1400e614684cfee81849369bb74796ee5e))
+* bump meteocatpy to 0.0.16 ([0e14f79](https://github.com/figorr/meteocat/commit/0e14f79445ee4c059d47a315bcbdc20858a0c666))
+* set logger to warning when using cache data ([b840d72](https://github.com/figorr/meteocat/commit/b840d7202c439f83b08597b9365c007e92aca1c5))
+
 ## [0.1.41](https://github.com/figorr/meteocat/compare/v0.1.40...v0.1.41) (2024-12-27)
 
 
package/custom_components/meteocat/__init__.py
CHANGED
@@ -25,7 +25,7 @@ from .const import DOMAIN, PLATFORMS
 _LOGGER = logging.getLogger(__name__)
 
 # Versión
-__version__ = "0.1.
+__version__ = "0.1.43"
 
 def safe_remove(path: Path, is_folder: bool = False):
     """Elimina de forma segura un archivo o carpeta si existe."""
@@ -53,7 +53,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     # Validar campos requeridos
     required_fields = [
         "api_key", "town_name", "town_id", "variable_name",
-        "variable_id", "station_name", "station_id"
+        "variable_id", "station_name", "station_id", "province_name",
+        "province_id", "region_name", "region_id"
     ]
     missing_fields = [field for field in required_fields if field not in entry_data]
     if missing_fields:
@@ -63,7 +64,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     _LOGGER.debug(
         f"Datos de configuración: Municipio '{entry_data['town_name']}' (ID: {entry_data['town_id']}), "
        f"Variable '{entry_data['variable_name']}' (ID: {entry_data['variable_id']}), "
-        f"Estación '{entry_data['station_name']}' (ID: {entry_data['station_id']})
+        f"Estación '{entry_data['station_name']}' (ID: {entry_data['station_id']}), "
+        f"Provincia '{entry_data['province_name']}' (ID: {entry_data['province_id']}), "
+        f"Comarca '{entry_data['region_name']}' (ID: {entry_data['region_id']})."
     )
 
     # Inicializar coordinadores
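The `__init__.py` change above only extends a key-presence check on the config entry's data. As a quick illustration of what that validation does with the newly required province and region keys, here is a minimal sketch; the `entry_data` values are invented for the example and are not real Meteocat identifiers:

```python
# Sketch of the required-field check from async_setup_entry above.
# The entry_data contents below are hypothetical example values.
required_fields = [
    "api_key", "town_name", "town_id", "variable_name",
    "variable_id", "station_name", "station_id", "province_name",
    "province_id", "region_name", "region_id",
]

entry_data = {
    "api_key": "XXXX",
    "town_name": "Girona",
    "town_id": "080057",  # made-up code
    "variable_name": "Temperatura",
    "variable_id": "32",
    "station_name": "Girona",
    "station_id": "XJ",
    # province_* and region_* keys intentionally missing
}

missing_fields = [field for field in required_fields if field not in entry_data]
if missing_fields:
    # The integration logs the missing keys and aborts the entry setup.
    print(f"Faltan campos requeridos: {missing_fields}")
```

An entry created by an older version of the config flow will not carry the new keys, which is presumably why this check is paired with the config_flow changes below that now store them.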
package/custom_components/meteocat/config_flow.py
CHANGED
@@ -23,7 +23,16 @@ from .const import (
     VARIABLE_NAME,
     VARIABLE_ID,
     STATION_NAME,
-    STATION_ID
+    STATION_ID,
+    STATION_TYPE,
+    LATITUDE,
+    LONGITUDE,
+    ALTITUDE,
+    REGION_ID,
+    REGION_NAME,
+    PROVINCE_ID,
+    PROVINCE_NAME,
+    STATION_STATUS
 )
 
 from .options_flow import MeteocatOptionsFlowHandler
@@ -31,6 +40,8 @@ from meteocatpy.town import MeteocatTown
 from meteocatpy.symbols import MeteocatSymbols
 from meteocatpy.variables import MeteocatVariables
 from meteocatpy.townstations import MeteocatTownStations
+from meteocatpy.infostation import MeteocatInfoStation
+
 from meteocatpy.exceptions import BadRequestError, ForbiddenError, TooManyRequestsError, InternalServerError, UnknownAPIError
 
 _LOGGER = logging.getLogger(__name__)
@@ -103,10 +114,17 @@ class MeteocatConfigFlow(ConfigFlow, domain=DOMAIN):
         errors = {}
 
         # Crear directorio de activos (assets) si no existe
-        assets_dir =
-
-
-
+        assets_dir = os.path.join(
+            self.hass.config.path(),
+            "custom_components",
+            "meteocat",
+            "assets"
+        )
+        os.makedirs(assets_dir, exist_ok=True)
+
+        # Rutas para los archivos de símbolos y variables
+        symbols_file = os.path.join(assets_dir, "symbols.json")
+        variables_file = os.path.join(assets_dir, "variables.json")
 
         try:
             # Descargar y guardar los símbolos
@@ -171,6 +189,26 @@ class MeteocatConfigFlow(ConfigFlow, domain=DOMAIN):
             self.station_id = selected_station["codi"]
             self.station_name = selected_station["nom"]
 
+            # Obtener metadatos de la estación
+            infostation_client = MeteocatInfoStation(self.api_key)
+            try:
+                station_metadata = await infostation_client.get_infostation(self.station_id)
+            except Exception as ex:
+                _LOGGER.error("Error al obtener los metadatos de la estación: %s", ex)
+                errors["base"] = "metadata_fetch_failed"
+                station_metadata = {}
+
+            # Extraer los valores necesarios de los metadatos
+            self.station_type = station_metadata.get("tipus", "")
+            self.latitude = station_metadata.get("coordenades", {}).get("latitud", 0.0)
+            self.longitude = station_metadata.get("coordenades", {}).get("longitud", 0.0)
+            self.altitude = station_metadata.get("altitud", 0)
+            self.region_id = station_metadata.get("comarca", {}).get("codi", "")
+            self.region_name = station_metadata.get("comarca", {}).get("nom", "")
+            self.province_id = station_metadata.get("provincia", {}).get("codi", "")
+            self.province_name = station_metadata.get("provincia", {}).get("nom", "")
+            self.station_status = station_metadata.get("estats", [{}])[0].get("codi", "")
+
             return self.async_create_entry(
                 title=self.selected_municipi["nom"],
                 data={
@@ -180,7 +218,16 @@ class MeteocatConfigFlow(ConfigFlow, domain=DOMAIN):
                     VARIABLE_NAME: "Temperatura",
                    VARIABLE_ID: str(self.variable_id),
                    STATION_NAME: self.station_name,
-                    STATION_ID: self.station_id
+                    STATION_ID: self.station_id,
+                    STATION_TYPE: self.station_type,
+                    LATITUDE: self.latitude,
+                    LONGITUDE: self.longitude,
+                    ALTITUDE: self.altitude,
+                    REGION_ID: str(self.region_id),
+                    REGION_NAME: self.region_name,
+                    PROVINCE_ID: str(self.province_id),
+                    PROVINCE_NAME: self.province_name,
+                    STATION_STATUS: str(self.station_status),
                 },
             )
         else:
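The new config-flow step above pulls station metadata through meteocatpy's `MeteocatInfoStation` client and flattens a nested response into flat entry-data fields. A minimal sketch of that mapping is shown below; the response shape is assumed from the keys the flow reads (`tipus`, `coordenades`, `altitud`, `comarca`, `provincia`, `estats`), and every value here is invented for illustration:

```python
# Hypothetical payload; real data comes from
# MeteocatInfoStation(api_key).get_infostation(station_id).
station_metadata = {
    "tipus": "A",
    "coordenades": {"latitud": 41.97, "longitud": 2.82},
    "altitud": 95,
    "comarca": {"codi": 20, "nom": "Gironès"},
    "provincia": {"codi": 17, "nom": "Girona"},
    "estats": [{"codi": 2}],
}

# Same defensive .get() chains as in the config flow above: every lookup has a
# default, so a partial response degrades to empty strings / zeros.
entry_fields = {
    "station_type": station_metadata.get("tipus", ""),
    "latitude": station_metadata.get("coordenades", {}).get("latitud", 0.0),
    "longitude": station_metadata.get("coordenades", {}).get("longitud", 0.0),
    "altitude": station_metadata.get("altitud", 0),
    "region_id": str(station_metadata.get("comarca", {}).get("codi", "")),
    "region_name": station_metadata.get("comarca", {}).get("nom", ""),
    "province_id": str(station_metadata.get("provincia", {}).get("codi", "")),
    "province_name": station_metadata.get("provincia", {}).get("nom", ""),
    "station_status": str(station_metadata.get("estats", [{}])[0].get("codi", "")),
}
print(entry_fields)
```

Wrapping the IDs and the status in `str()` mirrors the changelog entry "set region, province and status to str" and keeps the stored entry data consistently typed.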
package/custom_components/meteocat/const.py
CHANGED
@@ -8,6 +8,18 @@ VARIABLE_NAME = "variable_name"
 VARIABLE_ID = "variable_id"
 STATION_NAME = "station_name"
 STATION_ID = "station_id"
+STATION_TYPE = "station_type"
+LATITUDE = "latitude"
+LONGITUDE = "longitude"
+ALTITUDE = "altitude"
+REGION_ID = "region_id"
+REGION_NAME = "region_name"
+PROVINCE_ID = "province_id"
+PROVINCE_NAME = "province_name"
+STATION_STATUS = "station_status"
+HOURLY_FORECAST_FILE_STATUS = "hourly_forecast_file_status"
+DAILY_FORECAST_FILE_STATUS = "daily_forecast_file_status"
+UVI_FILE_STATUS = "uvi_file_status"
 
 from homeassistant.const import Platform
 
package/custom_components/meteocat/coordinator.py
CHANGED
@@ -84,7 +84,6 @@ class MeteocatSensorCoordinator(DataUpdateCoordinator):
         self,
         hass: HomeAssistant,
         entry_data: dict,
-        update_interval: timedelta = DEFAULT_SENSOR_UPDATE_INTERVAL,
     ):
         """
         Inicializa el coordinador de sensores de Meteocat.
@@ -103,11 +102,19 @@ class MeteocatSensorCoordinator(DataUpdateCoordinator):
         self.variable_id = entry_data["variable_id"]  # Usamos el ID de la variable
         self.meteocat_station_data = MeteocatStationData(self.api_key)
 
+        self.station_file = os.path.join(
+            hass.config.path(),
+            "custom_components",
+            "meteocat",
+            "files",
+            f"station_{self.station_id.lower()}_data.json"
+        )
+
         super().__init__(
             hass,
             _LOGGER,
             name=f"{DOMAIN} Sensor Coordinator",
-            update_interval=
+            update_interval=DEFAULT_SENSOR_UPDATE_INTERVAL,
         )
 
     async def _async_update_data(self) -> Dict:
@@ -129,17 +136,8 @@ class MeteocatSensorCoordinator(DataUpdateCoordinator):
                 )
                 raise ValueError("Formato de datos inválido")
 
-            # Determinar la ruta al archivo en la carpeta raíz del repositorio
-            output_file = os.path.join(
-                self.hass.config.path(),
-                "custom_components",
-                "meteocat",
-                "files",
-                f"station_{self.station_id.lower()}_data.json"
-            )
-
             # Guardar los datos en un archivo JSON
-            await save_json_to_file(data,
+            await save_json_to_file(data, self.station_file)
 
             return data
         except asyncio.TimeoutError as err:
@@ -182,13 +180,14 @@ class MeteocatSensorCoordinator(DataUpdateCoordinator):
                 self.station_id,
                 err,
             )
-
-
-
-
-
-
-
+            # Intentar cargar datos en caché si hay un error
+            cached_data = load_json_from_file(self.station_file)
+            if cached_data:
+                _LOGGER.warning("Usando datos en caché para la estación %s.", self.station_id)
+                return cached_data
+            # No se puede actualizar el estado, retornar None
+            _LOGGER.error("No se pudo obtener datos actualizados ni cargar datos en caché.")
+            return None  # o cualquier otro valor que indique un estado de error
 
 class MeteocatStaticSensorCoordinator(DataUpdateCoordinator):
     """Coordinator to manage and update static sensor data."""
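The fallback above relies on `save_json_to_file()` and `load_json_from_file()`, which are defined or imported elsewhere in `coordinator.py` and are not part of this diff. Purely as an illustration of what such helpers could look like (the signatures are inferred from the call sites: the save is awaited, the load is called synchronously), a rough sketch:

```python
import json
import os

import aiofiles  # coordinator.py already uses aiofiles for async file access


async def save_json_to_file(data: dict, output_file: str) -> None:
    """Illustrative sketch: persist coordinator data as JSON for later reuse."""
    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    async with aiofiles.open(output_file, "w", encoding="utf-8") as f:
        await f.write(json.dumps(data, ensure_ascii=False, indent=2))


def load_json_from_file(input_file: str):
    """Illustrative sketch: return cached JSON data, or None if unavailable."""
    if not os.path.exists(input_file):
        return None
    try:
        with open(input_file, "r", encoding="utf-8") as f:
            return json.load(f)
    except (OSError, json.JSONDecodeError):
        return None
```

The behavioural point of the hunk above is independent of how the helpers are implemented: when the API call fails, the coordinator now returns the last saved payload (with a warning) instead of raising, and only returns `None` when no cache exists.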
@@ -197,7 +196,6 @@ class MeteocatStaticSensorCoordinator(DataUpdateCoordinator):
         self,
         hass: HomeAssistant,
         entry_data: dict,
-        update_interval: timedelta = DEFAULT_STATIC_SENSOR_UPDATE_INTERVAL,
     ):
         """
         Initialize the MeteocatStaticSensorCoordinator.
@@ -216,7 +214,7 @@ class MeteocatStaticSensorCoordinator(DataUpdateCoordinator):
             hass,
             _LOGGER,
             name=f"{DOMAIN} Static Sensor Coordinator",
-            update_interval=
+            update_interval=DEFAULT_STATIC_SENSOR_UPDATE_INTERVAL,
         )
 
     async def _async_update_data(self):
@@ -240,16 +238,15 @@ class MeteocatStaticSensorCoordinator(DataUpdateCoordinator):
         }
 
 class MeteocatUviCoordinator(DataUpdateCoordinator):
-    """Coordinator para manejar la actualización de datos de
+    """Coordinator para manejar la actualización de datos de UVI desde la API de Meteocat."""
 
     def __init__(
         self,
         hass: HomeAssistant,
         entry_data: dict,
-        update_interval: timedelta = DEFAULT_UVI_UPDATE_INTERVAL,
     ):
         """
-        Inicializa el coordinador del
+        Inicializa el coordinador del Índice UV de Meteocat.
 
         Args:
             hass (HomeAssistant): Instancia de Home Assistant.
@@ -259,7 +256,7 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
         self.api_key = entry_data["api_key"]  # Usamos la API key de la configuración
         self.town_id = entry_data["town_id"]  # Usamos el ID del municipio
         self.meteocat_uvi_data = MeteocatUviData(self.api_key)
-        self.
+        self.uvi_file = os.path.join(
             hass.config.path(),
             "custom_components",
             "meteocat",
@@ -271,43 +268,51 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
             hass,
             _LOGGER,
             name=f"{DOMAIN} Uvi Coordinator",
-            update_interval=
+            update_interval=DEFAULT_UVI_UPDATE_INTERVAL,
         )
 
     async def is_uvi_data_valid(self) -> dict:
         """Comprueba si el archivo JSON contiene datos válidos para el día actual y devuelve los datos si son válidos."""
         try:
-            if not os.path.exists(self.
+            if not os.path.exists(self.uvi_file):
+                _LOGGER.info("El archivo %s no existe. Se considerará inválido.", self.uvi_file)
                 return None
 
-            async with aiofiles.open(self.
+            async with aiofiles.open(self.uvi_file, "r", encoding="utf-8") as file:
                 content = await file.read()
                 data = json.loads(content)
 
-
-
-
+                # Validar la fecha del primer elemento superior a 1 día
+                first_date = datetime.strptime(data["uvi"][0].get("date"), "%Y-%m-%d").date()
+                today = datetime.now(timezone.utc).date()
 
-
-
-
+                # Log detallado
+                _LOGGER.info(
+                    "Validando datos UVI en %s: Fecha de hoy: %s, Fecha del primer elemento: %s",
+                    self.uvi_file,
+                    today,
+                    first_date,
+                )
 
-
-
-
+                # Verificar si la antigüedad es mayor a un día
+                if (today - first_date).days > 1:
+                    _LOGGER.info(
+                        "Los datos en %s son antiguos. Se procederá a llamar a la API.",
+                        self.uvi_file,
+                    )
                     return None
-
+                _LOGGER.info("Los datos en %s son válidos. Se usarán sin llamar a la API.", self.uvi_file)
                 return data
         except Exception as e:
             _LOGGER.error("Error al validar el archivo JSON del índice UV: %s", e)
             return None
 
     async def _async_update_data(self) -> Dict:
-        """Actualiza los datos de
+        """Actualiza los datos de UVI desde la API de Meteocat."""
         try:
             # Validar el archivo JSON existente
             valid_data = await self.is_uvi_data_valid()
-            if valid_data
+            if valid_data:
                 _LOGGER.info("Los datos del índice UV están actualizados. No se realiza llamada a la API.")
                 return valid_data['uvi']
 
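The validity check in the hunk above reduces to a date comparison: the first element's `date` is parsed as `YYYY-MM-DD` and the cached file is only reused while it is at most one day older than today's UTC date. A standalone sketch of that rule, with a made-up payload:

```python
from datetime import datetime, timezone

# Hypothetical cached payload; the real file is written by MeteocatUviCoordinator.
data = {"uvi": [{"date": "2024-12-29"}]}

first_date = datetime.strptime(data["uvi"][0].get("date"), "%Y-%m-%d").date()
today = datetime.now(timezone.utc).date()

# More than one day old -> treated as stale, which triggers a fresh API call.
is_stale = (today - first_date).days > 1
print("stale" if is_stale else "valid")
```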
@@ -316,7 +321,7 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
                 self.meteocat_uvi_data.get_uvi_index(self.town_id),
                 timeout=30  # Tiempo límite de 30 segundos
             )
-            _LOGGER.debug("Datos
+            _LOGGER.debug("Datos actualizados exitosamente: %s", data)
 
             # Validar que los datos sean un dict con una clave 'uvi'
             if not isinstance(data, dict) or 'uvi' not in data:
@@ -324,7 +329,7 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
                 raise ValueError("Formato de datos inválido")
 
             # Guardar los datos en un archivo JSON
-            await save_json_to_file(data, self.
+            await save_json_to_file(data, self.uvi_file)
 
             return data['uvi']
         except asyncio.TimeoutError as err:
@@ -357,12 +362,14 @@ class MeteocatUviCoordinator(DataUpdateCoordinator):
                 self.town_id,
                 err,
             )
-
-
-
-
-
-
+            # Intentar cargar datos en caché si hay un error
+            cached_data = load_json_from_file(self.uvi_file)
+            if cached_data:
+                _LOGGER.warning("Usando datos en caché para la ciudad %s.", self.town_id)
+                return cached_data.get('uvi', [])
+            # No se puede actualizar el estado, retornar None
+            _LOGGER.error("No se pudo obtener datos actualizados ni cargar datos en caché.")
+            return None
 
 class MeteocatUviFileCoordinator(DataUpdateCoordinator):
     """Coordinator to read and process UV data from a file."""
@@ -371,7 +378,6 @@ class MeteocatUviFileCoordinator(DataUpdateCoordinator):
         self,
         hass: HomeAssistant,
         entry_data: dict,
-        update_interval: timedelta = DEFAULT_UVI_SENSOR_UPDATE_INTERVAL,
     ):
         """
         Inicializa el coordinador del sensor del Índice UV de Meteocat.
@@ -387,7 +393,7 @@ class MeteocatUviFileCoordinator(DataUpdateCoordinator):
             hass,
             _LOGGER,
             name=f"{DOMAIN} Uvi File Coordinator",
-            update_interval=
+            update_interval=DEFAULT_UVI_SENSOR_UPDATE_INTERVAL,
         )
         self._file_path = os.path.join(
             hass.config.path("custom_components/meteocat/files"),
@@ -449,7 +455,6 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
         self,
         hass: HomeAssistant,
         entry_data: dict,
-        update_interval: timedelta = DEFAULT_ENTITY_UPDATE_INTERVAL,
     ):
         """
         Inicializa el coordinador de datos para entidades de predicción.
@@ -468,35 +473,62 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
         self.variable_id = entry_data["variable_id"]
         self.meteocat_forecast = MeteocatForecast(self.api_key)
 
+        self.hourly_file = os.path.join(
+            hass.config.path(),
+            "custom_components",
+            "meteocat",
+            "files",
+            f"forecast_{self.town_id}_hourly_data.json",
+        )
+        self.daily_file = os.path.join(
+            hass.config.path(),
+            "custom_components",
+            "meteocat",
+            "files",
+            f"forecast_{self.town_id}_daily_data.json",
+        )
+
         super().__init__(
             hass,
             _LOGGER,
             name=f"{DOMAIN} Entity Coordinator",
-            update_interval=
+            update_interval=DEFAULT_ENTITY_UPDATE_INTERVAL,
         )
 
-    async def
-        """
+    async def validate_forecast_data(self, file_path: str) -> dict:
+        """Valida y retorna datos de predicción si son válidos."""
         if not os.path.exists(file_path):
-
-
+            _LOGGER.info("El archivo %s no existe. Se considerará inválido.", file_path)
+            return None
         try:
             async with aiofiles.open(file_path, "r", encoding="utf-8") as f:
                 content = await f.read()
                 data = json.loads(content)
 
-                if not data or "dies" not in data or not data["dies"]:
-                    return False
-
                 # Obtener la fecha del primer día
                 first_date = datetime.fromisoformat(data["dies"][0]["data"].rstrip("Z")).date()
                 today = datetime.now(timezone.utc).date()
 
+                # Log detallado
+                _LOGGER.info(
+                    "Validando datos en %s: Fecha de hoy: %s, Fecha del primer elemento: %s",
+                    file_path,
+                    today,
+                    first_date,
+                )
+
                 # Verificar si la antigüedad es mayor a un día
-
+                if (today - first_date).days > 1:
+                    _LOGGER.info(
+                        "Los datos en %s son antiguos. Se procederá a llamar a la API.",
+                        file_path,
+                    )
+                    return None
+                _LOGGER.info("Los datos en %s son válidos. Se usarán sin llamar a la API.", file_path)
+                return data
         except Exception as e:
             _LOGGER.warning("Error validando datos en %s: %s", file_path, e)
-            return
+            return None
 
     async def _fetch_and_save_data(self, api_method, file_path: str) -> dict:
         """Obtiene datos de la API y los guarda en un archivo JSON."""
@@ -504,44 +536,25 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
         await save_json_to_file(data, file_path)
         return data
 
-    async def _async_update_data(self) ->
-        """Actualiza los datos de predicción
-        hourly_file = os.path.join(
-            self.hass.config.path(),
-            "custom_components",
-            "meteocat",
-            "files",
-            f"forecast_{self.town_id.lower()}_hourly_data.json",
-        )
-        daily_file = os.path.join(
-            self.hass.config.path(),
-            "custom_components",
-            "meteocat",
-            "files",
-            f"forecast_{self.town_id.lower()}_daily_data.json",
-        )
-
+    async def _async_update_data(self) -> dict:
+        """Actualiza los datos de predicción horaria y diaria."""
         try:
-
-
-
-
-                self.meteocat_forecast.get_prediccion_horaria, hourly_file
+            # Validar o actualizar datos horarios
+            hourly_data = await self.validate_forecast_data(self.hourly_file)
+            if not hourly_data:
+                hourly_data = await self._fetch_and_save_data(
+                    self.meteocat_forecast.get_prediccion_horaria, self.hourly_file
                 )
-
-
-
-
-
-                self.meteocat_forecast.get_prediccion_diaria, daily_file
+
+            # Validar o actualizar datos diarios
+            daily_data = await self.validate_forecast_data(self.daily_file)
+            if not daily_data:
+                daily_data = await self._fetch_and_save_data(
+                    self.meteocat_forecast.get_prediccion_diaria, self.daily_file
                 )
-            )
 
-            _LOGGER.debug(
-                "Datos de predicción horaria y diaria actualizados correctamente para %s.",
-                self.town_id,
-            )
             return {"hourly": hourly_data, "daily": daily_data}
+
         except asyncio.TimeoutError as err:
             _LOGGER.warning("Tiempo de espera agotado al obtener datos de predicción.")
             raise ConfigEntryNotReady from err
@@ -568,10 +581,19 @@ class MeteocatEntityCoordinator(DataUpdateCoordinator):
             raise
         except Exception as err:
             _LOGGER.exception("Error inesperado al obtener datos de predicción: %s", err)
-
-
-
-
+
+            # Si ocurre un error, intentar cargar datos desde los archivos locales
+            hourly_cache = load_json_from_file(self.hourly_file) or {}
+            daily_cache = load_json_from_file(self.daily_file) or {}
+
+            _LOGGER.warning(
+                "Cargando datos desde caché para %s. Datos horarios: %s, Datos diarios: %s",
+                self.town_id,
+                "Encontrados" if hourly_cache else "No encontrados",
+                "Encontrados" if daily_cache else "No encontrados",
+            )
+
+            return {"hourly": hourly_cache, "daily": daily_cache}
 
 def get_condition_from_code(code: int) -> str:
     """Devuelve la condición meteorológica basada en el código."""
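Taken together, the entity coordinator's update path above is a validate-then-fetch flow with a cache fallback. The sketch below restates that control flow outside Home Assistant; only methods and attributes visible in the diff are used, and the local `_load_cached` helper stands in for the integration's own `load_json_from_file`:

```python
import json


def _load_cached(path: str) -> dict:
    """Best-effort read of a previously saved JSON payload (sketch only)."""
    try:
        with open(path, "r", encoding="utf-8") as f:
            return json.load(f)
    except (OSError, json.JSONDecodeError):
        return {}


async def refresh_forecast(coordinator) -> dict:
    """Rough mirror of the _async_update_data flow shown in the diff above."""
    try:
        # Reuse the local JSON files while their first forecast day is <= 1 day old.
        hourly = await coordinator.validate_forecast_data(coordinator.hourly_file)
        if not hourly:
            hourly = await coordinator._fetch_and_save_data(
                coordinator.meteocat_forecast.get_prediccion_horaria,
                coordinator.hourly_file,
            )
        daily = await coordinator.validate_forecast_data(coordinator.daily_file)
        if not daily:
            daily = await coordinator._fetch_and_save_data(
                coordinator.meteocat_forecast.get_prediccion_diaria,
                coordinator.daily_file,
            )
        return {"hourly": hourly, "daily": daily}
    except Exception:
        # On any failure, fall back to whatever is already on disk.
        return {
            "hourly": _load_cached(coordinator.hourly_file),
            "daily": _load_cached(coordinator.daily_file),
        }
```

The real coordinator handles timeouts separately (raising `ConfigEntryNotReady`) before the generic handler that falls back to the cache; the sketch collapses everything into a single `except` for brevity.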
@@ -584,7 +606,6 @@ class HourlyForecastCoordinator(DataUpdateCoordinator):
         self,
         hass: HomeAssistant,
         entry_data: dict,
-        update_interval: timedelta = DEFAULT_HOURLY_FORECAST_UPDATE_INTERVAL,
     ):
         """Inicializa el coordinador para predicciones horarias."""
         self.town_name = entry_data["town_name"]
@@ -602,7 +623,7 @@ class HourlyForecastCoordinator(DataUpdateCoordinator):
             hass,
             _LOGGER,
             name=f"{DOMAIN} Hourly Forecast Coordinator",
-            update_interval=
+            update_interval=DEFAULT_HOURLY_FORECAST_UPDATE_INTERVAL,
         )
 
     async def _is_data_valid(self) -> bool:
@@ -716,7 +737,6 @@ class DailyForecastCoordinator(DataUpdateCoordinator):
         self,
         hass: HomeAssistant,
         entry_data: dict,
-        update_interval: timedelta = DEFAULT_DAILY_FORECAST_UPDATE_INTERVAL,
     ):
         """Inicializa el coordinador para predicciones diarias."""
         self.town_name = entry_data["town_name"]
@@ -734,7 +754,7 @@ class DailyForecastCoordinator(DataUpdateCoordinator):
             hass,
             _LOGGER,
             name=f"{DOMAIN} Daily Forecast Coordinator",
-            update_interval=
+            update_interval=DEFAULT_DAILY_FORECAST_UPDATE_INTERVAL,
         )
 
     async def _is_data_valid(self) -> bool:
@@ -827,7 +847,6 @@ class MeteocatConditionCoordinator(DataUpdateCoordinator):
         self,
         hass: HomeAssistant,
         entry_data: dict,
-        update_interval: timedelta = DEFAULT_CONDITION_SENSOR_UPDATE_INTERVAL,
     ):
         """
         Initialize the Meteocat Condition Coordinator.
@@ -844,7 +863,7 @@ class MeteocatConditionCoordinator(DataUpdateCoordinator):
             hass,
             _LOGGER,
             name=f"{DOMAIN} Condition Coordinator",
-            update_interval=
+            update_interval=DEFAULT_CONDITION_SENSOR_UPDATE_INTERVAL,
         )
 
         self._file_path = os.path.join(
@@ -927,7 +946,6 @@ class MeteocatTempForecastCoordinator(DataUpdateCoordinator):
         self,
         hass: HomeAssistant,
         entry_data: dict,
-        update_interval: timedelta = DEFAULT_TEMP_FORECAST_UPDATE_INTERVAL,
     ):
         """Inicializa el coordinador para predicciones diarias."""
         self.town_name = entry_data["town_name"]
@@ -945,7 +963,7 @@ class MeteocatTempForecastCoordinator(DataUpdateCoordinator):
             hass,
             _LOGGER,
             name=f"{DOMAIN} Daily Forecast Coordinator",
-            update_interval=
+            update_interval=DEFAULT_TEMP_FORECAST_UPDATE_INTERVAL,
         )
 
     async def _is_data_valid(self) -> bool:
@@ -990,7 +1008,7 @@ class MeteocatTempForecastCoordinator(DataUpdateCoordinator):
             # Usar datos del día actual si están disponibles
             today_temp_forecast = self.get_temp_forecast_for_today(data)
             if today_temp_forecast:
-                parsed_data = self.
+                parsed_data = self.parse_temp_forecast(today_temp_forecast)
                 return parsed_data
         except Exception as e:
             _LOGGER.warning("Error leyendo archivo de predicción diaria: %s", e)
@@ -1009,7 +1027,7 @@ class MeteocatTempForecastCoordinator(DataUpdateCoordinator):
                 return dia
         return None
 
-    def
+    def parse_temp_forecast(self, dia: dict) -> dict:
        """Convierte un día de predicción en un diccionario con los datos necesarios."""
        variables = dia.get("variables", {})
 
package/custom_components/meteocat/manifest.json
CHANGED
@@ -7,6 +7,6 @@
   "iot_class": "cloud_polling",
   "documentation": "https://gitlab.com/figorr/meteocat",
   "loggers": ["meteocatpy"],
-  "requirements": ["meteocatpy==0.0.
-  "version": "0.1.
+  "requirements": ["meteocatpy==0.0.17", "packaging>=20.3", "wrapt>=1.14.0"],
+  "version": "0.1.43"
 }
package/custom_components/meteocat/sensor.py
CHANGED
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 from dataclasses import dataclass
-from datetime import datetime
+from datetime import datetime, timezone
 import logging
 from homeassistant.helpers.entity import (
     DeviceInfo,
@@ -48,6 +48,9 @@ from .const import (
     CONDITION,
     MAX_TEMPERATURE_FORECAST,
     MIN_TEMPERATURE_FORECAST,
+    HOURLY_FORECAST_FILE_STATUS,
+    DAILY_FORECAST_FILE_STATUS,
+    UVI_FILE_STATUS,
     WIND_SPEED_CODE,
     WIND_DIRECTION_CODE,
     TEMPERATURE_CODE,
@@ -68,6 +71,8 @@ from .coordinator import (
     MeteocatUviFileCoordinator,
     MeteocatConditionCoordinator,
     MeteocatTempForecastCoordinator,
+    MeteocatEntityCoordinator,
+    MeteocatUviCoordinator,
 )
 
 _LOGGER = logging.getLogger(__name__)
@@ -229,6 +234,24 @@ SENSOR_TYPES: tuple[MeteocatSensorEntityDescription, ...] = (
         state_class=SensorStateClass.MEASUREMENT,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
     ),
+    MeteocatSensorEntityDescription(
+        key=HOURLY_FORECAST_FILE_STATUS,
+        translation_key="hourly_forecast_file_status",
+        icon="mdi:clock",
+        entity_category=EntityCategory.DIAGNOSTIC,
+    ),
+    MeteocatSensorEntityDescription(
+        key=DAILY_FORECAST_FILE_STATUS,
+        translation_key="daily_forecast_file_status",
+        icon="mdi:clock",
+        entity_category=EntityCategory.DIAGNOSTIC,
+    ),
+    MeteocatSensorEntityDescription(
+        key=UVI_FILE_STATUS,
+        translation_key="uvi_file_status",
+        icon="mdi:clock",
+        entity_category=EntityCategory.DIAGNOSTIC,
+    )
 )
 
 @callback
@@ -242,12 +265,14 @@ async def async_setup_entry(hass, entry, async_add_entities: AddEntitiesCallback
     static_sensor_coordinator = entry_data.get("static_sensor_coordinator")
     condition_coordinator = entry_data.get("condition_coordinator")
     temp_forecast_coordinator = entry_data.get("temp_forecast_coordinator")
+    entity_coordinator = entry_data.get("entity_coordinator")
+    uvi_coordinator = entry_data.get("uvi_coordinator")
 
     # Sensores generales
     async_add_entities(
         MeteocatSensor(coordinator, description, entry_data)
         for description in SENSOR_TYPES
-        if description.key not in {TOWN_NAME, TOWN_ID, STATION_NAME, STATION_ID, UV_INDEX, CONDITION, MAX_TEMPERATURE_FORECAST, MIN_TEMPERATURE_FORECAST}  # Excluir estáticos y UVI
+        if description.key not in {TOWN_NAME, TOWN_ID, STATION_NAME, STATION_ID, UV_INDEX, CONDITION, MAX_TEMPERATURE_FORECAST, MIN_TEMPERATURE_FORECAST, HOURLY_FORECAST_FILE_STATUS, DAILY_FORECAST_FILE_STATUS, UVI_FILE_STATUS}  # Excluir estáticos y UVI
     )
 
     # Sensores estáticos
@@ -278,6 +303,27 @@ async def async_setup_entry(hass, entry, async_add_entities: AddEntitiesCallback
         if description.key in {MAX_TEMPERATURE_FORECAST, MIN_TEMPERATURE_FORECAST}
     )
 
+    # Sensores de estado de los archivos de previsión horaria
+    async_add_entities(
+        MeteocatHourlyForecastStatusSensor(entity_coordinator, description, entry_data)
+        for description in SENSOR_TYPES
+        if description.key == HOURLY_FORECAST_FILE_STATUS
+    )
+
+    # Sensores de estado de los archivos de previsión diaria
+    async_add_entities(
+        MeteocatDailyForecastStatusSensor(entity_coordinator, description, entry_data)
+        for description in SENSOR_TYPES
+        if description.key == DAILY_FORECAST_FILE_STATUS
+    )
+
+    # Sensores de estado de los archivos de uvi
+    async_add_entities(
+        MeteocatUviStatusSensor(uvi_coordinator, description, entry_data)
+        for description in SENSOR_TYPES
+        if description.key == UVI_FILE_STATUS
+    )
+
 class MeteocatStaticSensor(CoordinatorEntity[MeteocatStaticSensorCoordinator], SensorEntity):
     """Representation of a static Meteocat sensor."""
     STATIC_KEYS = {TOWN_NAME, TOWN_ID, STATION_NAME, STATION_ID}
@@ -718,3 +764,116 @@ class MeteocatTempForecast(CoordinatorEntity[MeteocatTempForecastCoordinator], SensorEntity):
             manufacturer="Meteocat",
             model="Meteocat API",
         )
+
+class MeteocatHourlyForecastStatusSensor(CoordinatorEntity[MeteocatEntityCoordinator], SensorEntity):
+
+    _attr_has_entity_name = True  # Activa el uso de nombres basados en el dispositivo
+
+    def __init__(self, entity_coordinator, description, entry_data):
+        super().__init__(entity_coordinator)
+        self.entity_description = description
+        self._town_name = entry_data["town_name"]
+        self._town_id = entry_data["town_id"]
+        self._station_id = entry_data["station_id"]
+
+        # Unique ID for the entity
+        self._attr_unique_id = f"sensor.{DOMAIN}_{self._station_id}_hourly_status"
+
+        # Assign entity_category if defined in the description
+        self._attr_entity_category = getattr(description, "entity_category", None)
+
+    @property
+    def native_value(self):
+        hourly_data = self.coordinator.data.get("hourly")
+        if hourly_data and "dies" in hourly_data:
+            first_date = datetime.fromisoformat(hourly_data["dies"][0]["data"].rstrip("Z")).date()
+            today = datetime.now(timezone.utc).date()
+            days_difference = (today - first_date).days
+            _LOGGER.debug(f"Diferencia de días para predicciones horarias: {days_difference}")
+            return "updated" if days_difference <= 1 else "obsolete"
+        return "unknown"
+
+    @property
+    def device_info(self) -> DeviceInfo:
+        """Return the device info."""
+        return DeviceInfo(
+            identifiers={(DOMAIN, self._town_id)},
+            name="Meteocat " + self._station_id + " " + self._town_name,
+            manufacturer="Meteocat",
+            model="Meteocat API",
+        )
+
+class MeteocatDailyForecastStatusSensor(CoordinatorEntity[MeteocatEntityCoordinator], SensorEntity):
+
+    _attr_has_entity_name = True  # Activa el uso de nombres basados en el dispositivo
+
+    def __init__(self, entity_coordinator, description, entry_data):
+        super().__init__(entity_coordinator)
+        self.entity_description = description
+        self._town_name = entry_data["town_name"]
+        self._town_id = entry_data["town_id"]
+        self._station_id = entry_data["station_id"]
+
+        # Unique ID for the entity
+        self._attr_unique_id = f"sensor.{DOMAIN}_{self._station_id}_daily_status"
+
+        # Assign entity_category if defined in the description
+        self._attr_entity_category = getattr(description, "entity_category", None)
+
+    @property
+    def native_value(self):
+        daily_data = self.coordinator.data.get("daily")
+        if daily_data and "dies" in daily_data:
+            first_date = datetime.fromisoformat(daily_data["dies"][0]["data"].rstrip("Z")).date()
+            today = datetime.now(timezone.utc).date()
+            days_difference = (today - first_date).days
+            _LOGGER.debug(f"Diferencia de días para predicciones diarias: {days_difference}")
+            return "updated" if days_difference <= 1 else "obsolete"
+        return "unknown"
+
+    @property
+    def device_info(self) -> DeviceInfo:
+        """Return the device info."""
+        return DeviceInfo(
+            identifiers={(DOMAIN, self._town_id)},
+            name="Meteocat " + self._station_id + " " + self._town_name,
+            manufacturer="Meteocat",
+            model="Meteocat API",
+        )
+
+class MeteocatUviStatusSensor(CoordinatorEntity[MeteocatUviCoordinator], SensorEntity):
+
+    _attr_has_entity_name = True  # Activa el uso de nombres basados en el dispositivo
+
+    def __init__(self, uvi_coordinator, description, entry_data):
+        super().__init__(uvi_coordinator)
+        self.entity_description = description
+        self._town_name = entry_data["town_name"]
+        self._town_id = entry_data["town_id"]
+        self._station_id = entry_data["station_id"]
+
+        # Unique ID for the entity
+        self._attr_unique_id = f"sensor.{DOMAIN}_{self._station_id}_uvi_status"
+
+        # Assign entity_category if defined in the description
+        self._attr_entity_category = getattr(description, "entity_category", None)
+
+    @property
+    def native_value(self):
+        if self.coordinator.data:
+            first_date = datetime.strptime(self.coordinator.data[0].get("date"), "%Y-%m-%d").date()
+            today = datetime.now(timezone.utc).date()
+            days_difference = (today - first_date).days
+            _LOGGER.debug(f"Diferencia de días para UVI: {days_difference}")
+            return "updated" if days_difference <= 1 else "obsolete"
+        return "unknown"
+
+    @property
+    def device_info(self) -> DeviceInfo:
+        """Return the device info."""
+        return DeviceInfo(
+            identifiers={(DOMAIN, self._town_id)},
+            name="Meteocat " + self._station_id + " " + self._town_name,
+            manufacturer="Meteocat",
+            model="Meteocat API",
+        )
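All three new diagnostic sensors reduce to the same freshness rule: take the date of the first forecast day (or the first UVI entry), compare it with today's UTC date, and report `updated` while the backing file is at most one day old, `obsolete` once it is older, and `unknown` when the coordinator has no data. A small standalone sketch of that classification (sample dates are invented):

```python
from datetime import datetime, timezone


def file_status(first_date_iso: str, today=None) -> str:
    """Classify a forecast/UVI payload the way the new status sensors do."""
    today = today or datetime.now(timezone.utc).date()
    first_date = datetime.fromisoformat(first_date_iso.rstrip("Z")).date()
    return "updated" if (today - first_date).days <= 1 else "obsolete"


# Hypothetical inputs, evaluated against a fixed "today" for reproducibility:
ref = datetime(2024, 12, 30, tzinfo=timezone.utc).date()
print(file_status("2024-12-30T00:00:00Z", ref))  # updated
print(file_status("2024-12-20T00:00:00Z", ref))  # obsolete
```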
package/custom_components/meteocat/strings.json
CHANGED
@@ -133,8 +133,16 @@
       },
       "min_temperature_forecast": {
         "name": "Min Temperature Today"
+      },
+      "hourly_forecast_file_status": {
+        "name": "Hourly File"
+      },
+      "daily_forecast_file_status": {
+        "name": "Daily File"
+      },
+      "uvi_file_status": {
+        "name": "Uvi File"
       }
     }
   }
 }
-
package/custom_components/meteocat/translations/ca.json
CHANGED
@@ -133,6 +133,15 @@
       },
       "min_temperature_forecast": {
         "name": "Temperatura Min Avui"
+      },
+      "hourly_forecast_file_status": {
+        "name": "Arxiu Horari"
+      },
+      "daily_forecast_file_status": {
+        "name": "Arxiu Diari"
+      },
+      "uvi_file_status": {
+        "name": "Arxiu UVI"
       }
     }
   }
package/custom_components/meteocat/translations/en.json
CHANGED
@@ -133,6 +133,15 @@
       },
       "min_temperature_forecast": {
         "name": "Min Temperature Today"
+      },
+      "hourly_forecast_file_status": {
+        "name": "Hourly File"
+      },
+      "daily_forecast_file_status": {
+        "name": "Daily File"
+      },
+      "uvi_file_status": {
+        "name": "Uvi File"
       }
     }
   }
package/custom_components/meteocat/translations/es.json
CHANGED
@@ -133,6 +133,15 @@
       },
       "min_temperature_forecast": {
         "name": "Temperatura Min Hoy"
+      },
+      "hourly_forecast_file_status": {
+        "name": "Archivo Horario"
+      },
+      "daily_forecast_file_status": {
+        "name": "Archivo Diario"
+      },
+      "uvi_file_status": {
+        "name": "Archivo UVI"
       }
     }
   }
package/custom_components/meteocat/version.py
CHANGED
@@ -1,2 +1,2 @@
 # version.py
-__version__ = "0.1.
+__version__ = "0.1.43"
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "meteocat",
-  "version": "0.1.
+  "version": "0.1.43",
   "description": "[](https://opensource.org/licenses/Apache-2.0)\r [](https://pypi.org/project/meteocat)\r [](https://gitlab.com/figorr/meteocat/commits/master)",
   "main": "index.js",
   "directories": {