meteocat 2.2.6 → 2.3.0
This diff shows the content of publicly available package versions as released to their respective public registries; it is provided for informational purposes only and reflects the changes between those versions.
- package/.github/ISSUE_TEMPLATE/bug_report.md +39 -0
- package/.github/ISSUE_TEMPLATE/config.yml +1 -0
- package/.github/workflows/autocloser.yaml +25 -0
- package/.github/workflows/close-duplicates.yml +57 -0
- package/.github/workflows/publish-zip.yml +67 -0
- package/.github/workflows/release.yml +38 -6
- package/.github/workflows/stale.yml +12 -0
- package/.github/workflows/sync-gitlab.yml +94 -0
- package/.releaserc +1 -8
- package/CHANGELOG.md +29 -0
- package/README.md +29 -4
- package/custom_components/meteocat/__init__.py +154 -110
- package/custom_components/meteocat/config_flow.py +125 -55
- package/custom_components/meteocat/coordinator.py +200 -368
- package/custom_components/meteocat/helpers.py +12 -0
- package/custom_components/meteocat/manifest.json +22 -11
- package/custom_components/meteocat/options_flow.py +46 -2
- package/custom_components/meteocat/sensor.py +47 -8
- package/custom_components/meteocat/strings.json +10 -2
- package/custom_components/meteocat/translations/ca.json +10 -2
- package/custom_components/meteocat/translations/en.json +10 -2
- package/custom_components/meteocat/translations/es.json +10 -2
- package/custom_components/meteocat/version.py +1 -2
- package/filetree.txt +9 -0
- package/hacs.json +5 -2
- package/images/options.png +0 -0
- package/package.json +1 -1
- package/pyproject.toml +1 -1
- package/.releaserc.toml +0 -14
- package/releaserc.json +0 -18
package/custom_components/meteocat/__init__.py

@@ -3,6 +3,9 @@ from __future__ import annotations
 import logging
 import voluptuous as vol
 from pathlib import Path
+import aiofiles
+import json
+
 from homeassistant import core
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant

@@ -10,6 +13,7 @@ from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import async_get_platforms
 from homeassistant.helpers import config_validation as cv

+from .helpers import get_storage_dir
 from .coordinator import (
     MeteocatSensorCoordinator,
     MeteocatStaticSensorCoordinator,

@@ -28,12 +32,16 @@ from .coordinator import (
     MeteocatLightningFileCoordinator,
 )

+from meteocatpy.town import MeteocatTown
+from meteocatpy.symbols import MeteocatSymbols
+from meteocatpy.variables import MeteocatVariables
+from meteocatpy.townstations import MeteocatTownStations
 from .const import DOMAIN, PLATFORMS

 _LOGGER = logging.getLogger(__name__)

 # Versión
-__version__ = "
+__version__ = ""

 # Definir el esquema de configuración CONFIG_SCHEMA
 CONFIG_SCHEMA = vol.Schema(

@@ -44,6 +52,7 @@ CONFIG_SCHEMA = vol.Schema(
         vol.Required("town_name"): cv.string,
         vol.Required("town_id"): cv.string,
         vol.Optional("variable_name", default="temperature"): cv.string,
+        vol.Required("variable_id"): cv.string,
         vol.Optional("station_name"): cv.string,
         vol.Optional("station_id"): cv.string,
         vol.Optional("province_name"): cv.string,

@@ -56,17 +65,55 @@ CONFIG_SCHEMA = vol.Schema(
     extra=vol.ALLOW_EXTRA,
 )

-def safe_remove(path: Path, is_folder: bool = False):
-    """Elimina
+def safe_remove(path: Path, is_folder: bool = False) -> None:
+    """Elimina un archivo o carpeta vacía de forma segura."""
     try:
-        if is_folder
-            path.
-
-
-
-
-
-
+        if is_folder:
+            if path.exists() and path.is_dir():
+                path.rmdir()  # Solo elimina si está vacía
+                _LOGGER.info("Carpeta eliminada: %s", path)
+        else:
+            if path.exists():
+                path.unlink()
+                _LOGGER.info("Archivo eliminado: %s", path)
+    except Exception as e:
+        _LOGGER.error("Error eliminando %s: %s", path, e)
+
+async def ensure_assets_exist(hass, api_key, town_id=None, variable_id=None):
+    """Comprueba y crea los assets básicos si faltan."""
+    assets_dir = get_storage_dir(hass, "assets")
+    assets_dir.mkdir(parents=True, exist_ok=True)
+
+    # Lista de assets: (nombre_archivo, fetch_func, clave_json, args)
+    assets = [
+        ("towns.json", MeteocatTown(api_key).get_municipis, "towns", []),
+        ("stations.json", MeteocatTownStations(api_key).stations_service.get_stations, "stations", []),
+        ("variables.json", MeteocatVariables(api_key).get_variables, "variables", []),
+        ("symbols.json", MeteocatSymbols(api_key).fetch_symbols, "symbols", []),
+    ]
+
+    # Si tenemos town_id y variable_id, agregamos stations_<town_id>.json
+    if town_id and variable_id:
+        assets.append(
+            (f"stations_{town_id}.json", MeteocatTownStations(api_key).get_town_stations, "town_stations", [town_id, variable_id])
+        )
+
+    for filename, fetch_func, key, args in assets:
+        file_path = assets_dir / filename
+        if not file_path.exists():
+            _LOGGER.debug("Intentando descargar datos para %s desde la API con args: %s", key, args)
+            try:
+                data = await fetch_func(*args)
+            except Exception as ex:
+                _LOGGER.warning(
+                    "No se pudieron obtener los datos para %s. Intenta regenerarlo más adelante desde las opciones de la integración. Detalle: %s",
+                    key,
+                    ex,
+                )
+                data = []
+            async with aiofiles.open(file_path, "w", encoding="utf-8") as file:
+                await file.write(json.dumps({key: data}, ensure_ascii=False, indent=4))
+            _LOGGER.info("Archivo creado: %s", file_path)

 async def async_setup(hass: core.HomeAssistant, config: dict) -> bool:
     """Configuración inicial del componente Meteocat."""

@@ -89,6 +136,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     if missing_fields:
         _LOGGER.error(f"Faltan los siguientes campos en la configuración: {missing_fields}")
         return False
+
+    # Crear los assets básicos si faltan
+    await ensure_assets_exist(
+        hass,
+        api_key=entry_data["api_key"],
+        town_id=entry_data.get("town_id"),
+        variable_id=entry_data.get("variable_id"),
+    )

     _LOGGER.debug(
         f"Datos de configuración: Municipio '{entry_data['town_name']}' (ID: {entry_data['town_id']}), "

@@ -99,78 +154,39 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     )

     # Inicializar coordinadores
-
-    sensor_coordinator
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    await hourly_forecast_coordinator.async_config_entry_first_refresh()
-
-    daily_forecast_coordinator = DailyForecastCoordinator(hass=hass, entry_data=entry_data)
-    await daily_forecast_coordinator.async_config_entry_first_refresh()
-
-    condition_coordinator = MeteocatConditionCoordinator(hass=hass, entry_data=entry_data)
-    await condition_coordinator.async_config_entry_first_refresh()
-
-    temp_forecast_coordinator = MeteocatTempForecastCoordinator(hass=hass, entry_data=entry_data)
-    await temp_forecast_coordinator.async_config_entry_first_refresh()
-
-    alerts_coordinator = MeteocatAlertsCoordinator(hass=hass, entry_data=entry_data)
-    await alerts_coordinator.async_config_entry_first_refresh()
-
-    alerts_region_coordinator = MeteocatAlertsRegionCoordinator(hass=hass, entry_data=entry_data)
-    await alerts_region_coordinator.async_config_entry_first_refresh()
-
-    quotes_coordinator = MeteocatQuotesCoordinator(hass=hass, entry_data=entry_data)
-    await quotes_coordinator.async_config_entry_first_refresh()
-
-    quotes_file_coordinator = MeteocatQuotesFileCoordinator(hass=hass, entry_data=entry_data)
-    await quotes_file_coordinator.async_config_entry_first_refresh()
+    coordinators = [
+        ("sensor_coordinator", MeteocatSensorCoordinator),
+        ("static_sensor_coordinator", MeteocatStaticSensorCoordinator),
+        ("entity_coordinator", MeteocatEntityCoordinator),
+        ("uvi_coordinator", MeteocatUviCoordinator),
+        ("uvi_file_coordinator", MeteocatUviFileCoordinator),
+        ("hourly_forecast_coordinator", HourlyForecastCoordinator),
+        ("daily_forecast_coordinator", DailyForecastCoordinator),
+        ("condition_coordinator", MeteocatConditionCoordinator),
+        ("temp_forecast_coordinator", MeteocatTempForecastCoordinator),
+        ("alerts_coordinator", MeteocatAlertsCoordinator),
+        ("alerts_region_coordinator", MeteocatAlertsRegionCoordinator),
+        ("quotes_coordinator", MeteocatQuotesCoordinator),
+        ("quotes_file_coordinator", MeteocatQuotesFileCoordinator),
+        ("lightning_coordinator", MeteocatLightningCoordinator),
+        ("lightning_file_coordinator", MeteocatLightningFileCoordinator),
+    ]

-
-
+    hass.data.setdefault(DOMAIN, {})
+    hass.data[DOMAIN][entry.entry_id] = {}

-
-
+    try:
+        for key, cls in coordinators:
+            coordinator = cls(hass=hass, entry_data=entry_data)
+            await coordinator.async_config_entry_first_refresh()
+            hass.data[DOMAIN][entry.entry_id][key] = coordinator

-    except Exception as err:
-        _LOGGER.exception(
+    except Exception as err:
+        _LOGGER.exception("Error al inicializar los coordinadores: %s", err)
         return False

-
-
-    hass.data[DOMAIN][entry.entry_id] = {
-        "sensor_coordinator": sensor_coordinator,
-        "static_sensor_coordinator": static_sensor_coordinator,
-        "entity_coordinator": entity_coordinator,
-        "uvi_coordinator": uvi_coordinator,
-        "uvi_file_coordinator": uvi_file_coordinator,
-        "hourly_forecast_coordinator": hourly_forecast_coordinator,
-        "daily_forecast_coordinator": daily_forecast_coordinator,
-        "condition_coordinator": condition_coordinator,
-        "temp_forecast_coordinator": temp_forecast_coordinator,
-        "alerts_coordinator": alerts_coordinator,
-        "alerts_region_coordinator": alerts_region_coordinator,
-        "quotes_coordinator": quotes_coordinator,
-        "quotes_file_coordinator": quotes_file_coordinator,
-        "lightning_coordinator": lightning_coordinator,
-        "lightning_file_coordinator": lightning_file_coordinator,
-        **entry_data,
-    }
-
-    # Configurar plataformas
+    hass.data[DOMAIN][entry.entry_id].update(entry_data)
+
     _LOGGER.debug(f"Cargando plataformas: {PLATFORMS}")
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

@@ -192,48 +208,76 @@ async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
     """Limpia cualquier dato adicional al desinstalar la integración."""
     _LOGGER.info(f"Eliminando datos residuales de la integración: {entry.entry_id}")

-    #
-
-    assets_folder =
-    files_folder =
-
-    # Archivos comunes
-
-
-
-
+    # Rutas persistentes en /config/meteocat_files
+    base_folder = get_storage_dir(hass)
+    assets_folder = get_storage_dir(hass, "assets")
+    files_folder = get_storage_dir(hass, "files")
+
+    # Archivos comunes (solo se eliminan si no queda ninguna entrada)
+    common_files = [
+        assets_folder / "towns.json",
+        assets_folder / "symbols.json",
+        assets_folder / "variables.json",
+        assets_folder / "stations.json",
+        files_folder / "alerts.json",
+        files_folder / "quotes.json",
+    ]

-    #
+    # Identificadores de la entrada eliminada
     station_id = entry.data.get("station_id")
     town_id = entry.data.get("town_id")
     region_id = entry.data.get("region_id")

-
-        _LOGGER.warning(f"La ruta {custom_components_path} no existe. No se realizará la limpieza.")
-        return
+    specific_files = []

-    #
+    # 1. Archivos de estación
     if station_id:
-
+        other_entries_with_station = [
+            e for e in hass.config_entries.async_entries(DOMAIN)
+            if e.entry_id != entry.entry_id and e.data.get("station_id") == station_id
+        ]
+        if not other_entries_with_station:
+            specific_files.append(files_folder / f"station_{station_id.lower()}_data.json")
+
+    # 2. Archivos de municipio
     if town_id:
-
-
-
+        other_entries_with_town = [
+            e for e in hass.config_entries.async_entries(DOMAIN)
+            if e.entry_id != entry.entry_id and e.data.get("town_id") == town_id
+        ]
+        if not other_entries_with_town:
+            specific_files.extend([
+                assets_folder / f"stations_{town_id.lower()}.json",
+                files_folder / f"uvi_{town_id.lower()}_data.json",
+                files_folder / f"forecast_{town_id.lower()}_hourly_data.json",
+                files_folder / f"forecast_{town_id.lower()}_daily_data.json",
+            ])
+
+    # 3. Archivos de comarca (region_id)
     if region_id:
-
-
-
-
-
-
-
-
+        other_entries_with_region = [
+            e for e in hass.config_entries.async_entries(DOMAIN)
+            if e.entry_id != entry.entry_id and e.data.get("region_id") == region_id
+        ]
+        if not other_entries_with_region:
+            specific_files.extend([
+                files_folder / f"alerts_{region_id}.json",
+                files_folder / f"lightning_{region_id}.json",
+            ])
+
+    # Eliminar archivos específicos (solo si ya no los necesita nadie más)
+    for f in specific_files:
+        safe_remove(f)
+
+    # Comprobar si quedan entradas activas de la integración
     remaining_entries = [
         e for e in hass.config_entries.async_entries(DOMAIN)
        if e.entry_id != entry.entry_id
     ]
-    if not remaining_entries:
-
-
-
-
+    if not remaining_entries:
+        for f in common_files:
+            safe_remove(f)
+
+        # Intentar eliminar carpetas vacías
+        for folder in [assets_folder, files_folder, base_folder]:
+            safe_remove(folder, is_folder=True)