env-canada 0.6.3__py3-none-any.whl → 0.7.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- env_canada/constants.py +1 -1
- env_canada/ec_aqhi.py +5 -6
- env_canada/ec_cache.py +16 -29
- env_canada/ec_radar.py +156 -170
- env_canada/ec_weather.py +7 -22
- {env_canada-0.6.3.dist-info → env_canada-0.7.1.dist-info}/METADATA +2 -2
- env_canada-0.7.1.dist-info/RECORD +16 -0
- {env_canada-0.6.3.dist-info → env_canada-0.7.1.dist-info}/WHEEL +1 -1
- env_canada/ec_data.py +0 -501
- env_canada-0.6.3.dist-info/RECORD +0 -17
- {env_canada-0.6.3.dist-info → env_canada-0.7.1.dist-info}/LICENSE +0 -0
- {env_canada-0.6.3.dist-info → env_canada-0.7.1.dist-info}/top_level.txt +0 -0
env_canada/constants.py
CHANGED
@@ -1 +1 @@
-USER_AGENT = "env_canada/0.6.3"
+USER_AGENT = "env_canada/0.7.1"
env_canada/ec_aqhi.py
CHANGED
@@ -84,7 +84,6 @@ async def find_closest_region(language, lat, lon):


 class ECAirQuality(object):
-
     """Get air quality data from Environment Canada."""

     def __init__(self, **kwargs):
@@ -172,7 +171,7 @@ class ECAirQuality(object):
         # Fetch current measurement
         aqhi_current = await self.get_aqhi_data(url=AQHI_OBSERVATION_URL)

-        if aqhi_current:
+        if aqhi_current is not None:
             # Update region name
             element = aqhi_current.find("region")
             self.region_name = element.attrib[
@@ -202,7 +201,7 @@ class ECAirQuality(object):
         # Update AQHI forecasts
         aqhi_forecast = await self.get_aqhi_data(url=AQHI_FORECAST_URL)

-        if aqhi_forecast:
+        if aqhi_forecast is not None:
             # Update AQHI daily forecasts
             for f in aqhi_forecast.findall("./forecastGroup/forecast"):
                 for p in f.findall("./period"):
@@ -214,6 +213,6 @@ class ECAirQuality(object):

         # Update AQHI hourly forecasts
         for f in aqhi_forecast.findall("./hourlyForecastGroup/hourlyForecast"):
-            self.forecasts["hourly"][
-
-
+            self.forecasts["hourly"][timestamp_to_datetime(f.attrib["UTCTime"])] = (
+                int(f.text or 0)
+            )
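The switch to explicit `is not None` checks above matters because ElementTree-style elements are falsy when they have no child elements, so a plain truthiness test can skip a document that parsed successfully. A minimal illustration (the element name here is made up):

import xml.etree.ElementTree as et

# A parsed element with no children has len() == 0, so a bare `if element:`
# treats it as false even though parsing succeeded.
root = et.fromstring("<airQualityHealthIndex/>")
print(root is not None)  # True  -- the document parsed fine
print(len(root))         # 0     -- so `if root:` would have skipped it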
env_canada/ec_cache.py
CHANGED
@@ -1,38 +1,25 @@
-from
-from datetime import datetime, timedelta
+from datetime import datetime

-from .constants import USER_AGENT
-
-CACHE_EXPIRE_TIME = timedelta(minutes=200)  # Time is tuned for 3h radar image

+class Cache:
+    _cache = {}

-
-
+    @classmethod
+    def add(cls, cache_key, item, cache_time):
+        """Add an entry to the cache."""

-
+        cls._cache[cache_key] = (datetime.now() + cache_time, item)
+        return item  # Returning item useful for chaining calls

-
-
+    @classmethod
+    def get(cls, cache_key):
+        """Get an entry from the cache."""

+        # Delete expired entries at start so we don't use expired entries
         now = datetime.now()
-        expired = [key for key, value in
+        expired = [key for key, value in cls._cache.items() if value[0] < now]
         for key in expired:
-            del
-
-    async def get(self, url, params, cache_time=CACHE_EXPIRE_TIME):
-        """Thin wrapper around ClientSession.get to cache responses."""
-
-        self._flush_cache()  # Flush at start so we don't use expired entries
-
-        cache_key = (url, tuple(sorted(params.items())))
-        result = self._cache.get(cache_key)
-        if not result:
-            result = (
-                datetime.now() + cache_time,
-                await super().get(
-                    url=url, params=params, headers={"User-Agent": USER_AGENT}
-                ),
-            )
-            self._cache[cache_key] = result
+            del cls._cache[key]

-
+        result = cls._cache.get(cache_key)
+        return result[1] if result else None
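The rewritten module replaces the old ClientSession subclass with a small module-level cache keyed by caller-chosen strings, each entry carrying its own expiry. A usage sketch based only on the `Cache` API shown above (`add` returns the item so calls can be chained; `get` returns `None` on a miss or after expiry):

from datetime import timedelta

from env_canada.ec_cache import Cache

# Store a payload for five minutes; add() hands the item back, so a caller
# can write `return Cache.add(key, await fetch(), ttl)` in one line.
Cache.add("capabilities-rain", "<Capabilities/>", timedelta(minutes=5))

# Later reads return the cached item, or None once it has expired
# (expired keys are purged at the start of every get()).
capabilities = Cache.get("capabilities-rain")
if capabilities is None:
    capabilities = Cache.add("capabilities-rain", "<Capabilities/>", timedelta(minutes=5))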
env_canada/ec_radar.py
CHANGED
@@ -1,18 +1,20 @@
 import asyncio
-import
+from datetime import date, timedelta
 import logging
 import math
 import os
 from io import BytesIO
+from typing import cast

 import dateutil.parser
 import defusedxml.ElementTree as et
-import imageio.v2 as imageio
 import voluptuous as vol
+from aiohttp import ClientSession
 from aiohttp.client_exceptions import ClientConnectorError
 from PIL import Image, ImageDraw, ImageFont

-from .
+from .constants import USER_AGENT
+from .ec_cache import Cache

 ATTRIBUTION = {
     "english": "Data provided by Environment Canada",
@@ -69,7 +71,7 @@ legend_params = {
     "sld_version": "1.1.0",
     "format": "image/png",
 }
-radar_interval = 6
+radar_interval = timedelta(minutes=6)

 timestamp_label = {
     "rain": {"english": "Rain", "french": "Pluie"},
@@ -77,7 +79,7 @@ timestamp_label = {
 }


-def compute_bounding_box(distance, latittude, longitude):
+def _compute_bounding_box(distance, latittude, longitude):
     """
     Modified from https://gist.github.com/alexcpn/f95ae83a7ee0293a5225
     """
@@ -102,6 +104,16 @@ def compute_bounding_box(distance, latittude, longitude):
     return lat_min, lon_min, lat_max, lon_max


+async def _get_resource(url, params, bytes=True):
+    async with ClientSession(raise_for_status=True) as session:
+        response = await session.get(
+            url=url, params=params, headers={"User-Agent": USER_AGENT}
+        )
+        if bytes:
+            return await response.read()
+        return await response.text()
+
+
 class ECRadar(object):
     def __init__(self, **kwargs):
         """Initialize the radar object."""
@@ -133,259 +145,233 @@ class ECRadar(object):
         self.language = kwargs["language"]
         self.metadata = {"attribution": ATTRIBUTION[self.language]}

-
-
-        if "precip_type" in kwargs and kwargs["precip_type"] is not None:
-            self.precip_type = kwargs["precip_type"]
-        else:
-            self.precip_type = "auto"
+        self._precip_type_setting = kwargs.get("precip_type")
+        self._precip_type_actual = self.precip_type[1]

         # Get map parameters
-
         self.image = None
         self.width = kwargs["width"]
         self.height = kwargs["height"]
-        self.bbox =
+        self.bbox = _compute_bounding_box(kwargs["radius"], *kwargs["coordinates"])
         self.map_params = {
             "bbox": ",".join([str(coord) for coord in self.bbox]),
             "width": self.width,
             "height": self.height,
         }
-        self.map_image = None
         self.radar_opacity = kwargs["radar_opacity"]

         # Get overlay parameters
-
         self.show_legend = kwargs["legend"]
-        self.legend_layer = None
-        self.legend_image = None
-        self.legend_position = None
-
         self.show_timestamp = kwargs["timestamp"]
-
+
+        self._font = None

     @property
     def precip_type(self):
-
+        # NOTE: this is a breaking change for this lib; HA doesn't use this so not breaking for that
+        if self._precip_type_setting in ["rain", "snow"]:
+            return (self._precip_type_setting, self._precip_type_setting)
+        self._precip_type_actual = (
+            "rain" if date.today().month in range(4, 11) else "snow"
+        )
+        return ("auto", self._precip_type_actual)

     @precip_type.setter
     def precip_type(self, user_input):
         if user_input not in ["rain", "snow", "auto"]:
             raise ValueError("precip_type must be 'rain', 'snow', or 'auto'")
-
-        self.
-
-        if self._precip_setting in ["rain", "snow"]:
-            self.layer_key = self._precip_setting
-        else:
-            self._auto_precip_type()
-
-    def _auto_precip_type(self):
-        if datetime.date.today().month in range(4, 11):
-            self.layer_key = "rain"
-        else:
-            self.layer_key = "snow"
+        self._precip_type_setting = user_input
+        self._precip_type_actual = self.precip_type[1]

     async def _get_basemap(self):
         """Fetch the background map image."""
-
-
-        try:
-            async with ClientSession(raise_for_status=True) as session:
-                response = await session.get(url=basemap_url, params=basemap_params)
-                base_bytes = await response.read()
+        if base_bytes := Cache.get("basemap"):
+            return base_bytes

-
-
+        basemap_params.update(self.map_params)
+        for map_url in [basemap_url, backup_map_url]:
             try:
-
-
-                        url=backup_map_url, params=basemap_params
-                    )
-                    base_bytes = await response.read()
-            except ClientConnectorError:
-                logging.warning("Mapbox base map could not be retrieved")
-                return None
+                base_bytes = await _get_resource(map_url, basemap_params)
+                return Cache.add("basemap", base_bytes, timedelta(days=7))

-
+            except ClientConnectorError as e:
+                logging.warning("Map from %s could not be retrieved: %s" % map_url, e)

     async def _get_legend(self):
         """Fetch legend image."""
+
+        legend_cache_key = f"legend-{self._precip_type_actual}"
+        if legend := Cache.get(legend_cache_key):
+            return legend
+
         legend_params.update(
             dict(
-                layer=precip_layers[self.
+                layer=precip_layers[self._precip_type_actual],
+                style=legend_style[self._precip_type_actual],
             )
         )
         try:
-
-
-
+            legend = await _get_resource(geomet_url, legend_params)
+            return Cache.add(legend_cache_key, legend, timedelta(days=7))
+
         except ClientConnectorError:
             logging.warning("Legend could not be retrieved")
             return None

     async def _get_dimensions(self):
-        """Get time range of available
-
-
-
-
-
-
-
+        """Get time range of available radar images."""
+
+        capabilities_cache_key = f"capabilities-{self._precip_type_actual}"
+
+        if not (capabilities_xml := Cache.get(capabilities_cache_key)):
+            capabilities_params["layer"] = precip_layers[self._precip_type_actual]
+            capabilities_xml = await _get_resource(
+                geomet_url, capabilities_params, bytes=False
             )
-        capabilities_xml =
+            Cache.add(capabilities_cache_key, capabilities_xml, timedelta(minutes=5))

-
-
-            dimension_xpath.format(layer=precip_layers[self.layer_key]),
+        dimension_string = et.fromstring(capabilities_xml).find(
+            dimension_xpath.format(layer=precip_layers[self._precip_type_actual]),
             namespaces=wms_namespace,
-        )
-
-
-
-
-
-
-
-
-
-        if not self.map_image:
-            base_bytes = await self._get_basemap()
-
-        legend_bytes = None
-        if self.show_legend:
-            if not self.legend_image or self.legend_layer != self.layer_key:
-                legend_bytes = await self._get_legend()
-
-        # All the synchronous PIL stuff here
+        )
+        if dimension_string is not None:
+            if dimension_string := dimension_string.text:
+                start, end = [
+                    dateutil.parser.isoparse(t) for t in dimension_string.split("/")[:2]
+                ]
+                self.timestamp = end.isoformat()
+                return (start, end)
+        return None
+
+    async def _get_radar_image(self, frame_time):
         def _create_image():
-
+            """Contains all the PIL calls; run in another thread."""

+            radar_image = Image.open(BytesIO(cast(bytes, radar_bytes))).convert("RGBA")
+
+            map_image = None
             if base_bytes:
-
+                map_image = Image.open(BytesIO(base_bytes)).convert("RGBA")

             if legend_bytes:
-
-
-
-
+                legend_image = Image.open(BytesIO(legend_bytes)).convert("RGB")
+                legend_position = (self.width - legend_image.size[0], 0)
+            else:
+                legend_image = None
+                legend_position = None

             # Add transparency to radar
             if self.radar_opacity < 100:
                 alpha = round((self.radar_opacity / 100) * 255)
-                radar_copy =
+                radar_copy = radar_image.copy()
                 radar_copy.putalpha(alpha)
-
-
-            if self.show_timestamp and not self.font:
-                self.font = ImageFont.load(
-                    os.path.join(os.path.dirname(__file__), "10x20.pil")
-                )
+                radar_image.paste(radar_copy, radar_image)

             # Overlay radar on basemap
-            if
-                frame = Image.alpha_composite(
+            if map_image:
+                frame = Image.alpha_composite(map_image, radar_image)
             else:
-                frame =
+                frame = radar_image

             # Add legend
-            if
-                frame.paste(
+            if legend_image:
+                frame.paste(legend_image, legend_position)

             # Add timestamp
-            if self.show_timestamp
-
-
-
-
-
-
-
-
-
-
-
-
-
+            if self.show_timestamp:
+                if not self._font:
+                    self._font = ImageFont.load(
+                        os.path.join(os.path.dirname(__file__), "10x20.pil")
+                    )
+
+                if self._font:
+                    label = timestamp_label[self._precip_type_actual][self.language]
+                    timestamp = f"{label} @ {frame_time.astimezone().strftime('%H:%M')}"
+                    text_box = Image.new(
+                        "RGBA", self._font.getbbox(timestamp)[2:], "white"
+                    )
+                    box_draw = ImageDraw.Draw(text_box)
+                    box_draw.text(
+                        xy=(0, 0), text=timestamp, fill=(0, 0, 0), font=self._font
+                    )
+                    double_box = text_box.resize(
+                        (text_box.width * 2, text_box.height * 2)
+                    )
+                    frame.paste(double_box)
+                    frame = frame.quantize()
+
+            # Convert frame to PNG for return
             img_byte_arr = BytesIO()
             frame.save(img_byte_arr, format="PNG")
-            frame_bytes = img_byte_arr.getvalue()

-
+            # Time is tuned for 3h radar image
+            return Cache.add(
+                f"radar-{time}", img_byte_arr.getvalue(), timedelta(minutes=200)
+            )

-
-
+        time = frame_time.strftime("%Y-%m-%dT%H:%M:00Z")
+
+        if img := Cache.get(f"radar-{time}"):
+            return img
+
+        base_bytes = await self._get_basemap()
+        legend_bytes = await self._get_legend() if self.show_legend else None

-    async def _get_radar_image(self, session, frame_time):
         params = dict(
             **radar_params,
             **self.map_params,
-            layers=precip_layers[self.
-            time=
+            layers=precip_layers[self._precip_type_actual],
+            time=time,
         )
-
-        return await
+        radar_bytes = await _get_resource(geomet_url, params)
+        return await asyncio.get_event_loop().run_in_executor(None, _create_image)

     async def get_latest_frame(self):
         """Get the latest image from Environment Canada."""
         dimensions = await self._get_dimensions()
-
-
-
-        return await self._combine_layers(frame, latest)
+        if not dimensions:
+            return None
+        return await self._get_radar_image(frame_time=dimensions[1])

     async def update(self):
-        if self.precip_type == "auto":
-            self._auto_precip_type()
-
         self.image = await self.get_loop()

     async def get_loop(self, fps=5):
         """Build an animated GIF of recent radar images."""

-        def
-
-
-
-
+        def create_gif():
+            """Assemble animated GIF."""
+            duration = 1000 / fps
+            imgs = [Image.open(BytesIO(img)).convert("RGBA") for img in radar_layers]
+            gif = BytesIO()
+            imgs[0].save(
+                gif,
                 format="GIF",
+                save_all=True,
+                append_images=imgs[1:],
                 duration=duration,
-
+                loop=0,
             )
-            return
+            return gif.getvalue()

-
-
-
+        # Without this cache priming the tasks below each compete to load map/legend
+        # at the same time, resulting in them getting retrieved for each radar image.
+        await self._get_basemap()
+        await self._get_legend() if self.show_legend else None

-
-
-
-
-        else:
-            frame_times.append(next_frame)
-
-        """Fetch frames."""
+        timespan = await self._get_dimensions()
+        if not timespan:
+            logging.error("Cannot retrieve radar times.")
+            return None

         tasks = []
-
-
-
-
-
-        frames = []
-
-        for i, f in enumerate(radar_layers):
-            frames.append(await self._combine_layers(f, frame_times[i]))
-
-        for f in range(3):
-            frames.append(frames[-1])
+        curr = timespan[0]
+        while curr <= timespan[1]:
+            tasks.append(self._get_radar_image(frame_time=curr))
+            curr = curr + radar_interval
+        radar_layers = await asyncio.gather(*tasks)

-
-
+        for _ in range(3):
+            radar_layers.append(radar_layers[-1])

-
-        gif_bytes = await loop.run_in_executor(None, build_image)
-        return gif_bytes
+        return await asyncio.get_running_loop().run_in_executor(None, create_gif)
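As the inline note above says, `precip_type` is now a property that returns a `(setting, actual)` pair, with `"auto"` resolved to `"rain"` from April to October and `"snow"` otherwise; the diff flags this as a breaking change for library callers. A hedged sketch of the new behaviour (only the `coordinates` keyword is taken from the diff; any other constructor defaults are assumptions here):

from env_canada import ECRadar

# precip_type reads as a (setting, actual) tuple, e.g. ("auto", "rain")
# in the summer months and ("auto", "snow") in the winter months.
radar = ECRadar(coordinates=(50.0, -97.0))
setting, actual = radar.precip_type

# The setter still accepts "rain", "snow", or "auto" and raises ValueError
# for anything else; an explicit setting is echoed back in both positions.
radar.precip_type = "snow"
assert radar.precip_type == ("snow", "snow")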
env_canada/ec_weather.py
CHANGED
@@ -137,24 +137,6 @@ conditions_meta = {
         "english": "Icon Code",
         "french": "Code icône",
     },
-    "high_temp_yesterday": {
-        "xpath": './yesterdayConditions/temperature[@class="high"]',
-        "type": "float",
-        "english": "High Temperature Yesterday",
-        "french": "Haute température d'hier",
-    },
-    "low_temp_yesterday": {
-        "xpath": './yesterdayConditions/temperature[@class="low"]',
-        "type": "float",
-        "english": "Low Temperature Yesterday",
-        "french": "Basse température d'hier",
-    },
-    "precip_yesterday": {
-        "xpath": "./yesterdayConditions/precip",
-        "type": "float",
-        "english": "Precipitation Yesterday",
-        "french": "Précipitation d'hier",
-    },
     "normal_high": {
         "xpath": './forecastGroup/regionalNormals/temperature[@class="high"]',
         "type": "int",
@@ -183,7 +165,7 @@ conditions_meta = {
         "xpath": "./currentConditions/dateTime/timeStamp",
         "type": "timestamp",
         "english": "Observation Time",
-        "french": "Temps d'observation"
+        "french": "Temps d'observation",
     },
 }

@@ -278,7 +260,6 @@ def closest_site(site_list, lat, lon):


 class ECWeather(object):
-
     """Get weather data from Environment Canada."""

     def __init__(self, **kwargs):
@@ -432,7 +413,9 @@ class ECWeather(object):
            self.conditions[c].update(get_condition(meta))

         # Update station metadata
-        self.metadata["station"] = weather_tree.find(
+        self.metadata["station"] = weather_tree.find(
+            "./currentConditions/station"
+        ).text

         # Update text summary
         period = get_condition(summary_meta["forecast_period"])["value"]
@@ -494,7 +477,9 @@ class ECWeather(object):
                     "temperature": int(f.findtext("./temperature") or 0),
                     "icon_code": f.findtext("./iconCode"),
                     "precip_probability": int(f.findtext("./lop") or "0"),
-                    "wind_speed": int(
+                    "wind_speed": int(
+                        wind_speed_text if wind_speed_text.isnumeric() else 0
+                    ),
                     "wind_direction": f.findtext("./wind/direction"),
                 }
             )
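The hourly-forecast change above guards the wind-speed parse so that a non-numeric feed value falls back to 0 instead of raising inside `int()`. The same guard as a standalone sketch (the helper name is invented; `wind_speed_text` is the variable used in the diff):

def parse_wind_speed(wind_speed_text: str) -> int:
    # Non-numeric values such as "calm" or "" fall back to 0 instead of
    # raising ValueError when converted to int.
    return int(wind_speed_text) if wind_speed_text.isnumeric() else 0

assert parse_wind_speed("15") == 15
assert parse_wind_speed("calm") == 0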
{env_canada-0.6.3.dist-info → env_canada-0.7.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: env_canada
-Version: 0.6.3
+Version: 0.7.1
 Summary: A package to access meteorological data from Environment Canada
 Home-page: https://github.com/michaeldavie/env_canada
 Author: Michael Davie
@@ -25,7 +25,7 @@ Requires-Dist: voluptuous
 # Environment Canada (env_canada)

 [](https://badge.fury.io/py/env-canada)
-[](https://snyk.io/vuln/pip:env-canada@0.6.3?utm_source=badge)
+[](https://snyk.io/vuln/pip:env-canada@0.7.1?utm_source=badge)

 This package provides access to various data sources published by [Environment and Climate Change Canada](https://www.canada.ca/en/environment-climate-change.html).

env_canada-0.7.1.dist-info/RECORD
ADDED
@@ -0,0 +1,16 @@
+env_canada/10x20.pbm,sha256=ClKTs2WUmhUhTHAQzPuGwPTICGVBzCvos5l-vHRBE5M,2463
+env_canada/10x20.pil,sha256=Oki6-TD7b0xFtfm6vxCKsmpEpsZ5Jaia_0v_aDz8bfE,5143
+env_canada/__init__.py,sha256=wEx1BCwVUH__GoosSlhNMHuUKCKNZAvv5uuSa5ZWq_g,187
+env_canada/constants.py,sha256=6oDJe86s1hRPyA0pj_MK0S_HHM8kCXd_RuHMWu0fh3E,32
+env_canada/ec_aqhi.py,sha256=zEEt2U8gCxaLlePexl23r9zCfQYgmfhsP0ur2ZiupZc,7793
+env_canada/ec_cache.py,sha256=xPlXBRLyrD6dTJWLRBy12J8kzBxMUC-20-xRuc56Hts,722
+env_canada/ec_exc.py,sha256=SBJwzmLf94lTx7KYVLfQYrMXYNYUoIxeVXc-BLkuXoE,67
+env_canada/ec_historical.py,sha256=slHaFwsoyW16uCVtE3_-IF3_BFhFD4IuWl7rpIRsCm4,15901
+env_canada/ec_hydro.py,sha256=LBsWreTlaTKec6ObjI0ih8-zOKBNjD02oiXKTyUa1EQ,4898
+env_canada/ec_radar.py,sha256=zh0tbazBbvLpuxrY0yfRm9EIaXNkM6HXPe1us99h4xM,12982
+env_canada/ec_weather.py,sha256=M7nPeZIKLirRIcCENB8z2B8aBDZHrjltzMYPgRz9lz0,16789
+env_canada-0.7.1.dist-info/LICENSE,sha256=c037dTHQWAgRgDqZNN-5d-CZvcteSYN37u39SNklO0I,1072
+env_canada-0.7.1.dist-info/METADATA,sha256=y5JONQes6DXUWQlw4zN-XL4TjmOBdYvlVmuoxT4M0B8,10707
+env_canada-0.7.1.dist-info/WHEEL,sha256=mguMlWGMX-VHnMpKOjjQidIo1ssRlCFu4a4mBpz1s2M,91
+env_canada-0.7.1.dist-info/top_level.txt,sha256=fw7Pcl9ULBXYvqnAdyBdmwPXW8GSRFmhO0sLZWVfOCc,11
+env_canada-0.7.1.dist-info/RECORD,,
env_canada/ec_data.py
DELETED
@@ -1,501 +0,0 @@
-from datetime import datetime, timezone
-import logging
-import re
-import xml.etree.ElementTree as et
-
-from geopy import distance
-from ratelimit import limits, RateLimitException
-import requests
-
-SITE_LIST_URL = "https://dd.weather.gc.ca/citypage_weather/docs/site_list_en.csv"
-AQHI_SITE_LIST_URL = "https://dd.weather.gc.ca/air_quality/doc/AQHI_XML_File_List.xml"
-
-WEATHER_URL = "https://hpfx.collab.science.gc.ca/{date}/WXO-DD/citypage_weather/xml/{site}_{language}.xml"
-AQHI_OBSERVATION_URL = "https://dd.weather.gc.ca/air_quality/aqhi/{}/observation/realtime/xml/AQ_OBS_{}_CURRENT.xml"
-AQHI_FORECAST_URL = "https://dd.weather.gc.ca/air_quality/aqhi/{}/forecast/realtime/xml/AQ_FCST_{}_CURRENT.xml"
-
-LOG = logging.getLogger(__name__)
-
-conditions_meta = {
-    "temperature": {
-        "xpath": "./currentConditions/temperature",
-        "english": "Temperature",
-        "french": "Température",
-    },
-    "dewpoint": {
-        "xpath": "./currentConditions/dewpoint",
-        "english": "Dew Point",
-        "french": "Point de rosée",
-    },
-    "wind_chill": {
-        "xpath": "./currentConditions/windChill",
-        "english": "Wind Chill",
-        "french": "Refroidissement éolien",
-    },
-    "humidex": {
-        "xpath": "./currentConditions/humidex",
-        "english": "Humidex",
-        "french": "Humidex",
-    },
-    "pressure": {
-        "xpath": "./currentConditions/pressure",
-        "english": "Pressure",
-        "french": "Pression",
-    },
-    "tendency": {
-        "xpath": "./currentConditions/pressure",
-        "attribute": "tendency",
-        "english": "Tendency",
-        "french": "Tendance",
-    },
-    "humidity": {
-        "xpath": "./currentConditions/relativeHumidity",
-        "english": "Humidity",
-        "french": "Humidité",
-    },
-    "visibility": {
-        "xpath": "./currentConditions/visibility",
-        "english": "Visibility",
-        "french": "Visibilité",
-    },
-    "condition": {
-        "xpath": "./currentConditions/condition",
-        "english": "Condition",
-        "french": "Condition",
-    },
-    "wind_speed": {
-        "xpath": "./currentConditions/wind/speed",
-        "english": "Wind Speed",
-        "french": "Vitesse de vent",
-    },
-    "wind_gust": {
-        "xpath": "./currentConditions/wind/gust",
-        "english": "Wind Gust",
-        "french": "Rafale de vent",
-    },
-    "wind_dir": {
-        "xpath": "./currentConditions/wind/direction",
-        "english": "Wind Direction",
-        "french": "Direction de vent",
-    },
-    "wind_bearing": {
-        "xpath": "./currentConditions/wind/bearing",
-        "english": "Wind Bearing",
-        "french": "Palier de vent",
-    },
-    "high_temp": {
-        "xpath": './forecastGroup/forecast/temperatures/temperature[@class="high"]',
-        "english": "High Temperature",
-        "french": "Haute température",
-    },
-    "low_temp": {
-        "xpath": './forecastGroup/forecast/temperatures/temperature[@class="low"]',
-        "english": "Low Temperature",
-        "french": "Basse température",
-    },
-    "uv_index": {
-        "xpath": "./forecastGroup/forecast/uv/index",
-        "english": "UV Index",
-        "french": "Indice UV",
-    },
-    "pop": {
-        "xpath": "./forecastGroup/forecast/abbreviatedForecast/pop",
-        "english": "Chance of Precip.",
-        "french": "Probabilité d'averses",
-    },
-    "icon_code": {
-        "xpath": "./currentConditions/iconCode",
-        "english": "Icon Code",
-        "french": "Code icône",
-    },
-    "precip_yesterday": {
-        "xpath": "./yesterdayConditions/precip",
-        "english": "Precipitation Yesterday",
-        "french": "Précipitation d'hier",
-    },
-}
-
-aqhi_meta = {
-    "label": {"english": "Air Quality Health Index", "french": "Cote air santé"}
-}
-
-summary_meta = {
-    "forecast_period": {
-        "xpath": "./forecastGroup/forecast/period",
-        "attribute": "textForecastName",
-    },
-    "text_summary": {
-        "xpath": "./forecastGroup/forecast/textSummary",
-    },
-    "label": {"english": "Forecast", "french": "Prévision"},
-}
-
-alerts_meta = {
-    "warnings": {
-        "english": {"label": "Warnings", "pattern": ".*WARNING((?!ENDED).)*$"},
-        "french": {
-            "label": "Alertes",
-            "pattern": ".*(ALERTE|AVERTISSEMENT)((?!TERMINÉ).)*$",
-        },
-    },
-    "watches": {
-        "english": {"label": "Watches", "pattern": ".*WATCH((?!ENDED).)*$"},
-        "french": {"label": "Veilles", "pattern": ".*VEILLE((?!TERMINÉ).)*$"},
-    },
-    "advisories": {
-        "english": {"label": "Advisories", "pattern": ".*ADVISORY((?!ENDED).)*$"},
-        "french": {"label": "Avis", "pattern": ".*AVIS((?!TERMINÉ).)*$"},
-    },
-    "statements": {
-        "english": {"label": "Statements", "pattern": ".*STATEMENT((?!ENDED).)*$"},
-        "french": {"label": "Bulletins", "pattern": ".*BULLETIN((?!TERMINÉ).)*$"},
-    },
-    "endings": {
-        "english": {"label": "Endings", "pattern": ".*ENDED"},
-        "french": {"label": "Terminaisons", "pattern": ".*TERMINÉE?"},
-    },
-}
-
-metadata_meta = {
-    "timestamp": {
-        "xpath": "./currentConditions/dateTime/timeStamp",
-    },
-    "location": {
-        "xpath": "./location/name",
-    },
-    "station": {
-        "xpath": "./currentConditions/station",
-    },
-}
-
-
-def ignore_ratelimit_error(fun):
-    def res(*args, **kwargs):
-        try:
-            return fun(*args, **kwargs)
-        except RateLimitException:
-            return None
-
-    return res
-
-
-class ECData(object):
-
-    """Get weather data from Environment Canada."""
-
-    def __init__(self, station_id=None, coordinates=(0, 0), language="english"):
-        """Initialize the data object."""
-        self.language = language
-        self.language_abr = language[:2].upper()
-        self.zone_name_tag = "name_%s_CA" % self.language_abr.lower()
-        self.region_name_tag = "name%s" % self.language_abr.title()
-
-        self.metadata = {}
-        self.conditions = {}
-        self.alerts = {}
-        self.daily_forecasts = []
-        self.hourly_forecasts = []
-        self.aqhi = {}
-        self.forecast_time = ""
-        self.aqhi_id = None
-        self.lat = 0
-        self.lon = 0
-
-        site_list = self.get_ec_sites()
-        if station_id:
-            self.station_id = station_id
-            stn = station_id.split("/")
-            if len(stn) == 2:
-                for site in site_list:
-                    if stn[1] == site["Codes"] and stn[0] == site["Province Codes"]:
-                        self.lat = site["Latitude"]
-                        self.lon = site["Longitude"]
-                        break
-        else:
-            self.station_id = self.closest_site(
-                site_list, coordinates[0], coordinates[1]
-            )
-            self.lat = coordinates[0]
-            self.lon = coordinates[1]
-
-        self.update()
-
-    @ignore_ratelimit_error
-    @limits(calls=5, period=120)
-    def update(self):
-        """Get the latest data from Environment Canada."""
-        url = WEATHER_URL.format(
-            date=datetime.now(tz=timezone.utc).strftime("%Y%m%d"),
-            site=self.station_id,
-            language=self.language[0],
-        )
-        try:
-            weather_result = requests.get(url)
-
-        except requests.exceptions.RequestException as e:
-            LOG.warning("Unable to retrieve weather forecast: %s", e)
-            return
-
-        if weather_result.status_code != 200:
-            LOG.warning(
-                "Unable to retrieve weather forecast, status code: %d, url: %s",
-                weather_result.status_code,
-                url,
-            )
-            return
-
-        weather_xml = weather_result.content.decode("iso-8859-1")
-        try:
-            weather_tree = et.fromstring(weather_xml)
-        except Exception as e:
-            LOG.warning("Unable to parse XML returned")
-            return
-
-        # Update metadata
-        for m, meta in metadata_meta.items():
-            element = weather_tree.find(meta["xpath"])
-            if element is not None:
-                self.metadata[m] = weather_tree.find(meta["xpath"]).text
-            else:
-                self.metadata[m] = None
-
-        # Update current conditions
-        def get_condition(meta):
-            condition = {}
-
-            element = weather_tree.find(meta["xpath"])
-
-            if element is not None:
-                if meta.get("attribute"):
-                    condition["value"] = element.attrib.get(meta["attribute"])
-                else:
-                    condition["value"] = element.text
-                    if element.attrib.get("units"):
-                        condition["unit"] = element.attrib.get("units")
-            return condition
-
-        for c, meta in conditions_meta.items():
-            self.conditions[c] = {"label": meta[self.language]}
-            self.conditions[c].update(get_condition(meta))
-
-        # Update text summary
-        period = get_condition(summary_meta["forecast_period"])["value"]
-        summary = get_condition(summary_meta["text_summary"])["value"]
-
-        self.conditions["text_summary"] = {
-            "label": summary_meta["label"][self.language],
-            "value": ". ".join([period, summary]),
-        }
-
-        # Update alerts
-        for category, meta in alerts_meta.items():
-            self.alerts[category] = {"value": [], "label": meta[self.language]["label"]}
-
-        alert_elements = weather_tree.findall("./warnings/event")
-
-        for a in alert_elements:
-            title = a.attrib.get("description").strip()
-            for category, meta in alerts_meta.items():
-                category_match = re.search(meta[self.language]["pattern"], title)
-                if category_match:
-                    alert = {
-                        "title": title.title(),
-                        "date": a.find("./dateTime[last()]/textSummary").text,
-                    }
-                    self.alerts[category]["value"].append(alert)
-
-        # Update daily forecasts
-        self.forecast_time = weather_tree.findtext("./forecastGroup/dateTime/timeStamp")
-        self.daily_forecasts = []
-        self.hourly_forecasts = []
-
-        for f in weather_tree.findall("./forecastGroup/forecast"):
-            self.daily_forecasts.append(
-                {
-                    "period": f.findtext("period"),
-                    "text_summary": f.findtext("textSummary"),
-                    "icon_code": f.findtext("./abbreviatedForecast/iconCode"),
-                    "temperature": f.findtext("./temperatures/temperature"),
-                    "temperature_class": f.find(
-                        "./temperatures/temperature"
-                    ).attrib.get("class"),
-                    "precip_probability": f.findtext("./abbreviatedForecast/pop")
-                    or "0",
-                }
-            )
-
-        # Update hourly forecasts
-        for f in weather_tree.findall("./hourlyForecastGroup/hourlyForecast"):
-            self.hourly_forecasts.append(
-                {
-                    "period": f.attrib.get("dateTimeUTC"),
-                    "condition": f.findtext("./condition"),
-                    "temperature": f.findtext("./temperature"),
-                    "icon_code": f.findtext("./iconCode"),
-                    "precip_probability": f.findtext("./lop") or "0",
-                }
-            )
-
-        # Update AQHI current condition
-
-        if self.aqhi_id is None:
-            lat = weather_tree.find("./location/name").attrib.get("lat")[:-1]
-            lon = weather_tree.find("./location/name").attrib.get("lon")[:-1]
-            aqhi_coordinates = (float(lat), float(lon) * -1)
-            self.aqhi_id = self.closest_aqhi(aqhi_coordinates[0], aqhi_coordinates[1])
-
-        success = True
-        try:
-            aqhi_result = requests.get(
-                AQHI_OBSERVATION_URL.format(self.aqhi_id[0], self.aqhi_id[1]),
-                timeout=10,
-            )
-        except requests.exceptions.RequestException as e:
-            LOG.warning("Unable to retrieve current AQHI observation: %s", e)
-            success = False
-
-        if not success or aqhi_result.status_code == 404:
-            self.aqhi["current"] = None
-        else:
-            aqhi_xml = aqhi_result.content.decode("utf-8")
-            aqhi_tree = et.fromstring(aqhi_xml)
-
-            element = aqhi_tree.find("airQualityHealthIndex")
-            if element is not None:
-                self.aqhi["current"] = element.text
-            else:
-                self.aqhi["current"] = None
-
-            self.conditions["air_quality"] = {
-                "label": aqhi_meta["label"][self.language],
-                "value": self.aqhi["current"],
-            }
-
-            element = aqhi_tree.find("./dateStamp/UTCStamp")
-            if element is not None:
-                self.aqhi["utc_time"] = element.text
-            else:
-                self.aqhi["utc_time"] = None
-
-        # Update AQHI forecasts
-        success = True
-        try:
-            aqhi_result = requests.get(
-                AQHI_FORECAST_URL.format(self.aqhi_id[0], self.aqhi_id[1]), timeout=10
-            )
-        except requests.exceptions.RequestException as e:
-            LOG.warning("Unable to retrieve forecast AQHI observation: %s", e)
-            success = False
-
-        if not success or aqhi_result.status_code == 404:
-            self.aqhi["forecasts"] = None
-        else:
-            aqhi_xml = aqhi_result.content.decode("ISO-8859-1")
-            aqhi_tree = et.fromstring(aqhi_xml)
-
-            self.aqhi["forecasts"] = {"daily": [], "hourly": []}
-
-            # Update AQHI daily forecasts
-            period = None
-            for f in aqhi_tree.findall("./forecastGroup/forecast"):
-                for p in f.findall("./period"):
-                    if self.language_abr == p.attrib["lang"]:
-                        period = p.attrib["forecastName"]
-                self.aqhi["forecasts"]["daily"].append(
-                    {
-                        "period": period,
-                        "aqhi": f.findtext("./airQualityHealthIndex"),
-                    }
-                )
-
-            # Update AQHI hourly forecasts
-            for f in aqhi_tree.findall("./hourlyForecastGroup/hourlyForecast"):
-                self.aqhi["forecasts"]["hourly"].append(
-                    {"period": f.attrib["UTCTime"], "aqhi": f.text}
-                )
-
-    def get_ec_sites(self):
-        """Get list of all sites from Environment Canada, for auto-config."""
-        import csv
-        import io
-
-        sites = []
-
-        try:
-            sites_result = requests.get(SITE_LIST_URL, timeout=10)
-            sites_csv_string = sites_result.text
-        except requests.exceptions.RequestException as e:
-            LOG.warning("Unable to retrieve site list csv: %s", e)
-            return sites
-
-        sites_csv_stream = io.StringIO(sites_csv_string)
-
-        sites_csv_stream.seek(0)
-        next(sites_csv_stream)
-
-        sites_reader = csv.DictReader(sites_csv_stream)
-
-        for site in sites_reader:
-            if site["Province Codes"] != "HEF":
-                site["Latitude"] = float(site["Latitude"].replace("N", ""))
-                site["Longitude"] = -1 * float(site["Longitude"].replace("W", ""))
-                sites.append(site)
-
-        return sites
-
-    def closest_site(self, site_list, lat, lon):
-        """Return the province/site_code of the closest station to our lat/lon."""
-
-        def site_distance(site):
-            """Calculate distance to a site."""
-            return distance.distance((lat, lon), (site["Latitude"], site["Longitude"]))
-
-        closest = min(site_list, key=site_distance)
-
-        return "{}/{}".format(closest["Province Codes"], closest["Codes"])
-
-    def get_aqhi_regions(self):
-        """Get list of all AQHI regions from Environment Canada, for auto-config."""
-        regions = []
-        try:
-            result = requests.get(AQHI_SITE_LIST_URL, timeout=10)
-        except requests.exceptions.RequestException as e:
-            LOG.warning("Unable to retrieve AQHI regions: %s", e)
-            return regions
-
-        site_xml = result.content.decode("utf-8")
-        xml_object = et.fromstring(site_xml)
-
-        for zone in xml_object.findall("./EC_administrativeZone"):
-            _zone_attribs = zone.attrib
-            _zone_attrib = {
-                "abbreviation": _zone_attribs["abreviation"],
-                "zone_name": _zone_attribs[self.zone_name_tag],
-            }
-            for region in zone.findall("./regionList/region"):
-                _region_attribs = region.attrib
-
-                _region_attrib = {
-                    "region_name": _region_attribs[self.region_name_tag],
-                    "cgndb": _region_attribs["cgndb"],
-                    "latitude": float(_region_attribs["latitude"]),
-                    "longitude": float(_region_attribs["longitude"]),
-                }
-                _children = list(region)
-                for child in _children:
-                    _region_attrib[child.tag] = child.text
-                _region_attrib.update(_zone_attrib)
-                regions.append(_region_attrib)
-        return regions
-
-    def closest_aqhi(self, lat, lon):
-        """Return the AQHI region and site ID of the closest site."""
-        region_list = self.get_aqhi_regions()
-
-        def site_distance(site):
-            """Calculate distance to a region."""
-            return distance.distance((lat, lon), (site["latitude"], site["longitude"]))
-
-        closest = min(region_list, key=site_distance)
-
-        return closest["abbreviation"], closest["cgndb"]
env_canada-0.6.3.dist-info/RECORD
DELETED
@@ -1,17 +0,0 @@
-env_canada/10x20.pbm,sha256=ClKTs2WUmhUhTHAQzPuGwPTICGVBzCvos5l-vHRBE5M,2463
-env_canada/10x20.pil,sha256=Oki6-TD7b0xFtfm6vxCKsmpEpsZ5Jaia_0v_aDz8bfE,5143
-env_canada/__init__.py,sha256=wEx1BCwVUH__GoosSlhNMHuUKCKNZAvv5uuSa5ZWq_g,187
-env_canada/constants.py,sha256=P8tdLi9F5nSq1VHsA5avWwtf2mXSANWRgf7-qjSB7pM,32
-env_canada/ec_aqhi.py,sha256=kJQ8xEgFnujGMYdxRXpoEK17B5e-ya-Y7rK0vLo_-w0,7768
-env_canada/ec_cache.py,sha256=qoFxmO-kOBT8jhgPeNWtVBRmguXcARIIOI54OaDh-20,1171
-env_canada/ec_data.py,sha256=DacCeZSDeMMVdN-Mx5WVa2ObooVm4SfEOK3J0kAV6H8,17597
-env_canada/ec_exc.py,sha256=SBJwzmLf94lTx7KYVLfQYrMXYNYUoIxeVXc-BLkuXoE,67
-env_canada/ec_historical.py,sha256=slHaFwsoyW16uCVtE3_-IF3_BFhFD4IuWl7rpIRsCm4,15901
-env_canada/ec_hydro.py,sha256=LBsWreTlaTKec6ObjI0ih8-zOKBNjD02oiXKTyUa1EQ,4898
-env_canada/ec_radar.py,sha256=gcLa2z5T_CkrY-NLEJRqaLDHODJRcO5unW5MGxjKxF8,13115
-env_canada/ec_weather.py,sha256=uBY6qd0-hVyZDhqPcpipfMDImXpJGiNIzMOjIzqNBfo,17358
-env_canada-0.6.3.dist-info/LICENSE,sha256=c037dTHQWAgRgDqZNN-5d-CZvcteSYN37u39SNklO0I,1072
-env_canada-0.6.3.dist-info/METADATA,sha256=00ezEXuLV3vc0BkMlGRi4jPCqFjqFkXSI-M_KgxYBPo,10707
-env_canada-0.6.3.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91
-env_canada-0.6.3.dist-info/top_level.txt,sha256=fw7Pcl9ULBXYvqnAdyBdmwPXW8GSRFmhO0sLZWVfOCc,11
-env_canada-0.6.3.dist-info/RECORD,,
{env_canada-0.6.3.dist-info → env_canada-0.7.1.dist-info}/LICENSE
File without changes

{env_canada-0.6.3.dist-info → env_canada-0.7.1.dist-info}/top_level.txt
File without changes