cavapy 0.1.0__py3-none-any.whl → 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cavapy might be problematic.
cavapy.py
CHANGED
@@ -1,631 +1,649 @@
[removed side, cavapy.py 0.1.0 (631 lines): the preamble (imports, the geopandas FutureWarning filter, logger setup, VARIABLES_MAP, the VALID_* constants, the inventory and ERA5 data paths, and the get_climate_data signature through remote: bool = True) is identical to the new version below; the remainder of the removed file is truncated in this diff view.]
import os
import multiprocessing as mp
from concurrent.futures import ThreadPoolExecutor
from functools import partial
import logging
import warnings

warnings.filterwarnings(
    "ignore",
    category=FutureWarning,
    message=".*geopandas.dataset module is deprecated.*",
)
import geopandas as gpd  # noqa: E402
import pandas as pd  # noqa: E402
import xarray as xr  # noqa: E402
import numpy as np  # noqa: E402
from xclim import sdba  # noqa: E402


logger = logging.getLogger("climate")
logger.handlers = []  # Remove any existing handlers
handler = logging.StreamHandler()
formatter = logging.Formatter(
    "%(asctime)s | %(name)s | %(process)d:%(thread)d [%(levelname)s]: %(message)s"
)
handler.setFormatter(formatter)
for hdlr in logger.handlers[:]:  # remove all old handlers
    logger.removeHandler(hdlr)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)

VARIABLES_MAP = {
    "pr": "tp",
    "tasmax": "t2mx",
    "tasmin": "t2mn",
    "hurs": "hurs",
    "sfcWind": "sfcwind",
    "rsds": "ssrd",
}
VALID_VARIABLES = list(VARIABLES_MAP)
# TODO: Throw an error if the selected country is not in the selected domain
VALID_DOMAINS = [
    "NAM-22",
    "EUR-22",
    "AFR-22",
    "EAS-22",
    "SEA-22",
    "WAS-22",
    "AUS-22",
    "SAM-22",
    "CAM-22",
]
VALID_RCPS = ["rcp26", "rcp85"]
VALID_GCM = ["MOHC", "MPI", "NCC"]
VALID_RCM = ["REMO", "Reg"]

INVENTORY_DATA_REMOTE_URL = (
    "https://hub.ipcc.ifca.es/thredds/fileServer/inventories/cava.csv"
)
INVENTORY_DATA_LOCAL_PATH = os.path.join(
    os.path.expanduser("~"), "shared/inventories/cava/inventory.csv"
)
ERA5_DATA_REMOTE_URL = (
    "https://hub.ipcc.ifca.es/thredds/dodsC/fao/observations/ERA5/0.25/ERA5_025.ncml"
)
ERA5_DATA_LOCAL_PATH = os.path.join(
    os.path.expanduser("~"), "shared/data/observations/ERA5/0.25/ERA5_025.ncml"
)
DEFAULT_YEARS_OBS = range(1980, 2006)


|
+
def get_climate_data(
|
|
73
|
+
*,
|
|
74
|
+
country: str | None,
|
|
75
|
+
cordex_domain: str,
|
|
76
|
+
rcp: str,
|
|
77
|
+
gcm: str,
|
|
78
|
+
rcm: str,
|
|
79
|
+
years_up_to: int,
|
|
80
|
+
years_obs: range | None = None,
|
|
81
|
+
bias_correction: bool = False,
|
|
82
|
+
historical: bool = False,
|
|
83
|
+
obs: bool = False,
|
|
84
|
+
buffer: int = 0,
|
|
85
|
+
xlim: tuple[float, float] | None = None,
|
|
86
|
+
ylim: tuple[float, float] | None = None,
|
|
87
|
+
remote: bool = True,
|
|
88
|
+
variables: list[str] | None = None,
|
|
89
|
+
num_processes: int = len(VALID_VARIABLES),
|
|
90
|
+
max_threads_per_process: int = 8,
|
|
91
|
+
) -> dict[str, xr.DataArray]:
|
|
92
|
+
f"""
|
|
93
|
+
Process climate data required by pyAEZ climate module.
|
|
94
|
+
The function automatically access CORDEX-CORE models at 0.25° and the ERA5 datasets.
|
|
95
|
+
|
|
96
|
+
Args:
|
|
97
|
+
country (str): Name of the country for which data is to be processed.
|
|
98
|
+
Use None if specifying a region using xlim and ylim.
|
|
99
|
+
cordex_domain (str): CORDEX domain of the climate data. One of {VALID_DOMAINS}.
|
|
100
|
+
rcp (str): Representative Concentration Pathway. One of {VALID_RCPS}.
|
|
101
|
+
gcm (str): GCM name. One of {VALID_GCM}.
|
|
102
|
+
rcm (str): RCM name. One of {VALID_RCM}.
|
|
103
|
+
years_up_to (int): The ending year for the projected data. Projections start in 2006 and ends in 2100.
|
|
104
|
+
Hence, if years_up_to is set to 2030, data will be downloaded for the 2006-2030 period.
|
|
105
|
+
years_obs (range): Range of years for observational data (ERA5 only). Only used when obs is True. (default: None).
|
|
106
|
+
bias_correction (bool): Whether to apply bias correction (default: False).
|
|
107
|
+
historical (bool): Flag to indicate if processing historical data (default: False).
|
|
108
|
+
If True, historical data is provided together with projections.
|
|
109
|
+
Historical simulation runs for CORDEX-CORE initiative are provided for the 1980-2005 time period.
|
|
110
|
+
obs (bool): Flag to indicate if processing observational data (default: False).
|
|
111
|
+
buffer (int): Buffer distance to expand the region of interest (default: 0).
|
|
112
|
+
xlim (tuple or None): Longitudinal bounds of the region of interest. Use only when country is None (default: None).
|
|
113
|
+
ylim (tuple or None): Latitudinal bounds of the region of interest. Use only when country is None (default: None).
|
|
114
|
+
remote (bool): Flag to work with remote data or not (default: True).
|
|
115
|
+
variables (list[str] or None): List of variables to process. Must be a subset of {VALID_VARIABLES}. If None, all variables are processed. (default: None).
|
|
116
|
+
num_processes (int): Number of processes to use, one per variable.
|
|
117
|
+
By default equals to the number of all possible variables. (default: {len(VALID_VARIABLES)}).
|
|
118
|
+
max_threads_per_process (int): Max number of threads within each process. (default: 8).
|
|
119
|
+
|
|
120
|
+
Returns:
|
|
121
|
+
dict: A dictionary containing processed climate data for each variable as an xarray object.
|
|
122
|
+
"""
|
|
123
|
+
|
|
    if xlim is None and ylim is not None or xlim is not None and ylim is None:
        raise ValueError(
            "xlim and ylim mismatch: they must be both specified or both unspecified"
        )
    if country is None and xlim is None:
        raise ValueError("You must specify a country or (xlim, ylim)")
    if country is not None and xlim is not None:
        raise ValueError("You must specify either country or (xlim, ylim), not both")
    verify_variables = {
        "cordex_domain": VALID_DOMAINS,
        "rcp": VALID_RCPS,
        "gcm": VALID_GCM,
        "rcm": VALID_RCM,
    }
    for var_name, valid_values in verify_variables.items():
        var_value = locals()[var_name]
        if var_value not in valid_values:
            raise ValueError(
                f"Invalid {var_name}={var_value}. Must be one of {valid_values}"
            )
    if years_up_to <= 2006:
        raise ValueError("years_up_to must be greater than 2006")
    if years_obs is not None and not (1980 <= min(years_obs) <= max(years_obs) <= 2020):
        raise ValueError("Years in years_obs must be within the range 1980 to 2020")
    if obs and years_obs is None:
        raise ValueError("years_obs must be provided when obs is True")
    if not obs or years_obs is None:
        # Make sure years_obs is set to default when obs=False
        years_obs = DEFAULT_YEARS_OBS

    # Validate variables if provided
    if variables is not None:
        invalid_vars = [var for var in variables if var not in VALID_VARIABLES]
        if invalid_vars:
            raise ValueError(
                f"Invalid variables: {invalid_vars}. Must be a subset of {VALID_VARIABLES}"
            )
    else:
        variables = VALID_VARIABLES

    _validate_urls(gcm, rcm, rcp, remote, cordex_domain, obs)

    bbox = _geo_localize(country, xlim, ylim, buffer, cordex_domain)

    with mp.Pool(processes=min(num_processes, len(variables))) as pool:
        futures = []
        for variable in variables:
            futures.append(
                pool.apply_async(
                    process_worker,
                    args=(max_threads_per_process,),
                    kwds={
                        "variable": variable,
                        "bbox": bbox,
                        "cordex_domain": cordex_domain,
                        "rcp": rcp,
                        "gcm": gcm,
                        "rcm": rcm,
                        "years_up_to": years_up_to,
                        "years_obs": years_obs,
                        "obs": obs,
                        "bias_correction": bias_correction,
                        "historical": historical,
                        "remote": remote,
                    },
                )
            )

        results = {
            variable: futures[i].get() for i, variable in enumerate(variables)
        }

        pool.close()  # Prevent any more tasks from being submitted to the pool
        pool.join()  # Wait for all worker processes to finish

    return results


|
+
def _validate_urls(
|
|
203
|
+
gcm: str = None,
|
|
204
|
+
rcm: str = None,
|
|
205
|
+
rcp: str = None,
|
|
206
|
+
remote: bool = True,
|
|
207
|
+
cordex_domain: str = None,
|
|
208
|
+
obs: bool = False,
|
|
209
|
+
):
|
|
210
|
+
# Load the data
|
|
211
|
+
log = logger.getChild("URL-validation")
|
|
212
|
+
|
|
213
|
+
if obs is False:
|
|
214
|
+
inventory_csv_url = (
|
|
215
|
+
INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
|
|
216
|
+
)
|
|
217
|
+
data = pd.read_csv(inventory_csv_url)
|
|
218
|
+
|
|
219
|
+
# Set the column to use based on whether the data is remote or local
|
|
220
|
+
column_to_use = "location" if remote else "hub"
|
|
221
|
+
|
|
222
|
+
# Filter the data based on the conditions
|
|
223
|
+
filtered_data = data[
|
|
224
|
+
lambda x: (
|
|
225
|
+
x["activity"].str.contains("FAO", na=False)
|
|
226
|
+
& (x["domain"] == cordex_domain)
|
|
227
|
+
& (x["model"].str.contains(gcm, na=False))
|
|
228
|
+
& (x["rcm"].str.contains(rcm, na=False))
|
|
229
|
+
& (x["experiment"].isin([rcp, "historical"]))
|
|
230
|
+
)
|
|
231
|
+
][["experiment", column_to_use]]
|
|
232
|
+
|
|
233
|
+
# Extract the column values as a list
|
|
234
|
+
num_rows = filtered_data.shape[0]
|
|
235
|
+
column_values = filtered_data[column_to_use]
|
|
236
|
+
|
|
237
|
+
if num_rows == 1:
|
|
238
|
+
# Log the output for one row
|
|
239
|
+
row1 = column_values.iloc[0]
|
|
240
|
+
log_proj = logger.getChild("URL-validation-projections")
|
|
241
|
+
log_proj.info(f"{row1}")
|
|
242
|
+
else:
|
|
243
|
+
# Log the output for two rows
|
|
244
|
+
row1 = column_values.iloc[0]
|
|
245
|
+
row2 = column_values.iloc[1]
|
|
246
|
+
log_hist = logger.getChild("URL-validation-historical")
|
|
247
|
+
log_proj = logger.getChild("URL-validation-projections")
|
|
248
|
+
log_hist.info(f"{row1}")
|
|
249
|
+
log_proj.info(f"{row2}")
|
|
250
|
+
else: # when obs is True
|
|
251
|
+
log_obs = logger.getChild("URL-validation-observations")
|
|
252
|
+
log_obs.info(f"{ERA5_DATA_REMOTE_URL}")
|
|
253
|
+
|
|
254
|
+
|
|
def _geo_localize(
    country: str = None,
    xlim: tuple[float, float] = None,
    ylim: tuple[float, float] = None,
    buffer: int = 0,
    cordex_domain: str = None,
) -> dict[str, tuple[float, float]]:
    if country:
        if xlim or ylim:
            raise ValueError(
                "Specify either a country or bounding box limits (xlim, ylim), but not both."
            )
        # Load country shapefile and extract bounds
        world = gpd.read_file(gpd.datasets.get_path("naturalearth_lowres"))
        country_shp = world[world.name == country]
        if country_shp.empty:
            raise ValueError(f"Country '{country}' is unknown.")
        bounds = country_shp.total_bounds  # [minx, miny, maxx, maxy]
        xlim, ylim = (bounds[0], bounds[2]), (bounds[1], bounds[3])
    elif not (xlim and ylim):
        raise ValueError(
            "Either a country or bounding box limits (xlim, ylim) must be specified."
        )

    # Apply buffer
    xlim = (xlim[0] - buffer, xlim[1] + buffer)
    ylim = (ylim[0] - buffer, ylim[1] + buffer)

    # Always validate CORDEX domain
    if cordex_domain:
        _validate_cordex_domain(xlim, ylim, cordex_domain)

    return {"xlim": xlim, "ylim": ylim}


def _validate_cordex_domain(xlim, ylim, cordex_domain):

    # CORDEX domains data
    cordex_domains_df = pd.DataFrame(
        {
            "min_lon": [
                -33,
                -28.3,
                89.25,
                86.75,
                19.25,
                44.0,
                -106.25,
                -115.0,
                -24.25,
                10.75,
            ],
            "min_lat": [
                -28,
                -23,
                -15.25,
                -54.25,
                -15.75,
                -4.0,
                -58.25,
                -14.5,
                -46.25,
                17.75,
            ],
            "max_lon": [
                20,
                18,
                147.0,
                -152.75,
                116.25,
                -172.0,
                -16.25,
                -30.5,
                59.75,
                140.25,
            ],
            "max_lat": [28, 21.7, 26.5, 13.75, 45.75, 65.0, 18.75, 28.5, 42.75, 69.75],
            "cordex_domain": [
                "NAM-22",
                "EUR-22",
                "SEA-22",
                "AUS-22",
                "WAS-22",
                "EAS-22",
                "SAM-22",
                "CAM-22",
                "AFR-22",
                "CAS-22",
            ],
        }
    )

    def is_bbox_contained(bbox, domain):
        """Check if bbox is contained within the domain bounding box."""
        return (
            bbox[0] >= domain["min_lon"]
            and bbox[1] >= domain["min_lat"]
            and bbox[2] <= domain["max_lon"]
            and bbox[3] <= domain["max_lat"]
        )

    user_bbox = [xlim[0], ylim[0], xlim[1], ylim[1]]
    domain_row = cordex_domains_df[cordex_domains_df["cordex_domain"] == cordex_domain]

    if domain_row.empty:
        raise ValueError(f"CORDEX domain '{cordex_domain}' is not recognized.")

    domain_bbox = domain_row.iloc[0]

    if not is_bbox_contained(user_bbox, domain_bbox):
        suggested_domains = cordex_domains_df[
            cordex_domains_df.apply(
                lambda row: is_bbox_contained(user_bbox, row), axis=1
            )
        ]

        if suggested_domains.empty:
            raise ValueError(
                f"The bounding box {user_bbox} is outside of all available CORDEX domains."
            )

        suggested_domain = suggested_domains.iloc[0]["cordex_domain"]

        raise ValueError(
            f"Bounding box {user_bbox} is not within '{cordex_domain}'. Suggested domain: '{suggested_domain}'."
        )


def process_worker(num_threads, **kwargs) -> xr.DataArray:
    variable = kwargs["variable"]
    log = logger.getChild(variable)
    try:
        with ThreadPoolExecutor(
            max_workers=num_threads, thread_name_prefix="climate"
        ) as executor:
            return _climate_data_for_variable(executor, **kwargs)
    except Exception as e:
        log.exception(f"Process worker failed: {e}")
        raise


def _climate_data_for_variable(
    executor: ThreadPoolExecutor,
    *,
    variable: str,
    bbox: dict[str, tuple[float, float]],
    cordex_domain: str,
    rcp: str,
    gcm: str,
    rcm: str,
    years_up_to: int,
    years_obs: range,
    obs: bool,
    bias_correction: bool,
    historical: bool,
    remote: bool,
) -> xr.DataArray:
    log = logger.getChild(variable)

    pd.options.mode.chained_assignment = None
    inventory_csv_url = (
        INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
    )
    data = pd.read_csv(inventory_csv_url)
    column_to_use = "location" if remote else "hub"
    filtered_data = data[
        lambda x: (x["activity"].str.contains("FAO", na=False))
        & (x["domain"] == cordex_domain)
        & (x["model"].str.contains(gcm, na=False))
        & (x["rcm"].str.contains(rcm, na=False))
        & (x["experiment"].isin([rcp, "historical"]))
    ][["experiment", column_to_use]]

    future_obs = None
    if obs or bias_correction:
        future_obs = executor.submit(
            _thread_download_data,
            url=None,
            bbox=bbox,
            variable=variable,
            obs=True,
            years_up_to=years_up_to,
            years_obs=years_obs,
            remote=remote,
        )

    if not obs:
        download_fn = partial(
            _thread_download_data,
            bbox=bbox,
            variable=variable,
            obs=False,
            years_obs=years_obs,
            years_up_to=years_up_to,
            remote=remote,
        )
        downloaded_models = list(
            executor.map(download_fn, filtered_data[column_to_use])
        )

        # Add the downloaded models to the DataFrame
        filtered_data["models"] = downloaded_models
        hist = (
            filtered_data["models"].iloc[0].interpolate_na(dim="time", method="linear")
        )
        proj = (
            filtered_data["models"].iloc[1].interpolate_na(dim="time", method="linear")
        )
        if bias_correction and historical:
            # Load observations for bias correction
            ref = future_obs.result()
            log.info("Training eqm with historical data")
            QM_mo = sdba.EmpiricalQuantileMapping.train(
                ref,
                hist,
                group="time.month",
                kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
            )
            log.info("Performing bias correction with eqm")
            hist_bs = QM_mo.adjust(hist, extrapolation="constant", interp="linear")
            proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
            log.info("Done!")
            if variable == "hurs":
                hist_bs = hist_bs.where(hist_bs <= 100, 100)
                hist_bs = hist_bs.where(hist_bs >= 0, 0)
            combined = xr.concat([hist_bs, proj_bs], dim="time")
            return combined

        elif not bias_correction and historical:
            combined = xr.concat([hist, proj], dim="time")
            return combined

        elif bias_correction and not historical:
            ref = future_obs.result()
            log.info("Training eqm with historical data")
            QM_mo = sdba.EmpiricalQuantileMapping.train(
                ref,
                hist,
                group="time.month",
                kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
            )  # multiplicative approach for pr, rsds and wind speed
            log.info("Performing bias correction with eqm")
            proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
            log.info("Done!")
            if variable == "hurs":
                proj_bs = proj_bs.where(proj_bs <= 100, 100)
                proj_bs = proj_bs.where(proj_bs >= 0, 0)
            return proj_bs

        return proj

    else:  # when observations are True
        downloaded_obs = future_obs.result()
        log.info("Done!")
        return downloaded_obs


def _thread_download_data(url: str | None, **kwargs):
    variable = kwargs["variable"]
    temporal = "observations" if kwargs["obs"] else ("historical" if "historical" in str(url) else "projections")
    log = logger.getChild(f"{variable}-{temporal}")
    try:
        return _download_data(url=url, **kwargs)
    except Exception as e:
        log.exception(f"Failed to process data from {url}: {e}")
        raise


def _download_data(
    url: str | None,
    bbox: dict[str, tuple[float, float]],
    variable: str,
    obs: bool,
    years_obs: range,
    years_up_to: int,
    remote: bool,
) -> xr.DataArray:
    temporal = "observations" if obs else ("historical" if url and "historical" in url else "projections")
    log = logger.getChild(f"{variable}-{temporal}")

    if obs:
        var = VARIABLES_MAP[variable]
        log.info(f"Establishing connection to ERA5 data for {variable}({var})")
        if remote:
            ds_var = xr.open_dataset(ERA5_DATA_REMOTE_URL)[var]
        else:
            ds_var = xr.open_dataset(ERA5_DATA_LOCAL_PATH)[var]
        log.info(f"Connection to ERA5 data for {variable}({var}) has been established")

        # Coordinate normalization and renaming for 'hurs'
        if var == "hurs":
            ds_var = ds_var.rename({"lat": "latitude", "lon": "longitude"})
            ds_cropped = ds_var.sel(
                longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
                latitude=slice(bbox["ylim"][0], bbox["ylim"][1]),
            )
        else:
            ds_var.coords["longitude"] = (ds_var.coords["longitude"] + 180) % 360 - 180
            ds_var = ds_var.sortby(ds_var.longitude)
            ds_cropped = ds_var.sel(
                longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
                latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
            )

        # Unit conversion
        if var in ["t2mx", "t2mn", "t2m"]:
            ds_cropped -= 273.15  # Convert from Kelvin to Celsius
            ds_cropped.attrs["units"] = "°C"
        elif var == "tp":
            ds_cropped *= 1000  # Convert precipitation
            ds_cropped.attrs["units"] = "mm"
        elif var == "ssrd":
            ds_cropped /= 86400  # Convert from J/m^2 to W/m^2
            ds_cropped.attrs["units"] = "W m-2"
        elif var == "sfcwind":
            ds_cropped = ds_cropped * (
                4.87 / np.log((67.8 * 10) - 5.42)
            )  # Convert wind speed from 10 m to 2 m
            ds_cropped.attrs["units"] = "m s-1"

        # Select years
        years = [x for x in years_obs]
        time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
            ds_cropped["time"].dt.year <= years[-1]
        )

    else:
        log.info(f"Establishing connection to CORDEX data for {variable}")
        ds_var = xr.open_dataset(url)[variable]
        log.info(f"Connection to CORDEX data for {variable} has been established")
        ds_cropped = ds_var.sel(
            longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
            latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
        )

        # Unit conversion
        if variable in ["tas", "tasmax", "tasmin"]:
            ds_cropped -= 273.15  # Convert from Kelvin to Celsius
            ds_cropped.attrs["units"] = "°C"
        elif variable == "pr":
            ds_cropped *= 86400  # Convert from kg m^-2 s^-1 to mm/day
            ds_cropped.attrs["units"] = "mm"
        elif variable == "rsds":
            ds_cropped.attrs["units"] = "W m-2"
        elif variable == "sfcWind":
            ds_cropped = ds_cropped * (
                4.87 / np.log((67.8 * 10) - 5.42)
            )  # Convert wind speed from 10 m to 2 m
            ds_cropped.attrs["units"] = "m s-1"

        # Select years based on rcp
        if "rcp" in url:
            years = [x for x in range(2006, years_up_to + 1)]
        else:
            years = [x for x in DEFAULT_YEARS_OBS]

        # Add missing dates
        ds_cropped = ds_cropped.convert_calendar(
            calendar="gregorian", missing=np.nan, align_on="date"
        )

        time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
            ds_cropped["time"].dt.year <= years[-1]
        )

    # subset years
    ds_cropped = ds_cropped.sel(time=time_mask)

    assert isinstance(ds_cropped, xr.DataArray)

    if obs:
        log.info(
            f"ERA5 data for {variable} has been processed: unit conversion ({ds_cropped.attrs.get('units', 'unknown units')}), time selection ({years[0]}-{years[-1]})"
        )
    else:
        log.info(
            f"CORDEX data for {variable} has been processed: unit conversion ({ds_cropped.attrs.get('units', 'unknown units')}), calendar transformation (360-day to Gregorian), time selection ({years[0]}-{years[-1]})"
        )

    return ds_cropped


if __name__ == "__main__":
    data = get_climate_data(
        country="Zambia",
        cordex_domain="AFR-22",
        rcp="rcp26",
        gcm="MPI",
        rcm="REMO",
        years_up_to=2030,
        obs=False,
        bias_correction=True,
        historical=False,
    )
    print(data)
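For reference, the new keyword arguments (variables, obs/years_obs, xlim/ylim) combine as documented in the docstring above. The snippet below is a minimal usage sketch, not part of the package: the bounding box, year range, and variable subset are illustrative values chosen to satisfy the validation rules (bbox inside AFR-22, years_obs within 1980-2020, years_up_to greater than 2006).

# Usage sketch (illustrative values): ERA5 observations over a bounding box,
# restricted to two variables. cordex_domain, rcp, gcm, rcm and years_up_to
# are still required keyword arguments even when obs=True.
obs_data = get_climate_data(
    country=None,
    xlim=(25.0, 34.0),   # illustrative longitudes inside AFR-22
    ylim=(-18.0, -8.0),  # illustrative latitudes inside AFR-22
    cordex_domain="AFR-22",
    rcp="rcp26",
    gcm="MPI",
    rcm="REMO",
    years_up_to=2030,
    obs=True,
    years_obs=range(1990, 2001),  # must fall within 1980-2020
    variables=["pr", "tasmax"],
)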