cavapy 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of cavapy might be problematic.

cavapy.py CHANGED
@@ -1,655 +1,689 @@
- import os
- import multiprocessing as mp
- from concurrent.futures import ThreadPoolExecutor
- from functools import partial
- import logging
- import warnings
-
- warnings.filterwarnings(
-     "ignore",
-     category=FutureWarning,
-     message=".*geopandas.dataset module is deprecated.*",
- )
- import geopandas as gpd # noqa: E402
- import pandas as pd # noqa: E402
- import xarray as xr # noqa: E402
- import numpy as np # noqa: E402
- from xclim import sdba # noqa: E402
-
-
- logger = logging.getLogger("climate")
- logger.handlers = [] # Remove any existing handlers
- handler = logging.StreamHandler()
- formatter = logging.Formatter(
-     "%(asctime)s | %(name)s | %(process)d:%(thread)d [%(levelname)s]: %(message)s"
- )
- handler.setFormatter(formatter)
- for hdlr in logger.handlers[:]: # remove all old handlers
-     logger.removeHandler(hdlr)
- logger.addHandler(handler)
- logger.setLevel(logging.DEBUG)
-
- VARIABLES_MAP = {
-     "pr": "tp",
-     "tasmax": "t2mx",
-     "tasmin": "t2mn",
-     "hurs": "hurs",
-     "sfcWind": "sfcwind",
-     "rsds": "ssrd",
- }
- VALID_VARIABLES = list(VARIABLES_MAP)
- # TODO: Throw an error if the selected country is not in the selected domain
- VALID_DOMAINS = [
-     "NAM-22",
-     "EUR-22",
-     "AFR-22",
-     "EAS-22",
-     "SEA-22",
-     "WAS-22",
-     "AUS-22",
-     "SAM-22",
-     "CAM-22",
- ]
- VALID_RCPS = ["rcp26", "rcp85"]
- VALID_GCM = ["MOHC", "MPI", "NCC"]
- VALID_RCM = ["REMO", "Reg"]
-
- INVENTORY_DATA_REMOTE_URL = (
-     "https://hub.ipcc.ifca.es/thredds/fileServer/inventories/cava.csv"
- )
- INVENTORY_DATA_LOCAL_PATH = os.path.join(
-     os.path.expanduser("~"), "shared/inventories/cava/inventory.csv"
- )
- ERA5_DATA_REMOTE_URL = (
-     "https://hub.ipcc.ifca.es/thredds/dodsC/fao/observations/ERA5/0.25/ERA5_025.ncml"
- )
- ERA5_DATA_LOCAL_PATH = os.path.join(
-     os.path.expanduser("~"), "shared/data/observations/ERA5/0.25/ERA5_025.ncml"
- )
- DEFAULT_YEARS_OBS = range(1980, 2006)
-
-
- def get_climate_data(
-     *,
-     country: str | None,
-     cordex_domain: str,
-     rcp: str,
-     gcm: str,
-     rcm: str,
-     years_up_to: int,
-     years_obs: range | None = None,
-     bias_correction: bool = False,
-     historical: bool = False,
-     obs: bool = False,
-     buffer: int = 0,
-     xlim: tuple[float, float] | None = None,
-     ylim: tuple[float, float] | None = None,
-     remote: bool = True,
-     variables: list[str] | None = None,
-     num_processes: int = len(VALID_VARIABLES),
-     max_threads_per_process: int = 8,
- ) -> dict[str, xr.DataArray]:
-     f"""
-     Process climate data required by pyAEZ climate module.
-     The function automatically access CORDEX-CORE models at 0.25° and the ERA5 datasets.
-
-     Args:
-         country (str): Name of the country for which data is to be processed.
-             Use None if specifying a region using xlim and ylim.
-         cordex_domain (str): CORDEX domain of the climate data. One of {VALID_DOMAINS}.
-         rcp (str): Representative Concentration Pathway. One of {VALID_RCPS}.
-         gcm (str): GCM name. One of {VALID_GCM}.
-         rcm (str): RCM name. One of {VALID_RCM}.
-         years_up_to (int): The ending year for the projected data. Projections start in 2006 and ends in 2100.
-             Hence, if years_up_to is set to 2030, data will be downloaded for the 2006-2030 period.
-         years_obs (range): Range of years for observational data (ERA5 only). Only used when obs is True. (default: None).
-         bias_correction (bool): Whether to apply bias correction (default: False).
-         historical (bool): Flag to indicate if processing historical data (default: False).
-             If True, historical data is provided together with projections.
-             Historical simulation runs for CORDEX-CORE initiative are provided for the 1980-2005 time period.
-         obs (bool): Flag to indicate if processing observational data (default: False).
-         buffer (int): Buffer distance to expand the region of interest (default: 0).
-         xlim (tuple or None): Longitudinal bounds of the region of interest. Use only when country is None (default: None).
-         ylim (tuple or None): Latitudinal bounds of the region of interest. Use only when country is None (default: None).
-         remote (bool): Flag to work with remote data or not (default: True).
-         variables (list[str] or None): List of variables to process. Must be a subset of {VALID_VARIABLES}. If None, all variables are processed. (default: None).
-         num_processes (int): Number of processes to use, one per variable.
-             By default equals to the number of all possible variables. (default: {len(VALID_VARIABLES)}).
-         max_threads_per_process (int): Max number of threads within each process. (default: 8).
-
-     Returns:
-         dict: A dictionary containing processed climate data for each variable as an xarray object.
-     """
-
-     if xlim is None and ylim is not None or xlim is not None and ylim is None:
-         raise ValueError(
-             "xlim and ylim mismatch: they must be both specified or both unspecified"
-         )
-     if country is None and xlim is None:
-         raise ValueError("You must specify a country or (xlim, ylim)")
-     if country is not None and xlim is not None:
-         raise ValueError("You must specify either country or (xlim, ylim), not both")
-     verify_variables = {
-         "cordex_domain": VALID_DOMAINS,
-         "rcp": VALID_RCPS,
-         "gcm": VALID_GCM,
-         "rcm": VALID_RCM,
-     }
-     for var_name, valid_values in verify_variables.items():
-         var_value = locals()[var_name]
-         if var_value not in valid_values:
-             raise ValueError(
-                 f"Invalid {var_name}={var_value}. Must be one of {valid_values}"
-             )
-     if years_up_to <= 2006:
-         raise ValueError("years_up_to must be greater than 2006")
-     if years_obs is not None and not (1980 <= min(years_obs) <= max(years_obs) <= 2020):
-         raise ValueError("Years in years_obs must be within the range 1980 to 2020")
-     if obs and years_obs is None:
-         raise ValueError("years_obs must be provided when obs is True")
-     if not obs or years_obs is None:
-         # Make sure years_obs is set to default when obs=False
-         years_obs = DEFAULT_YEARS_OBS
-
-     # Validate variables if provided
-     if variables is not None:
-         invalid_vars = [var for var in variables if var not in VALID_VARIABLES]
-         if invalid_vars:
-             raise ValueError(
-                 f"Invalid variables: {invalid_vars}. Must be a subset of {VALID_VARIABLES}"
-             )
-     else:
-         variables = VALID_VARIABLES
-
-     _validate_urls(gcm, rcm, rcp, remote, cordex_domain, obs, historical, bias_correction)
-
-     bbox = _geo_localize(country, xlim, ylim, buffer, cordex_domain)
-
-     with mp.Pool(processes=min(num_processes, len(variables))) as pool:
-         futures = []
-         for variable in variables:
-             futures.append(
-                 pool.apply_async(
-                     process_worker,
-                     args=(max_threads_per_process,),
-                     kwds={
-                         "variable": variable,
-                         "bbox": bbox,
-                         "cordex_domain": cordex_domain,
-                         "rcp": rcp,
-                         "gcm": gcm,
-                         "rcm": rcm,
-                         "years_up_to": years_up_to,
-                         "years_obs": years_obs,
-                         "obs": obs,
-                         "bias_correction": bias_correction,
-                         "historical": historical,
-                         "remote": remote,
-                     },
-                 )
-             )
-
-         results = {
-             variable: futures[i].get() for i, variable in enumerate(variables)
-         }
-
-         pool.close() # Prevent any more tasks from being submitted to the pool
-         pool.join() # Wait for all worker processes to finish
-
-     return results
-
-
- def _validate_urls(
-     gcm: str = None,
-     rcm: str = None,
-     rcp: str = None,
-     remote: bool = True,
-     cordex_domain: str = None,
-     obs: bool = False,
-     historical: bool = False,
-     bias_correction: bool = False,
- ):
-     # Load the data
-     log = logger.getChild("URL-validation")
-
-     if obs is False:
-         inventory_csv_url = (
-             INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
-         )
-         data = pd.read_csv(inventory_csv_url)
-
-         # Set the column to use based on whether the data is remote or local
-         column_to_use = "location" if remote else "hub"
-
-         # Define which experiments we need
-         experiments = [rcp]
-         if historical or bias_correction:
-             experiments.append("historical")
-
-         # Filter the data based on the conditions
-         filtered_data = data[
-             lambda x: (
-                 x["activity"].str.contains("FAO", na=False)
-                 & (x["domain"] == cordex_domain)
-                 & (x["model"].str.contains(gcm, na=False))
-                 & (x["rcm"].str.contains(rcm, na=False))
-                 & (x["experiment"].isin(experiments))
-             )
-         ][["experiment", column_to_use]]
-
-         # Extract the column values as a list
-         for _, row in filtered_data.iterrows():
-             if row["experiment"] == "historical":
-                 log_hist = logger.getChild("URL-validation-historical")
-                 log_hist.info(f"{row[column_to_use]}")
-             else:
-                 log_proj = logger.getChild("URL-validation-projections")
-                 log_proj.info(f"{row[column_to_use]}")
-
-     else: # when obs is True
-         log_obs = logger.getChild("URL-validation-observations")
-         log_obs.info(f"{ERA5_DATA_REMOTE_URL}")
-
-
- def _geo_localize(
-     country: str = None,
-     xlim: tuple[float, float] = None,
-     ylim: tuple[float, float] = None,
-     buffer: int = 0,
-     cordex_domain: str = None,
- ) -> dict[str, tuple[float, float]]:
-     if country:
-         if xlim or ylim:
-             raise ValueError(
-                 "Specify either a country or bounding box limits (xlim, ylim), but not both."
-             )
-         # Load country shapefile and extract bounds
-         world = gpd.read_file(gpd.datasets.get_path("naturalearth_lowres"))
-         country_shp = world[world.name == country]
-         if country_shp.empty:
-             raise ValueError(f"Country '{country}' is unknown.")
-         bounds = country_shp.total_bounds # [minx, miny, maxx, maxy]
-         xlim, ylim = (bounds[0], bounds[2]), (bounds[1], bounds[3])
-     elif not (xlim and ylim):
-         raise ValueError(
-             "Either a country or bounding box limits (xlim, ylim) must be specified."
-         )
-
-     # Apply buffer
-     xlim = (xlim[0] - buffer, xlim[1] + buffer)
-     ylim = (ylim[0] - buffer, ylim[1] + buffer)
-
-     # Always validate CORDEX domain
-     if cordex_domain:
-         _validate_cordex_domain(xlim, ylim, cordex_domain)
-
-     return {"xlim": xlim, "ylim": ylim}
-
-
- def _validate_cordex_domain(xlim, ylim, cordex_domain):
-
-     # CORDEX domains data
-     cordex_domains_df = pd.DataFrame(
-         {
-             "min_lon": [
-                 -33,
-                 -28.3,
-                 89.25,
-                 86.75,
-                 19.25,
-                 44.0,
-                 -106.25,
-                 -115.0,
-                 -24.25,
-                 10.75,
-             ],
-             "min_lat": [
-                 -28,
-                 -23,
-                 -15.25,
-                 -54.25,
-                 -15.75,
-                 -4.0,
-                 -58.25,
-                 -14.5,
-                 -46.25,
-                 17.75,
-             ],
-             "max_lon": [
-                 20,
-                 18,
-                 147.0,
-                 -152.75,
-                 116.25,
-                 -172.0,
-                 -16.25,
-                 -30.5,
-                 59.75,
-                 140.25,
-             ],
-             "max_lat": [28, 21.7, 26.5, 13.75, 45.75, 65.0, 18.75, 28.5, 42.75, 69.75],
-             "cordex_domain": [
-                 "NAM-22",
-                 "EUR-22",
-                 "SEA-22",
-                 "AUS-22",
-                 "WAS-22",
-                 "EAS-22",
-                 "SAM-22",
-                 "CAM-22",
-                 "AFR-22",
-                 "CAS-22",
-             ],
-         }
-     )
-
-     def is_bbox_contained(bbox, domain):
-         """Check if bbox is contained within the domain bounding box."""
-         return (
-             bbox[0] >= domain["min_lon"]
-             and bbox[1] >= domain["min_lat"]
-             and bbox[2] <= domain["max_lon"]
-             and bbox[3] <= domain["max_lat"]
-         )
-
-     user_bbox = [xlim[0], ylim[0], xlim[1], ylim[1]]
-     domain_row = cordex_domains_df[cordex_domains_df["cordex_domain"] == cordex_domain]
-
-     if domain_row.empty:
-         raise ValueError(f"CORDEX domain '{cordex_domain}' is not recognized.")
-
-     domain_bbox = domain_row.iloc[0]
-
-     if not is_bbox_contained(user_bbox, domain_bbox):
-         suggested_domains = cordex_domains_df[
-             cordex_domains_df.apply(
-                 lambda row: is_bbox_contained(user_bbox, row), axis=1
-             )
-         ]
-
-         if suggested_domains.empty:
-             raise ValueError(
-                 f"The bounding box {user_bbox} is outside of all available CORDEX domains."
-             )
-
-         suggested_domain = suggested_domains.iloc[0]["cordex_domain"]
-
-         raise ValueError(
-             f"Bounding box {user_bbox} is not within '{cordex_domain}'. Suggested domain: '{suggested_domain}'."
-         )
-
-
- def process_worker(num_threads, **kwargs) -> xr.DataArray:
-     variable = kwargs["variable"]
-     log = logger.getChild(variable)
-     try:
-         with ThreadPoolExecutor(
-             max_workers=num_threads, thread_name_prefix="climate"
-         ) as executor:
-             return _climate_data_for_variable(executor, **kwargs)
-     except Exception as e:
-         log.exception(f"Process worker failed: {e}")
-         raise
-
-
- def _climate_data_for_variable(
-     executor: ThreadPoolExecutor,
-     *,
-     variable: str,
-     bbox: dict[str, tuple[float, float]],
-     cordex_domain: str,
-     rcp: str,
-     gcm: str,
-     rcm: str,
-     years_up_to: int,
-     years_obs: range,
-     obs: bool,
-     bias_correction: bool,
-     historical: bool,
-     remote: bool,
- ) -> xr.DataArray:
-     log = logger.getChild(variable)
-
-     pd.options.mode.chained_assignment = None
-     inventory_csv_url = (
-         INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
-     )
-     data = pd.read_csv(inventory_csv_url)
-     column_to_use = "location" if remote else "hub"
-
-     # Filter data based on whether we need historical data
-     experiments = [rcp]
-     if historical or bias_correction:
-         experiments.append("historical")
-
-     filtered_data = data[
-         lambda x: (x["activity"].str.contains("FAO", na=False))
-         & (x["domain"] == cordex_domain)
-         & (x["model"].str.contains(gcm, na=False))
-         & (x["rcm"].str.contains(rcm, na=False))
-         & (x["experiment"].isin(experiments))
-     ][["experiment", column_to_use]]
-
-     future_obs = None
-     if obs or bias_correction:
-         future_obs = executor.submit(
-             _thread_download_data,
-             url=None,
-             bbox=bbox,
-             variable=variable,
-             obs=True,
-             years_up_to=years_up_to,
-             years_obs=years_obs,
-             remote=remote,
-         )
-
-     if not obs:
-         download_fn = partial(
-             _thread_download_data,
-             bbox=bbox,
-             variable=variable,
-             obs=False,
-             years_obs=years_obs,
-             years_up_to=years_up_to,
-             remote=remote,
-         )
-         downloaded_models = list(
-             executor.map(download_fn, filtered_data[column_to_use])
-         )
-
-         # Add the downloaded models to the DataFrame
-         filtered_data["models"] = downloaded_models
-
-         if historical or bias_correction:
-             hist = filtered_data[filtered_data["experiment"] == "historical"]["models"].iloc[0].interpolate_na(dim="time", method="linear")
-             proj = filtered_data[filtered_data["experiment"] == rcp]["models"].iloc[0].interpolate_na(dim="time", method="linear")
-         else:
-             proj = filtered_data["models"].iloc[0].interpolate_na(dim="time", method="linear")
-
-         if bias_correction and historical:
-             # Load observations for bias correction
-             ref = future_obs.result()
-             log.info("Training eqm with historical data")
-             QM_mo = sdba.EmpiricalQuantileMapping.train(
-                 ref,
-                 hist,
-                 group="time.month",
-                 kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
-             )
-             log.info("Performing bias correction with eqm")
-             hist_bs = QM_mo.adjust(hist, extrapolation="constant", interp="linear")
-             proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
-             log.info("Done!")
-             if variable == "hurs":
-                 hist_bs = hist_bs.where(hist_bs <= 100, 100)
-                 hist_bs = hist_bs.where(hist_bs >= 0, 0)
-             combined = xr.concat([hist_bs, proj_bs], dim="time")
-             return combined
-
-         elif not bias_correction and historical:
-             combined = xr.concat([hist, proj], dim="time")
-             return combined
-
-         elif bias_correction and not historical:
-             ref = future_obs.result()
-             log.info("Training eqm with historical data")
-             QM_mo = sdba.EmpiricalQuantileMapping.train(
-                 ref,
-                 hist,
-                 group="time.month",
-                 kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
-             ) # multiplicative approach for pr, rsds and wind speed
-             log.info("Performing bias correction with eqm")
-             proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
-             log.info("Done!")
-             if variable == "hurs":
-                 proj_bs = proj_bs.where(proj_bs <= 100, 100)
-                 proj_bs = proj_bs.where(proj_bs >= 0, 0)
-             return proj_bs
-
-         return proj
-
-     else: # when observations are True
-         downloaded_obs = future_obs.result()
-         log.info("Done!")
-         return downloaded_obs
-
-
- def _thread_download_data(url: str | None, **kwargs):
-     variable = kwargs["variable"]
-     temporal = "observations" if kwargs["obs"] else ("historical" if "historical" in str(url) else "projections")
-     log = logger.getChild(f"{variable}-{temporal}")
-     try:
-         return _download_data(url=url, **kwargs)
-     except Exception as e:
-         log.exception(f"Failed to process data from {url}: {e}")
-         raise
-
-
- def _download_data(
-     url: str | None,
-     bbox: dict[str, tuple[float, float]],
-     variable: str,
-     obs: bool,
-     years_obs: range,
-     years_up_to: int,
-     remote: bool,
- ) -> xr.DataArray:
-     temporal = "observations" if obs else ("historical" if url and "historical" in url else "projections")
-     log = logger.getChild(f"{variable}-{temporal}")
-
-     if obs:
-         var = VARIABLES_MAP[variable]
-         log.info(f"Establishing connection to ERA5 data for {variable}({var})")
-         if remote:
-             ds_var = xr.open_dataset(ERA5_DATA_REMOTE_URL)[var]
-         else:
-             ds_var = xr.open_dataset(ERA5_DATA_LOCAL_PATH)[var]
-         log.info(f"Connection to ERA5 data for {variable}({var}) has been established")
-
-         # Coordinate normalization and renaming for 'hurs'
-         if var == "hurs":
-             ds_var = ds_var.rename({"lat": "latitude", "lon": "longitude"})
-             ds_cropped = ds_var.sel(
-                 longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
-                 latitude=slice(bbox["ylim"][0], bbox["ylim"][1]),
-             )
-         else:
-             ds_var.coords["longitude"] = (ds_var.coords["longitude"] + 180) % 360 - 180
-             ds_var = ds_var.sortby(ds_var.longitude)
-             ds_cropped = ds_var.sel(
-                 longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
-                 latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
-             )
-
-         # Unit conversion
-         if var in ["t2mx", "t2mn", "t2m"]:
-             ds_cropped -= 273.15 # Convert from Kelvin to Celsius
-             ds_cropped.attrs["units"] = "°C"
-         elif var == "tp":
-             ds_cropped *= 1000 # Convert precipitation
-             ds_cropped.attrs["units"] = "mm"
-         elif var == "ssrd":
-             ds_cropped /= 86400 # Convert from J/m^2 to W/m^2
-             ds_cropped.attrs["units"] = "W m-2"
-         elif var == "sfcwind":
-             ds_cropped = ds_cropped * (
-                 4.87 / np.log((67.8 * 10) - 5.42)
-             ) # Convert wind speed from 10 m to 2 m
-             ds_cropped.attrs["units"] = "m s-1"
-
-         # Select years
-         years = [x for x in years_obs]
-         time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
-             ds_cropped["time"].dt.year <= years[-1]
-         )
-
-     else:
-         log.info(f"Establishing connection to CORDEX data for {variable}")
-         ds_var = xr.open_dataset(url)[variable]
-         log.info(f"Connection to CORDEX data for {variable} has been established")
-         ds_cropped = ds_var.sel(
-             longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
-             latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
-         )
-
-         # Unit conversion
-         if variable in ["tas", "tasmax", "tasmin"]:
-             ds_cropped -= 273.15 # Convert from Kelvin to Celsius
-             ds_cropped.attrs["units"] = "°C"
-         elif variable == "pr":
-             ds_cropped *= 86400 # Convert from kg m^-2 s^-1 to mm/day
-             ds_cropped.attrs["units"] = "mm"
-         elif variable == "rsds":
-             ds_cropped.attrs["units"] = "W m-2"
-         elif variable == "sfcWind":
-             ds_cropped = ds_cropped * (
-                 4.87 / np.log((67.8 * 10) - 5.42)
-             ) # Convert wind speed from 10 m to 2 m
-             ds_cropped.attrs["units"] = "m s-1"
-
-         # Select years based on rcp
-         if "rcp" in url:
-             years = [x for x in range(2006, years_up_to + 1)]
-         else:
-             years = [x for x in DEFAULT_YEARS_OBS]
-
-         # Add missing dates
-         ds_cropped = ds_cropped.convert_calendar(
-             calendar="gregorian", missing=np.nan, align_on="date"
-         )
-
-         time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
-             ds_cropped["time"].dt.year <= years[-1]
-         )
-
-     # subset years
-     ds_cropped = ds_cropped.sel(time=time_mask)
-
-     assert isinstance(ds_cropped, xr.DataArray)
-
-     if obs:
-         log.info(
-             f"ERA5 data for {variable} has been processed: unit conversion ({ds_cropped.attrs.get('units', 'unknown units')}), time selection ({years[0]}-{years[-1]})"
-         )
-     else:
-         log.info(
-             f"CORDEX data for {variable} has been processed: unit conversion ({ds_cropped.attrs.get('units', 'unknown units')}), calendar transformation (360-day to Gregorian), time selection ({years[0]}-{years[-1]})"
-         )
-
-     return ds_cropped
-
-
- if __name__ == "__main__":
-     data = get_climate_data(
-         country="Zambia",
-         cordex_domain="AFR-22",
-         rcp="rcp26",
-         gcm="MPI",
-         rcm="REMO",
-         years_up_to=2030,
-         obs=False,
-         bias_correction=True,
-         historical=False,
-     )
-     print(data)
+ import os
+ import multiprocessing as mp
+ from concurrent.futures import ThreadPoolExecutor
+ from functools import partial
+ import logging
+ import warnings
+
+ warnings.filterwarnings(
+     "ignore",
+     category=FutureWarning,
+     message=".*geopandas.dataset module is deprecated.*",
+ )
+ import geopandas as gpd # noqa: E402
+ import pandas as pd # noqa: E402
+ import xarray as xr # noqa: E402
+ import numpy as np # noqa: E402
+ from xclim import sdba # noqa: E402
+
+
+ logger = logging.getLogger("climate")
+ logger.handlers = [] # Remove any existing handlers
+ handler = logging.StreamHandler()
+ formatter = logging.Formatter(
+     "%(asctime)s | %(name)s | %(process)d:%(thread)d [%(levelname)s]: %(message)s"
+ )
+ handler.setFormatter(formatter)
+ for hdlr in logger.handlers[:]: # remove all old handlers
+     logger.removeHandler(hdlr)
+ logger.addHandler(handler)
+ logger.setLevel(logging.DEBUG)
+
+ VARIABLES_MAP = {
+     "pr": "tp",
+     "tasmax": "t2mx",
+     "tasmin": "t2mn",
+     "hurs": "hurs",
+     "sfcWind": "sfcwind",
+     "rsds": "ssrd",
+ }
+ VALID_VARIABLES = list(VARIABLES_MAP)
+ # TODO: Throw an error if the selected country is not in the selected domain
+ VALID_DOMAINS = [
+     "NAM-22",
+     "EUR-22",
+     "AFR-22",
+     "EAS-22",
+     "SEA-22",
+     "WAS-22",
+     "AUS-22",
+     "SAM-22",
+     "CAM-22",
+ ]
+ VALID_RCPS = ["rcp26", "rcp85"]
+ VALID_GCM = ["MOHC", "MPI", "NCC"]
+ VALID_RCM = ["REMO", "Reg"]
+
+ INVENTORY_DATA_REMOTE_URL = (
+     "https://hub.ipcc.ifca.es/thredds/fileServer/inventories/cava.csv"
+ )
+ INVENTORY_DATA_LOCAL_PATH = os.path.join(
+     os.path.expanduser("~"), "shared/inventories/cava/inventory.csv"
+ )
+ ERA5_DATA_REMOTE_URL = (
+     "https://hub.ipcc.ifca.es/thredds/dodsC/fao/observations/ERA5/0.25/ERA5_025.ncml"
+ )
+ ERA5_DATA_LOCAL_PATH = os.path.join(
+     os.path.expanduser("~"), "shared/data/observations/ERA5/0.25/ERA5_025.ncml"
+ )
+ DEFAULT_YEARS_OBS = range(1980, 2006)
+
+
+ class VariableNotAvailableError(Exception):
+     """Raised when a requested climate variable is not available in the model."""
+     pass
+
+
+ def get_climate_data(
+     *,
+     country: str | None,
+     cordex_domain: str,
+     rcp: str,
+     gcm: str,
+     rcm: str,
+     years_up_to: int,
+     years_obs: range | None = None,
+     bias_correction: bool = False,
+     historical: bool = False,
+     obs: bool = False,
+     buffer: int = 0,
+     xlim: tuple[float, float] | None = None,
+     ylim: tuple[float, float] | None = None,
+     remote: bool = True,
+     variables: list[str] | None = None,
+     num_processes: int = len(VALID_VARIABLES),
+     max_threads_per_process: int = 8,
+ ) -> dict[str, xr.DataArray]:
+     f"""
+     Process climate data required by pyAEZ climate module.
+     The function automatically access CORDEX-CORE models at 0.25° and the ERA5 datasets.
+
+     Args:
+         country (str): Name of the country for which data is to be processed.
+             Use None if specifying a region using xlim and ylim.
+         cordex_domain (str): CORDEX domain of the climate data. One of {VALID_DOMAINS}.
+         rcp (str): Representative Concentration Pathway. One of {VALID_RCPS}.
+         gcm (str): GCM name. One of {VALID_GCM}.
+         rcm (str): RCM name. One of {VALID_RCM}.
+         years_up_to (int): The ending year for the projected data. Projections start in 2006 and ends in 2100.
+             Hence, if years_up_to is set to 2030, data will be downloaded for the 2006-2030 period.
+         years_obs (range): Range of years for observational data (ERA5 only). Only used when obs is True. (default: None).
+         bias_correction (bool): Whether to apply bias correction (default: False).
+         historical (bool): Flag to indicate if processing historical data (default: False).
+             If True, historical data is provided together with projections.
+             Historical simulation runs for CORDEX-CORE initiative are provided for the 1980-2005 time period.
+         obs (bool): Flag to indicate if processing observational data (default: False).
+         buffer (int): Buffer distance to expand the region of interest (default: 0).
+         xlim (tuple or None): Longitudinal bounds of the region of interest. Use only when country is None (default: None).
+         ylim (tuple or None): Latitudinal bounds of the region of interest. Use only when country is None (default: None).
+         remote (bool): Flag to work with remote data or not (default: True).
+         variables (list[str] or None): List of variables to process. Must be a subset of {VALID_VARIABLES}. If None, all variables are processed. (default: None).
+         num_processes (int): Number of processes to use, one per variable.
+             By default equals to the number of all possible variables. (default: {len(VALID_VARIABLES)}).
+         max_threads_per_process (int): Max number of threads within each process. (default: 8).
+
+     Returns:
+         dict: A dictionary containing processed climate data for each variable as an xarray object.
+     """
+
+     if xlim is None and ylim is not None or xlim is not None and ylim is None:
+         raise ValueError(
+             "xlim and ylim mismatch: they must be both specified or both unspecified"
+         )
+     if country is None and xlim is None:
+         raise ValueError("You must specify a country or (xlim, ylim)")
+     if country is not None and xlim is not None:
+         raise ValueError("You must specify either country or (xlim, ylim), not both")
+     verify_variables = {
+         "cordex_domain": VALID_DOMAINS,
+         "rcp": VALID_RCPS,
+         "gcm": VALID_GCM,
+         "rcm": VALID_RCM,
+     }
+     for var_name, valid_values in verify_variables.items():
+         var_value = locals()[var_name]
+         if var_value not in valid_values:
+             raise ValueError(
+                 f"Invalid {var_name}={var_value}. Must be one of {valid_values}"
+             )
+     if years_up_to <= 2006:
+         raise ValueError("years_up_to must be greater than 2006")
+     if years_obs is not None and not (1980 <= min(years_obs) <= max(years_obs) <= 2020):
+         raise ValueError("Years in years_obs must be within the range 1980 to 2020")
+     if obs and years_obs is None:
+         raise ValueError("years_obs must be provided when obs is True")
+     if not obs or years_obs is None:
+         # Make sure years_obs is set to default when obs=False
+         years_obs = DEFAULT_YEARS_OBS
+
+     # Validate variables if provided
+     if variables is not None:
+         invalid_vars = [var for var in variables if var not in VALID_VARIABLES]
+         if invalid_vars:
+             raise ValueError(
+                 f"Invalid variables: {invalid_vars}. Must be a subset of {VALID_VARIABLES}"
+             )
+     else:
+         variables = VALID_VARIABLES
+
+     _validate_urls(gcm, rcm, rcp, remote, cordex_domain, obs, historical, bias_correction)
+
+     bbox = _geo_localize(country, xlim, ylim, buffer, cordex_domain)
+
+     with mp.Pool(processes=min(num_processes, len(variables))) as pool:
+         futures = []
+         for variable in variables:
+             futures.append(
+                 pool.apply_async(
+                     process_worker,
+                     args=(max_threads_per_process,),
+                     kwds={
+                         "variable": variable,
+                         "bbox": bbox,
+                         "cordex_domain": cordex_domain,
+                         "rcp": rcp,
+                         "gcm": gcm,
+                         "rcm": rcm,
+                         "years_up_to": years_up_to,
+                         "years_obs": years_obs,
+                         "obs": obs,
+                         "bias_correction": bias_correction,
+                         "historical": historical,
+                         "remote": remote,
+                     },
+                 )
+             )
+
+         # Try to get the first result - if it fails, terminate all processes
+         try:
+             first_result = futures[0].get()
+             # If first result succeeded, try to get the rest
+             results = {variables[0]: first_result}
+             for i, future in enumerate(futures[1:], 1):
+                 try:
+                     results[variables[i]] = future.get()
+                 except Exception as e:
+                     pool.terminate()
+                     raise e
+         except Exception as e:
+             pool.terminate()
+             raise e
+         finally:
+             pool.close()
+             pool.join()
+
+     return results
+
+
+ def _validate_urls(
+     gcm: str = None,
+     rcm: str = None,
+     rcp: str = None,
+     remote: bool = True,
+     cordex_domain: str = None,
+     obs: bool = False,
+     historical: bool = False,
+     bias_correction: bool = False,
+ ):
+     # Load the data
+     log = logger.getChild("URL-validation")
+
+     if obs is False:
+         inventory_csv_url = (
+             INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
+         )
+         data = pd.read_csv(inventory_csv_url)
+
+         # Set the column to use based on whether the data is remote or local
+         column_to_use = "location" if remote else "hub"
+
+         # Define which experiments we need
+         experiments = [rcp]
+         if historical or bias_correction:
+             experiments.append("historical")
+
+         # Filter the data based on the conditions
+         filtered_data = data[
+             lambda x: (
+                 x["activity"].str.contains("FAO", na=False)
+                 & (x["domain"] == cordex_domain)
+                 & (x["model"].str.contains(gcm, na=False))
+                 & (x["rcm"].str.contains(rcm, na=False))
+                 & (x["experiment"].isin(experiments))
+             )
+         ][["experiment", column_to_use]]
+
+         # Extract the column values as a list
+         for _, row in filtered_data.iterrows():
+             if row["experiment"] == "historical":
+                 log_hist = logger.getChild("URL-validation-historical")
+                 log_hist.info(f"{row[column_to_use]}")
+             else:
+                 log_proj = logger.getChild("URL-validation-projections")
+                 log_proj.info(f"{row[column_to_use]}")
+
+     else: # when obs is True
+         log_obs = logger.getChild("URL-validation-observations")
+         log_obs.info(f"{ERA5_DATA_REMOTE_URL}")
+
+
+ def _geo_localize(
+     country: str = None,
+     xlim: tuple[float, float] = None,
+     ylim: tuple[float, float] = None,
+     buffer: int = 0,
+     cordex_domain: str = None,
+ ) -> dict[str, tuple[float, float]]:
+     if country:
+         if xlim or ylim:
+             raise ValueError(
+                 "Specify either a country or bounding box limits (xlim, ylim), but not both."
+             )
+         # Load country shapefile and extract bounds
+         world = gpd.read_file(gpd.datasets.get_path("naturalearth_lowres"))
+         country_shp = world[world.name == country]
+         if country_shp.empty:
+             raise ValueError(f"Country '{country}' is unknown.")
+         bounds = country_shp.total_bounds # [minx, miny, maxx, maxy]
+         xlim, ylim = (bounds[0], bounds[2]), (bounds[1], bounds[3])
+     elif not (xlim and ylim):
+         raise ValueError(
+             "Either a country or bounding box limits (xlim, ylim) must be specified."
+         )
+
+     # Apply buffer
+     xlim = (xlim[0] - buffer, xlim[1] + buffer)
+     ylim = (ylim[0] - buffer, ylim[1] + buffer)
+
+     # Always validate CORDEX domain
+     if cordex_domain:
+         _validate_cordex_domain(xlim, ylim, cordex_domain)
+
+     return {"xlim": xlim, "ylim": ylim}
+
+
+ def _validate_cordex_domain(xlim, ylim, cordex_domain):
+
+     # CORDEX domains data
+     cordex_domains_df = pd.DataFrame(
+         {
+             "min_lon": [
+                 -33,
+                 -28.3,
+                 89.25,
+                 86.75,
+                 19.25,
+                 44.0,
+                 -106.25,
+                 -115.0,
+                 -24.25,
+                 10.75,
+             ],
+             "min_lat": [
+                 -28,
+                 -23,
+                 -15.25,
+                 -54.25,
+                 -15.75,
+                 -4.0,
+                 -58.25,
+                 -14.5,
+                 -46.25,
+                 17.75,
+             ],
+             "max_lon": [
+                 20,
+                 18,
+                 147.0,
+                 -152.75,
+                 116.25,
+                 -172.0,
+                 -16.25,
+                 -30.5,
+                 59.75,
+                 140.25,
+             ],
+             "max_lat": [28, 21.7, 26.5, 13.75, 45.75, 65.0, 18.75, 28.5, 42.75, 69.75],
+             "cordex_domain": [
+                 "NAM-22",
+                 "EUR-22",
+                 "SEA-22",
+                 "AUS-22",
+                 "WAS-22",
+                 "EAS-22",
+                 "SAM-22",
+                 "CAM-22",
+                 "AFR-22",
+                 "CAS-22",
+             ],
+         }
+     )
+
+     def is_bbox_contained(bbox, domain):
+         """Check if bbox is contained within the domain bounding box."""
+         return (
+             bbox[0] >= domain["min_lon"]
+             and bbox[1] >= domain["min_lat"]
+             and bbox[2] <= domain["max_lon"]
+             and bbox[3] <= domain["max_lat"]
+         )
+
+     user_bbox = [xlim[0], ylim[0], xlim[1], ylim[1]]
+     domain_row = cordex_domains_df[cordex_domains_df["cordex_domain"] == cordex_domain]
+
+     if domain_row.empty:
+         raise ValueError(f"CORDEX domain '{cordex_domain}' is not recognized.")
+
+     domain_bbox = domain_row.iloc[0]
+
+     if not is_bbox_contained(user_bbox, domain_bbox):
+         suggested_domains = cordex_domains_df[
+             cordex_domains_df.apply(
+                 lambda row: is_bbox_contained(user_bbox, row), axis=1
+             )
+         ]
+
+         if suggested_domains.empty:
+             raise ValueError(
+                 f"The bounding box {user_bbox} is outside of all available CORDEX domains."
+             )
+
+         suggested_domain = suggested_domains.iloc[0]["cordex_domain"]
+
+         raise ValueError(
+             f"Bounding box {user_bbox} is not within '{cordex_domain}'. Suggested domain: '{suggested_domain}'."
+         )
+
+
+ def process_worker(num_threads, **kwargs) -> xr.DataArray:
+     variable = kwargs["variable"]
+     log = logger.getChild(variable)
+     try:
+         with ThreadPoolExecutor(
+             max_workers=num_threads, thread_name_prefix="climate"
+         ) as executor:
+             return _climate_data_for_variable(executor, **kwargs)
+     except Exception as e:
+         log.exception(f"Process worker failed: {e}")
+         if "is not available in model configuration" in str(e):
+             raise VariableNotAvailableError(str(e)) from e
+         raise
+
+
+ def _climate_data_for_variable(
+     executor: ThreadPoolExecutor,
+     *,
+     variable: str,
+     bbox: dict[str, tuple[float, float]],
+     cordex_domain: str,
+     rcp: str,
+     gcm: str,
+     rcm: str,
+     years_up_to: int,
+     years_obs: range,
+     obs: bool,
+     bias_correction: bool,
+     historical: bool,
+     remote: bool,
+ ) -> xr.DataArray:
+     log = logger.getChild(variable)
+
+     pd.options.mode.chained_assignment = None
+     inventory_csv_url = (
+         INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
+     )
+     data = pd.read_csv(inventory_csv_url)
+     column_to_use = "location" if remote else "hub"
+
+     # Filter data based on whether we need historical data
+     experiments = [rcp]
+     if historical or bias_correction:
+         experiments.append("historical")
+
+     filtered_data = data[
+         lambda x: (x["activity"].str.contains("FAO", na=False))
+         & (x["domain"] == cordex_domain)
+         & (x["model"].str.contains(gcm, na=False))
+         & (x["rcm"].str.contains(rcm, na=False))
+         & (x["experiment"].isin(experiments))
+     ][["experiment", column_to_use]]
+
+     future_obs = None
+     if obs or bias_correction:
+         future_obs = executor.submit(
+             _thread_download_data,
+             url=None,
+             bbox=bbox,
+             variable=variable,
+             obs=True,
+             years_up_to=years_up_to,
+             years_obs=years_obs,
+             remote=remote,
+         )
+
+     if not obs:
+         download_fn = partial(
+             _thread_download_data,
+             bbox=bbox,
+             variable=variable,
+             obs=False,
+             years_obs=years_obs,
+             years_up_to=years_up_to,
+             remote=remote,
+         )
+         downloaded_models = list(
+             executor.map(download_fn, filtered_data[column_to_use])
+         )
+
+         # Add the downloaded models to the DataFrame
+         filtered_data["models"] = downloaded_models
+
+         if historical or bias_correction:
+             hist = filtered_data[filtered_data["experiment"] == "historical"]["models"].iloc[0]
+             proj = filtered_data[filtered_data["experiment"] == rcp]["models"].iloc[0]
+
+             hist = hist.interpolate_na(dim="time", method="linear")
+             proj = proj.interpolate_na(dim="time", method="linear")
+         else:
+             proj = filtered_data["models"].iloc[0]
+             proj = proj.interpolate_na(dim="time", method="linear")
+
+         if bias_correction and historical:
+             # Load observations for bias correction
+             ref = future_obs.result()
+             log.info("Training eqm with historical data")
+             QM_mo = sdba.EmpiricalQuantileMapping.train(
+                 ref,
+                 hist,
+                 group="time.month",
+                 kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
+             )
+             log.info("Performing bias correction with eqm")
+             hist_bs = QM_mo.adjust(hist, extrapolation="constant", interp="linear")
+             proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
+             log.info("Done!")
+             if variable == "hurs":
+                 hist_bs = hist_bs.where(hist_bs <= 100, 100)
+                 hist_bs = hist_bs.where(hist_bs >= 0, 0)
+             combined = xr.concat([hist_bs, proj_bs], dim="time")
+             return combined
+
+         elif not bias_correction and historical:
+             combined = xr.concat([hist, proj], dim="time")
+             return combined
+
+         elif bias_correction and not historical:
+             ref = future_obs.result()
+             log.info("Training eqm with historical data")
+             QM_mo = sdba.EmpiricalQuantileMapping.train(
+                 ref,
+                 hist,
+                 group="time.month",
+                 kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
+             ) # multiplicative approach for pr, rsds and wind speed
+             log.info("Performing bias correction with eqm")
+             proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
+             log.info("Done!")
+             if variable == "hurs":
+                 proj_bs = proj_bs.where(proj_bs <= 100, 100)
+                 proj_bs = proj_bs.where(proj_bs >= 0, 0)
+             return proj_bs
+
+         return proj
+
+     else: # when observations are True
+         downloaded_obs = future_obs.result()
+         log.info("Done!")
+         return downloaded_obs
+
+
+ def _thread_download_data(url: str | None, **kwargs):
+     variable = kwargs["variable"]
+     temporal = "observations" if kwargs["obs"] else ("historical" if "historical" in str(url) else "projections")
+     log = logger.getChild(f"{variable}-{temporal}")
+     try:
+         return _download_data(url=url, **kwargs)
+     except Exception as e:
+         log.exception(f"Failed to process data from {url}: {e}")
+         raise
+
+
+ def _download_data(
+     url: str | None,
+     bbox: dict[str, tuple[float, float]],
+     variable: str,
+     obs: bool,
+     years_obs: range,
+     years_up_to: int,
+     remote: bool,
+ ) -> xr.DataArray:
+     temporal = "observations" if obs else ("historical" if url and "historical" in url else "projections")
+     log = logger.getChild(f"{variable}-{temporal}")
+
+     if obs:
+         var = VARIABLES_MAP[variable]
+         log.info(f"Establishing connection to ERA5 data for {variable}({var})")
+         if remote:
+             ds_var = xr.open_dataset(ERA5_DATA_REMOTE_URL)[var]
+         else:
+             ds_var = xr.open_dataset(ERA5_DATA_LOCAL_PATH)[var]
+         log.info(f"Connection to ERA5 data for {variable}({var}) has been established")
+
+         # Coordinate normalization and renaming for 'hurs'
+         if var == "hurs":
+             ds_var = ds_var.rename({"lat": "latitude", "lon": "longitude"})
+             ds_cropped = ds_var.sel(
+                 longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
+                 latitude=slice(bbox["ylim"][0], bbox["ylim"][1]),
+             )
+         else:
+             ds_var.coords["longitude"] = (ds_var.coords["longitude"] + 180) % 360 - 180
+             ds_var = ds_var.sortby(ds_var.longitude)
+             ds_cropped = ds_var.sel(
+                 longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
+                 latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
+             )
+
+         # Unit conversion
+         if var in ["t2mx", "t2mn", "t2m"]:
+             ds_cropped -= 273.15 # Convert from Kelvin to Celsius
+             ds_cropped.attrs["units"] = "°C"
+         elif var == "tp":
+             ds_cropped *= 1000 # Convert precipitation
+             ds_cropped.attrs["units"] = "mm"
+         elif var == "ssrd":
+             ds_cropped /= 86400 # Convert from J/m^2 to W/m^2
+             ds_cropped.attrs["units"] = "W m-2"
+         elif var == "sfcwind":
+             ds_cropped = ds_cropped * (
+                 4.87 / np.log((67.8 * 10) - 5.42)
+             ) # Convert wind speed from 10 m to 2 m
+             ds_cropped.attrs["units"] = "m s-1"
+
+         # Select years
+         years = [x for x in years_obs]
+         time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
+             ds_cropped["time"].dt.year <= years[-1]
+         )
+
+     else:
+         log.info(f"Establishing connection to CORDEX data for {variable}")
+         ds_var = xr.open_dataset(url)[variable]
+
+         # Check if time dimension has a prefix, indicating variable is not available. This is a fix implemented by Ezi
+         time_dims = [dim for dim in ds_var.dims if dim.startswith('time_')]
+         if time_dims:
+             # Extract GCM and RCM from URL
+             model_info = url.split('/')[-1] # Get filename from URL
+             msg = f"Variable {variable} is not available in model configuration: {model_info}. You can rerun the function excluding {variable} from the variables list."
+             log.exception(msg)
+             raise ValueError(msg)
+
+         log.info(f"Connection to CORDEX data for {variable} has been established")
+         ds_cropped = ds_var.sel(
+             longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
+             latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
+         )
+
+         # Unit conversion
+         if variable in ["tas", "tasmax", "tasmin"]:
+             ds_cropped -= 273.15 # Convert from Kelvin to Celsius
+             ds_cropped.attrs["units"] = "°C"
+         elif variable == "pr":
+             ds_cropped *= 86400 # Convert from kg m^-2 s^-1 to mm/day
+             ds_cropped.attrs["units"] = "mm"
+         elif variable == "rsds":
+             ds_cropped.attrs["units"] = "W m-2"
+         elif variable == "sfcWind":
+             ds_cropped = ds_cropped * (
+                 4.87 / np.log((67.8 * 10) - 5.42)
+             ) # Convert wind speed from 10 m to 2 m
+             ds_cropped.attrs["units"] = "m s-1"
+
+         # Select years based on rcp
+         if "rcp" in url:
+             years = [x for x in range(2006, years_up_to + 1)]
+         else:
+             years = [x for x in DEFAULT_YEARS_OBS]
+
+         # Add missing dates
+         ds_cropped = ds_cropped.convert_calendar(
+             calendar="gregorian", missing=np.nan, align_on="date"
+         )
+
+         time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
+             ds_cropped["time"].dt.year <= years[-1]
+         )
+
+     # subset years
+     ds_cropped = ds_cropped.sel(time=time_mask)
+
+     assert isinstance(ds_cropped, xr.DataArray)
+
+     if obs:
+         log.info(
+             f"ERA5 data for {variable} has been processed: unit conversion ({ds_cropped.attrs.get('units', 'unknown units')}), time selection ({years[0]}-{years[-1]})"
+         )
+     else:
+         log.info(
+             f"CORDEX data for {variable} has been processed: unit conversion ({ds_cropped.attrs.get('units', 'unknown units')}), calendar transformation (360-day to Gregorian), time selection ({years[0]}-{years[-1]})"
+         )
+
+     return ds_cropped
+
+
+ if __name__ == "__main__":
+     data = get_climate_data(
+         country="Togo",
+         variables=["tasmax","hurs"],
+         cordex_domain="AFR-22",
+         rcp="rcp26",
+         gcm="MPI",
+         rcm="Reg",
+         years_up_to=2030,
+         obs=False,
+         bias_correction=False,
+         historical=False,
+     )
+     print(data)
+
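
A note on the headline change in 0.1.4, visible at the end of the diff: _download_data now detects when a requested variable is absent from a model configuration (the dataset exposes a time_-prefixed dimension instead of time) and raises a descriptive ValueError, process_worker re-raises such failures as the new VariableNotAvailableError, and get_climate_data terminates the whole worker pool as soon as any per-variable worker fails. The following is a minimal usage sketch, not code from the package: it assumes the THREDDS endpoints above are reachable and that the cavapy module is importable as published in the wheel; the country and variable choices are illustrative.

    # Hypothetical caller-side handling for cavapy 0.1.4 (illustrative only).
    from cavapy import VariableNotAvailableError, get_climate_data

    try:
        data = get_climate_data(
            country="Togo",
            variables=["tasmax", "hurs"],  # illustrative subset of VALID_VARIABLES
            cordex_domain="AFR-22",
            rcp="rcp26",
            gcm="MPI",
            rcm="Reg",
            years_up_to=2030,
        )
    except VariableNotAvailableError as err:
        # 0.1.4 fails fast: the pool is terminated and the message names the
        # missing variable, so the call can be retried without it.
        print(f"Retry with a reduced variables list: {err}")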