cavapy-0.1.0-py3-none-any.whl → cavapy-0.1.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


cavapy.py CHANGED
@@ -1,631 +1,649 @@
-import os
-import multiprocessing as mp
-from concurrent.futures import ThreadPoolExecutor
-from functools import partial
-import logging
-import warnings
-
-warnings.filterwarnings(
-    "ignore",
-    category=FutureWarning,
-    message=".*geopandas.dataset module is deprecated.*",
-)
-import geopandas as gpd  # noqa: E402
-import pandas as pd  # noqa: E402
-import xarray as xr  # noqa: E402
-import numpy as np  # noqa: E402
-from xclim import sdba  # noqa: E402
-
-
-logger = logging.getLogger("climate")
-logger.handlers = []  # Remove any existing handlers
-handler = logging.StreamHandler()
-formatter = logging.Formatter(
-    "%(asctime)s | %(name)s | %(process)d:%(thread)d [%(levelname)s]: %(message)s"
-)
-handler.setFormatter(formatter)
-for hdlr in logger.handlers[:]:  # remove all old handlers
-    logger.removeHandler(hdlr)
-logger.addHandler(handler)
-logger.setLevel(logging.DEBUG)
-
-VARIABLES_MAP = {
-    "pr": "tp",
-    "tasmax": "t2mx",
-    "tasmin": "t2mn",
-    "hurs": "hurs",
-    "sfcWind": "sfcwind",
-    "rsds": "ssrd",
-}
-VALID_VARIABLES = list(VARIABLES_MAP)
-# TODO: Throw an error if the selected country is not in the selected domain
-VALID_DOMAINS = [
-    "NAM-22",
-    "EUR-22",
-    "AFR-22",
-    "EAS-22",
-    "SEA-22",
-    "WAS-22",
-    "AUS-22",
-    "SAM-22",
-    "CAM-22",
-]
-VALID_RCPS = ["rcp26", "rcp85"]
-VALID_GCM = ["MOHC", "MPI", "NCC"]
-VALID_RCM = ["REMO", "Reg"]
-
-INVENTORY_DATA_REMOTE_URL = (
-    "https://hub.ipcc.ifca.es/thredds/fileServer/inventories/cava.csv"
-)
-INVENTORY_DATA_LOCAL_PATH = os.path.join(
-    os.path.expanduser("~"), "shared/inventories/cava/inventory.csv"
-)
-ERA5_DATA_REMOTE_URL = (
-    "https://hub.ipcc.ifca.es/thredds/dodsC/fao/observations/ERA5/0.25/ERA5_025.ncml"
-)
-ERA5_DATA_LOCAL_PATH = os.path.join(
-    os.path.expanduser("~"), "shared/data/observations/ERA5/0.25/ERA5_025.ncml"
-)
-DEFAULT_YEARS_OBS = range(1980, 2006)
-
-
-def get_climate_data(
-    *,
-    country: str | None,
-    cordex_domain: str,
-    rcp: str,
-    gcm: str,
-    rcm: str,
-    years_up_to: int,
-    years_obs: range | None = None,
-    bias_correction: bool = False,
-    historical: bool = False,
-    obs: bool = False,
-    buffer: int = 0,
-    xlim: tuple[float, float] | None = None,
-    ylim: tuple[float, float] | None = None,
-    remote: bool = True,
-    num_processes: int = len(VALID_VARIABLES),
-    max_threads_per_process: int = 8,
-) -> dict[str, xr.DataArray]:
-    f"""
-    Process climate data required by pyAEZ climate module.
-    The function automatically access CORDEX-CORE models at 0.25° and the ERA5 datasets.
-
-    Args:
-        country (str): Name of the country for which data is to be processed.
-            Use None if specifying a region using xlim and ylim.
-        cordex_domain (str): CORDEX domain of the climate data. One of {VALID_DOMAINS}.
-        rcp (str): Representative Concentration Pathway. One of {VALID_RCPS}.
-        gcm (str): GCM name. One of {VALID_GCM}.
-        rcm (str): RCM name. One of {VALID_RCM}.
-        years_up_to (int): The ending year for the projected data. Projections start in 2006 and ends in 2100.
-            Hence, if years_up_to is set to 2030, data will be downloaded for the 2006-2030 period.
-        years_obs (range): Range of years for observational data (ERA5 only). Only used when obs is True. (default: None).
-        bias_correction (bool): Whether to apply bias correction (default: False).
-        historical (bool): Flag to indicate if processing historical data (default: False).
-            If True, historical data is provided together with projections.
-            Historical simulation runs for CORDEX-CORE initiative are provided for the 1980-2005 time period.
-        obs (bool): Flag to indicate if processing observational data (default: False).
-        buffer (int): Buffer distance to expand the region of interest (default: 0).
-        xlim (tuple or None): Longitudinal bounds of the region of interest. Use only when country is None (default: None).
-        ylim (tuple or None): Latitudinal bounds of the region of interest. Use only when country is None (default: None).
-        remote (bool): Flag to work with remote data or not (default: True).
-        num_processes (int): Number of processes to use, one per variable.
-            By default equals to the number of all possible variables. (default: {len(VALID_VARIABLES)}).
-        max_threads_per_process (int): Max number of threads within each process. (default: 8).
-
-    Returns:
-        dict: A dictionary containing processed climate data for each variable as an xarray object.
-    """
-
-    if xlim is None and ylim is not None or xlim is not None and ylim is None:
-        raise ValueError(
-            "xlim and ylim mismatch: they must be both specified or both unspecified"
-        )
-    if country is None and xlim is None:
-        raise ValueError("You must specify a country or (xlim, ylim)")
-    if country is not None and xlim is not None:
-        raise ValueError("You must specify either country or (xlim, ylim), not both")
-    verify_variables = {
-        "cordex_domain": VALID_DOMAINS,
-        "rcp": VALID_RCPS,
-        "gcm": VALID_GCM,
-        "rcm": VALID_RCM,
-    }
-    for var_name, valid_values in verify_variables.items():
-        var_value = locals()[var_name]
-        if var_value not in valid_values:
-            raise ValueError(
-                f"Invalid {var_name}={var_value}. Must be one of {valid_values}"
-            )
-    if years_up_to <= 2006:
-        raise ValueError("years_up_to must be greater than 2006")
-    if years_obs is not None and not (1980 <= min(years_obs) <= max(years_obs) <= 2020):
-        raise ValueError("Years in years_obs must be within the range 1980 to 2020")
-    if obs and years_obs is None:
-        raise ValueError("years_obs must be provided when obs is True")
-    if not obs or years_obs is None:
-        # Make sure years_obs is set to default when obs=False
-        years_obs = DEFAULT_YEARS_OBS
-
-    _validate_urls(gcm, rcm, rcp, remote, cordex_domain, obs)
-
-    bbox = _geo_localize(country, xlim, ylim, buffer, cordex_domain)
-
-    with mp.Pool(processes=num_processes) as pool:
-        futures = []
-        for variable in VALID_VARIABLES:
-            futures.append(
-                pool.apply_async(
-                    process_worker,
-                    args=(max_threads_per_process,),
-                    kwds={
-                        "variable": variable,
-                        "bbox": bbox,
-                        "cordex_domain": cordex_domain,
-                        "rcp": rcp,
-                        "gcm": gcm,
-                        "rcm": rcm,
-                        "years_up_to": years_up_to,
-                        "years_obs": years_obs,
-                        "obs": obs,
-                        "bias_correction": bias_correction,
-                        "historical": historical,
-                        "remote": remote,
-                    },
-                )
-            )
-
-        results = {
-            variable: futures[i].get() for i, variable in enumerate(VALID_VARIABLES)
-        }
-
-        pool.close()  # Prevent any more tasks from being submitted to the pool
-        pool.join()  # Wait for all worker processes to finish
-
-    return results
-
-
-def _validate_urls(
-    gcm: str = None,
-    rcm: str = None,
-    rcp: str = None,
-    remote: bool = True,
-    cordex_domain: str = None,
-    obs: bool = False,
-):
-    # Load the data
-    log = logger.getChild("URLs validation")
-
-    if obs is False:
-        inventory_csv_url = (
-            INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
-        )
-        data = pd.read_csv(inventory_csv_url)
-
-        # Set the column to use based on whether the data is remote or local
-        column_to_use = "location" if remote else "hub"
-
-        # Filter the data based on the conditions
-        filtered_data = data[
-            lambda x: (
-                x["activity"].str.contains("FAO", na=False)
-                & (x["domain"] == cordex_domain)
-                & (x["model"].str.contains(gcm, na=False))
-                & (x["rcm"].str.contains(rcm, na=False))
-                & (x["experiment"].isin([rcp, "historical"]))
-            )
-        ][["experiment", column_to_use]]
-
-        # Extract the column values as a list
-        num_rows = filtered_data.shape[0]
-        column_values = filtered_data[column_to_use]
-
-        if num_rows == 1:
-            # Log the output for one row
-            row1 = column_values.iloc[0]
-            log.info(f"Projections: {row1}")
-        else:
-            # Log the output for two rows
-            row1 = column_values.iloc[0]
-            row2 = column_values.iloc[1]
-            log.info(f"Historical simulation: {row1}")
-            log.info(f"Projections: {row2}")
-    else:  # when obs is True
-        log.info(f"Observations: {ERA5_DATA_REMOTE_URL}")
-
-
-def _geo_localize(
-    country: str = None,
-    xlim: tuple[float, float] = None,
-    ylim: tuple[float, float] = None,
-    buffer: int = 0,
-    cordex_domain: str = None,
-) -> dict[str, tuple[float, float]]:
-    if country:
-        if xlim or ylim:
-            raise ValueError(
-                "Specify either a country or bounding box limits (xlim, ylim), but not both."
-            )
-        # Load country shapefile and extract bounds
-        world = gpd.read_file(gpd.datasets.get_path("naturalearth_lowres"))
-        country_shp = world[world.name == country]
-        if country_shp.empty:
-            raise ValueError(f"Country '{country}' is unknown.")
-        bounds = country_shp.total_bounds  # [minx, miny, maxx, maxy]
-        xlim, ylim = (bounds[0], bounds[2]), (bounds[1], bounds[3])
-    elif not (xlim and ylim):
-        raise ValueError(
-            "Either a country or bounding box limits (xlim, ylim) must be specified."
-        )
-
-    # Apply buffer
-    xlim = (xlim[0] - buffer, xlim[1] + buffer)
-    ylim = (ylim[0] - buffer, ylim[1] + buffer)
-
-    # Always validate CORDEX domain
-    if cordex_domain:
-        _validate_cordex_domain(xlim, ylim, cordex_domain)
-
-    return {"xlim": xlim, "ylim": ylim}
-
-
-def _validate_cordex_domain(xlim, ylim, cordex_domain):
-
-    # CORDEX domains data
-    cordex_domains_df = pd.DataFrame(
-        {
-            "min_lon": [
-                -33,
-                -28.3,
-                89.25,
-                86.75,
-                19.25,
-                44.0,
-                -106.25,
-                -115.0,
-                -24.25,
-                10.75,
-            ],
-            "min_lat": [
-                -28,
-                -23,
-                -15.25,
-                -54.25,
-                -15.75,
-                -4.0,
-                -58.25,
-                -14.5,
-                -46.25,
-                17.75,
-            ],
-            "max_lon": [
-                20,
-                18,
-                147.0,
-                -152.75,
-                116.25,
-                -172.0,
-                -16.25,
-                -30.5,
-                59.75,
-                140.25,
-            ],
-            "max_lat": [28, 21.7, 26.5, 13.75, 45.75, 65.0, 18.75, 28.5, 42.75, 69.75],
-            "cordex_domain": [
-                "NAM-22",
-                "EUR-22",
-                "SEA-22",
-                "AUS-22",
-                "WAS-22",
-                "EAS-22",
-                "SAM-22",
-                "CAM-22",
-                "AFR-22",
-                "CAS-22",
-            ],
-        }
-    )
-
-    def is_bbox_contained(bbox, domain):
-        """Check if bbox is contained within the domain bounding box."""
-        return (
-            bbox[0] >= domain["min_lon"]
-            and bbox[1] >= domain["min_lat"]
-            and bbox[2] <= domain["max_lon"]
-            and bbox[3] <= domain["max_lat"]
-        )
-
-    user_bbox = [xlim[0], ylim[0], xlim[1], ylim[1]]
-    domain_row = cordex_domains_df[cordex_domains_df["cordex_domain"] == cordex_domain]
-
-    if domain_row.empty:
-        raise ValueError(f"CORDEX domain '{cordex_domain}' is not recognized.")
-
-    domain_bbox = domain_row.iloc[0]
-
-    if not is_bbox_contained(user_bbox, domain_bbox):
-        suggested_domains = cordex_domains_df[
-            cordex_domains_df.apply(
-                lambda row: is_bbox_contained(user_bbox, row), axis=1
-            )
-        ]
-
-        if suggested_domains.empty:
-            raise ValueError(
-                f"The bounding box {user_bbox} is outside of all available CORDEX domains."
-            )
-
-        suggested_domain = suggested_domains.iloc[0]["cordex_domain"]
-
-        raise ValueError(
-            f"Bounding box {user_bbox} is not within '{cordex_domain}'. Suggested domain: '{suggested_domain}'."
-        )
-
-
-def process_worker(num_threads, **kwargs) -> xr.DataArray:
-    variable = kwargs["variable"]
-    log = logger.getChild(variable)
-    try:
-        with ThreadPoolExecutor(
-            max_workers=num_threads, thread_name_prefix="climate"
-        ) as executor:
-            return _climate_data_for_variable(executor, **kwargs)
-    except Exception as e:
-        log.exception(f"Process worker failed: {e}")
-        raise
-
-
-def _climate_data_for_variable(
-    executor: ThreadPoolExecutor,
-    *,
-    variable: str,
-    bbox: dict[str, tuple[float, float]],
-    cordex_domain: str,
-    rcp: str,
-    gcm: str,
-    rcm: str,
-    years_up_to: int,
-    years_obs: range,
-    obs: bool,
-    bias_correction: bool,
-    historical: bool,
-    remote: bool,
-) -> xr.DataArray:
-    log = logger.getChild(variable)
-
-    pd.options.mode.chained_assignment = None
-    inventory_csv_url = (
-        INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
-    )
-    data = pd.read_csv(inventory_csv_url)
-    column_to_use = "location" if remote else "hub"
-    filtered_data = data[
-        lambda x: (x["activity"].str.contains("FAO", na=False))
-        & (x["domain"] == cordex_domain)
-        & (x["model"].str.contains(gcm, na=False))
-        & (x["rcm"].str.contains(rcm, na=False))
-        & (x["experiment"].isin([rcp, "historical"]))
-    ][["experiment", column_to_use]]
-
-    future_obs = None
-    if obs or bias_correction:
-        future_obs = executor.submit(
-            _thread_download_data,
-            url=None,
-            bbox=bbox,
-            variable=variable,
-            obs=True,
-            years_up_to=years_up_to,
-            years_obs=years_obs,
-            remote=remote,
-        )
-
-    if not obs:
-        download_fn = partial(
-            _thread_download_data,
-            bbox=bbox,
-            variable=variable,
-            obs=False,
-            years_obs=years_obs,
-            years_up_to=years_up_to,
-            remote=remote,
-        )
-        downloaded_models = list(
-            executor.map(download_fn, filtered_data[column_to_use])
-        )
-
-        # Add the downloaded models to the DataFrame
-        filtered_data["models"] = downloaded_models
-        log.info("Interpolating missing values")
-        hist = (
-            filtered_data["models"].iloc[0].interpolate_na(dim="time", method="linear")
-        )
-        proj = (
-            filtered_data["models"].iloc[1].interpolate_na(dim="time", method="linear")
-        )
-        log.info("Missing values interpolated")
-
-        if bias_correction and historical:
-            # Load observations for bias correction
-            ref = future_obs.result()
-            log.info("Training eqm with historical data")
-            QM_mo = sdba.EmpiricalQuantileMapping.train(
-                ref,
-                hist,
-                group="time.month",
-                kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
-            )
-            log.info("Performing bias correction with eqm")
-            hist_bs = QM_mo.adjust(hist, extrapolation="constant", interp="linear")
-            proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
-            log.info("Done!")
-            if variable == "hurs":
-                hist_bs = hist_bs.where(hist_bs <= 100, 100)
-                hist_bs = hist_bs.where(hist_bs >= 0, 0)
-            combined = xr.concat([hist_bs, proj_bs], dim="time")
-            return combined
-
-        elif not bias_correction and historical:
-            combined = xr.concat([hist, proj], dim="time")
-            return combined
-
-        elif bias_correction and not historical:
-            ref = future_obs.result()
-            log.info("Training eqm with historical data")
-            QM_mo = sdba.EmpiricalQuantileMapping.train(
-                ref,
-                hist,
-                group="time.month",
-                kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
-            )  # multiplicative approach for pr, rsds and wind speed
-            log.info("Performing bias correction with eqm")
-            proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
-            log.info("Done!")
-            if variable == "hurs":
-                proj_bs = proj_bs.where(proj_bs <= 100, 100)
-                proj_bs = proj_bs.where(proj_bs >= 0, 0)
-            return proj_bs
-
-        return proj
-
-    else:  # when observations are True
-        downloaded_obs = future_obs.result()
-        log.info("Done!")
-        return downloaded_obs
-
-
-def _thread_download_data(url: str | None, **kwargs):
-    variable = kwargs["variable"]
-    log = logger.getChild(variable)
-    try:
-        return _download_data(url=url, **kwargs)
-    except Exception as e:
-        log.exception(f"Failed to download data from {url}: {e}")
-        raise
-
-
-def _download_data(
-    url: str | None,
-    bbox: dict[str, tuple[float, float]],
-    variable: str,
-    obs: bool,
-    years_obs: range,
-    years_up_to: int,
-    remote: bool,
-) -> xr.DataArray:
-    log = logger.getChild(variable)
-    if obs:
-        var = VARIABLES_MAP[variable]
-        log.info(f"Downloading observational data for {variable}({var})")
-        if remote:
-            ds_var = xr.open_dataset(ERA5_DATA_REMOTE_URL)[var]
-        else:
-            ds_var = xr.open_dataset(ERA5_DATA_LOCAL_PATH)[var]
-        log.info(f"Observational data for {variable}({var}) has been downloaded")
-
-        # Coordinate normalization and renaming for 'hurs'
-        if var == "hurs":
-            ds_var = ds_var.rename({"lat": "latitude", "lon": "longitude"})
-            ds_cropped = ds_var.sel(
-                longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
-                latitude=slice(bbox["ylim"][0], bbox["ylim"][1]),
-            )
-        else:
-            ds_var.coords["longitude"] = (ds_var.coords["longitude"] + 180) % 360 - 180
-            ds_var = ds_var.sortby(ds_var.longitude)
-            ds_cropped = ds_var.sel(
-                longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
-                latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
-            )
-
-        # Unit conversion
-        if var in ["t2mx", "t2mn", "t2m"]:
-            ds_cropped -= 273.15  # Convert from Kelvin to Celsius
-            ds_cropped.attrs["units"] = "°C"
-        elif var == "tp":
-            ds_cropped *= 1000  # Convert precipitation
-            ds_cropped.attrs["units"] = "mm"
-        elif var == "ssrd":
-            ds_cropped /= 86400  # Convert from J/m^2 to W/m^2
-            ds_cropped.attrs["units"] = "W m-2"
-        elif var == "sfcwind":
-            ds_cropped = ds_cropped * (
-                4.87 / np.log((67.8 * 10) - 5.42)
-            )  # Convert wind speed from 10 m to 2 m
-            ds_cropped.attrs["units"] = "m s-1"
-
-        # Select years
-        years = [x for x in years_obs]
-        time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
-            ds_cropped["time"].dt.year <= years[-1]
-        )
-
-    else:
-        log.info(f"Downloading CORDEX data for {variable}")
-        ds_var = xr.open_dataset(url)[variable]
-        log.info(f"CORDEX data for {variable} has been downloaded")
-        ds_cropped = ds_var.sel(
-            longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
-            latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
-        )
-
-        # Unit conversion
-        if variable in ["tas", "tasmax", "tasmin"]:
-            ds_cropped -= 273.15  # Convert from Kelvin to Celsius
-            ds_cropped.attrs["units"] = "°C"
-        elif variable == "pr":
-            ds_cropped *= 86400  # Convert from kg m^-2 s^-1 to mm/day
-            ds_cropped.attrs["units"] = "mm"
-        elif variable == "rsds":
-            ds_cropped.attrs["units"] = "W m-2"
-        elif variable == "sfcWind":
-            ds_cropped = ds_cropped * (
-                4.87 / np.log((67.8 * 10) - 5.42)
-            )  # Convert wind speed from 10 m to 2 m
-            ds_cropped.attrs["units"] = "m s-1"
-
-        # Select years based on rcp
-        if "rcp" in url:
-            years = [x for x in range(2006, years_up_to + 1)]
-        else:
-            years = [x for x in DEFAULT_YEARS_OBS]
-
-        # Add missing dates
-        ds_cropped = ds_cropped.convert_calendar(
-            calendar="gregorian", missing=np.nan, align_on="date"
-        )
-        log.debug(
-            "360-calendar converted into Gregorian calendar and missing values linearly interpolated"
-        )
-
-        time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
-            ds_cropped["time"].dt.year <= years[-1]
-        )
-
-    # subset years
-    ds_cropped = ds_cropped.sel(time=time_mask)
-
-    assert isinstance(ds_cropped, xr.DataArray)
-
-    log.info(
-        f"{'Observational' if obs else 'CORDEX'} data for {variable} has been processed"
-    )
-
-    return ds_cropped
-
-
-if __name__ == "__main__":
-    data = get_climate_data(
-        country="Zambia",
-        cordex_domain="AFR-22",
-        rcp="rcp26",
-        gcm="MPI",
-        rcm="REMO",
-        years_up_to=2030,
-        obs=False,
-        bias_correction=True,
-        historical=False,
-    )
-    print(data)
+import os
+import multiprocessing as mp
+from concurrent.futures import ThreadPoolExecutor
+from functools import partial
+import logging
+import warnings
+
+warnings.filterwarnings(
+    "ignore",
+    category=FutureWarning,
+    message=".*geopandas.dataset module is deprecated.*",
+)
+import geopandas as gpd  # noqa: E402
+import pandas as pd  # noqa: E402
+import xarray as xr  # noqa: E402
+import numpy as np  # noqa: E402
+from xclim import sdba  # noqa: E402
+
+
+logger = logging.getLogger("climate")
+logger.handlers = []  # Remove any existing handlers
+handler = logging.StreamHandler()
+formatter = logging.Formatter(
+    "%(asctime)s | %(name)s | %(process)d:%(thread)d [%(levelname)s]: %(message)s"
+)
+handler.setFormatter(formatter)
+for hdlr in logger.handlers[:]:  # remove all old handlers
+    logger.removeHandler(hdlr)
+logger.addHandler(handler)
+logger.setLevel(logging.DEBUG)
+
+VARIABLES_MAP = {
+    "pr": "tp",
+    "tasmax": "t2mx",
+    "tasmin": "t2mn",
+    "hurs": "hurs",
+    "sfcWind": "sfcwind",
+    "rsds": "ssrd",
+}
+VALID_VARIABLES = list(VARIABLES_MAP)
+# TODO: Throw an error if the selected country is not in the selected domain
+VALID_DOMAINS = [
+    "NAM-22",
+    "EUR-22",
+    "AFR-22",
+    "EAS-22",
+    "SEA-22",
+    "WAS-22",
+    "AUS-22",
+    "SAM-22",
+    "CAM-22",
+]
+VALID_RCPS = ["rcp26", "rcp85"]
+VALID_GCM = ["MOHC", "MPI", "NCC"]
+VALID_RCM = ["REMO", "Reg"]
+
+INVENTORY_DATA_REMOTE_URL = (
+    "https://hub.ipcc.ifca.es/thredds/fileServer/inventories/cava.csv"
+)
+INVENTORY_DATA_LOCAL_PATH = os.path.join(
+    os.path.expanduser("~"), "shared/inventories/cava/inventory.csv"
+)
+ERA5_DATA_REMOTE_URL = (
+    "https://hub.ipcc.ifca.es/thredds/dodsC/fao/observations/ERA5/0.25/ERA5_025.ncml"
+)
+ERA5_DATA_LOCAL_PATH = os.path.join(
+    os.path.expanduser("~"), "shared/data/observations/ERA5/0.25/ERA5_025.ncml"
+)
+DEFAULT_YEARS_OBS = range(1980, 2006)
+
+
+def get_climate_data(
+    *,
+    country: str | None,
+    cordex_domain: str,
+    rcp: str,
+    gcm: str,
+    rcm: str,
+    years_up_to: int,
+    years_obs: range | None = None,
+    bias_correction: bool = False,
+    historical: bool = False,
+    obs: bool = False,
+    buffer: int = 0,
+    xlim: tuple[float, float] | None = None,
+    ylim: tuple[float, float] | None = None,
+    remote: bool = True,
+    variables: list[str] | None = None,
+    num_processes: int = len(VALID_VARIABLES),
+    max_threads_per_process: int = 8,
+) -> dict[str, xr.DataArray]:
+    f"""
+    Process climate data required by pyAEZ climate module.
+    The function automatically access CORDEX-CORE models at 0.25° and the ERA5 datasets.
+
+    Args:
+        country (str): Name of the country for which data is to be processed.
+            Use None if specifying a region using xlim and ylim.
+        cordex_domain (str): CORDEX domain of the climate data. One of {VALID_DOMAINS}.
+        rcp (str): Representative Concentration Pathway. One of {VALID_RCPS}.
+        gcm (str): GCM name. One of {VALID_GCM}.
+        rcm (str): RCM name. One of {VALID_RCM}.
+        years_up_to (int): The ending year for the projected data. Projections start in 2006 and ends in 2100.
+            Hence, if years_up_to is set to 2030, data will be downloaded for the 2006-2030 period.
+        years_obs (range): Range of years for observational data (ERA5 only). Only used when obs is True. (default: None).
+        bias_correction (bool): Whether to apply bias correction (default: False).
+        historical (bool): Flag to indicate if processing historical data (default: False).
+            If True, historical data is provided together with projections.
+            Historical simulation runs for CORDEX-CORE initiative are provided for the 1980-2005 time period.
+        obs (bool): Flag to indicate if processing observational data (default: False).
+        buffer (int): Buffer distance to expand the region of interest (default: 0).
+        xlim (tuple or None): Longitudinal bounds of the region of interest. Use only when country is None (default: None).
+        ylim (tuple or None): Latitudinal bounds of the region of interest. Use only when country is None (default: None).
+        remote (bool): Flag to work with remote data or not (default: True).
+        variables (list[str] or None): List of variables to process. Must be a subset of {VALID_VARIABLES}. If None, all variables are processed. (default: None).
+        num_processes (int): Number of processes to use, one per variable.
+            By default equals to the number of all possible variables. (default: {len(VALID_VARIABLES)}).
+        max_threads_per_process (int): Max number of threads within each process. (default: 8).
+
+    Returns:
+        dict: A dictionary containing processed climate data for each variable as an xarray object.
+    """
+
+    if xlim is None and ylim is not None or xlim is not None and ylim is None:
+        raise ValueError(
+            "xlim and ylim mismatch: they must be both specified or both unspecified"
+        )
+    if country is None and xlim is None:
+        raise ValueError("You must specify a country or (xlim, ylim)")
+    if country is not None and xlim is not None:
+        raise ValueError("You must specify either country or (xlim, ylim), not both")
+    verify_variables = {
+        "cordex_domain": VALID_DOMAINS,
+        "rcp": VALID_RCPS,
+        "gcm": VALID_GCM,
+        "rcm": VALID_RCM,
+    }
+    for var_name, valid_values in verify_variables.items():
+        var_value = locals()[var_name]
+        if var_value not in valid_values:
+            raise ValueError(
+                f"Invalid {var_name}={var_value}. Must be one of {valid_values}"
+            )
+    if years_up_to <= 2006:
+        raise ValueError("years_up_to must be greater than 2006")
+    if years_obs is not None and not (1980 <= min(years_obs) <= max(years_obs) <= 2020):
+        raise ValueError("Years in years_obs must be within the range 1980 to 2020")
+    if obs and years_obs is None:
+        raise ValueError("years_obs must be provided when obs is True")
+    if not obs or years_obs is None:
+        # Make sure years_obs is set to default when obs=False
+        years_obs = DEFAULT_YEARS_OBS
+
+    # Validate variables if provided
+    if variables is not None:
+        invalid_vars = [var for var in variables if var not in VALID_VARIABLES]
+        if invalid_vars:
+            raise ValueError(
+                f"Invalid variables: {invalid_vars}. Must be a subset of {VALID_VARIABLES}"
+            )
+    else:
+        variables = VALID_VARIABLES
+
+    _validate_urls(gcm, rcm, rcp, remote, cordex_domain, obs)
+
+    bbox = _geo_localize(country, xlim, ylim, buffer, cordex_domain)
+
+    with mp.Pool(processes=min(num_processes, len(variables))) as pool:
+        futures = []
+        for variable in variables:
+            futures.append(
+                pool.apply_async(
+                    process_worker,
+                    args=(max_threads_per_process,),
+                    kwds={
+                        "variable": variable,
+                        "bbox": bbox,
+                        "cordex_domain": cordex_domain,
+                        "rcp": rcp,
+                        "gcm": gcm,
+                        "rcm": rcm,
+                        "years_up_to": years_up_to,
+                        "years_obs": years_obs,
+                        "obs": obs,
+                        "bias_correction": bias_correction,
+                        "historical": historical,
+                        "remote": remote,
+                    },
+                )
+            )
+
+        results = {
+            variable: futures[i].get() for i, variable in enumerate(variables)
+        }
+
+        pool.close()  # Prevent any more tasks from being submitted to the pool
+        pool.join()  # Wait for all worker processes to finish
+
+    return results
+
+
+def _validate_urls(
+    gcm: str = None,
+    rcm: str = None,
+    rcp: str = None,
+    remote: bool = True,
+    cordex_domain: str = None,
+    obs: bool = False,
+):
+    # Load the data
+    log = logger.getChild("URL-validation")
+
+    if obs is False:
+        inventory_csv_url = (
+            INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
+        )
+        data = pd.read_csv(inventory_csv_url)
+
+        # Set the column to use based on whether the data is remote or local
+        column_to_use = "location" if remote else "hub"
+
+        # Filter the data based on the conditions
+        filtered_data = data[
+            lambda x: (
+                x["activity"].str.contains("FAO", na=False)
+                & (x["domain"] == cordex_domain)
+                & (x["model"].str.contains(gcm, na=False))
+                & (x["rcm"].str.contains(rcm, na=False))
+                & (x["experiment"].isin([rcp, "historical"]))
+            )
+        ][["experiment", column_to_use]]
+
+        # Extract the column values as a list
+        num_rows = filtered_data.shape[0]
+        column_values = filtered_data[column_to_use]
+
+        if num_rows == 1:
+            # Log the output for one row
+            row1 = column_values.iloc[0]
+            log_proj = logger.getChild("URL-validation-projections")
+            log_proj.info(f"{row1}")
+        else:
+            # Log the output for two rows
+            row1 = column_values.iloc[0]
+            row2 = column_values.iloc[1]
+            log_hist = logger.getChild("URL-validation-historical")
+            log_proj = logger.getChild("URL-validation-projections")
+            log_hist.info(f"{row1}")
+            log_proj.info(f"{row2}")
+    else:  # when obs is True
+        log_obs = logger.getChild("URL-validation-observations")
+        log_obs.info(f"{ERA5_DATA_REMOTE_URL}")
+
+
+def _geo_localize(
+    country: str = None,
+    xlim: tuple[float, float] = None,
+    ylim: tuple[float, float] = None,
+    buffer: int = 0,
+    cordex_domain: str = None,
+) -> dict[str, tuple[float, float]]:
+    if country:
+        if xlim or ylim:
+            raise ValueError(
+                "Specify either a country or bounding box limits (xlim, ylim), but not both."
+            )
+        # Load country shapefile and extract bounds
+        world = gpd.read_file(gpd.datasets.get_path("naturalearth_lowres"))
+        country_shp = world[world.name == country]
+        if country_shp.empty:
+            raise ValueError(f"Country '{country}' is unknown.")
+        bounds = country_shp.total_bounds  # [minx, miny, maxx, maxy]
+        xlim, ylim = (bounds[0], bounds[2]), (bounds[1], bounds[3])
+    elif not (xlim and ylim):
+        raise ValueError(
+            "Either a country or bounding box limits (xlim, ylim) must be specified."
+        )
+
+    # Apply buffer
+    xlim = (xlim[0] - buffer, xlim[1] + buffer)
+    ylim = (ylim[0] - buffer, ylim[1] + buffer)
+
+    # Always validate CORDEX domain
+    if cordex_domain:
+        _validate_cordex_domain(xlim, ylim, cordex_domain)
+
+    return {"xlim": xlim, "ylim": ylim}
+
+
+def _validate_cordex_domain(xlim, ylim, cordex_domain):
+
+    # CORDEX domains data
+    cordex_domains_df = pd.DataFrame(
+        {
+            "min_lon": [
+                -33,
+                -28.3,
+                89.25,
+                86.75,
+                19.25,
+                44.0,
+                -106.25,
+                -115.0,
+                -24.25,
+                10.75,
+            ],
+            "min_lat": [
+                -28,
+                -23,
+                -15.25,
+                -54.25,
+                -15.75,
+                -4.0,
+                -58.25,
+                -14.5,
+                -46.25,
+                17.75,
+            ],
+            "max_lon": [
+                20,
+                18,
+                147.0,
+                -152.75,
+                116.25,
+                -172.0,
+                -16.25,
+                -30.5,
+                59.75,
+                140.25,
+            ],
+            "max_lat": [28, 21.7, 26.5, 13.75, 45.75, 65.0, 18.75, 28.5, 42.75, 69.75],
+            "cordex_domain": [
+                "NAM-22",
+                "EUR-22",
+                "SEA-22",
+                "AUS-22",
+                "WAS-22",
+                "EAS-22",
+                "SAM-22",
+                "CAM-22",
+                "AFR-22",
+                "CAS-22",
+            ],
+        }
+    )
+
+    def is_bbox_contained(bbox, domain):
+        """Check if bbox is contained within the domain bounding box."""
+        return (
+            bbox[0] >= domain["min_lon"]
+            and bbox[1] >= domain["min_lat"]
+            and bbox[2] <= domain["max_lon"]
+            and bbox[3] <= domain["max_lat"]
+        )
+
+    user_bbox = [xlim[0], ylim[0], xlim[1], ylim[1]]
+    domain_row = cordex_domains_df[cordex_domains_df["cordex_domain"] == cordex_domain]
+
+    if domain_row.empty:
+        raise ValueError(f"CORDEX domain '{cordex_domain}' is not recognized.")
+
+    domain_bbox = domain_row.iloc[0]
+
+    if not is_bbox_contained(user_bbox, domain_bbox):
+        suggested_domains = cordex_domains_df[
+            cordex_domains_df.apply(
+                lambda row: is_bbox_contained(user_bbox, row), axis=1
+            )
+        ]
+
+        if suggested_domains.empty:
+            raise ValueError(
+                f"The bounding box {user_bbox} is outside of all available CORDEX domains."
+            )
+
+        suggested_domain = suggested_domains.iloc[0]["cordex_domain"]
+
+        raise ValueError(
+            f"Bounding box {user_bbox} is not within '{cordex_domain}'. Suggested domain: '{suggested_domain}'."
+        )
+
+
+def process_worker(num_threads, **kwargs) -> xr.DataArray:
+    variable = kwargs["variable"]
+    log = logger.getChild(variable)
+    try:
+        with ThreadPoolExecutor(
+            max_workers=num_threads, thread_name_prefix="climate"
+        ) as executor:
+            return _climate_data_for_variable(executor, **kwargs)
+    except Exception as e:
+        log.exception(f"Process worker failed: {e}")
+        raise
+
+
+def _climate_data_for_variable(
+    executor: ThreadPoolExecutor,
+    *,
+    variable: str,
+    bbox: dict[str, tuple[float, float]],
+    cordex_domain: str,
+    rcp: str,
+    gcm: str,
+    rcm: str,
+    years_up_to: int,
+    years_obs: range,
+    obs: bool,
+    bias_correction: bool,
+    historical: bool,
+    remote: bool,
+) -> xr.DataArray:
+    log = logger.getChild(variable)
+
+    pd.options.mode.chained_assignment = None
+    inventory_csv_url = (
+        INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
+    )
+    data = pd.read_csv(inventory_csv_url)
+    column_to_use = "location" if remote else "hub"
+    filtered_data = data[
+        lambda x: (x["activity"].str.contains("FAO", na=False))
+        & (x["domain"] == cordex_domain)
+        & (x["model"].str.contains(gcm, na=False))
+        & (x["rcm"].str.contains(rcm, na=False))
+        & (x["experiment"].isin([rcp, "historical"]))
+    ][["experiment", column_to_use]]
+
+    future_obs = None
+    if obs or bias_correction:
+        future_obs = executor.submit(
+            _thread_download_data,
+            url=None,
+            bbox=bbox,
+            variable=variable,
+            obs=True,
+            years_up_to=years_up_to,
+            years_obs=years_obs,
+            remote=remote,
+        )
+
+    if not obs:
+        download_fn = partial(
+            _thread_download_data,
+            bbox=bbox,
+            variable=variable,
+            obs=False,
+            years_obs=years_obs,
+            years_up_to=years_up_to,
+            remote=remote,
+        )
+        downloaded_models = list(
+            executor.map(download_fn, filtered_data[column_to_use])
+        )
+
+        # Add the downloaded models to the DataFrame
+        filtered_data["models"] = downloaded_models
+        hist = (
+            filtered_data["models"].iloc[0].interpolate_na(dim="time", method="linear")
+        )
+        proj = (
+            filtered_data["models"].iloc[1].interpolate_na(dim="time", method="linear")
+        )
+        if bias_correction and historical:
+            # Load observations for bias correction
+            ref = future_obs.result()
+            log.info("Training eqm with historical data")
+            QM_mo = sdba.EmpiricalQuantileMapping.train(
+                ref,
+                hist,
+                group="time.month",
+                kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
+            )
+            log.info("Performing bias correction with eqm")
+            hist_bs = QM_mo.adjust(hist, extrapolation="constant", interp="linear")
+            proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
+            log.info("Done!")
+            if variable == "hurs":
+                hist_bs = hist_bs.where(hist_bs <= 100, 100)
+                hist_bs = hist_bs.where(hist_bs >= 0, 0)
+            combined = xr.concat([hist_bs, proj_bs], dim="time")
+            return combined
+
+        elif not bias_correction and historical:
+            combined = xr.concat([hist, proj], dim="time")
+            return combined
+
+        elif bias_correction and not historical:
+            ref = future_obs.result()
+            log.info("Training eqm with historical data")
+            QM_mo = sdba.EmpiricalQuantileMapping.train(
+                ref,
+                hist,
+                group="time.month",
+                kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
+            )  # multiplicative approach for pr, rsds and wind speed
+            log.info("Performing bias correction with eqm")
+            proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
+            log.info("Done!")
+            if variable == "hurs":
+                proj_bs = proj_bs.where(proj_bs <= 100, 100)
+                proj_bs = proj_bs.where(proj_bs >= 0, 0)
+            return proj_bs
+
+        return proj
+
+    else:  # when observations are True
+        downloaded_obs = future_obs.result()
+        log.info("Done!")
+        return downloaded_obs
+
+
+def _thread_download_data(url: str | None, **kwargs):
+    variable = kwargs["variable"]
+    temporal = "observations" if kwargs["obs"] else ("historical" if "historical" in str(url) else "projections")
+    log = logger.getChild(f"{variable}-{temporal}")
+    try:
+        return _download_data(url=url, **kwargs)
+    except Exception as e:
+        log.exception(f"Failed to process data from {url}: {e}")
+        raise
+
+
+def _download_data(
+    url: str | None,
+    bbox: dict[str, tuple[float, float]],
+    variable: str,
+    obs: bool,
+    years_obs: range,
+    years_up_to: int,
+    remote: bool,
+) -> xr.DataArray:
+    temporal = "observations" if obs else ("historical" if url and "historical" in url else "projections")
+    log = logger.getChild(f"{variable}-{temporal}")
+
+    if obs:
+        var = VARIABLES_MAP[variable]
+        log.info(f"Establishing connection to ERA5 data for {variable}({var})")
+        if remote:
+            ds_var = xr.open_dataset(ERA5_DATA_REMOTE_URL)[var]
+        else:
+            ds_var = xr.open_dataset(ERA5_DATA_LOCAL_PATH)[var]
+        log.info(f"Connection to ERA5 data for {variable}({var}) has been established")
+
+        # Coordinate normalization and renaming for 'hurs'
+        if var == "hurs":
+            ds_var = ds_var.rename({"lat": "latitude", "lon": "longitude"})
+            ds_cropped = ds_var.sel(
+                longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
+                latitude=slice(bbox["ylim"][0], bbox["ylim"][1]),
+            )
+        else:
+            ds_var.coords["longitude"] = (ds_var.coords["longitude"] + 180) % 360 - 180
+            ds_var = ds_var.sortby(ds_var.longitude)
+            ds_cropped = ds_var.sel(
+                longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
+                latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
+            )
+
+        # Unit conversion
+        if var in ["t2mx", "t2mn", "t2m"]:
+            ds_cropped -= 273.15  # Convert from Kelvin to Celsius
+            ds_cropped.attrs["units"] = "°C"
+        elif var == "tp":
+            ds_cropped *= 1000  # Convert precipitation
+            ds_cropped.attrs["units"] = "mm"
+        elif var == "ssrd":
+            ds_cropped /= 86400  # Convert from J/m^2 to W/m^2
+            ds_cropped.attrs["units"] = "W m-2"
+        elif var == "sfcwind":
+            ds_cropped = ds_cropped * (
+                4.87 / np.log((67.8 * 10) - 5.42)
+            )  # Convert wind speed from 10 m to 2 m
+            ds_cropped.attrs["units"] = "m s-1"
+
+        # Select years
+        years = [x for x in years_obs]
+        time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
+            ds_cropped["time"].dt.year <= years[-1]
+        )
+
+    else:
+        log.info(f"Establishing connection to CORDEX data for {variable}")
+        ds_var = xr.open_dataset(url)[variable]
+        log.info(f"Connection to CORDEX data for {variable} has been established")
+        ds_cropped = ds_var.sel(
+            longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
+            latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
+        )
+
+        # Unit conversion
+        if variable in ["tas", "tasmax", "tasmin"]:
+            ds_cropped -= 273.15  # Convert from Kelvin to Celsius
+            ds_cropped.attrs["units"] = "°C"
+        elif variable == "pr":
+            ds_cropped *= 86400  # Convert from kg m^-2 s^-1 to mm/day
+            ds_cropped.attrs["units"] = "mm"
+        elif variable == "rsds":
+            ds_cropped.attrs["units"] = "W m-2"
+        elif variable == "sfcWind":
+            ds_cropped = ds_cropped * (
+                4.87 / np.log((67.8 * 10) - 5.42)
+            )  # Convert wind speed from 10 m to 2 m
+            ds_cropped.attrs["units"] = "m s-1"
+
+        # Select years based on rcp
+        if "rcp" in url:
+            years = [x for x in range(2006, years_up_to + 1)]
+        else:
+            years = [x for x in DEFAULT_YEARS_OBS]
+
+        # Add missing dates
+        ds_cropped = ds_cropped.convert_calendar(
+            calendar="gregorian", missing=np.nan, align_on="date"
+        )
+
+        time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
+            ds_cropped["time"].dt.year <= years[-1]
+        )
+
+    # subset years
+    ds_cropped = ds_cropped.sel(time=time_mask)
+
+    assert isinstance(ds_cropped, xr.DataArray)
+
+    if obs:
+        log.info(
+            f"ERA5 data for {variable} has been processed: unit conversion ({ds_cropped.attrs.get('units', 'unknown units')}), time selection ({years[0]}-{years[-1]})"
+        )
+    else:
+        log.info(
+            f"CORDEX data for {variable} has been processed: unit conversion ({ds_cropped.attrs.get('units', 'unknown units')}), calendar transformation (360-day to Gregorian), time selection ({years[0]}-{years[-1]})"
+        )
+
+    return ds_cropped
+
+
+if __name__ == "__main__":
+    data = get_climate_data(
+        country="Zambia",
+        cordex_domain="AFR-22",
+        rcp="rcp26",
+        gcm="MPI",
+        rcm="REMO",
+        years_up_to=2030,
+        obs=False,
+        bias_correction=True,
+        historical=False,
+    )
+    print(data)
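
The functional change in 0.1.1 is the new keyword-only variables argument to get_climate_data, which restricts processing to a subset of VALID_VARIABLES; the worker pool is sized accordingly via min(num_processes, len(variables)), and logging is reorganized into per-variable, per-stream child loggers (for example "pr-projections"). A minimal usage sketch follows; it is not part of the diff and assumes network access to the remote THREDDS inventory and the ERA5/CORDEX endpoints:

from cavapy import get_climate_data

# Process only precipitation and maximum temperature (new in 0.1.1).
# Any name outside VALID_VARIABLES now raises ValueError before any download starts.
data = get_climate_data(
    country="Zambia",
    cordex_domain="AFR-22",
    rcp="rcp26",
    gcm="MPI",
    rcm="REMO",
    years_up_to=2030,
    variables=["pr", "tasmax"],
)
print(data["pr"])

With two variables requested, the pool spawns min(num_processes, 2) worker processes rather than one per possible variable, so a subset request no longer starts idle workers.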