cavapy 1.1.0__py3-none-any.whl → 1.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cavapy might be problematic; see the advisory details in the registry listing for more information.

cavapy.py CHANGED
@@ -1,1177 +1,523 @@
1
- import os
2
- import multiprocessing as mp
3
- from concurrent.futures import ThreadPoolExecutor
4
- from functools import partial
5
- import logging
6
- import warnings
7
-
8
- import pandas as pd # noqa: E402
9
- import xarray as xr # noqa: E402
10
- import numpy as np # noqa: E402
11
- import xsdba as sdba # noqa: E402
12
- import matplotlib.pyplot as plt # noqa: E402
13
- import matplotlib.dates as mdates # noqa: E402
14
- import seaborn as sns # noqa: E402
15
- from datetime import datetime # noqa: E402
16
- from typing import Union, List, Tuple, Optional # noqa: E402
17
-
18
- import cartopy.crs as ccrs # noqa: E402
19
- import cartopy.feature as cfeature # noqa: E402
20
- import cartopy.io.shapereader as shpreader # noqa: E402
21
-
22
# Suppress cartopy download warnings for Natural Earth data
try:
    from cartopy.io import DownloadWarning
    warnings.filterwarnings('ignore', category=DownloadWarning)
except ImportError:
    # Some cartopy versions do not expose DownloadWarning; fall back to
    # suppressing all UserWarnings emitted from the cartopy.io module.
    warnings.filterwarnings('ignore', category=UserWarning, module='cartopy.io')
29
-
30
# Module-wide logger; per-variable / per-stage children are derived with
# logger.getChild(...) throughout this module.
logger = logging.getLogger("climate")
# Drop any handlers left over from a previous import of this module.
# (The original code additionally looped over logger.handlers to remove
# them, but the list had just been emptied, so that loop was dead code.)
logger.handlers = []
handler = logging.StreamHandler()
formatter = logging.Formatter(
    "%(asctime)s | %(name)s | %(process)d:%(thread)d [%(levelname)s]: %(message)s"
)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
41
-
42
# Mapping from CORDEX variable names (keys) to the corresponding ERA5
# variable names (values) used when reading the observational dataset.
VARIABLES_MAP = {
    "pr": "tp",
    "tasmax": "t2mx",
    "tasmin": "t2mn",
    "hurs": "hurs",
    "sfcWind": "sfcwind",
    "rsds": "ssrd",
}
# Variables accepted by get_climate_data (the CORDEX-side names).
VALID_VARIABLES = list(VARIABLES_MAP)
# TODO: Throw an error if the selected country is not in the selected domain
VALID_DOMAINS = [
    "NAM-22",
    "EUR-22",
    "AFR-22",
    "EAS-22",
    "SEA-22",
    "WAS-22",
    "AUS-22",
    "SAM-22",
    "CAM-22",
]
# Supported Representative Concentration Pathways.
VALID_RCPS = ["rcp26", "rcp85"]
# Supported driving Global Climate Models and Regional Climate Models.
VALID_GCM = ["MOHC", "MPI", "NCC"]
VALID_RCM = ["REMO", "Reg"]
# "CORDEX-CORE-BC" is the ISIMIP bias-corrected variant of the same data.
VALID_DATASETS = ["CORDEX-CORE", "CORDEX-CORE-BC"]

# Remote (THREDDS) and local locations of the CORDEX inventory CSV.
# NOTE(review): the local paths presume a JupyterHub-style shared mount
# under the user's home directory — confirm for the deployment in use.
INVENTORY_DATA_REMOTE_URL = (
    "https://hub.ipcc.ifca.es/thredds/fileServer/inventories/cava.csv"
)
INVENTORY_DATA_LOCAL_PATH = os.path.join(
    os.path.expanduser("~"), "shared/inventories/cava/inventory.csv"
)
# Remote (OPeNDAP) and local locations of the ERA5 0.25° reference dataset.
ERA5_DATA_REMOTE_URL = (
    "https://hub.ipcc.ifca.es/thredds/dodsC/fao/observations/ERA5/0.25/ERA5_025.ncml"
)
ERA5_DATA_LOCAL_PATH = os.path.join(
    os.path.expanduser("~"), "shared/data/observations/ERA5/0.25/ERA5_025.ncml"
)
# Default observational/historical baseline: 1980-2005 inclusive.
DEFAULT_YEARS_OBS = range(1980, 2006)
81
-
82
-
83
- def _ensure_inventory_not_empty(
84
- filtered_data: pd.DataFrame,
85
- *,
86
- dataset: str,
87
- cordex_domain: str,
88
- gcm: str,
89
- rcm: str,
90
- experiments: list[str],
91
- activity_filter: str,
92
- log: logging.Logger | None = None,
93
- ) -> None:
94
- """
95
- Ensure that the inventory filter returned at least one URL.
96
- If not, raise a clear, informative error instead of failing later with iloc[0].
97
- """
98
- if not filtered_data.empty:
99
- return
100
-
101
- msg = (
102
- "No CORDEX entries found in the inventory for the requested configuration.\n"
103
- f" dataset : {dataset}\n"
104
- f" domain : {cordex_domain}\n"
105
- f" gcm : {gcm}\n"
106
- f" rcm : {rcm}\n"
107
- f" experiments : {experiments}\n"
108
- f" activity_filter: {activity_filter}\n\n"
109
- "This usually means that this GCM/RCM/experiment combination does not exist "
110
- "or that ther is an issue with the inventory data.\n"
111
- "Please check the inventory CSV at https://hub.ipcc.ifca.es/thredds/fileServer/inventories/cava.csv"
112
- )
113
-
114
- if log is not None:
115
- log.error(msg)
116
-
117
- raise ValueError(msg)
118
-
119
-
120
def get_climate_data(
    *,
    country: str | None,
    years_obs: range | None = None,
    obs: bool = False,
    cordex_domain: str | None = None,
    rcp: str | None = None,
    gcm: str | None = None,
    rcm: str | None = None,
    years_up_to: int | None = None,
    bias_correction: bool = False,
    historical: bool = False,
    buffer: int = 0,
    xlim: tuple[float, float] | None = None,
    ylim: tuple[float, float] | None = None,
    remote: bool = True,
    variables: list[str] | None = None,
    num_processes: int = len(VALID_VARIABLES),
    max_threads_per_process: int = 8,
    dataset: str = "CORDEX-CORE",
) -> dict[str, xr.DataArray]:
    """
    Process climate data required by pyAEZ climate module.

    The function automatically accesses CORDEX-CORE models at 0.25° and the
    ERA5 datasets.

    Args:
        country (str): Name of the country for which data is to be processed.
            Use None if specifying a region using xlim and ylim.
        years_obs (range): Range of years for observational data (ERA5 only).
            Required when obs is True. (default: None).
        obs (bool): Flag to indicate if processing observational data (default: False).
            When True, only years_obs is required. CORDEX parameters are optional.
        cordex_domain (str): CORDEX domain of the climate data. One of
            ['NAM-22', 'EUR-22', 'AFR-22', 'EAS-22', 'SEA-22', 'WAS-22',
            'AUS-22', 'SAM-22', 'CAM-22']. Required when obs is False. (default: None).
        rcp (str): Representative Concentration Pathway. One of ['rcp26', 'rcp85'].
            Required when obs is False. (default: None).
        gcm (str): GCM name. One of ['MOHC', 'MPI', 'NCC'].
            Required when obs is False. (default: None).
        rcm (str): RCM name. One of ['REMO', 'Reg'].
            Required when obs is False. (default: None).
        years_up_to (int): The ending year for the projected data. Projections
            start in 2006 and end in 2100. Hence, if years_up_to is set to 2030,
            data will be downloaded for the 2006-2030 period.
            Required when obs is False. (default: None).
        bias_correction (bool): Whether to apply bias correction (default: False).
        historical (bool): Flag to indicate if processing historical data (default: False).
            If True, historical data is provided together with projections.
            Historical simulation runs for CORDEX-CORE initiative are provided
            for the 1980-2005 time period.
        buffer (int): Buffer distance to expand the region of interest (default: 0).
        xlim (tuple or None): Longitudinal bounds of the region of interest.
            Use only when country is None (default: None).
        ylim (tuple or None): Latitudinal bounds of the region of interest.
            Use only when country is None (default: None).
        remote (bool): Flag to work with remote data or not (default: True).
        variables (list[str] or None): List of variables to process. Must be a
            subset of ['pr', 'tasmax', 'tasmin', 'hurs', 'sfcWind', 'rsds'].
            If None, all variables are processed. (default: None).
        num_processes (int): Number of processes to use, one per variable.
            By default equals the number of all possible variables. (default: 6).
        max_threads_per_process (int): Max number of threads within each process. (default: 8).
        dataset (str): Dataset source to use. Options are "CORDEX-CORE" (original
            data) or "CORDEX-CORE-BC" (ISIMIP bias-corrected data). (default: "CORDEX-CORE").

    Returns:
        dict: A dictionary containing processed climate data for each variable
        as an xarray object.
    """
    # NOTE: the original code used an f-string ( f""" ... """ ) as the first
    # statement, which Python does NOT store as __doc__; it is now a plain
    # docstring with the constant values written out literally.

    # Validation for basic parameters: xlim and ylim must come as a pair.
    if (xlim is None) != (ylim is None):
        raise ValueError(
            "xlim and ylim mismatch: they must be both specified or both unspecified"
        )
    if country is None and xlim is None:
        raise ValueError("You must specify a country or (xlim, ylim)")
    if country is not None and xlim is not None:
        raise ValueError("You must specify either country or (xlim, ylim), not both")

    # Conditional validation based on obs flag
    if obs:
        # When obs=True, only years_obs is required
        if years_obs is None:
            raise ValueError("years_obs must be provided when obs is True")
        if not (1980 <= min(years_obs) <= max(years_obs) <= 2020):
            raise ValueError("Years in years_obs must be within the range 1980 to 2020")

        # Set default values for CORDEX parameters (not used but needed for function calls)
        cordex_domain = cordex_domain or "AFR-22"  # dummy value
        rcp = rcp or "rcp26"  # dummy value
        gcm = gcm or "MPI"  # dummy value
        rcm = rcm or "Reg"  # dummy value
        years_up_to = years_up_to or 2030  # dummy value
    else:
        # When obs=False, CORDEX parameters are required.
        # Values are collected explicitly rather than via locals()[name],
        # which is fragile and not guaranteed to reflect parameters in all
        # Python implementations.
        required_params = {
            "cordex_domain": (cordex_domain, VALID_DOMAINS),
            "rcp": (rcp, VALID_RCPS),
            "gcm": (gcm, VALID_GCM),
            "rcm": (rcm, VALID_RCM),
        }
        for param_name, (param_value, valid_values) in required_params.items():
            if param_value is None:
                raise ValueError(f"{param_name} is required when obs is False")
            if param_value not in valid_values:
                raise ValueError(
                    f"Invalid {param_name}={param_value}. Must be one of {valid_values}"
                )

        if years_up_to is None:
            raise ValueError("years_up_to is required when obs is False")
        if years_up_to <= 2006:
            raise ValueError("years_up_to must be greater than 2006")

        # Set default years_obs when not processing observations
        if years_obs is None:
            years_obs = DEFAULT_YEARS_OBS

    # Validate dataset parameter
    if dataset not in VALID_DATASETS:
        raise ValueError(
            f"Invalid dataset='{dataset}'. Must be one of {VALID_DATASETS}"
        )

    # Check for incompatible dataset and bias_correction combination
    if dataset == "CORDEX-CORE-BC" and bias_correction:
        raise ValueError(
            "Cannot apply bias_correction=True when using dataset='CORDEX-CORE-BC'. "
            "The CORDEX-CORE-BC dataset is already bias-corrected using ISIMIP methodology."
        )

    # Validate variables if provided
    if variables is not None:
        invalid_vars = [var for var in variables if var not in VALID_VARIABLES]
        if invalid_vars:
            raise ValueError(
                f"Invalid variables: {invalid_vars}. Must be a subset of {VALID_VARIABLES}"
            )
    else:
        variables = VALID_VARIABLES

    # Validate GCM-RCM combinations for specific domains (only for non-observational data)
    if not obs:
        _validate_gcm_rcm_combinations(cordex_domain, gcm, rcm)

    # Log the URLs that will be used and fail early on an empty inventory.
    _validate_urls(gcm, rcm, rcp, remote, cordex_domain, obs, historical, bias_correction, dataset)

    bbox = _geo_localize(country, xlim, ylim, buffer, cordex_domain, obs)

    # One process per variable; each process runs its own thread pool
    # (see process_worker) to download the individual files concurrently.
    with mp.Pool(processes=min(num_processes, len(variables))) as pool:
        futures = [
            pool.apply_async(
                process_worker,
                args=(max_threads_per_process,),
                kwds={
                    "variable": variable,
                    "bbox": bbox,
                    "cordex_domain": cordex_domain,
                    "rcp": rcp,
                    "gcm": gcm,
                    "rcm": rcm,
                    "years_up_to": years_up_to,
                    "years_obs": years_obs,
                    "obs": obs,
                    "bias_correction": bias_correction,
                    "historical": historical,
                    "remote": remote,
                    "dataset": dataset,
                },
            )
            for variable in variables
        ]

        # .get() re-raises any exception from the worker process.
        results = {
            variable: future.get() for variable, future in zip(variables, futures)
        }

        pool.close()  # Prevent any more tasks from being submitted to the pool
        pool.join()  # Wait for all worker processes to finish

    return results
294
-
295
-
296
def _validate_urls(
    gcm: str | None = None,
    rcm: str | None = None,
    rcp: str | None = None,
    remote: bool = True,
    cordex_domain: str | None = None,
    obs: bool = False,
    historical: bool = False,
    bias_correction: bool = False,
    dataset: str = "CORDEX-CORE",
):
    """
    Log the dataset URLs that will be used for the requested configuration.

    For model data (obs=False) the inventory CSV is filtered with the same
    predicate as in _climate_data_for_variable, so a missing
    GCM/RCM/experiment combination is reported before any worker process is
    started.  For observations (obs=True) only the ERA5 URL is logged.

    Raises:
        ValueError: If the inventory contains no entry for the configuration
            (via _ensure_inventory_not_empty).
    """
    # Load the data
    log = logger.getChild("URL-validation")

    if obs is False:
        inventory_csv_url = (
            INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
        )
        data = pd.read_csv(inventory_csv_url)

        # Set the column to use based on whether the data is remote or local:
        # remote access uses THREDDS URLs ("location"), local uses hub paths.
        column_to_use = "location" if remote else "hub"

        # Define which experiments we need; bias correction also requires the
        # historical run as training data.
        experiments = [rcp]
        if historical or bias_correction:
            experiments.append("historical")

        # Determine activity filter based on dataset
        activity_filter = "FAO" if dataset == "CORDEX-CORE" else "CRDX-ISIMIP-025"

        # Filter the data based on the conditions
        filtered_data = data[
            lambda x: (
                x["activity"].str.contains(activity_filter, na=False)
                & (x["domain"] == cordex_domain)
                & (x["model"].str.contains(gcm, na=False))
                & (x["rcm"].str.contains(rcm, na=False))
                & (x["experiment"].isin(experiments))
            )
        ][["experiment", column_to_use]]

        # Fail early if nothing is found
        _ensure_inventory_not_empty(
            filtered_data,
            dataset=dataset,
            cordex_domain=cordex_domain,
            gcm=gcm,
            rcm=rcm,
            experiments=experiments,
            activity_filter=activity_filter,
            log=log,
        )

        # Log every matched URL, split by historical vs projection experiment.
        for _, row in filtered_data.iterrows():
            if row["experiment"] == "historical":
                log_hist = logger.getChild("URL-validation-historical")
                log_hist.info(f"{row[column_to_use]}")
            else:
                log_proj = logger.getChild("URL-validation-projections")
                log_proj.info(f"{row[column_to_use]}")

    else:  # when obs is True
        log_obs = logger.getChild("URL-validation-observations")
        log_obs.info(f"{ERA5_DATA_REMOTE_URL}")
362
-
363
-
364
def _get_country_bounds(country_name: str) -> tuple[float, float, float, float]:
    """
    Get a country's bounding box from cartopy's Natural Earth 50m dataset.

    Args:
        country_name: Name of the country, matched case-insensitively against
            several Natural Earth name attributes.

    Returns:
        tuple: (minx, miny, maxx, maxy) bounding box.

    Raises:
        ValueError: If the country is not found.
    """
    # The original code also built an unused NaturalEarthFeature and an unused
    # geometries() generator; both were dead code and have been removed.
    shapefile = shpreader.natural_earth(
        resolution='50m', category='cultural', name='admin_0_countries'
    )
    target = country_name.lower()

    for country_record in shpreader.Reader(shapefile).records():
        # Natural Earth stores several name variants; try them all for
        # better matching (e.g. short name, long name, admin name).
        country_names = [
            country_record.attributes.get('NAME', ''),
            country_record.attributes.get('NAME_LONG', ''),
            country_record.attributes.get('ADMIN', ''),
            country_record.attributes.get('NAME_EN', '')
        ]

        if any(name.lower() == target for name in country_names if name):
            return country_record.geometry.bounds

    # If not found, check for capitalization issue and give a helpful hint.
    if country_name and country_name[0].islower():
        capitalized = country_name.capitalize()
        raise ValueError(f"Country '{country_name}' not found. Try capitalizing the first letter: '{capitalized}'")
    else:
        raise ValueError(f"Country '{country_name}' is unknown.")
404
-
405
-
406
- def _geo_localize(
407
- country: str = None,
408
- xlim: tuple[float, float] = None,
409
- ylim: tuple[float, float] = None,
410
- buffer: int = 0,
411
- cordex_domain: str = None,
412
- obs: bool = False,
413
- ) -> dict[str, tuple[float, float]]:
414
- if country:
415
- if xlim or ylim:
416
- raise ValueError(
417
- "Specify either a country or bounding box limits (xlim, ylim), but not both."
418
- )
419
-
420
- bounds = _get_country_bounds(country)
421
- xlim, ylim = (bounds[0], bounds[2]), (bounds[1], bounds[3])
422
- elif not (xlim and ylim):
423
- raise ValueError(
424
- "Either a country or bounding box limits (xlim, ylim) must be specified."
425
- )
426
-
427
- # Apply buffer
428
- xlim = (xlim[0] - buffer, xlim[1] + buffer)
429
- ylim = (ylim[0] - buffer, ylim[1] + buffer)
430
-
431
- # Only validate CORDEX domain when processing non-observational data
432
- # Skip validation for observations or when using dummy values
433
- if not obs and cordex_domain:
434
- _validate_cordex_domain(xlim, ylim, cordex_domain)
435
-
436
- return {"xlim": xlim, "ylim": ylim}
437
-
438
-
439
- def _validate_gcm_rcm_combinations(cordex_domain: str, gcm: str, rcm: str):
440
- """
441
- Validate that the GCM-RCM combination is available for the specified CORDEX domain.
442
-
443
- Args:
444
- cordex_domain: CORDEX domain name
445
- gcm: Global Climate Model name
446
- rcm: Regional Climate Model name
447
-
448
- Raises:
449
- ValueError: If the combination is not available for the domain
450
- """
451
- # Define invalid combinations per domain
452
- invalid_combinations = {
453
- "WAS-22": [
454
- ("MOHC", "Reg") # MOHC-Reg is not available for WAS-22
455
- ],
456
- "CAS-22": [
457
- ("MOHC", "Reg"), # Reg is not available for any GCM in CAS-22
458
- ("MPI", "Reg"),
459
- ("NCC", "Reg")
460
- ]
461
- }
462
-
463
- if cordex_domain in invalid_combinations:
464
- invalid_combos = invalid_combinations[cordex_domain]
465
- current_combo = (gcm, rcm)
466
-
467
- if current_combo in invalid_combos:
468
- # Get available combinations for this domain
469
- all_gcm = VALID_GCM
470
- all_rcm = VALID_RCM
471
- available_combos = []
472
-
473
- for g in all_gcm:
474
- for r in all_rcm:
475
- if (g, r) not in invalid_combos:
476
- available_combos.append(f"{g}-{r}")
477
-
478
- raise ValueError(
479
- f"The combination {gcm}-{rcm} is not available for domain {cordex_domain}. "
480
- f"Available combinations for {cordex_domain}: {', '.join(available_combos)}"
481
- )
482
-
483
-
484
- def _validate_cordex_domain(xlim, ylim, cordex_domain):
485
-
486
- # CORDEX domains data
487
- cordex_domains_df = pd.DataFrame(
488
- {
489
- "min_lon": [
490
- -33,
491
- -28.3,
492
- 89.25,
493
- 86.75,
494
- 19.25,
495
- 44.0,
496
- -106.25,
497
- -115.0,
498
- -24.25,
499
- 10.75,
500
- ],
501
- "min_lat": [
502
- -28,
503
- -23,
504
- -15.25,
505
- -54.25,
506
- -15.75,
507
- -4.0,
508
- -58.25,
509
- -14.5,
510
- -46.25,
511
- 17.75,
512
- ],
513
- "max_lon": [
514
- 20,
515
- 18,
516
- 147.0,
517
- -152.75,
518
- 116.25,
519
- -172.0,
520
- -16.25,
521
- -30.5,
522
- 59.75,
523
- 140.25,
524
- ],
525
- "max_lat": [28, 21.7, 26.5, 13.75, 45.75, 65.0, 18.75, 28.5, 42.75, 69.75],
526
- "cordex_domain": [
527
- "NAM-22",
528
- "EUR-22",
529
- "SEA-22",
530
- "AUS-22",
531
- "WAS-22",
532
- "EAS-22",
533
- "SAM-22",
534
- "CAM-22",
535
- "AFR-22",
536
- "CAS-22",
537
- ],
538
- }
539
- )
540
-
541
- def is_bbox_contained(bbox, domain):
542
- """Check if bbox is contained within the domain bounding box."""
543
- return (
544
- bbox[0] >= domain["min_lon"]
545
- and bbox[1] >= domain["min_lat"]
546
- and bbox[2] <= domain["max_lon"]
547
- and bbox[3] <= domain["max_lat"]
548
- )
549
-
550
- user_bbox = [xlim[0], ylim[0], xlim[1], ylim[1]]
551
- domain_row = cordex_domains_df[cordex_domains_df["cordex_domain"] == cordex_domain]
552
-
553
- if domain_row.empty:
554
- raise ValueError(f"CORDEX domain '{cordex_domain}' is not recognized.")
555
-
556
- domain_bbox = domain_row.iloc[0]
557
-
558
- if not is_bbox_contained(user_bbox, domain_bbox):
559
- suggested_domains = cordex_domains_df[
560
- cordex_domains_df.apply(
561
- lambda row: is_bbox_contained(user_bbox, row), axis=1
562
- )
563
- ]
564
-
565
- if suggested_domains.empty:
566
- raise ValueError(
567
- f"The bounding box {user_bbox} is outside of all available CORDEX domains."
568
- )
569
-
570
- suggested_domain = suggested_domains.iloc[0]["cordex_domain"]
571
-
572
- raise ValueError(
573
- f"Bounding box {user_bbox} is not within '{cordex_domain}'. Suggested domain: '{suggested_domain}'."
574
- )
575
-
576
-
577
def _leave_one_out_bias_correction(ref, hist, variable, log):
    """
    Perform leave-one-out cross-validation for bias correction to avoid overfitting.

    For every year Y in the historical data, an empirical quantile mapping is
    trained on all OTHER years (of both reference and historical data) and then
    applied to year Y, so no year is corrected by a model trained on itself.

    Args:
        ref: Reference (observational) data
        hist: Historical model data
        variable: Variable name for determining correction method
        log: Logger instance

    Returns:
        xr.DataArray: Bias-corrected historical data
    """
    log.info("Starting leave-one-out cross-validation for bias correction")

    # Get unique years from historical data
    hist_years = hist.time.dt.year.values
    unique_years = np.unique(hist_years)

    # Initialize list to store corrected data for each year
    corrected_years = []

    for leave_out_year in unique_years:
        log.info(f"Processing leave-out year: {leave_out_year}")

        # Create masks for training (all years except leave_out_year) and testing (only leave_out_year)
        train_mask = hist.time.dt.year != leave_out_year
        test_mask = hist.time.dt.year == leave_out_year

        # Get training data (all years except the current one)
        hist_train = hist.sel(time=train_mask)
        hist_test = hist.sel(time=test_mask)

        # Get corresponding reference data for training period
        ref_train_mask = ref.time.dt.year != leave_out_year
        ref_train = ref.sel(time=ref_train_mask)

        # Train the bias correction model on the training data.
        # Multiplicative mapping for strictly non-negative variables
        # (precipitation, radiation, wind); additive otherwise.
        QM_leave_out = sdba.EmpiricalQuantileMapping.train(
            ref_train,
            hist_train,
            group="time.month",
            kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
        )

        # Apply bias correction to the left-out year
        hist_corrected_year = QM_leave_out.adjust(
            hist_test, extrapolation="constant", interp="linear"
        )

        # Apply variable-specific constraints: relative humidity is clamped
        # to its physical range [0, 100].
        if variable == "hurs":
            hist_corrected_year = hist_corrected_year.where(hist_corrected_year <= 100, 100)
            hist_corrected_year = hist_corrected_year.where(hist_corrected_year >= 0, 0)

        corrected_years.append(hist_corrected_year)

    # Concatenate all corrected years and sort by time
    hist_bs = xr.concat(corrected_years, dim="time").sortby("time")

    log.info("Leave-one-out cross-validation bias correction completed")
    return hist_bs
639
-
640
-
641
def process_worker(num_threads, **kwargs) -> xr.DataArray:
    """
    Entry point executed in each worker process.

    Spins up a per-process thread pool and delegates the actual work for a
    single variable to _climate_data_for_variable; any failure is logged with
    a traceback before being re-raised to the parent process.
    """
    log = logger.getChild(kwargs["variable"])
    try:
        pool = ThreadPoolExecutor(
            max_workers=num_threads, thread_name_prefix="climate"
        )
        with pool as executor:
            return _climate_data_for_variable(executor, **kwargs)
    except Exception as e:
        log.exception(f"Process worker failed: {e}")
        raise
652
-
653
-
654
def _climate_data_for_variable(
    executor: ThreadPoolExecutor,
    *,
    variable: str,
    bbox: dict[str, tuple[float, float]],
    cordex_domain: str,
    rcp: str,
    gcm: str,
    rcm: str,
    years_up_to: int,
    years_obs: range,
    obs: bool,
    bias_correction: bool,
    historical: bool,
    remote: bool,
    dataset: str = "CORDEX-CORE",
) -> xr.DataArray:
    """
    Download and post-process the data for a single climate variable.

    Runs inside a worker process; ``executor`` is that process's thread pool,
    used to download observation and model files concurrently.

    Returns:
        xr.DataArray: Depending on the flags: the ERA5 observations (obs=True),
        the raw projections, historical+projections concatenated along time,
        or their bias-corrected equivalents.
    """
    log = logger.getChild(variable)

    # Silence pandas chained-assignment warnings for the column insert below.
    pd.options.mode.chained_assignment = None
    inventory_csv_url = (
        INVENTORY_DATA_REMOTE_URL if remote else INVENTORY_DATA_LOCAL_PATH
    )
    data = pd.read_csv(inventory_csv_url)
    column_to_use = "location" if remote else "hub"

    # Filter data based on whether we need historical data (bias correction
    # also requires the historical run as training data).
    experiments = [rcp]
    if historical or bias_correction:
        experiments.append("historical")

    # Determine activity filter based on dataset
    activity_filter = "FAO" if dataset == "CORDEX-CORE" else "CRDX-ISIMIP-025"

    filtered_data = data[
        lambda x: (x["activity"].str.contains(activity_filter, na=False))
        & (x["domain"] == cordex_domain)
        & (x["model"].str.contains(gcm, na=False))
        & (x["rcm"].str.contains(rcm, na=False))
        & (x["experiment"].isin(experiments))
    ][["experiment", column_to_use]]

    # Fail early if nothing is found
    _ensure_inventory_not_empty(
        filtered_data,
        dataset=dataset,
        cordex_domain=cordex_domain,
        gcm=gcm,
        rcm=rcm,
        experiments=experiments,
        activity_filter=activity_filter,
        log=log,
    )

    # Observations are needed either as the final product (obs=True) or as
    # the bias-correction reference; start that download in the background.
    future_obs = None
    if obs or bias_correction:
        future_obs = executor.submit(
            _thread_download_data,
            url=None,
            bbox=bbox,
            variable=variable,
            obs=True,
            years_up_to=years_up_to,
            years_obs=years_obs,
            remote=remote,
        )

    if not obs:
        download_fn = partial(
            _thread_download_data,
            bbox=bbox,
            variable=variable,
            obs=False,
            years_obs=years_obs,
            years_up_to=years_up_to,
            remote=remote,
        )
        # Download every matched model URL concurrently on the thread pool.
        downloaded_models = list(
            executor.map(download_fn, filtered_data[column_to_use])
        )

        # Add the downloaded models to the DataFrame
        filtered_data["models"] = downloaded_models

        if historical or bias_correction:
            hist = filtered_data[filtered_data["experiment"] == "historical"]["models"].iloc[0]
            proj = filtered_data[filtered_data["experiment"] == rcp]["models"].iloc[0]

            # Fill the NaN dates introduced by the 360-day -> Gregorian
            # calendar conversion in _download_data.
            hist = hist.interpolate_na(dim="time", method="linear")
            proj = proj.interpolate_na(dim="time", method="linear")
        else:
            proj = filtered_data["models"].iloc[0]
            proj = proj.interpolate_na(dim="time", method="linear")

        if bias_correction and historical:
            # Load observations for bias correction
            ref = future_obs.result()
            log.info("Training eqm with leave-one-out cross-validation")

            # Use leave-one-out cross-validation for historical bias correction
            hist_bs = _leave_one_out_bias_correction(ref, hist, variable, log)

            # For projections, train on all historical data
            QM_mo = sdba.EmpiricalQuantileMapping.train(
                ref,
                hist,
                group="time.month",
                kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
            )
            log.info("Performing bias correction on projections with full historical training")
            proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
            log.info("Done!")
            if variable == "hurs":
                # Relative humidity must stay within its physical range [0, 100].
                proj_bs = proj_bs.where(proj_bs <= 100, 100)
                proj_bs = proj_bs.where(proj_bs >= 0, 0)
            combined = xr.concat([hist_bs, proj_bs], dim="time")
            return combined

        elif not bias_correction and historical:
            combined = xr.concat([hist, proj], dim="time")
            return combined

        elif bias_correction and not historical:
            ref = future_obs.result()
            log.info("Training eqm with historical data")
            QM_mo = sdba.EmpiricalQuantileMapping.train(
                ref,
                hist,
                group="time.month",
                kind="*" if variable in ["pr", "rsds", "sfcWind"] else "+",
            )  # multiplicative approach for pr, rsds and wind speed
            log.info("Performing bias correction with eqm")
            proj_bs = QM_mo.adjust(proj, extrapolation="constant", interp="linear")
            log.info("Done!")
            if variable == "hurs":
                proj_bs = proj_bs.where(proj_bs <= 100, 100)
                proj_bs = proj_bs.where(proj_bs >= 0, 0)
            return proj_bs

        # Neither historical nor bias correction requested: raw projections.
        return proj

    else:  # when observations are True
        downloaded_obs = future_obs.result()
        log.info("Done!")
        return downloaded_obs
799
-
800
-
801
def _thread_download_data(url: str | None, **kwargs):
    """
    Thread-level wrapper around _download_data.

    Picks a descriptive child logger (observations / historical / projections)
    and logs a traceback before re-raising any download failure.
    """
    variable = kwargs["variable"]
    if kwargs["obs"]:
        temporal = "observations"
    elif url and "historical" in url:
        temporal = "historical"
    else:
        temporal = "projections"
    log = logger.getChild(f"{variable}-{temporal}")
    try:
        return _download_data(url=url, **kwargs)
    except Exception as e:
        log.exception(f"Failed to process data from {url}: {e}")
        raise
810
-
811
-
812
def _download_data(
    url: str | None,
    bbox: dict[str, tuple[float, float]],
    variable: str,
    obs: bool,
    years_obs: range,
    years_up_to: int,
    remote: bool,
) -> xr.DataArray:
    """
    Open one dataset, crop it to the bounding box, convert units and subset years.

    Args:
        url: URL of a CORDEX dataset; None when obs is True (the ERA5 source
            is then chosen from the module constants instead).
        bbox: {"xlim": (min_lon, max_lon), "ylim": (min_lat, max_lat)}.
        variable: CORDEX-side variable name (a key of VARIABLES_MAP).
        obs: True to read ERA5 observations instead of CORDEX model output.
        years_obs: Years to keep for observational data.
        years_up_to: Last projection year to keep (projections start in 2006).
        remote: Use the remote THREDDS source instead of the local path.

    Returns:
        xr.DataArray: Cropped, unit-converted, time-subset data.

    Raises:
        ValueError: If the variable is not available in the CORDEX file.
    """
    temporal = "observations" if obs else ("historical" if url and "historical" in url else "projections")
    log = logger.getChild(f"{variable}-{temporal}")

    if obs:
        # Translate to the ERA5 variable name.
        var = VARIABLES_MAP[variable]
        log.info(f"Establishing connection to ERA5 data for {variable}({var})")
        if remote:
            ds_var = xr.open_dataset(ERA5_DATA_REMOTE_URL)[var]
        else:
            ds_var = xr.open_dataset(ERA5_DATA_LOCAL_PATH)[var]
        log.info(f"Connection to ERA5 data for {variable}({var}) has been established")

        # Coordinate normalization and renaming for 'hurs'.
        # NOTE(review): hurs appears to be stored with lat/lon axis names and
        # an ascending latitude axis, unlike the other ERA5 variables — confirm.
        if var == "hurs":
            ds_var = ds_var.rename({"lat": "latitude", "lon": "longitude"})
            ds_cropped = ds_var.sel(
                longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
                latitude=slice(bbox["ylim"][0], bbox["ylim"][1]),
            )
        else:
            # Normalize longitudes from [0, 360) to [-180, 180) before cropping.
            ds_var.coords["longitude"] = (ds_var.coords["longitude"] + 180) % 360 - 180
            ds_var = ds_var.sortby(ds_var.longitude)
            # Latitude slice is reversed (max first) — axis stored descending.
            ds_cropped = ds_var.sel(
                longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
                latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
            )

        # Unit conversion
        if var in ["t2mx", "t2mn", "t2m"]:
            ds_cropped -= 273.15  # Convert from Kelvin to Celsius
            ds_cropped.attrs["units"] = "°C"
        elif var == "tp":
            ds_cropped *= 1000  # Convert precipitation (presumably m -> mm; confirm)
            ds_cropped.attrs["units"] = "mm"
        elif var == "ssrd":
            ds_cropped /= 86400  # Convert from J/m^2 to W/m^2
            ds_cropped.attrs["units"] = "W m-2"
        elif var == "sfcwind":
            ds_cropped = ds_cropped * (
                4.87 / np.log((67.8 * 10) - 5.42)
            )  # Convert wind speed from 10 m to 2 m
            ds_cropped.attrs["units"] = "m s-1"

        # Select years
        years = [x for x in years_obs]
        time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
            ds_cropped["time"].dt.year <= years[-1]
        )

    else:
        log.info(f"Establishing connection to CORDEX data for {variable}")
        ds_var = xr.open_dataset(url)[variable]

        # Check if time dimension has a prefix, indicating variable is not available
        time_dims = [dim for dim in ds_var.dims if dim.startswith('time_')]
        if time_dims:
            msg = f"Variable {variable} is not available for this model: {url}"
            log.exception(msg)
            raise ValueError(msg)

        log.info(f"Connection to CORDEX data for {variable} has been established")
        # Latitude slice reversed (max first) — axis stored descending.
        ds_cropped = ds_var.sel(
            longitude=slice(bbox["xlim"][0], bbox["xlim"][1]),
            latitude=slice(bbox["ylim"][1], bbox["ylim"][0]),
        )

        # Unit conversion
        if variable in ["tas", "tasmax", "tasmin"]:
            ds_cropped -= 273.15  # Convert from Kelvin to Celsius
            ds_cropped.attrs["units"] = "°C"
        elif variable == "pr":
            ds_cropped *= 86400  # Convert from kg m^-2 s^-1 to mm/day
            ds_cropped.attrs["units"] = "mm"
        elif variable == "rsds":
            ds_cropped.attrs["units"] = "W m-2"
        elif variable == "sfcWind":
            ds_cropped = ds_cropped * (
                4.87 / np.log((67.8 * 10) - 5.42)
            )  # Convert wind speed from 10 m to 2 m
            ds_cropped.attrs["units"] = "m s-1"

        # Select years based on rcp: projections run from 2006, everything
        # else (historical) uses the default baseline period.
        if "rcp" in url:
            years = [x for x in range(2006, years_up_to + 1)]
        else:
            years = [x for x in DEFAULT_YEARS_OBS]

        # Add missing dates (model calendars are converted to Gregorian;
        # introduced gaps are NaN and interpolated later by the caller).
        ds_cropped = ds_cropped.convert_calendar(
            calendar="gregorian", missing=np.nan, align_on="date"
        )

        time_mask = (ds_cropped["time"].dt.year >= years[0]) & (
            ds_cropped["time"].dt.year <= years[-1]
        )

    # subset years
    ds_cropped = ds_cropped.sel(time=time_mask)

    assert isinstance(ds_cropped, xr.DataArray)

    if obs:
        log.info(
            f"ERA5 data for {variable} has been processed: unit conversion ({ds_cropped.attrs.get('units', 'unknown units')}), time selection ({years[0]}-{years[-1]})"
        )
    else:
        log.info(
            f"CORDEX data for {variable} has been processed: unit conversion ({ds_cropped.attrs.get('units', 'unknown units')}), calendar transformation (360-day to Gregorian), time selection ({years[0]}-{years[-1]})"
        )

    return ds_cropped
932
-
933
-
934
- # =============================================================================
935
- # PLOTTING FUNCTIONS
936
- # =============================================================================
937
-
938
def plot_spatial_map(
    data: xr.DataArray,
    time_period: Optional[Tuple[int, int]] = None,
    aggregation: str = "mean",
    title: Optional[str] = None,
    cmap: str = "viridis",
    figsize: Tuple[int, int] = (12, 8),
    show_countries: bool = True,
    save_path: Optional[str] = None,
    **kwargs
) -> plt.Figure:
    """
    Create a spatial map visualization of climate data.

    The time dimension is collapsed with ``aggregation`` (optionally after
    restricting to ``time_period``) and the resulting 2-D field is drawn on
    a PlateCarree map with coastlines, optional country borders, and a
    colorbar. The matplotlib Figure is returned (and saved to ``save_path``
    when one is given).
    """
    field = data.copy()

    # Restrict to the requested year span before aggregating.
    if time_period is not None:
        first_year, last_year = time_period
        field = field.sel(time=slice(f"{first_year}-01-01", f"{last_year}-12-31"))

    # Collapse the time dimension with the requested statistic; dispatch by
    # xarray method name instead of an if/elif chain.
    if aggregation not in ("mean", "sum", "min", "max", "std"):
        raise ValueError(f"Unsupported aggregation method: {aggregation}")
    field = getattr(field, aggregation)(dim="time")

    # One axes carrying a cartopy projection.
    fig, ax = plt.subplots(
        figsize=figsize,
        subplot_kw={'projection': ccrs.PlateCarree()}
    )

    mappable = field.plot(
        ax=ax,
        cmap=cmap,
        transform=ccrs.PlateCarree(),
        add_colorbar=False,
        **kwargs
    )

    # Base-map decorations.
    ax.add_feature(cfeature.COASTLINE, linewidth=0.5)
    if show_countries:
        ax.add_feature(cfeature.BORDERS, linewidth=0.3, alpha=0.7)
    ax.add_feature(cfeature.OCEAN, color='lightblue', alpha=0.3)
    ax.add_feature(cfeature.LAND, color='lightgray', alpha=0.3)

    # Frame the data with a half-degree margin on every side.
    margin = 0.5
    west, east = field.longitude.min().item(), field.longitude.max().item()
    south, north = field.latitude.min().item(), field.latitude.max().item()
    ax.set_extent([west - margin, east + margin,
                   south - margin, north + margin], ccrs.PlateCarree())

    # Gridline labels only on the left/bottom edges.
    gridlines = ax.gridlines(draw_labels=True, alpha=0.3)
    gridlines.top_labels = False
    gridlines.right_labels = False
    gridlines.left_labels = True
    gridlines.bottom_labels = True

    # Colorbar; include units in the label when the attribute survived the
    # aggregation (xarray may drop attrs on reduction).
    colorbar = plt.colorbar(mappable, ax=ax, shrink=0.8, pad=0.02)
    if hasattr(field, 'units'):
        colorbar.set_label(f"{field.name} ({field.units})", rotation=270, labelpad=20)
    else:
        colorbar.set_label(f"{field.name}", rotation=270, labelpad=20)

    # Build a default title when the caller did not supply one.
    if title is None:
        var_name = field.name or "Climate Variable"
        if time_period:
            title = f"{aggregation.title()} {var_name} ({time_period[0]}-{time_period[1]})"
        else:
            title = f"{aggregation.title()} {var_name}"

    ax.set_title(title, fontsize=14, pad=20)

    plt.tight_layout()

    if save_path:
        plt.savefig(save_path, dpi=300, bbox_inches='tight')

    return fig
1033
-
1034
-
1035
def plot_time_series(
    data: Union[xr.DataArray, List[xr.DataArray]],
    aggregation: str = "mean",
    labels: Optional[List[str]] = None,
    title: Optional[str] = None,
    ylabel: Optional[str] = None,
    figsize: Tuple[int, int] = (12, 6),
    trend_line: bool = False,
    save_path: Optional[str] = None,
    **kwargs
) -> plt.Figure:
    """
    Create time series plots of climate data.

    Each dataset is reduced over latitude/longitude with ``aggregation``,
    averaged to annual values, and drawn as one line (with an optional
    dashed least-squares trend line). The matplotlib Figure is returned.
    """
    # Accept a single DataArray or a list of them; derive default labels.
    if isinstance(data, xr.DataArray):
        data_list = [data]
        labels = labels or [data.name or "Data"]
    else:
        data_list = data
        labels = labels or [f"Dataset {i+1}" for i in range(len(data_list))]

    if len(data_list) != len(labels):
        raise ValueError("Number of labels must match number of datasets")

    fig, ax1 = plt.subplots(figsize=figsize)

    for dataset, label in zip(data_list, labels):
        # Spatial reduction, dispatched by xarray method name. Validation is
        # kept inside the loop so an empty input never raises, matching the
        # original if/elif dispatch.
        if aggregation not in ("mean", "sum", "min", "max", "std"):
            raise ValueError(f"Unsupported aggregation method: {aggregation}")
        series = getattr(dataset, aggregation)(dim=["latitude", "longitude"])

        # Annual means keep the plot readable.
        annual = series.groupby("time.year").mean()

        ax1.plot(annual.year, annual.values, label=label, linewidth=2, **kwargs)

        if trend_line:
            # Dashed degree-1 fit in the same color as the series just drawn.
            coeffs = np.polyfit(annual.year, annual.values, 1)
            fit = np.poly1d(coeffs)
            ax1.plot(annual.year, fit(annual.year),
                     linestyle='--', alpha=0.7,
                     color=ax1.lines[-1].get_color())

    # Axis labels; derive a y-label from the first dataset when not given.
    ax1.set_xlabel("Year", fontsize=12)
    if ylabel is None:
        first = data_list[0]
        if hasattr(first, 'units'):
            ylabel = f"{first.name} ({first.units})"
        else:
            ylabel = first.name or "Value"
    ax1.set_ylabel(ylabel, fontsize=12)

    if len(data_list) > 1:
        ax1.legend()

    ax1.grid(True, alpha=0.3)

    # Default title from the first dataset's name.
    if title is None:
        var_name = data_list[0].name or "Climate Variable"
        title = f"{aggregation.title()} {var_name} Time Series"

    ax1.set_title(title, fontsize=14, pad=20)

    plt.tight_layout()

    if save_path:
        plt.savefig(save_path, dpi=300, bbox_inches='tight')

    return fig
1120
-
1121
-
1122
if __name__ == "__main__":
    # Demo script: exercises the public API end to end. All examples perform
    # remote data access, so this requires network connectivity.

    # Example 1: observational (ERA5) data — only years_obs is required.
    print("Getting observational data...")
    obs_data = get_climate_data(
        country="Togo",
        obs=True,
        years_obs=range(1990, 2011),
        variables=["pr", "tasmax"]
    )
    print("Observational data keys:", list(obs_data.keys()))

    # Example 2: CORDEX projections with on-the-fly bias correction, plus
    # the matching bias-corrected historical run (historical=True).
    print("\nGetting CORDEX projection data...")
    proj_data = get_climate_data(
        country="Togo",
        variables=["tasmax", "tasmin"],
        cordex_domain="AFR-22",
        rcp="rcp26",
        gcm="MPI",
        rcm="Reg",
        years_up_to=2010,
        historical=True,
        bias_correction=True
    )
    print("Projection data keys:", list(proj_data.keys()))

    # Example 3: CORDEX-CORE-BC (ISIMIP pre-bias-corrected) dataset.
    print("\nGetting CORDEX-CORE-BC (ISIMIP bias-corrected) data...")
    proj_data_bc = get_climate_data(
        country="Togo",
        variables=["pr", "tasmax"],
        cordex_domain="AFR-22",
        rcp="rcp85",
        gcm="MPI",
        rcm="Reg",
        years_up_to=2030,
        historical=True,
        bias_correction=False,  # Must be False when using CORDEX-CORE-BC
        dataset="CORDEX-CORE-BC"
    )
    print("CORDEX-CORE-BC data keys:", list(proj_data_bc.keys()))

    # Example 4: country-name -> bounding-box lookup (cartopy Natural Earth).
    print("\nTesting country lookup functionality...")
    try:
        # Test cartopy-based country lookup
        bounds = _get_country_bounds("Togo")
        print(f"Country lookup successful - Togo bounds: {bounds}")
    except Exception as e:
        # Best-effort demo: report the failure rather than abort the script.
        print(f"Country lookup failed: {e}")

    # Example 5: plotting helpers exist but are not invoked here, to avoid
    # blocking on interactive figure windows.
    print("\nPlotting functionality is available!")
    print("Use plot_spatial_map() and plot_time_series() functions")

    print("Example completed successfully!")
1
+ """Public API for retrieving and visualizing CAVA climate data."""
2
+
3
+ import multiprocessing as mp
4
+ import xarray as xr
5
+
6
+ from cava_config import (
7
+ DEFAULT_YEARS_OBS,
8
+ VALID_DATASETS,
9
+ VALID_DOMAINS,
10
+ VALID_GCM,
11
+ VALID_RCM,
12
+ VALID_RCPS,
13
+ VALID_VARIABLES,
14
+ logger,
15
+ )
16
+ from cava_download import process_worker
17
+ from cava_plot import plot_spatial_map, plot_time_series
18
+ from cava_validation import (
19
+ _geo_localize,
20
+ _get_country_bounds,
21
+ _validate_gcm_rcm_combinations,
22
+ _validate_urls,
23
+ )
24
+
25
+
26
def _get_climate_data_single(
    *,
    country: str | None,
    years_obs: range | None = None,
    obs: bool = False,
    cordex_domain: str | None = None,
    rcp: str | None = None,
    gcm: str | None = None,
    rcm: str | None = None,
    years_up_to: int | None = None,
    bias_correction: bool = False,
    historical: bool = False,
    buffer: int = 0,
    xlim: tuple[float, float] | None = None,
    ylim: tuple[float, float] | None = None,
    remote: bool = True,
    variables: list[str] | None = None,
    num_processes: int = len(VALID_VARIABLES),
    max_threads_per_process: int = 3,
    dataset: str = "CORDEX-CORE",
) -> dict[str, xr.DataArray]:
    """Internal single-combination fetch (one rcp/gcm/rcm), preserves legacy behavior.

    Validates the request, resolves the bounding box, then downloads and
    processes each variable either sequentially (when ``num_processes <= 1``
    or only one variable is requested) or with one process per variable.

    Returns:
        dict[str, xr.DataArray]: Mapping of variable name to processed data.

    Raises:
        ValueError: On any invalid or inconsistent combination of arguments.
        RuntimeError: When a variable download/processing task fails.
    """

    # --- Region selection validation -------------------------------------
    if xlim is None and ylim is not None or xlim is not None and ylim is None:
        raise ValueError(
            "xlim and ylim mismatch: they must be both specified or both unspecified"
        )
    if country is None and xlim is None:
        raise ValueError("You must specify a country or (xlim, ylim)")
    if country is not None and xlim is not None:
        raise ValueError("You must specify either country or (xlim, ylim), not both")

    # --- Mode-dependent validation ----------------------------------------
    if obs:
        # When obs=True, only years_obs is required.
        if years_obs is None:
            raise ValueError("years_obs must be provided when obs is True")
        if not (1980 <= min(years_obs) <= max(years_obs) <= 2020):
            raise ValueError("Years in years_obs must be within the range 1980 to 2020")

        # Set default values for CORDEX parameters (not used but needed for
        # downstream function calls).
        cordex_domain = cordex_domain or "AFR-22"  # dummy value
        rcp = rcp or "rcp26"  # dummy value
        gcm = gcm or "MPI"  # dummy value
        rcm = rcm or "Reg"  # dummy value
        years_up_to = years_up_to or 2030  # dummy value
    else:
        # When obs=False, CORDEX parameters are required. An explicit
        # (value, valid_values) mapping replaces the previous
        # locals()[param_name] lookup: locals() is fragile (its contents are
        # implementation-dependent inside a loop) and hides the dependency
        # from linters and refactoring tools.
        required_params = {
            "cordex_domain": (cordex_domain, VALID_DOMAINS),
            "rcp": (rcp, VALID_RCPS),
            "gcm": (gcm, VALID_GCM),
            "rcm": (rcm, VALID_RCM),
        }
        for param_name, (param_value, valid_values) in required_params.items():
            if param_value is None:
                raise ValueError(f"{param_name} is required when obs is False")
            if param_value not in valid_values:
                raise ValueError(
                    f"Invalid {param_name}={param_value}. Must be one of {valid_values}"
                )

        if years_up_to is None:
            raise ValueError("years_up_to is required when obs is False")
        if years_up_to <= 2006:
            raise ValueError("years_up_to must be greater than 2006")

        # Set default years_obs when not processing observations (needed by
        # bias correction against the reference period).
        if years_obs is None:
            years_obs = DEFAULT_YEARS_OBS

    # --- Dataset validation ------------------------------------------------
    if dataset not in VALID_DATASETS:
        raise ValueError(
            f"Invalid dataset='{dataset}'. Must be one of {VALID_DATASETS}"
        )

    # CORDEX-CORE-BC is already bias-corrected; doing it twice is an error.
    if dataset == "CORDEX-CORE-BC" and bias_correction:
        raise ValueError(
            "Cannot apply bias_correction=True when using dataset='CORDEX-CORE-BC'. "
            "The CORDEX-CORE-BC dataset is already bias-corrected using ISIMIP methodology."
        )

    # --- Variable validation ------------------------------------------------
    if variables is not None:
        invalid_vars = [var for var in variables if var not in VALID_VARIABLES]
        if invalid_vars:
            raise ValueError(
                f"Invalid variables: {invalid_vars}. Must be a subset of {VALID_VARIABLES}"
            )
    else:
        variables = VALID_VARIABLES

    # Validate GCM-RCM combinations for specific domains (only relevant for
    # non-observational data).
    if not obs:
        _validate_gcm_rcm_combinations(cordex_domain, gcm, rcm)

    _validate_urls(
        gcm,
        rcm,
        rcp,
        remote,
        cordex_domain,
        obs,
        historical,
        bias_correction,
        dataset,
        variables,
    )

    bbox = _geo_localize(country, xlim, ylim, buffer, cordex_domain, obs)

    # --- Sequential path ----------------------------------------------------
    if num_processes <= 1 or len(variables) <= 1:
        results = {}
        for variable in variables:
            try:
                results[variable] = process_worker(
                    max_threads_per_process,
                    variable=variable,
                    bbox=bbox,
                    cordex_domain=cordex_domain,
                    rcp=rcp,
                    gcm=gcm,
                    rcm=rcm,
                    years_up_to=years_up_to,
                    years_obs=years_obs,
                    obs=obs,
                    bias_correction=bias_correction,
                    historical=historical,
                    remote=remote,
                    dataset=dataset,
                )
            except Exception as exc:
                raise RuntimeError(
                    f"Variable '{variable}' failed for {gcm}-{rcm} {rcp}"
                ) from exc
        return results

    # --- Parallel path: one process per variable -----------------------------
    with mp.Pool(processes=min(num_processes, len(variables))) as pool:
        futures = []
        for variable in variables:
            futures.append(
                pool.apply_async(
                    process_worker,
                    args=(max_threads_per_process,),
                    kwds={
                        "variable": variable,
                        "bbox": bbox,
                        "cordex_domain": cordex_domain,
                        "rcp": rcp,
                        "gcm": gcm,
                        "rcm": rcm,
                        "years_up_to": years_up_to,
                        "years_obs": years_obs,
                        "obs": obs,
                        "bias_correction": bias_correction,
                        "historical": historical,
                        "remote": remote,
                        "dataset": dataset,
                    },
                )
            )

        try:
            # .get() re-raises any exception from the worker process.
            results = {
                variable: futures[i].get() for i, variable in enumerate(variables)
            }
        except Exception as exc:
            pool.terminate()
            pool.join()
            raise RuntimeError(
                f"Variable processing failed for {gcm}-{rcm} {rcp}"
            ) from exc

        pool.close()  # Prevent any more tasks from being submitted to the pool
        pool.join()  # Wait for all worker processes to finish

    return results
207
+
208
+
209
+ def _normalize_selection(
210
+ value: str | list[str] | None,
211
+ valid_values: list[str],
212
+ name: str,
213
+ ) -> tuple[list[str], bool]:
214
+ if value is None:
215
+ return list(valid_values), True
216
+ if isinstance(value, str):
217
+ values = [value]
218
+ else:
219
+ values = list(value)
220
+ if not values:
221
+ raise ValueError(f"{name} list cannot be empty")
222
+ invalid = [v for v in values if v not in valid_values]
223
+ if invalid:
224
+ raise ValueError(f"Invalid {name} values: {invalid}. Must be within {valid_values}")
225
+ return values, False
226
+
227
+
228
def _run_combo_variable_task(
    rcp_val: str,
    gcm_val: str,
    rcm_val: str,
    variable: str,
    common_kwargs: dict,
    max_threads_per_process: int,
    bbox: dict,
):
    """Fetch one (rcp, gcm, rcm, variable) combination in a worker process.

    Thin picklable wrapper around ``process_worker`` used by the
    multi-combination pool; returns the identifying tuple alongside the data
    so the parent process can slot the result into its nested results dict.
    """
    result = process_worker(
        max_threads_per_process,
        variable=variable,
        bbox=bbox,
        rcp=rcp_val,
        gcm=gcm_val,
        rcm=rcm_val,
        **common_kwargs,
    )
    return rcp_val, gcm_val, rcm_val, variable, result
247
+
248
+
249
def get_climate_data(
    *,
    country: str | None,
    years_obs: range | None = None,
    obs: bool = False,
    cordex_domain: str | None = None,
    rcp: str | list[str] | None = None,
    gcm: str | list[str] | None = None,
    rcm: str | list[str] | None = None,
    years_up_to: int | None = None,
    bias_correction: bool = False,
    historical: bool = False,
    buffer: int = 0,
    xlim: tuple[float, float] | None = None,
    ylim: tuple[float, float] | None = None,
    remote: bool = True,
    variables: list[str] | None = None,
    num_processes: int = len(VALID_VARIABLES),
    max_threads_per_process: int = 3,
    dataset: str = "CORDEX-CORE",
    max_total_processes: int = 12,
) -> dict:
    """
    Retrieve CORDEX-CORE projections and/or ERA5 observations for a region.

    The function orchestrates validation, spatial subsetting, unit conversion,
    optional bias correction, and parallel download/processing.
    Parallelization uses processes across variables or model/variable combinations,
    with a thread pool inside each process for per-variable downloads.

    Args:
        country (str): Name of the country for which data is to be processed.
            Use None if specifying a region using xlim and ylim.
        years_obs (range): Range of years for observational data (ERA5 only). Required when obs is True. (default: None).
        obs (bool): Flag to indicate if processing observational data (default: False).
            When True, only years_obs is required. CORDEX parameters are optional.
        cordex_domain (str): CORDEX domain of the climate data. One of {VALID_DOMAINS}.
            Required when obs is False. (default: None).
        rcp (str | list[str] | None): Representative Concentration Pathway(s). One of {VALID_RCPS}.
            If None, all RCPs are used. Required when obs is False. (default: None).
        gcm (str | list[str] | None): GCM name(s). One of {VALID_GCM}.
            If None, all GCMs are used. Required when obs is False. (default: None).
        rcm (str | list[str] | None): RCM name(s). One of {VALID_RCM}.
            If None, all RCMs are used. Required when obs is False. (default: None).
        years_up_to (int): The ending year for the projected data. Projections start in 2006 and ends in 2100.
            Hence, if years_up_to is set to 2030, data will be downloaded for the 2006-2030 period.
            Required when obs is False. (default: None).
        bias_correction (bool): Whether to apply bias correction (default: False).
        historical (bool): Flag to indicate if processing historical data (default: False).
            If True, historical data is provided together with projections.
            Historical simulation runs for CORDEX-CORE initiative are provided for the 1980-2005 time period.
        buffer (int): Buffer distance to expand the region of interest (default: 0).
        xlim (tuple or None): Longitudinal bounds of the region of interest. Use only when country is None (default: None).
        ylim (tuple or None): Latitudinal bounds of the region of interest. Use only when country is None (default: None).
        remote (bool): Flag to work with remote data or not (default: True).
        variables (list[str] or None): List of variables to process. Must be a subset of {VALID_VARIABLES}. If None, all variables are processed. (default: None).
        num_processes (int): Number of processes to use, one per variable for a single combo.
            If num_processes <= 1 or only one variable is requested, variables run sequentially.
            By default equals to the number of all possible variables. (default: {len(VALID_VARIABLES)}).
        max_threads_per_process (int): Max number of threads within each process. (default: 3).
        dataset (str): Dataset source to use. Options are "CORDEX-CORE" (original data) or "CORDEX-CORE-BC" (ISIMIP bias-corrected data). (default: "CORDEX-CORE").
        max_total_processes (int): Max number of processes when multiple models/RCPs are requested.
            Defaults to 12 (cap applies to total combo-variable tasks).

    Returns:
        dict: If a single (gcm, rcm, rcp) is requested, returns {variable: DataArray}.
            If multiple are requested, returns {rcp: {"{gcm}-{rcm}": {variable: DataArray}}}.
    """

    # Observational requests are single-source: lists of models make no sense.
    if obs and any(isinstance(v, list) for v in (rcp, gcm, rcm) if v is not None):
        raise ValueError("rcp/gcm/rcm lists are not supported when obs=True")

    # obs=True short-circuits to the single-combination path; the model
    # parameters are dummies there, so a scalar is extracted defensively.
    if obs:
        return _get_climate_data_single(
            country=country,
            years_obs=years_obs,
            obs=obs,
            cordex_domain=cordex_domain,
            rcp=rcp if isinstance(rcp, str) or rcp is None else rcp[0],
            gcm=gcm if isinstance(gcm, str) or gcm is None else gcm[0],
            rcm=rcm if isinstance(rcm, str) or rcm is None else rcm[0],
            years_up_to=years_up_to,
            bias_correction=bias_correction,
            historical=historical,
            buffer=buffer,
            xlim=xlim,
            ylim=ylim,
            remote=remote,
            variables=variables,
            num_processes=num_processes,
            max_threads_per_process=max_threads_per_process,
            dataset=dataset,
        )

    if cordex_domain is None:
        raise ValueError("cordex_domain is required when obs is False")

    # Region selection: exactly one of country or (xlim, ylim).
    if xlim is None and ylim is not None or xlim is not None and ylim is None:
        raise ValueError(
            "xlim and ylim mismatch: they must be both specified or both unspecified"
        )
    if country is None and xlim is None:
        raise ValueError("You must specify a country or (xlim, ylim)")
    if country is not None and xlim is not None:
        raise ValueError("You must specify either country or (xlim, ylim), not both")

    if dataset not in VALID_DATASETS:
        raise ValueError(
            f"Invalid dataset='{dataset}'. Must be one of {VALID_DATASETS}"
        )
    # CORDEX-CORE-BC is already bias-corrected; doing it twice is an error.
    if dataset == "CORDEX-CORE-BC" and bias_correction:
        raise ValueError(
            "Cannot apply bias_correction=True when using dataset='CORDEX-CORE-BC'. "
            "The CORDEX-CORE-BC dataset is already bias-corrected using ISIMIP methodology."
        )

    if years_up_to is None:
        raise ValueError("years_up_to is required when obs is False")
    if years_up_to <= 2006:
        raise ValueError("years_up_to must be greater than 2006")

    # Reference period needed downstream (e.g. for bias correction).
    if years_obs is None:
        years_obs = DEFAULT_YEARS_OBS

    # Expand scalar/None selections into explicit lists; the booleans record
    # whether the full default list was substituted (used below to decide
    # between raising on and skipping invalid combinations).
    rcps, _all_rcps = _normalize_selection(rcp, VALID_RCPS, "rcp")
    gcms, all_gcms = _normalize_selection(gcm, VALID_GCM, "gcm")
    rcms, all_rcms = _normalize_selection(rcm, VALID_RCM, "rcm")

    # Cartesian product of all requested rcp/gcm/rcm combinations.
    combos = [(r, g, m) for r in rcps for g in gcms for m in rcms]
    if len(combos) == 1:
        # Single combination: delegate to the legacy single-combo path, which
        # returns the flat {variable: DataArray} structure.
        rcp_single, gcm_single, rcm_single = combos[0]
        return _get_climate_data_single(
            country=country,
            years_obs=years_obs,
            obs=obs,
            cordex_domain=cordex_domain,
            rcp=rcp_single,
            gcm=gcm_single,
            rcm=rcm_single,
            years_up_to=years_up_to,
            bias_correction=bias_correction,
            historical=historical,
            buffer=buffer,
            xlim=xlim,
            ylim=ylim,
            remote=remote,
            variables=variables,
            num_processes=num_processes,
            max_threads_per_process=max_threads_per_process,
            dataset=dataset,
        )

    # Partition combos into valid/invalid per the domain's GCM-RCM matrix.
    valid_combos: list[tuple[str, str, str]] = []
    invalid_combos: list[tuple[str, str, str]] = []
    for rcp_val, gcm_val, rcm_val in combos:
        try:
            _validate_gcm_rcm_combinations(cordex_domain, gcm_val, rcm_val)
            valid_combos.append((rcp_val, gcm_val, rcm_val))
        except ValueError:
            invalid_combos.append((rcp_val, gcm_val, rcm_val))

    # Explicitly requested invalid combos are an error; combos that only
    # arise from defaulted "all models" selections are skipped with a warning.
    if invalid_combos and not (all_gcms or all_rcms):
        raise ValueError(
            "Some requested GCM/RCM combinations are invalid for this domain: "
            + ", ".join(f"{g}-{m} ({r})" for r, g, m in invalid_combos)
        )
    if invalid_combos and (all_gcms or all_rcms):
        logger.warning(
            "Skipping invalid GCM/RCM combinations for %s: %s",
            cordex_domain,
            ", ".join(f"{g}-{m} ({r})" for r, g, m in invalid_combos),
        )

    if variables is not None:
        invalid_vars = [var for var in variables if var not in VALID_VARIABLES]
        if invalid_vars:
            raise ValueError(
                f"Invalid variables: {invalid_vars}. Must be a subset of {VALID_VARIABLES}"
            )
        variables_list = list(variables)
    else:
        variables_list = list(VALID_VARIABLES)

    results: dict[str, dict[str, dict[str, xr.DataArray]]] = {}

    # Cap worker count by both the user limit and the actual task count.
    max_workers = max_total_processes
    max_workers = max(1, min(max_workers, len(valid_combos) * len(variables_list)))

    # Arguments shared by every combo-variable task (must stay picklable).
    common_kwargs = {
        "years_obs": years_obs,
        "obs": obs,
        "cordex_domain": cordex_domain,
        "years_up_to": years_up_to,
        "bias_correction": bias_correction,
        "historical": historical,
        "remote": remote,
        "dataset": dataset,
    }

    # Resolve the bounding box once, up front, for all tasks.
    bbox = _geo_localize(country, xlim, ylim, buffer, cordex_domain, obs)

    # Fail fast: verify every combo's URLs before launching any workers.
    for rcp_val, gcm_val, rcm_val in valid_combos:
        _validate_urls(
            gcm_val,
            rcm_val,
            rcp_val,
            remote,
            cordex_domain,
            obs,
            historical,
            bias_correction,
            dataset,
            variables_list,
        )

    # One task per (combo, variable) pair.
    tasks = [
        (rcp_val, gcm_val, rcm_val, variable, common_kwargs, max_threads_per_process, bbox)
        for rcp_val, gcm_val, rcm_val in valid_combos
        for variable in variables_list
    ]

    with mp.Pool(processes=max_workers) as pool:
        try:
            # starmap preserves task order; assemble the nested
            # {rcp: {"gcm-rcm": {variable: data}}} structure as results arrive.
            for rcp_val, gcm_val, rcm_val, variable, data in pool.starmap(
                _run_combo_variable_task, tasks
            ):
                results.setdefault(rcp_val, {}).setdefault(f"{gcm_val}-{rcm_val}", {})[
                    variable
                ] = data
        except Exception as exc:
            # Stop all remaining workers before surfacing the failure.
            pool.terminate()
            pool.join()
            raise RuntimeError(
                "Model/RCP processing failed. Enable DEBUG logs for details."
            ) from exc

    return results
486
+
487
+
488
if __name__ == "__main__":
    # Demo script illustrating the two parallelization modes of
    # get_climate_data. Both examples hit remote data sources.
    demo_domain = "AFR-22"
    demo_end_year = 2015

    # Requesting every GCM/RCM fans out one process per combo-variable task.
    print("\nExample 1: multiple models (combo-variable tasks parallelized)...")
    multi_results = get_climate_data(
        country="Togo",
        cordex_domain=demo_domain,
        rcp="rcp26",
        gcm=VALID_GCM,
        rcm=VALID_RCM,
        years_up_to=demo_end_year,
        historical=True,
        bias_correction=False,
        dataset="CORDEX-CORE",
    )
    # Show a compact summary of the nested structure returned.
    for rcp_val, model_map in multi_results.items():
        print(rcp_val, "models:", list(model_map.keys()))

    # A single model/RCP instead parallelizes across variables.
    print("\nExample 2: single model/RCP (variables parallelized)...")
    single_result = get_climate_data(
        country="Togo",
        cordex_domain=demo_domain,
        rcp="rcp26",
        gcm="MPI",
        rcm="REMO",
        years_up_to=demo_end_year,
        historical=True,
        bias_correction=False,
        dataset="CORDEX-CORE",
    )
    print("Single model variables:", list(single_result.keys()))

    print("Examples completed successfully!")