climate-ref-esmvaltool 0.6.4__py3-none-any.whl → 0.6.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- climate_ref_esmvaltool/dataset_registry/data.txt +75 -0
- climate_ref_esmvaltool/diagnostics/__init__.py +24 -0
- climate_ref_esmvaltool/diagnostics/base.py +82 -16
- climate_ref_esmvaltool/diagnostics/climate_at_global_warming_levels.py +5 -2
- climate_ref_esmvaltool/diagnostics/climate_drivers_for_fire.py +68 -0
- climate_ref_esmvaltool/diagnostics/cloud_radiative_effects.py +2 -2
- climate_ref_esmvaltool/diagnostics/cloud_scatterplots.py +188 -0
- climate_ref_esmvaltool/diagnostics/ecs.py +9 -18
- climate_ref_esmvaltool/diagnostics/enso.py +10 -4
- climate_ref_esmvaltool/diagnostics/example.py +15 -2
- climate_ref_esmvaltool/diagnostics/regional_historical_changes.py +340 -0
- climate_ref_esmvaltool/diagnostics/sea_ice_area_basic.py +5 -2
- climate_ref_esmvaltool/diagnostics/sea_ice_sensitivity.py +108 -0
- climate_ref_esmvaltool/diagnostics/tcr.py +9 -18
- climate_ref_esmvaltool/diagnostics/tcre.py +5 -2
- climate_ref_esmvaltool/diagnostics/zec.py +5 -2
- climate_ref_esmvaltool/recipe.py +46 -7
- climate_ref_esmvaltool/recipes.txt +16 -10
- climate_ref_esmvaltool/requirements/conda-lock.yml +4081 -3770
- climate_ref_esmvaltool/requirements/environment.yml +1 -0
- {climate_ref_esmvaltool-0.6.4.dist-info → climate_ref_esmvaltool-0.6.6.dist-info}/METADATA +2 -2
- climate_ref_esmvaltool-0.6.6.dist-info/RECORD +30 -0
- climate_ref_esmvaltool-0.6.4.dist-info/RECORD +0 -26
- {climate_ref_esmvaltool-0.6.4.dist-info → climate_ref_esmvaltool-0.6.6.dist-info}/WHEEL +0 -0
- {climate_ref_esmvaltool-0.6.4.dist-info → climate_ref_esmvaltool-0.6.6.dist-info}/entry_points.txt +0 -0
- {climate_ref_esmvaltool-0.6.4.dist-info → climate_ref_esmvaltool-0.6.6.dist-info}/licenses/LICENCE +0 -0
- {climate_ref_esmvaltool-0.6.4.dist-info → climate_ref_esmvaltool-0.6.6.dist-info}/licenses/NOTICE +0 -0
climate_ref_esmvaltool/diagnostics/ecs.py

@@ -61,7 +61,10 @@ class EquilibriumClimateSensitivity(ESMValToolDiagnostic):
     facets = ("grid_label", "member_id", "source_id", "region", "metric")
 
     @staticmethod
-    def update_recipe(
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
         """Update the recipe."""
         # Only run the diagnostic that computes ECS for a single model.
         recipe["diagnostics"] = {
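Across these diagnostics the signature change is the same: `update_recipe` now receives the selected input files as a mapping keyed by `SourceDatasetType` instead of a single `pandas.DataFrame`, and each diagnostic indexes the source it needs. A minimal sketch of the new calling convention, with made-up dataframe contents standing in for the catalogues built by the REF data layer:

    import pandas
    from climate_ref_core.datasets import SourceDatasetType

    # Hypothetical catalogue rows; the real dataframes come from the REF ingestion layer.
    cmip6_df = pandas.DataFrame({"variable_id": ["tas"], "source_id": ["ACCESS-ESM1-5"]})
    obs_df = pandas.DataFrame({"variable_id": ["psl"], "source_id": ["ERA-5"]})

    input_files = {
        SourceDatasetType.CMIP6: cmip6_df,
        SourceDatasetType.obs4MIPs: obs_df,
    }

    # A diagnostic that only needs CMIP6 data indexes the mapping:
    cmip6_files = input_files[SourceDatasetType.CMIP6]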
@@ -88,21 +91,11 @@ class EquilibriumClimateSensitivity(ESMValToolDiagnostic):
         # Prepare updated datasets section in recipe. It contains two
         # datasets, one for the "abrupt-4xCO2" and one for the "piControl"
         # experiment.
-        recipe_variables = dataframe_to_recipe(
-
-
-
-
-        for variable in recipe_variables.values():
-            for dataset in variable["additional_datasets"]:
-                start, end = dataset["timerange"].split("/")
-                start_times.append(start)
-                end_times.append(end)
-        timerange = f"{max(start_times)}/{min(end_times)}"
-
-        datasets = recipe_variables["tas"]["additional_datasets"]
-        for dataset in datasets:
-            dataset["timerange"] = timerange
+        recipe_variables = dataframe_to_recipe(
+            input_files[SourceDatasetType.CMIP6],
+            equalize_timerange=True,
+        )
+        recipe["datasets"] = recipe_variables["tas"]["additional_datasets"]
 
         # Remove keys from the recipe that are only used for YAML anchors
         keys_to_remove = [
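The timerange-overlap logic removed above (gather each dataset's timerange, then span from the latest start to the earliest end) is now requested through the new `equalize_timerange=True` argument of `dataframe_to_recipe`. A rough sketch of what that overlap computation does, reconstructed from the deleted lines rather than from the actual implementation in recipe.py:

    def common_timerange(recipe_variables: dict) -> str:
        """Illustrative only: overlap of all dataset timeranges ("start/end" strings)."""
        start_times: list[str] = []
        end_times: list[str] = []
        for variable in recipe_variables.values():
            for dataset in variable["additional_datasets"]:
                start, end = dataset["timerange"].split("/")
                start_times.append(start)
                end_times.append(end)
        # The common period runs from the latest start to the earliest end.
        return f"{max(start_times)}/{min(end_times)}"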
@@ -116,8 +109,6 @@ class EquilibriumClimateSensitivity(ESMValToolDiagnostic):
         for key in keys_to_remove:
             recipe.pop(key, None)
 
-        recipe["datasets"] = datasets
-
     @staticmethod
     def format_result(
         result_dir: Path,
climate_ref_esmvaltool/diagnostics/enso.py

@@ -54,9 +54,12 @@ class ENSOBasicClimatology(ESMValToolDiagnostic):
     facets = ()
 
     @staticmethod
-    def update_recipe(
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
         """Update the recipe."""
-        recipe_variables = dataframe_to_recipe(input_files)
+        recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
         recipe.pop("datasets")
         for diagnostic in recipe["diagnostics"].values():
             for variable in diagnostic["variables"].values():
@@ -97,9 +100,12 @@ class ENSOCharacteristics(ESMValToolDiagnostic):
     facets = ("grid_label", "member_id", "source_id", "region", "metric")
 
     @staticmethod
-    def update_recipe(
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
         """Update the recipe."""
-        recipe_variables = dataframe_to_recipe(input_files)
+        recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
         recipe["datasets"] = recipe_variables["tos"]["additional_datasets"]
         # TODO: update the observational data requirement once available on ESGF.
         # Observations - use only one per run
climate_ref_esmvaltool/diagnostics/example.py

@@ -3,6 +3,7 @@ import pandas
 from climate_ref_core.constraints import AddSupplementaryDataset, RequireContiguousTimerange
 from climate_ref_core.datasets import FacetFilter, SourceDatasetType
 from climate_ref_core.diagnostics import DataRequirement
+from climate_ref_core.metric_values.typing import SeriesDefinition
 from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
 from climate_ref_esmvaltool.recipe import dataframe_to_recipe
 from climate_ref_esmvaltool.types import Recipe
@@ -16,6 +17,15 @@ class GlobalMeanTimeseries(ESMValToolDiagnostic):
     name = "Global Mean Timeseries"
     slug = "global-mean-timeseries"
     base_recipe = "examples/recipe_python.yml"
+    series = (
+        SeriesDefinition(
+            file_pattern="timeseries/script1/*.nc",
+            dimensions={},
+            values_name="tas",
+            index_name="time",
+            attributes=[],
+        ),
+    )
 
     data_requirements = (
         DataRequirement(
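The new `series` attribute declares how a scalar time series is harvested from the ESMValTool output: a glob pattern for the NetCDF files, the variable that supplies the values ("tas") and the coordinate used as the index ("time"). The reader that consumes these definitions is not shown in this diff; a loose sketch of what consuming such a definition could look like, assuming the files are globbed under the execution's result directory and read with xarray (the helper name is hypothetical):

    from pathlib import Path

    import xarray

    def read_series(result_dir: Path, file_pattern: str, values_name: str, index_name: str):
        """Illustrative reader for a SeriesDefinition-like declaration (hypothetical helper)."""
        for path in sorted(result_dir.glob(file_pattern)):
            ds = xarray.open_dataset(path)
            # Yield the index coordinate and the values variable, e.g. "time" and "tas".
            yield ds[index_name].values, ds[values_name].values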
@@ -31,7 +41,10 @@ class GlobalMeanTimeseries(ESMValToolDiagnostic):
     facets = ()
 
     @staticmethod
-    def update_recipe(
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
         """Update the recipe."""
         # Clear unwanted elements from the recipe.
         recipe["datasets"].clear()
@@ -40,7 +53,7 @@ class GlobalMeanTimeseries(ESMValToolDiagnostic):
             variables.clear()
 
         # Prepare updated variables section in recipe.
-        recipe_variables = dataframe_to_recipe(input_files)
+        recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
         recipe_variables = {k: v for k, v in recipe_variables.items() if k != "areacella"}
         for variable in recipe_variables.values():
             variable["preprocessor"] = "annual_mean_global"
climate_ref_esmvaltool/diagnostics/regional_historical_changes.py (new file)

@@ -0,0 +1,340 @@
+import copy
+from pathlib import Path
+
+import numpy as np
+import pandas
+import xarray
+
+from climate_ref_core.constraints import (
+    AddSupplementaryDataset,
+    RequireContiguousTimerange,
+    RequireFacets,
+)
+from climate_ref_core.datasets import ExecutionDatasetCollection, FacetFilter, SourceDatasetType
+from climate_ref_core.diagnostics import DataRequirement
+from climate_ref_core.metric_values.typing import SeriesDefinition
+from climate_ref_core.pycmec.metric import CMECMetric, MetricCV
+from climate_ref_core.pycmec.output import CMECOutput
+from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
+from climate_ref_esmvaltool.recipe import dataframe_to_recipe
+from climate_ref_esmvaltool.types import MetricBundleArgs, OutputBundleArgs, Recipe
+
+REGIONS = (
+    "Arabian-Peninsula",
+    "Arabian-Sea",
+    "Arctic-Ocean",
+    "Bay-of-Bengal",
+    "C.Australia",
+    "C.North-America",
+    "Caribbean",
+    "Central-Africa",
+    "E.Antarctica",
+    "E.Asia",
+    "E.Australia",
+    "E.C.Asia",
+    "E.Europe",
+    "E.North-America",
+    "E.Siberia",
+    "E.Southern-Africa",
+    "Equatorial.Atlantic-Ocean",
+    "Equatorial.Indic-Ocean",
+    "Equatorial.Pacific-Ocean",
+    "Greenland/Iceland",
+    "Madagascar",
+    "Mediterranean",
+    "N.Atlantic-Ocean",
+    "N.Australia",
+    "N.Central-America",
+    "N.E.North-America",
+    "N.E.South-America",
+    "N.Eastern-Africa",
+    "N.Europe",
+    "N.Pacific-Ocean",
+    "N.South-America",
+    "N.W.North-America",
+    "N.W.South-America",
+    "New-Zealand",
+    "Russian-Arctic",
+    "Russian-Far-East",
+    "S.Asia",
+    "S.Atlantic-Ocean",
+    "S.Australia",
+    "S.Central-America",
+    "S.E.Asia",
+    "S.E.South-America",
+    "S.Eastern-Africa",
+    "S.Indic-Ocean",
+    "S.Pacific-Ocean",
+    "S.South-America",
+    "S.W.South-America",
+    "Sahara",
+    "South-American-Monsoon",
+    "Southern-Ocean",
+    "Tibetan-Plateau",
+    "W.Antarctica",
+    "W.C.Asia",
+    "W.North-America",
+    "W.Siberia",
+    "W.Southern-Africa",
+    "West&Central-Europe",
+    "Western-Africa",
+)
+
+
+def normalize_region(region: str) -> str:
+    """Normalize region name so it can be used in filenames."""
+    return region.replace("&", "-and-").replace("/", "-and-")
+
+
+class RegionalHistoricalAnnualCycle(ESMValToolDiagnostic):
+    """
+    Plot regional historical annual cycle of climate variables.
+    """
+
+    name = "Regional historical annual cycle of climate variables"
+    slug = "regional-historical-annual-cycle"
+    base_recipe = "ref/recipe_ref_annual_cycle_region.yml"
+
+    variables = (
+        "hus",
+        "pr",
+        "psl",
+        "tas",
+        "ua",
+    )
+    series = tuple(
+        SeriesDefinition(
+            file_pattern=f"anncyc-{region}/allplots/*_{var_name}_*.nc",
+            sel={"dim0": 0},  # Select the model and not the observation.
+            dimensions={"region": region},
+            values_name=var_name,
+            index_name="month_number",
+            attributes=[],
+        )
+        for var_name in variables
+        for region in REGIONS
+    )
+
+    data_requirements = (
+        DataRequirement(
+            source_type=SourceDatasetType.CMIP6,
+            filters=(
+                FacetFilter(
+                    facets={
+                        "variable_id": variables,
+                        "experiment_id": "historical",
+                        "frequency": "mon",
+                    },
+                ),
+            ),
+            group_by=("source_id", "member_id", "grid_label"),
+            constraints=(
+                RequireFacets("variable_id", variables),
+                RequireContiguousTimerange(group_by=("instance_id",)),
+                AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
+            ),
+        ),
+        DataRequirement(
+            source_type=SourceDatasetType.obs4MIPs,
+            filters=(
+                FacetFilter(
+                    facets={
+                        "variable_id": (
+                            "psl",
+                            "ua",
+                        ),
+                        "source_id": "ERA-5",
+                        "frequency": "mon",
+                    },
+                ),
+            ),
+            group_by=("source_id",),
+            constraints=(RequireContiguousTimerange(group_by=("instance_id",)),),
+            # TODO: Add obs4MIPs datasets once available and working:
+            #
+            # obs4MIPs dataset that cannot be ingested (https://github.com/Climate-REF/climate-ref/issues/260):
+            # - GPCP-V2.3: pr
+            #
+            # Not yet available on obs4MIPs:
+            # - ERA5: hus
+            # - HadCRUT5_ground_5.0.1.0-analysis: tas
+        ),
+    )
+    facets = ()
+
+    @staticmethod
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
+        """Update the recipe."""
+        # Update the dataset.
+        recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
+        dataset = recipe_variables["hus"]["additional_datasets"][0]
+        dataset.pop("timerange")
+        dataset["benchmark_dataset"] = True
+        dataset["plot_label"] = "{dataset}.{ensemble}.{grid}".format(**dataset)
+        recipe["datasets"] = [dataset]
+
+        # Generate diagnostics for each region.
+        diagnostics = {}
+        for region in REGIONS:
+            for diagnostic_name, orig_diagnostic in recipe["diagnostics"].items():
+                # Create the diagnostic for the region.
+                diagnostic = copy.deepcopy(orig_diagnostic)
+                normalized_region = normalize_region(region)
+                diagnostics[f"{diagnostic_name}-{normalized_region}"] = diagnostic
+
+                for variable in diagnostic["variables"].values():
+                    # Remove unwanted facets that are part of the dataset.
+                    for facet in ("project", "exp", "ensemble", "grid"):
+                        variable.pop(facet, None)
+                    # Update the preprocessor so it extracts the region.
+                    preprocessor_name = variable["preprocessor"]
+                    preprocessor = copy.deepcopy(recipe["preprocessors"][preprocessor_name])
+                    preprocessor["extract_shape"]["ids"] = {"Name": [region]}
+                    variable["preprocessor"] = f"{preprocessor_name}-{normalized_region}"
+                    recipe["preprocessors"][variable["preprocessor"]] = preprocessor
+
+                # Update plot titles with region name.
+                for script in diagnostic["scripts"].values():
+                    for plot in script["plots"].values():
+                        plot["pyplot_kwargs"] = {"title": f"{{long_name}} {region}"}
+        recipe["diagnostics"] = diagnostics
+
+
+class RegionalHistoricalTimeSeries(RegionalHistoricalAnnualCycle):
+    """
+    Plot regional historical mean and anomaly of climate variables.
+    """
+
+    name = "Regional historical mean and anomaly of climate variables"
+    slug = "regional-historical-timeseries"
+    base_recipe = "ref/recipe_ref_timeseries_region.yml"
+    series = tuple(
+        SeriesDefinition(
+            file_pattern=f"{diagnostic}-{region}/allplots/*_{var_name}_*.nc",
+            sel={"dim0": 0},  # Select the model and not the observation.
+            dimensions={"region": region},
+            values_name=var_name,
+            index_name="time",
+            attributes=[],
+        )
+        for var_name in RegionalHistoricalAnnualCycle.variables
+        for region in REGIONS
+        for diagnostic in ["timeseries_abs", "timeseries"]
+    )
+
+
+class RegionalHistoricalTrend(ESMValToolDiagnostic):
+    """
+    Plot regional historical trend of climate variables.
+    """
+
+    name = "Regional historical trend of climate variables"
+    slug = "regional-historical-trend"
+    base_recipe = "ref/recipe_ref_trend_regions.yml"
+
+    data_requirements = (
+        DataRequirement(
+            source_type=SourceDatasetType.CMIP6,
+            filters=(
+                FacetFilter(
+                    facets={
+                        "variable_id": (
+                            "hus",
+                            "pr",
+                            "psl",
+                            "tas",
+                            "ua",
+                        ),
+                        "experiment_id": "historical",
+                        "frequency": "mon",
+                    },
+                ),
+            ),
+            group_by=("source_id", "member_id", "grid_label"),
+            constraints=(
+                RequireContiguousTimerange(group_by=("instance_id",)),
+                AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
+            ),
+        ),
+        DataRequirement(
+            source_type=SourceDatasetType.obs4MIPs,
+            filters=(
+                FacetFilter(
+                    facets={
+                        "variable_id": (
+                            "psl",
+                            "tas",
+                            "ua",
+                        ),
+                        "source_id": "ERA-5",
+                        "frequency": "mon",
+                    },
+                ),
+            ),
+            group_by=("source_id",),
+            constraints=(RequireContiguousTimerange(group_by=("instance_id",)),),
+            # TODO: Add obs4MIPs datasets once available and working:
+            #
+            # obs4MIPs dataset that cannot be ingested (https://github.com/Climate-REF/climate-ref/issues/260):
+            # - GPCP-V2.3: pr
+            #
+            # Not yet available on obs4MIPs:
+            # - ERA5: hus
+            # - HadCRUT5_ground_5.0.1.0-analysis: tas
+        ),
+    )
+    facets = ("grid_label", "member_id", "source_id", "variable_id", "region", "metric")
+
+    @staticmethod
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
+        """Update the recipe."""
+        recipe["datasets"] = []
+        recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
+        diagnostics = {}
+        for diagnostic_name, diagnostic in recipe["diagnostics"].items():
+            for variable_name, variable in diagnostic["variables"].items():
+                if variable_name not in recipe_variables:
+                    continue
+                dataset = recipe_variables[variable_name]["additional_datasets"][0]
+                dataset.pop("timerange")
+                variable["additional_datasets"].append(dataset)
+                diagnostics[diagnostic_name] = diagnostic
+        recipe["diagnostics"] = diagnostics
+
+    @classmethod
+    def format_result(
+        cls,
+        result_dir: Path,
+        execution_dataset: ExecutionDatasetCollection,
+        metric_args: MetricBundleArgs,
+        output_args: OutputBundleArgs,
+    ) -> tuple[CMECMetric, CMECOutput]:
+        """Format the result."""
+        metric_args[MetricCV.DIMENSIONS.value] = {
+            "json_structure": ["variable_id", "region", "metric"],
+            "variable_id": {},
+            "region": {},
+            "metric": {"trend": {}},
+        }
+        for file in result_dir.glob("work/*_trends/plot/seaborn_barplot.nc"):
+            ds = xarray.open_dataset(file)
+            source_id = execution_dataset[SourceDatasetType.CMIP6].source_id.iloc[0]
+            select = source_id == np.array([s.strip() for s in ds.dataset.values.astype(str).tolist()])
+            ds.isel(dim0=select)
+            variable_id = next(iter(ds.data_vars.keys()))
+            metric_args[MetricCV.DIMENSIONS.value]["variable_id"][variable_id] = {}
+            metric_args[MetricCV.RESULTS.value][variable_id] = {}
+            for region_value, trend_value in zip(ds.shape_id.astype(str).values, ds[variable_id].values):
+                region = region_value.strip()
+                trend = float(trend_value)
+                if region not in metric_args[MetricCV.DIMENSIONS.value]["region"]:
+                    metric_args[MetricCV.DIMENSIONS.value]["region"][region] = {}
+                metric_args[MetricCV.RESULTS.value][variable_id][region] = {"trend": trend}
+
+        return CMECMetric.model_validate(metric_args), CMECOutput.model_validate(output_args)
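`normalize_region` exists only so that region names containing `&` or `/` remain safe to embed in diagnostic, preprocessor and file names. Its behaviour follows directly from the added code; for example:

    normalize_region("West&Central-Europe")  # -> "West-and-Central-Europe"
    normalize_region("Greenland/Iceland")    # -> "Greenland-and-Iceland"
    normalize_region("S.E.South-America")    # -> unchanged, nothing to replace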
climate_ref_esmvaltool/diagnostics/sea_ice_area_basic.py

@@ -42,10 +42,13 @@ class SeaIceAreaBasic(ESMValToolDiagnostic):
     facets = ()
 
     @staticmethod
-    def update_recipe(
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
         """Update the recipe."""
         # Update datasets
-        recipe_variables = dataframe_to_recipe(input_files)
+        recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
         recipe["datasets"] = recipe_variables["siconc"]["additional_datasets"]
 
         # Use the timerange from the recipe, as defined in the variable.
climate_ref_esmvaltool/diagnostics/sea_ice_sensitivity.py (new file)

@@ -0,0 +1,108 @@
+from pathlib import Path
+
+import pandas
+import pandas as pd
+
+from climate_ref_core.constraints import (
+    AddSupplementaryDataset,
+    RequireContiguousTimerange,
+    RequireFacets,
+)
+from climate_ref_core.datasets import ExecutionDatasetCollection, FacetFilter, SourceDatasetType
+from climate_ref_core.diagnostics import DataRequirement
+from climate_ref_core.pycmec.metric import CMECMetric, MetricCV
+from climate_ref_core.pycmec.output import CMECOutput
+from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
+from climate_ref_esmvaltool.recipe import dataframe_to_recipe
+from climate_ref_esmvaltool.types import MetricBundleArgs, OutputBundleArgs, Recipe
+
+
+class SeaIceSensitivity(ESMValToolDiagnostic):
+    """
+    Calculate sea ice sensitivity.
+    """
+
+    name = "Sea ice sensitivity"
+    slug = "sea-ice-sensitivity"
+    base_recipe = "recipe_seaice_sensitivity.yml"
+
+    variables = (
+        "siconc",
+        "tas",
+    )
+
+    data_requirements = (
+        DataRequirement(
+            source_type=SourceDatasetType.CMIP6,
+            filters=(
+                FacetFilter(
+                    facets={
+                        "variable_id": variables,
+                        "experiment_id": "historical",
+                    },
+                ),
+            ),
+            group_by=("experiment_id",),  # this does nothing, but group_by cannot be empty
+            constraints=(
+                AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
+                AddSupplementaryDataset.from_defaults("areacello", SourceDatasetType.CMIP6),
+                RequireContiguousTimerange(group_by=("instance_id",)),
+                RequireFacets("variable_id", variables),
+                # TODO: Add a constraint to ensure that tas, siconc and areacello
+                # are available for each model or alternatively filter out
+                # incomplete models below.
+            ),
+        ),
+    )
+    facets = ("experiment_id", "source_id", "region", "metric")
+
+    @staticmethod
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
+        """Update the recipe."""
+        recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
+        datasets = recipe_variables["tas"]["additional_datasets"]
+        for dataset in datasets:
+            dataset.pop("mip")
+            dataset["timerange"] = "1979/2014"
+        recipe["datasets"] = datasets
+
+    @staticmethod
+    def format_result(
+        result_dir: Path,
+        execution_dataset: ExecutionDatasetCollection,
+        metric_args: MetricBundleArgs,
+        output_args: OutputBundleArgs,
+    ) -> tuple[CMECMetric, CMECOutput]:
+        """Format the result."""
+        metric_args[MetricCV.DIMENSIONS.value] = {
+            "json_structure": [
+                "source_id",
+                "region",
+                "metric",
+            ],
+            "source_id": {},
+            "region": {},
+            "metric": {},
+        }
+        for region in "antarctic", "arctic":
+            df = pd.read_csv(
+                result_dir / "work" / region / "sea_ice_sensitivity_script" / "plotted_values.csv"
+            )
+            df = df.rename(columns={"Unnamed: 0": "source_id"}).drop(columns=["label"])
+            metric_args[MetricCV.DIMENSIONS.value]["region"][region] = {}
+            for metric in df.columns[1:]:
+                metric_args[MetricCV.DIMENSIONS.value]["metric"][metric] = {}
+            for row in df.itertuples(index=False):
+                source_id = row.source_id
+                metric_args[MetricCV.DIMENSIONS.value]["source_id"][source_id] = {}
+                for metric, value in zip(df.columns[1:], row[1:]):
+                    if source_id not in metric_args[MetricCV.RESULTS.value]:
+                        metric_args[MetricCV.RESULTS.value][source_id] = {}
+                    if region not in metric_args[MetricCV.RESULTS.value][source_id]:
+                        metric_args[MetricCV.RESULTS.value][source_id][region] = {}
+                    metric_args[MetricCV.RESULTS.value][source_id][region][metric] = value
+
+        return CMECMetric.model_validate(metric_args), CMECOutput.model_validate(output_args)
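`format_result` reshapes the `plotted_values.csv` written by the recipe's `sea_ice_sensitivity_script` into the nested CMEC results structure keyed source_id -> region -> metric. A small self-contained sketch of that reshaping, using a made-up dataframe (the metric column name "sensitivity" is hypothetical) in place of the real CSV:

    import pandas as pd

    # Stand-in for work/<region>/sea_ice_sensitivity_script/plotted_values.csv
    df = pd.DataFrame({"Unnamed: 0": ["ACCESS-ESM1-5"], "sensitivity": [-3.1], "label": ["model"]})
    df = df.rename(columns={"Unnamed: 0": "source_id"}).drop(columns=["label"])

    results: dict = {}
    region = "arctic"
    for row in df.itertuples(index=False):
        # Every column after source_id is treated as a metric value for this region.
        for metric, value in zip(df.columns[1:], row[1:]):
            results.setdefault(row.source_id, {}).setdefault(region, {})[metric] = value

    # results == {"ACCESS-ESM1-5": {"arctic": {"sensitivity": -3.1}}}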
climate_ref_esmvaltool/diagnostics/tcr.py

@@ -54,7 +54,10 @@ class TransientClimateResponse(ESMValToolDiagnostic):
     facets = ("grid_label", "member_id", "source_id", "region", "metric")
 
     @staticmethod
-    def update_recipe(
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
         """Update the recipe."""
         # Only run the diagnostic that computes TCR for a single model.
         recipe["diagnostics"] = {
@@ -77,21 +80,11 @@ class TransientClimateResponse(ESMValToolDiagnostic):
         # Prepare updated datasets section in recipe. It contains two
         # datasets, one for the "1pctCO2" and one for the "piControl"
         # experiment.
-        recipe_variables = dataframe_to_recipe(
-
-
-
-
-        for variable in recipe_variables.values():
-            for dataset in variable["additional_datasets"]:
-                start, end = dataset["timerange"].split("/")
-                start_times.append(start)
-                end_times.append(end)
-        timerange = f"{max(start_times)}/{min(end_times)}"
-
-        datasets = recipe_variables["tas"]["additional_datasets"]
-        for dataset in datasets:
-            dataset["timerange"] = timerange
+        recipe_variables = dataframe_to_recipe(
+            input_files[SourceDatasetType.CMIP6],
+            equalize_timerange=True,
+        )
+        recipe["datasets"] = recipe_variables["tas"]["additional_datasets"]
 
         # Remove keys from the recipe that are only used for YAML anchors
         keys_to_remove = [
@@ -102,8 +95,6 @@ class TransientClimateResponse(ESMValToolDiagnostic):
         for key in keys_to_remove:
             recipe.pop(key, None)
 
-        recipe["datasets"] = datasets
-
     @staticmethod
     def format_result(
         result_dir: Path,
climate_ref_esmvaltool/diagnostics/tcre.py

@@ -67,12 +67,15 @@ class TransientClimateResponseEmissions(ESMValToolDiagnostic):
     facets = ("grid_label", "member_id", "source_id", "region", "metric")
 
     @staticmethod
-    def update_recipe(
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
         """Update the recipe."""
         # Prepare updated datasets section in recipe. It contains three
         # datasets, "tas" and "fco2antt" for the "esm-1pctCO2" and just "tas"
         # for the "esm-piControl" experiment.
-        recipe_variables = dataframe_to_recipe(input_files)
+        recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
         tas_esm_1pctCO2 = next(
             ds for ds in recipe_variables["tas"]["additional_datasets"] if ds["exp"] == "esm-1pctCO2"
         )
climate_ref_esmvaltool/diagnostics/zec.py

@@ -54,12 +54,15 @@ class ZeroEmissionCommitment(ESMValToolDiagnostic):
     facets = ("grid_label", "member_id", "source_id", "region", "metric")
 
     @staticmethod
-    def update_recipe(
+    def update_recipe(
+        recipe: Recipe,
+        input_files: dict[SourceDatasetType, pandas.DataFrame],
+    ) -> None:
         """Update the recipe."""
         # Prepare updated datasets section in recipe. It contains two
         # datasets, one for the "esm-1pct-brch-1000PgC" and one for the "piControl"
         # experiment.
-        datasets = dataframe_to_recipe(input_files)["tas"]["additional_datasets"]
+        datasets = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])["tas"]["additional_datasets"]
         base_dataset = next(ds for ds in datasets if ds["exp"] == "1pctCO2")
         dataset = next(ds for ds in datasets if ds["exp"] == "esm-1pct-brch-1000PgC")
         start = dataset["timerange"].split("/")[0]