climate-ref-esmvaltool 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- climate_ref_esmvaltool/__init__.py +30 -0
- climate_ref_esmvaltool/_version.py +3 -0
- climate_ref_esmvaltool/dataset_registry/data.txt +74 -0
- climate_ref_esmvaltool/diagnostics/__init__.py +19 -0
- climate_ref_esmvaltool/diagnostics/base.py +212 -0
- climate_ref_esmvaltool/diagnostics/climate_at_global_warming_levels.py +100 -0
- climate_ref_esmvaltool/diagnostics/ecs.py +144 -0
- climate_ref_esmvaltool/diagnostics/example.py +50 -0
- climate_ref_esmvaltool/diagnostics/sea_ice_area_seasonal_cycle.py +78 -0
- climate_ref_esmvaltool/diagnostics/tcr.py +130 -0
- climate_ref_esmvaltool/diagnostics/tcre.py +136 -0
- climate_ref_esmvaltool/diagnostics/zec.py +109 -0
- climate_ref_esmvaltool/py.typed +0 -0
- climate_ref_esmvaltool/recipe.py +170 -0
- climate_ref_esmvaltool/recipes.txt +7 -0
- climate_ref_esmvaltool/requirements/conda-lock.yml +20028 -0
- climate_ref_esmvaltool/requirements/environment.yml +6 -0
- climate_ref_esmvaltool/types.py +6 -0
- climate_ref_esmvaltool-0.5.0.dist-info/METADATA +59 -0
- climate_ref_esmvaltool-0.5.0.dist-info/RECORD +23 -0
- climate_ref_esmvaltool-0.5.0.dist-info/WHEEL +4 -0
- climate_ref_esmvaltool-0.5.0.dist-info/licenses/LICENCE +201 -0
- climate_ref_esmvaltool-0.5.0.dist-info/licenses/NOTICE +3 -0
climate_ref_esmvaltool/diagnostics/sea_ice_area_seasonal_cycle.py
@@ -0,0 +1,78 @@
import pandas

from climate_ref_core.constraints import (
    AddSupplementaryDataset,
    RequireContiguousTimerange,
)
from climate_ref_core.datasets import FacetFilter, SourceDatasetType
from climate_ref_core.diagnostics import DataRequirement
from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
from climate_ref_esmvaltool.recipe import dataframe_to_recipe
from climate_ref_esmvaltool.types import Recipe


class SeaIceAreaSeasonalCycle(ESMValToolDiagnostic):
    """
    Calculate seasonal cycle and time series of NH and SH sea ice area.
    """

    name = "Sea ice area seasonal cycle"
    slug = "sea-ice-area-seasonal-cycle"
    base_recipe = "ref/recipe_ref_sea_ice_area_basic.yml"

    data_requirements = (
        DataRequirement(
            source_type=SourceDatasetType.CMIP6,
            filters=(
                FacetFilter(
                    facets={
                        "variable_id": "siconc",
                        "experiment_id": "historical",
                    },
                ),
            ),
            group_by=("instance_id",),
            constraints=(
                RequireContiguousTimerange(group_by=("instance_id",)),
                AddSupplementaryDataset.from_defaults("areacello", SourceDatasetType.CMIP6),
            ),
        ),
        # TODO: Use OSI-450-nh and OSI-450-sh from obs4MIPs once available.
    )
    facets = ("model", "metric")

    @staticmethod
    def update_recipe(recipe: Recipe, input_files: pandas.DataFrame) -> None:
        """Update the recipe."""
        # Overlap between observations and historical experiment.
        timerange = "1995/2014"

        # Update datasets
        recipe_variables = dataframe_to_recipe(input_files)
        recipe["datasets"] = recipe_variables["siconc"]["additional_datasets"]
        for dataset in recipe["datasets"]:
            dataset["timerange"] = timerange

        # Update observational datasets
        nh_obs = {
            "dataset": "OSI-450-nh",
            "mip": "OImon",
            "project": "OBS",
            "supplementary_variables": [
                {
                    "short_name": "areacello",
                    "mip": "fx",
                },
            ],
            "tier": 2,
            "timerange": timerange,
            "type": "reanaly",
            "version": "v3",
        }
        sh_obs = nh_obs.copy()
        sh_obs["dataset"] = "OSI-450-sh"
        diagnostics = recipe["diagnostics"]
        diagnostics["siarea_min"]["variables"]["sea_ice_area_nh_sep"]["additional_datasets"] = [nh_obs]
        diagnostics["siarea_min"]["variables"]["sea_ice_area_sh_feb"]["additional_datasets"] = [sh_obs]
        diagnostics["siarea_seas"]["variables"]["sea_ice_area_nh"]["additional_datasets"] = [nh_obs]
        diagnostics["siarea_seas"]["variables"]["sea_ice_area_sh"]["additional_datasets"] = [sh_obs]
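After update_recipe runs, each entry in recipe["datasets"] is a dict of ESMValTool facet names produced by dataframe_to_recipe (defined in climate_ref_esmvaltool/recipe.py, shown later in this diff), with the timerange clamped to the 1995-2014 overlap with the observations. A minimal sketch of one such entry, using hypothetical model values:

# Illustrative only: "ACME-ESM", "ACME", "r1i1p1f1", "gn" and the "SImon" table
# are assumed example values, not output captured from the package.
dataset_entry = {
    "project": "CMIP6",
    "activity": "CMIP",
    "dataset": "ACME-ESM",
    "ensemble": "r1i1p1f1",
    "institute": "ACME",
    "exp": "historical",
    "grid": "gn",
    "mip": "SImon",
    "timerange": "1995/2014",
}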
climate_ref_esmvaltool/diagnostics/tcr.py
@@ -0,0 +1,130 @@
from pathlib import Path

import pandas
import xarray

from climate_ref_core.constraints import (
    AddSupplementaryDataset,
    RequireContiguousTimerange,
    RequireFacets,
    RequireOverlappingTimerange,
)
from climate_ref_core.datasets import ExecutionDatasetCollection, FacetFilter, SourceDatasetType
from climate_ref_core.diagnostics import DataRequirement
from climate_ref_core.pycmec.metric import MetricCV
from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
from climate_ref_esmvaltool.recipe import dataframe_to_recipe
from climate_ref_esmvaltool.types import MetricBundleArgs, OutputBundleArgs, Recipe


class TransientClimateResponse(ESMValToolDiagnostic):
    """
    Calculate the global mean transient climate response for a dataset.
    """

    name = "Transient Climate Response"
    slug = "transient-climate-response"
    base_recipe = "recipe_tcr.yml"

    experiments = (
        "1pctCO2",
        "piControl",
    )
    data_requirements = (
        DataRequirement(
            source_type=SourceDatasetType.CMIP6,
            filters=(
                FacetFilter(
                    facets={
                        "variable_id": ("tas",),
                        "experiment_id": experiments,
                    },
                ),
            ),
            group_by=("source_id", "member_id", "grid_label"),
            constraints=(
                RequireFacets("experiment_id", experiments),
                RequireContiguousTimerange(group_by=("instance_id",)),
                RequireOverlappingTimerange(group_by=("instance_id",)),
                AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
            ),
        ),
    )
    facets = ("source_id", "region", "metric")

    @staticmethod
    def update_recipe(recipe: Recipe, input_files: pandas.DataFrame) -> None:
        """Update the recipe."""
        # Only run the diagnostic that computes TCR for a single model.
        recipe["diagnostics"] = {
            "cmip6": {
                "description": "Calculate TCR.",
                "variables": {
                    "tas": {
                        "preprocessor": "spatial_mean",
                    },
                },
                "scripts": {
                    "tcr": {
                        "script": "climate_metrics/tcr.py",
                        "calculate_mmm": False,
                    },
                },
            },
        }

        # Prepare updated datasets section in recipe. It contains two
        # datasets, one for the "1pctCO2" and one for the "piControl"
        # experiment.
        recipe_variables = dataframe_to_recipe(input_files)
        recipe_variables = {k: v for k, v in recipe_variables.items() if k != "areacella"}

        # Select a timerange covered by all datasets.
        start_times, end_times = [], []
        for variable in recipe_variables.values():
            for dataset in variable["additional_datasets"]:
                start, end = dataset["timerange"].split("/")
                start_times.append(start)
                end_times.append(end)
        timerange = f"{max(start_times)}/{min(end_times)}"

        datasets = recipe_variables["tas"]["additional_datasets"]
        for dataset in datasets:
            dataset["timerange"] = timerange

        recipe["datasets"] = datasets

    @staticmethod
    def format_result(
        result_dir: Path,
        execution_dataset: ExecutionDatasetCollection,
        metric_args: MetricBundleArgs,
        output_args: OutputBundleArgs,
    ) -> tuple[MetricBundleArgs, OutputBundleArgs]:
        """Format the result."""
        input_files = next(c.datasets for _, c in execution_dataset.items())
        source_id = input_files.iloc[0].source_id

        tcr_ds = xarray.open_dataset(result_dir / "work" / "cmip6" / "tcr" / "tcr.nc")
        tcr = float(tcr_ds["tcr"].values[0])

        # Update the diagnostic bundle arguments with the computed diagnostics.
        metric_args[MetricCV.DIMENSIONS.value] = {
            "json_structure": [
                "source_id",
                "region",
                "metric",
            ],
            "source_id": {source_id: {}},
            "region": {"global": {}},
            "metric": {"tcr": {}},
        }
        metric_args[MetricCV.RESULTS.value] = {
            source_id: {
                "global": {
                    "tcr": tcr,
                },
            },
        }

        return metric_args, output_args
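The "common timerange" step in update_recipe intersects the available time spans by taking the latest start and the earliest end across all non-areacella variables. A standalone sketch of that step with assumed example timeranges:

# Assumed input values for illustration; the real timeranges come from
# dataframe_to_recipe and look like "<start>/<end>" ISO-style strings.
start_times, end_times = [], []
for timerange in ("18500116T120000/20141216T120000", "18500116T120000/19991216T120000"):
    start, end = timerange.split("/")
    start_times.append(start)
    end_times.append(end)
print(f"{max(start_times)}/{min(end_times)}")
# -> 18500116T120000/19991216T120000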
climate_ref_esmvaltool/diagnostics/tcre.py
@@ -0,0 +1,136 @@
from pathlib import Path

import pandas
import xarray

from climate_ref_core.constraints import (
    AddSupplementaryDataset,
    RequireContiguousTimerange,
    RequireFacets,
    RequireOverlappingTimerange,
)
from climate_ref_core.datasets import ExecutionDatasetCollection, FacetFilter, SourceDatasetType
from climate_ref_core.diagnostics import DataRequirement
from climate_ref_core.pycmec.metric import MetricCV
from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
from climate_ref_esmvaltool.recipe import dataframe_to_recipe
from climate_ref_esmvaltool.types import MetricBundleArgs, OutputBundleArgs, Recipe


class TransientClimateResponseEmissions(ESMValToolDiagnostic):
    """
    Calculate the global mean Transient Climate Response to Cumulative CO2 Emissions.
    """

    name = "Transient Climate Response to Cumulative CO2 Emissions"
    slug = "transient-climate-response-emissions"
    base_recipe = "recipe_tcre.yml"

    experiments = (
        "esm-1pctCO2",
        "esm-piControl",
    )
    variables = (
        "tas",
        "fco2antt",
    )
    data_requirements = (
        DataRequirement(
            source_type=SourceDatasetType.CMIP6,
            filters=(
                FacetFilter(
                    facets={
                        "variable_id": variables,
                        "frequency": "mon",
                        "experiment_id": experiments,
                    },
                ),
                FacetFilter(
                    facets={
                        "variable_id": "fco2antt",
                        "experiment_id": "esm-piControl",
                    },
                    keep=False,
                ),
            ),
            group_by=("source_id", "member_id", "grid_label"),
            constraints=(
                RequireFacets("experiment_id", experiments),
                RequireFacets("variable_id", variables),
                RequireContiguousTimerange(group_by=("instance_id",)),
                RequireOverlappingTimerange(group_by=("instance_id",)),
                AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
            ),
        ),
    )
    facets = ("source_id", "region", "metric")

    @staticmethod
    def update_recipe(recipe: Recipe, input_files: pandas.DataFrame) -> None:
        """Update the recipe."""
        # Prepare updated datasets section in recipe. It contains three
        # datasets, "tas" and "fco2antt" for the "esm-1pctCO2" and just "tas"
        # for the "esm-piControl" experiment.
        recipe_variables = dataframe_to_recipe(input_files)
        tas_esm_1pctCO2 = next(
            ds for ds in recipe_variables["tas"]["additional_datasets"] if ds["exp"] == "esm-1pctCO2"
        )
        fco2antt_esm_1pctCO2 = next(
            ds for ds in recipe_variables["fco2antt"]["additional_datasets"] if ds["exp"] == "esm-1pctCO2"
        )
        tas_esm_piControl = next(
            ds for ds in recipe_variables["tas"]["additional_datasets"] if ds["exp"] == "esm-piControl"
        )
        tas_esm_piControl["timerange"] = tas_esm_1pctCO2["timerange"]

        recipe["diagnostics"]["tcre"]["variables"] = {
            "tas_esm-1pctCO2": {
                "short_name": "tas",
                "preprocessor": "global_annual_mean_anomaly",
                "additional_datasets": [tas_esm_1pctCO2],
            },
            "tas_esm-piControl": {
                "short_name": "tas",
                "preprocessor": "global_annual_mean_anomaly",
                "additional_datasets": [tas_esm_piControl],
            },
            "fco2antt": {
                "preprocessor": "global_cumulative_sum",
                "additional_datasets": [fco2antt_esm_1pctCO2],
            },
        }
        recipe["diagnostics"].pop("barplot")

    @staticmethod
    def format_result(
        result_dir: Path,
        execution_dataset: ExecutionDatasetCollection,
        metric_args: MetricBundleArgs,
        output_args: OutputBundleArgs,
    ) -> tuple[MetricBundleArgs, OutputBundleArgs]:
        """Format the result."""
        input_files = next(c.datasets for _, c in execution_dataset.items())
        source_id = input_files.iloc[0].source_id

        tcre_ds = xarray.open_dataset(result_dir / "work" / "tcre" / "calculate_tcre" / "tcre.nc")
        tcre = float(tcre_ds["tcre"].values[0])

        # Update the diagnostic bundle arguments with the computed diagnostics.
        metric_args[MetricCV.DIMENSIONS.value] = {
            "json_structure": [
                "source_id",
                "region",
                "metric",
            ],
            "source_id": {source_id: {}},
            "region": {"global": {}},
            "metric": {"tcre": {}},
        }
        metric_args[MetricCV.RESULTS.value] = {
            source_id: {
                "global": {
                    "tcre": tcre,
                },
            },
        }
        return metric_args, output_args
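format_result writes a CMEC-style metric bundle whose dimensions and results follow the (source_id, region, metric) structure declared in facets. For a hypothetical model "ACME-ESM" and an illustrative TCRE value, the two dicts it fills look like:

# Hypothetical values; only the nesting structure is taken from the code above.
dimensions = {
    "json_structure": ["source_id", "region", "metric"],
    "source_id": {"ACME-ESM": {}},
    "region": {"global": {}},
    "metric": {"tcre": {}},
}
results = {"ACME-ESM": {"global": {"tcre": 1.6}}}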
climate_ref_esmvaltool/diagnostics/zec.py
@@ -0,0 +1,109 @@
from pathlib import Path

import pandas
import xarray

from climate_ref_core.constraints import (
    AddSupplementaryDataset,
    RequireContiguousTimerange,
    RequireFacets,
    RequireOverlappingTimerange,
)
from climate_ref_core.datasets import ExecutionDatasetCollection, FacetFilter, SourceDatasetType
from climate_ref_core.diagnostics import DataRequirement
from climate_ref_core.pycmec.metric import MetricCV
from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
from climate_ref_esmvaltool.recipe import dataframe_to_recipe
from climate_ref_esmvaltool.types import MetricBundleArgs, OutputBundleArgs, Recipe


class ZeroEmissionCommitment(ESMValToolDiagnostic):
    """
    Calculate the global mean Zero Emission Commitment (ZEC) temperature.
    """

    name = "Zero Emission Commitment"
    slug = "zero-emission-commitment"
    base_recipe = "recipe_zec.yml"

    experiments = (
        "1pctCO2",
        "esm-1pct-brch-1000PgC",
    )
    data_requirements = (
        DataRequirement(
            source_type=SourceDatasetType.CMIP6,
            filters=(
                FacetFilter(
                    facets={
                        "variable_id": ("tas",),
                        "experiment_id": experiments,
                    },
                ),
            ),
            group_by=("source_id", "member_id", "grid_label"),
            constraints=(
                RequireFacets("experiment_id", experiments),
                RequireContiguousTimerange(group_by=("instance_id",)),
                RequireOverlappingTimerange(group_by=("instance_id",)),
                AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
            ),
        ),
    )
    facets = ("source_id", "region", "metric")

    @staticmethod
    def update_recipe(recipe: Recipe, input_files: pandas.DataFrame) -> None:
        """Update the recipe."""
        # Prepare updated datasets section in recipe. It contains two
        # datasets, one for the "esm-1pct-brch-1000PgC" and one for the
        # "1pctCO2" experiment.
        datasets = dataframe_to_recipe(input_files)["tas"]["additional_datasets"]
        base_dataset = next(ds for ds in datasets if ds["exp"] == "1pctCO2")
        dataset = next(ds for ds in datasets if ds["exp"] == "esm-1pct-brch-1000PgC")
        start = dataset["timerange"].split("/")[0]
        base_start = f"{int(start[:4]) - 10:04d}{start[4:]}"
        base_end = f"{int(start[:4]) + 10:04d}{start[4:]}"
        base_dataset["timerange"] = f"{base_start}/{base_end}"
        variables = recipe["diagnostics"]["zec"]["variables"]
        variables["tas_base"] = {
            "short_name": "tas",
            "preprocessor": "anomaly_base",
            "additional_datasets": [base_dataset],
        }
        variables["tas"] = {
            "preprocessor": "spatial_mean",
            "additional_datasets": [dataset],
        }

    @classmethod
    def format_result(
        cls,
        result_dir: Path,
        execution_dataset: ExecutionDatasetCollection,
        metric_args: MetricBundleArgs,
        output_args: OutputBundleArgs,
    ) -> tuple[MetricBundleArgs, OutputBundleArgs]:
        """Format the result."""
        input_files = next(c.datasets for _, c in execution_dataset.items())
        source_id = input_files.iloc[0].source_id

        zec_ds = xarray.open_dataset(result_dir / "work" / "zec" / "zec" / "zec_50.nc")
        zec = float(zec_ds["zec"].values[0])

        # Update the diagnostic bundle arguments with the computed diagnostics.
        metric_args[MetricCV.DIMENSIONS.value] = {
            "json_structure": cls.facets,
            "source_id": {source_id: {}},
            "region": {"global": {}},
            "metric": {"zec": {}},
        }
        metric_args[MetricCV.RESULTS.value] = {
            source_id: {
                "global": {
                    "zec": zec,
                },
            },
        }

        return metric_args, output_args
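The anomaly baseline for ZEC is a 20-year window of the 1pctCO2 run centred on the start of the esm-1pct-brch-1000PgC branch. A standalone check of that arithmetic with an assumed branch start time:

# "19200101T000000" is an assumed example value for the branch start time.
start = "19200101T000000"
base_start = f"{int(start[:4]) - 10:04d}{start[4:]}"
base_end = f"{int(start[:4]) + 10:04d}{start[4:]}"
print(f"{base_start}/{base_end}")
# -> 19100101T000000/19300101T000000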
climate_ref_esmvaltool/py.typed (empty marker file, no content to show)
climate_ref_esmvaltool/recipe.py
@@ -0,0 +1,170 @@
from __future__ import annotations

import importlib.resources
from pathlib import Path
from typing import TYPE_CHECKING, Any

import pooch
from ruamel.yaml import YAML

from climate_ref_esmvaltool.types import Recipe

if TYPE_CHECKING:
    import pandas as pd

yaml = YAML()

FACETS = {
    "CMIP6": {
        "activity": "activity_id",
        "dataset": "source_id",
        "ensemble": "member_id",
        "institute": "institution_id",
        "exp": "experiment_id",
        "grid": "grid_label",
        "mip": "table_id",
        "short_name": "variable_id",
    },
}


def as_isodate(timestamp: pd.Timestamp) -> str:
    """Format a timestamp as an ISO 8601 datetime.

    For example, '2014-12-16 12:00:00' will be formatted as '20141216T120000'.

    Parameters
    ----------
    timestamp
        The timestamp to format.

    """
    return str(timestamp).replace(" ", "T").replace("-", "").replace(":", "")


def as_timerange(group: pd.DataFrame) -> str | None:
    """Format the timeranges from a dataframe as an ESMValTool timerange.

    Parameters
    ----------
    group
        The dataframe describing a single dataset.

    Returns
    -------
        A timerange.
    """
    # TODO: apply some rounding to avoid problems?
    # https://github.com/ESMValGroup/ESMValCore/issues/2048
    start_times = group.start_time.dropna()
    if start_times.empty:
        return None
    end_times = group.end_time.dropna()
    if end_times.empty:
        return None  # pragma: no cover
    return f"{as_isodate(start_times.min())}/{as_isodate(end_times.max())}"


def as_facets(
    group: pd.DataFrame,
) -> dict[str, Any]:
    """Convert a group from the datasets dataframe to ESMValTool facets.

    Parameters
    ----------
    group:
        A group of datasets representing a single instance_id.

    Returns
    -------
        A :obj:`dict` containing facet-value pairs.

    """
    facets = {}
    first_row = group.iloc[0]
    project = first_row.instance_id.split(".", 2)[0]
    facets["project"] = project
    for esmvaltool_name, ref_name in FACETS[project].items():
        facets[esmvaltool_name] = getattr(first_row, ref_name)
    timerange = as_timerange(group)
    if timerange is not None:
        facets["timerange"] = timerange
    return facets


def dataframe_to_recipe(files: pd.DataFrame) -> dict[str, Any]:
    """Convert the datasets dataframe to a recipe "variables" section.

    Parameters
    ----------
    files
        The pandas dataframe describing the input files.

    Returns
    -------
        A "variables" section that can be used in an ESMValTool recipe.
    """
    variables: dict[str, Any] = {}
    # TODO: refine to make it possible to combine historical and scenario runs.
    for _, group in files.groupby("instance_id"):
        facets = as_facets(group)
        short_name = facets.pop("short_name")
        if short_name not in variables:
            variables[short_name] = {"additional_datasets": []}
        variables[short_name]["additional_datasets"].append(facets)
    return variables


_ESMVALTOOL_COMMIT = "a759ce46d5185e3784997ce38a3956e39322cdac"
_ESMVALTOOL_VERSION = f"2.13.0.dev27+g{_ESMVALTOOL_COMMIT[:9]}"

_RECIPES = pooch.create(
    path=pooch.os_cache("climate_ref_esmvaltool"),
    # TODO: use a released version
    # base_url="https://raw.githubusercontent.com/ESMValGroup/ESMValTool/refs/tags/v{version}/esmvaltool/recipes/",
    # version=_ESMVALTOOL_VERSION,
    base_url=f"https://raw.githubusercontent.com/ESMValGroup/ESMValTool/{_ESMVALTOOL_COMMIT}/esmvaltool/recipes/",
    env="REF_METRICS_ESMVALTOOL_DATA_DIR",
)
_RECIPES.load_registry(str(importlib.resources.files("climate_ref_esmvaltool").joinpath("recipes.txt")))


def load_recipe(recipe: str) -> Recipe:
    """Load a recipe.

    Parameters
    ----------
    recipe
        The name of an ESMValTool recipe.

    Returns
    -------
        The loaded recipe.
    """
    filename = _RECIPES.fetch(recipe)
    return yaml.load(Path(filename).read_text(encoding="utf-8"))  # type: ignore[no-any-return]


def prepare_climate_data(datasets: pd.DataFrame, climate_data_dir: Path) -> None:
    """Symlink the input files from the Pandas dataframe into a directory tree.

    This ensures that ESMValTool can find the data and only uses the
    requested data.

    Parameters
    ----------
    datasets
        The pandas dataframe describing the input datasets.
    climate_data_dir
        The directory where ESMValTool should look for input data.
    """
    for row in datasets.itertuples():
        if not isinstance(row.instance_id, str):  # pragma: no branch
            msg = f"Invalid instance_id encountered in {row}"
            raise ValueError(msg)
        if not isinstance(row.path, str):  # pragma: no branch
            msg = f"Invalid path encountered in {row}"
            raise ValueError(msg)
        tgt = climate_data_dir.joinpath(*row.instance_id.split(".")) / Path(row.path).name
        tgt.parent.mkdir(parents=True, exist_ok=True)
        tgt.symlink_to(row.path)
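A minimal, hypothetical usage sketch for dataframe_to_recipe: the column names follow the FACETS mapping above plus instance_id, start_time and end_time; the dataset values ("ACME-ESM" and friends) are illustrative, not taken from a real catalogue.

import pandas as pd

from climate_ref_esmvaltool.recipe import dataframe_to_recipe

# One row describes one input file of a single dataset (instance_id).
files = pd.DataFrame(
    [
        {
            "instance_id": "CMIP6.CMIP.ACME.ACME-ESM.historical.r1i1p1f1.Amon.tas.gn.v20190308",
            "activity_id": "CMIP",
            "source_id": "ACME-ESM",
            "member_id": "r1i1p1f1",
            "institution_id": "ACME",
            "experiment_id": "historical",
            "grid_label": "gn",
            "table_id": "Amon",
            "variable_id": "tas",
            "start_time": pd.Timestamp("1850-01-16 12:00:00"),
            "end_time": pd.Timestamp("2014-12-16 12:00:00"),
        }
    ]
)
print(dataframe_to_recipe(files))
# {'tas': {'additional_datasets': [{'project': 'CMIP6', 'activity': 'CMIP',
#   'dataset': 'ACME-ESM', 'ensemble': 'r1i1p1f1', 'institute': 'ACME',
#   'exp': 'historical', 'grid': 'gn', 'mip': 'Amon',
#   'timerange': '18500116T120000/20141216T120000'}]}}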
climate_ref_esmvaltool/recipes.txt
@@ -0,0 +1,7 @@
examples/recipe_python.yml ab3f06d269bb2c1368f4dc39da9bcb232fb2adb1fa556ba769e6c16294ffb4a3
recipe_calculate_gwl_exceedance_stats.yml 5aa266abc9a8029649b689a2b369a47623b0935d609354332ff4148994642d6b
recipe_ecs.yml 0cc57034fcb64e32015b4ff949ece5df8cdb8c6f493618b50ceded119fb37918
recipe_tcr.yml 35f9ef035a4e71aff5cac5dd26c49da2162fc00291bf3b0bd16b661b7b2f606b
recipe_tcre.yml 4668e357e00c515a8264ac75cb319ce558289689e10189e6f9e982886c414c94
recipe_zec.yml b0af7f789b7610ab3f29a6617124aa40c40866ead958204fc199eaf82863de51
ref/recipe_ref_sea_ice_area_basic.yml 552e282a16ec355778b06f33897e1b8ba8388e5f8a5f814c4c42d91f63007457
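These entries form the pooch registry consulted by load_recipe in climate_ref_esmvaltool/recipe.py: each line pairs a recipe path (relative to esmvaltool/recipes/ at the pinned ESMValTool commit) with its SHA-256 hash. A small usage sketch, assuming the package is installed and network access is available on first use:

from climate_ref_esmvaltool.recipe import load_recipe

# Downloads the recipe from the pinned commit (or reuses the local cache) and
# verifies it against the hash listed above before parsing the YAML.
recipe = load_recipe("recipe_tcr.yml")
print(sorted(recipe))  # top-level recipe sections, e.g. 'diagnostics', 'preprocessors'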