climate-ref-esmvaltool 0.6.6__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
Files changed (22)
  1. climate_ref_esmvaltool/diagnostics/base.py +92 -45
  2. climate_ref_esmvaltool/diagnostics/climate_at_global_warming_levels.py +37 -14
  3. climate_ref_esmvaltool/diagnostics/climate_drivers_for_fire.py +37 -15
  4. climate_ref_esmvaltool/diagnostics/cloud_radiative_effects.py +37 -18
  5. climate_ref_esmvaltool/diagnostics/cloud_scatterplots.py +19 -7
  6. climate_ref_esmvaltool/diagnostics/ecs.py +26 -5
  7. climate_ref_esmvaltool/diagnostics/enso.py +98 -8
  8. climate_ref_esmvaltool/diagnostics/example.py +11 -10
  9. climate_ref_esmvaltool/diagnostics/regional_historical_changes.py +139 -24
  10. climate_ref_esmvaltool/diagnostics/sea_ice_area_basic.py +57 -3
  11. climate_ref_esmvaltool/diagnostics/sea_ice_sensitivity.py +26 -12
  12. climate_ref_esmvaltool/diagnostics/tcr.py +16 -3
  13. climate_ref_esmvaltool/diagnostics/tcre.py +10 -12
  14. climate_ref_esmvaltool/diagnostics/zec.py +17 -3
  15. climate_ref_esmvaltool/recipe.py +9 -5
  16. {climate_ref_esmvaltool-0.6.6.dist-info → climate_ref_esmvaltool-0.8.0.dist-info}/METADATA +2 -1
  17. climate_ref_esmvaltool-0.8.0.dist-info/RECORD +30 -0
  18. {climate_ref_esmvaltool-0.6.6.dist-info → climate_ref_esmvaltool-0.8.0.dist-info}/WHEEL +1 -1
  19. climate_ref_esmvaltool-0.6.6.dist-info/RECORD +0 -30
  20. {climate_ref_esmvaltool-0.6.6.dist-info → climate_ref_esmvaltool-0.8.0.dist-info}/entry_points.txt +0 -0
  21. {climate_ref_esmvaltool-0.6.6.dist-info → climate_ref_esmvaltool-0.8.0.dist-info}/licenses/LICENCE +0 -0
  22. {climate_ref_esmvaltool-0.6.6.dist-info → climate_ref_esmvaltool-0.8.0.dist-info}/licenses/NOTICE +0 -0
climate_ref_esmvaltool/diagnostics/base.py
@@ -4,6 +4,8 @@ from collections.abc import Iterable
 from pathlib import Path
 from typing import ClassVar
 
+import netCDF4
+import numpy as np
 import pandas
 import xarray as xr
 import yaml
@@ -23,6 +25,18 @@ from climate_ref_esmvaltool.recipe import load_recipe, prepare_climate_data
 from climate_ref_esmvaltool.types import MetricBundleArgs, OutputBundleArgs, Recipe
 
 
+def mask_fillvalues(array: np.ndarray) -> np.ma.MaskedArray:  # type: ignore[type-arg]
+    """Convert netCDF4 fill values in an array to a mask."""
+    # Workaround for https://github.com/pydata/xarray/issues/2742
+    defaults = {np.dtype(k): v for k, v in netCDF4.default_fillvals.items()}
+    return np.ma.masked_equal(array, defaults[array.dtype])  # type: ignore[no-untyped-call,no-any-return]
+
+
+def fillvalues_to_nan(array: np.ndarray) -> np.ndarray:  # type: ignore[type-arg]
+    """Convert netCDF4 fill values in an array to NaN."""
+    return mask_fillvalues(array).filled(np.nan)  # type: ignore[no-untyped-call,no-any-return]
+
+
 class ESMValToolDiagnostic(CommandLineDiagnostic):
     """ESMValTool Diagnostic base class."""
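
The two new helpers work around pydata/xarray#2742: when a variable lacks a _FillValue attribute, xarray passes the raw netCDF4 default fill value through undecoded. A minimal sketch of their behaviour, assuming only netCDF4 and numpy (the array values are illustrative):

import netCDF4
import numpy as np

# The default fill value for float64 ("f8") is about 9.97e+36.
arr = np.array([1.0, netCDF4.default_fillvals["f8"], 3.0])
defaults = {np.dtype(k): v for k, v in netCDF4.default_fillvals.items()}
masked = np.ma.masked_equal(arr, defaults[arr.dtype])  # what mask_fillvalues(arr) returns
print(masked.filled(np.nan))                           # fillvalues_to_nan(arr): [ 1. nan  3.]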
 
@@ -93,10 +107,11 @@ class ESMValToolDiagnostic(CommandLineDiagnostic):
         }
         recipe = load_recipe(self.base_recipe)
         self.update_recipe(recipe, input_files)
-
+        recipe_txt = yaml.safe_dump(recipe, sort_keys=False)
+        logger.info(f"Using ESMValTool recipe:\n{recipe_txt}")
         recipe_path = definition.to_output_path("recipe.yml")
         with recipe_path.open("w", encoding="utf-8") as file:
-            yaml.safe_dump(recipe, file, sort_keys=False)
+            file.write(recipe_txt)
         return recipe_path
 
     def build_cmd(self, definition: ExecutionDefinition) -> Iterable[str]:
@@ -166,8 +181,10 @@ class ESMValToolDiagnostic(CommandLineDiagnostic):
 
         config_dir = definition.to_output_path("config")
         config_dir.mkdir()
+        config_txt = yaml.safe_dump(config)
+        logger.info(f"Using ESMValTool configuration:\n{config_txt}")
         with (config_dir / "config.yml").open("w", encoding="utf-8") as file:
-            yaml.safe_dump(config, file)
+            file.write(config_txt)
 
         return [
             "esmvaltool",
@@ -198,15 +215,21 @@ class ESMValToolDiagnostic(CommandLineDiagnostic):
         metric_args = CMECMetric.create_template()
         output_args = CMECOutput.create_template()
 
+        # Input selectors for the datasets used in the diagnostic.
+        # TODO: Better handling of multiple source types
+        if SourceDatasetType.CMIP6 in definition.datasets:
+            input_selectors = definition.datasets[SourceDatasetType.CMIP6].selector_dict()
+        elif SourceDatasetType.obs4MIPs in definition.datasets:
+            input_selectors = definition.datasets[SourceDatasetType.obs4MIPs].selector_dict()
+        else:
+            input_selectors = {}
+
         # Add the plots and data files
-        variable_attributes = (
-            "long_name",
-            "standard_name",
-            "units",
-        )
         series = []
         plot_suffixes = {".png", ".jpg", ".pdf", ".ps"}
-        for metadata_file in result_dir.glob("run/*/*/diagnostic_provenance.yml"):
+        # Sort metadata files for stable processing
+        metadata_files = sorted(result_dir.glob("run/*/*/diagnostic_provenance.yml"))
+        for metadata_file in metadata_files:
             metadata = yaml.safe_load(metadata_file.read_text(encoding="utf-8"))
             for filename in metadata:
                 caption = metadata[filename].get("caption", "")
@@ -220,41 +243,11 @@ class ESMValToolDiagnostic(CommandLineDiagnostic):
                     OutputCV.LONG_NAME.value: caption,
                     OutputCV.DESCRIPTION.value: "",
                 }
-                for series_def in definition.diagnostic.series:
-                    if fnmatch.fnmatch(str(relative_path), f"executions/*/{series_def.file_pattern}"):
-                        dataset = xr.open_dataset(
-                            filename, decode_times=xr.coders.CFDatetimeCoder(use_cftime=True)
-                        )
-                        dataset = dataset.sel(series_def.sel)
-                        attributes = {
-                            attr: dataset.attrs[attr]
-                            for attr in series_def.attributes
-                            if attr in dataset.attrs
-                        }
-                        attributes["caption"] = caption
-                        attributes["values_name"] = series_def.values_name
-                        attributes["index_name"] = series_def.index_name
-                        for attr in variable_attributes:
-                            if attr in dataset[series_def.values_name].attrs:
-                                attributes[f"value_{attr}"] = dataset[series_def.values_name].attrs[attr]
-                            if attr in dataset[series_def.index_name].attrs:
-                                attributes[f"index_{attr}"] = dataset[series_def.index_name].attrs[attr]
-                        index = dataset[series_def.index_name].values.tolist()
-                        if hasattr(index[0], "calendar"):
-                            attributes["calendar"] = index[0].calendar
-                        if hasattr(index[0], "isoformat"):
-                            # Convert time objects to strings.
-                            index = [v.isoformat() for v in index]
-
-                        series.append(
-                            SeriesMetricValue(
-                                dimensions=series_def.dimensions,
-                                values=dataset[series_def.values_name].values.tolist(),
-                                index=index,
-                                index_name=series_def.index_name,
-                                attributes=attributes,
-                            )
-                        )
+                series.extend(
+                    self._extract_series_from_file(
+                        definition, filename, relative_path, caption=caption, input_selectors=input_selectors
+                    )
+                )
 
         # Add the index.html file
         index_html = f"{result_dir}/index.html"
@@ -278,7 +271,6 @@ class ESMValToolDiagnostic(CommandLineDiagnostic):
 
         # Add the extra information from the groupby operations
        if len(metric_bundle.DIMENSIONS[MetricCV.JSON_STRUCTURE.value]):
-            input_selectors = definition.datasets[SourceDatasetType.CMIP6].selector_dict()
             metric_bundle = metric_bundle.prepend_dimensions(input_selectors)
 
         return ExecutionResult.build_from_output_bundle(
@@ -287,3 +279,58 @@ class ESMValToolDiagnostic(CommandLineDiagnostic):
             cmec_metric_bundle=metric_bundle,
             series=series,
         )
+
+    def _extract_series_from_file(
+        self,
+        definition: ExecutionDefinition,
+        filename: Path,
+        relative_path: Path,
+        caption: str,
+        input_selectors: dict[str, str],
+    ) -> list[SeriesMetricValue]:
+        """
+        Extract series data from a file if it matches any of the series definitions.
+        """
+        variable_attributes = (
+            "long_name",
+            "standard_name",
+            "units",
+        )
+
+        series = []
+        for series_def in definition.diagnostic.series:
+            if fnmatch.fnmatch(
+                str(relative_path),
+                f"executions/*/{series_def.file_pattern.format(**input_selectors)}",
+            ):
+                dataset = xr.open_dataset(filename, decode_times=xr.coders.CFDatetimeCoder(use_cftime=True))
+                dataset = dataset.sel(series_def.sel)
+                attributes = {
+                    attr: dataset.attrs[attr] for attr in series_def.attributes if attr in dataset.attrs
+                }
+                attributes["caption"] = caption
+                attributes["values_name"] = series_def.values_name
+                attributes["index_name"] = series_def.index_name
+                for attr in variable_attributes:
+                    if attr in dataset[series_def.values_name].attrs:
+                        attributes[f"value_{attr}"] = dataset[series_def.values_name].attrs[attr]
+                    if attr in dataset[series_def.index_name].attrs:
+                        attributes[f"index_{attr}"] = dataset[series_def.index_name].attrs[attr]
+                # TODO: Handle masked values in the index
+                index = dataset[series_def.index_name].values.tolist()
+                if hasattr(index[0], "calendar"):
+                    attributes["calendar"] = index[0].calendar
+                if hasattr(index[0], "isoformat"):
+                    # Convert time objects to strings.
+                    index = [v.isoformat() for v in index]
+
+                series.append(
+                    SeriesMetricValue(
+                        dimensions={**input_selectors, **series_def.dimensions},
+                        values=fillvalues_to_nan(dataset[series_def.values_name].values).tolist(),
+                        index=index,
+                        index_name=series_def.index_name,
+                        attributes=attributes,
+                    )
+                )
+        return series
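
Besides hoisting the loop into a helper, this refactor changes the match itself: the series file_pattern is now formatted with the input selectors before the fnmatch comparison. A small sketch of that logic; the pattern and selector values here are hypothetical, only fnmatch semantics are assumed:

import fnmatch

input_selectors = {"source_id": "ACCESS-ESM1-5"}     # hypothetical selector dict
file_pattern = "timeseries/script/{source_id}_*.nc"  # hypothetical SeriesDefinition.file_pattern
relative_path = "executions/abc123/timeseries/script/ACCESS-ESM1-5_tas.nc"

# fnmatch's "*" also crosses "/" boundaries, so "executions/*/" matches any execution id.
print(fnmatch.fnmatch(relative_path, f"executions/*/{file_pattern.format(**input_selectors)}"))  # True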

climate_ref_esmvaltool/diagnostics/climate_at_global_warming_levels.py
@@ -2,8 +2,9 @@ import pandas
 
 from climate_ref_core.constraints import (
     AddSupplementaryDataset,
-    RequireContiguousTimerange,
+    PartialDateTime,
     RequireFacets,
+    RequireTimerange,
 )
 from climate_ref_core.datasets import FacetFilter, SourceDatasetType
 from climate_ref_core.diagnostics import DataRequirement
@@ -26,6 +27,14 @@ class ClimateAtGlobalWarmingLevels(ESMValToolDiagnostic):
         "tas",
     )
 
+    matching_facets = (
+        "source_id",
+        "member_id",
+        "grid_label",
+        "table_id",
+        "variable_id",
+    )
+
     data_requirements = (
         DataRequirement(
             source_type=SourceDatasetType.CMIP6,
@@ -39,25 +48,32 @@
                             "ssp370",
                             "ssp585",
                         ),
+                        "table_id": "Amon",
                     },
                 ),
             ),
             group_by=("experiment_id",),
             constraints=(
-                RequireFacets("variable_id", variables),
                 AddSupplementaryDataset(
                     supplementary_facets={"experiment_id": "historical"},
-                    matching_facets=(
-                        "source_id",
-                        "member_id",
-                        "grid_label",
-                        "table_id",
-                        "variable_id",
-                    ),
+                    matching_facets=matching_facets,
                     optional_matching_facets=tuple(),
                 ),
-                RequireFacets("experiment_id", ("historical",)),
-                RequireContiguousTimerange(group_by=("instance_id",)),
+                RequireTimerange(
+                    group_by=matching_facets,
+                    start=PartialDateTime(year=1850, month=1),
+                    end=PartialDateTime(year=2100, month=12),
+                ),
+                RequireFacets(
+                    "experiment_id",
+                    required_facets=("historical",),
+                    group_by=matching_facets,
+                ),
+                RequireFacets(
+                    "variable_id",
+                    required_facets=variables,
+                    group_by=("experiment_id", "source_id", "member_id", "grid_label", "table_id"),
+                ),
                 AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
             ),
         ),
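
For reference, the net effect of this constraint rewrite, with the calls copied from the hunk above (matching_facets expands to the tuple shown): instead of only rejecting datasets whose own time axis has gaps, each group must now cover the full 1850-2100 window, which presumably drops incomplete historical-plus-scenario combinations before the recipe is built. Interpretation hedged; the API is used exactly as above:

from climate_ref_core.constraints import PartialDateTime, RequireTimerange

# Before: RequireContiguousTimerange(group_by=("instance_id",)) - no gaps
# within a dataset, but any overall coverage was accepted.
# After: the grouped records must span the whole window.
RequireTimerange(
    group_by=("source_id", "member_id", "grid_label", "table_id", "variable_id"),
    start=PartialDateTime(year=1850, month=1),
    end=PartialDateTime(year=2100, month=12),
)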
@@ -74,13 +90,20 @@
         diagnostics = recipe["diagnostics"]
         for diagnostic in diagnostics.values():
             diagnostic.pop("additional_datasets")
-        recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
+        recipe_variables = dataframe_to_recipe(
+            input_files[SourceDatasetType.CMIP6],
+            group_by=(
+                "source_id",
+                "member_id",
+                "grid_label",
+                "table_id",
+                "variable_id",
+            ),
+        )
         datasets = recipe_variables["tas"]["additional_datasets"]
         datasets = [ds for ds in datasets if ds["exp"] != "historical"]
         for dataset in datasets:
             dataset.pop("timerange")
-            dataset["activity"] = ["CMIP", dataset["activity"]]
-            dataset["exp"] = ["historical", dataset["exp"]]
         recipe["datasets"] = datasets
 
         # Specify the timeranges

climate_ref_esmvaltool/diagnostics/climate_drivers_for_fire.py
@@ -2,8 +2,9 @@ import pandas
 
 from climate_ref_core.constraints import (
     AddSupplementaryDataset,
+    PartialDateTime,
     RequireFacets,
-    RequireOverlappingTimerange,
+    RequireTimerange,
 )
 from climate_ref_core.datasets import FacetFilter, SourceDatasetType
 from climate_ref_core.diagnostics import DataRequirement
@@ -21,32 +22,53 @@ class ClimateDriversForFire(ESMValToolDiagnostic):
     slug = "climate-drivers-for-fire"
     base_recipe = "ref/recipe_ref_fire.yml"
 
-    variables = (
-        "cVeg",
-        "hurs",
-        "pr",
-        "tas",
-        "tasmax",
-        "treeFrac",
-        "vegFrac",
-    )
     data_requirements = (
         DataRequirement(
             source_type=SourceDatasetType.CMIP6,
             filters=(
                 FacetFilter(
-                    facets={
-                        "variable_id": variables,
-                        "frequency": "mon",
+                    {
+                        "variable_id": ("hurs", "pr", "tas", "tasmax"),
+                        "experiment_id": "historical",
+                        "table_id": "Amon",
+                    }
+                ),
+                FacetFilter(
+                    {
+                        "variable_id": ("cVeg", "treeFrac"),
                         "experiment_id": "historical",
+                        "table_id": "Lmon",
+                    }
+                ),
+                FacetFilter(
+                    {
+                        "variable_id": "vegFrac",
+                        "experiment_id": "historical",
+                        "table_id": "Emon",
                     }
                 ),
             ),
             group_by=("source_id", "member_id", "grid_label"),
             constraints=(
-                RequireFacets("variable_id", variables),
-                RequireOverlappingTimerange(group_by=("instance_id",)),
+                RequireTimerange(
+                    group_by=("instance_id",),
+                    start=PartialDateTime(2013, 1),
+                    end=PartialDateTime(2014, 12),
+                ),
                 AddSupplementaryDataset.from_defaults("sftlf", SourceDatasetType.CMIP6),
+                RequireFacets(
+                    "variable_id",
+                    (
+                        "cVeg",
+                        "hurs",
+                        "pr",
+                        "tas",
+                        "tasmax",
+                        "sftlf",
+                        "treeFrac",
+                        "vegFrac",
+                    ),
+                ),
             ),
         ),
     )

climate_ref_esmvaltool/diagnostics/cloud_radiative_effects.py
@@ -2,12 +2,14 @@ import pandas
 
 from climate_ref_core.constraints import (
     AddSupplementaryDataset,
-    RequireContiguousTimerange,
+    PartialDateTime,
     RequireFacets,
     RequireOverlappingTimerange,
+    RequireTimerange,
 )
 from climate_ref_core.datasets import FacetFilter, SourceDatasetType
 from climate_ref_core.diagnostics import DataRequirement
+from climate_ref_core.metric_values.typing import SeriesDefinition
 from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
 from climate_ref_esmvaltool.recipe import dataframe_to_recipe
 from climate_ref_esmvaltool.types import Recipe
@@ -22,8 +24,6 @@ class CloudRadiativeEffects(ESMValToolDiagnostic):
     slug = "cloud-radiative-effects"
     base_recipe = "ref/recipe_ref_cre.yml"
 
-    facets = ()
-
     variables = (
         "rlut",
         "rlutcs",
@@ -37,40 +37,59 @@
                 FacetFilter(
                     facets={
                         "variable_id": variables,
-                        "experiment_id": ("historical",),
+                        "experiment_id": "historical",
+                        "table_id": "Amon",
                     }
                 ),
             ),
             group_by=("source_id", "member_id", "grid_label"),
             constraints=(
-                RequireFacets("variable_id", variables),
-                RequireContiguousTimerange(group_by=("instance_id",)),
+                RequireTimerange(
+                    group_by=("instance_id",),
+                    start=PartialDateTime(1996, 1),
+                    end=PartialDateTime(2014, 12),
+                ),
                 RequireOverlappingTimerange(group_by=("instance_id",)),
+                RequireFacets("variable_id", variables),
                 AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
             ),
         ),
         # TODO: Use CERES-EBAF, ESACCI-CLOUD, and ISCCP-FH from obs4MIPs once available.
     )
 
+    facets = ()
+    series = tuple(
+        SeriesDefinition(
+            file_pattern=f"plot_profiles/plot/variable_vs_lat_{var_name}_*.nc",
+            sel={"dim0": 0},  # Select the model.
+            dimensions={"variable_id": var_name, "statistic": "zonal mean"},
+            values_name=var_name,
+            index_name="lat",
+            attributes=[],
+        )
+        for var_name in ["lwcre", "swcre"]
+    ) + tuple(
+        SeriesDefinition(
+            file_pattern=f"plot_profiles/plot/variable_vs_lat_{var_name}_*.nc",
+            sel={"dim0": i},  # Select the observation.
+            dimensions={"variable_id": var_name, "statistic": "zonal mean", "reference_source_id": source_id},
+            values_name=var_name,
+            index_name="lat",
+            attributes=[],
+        )
+        for var_name in ["lwcre", "swcre"]
+        for i, source_id in enumerate(
+            ["CERES-EBAF-Ed4.2", "ESACCI-CLOUD-AVHRR-AMPM-fv3.0", "ISCCP-FH"], start=1
+        )
+    )
+
     @staticmethod
     def update_recipe(recipe: Recipe, input_files: dict[SourceDatasetType, pandas.DataFrame]) -> None:
         """Update the recipe."""
         recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
         recipe_variables = {k: v for k, v in recipe_variables.items() if k != "areacella"}
 
-        # Select a timerange covered by all datasets.
-        start_times, end_times = [], []
-        for variable in recipe_variables.values():
-            for dataset in variable["additional_datasets"]:
-                start, end = dataset["timerange"].split("/")
-                start_times.append(start)
-                end_times.append(end)
-        start_time = max(start_times)
-        start_time = max(start_time, "20010101T000000")  # Earliest observational dataset availability
-        timerange = f"{start_time}/{min(end_times)}"
-
         datasets = recipe_variables["rsut"]["additional_datasets"]
         for dataset in datasets:
             dataset.pop("timerange")
         recipe["datasets"] = datasets
-        recipe["timerange_for_models"] = timerange
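
The sel={"dim0": ...} entries in the new series definitions assume the plot_profiles output stacks the model and the three reference datasets along a dim0 dimension, model first. A sketch of that selection on synthetic data (only xarray's sel semantics are assumed, not the real file layout):

import numpy as np
import xarray as xr

# Stand-in for a variable_vs_lat_*.nc file: four records along dim0
# (model at index 0, then the three observational references).
ds = xr.Dataset(
    {"lwcre": (("dim0", "lat"), np.arange(12.0).reshape(4, 3))},
    coords={"dim0": [0, 1, 2, 3], "lat": [-60.0, 0.0, 60.0]},
)
print(ds.sel({"dim0": 0})["lwcre"].values)  # the model's zonal-mean series: [0. 1. 2.]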

climate_ref_esmvaltool/diagnostics/cloud_scatterplots.py
@@ -4,9 +4,9 @@ import pandas
 
 from climate_ref_core.constraints import (
     AddSupplementaryDataset,
-    RequireContiguousTimerange,
+    PartialDateTime,
     RequireFacets,
-    RequireOverlappingTimerange,
+    RequireTimerange,
 )
 from climate_ref_core.datasets import FacetFilter, SourceDatasetType
 from climate_ref_core.diagnostics import DataRequirement
@@ -25,15 +25,18 @@ def get_cmip6_data_requirements(variables: tuple[str, ...]) -> tuple[DataRequire
                     facets={
                         "variable_id": variables,
                         "experiment_id": "historical",
+                        "table_id": "Amon",
                     },
                 ),
             ),
             group_by=("source_id", "experiment_id", "member_id", "frequency", "grid_label"),
             constraints=(
+                RequireTimerange(
+                    group_by=("instance_id",),
+                    start=PartialDateTime(1996, 1),
+                    end=PartialDateTime(2014, 12),
+                ),
                 RequireFacets("variable_id", variables),
-                RequireContiguousTimerange(group_by=("instance_id",)),
-                RequireOverlappingTimerange(group_by=("instance_id",)),
-                # TODO: Add a RequireTimeRange constraint to match reference datasets?
                 AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
             ),
         ),
@@ -47,13 +50,16 @@ def update_recipe(
     var_y: str,
 ) -> None:
     """Update the recipe."""
-    recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6], equalize_timerange=True)
+    recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
     diagnostics = recipe["diagnostics"]
     diagnostic_name = f"plot_joint_{var_x}_{var_y}_model"
     diagnostic = diagnostics.pop(diagnostic_name)
     diagnostics.clear()
     diagnostics[diagnostic_name] = diagnostic
+    recipe_variables = {k: v for k, v in recipe_variables.items() if k != "areacella"}
     datasets = next(iter(recipe_variables.values()))["additional_datasets"]
+    for dataset in datasets:
+        dataset["timerange"] = "1996/2014"
     diagnostic["additional_datasets"] = datasets
     suptitle = "CMIP6 {dataset} {ensemble} {grid} {timerange}".format(**datasets[0])
     diagnostic["scripts"]["plot"]["suptitle"] = suptitle
@@ -135,7 +141,13 @@ class CloudScatterplotsReference(ESMValToolDiagnostic):
             ),
         ),
         group_by=("instance_id",),
-        constraints=(RequireContiguousTimerange(group_by=("instance_id",)),),
+        constraints=(
+            RequireTimerange(
+                group_by=("instance_id",),
+                start=PartialDateTime(2007, 1),
+                end=PartialDateTime(2014, 12),
+            ),
+        ),
         # TODO: Add obs4MIPs datasets once available and working:
         #
         # obs4MIPs datasets with issues:

climate_ref_esmvaltool/diagnostics/ecs.py
@@ -11,9 +11,10 @@ from climate_ref_core.constraints import (
 )
 from climate_ref_core.datasets import ExecutionDatasetCollection, FacetFilter, SourceDatasetType
 from climate_ref_core.diagnostics import DataRequirement
+from climate_ref_core.metric_values.typing import SeriesDefinition
 from climate_ref_core.pycmec.metric import CMECMetric, MetricCV
 from climate_ref_core.pycmec.output import CMECOutput
-from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
+from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic, fillvalues_to_nan
 from climate_ref_esmvaltool.recipe import dataframe_to_recipe
 from climate_ref_esmvaltool.types import MetricBundleArgs, OutputBundleArgs, Recipe
 
@@ -45,20 +46,40 @@ class EquilibriumClimateSensitivity(ESMValToolDiagnostic):
                     facets={
                         "variable_id": variables,
                         "experiment_id": experiments,
+                        "table_id": "Amon",
                     },
                 ),
             ),
             group_by=("source_id", "member_id", "grid_label"),
             constraints=(
-                RequireFacets("variable_id", variables),
-                RequireFacets("experiment_id", experiments),
                 RequireContiguousTimerange(group_by=("instance_id",)),
                 RequireOverlappingTimerange(group_by=("instance_id",)),
+                RequireFacets(
+                    "variable_id",
+                    required_facets=variables,
+                    group_by=("source_id", "member_id", "grid_label", "experiment_id"),
+                ),
+                RequireFacets(
+                    "experiment_id",
+                    required_facets=experiments,
+                    group_by=("source_id", "member_id", "grid_label", "variable_id"),
+                ),
                 AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
             ),
         ),
     )
     facets = ("grid_label", "member_id", "source_id", "region", "metric")
+    series = (
+        SeriesDefinition(
+            file_pattern="ecs/calculate/ecs_regression_*.nc",
+            dimensions={
+                "statistic": ("global annual mean anomaly of rtnt vs tas"),
+            },
+            values_name="rtnt_anomaly",
+            index_name="tas_anomaly",
+            attributes=[],
+        ),
+    )
 
     @staticmethod
     def update_recipe(
@@ -118,9 +139,9 @@
     ) -> tuple[CMECMetric, CMECOutput]:
         """Format the result."""
         ecs_ds = xarray.open_dataset(result_dir / "work" / "ecs" / "calculate" / "ecs.nc")
-        ecs = float(ecs_ds["ecs"].values[0])
+        ecs = float(fillvalues_to_nan(ecs_ds["ecs"].values)[0])
         lambda_ds = xarray.open_dataset(result_dir / "work" / "ecs" / "calculate" / "lambda.nc")
-        lambda_ = float(lambda_ds["lambda"].values[0])
+        lambda_ = float(fillvalues_to_nan(lambda_ds["lambda"].values)[0])
 
         # Update the diagnostic bundle arguments with the computed diagnostics.
         metric_args[MetricCV.DIMENSIONS.value] = {
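
These call sites are where the new base.py helper pays off: if the file carries the netCDF default fill value without a _FillValue attribute (pydata/xarray#2742), the old code would report roughly 9.97e+36 as the ECS, while the wrapped version reports NaN. A sketch reusing the helper added in base.py above:

import netCDF4
import numpy as np

from climate_ref_esmvaltool.diagnostics.base import fillvalues_to_nan

values = np.array([netCDF4.default_fillvals["f8"]])  # what xarray may hand back
print(float(values[0]))                     # 9.969209968386869e+36 (misleading)
print(float(fillvalues_to_nan(values)[0]))  # nan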