climate-ref-esmvaltool 0.6.5__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. climate_ref_esmvaltool/dataset_registry/data.txt +4 -0
  2. climate_ref_esmvaltool/diagnostics/__init__.py +22 -0
  3. climate_ref_esmvaltool/diagnostics/base.py +112 -14
  4. climate_ref_esmvaltool/diagnostics/climate_at_global_warming_levels.py +41 -15
  5. climate_ref_esmvaltool/diagnostics/climate_drivers_for_fire.py +90 -0
  6. climate_ref_esmvaltool/diagnostics/cloud_radiative_effects.py +26 -20
  7. climate_ref_esmvaltool/diagnostics/cloud_scatterplots.py +200 -0
  8. climate_ref_esmvaltool/diagnostics/ecs.py +32 -20
  9. climate_ref_esmvaltool/diagnostics/enso.py +88 -12
  10. climate_ref_esmvaltool/diagnostics/example.py +17 -3
  11. climate_ref_esmvaltool/diagnostics/regional_historical_changes.py +435 -0
  12. climate_ref_esmvaltool/diagnostics/sea_ice_area_basic.py +63 -5
  13. climate_ref_esmvaltool/diagnostics/sea_ice_sensitivity.py +31 -14
  14. climate_ref_esmvaltool/diagnostics/tcr.py +23 -19
  15. climate_ref_esmvaltool/diagnostics/tcre.py +13 -12
  16. climate_ref_esmvaltool/diagnostics/zec.py +20 -3
  17. climate_ref_esmvaltool/recipe.py +55 -10
  18. climate_ref_esmvaltool/recipes.txt +16 -11
  19. climate_ref_esmvaltool/requirements/conda-lock.yml +4081 -3770
  20. climate_ref_esmvaltool/requirements/environment.yml +1 -0
  21. {climate_ref_esmvaltool-0.6.5.dist-info → climate_ref_esmvaltool-0.7.0.dist-info}/METADATA +1 -1
  22. climate_ref_esmvaltool-0.7.0.dist-info/RECORD +30 -0
  23. climate_ref_esmvaltool-0.6.5.dist-info/RECORD +0 -27
  24. {climate_ref_esmvaltool-0.6.5.dist-info → climate_ref_esmvaltool-0.7.0.dist-info}/WHEEL +0 -0
  25. {climate_ref_esmvaltool-0.6.5.dist-info → climate_ref_esmvaltool-0.7.0.dist-info}/entry_points.txt +0 -0
  26. {climate_ref_esmvaltool-0.6.5.dist-info → climate_ref_esmvaltool-0.7.0.dist-info}/licenses/LICENCE +0 -0
  27. {climate_ref_esmvaltool-0.6.5.dist-info → climate_ref_esmvaltool-0.7.0.dist-info}/licenses/NOTICE +0 -0
climate_ref_esmvaltool/diagnostics/regional_historical_changes.py (new file)
@@ -0,0 +1,435 @@
+ import copy
+ from pathlib import Path
+
+ import numpy as np
+ import pandas
+ import xarray
+
+ from climate_ref_core.constraints import (
+     AddSupplementaryDataset,
+     PartialDateTime,
+     RequireFacets,
+     RequireTimerange,
+ )
+ from climate_ref_core.datasets import ExecutionDatasetCollection, FacetFilter, SourceDatasetType
+ from climate_ref_core.diagnostics import DataRequirement
+ from climate_ref_core.metric_values.typing import SeriesDefinition
+ from climate_ref_core.pycmec.metric import CMECMetric, MetricCV
+ from climate_ref_core.pycmec.output import CMECOutput
+ from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
+ from climate_ref_esmvaltool.recipe import dataframe_to_recipe
+ from climate_ref_esmvaltool.types import MetricBundleArgs, OutputBundleArgs, Recipe
+
+ REGIONS = (
+     "Arabian-Peninsula",
+     "Arabian-Sea",
+     "Arctic-Ocean",
+     "Bay-of-Bengal",
+     "C.Australia",
+     "C.North-America",
+     "Caribbean",
+     "Central-Africa",
+     "E.Antarctica",
+     "E.Asia",
+     "E.Australia",
+     "E.C.Asia",
+     "E.Europe",
+     "E.North-America",
+     "E.Siberia",
+     "E.Southern-Africa",
+     "Equatorial.Atlantic-Ocean",
+     "Equatorial.Indic-Ocean",
+     "Equatorial.Pacific-Ocean",
+     "Greenland/Iceland",
+     "Madagascar",
+     "Mediterranean",
+     "N.Atlantic-Ocean",
+     "N.Australia",
+     "N.Central-America",
+     "N.E.North-America",
+     "N.E.South-America",
+     "N.Eastern-Africa",
+     "N.Europe",
+     "N.Pacific-Ocean",
+     "N.South-America",
+     "N.W.North-America",
+     "N.W.South-America",
+     "New-Zealand",
+     "Russian-Arctic",
+     "Russian-Far-East",
+     "S.Asia",
+     "S.Atlantic-Ocean",
+     "S.Australia",
+     "S.Central-America",
+     "S.E.Asia",
+     "S.E.South-America",
+     "S.Eastern-Africa",
+     "S.Indic-Ocean",
+     "S.Pacific-Ocean",
+     "S.South-America",
+     "S.W.South-America",
+     "Sahara",
+     "South-American-Monsoon",
+     "Southern-Ocean",
+     "Tibetan-Plateau",
+     "W.Antarctica",
+     "W.C.Asia",
+     "W.North-America",
+     "W.Siberia",
+     "W.Southern-Africa",
+     "West&Central-Europe",
+     "Western-Africa",
+ )
+
+
+ def normalize_region(region: str) -> str:
+     """Normalize region name so it can be used in filenames."""
+     return region.replace("&", "-and-").replace("/", "-and-")
+
+
+ class RegionalHistoricalAnnualCycle(ESMValToolDiagnostic):
+     """
+     Plot regional historical annual cycle of climate variables.
+     """
+
+     name = "Regional historical annual cycle of climate variables"
+     slug = "regional-historical-annual-cycle"
+     base_recipe = "ref/recipe_ref_annual_cycle_region.yml"
+
+     variables = (
+         "hus",
+         "pr",
+         "psl",
+         "tas",
+         "ua",
+     )
+
+     data_requirements = (
+         DataRequirement(
+             source_type=SourceDatasetType.CMIP6,
+             filters=(
+                 FacetFilter(
+                     facets={
+                         "variable_id": variables,
+                         "experiment_id": "historical",
+                         "table_id": "Amon",
+                     },
+                 ),
+             ),
+             group_by=("source_id", "member_id", "grid_label"),
+             constraints=(
+                 RequireTimerange(
+                     group_by=("instance_id",),
+                     start=PartialDateTime(1980, 1),
+                     end=PartialDateTime(2009, 12),
+                 ),
+                 RequireFacets("variable_id", variables),
+                 AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
+             ),
+         ),
+         DataRequirement(
+             source_type=SourceDatasetType.obs4MIPs,
+             filters=(
+                 FacetFilter(
+                     facets={
+                         "variable_id": (
+                             "psl",
+                             "ua",
+                         ),
+                         "source_id": "ERA-5",
+                         "frequency": "mon",
+                     },
+                 ),
+             ),
+             group_by=("source_id",),
+             constraints=(
+                 RequireTimerange(
+                     group_by=("instance_id",),
+                     start=PartialDateTime(1980, 1),
+                     end=PartialDateTime(2009, 12),
+                 ),
+                 RequireFacets("variable_id", ("psl", "ua")),
+             ),
+             # TODO: Add obs4MIPs datasets once available and working:
+             #
+             # obs4MIPs dataset that cannot be ingested (https://github.com/Climate-REF/climate-ref/issues/260):
+             # - GPCP-V2.3: pr
+             #
+             # Not yet available on obs4MIPs:
+             # - ERA5: hus
+             # - HadCRUT5_ground_5.0.1.0-analysis: tas
+         ),
+     )
+
+     facets = ()
+     series = tuple(
+         SeriesDefinition(
+             file_pattern=f"anncyc-{region}/allplots/*_{var_name}_*.nc",
+             sel={"dim0": 0},  # Select the model and not the observation.
+             dimensions={"region": region, "statistic": f"{var_name} regional mean"},
+             values_name=var_name,
+             index_name="month_number",
+             attributes=[],
+         )
+         for var_name in variables
+         for region in REGIONS
+     )
+
+     @staticmethod
+     def update_recipe(
+         recipe: Recipe,
+         input_files: dict[SourceDatasetType, pandas.DataFrame],
+     ) -> None:
+         """Update the recipe."""
+         # Update the dataset.
+         recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
+         dataset = recipe_variables["hus"]["additional_datasets"][0]
+         dataset.pop("timerange")
+         dataset["benchmark_dataset"] = True
+         dataset["plot_label"] = "{dataset}.{ensemble}.{grid}".format(**dataset)
+         recipe["datasets"] = [dataset]
+
+         # Generate diagnostics for each region.
+         diagnostics = {}
+         for region in REGIONS:
+             for diagnostic_name, orig_diagnostic in recipe["diagnostics"].items():
+                 # Create the diagnostic for the region.
+                 diagnostic = copy.deepcopy(orig_diagnostic)
+                 normalized_region = normalize_region(region)
+                 diagnostics[f"{diagnostic_name}-{normalized_region}"] = diagnostic
+
+                 for variable in diagnostic["variables"].values():
+                     # Remove unwanted facets that are part of the dataset.
+                     for facet in ("project", "exp", "ensemble", "grid"):
+                         variable.pop(facet, None)
+                     # Update the preprocessor so it extracts the region.
+                     preprocessor_name = variable["preprocessor"]
+                     preprocessor = copy.deepcopy(recipe["preprocessors"][preprocessor_name])
+                     preprocessor["extract_shape"]["ids"] = {"Name": [region]}
+                     variable["preprocessor"] = f"{preprocessor_name}-{normalized_region}"
+                     recipe["preprocessors"][variable["preprocessor"]] = preprocessor
+
+                 # Update plot titles with region name.
+                 for script in diagnostic["scripts"].values():
+                     for plot in script["plots"].values():
+                         plot["pyplot_kwargs"] = {"title": f"{{long_name}} {region}"}
+         recipe["diagnostics"] = diagnostics
+
+
+ class RegionalHistoricalTimeSeries(RegionalHistoricalAnnualCycle):
+     """
+     Plot regional historical mean and anomaly of climate variables.
+     """
+
+     name = "Regional historical mean and anomaly of climate variables"
+     slug = "regional-historical-timeseries"
+     base_recipe = "ref/recipe_ref_timeseries_region.yml"
+
+     variables = (
+         "hus",
+         "pr",
+         "psl",
+         "tas",
+         "ua",
+     )
+
+     data_requirements = (
+         DataRequirement(
+             source_type=SourceDatasetType.CMIP6,
+             filters=(
+                 FacetFilter(
+                     facets={
+                         "variable_id": variables,
+                         "experiment_id": "historical",
+                         "table_id": "Amon",
+                     },
+                 ),
+             ),
+             group_by=("source_id", "member_id", "grid_label"),
+             constraints=(
+                 RequireTimerange(
+                     group_by=("instance_id",),
+                     start=PartialDateTime(1980, 1),
+                     end=PartialDateTime(2014, 12),
+                 ),
+                 RequireFacets("variable_id", variables),
+                 AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
+             ),
+         ),
+         DataRequirement(
+             source_type=SourceDatasetType.obs4MIPs,
+             filters=(
+                 FacetFilter(
+                     facets={
+                         "variable_id": (
+                             "psl",
+                             "ua",
+                         ),
+                         "source_id": "ERA-5",
+                         "frequency": "mon",
+                     },
+                 ),
+             ),
+             group_by=("source_id",),
+             constraints=(
+                 RequireTimerange(
+                     group_by=("instance_id",),
+                     start=PartialDateTime(1980, 1),
+                     end=PartialDateTime(2014, 12),
+                 ),
+             ),
+             # TODO: Add obs4MIPs datasets once available and working:
+             #
+             # obs4MIPs dataset that cannot be ingested (https://github.com/Climate-REF/climate-ref/issues/260):
+             # - GPCP-V2.3: pr
+             #
+             # Not yet available on obs4MIPs:
+             # - ERA5: hus
+             # - HadCRUT5_ground_5.0.1.0-analysis: tas
+         ),
+     )
+
+     series = tuple(
+         SeriesDefinition(
+             file_pattern=f"{diagnostic}-{region}/allplots/*_{var_name}_*.nc",
+             sel={"dim0": 0},  # Select the model and not the observation.
+             dimensions={
+                 "region": region,
+                 "statistic": (
+                     f"{var_name} regional mean"
+                     if diagnostic == "timeseries_abs"
+                     else f"{var_name} regional mean anomaly"
+                 ),
+             },
+             values_name=var_name,
+             index_name="time",
+             attributes=[],
+         )
+         for var_name in variables
+         for region in REGIONS
+         for diagnostic in ["timeseries_abs", "timeseries"]
+     )
+
+
+ class RegionalHistoricalTrend(ESMValToolDiagnostic):
+     """
+     Plot regional historical trend of climate variables.
+     """
+
+     name = "Regional historical trend of climate variables"
+     slug = "regional-historical-trend"
+     base_recipe = "ref/recipe_ref_trend_regions.yml"
+
+     data_requirements = (
+         DataRequirement(
+             source_type=SourceDatasetType.CMIP6,
+             filters=(
+                 FacetFilter(
+                     facets={
+                         "variable_id": (
+                             "hus",
+                             "pr",
+                             "psl",
+                             "tas",
+                             "ua",
+                         ),
+                         "experiment_id": "historical",
+                         "table_id": "Amon",
+                     },
+                 ),
+             ),
+             group_by=("source_id", "member_id", "grid_label"),
+             constraints=(
+                 RequireTimerange(
+                     group_by=("instance_id",),
+                     start=PartialDateTime(1980, 1),
+                     end=PartialDateTime(2009, 12),
+                 ),
+                 AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
+             ),
+         ),
+         DataRequirement(
+             source_type=SourceDatasetType.obs4MIPs,
+             filters=(
+                 FacetFilter(
+                     facets={
+                         "variable_id": (
+                             "psl",
+                             "tas",
+                             "ua",
+                         ),
+                         "source_id": "ERA-5",
+                         "frequency": "mon",
+                     },
+                 ),
+             ),
+             group_by=("source_id",),
+             constraints=(
+                 RequireTimerange(
+                     group_by=("instance_id",),
+                     start=PartialDateTime(1980, 1),
+                     end=PartialDateTime(2009, 12),
+                 ),
+             ),
+             # TODO: Add obs4MIPs datasets once available and working:
+             #
+             # obs4MIPs dataset that cannot be ingested (https://github.com/Climate-REF/climate-ref/issues/260):
+             # - GPCP-V2.3: pr
+             #
+             # Not yet available on obs4MIPs:
+             # - ERA5: hus
+             # - HadCRUT5_ground_5.0.1.0-analysis: tas
+         ),
+     )
+     facets = ("grid_label", "member_id", "source_id", "variable_id", "region", "metric")
+
+     @staticmethod
+     def update_recipe(
+         recipe: Recipe,
+         input_files: dict[SourceDatasetType, pandas.DataFrame],
+     ) -> None:
+         """Update the recipe."""
+         recipe["datasets"] = []
+         recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
+         diagnostics = {}
+         for diagnostic_name, diagnostic in recipe["diagnostics"].items():
+             for variable_name, variable in diagnostic["variables"].items():
+                 if variable_name not in recipe_variables:
+                     continue
+                 dataset = recipe_variables[variable_name]["additional_datasets"][0]
+                 dataset.pop("timerange")
+                 variable["additional_datasets"].append(dataset)
+                 diagnostics[diagnostic_name] = diagnostic
+         recipe["diagnostics"] = diagnostics
+
+     @classmethod
+     def format_result(
+         cls,
+         result_dir: Path,
+         execution_dataset: ExecutionDatasetCollection,
+         metric_args: MetricBundleArgs,
+         output_args: OutputBundleArgs,
+     ) -> tuple[CMECMetric, CMECOutput]:
+         """Format the result."""
+         metric_args[MetricCV.DIMENSIONS.value] = {
+             "json_structure": ["variable_id", "region", "metric"],
+             "variable_id": {},
+             "region": {},
+             "metric": {"trend": {}},
+         }
+         for file in result_dir.glob("work/*_trends/plot/seaborn_barplot.nc"):
+             ds = xarray.open_dataset(file)
+             source_id = execution_dataset[SourceDatasetType.CMIP6].source_id.iloc[0]
+             select = source_id == np.array([s.strip() for s in ds.dataset.values.astype(str).tolist()])
+             ds = ds.isel(dim0=select)
+             variable_id = next(iter(ds.data_vars.keys()))
+             metric_args[MetricCV.DIMENSIONS.value]["variable_id"][variable_id] = {}
+             metric_args[MetricCV.RESULTS.value][variable_id] = {}
+             for region_value, trend_value in zip(ds.shape_id.astype(str).values, ds[variable_id].values):
+                 region = region_value.strip()
+                 trend = float(trend_value)
+                 if region not in metric_args[MetricCV.DIMENSIONS.value]["region"]:
+                     metric_args[MetricCV.DIMENSIONS.value]["region"][region] = {}
+                 metric_args[MetricCV.RESULTS.value][variable_id][region] = {"trend": trend}
+
+         return CMECMetric.model_validate(metric_args), CMECOutput.model_validate(output_args)
climate_ref_esmvaltool/diagnostics/sea_ice_area_basic.py
@@ -2,10 +2,13 @@ import pandas

  from climate_ref_core.constraints import (
      AddSupplementaryDataset,
-     RequireContiguousTimerange,
+     PartialDateTime,
+     RequireFacets,
+     RequireTimerange,
  )
  from climate_ref_core.datasets import FacetFilter, SourceDatasetType
  from climate_ref_core.diagnostics import DataRequirement
+ from climate_ref_core.metric_values.typing import SeriesDefinition
  from climate_ref_esmvaltool.diagnostics.base import ESMValToolDiagnostic
  from climate_ref_esmvaltool.recipe import dataframe_to_recipe
  from climate_ref_esmvaltool.types import Recipe
@@ -28,24 +31,79 @@ class SeaIceAreaBasic(ESMValToolDiagnostic):
                      facets={
                          "variable_id": "siconc",
                          "experiment_id": "historical",
+                         "table_id": "SImon",
                      },
                  ),
              ),
-             group_by=("instance_id",),
+             group_by=("source_id", "member_id", "grid_label"),
              constraints=(
-                 RequireContiguousTimerange(group_by=("instance_id",)),
+                 RequireTimerange(
+                     group_by=("instance_id",),
+                     start=PartialDateTime(1979, 1),
+                     end=PartialDateTime(2014, 12),
+                 ),
                  AddSupplementaryDataset.from_defaults("areacello", SourceDatasetType.CMIP6),
+                 RequireFacets("variable_id", ("siconc", "areacello")),
              ),
          ),
          # TODO: Use OSI-450-nh and OSI-450-sh from obs4MIPs once available.
      )
      facets = ()
+     series = (
+         SeriesDefinition(
+             file_pattern="siarea_min/allplots/timeseries_sea_ice_area_nh_*.nc",
+             sel={"dim0": 0},  # Select the model and not the observations.
+             dimensions={
+                 "region": "Northern Hemisphere",
+                 "statistic": "September sea ice area",
+             },
+             values_name="siconc",
+             index_name="time",
+             attributes=[],
+         ),
+         SeriesDefinition(
+             file_pattern="siarea_min/allplots/timeseries_sea_ice_area_sh_*.nc",
+             sel={"dim0": 0},  # Select the model and not the observations.
+             dimensions={
+                 "region": "Southern Hemisphere",
+                 "statistic": "February sea ice area",
+             },
+             values_name="siconc",
+             index_name="time",
+             attributes=[],
+         ),
+         SeriesDefinition(
+             file_pattern="siarea_seas/allplots/annual_cycle_sea_ice_area_nh_*.nc",
+             sel={"dim0": 0},  # Select the model and not the observations.
+             dimensions={
+                 "region": "Northern Hemisphere",
+                 "statistic": "20-year average seasonal cycle of the sea ice area",
+             },
+             values_name="siconc",
+             index_name="month_number",
+             attributes=[],
+         ),
+         SeriesDefinition(
+             file_pattern="siarea_seas/allplots/annual_cycle_sea_ice_area_sh_*.nc",
+             sel={"dim0": 0},  # Select the model and not the observations.
+             dimensions={
+                 "region": "Southern Hemisphere",
+                 "statistic": "20-year average seasonal cycle of the sea ice area",
+             },
+             values_name="siconc",
+             index_name="month_number",
+             attributes=[],
+         ),
+     )

      @staticmethod
-     def update_recipe(recipe: Recipe, input_files: pandas.DataFrame) -> None:
+     def update_recipe(
+         recipe: Recipe,
+         input_files: dict[SourceDatasetType, pandas.DataFrame],
+     ) -> None:
          """Update the recipe."""
          # Update datasets
-         recipe_variables = dataframe_to_recipe(input_files)
+         recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
          recipe["datasets"] = recipe_variables["siconc"]["additional_datasets"]

          # Use the timerange from the recipe, as defined in the variable.
climate_ref_esmvaltool/diagnostics/sea_ice_sensitivity.py
@@ -5,8 +5,9 @@ import pandas as pd

  from climate_ref_core.constraints import (
      AddSupplementaryDataset,
-     RequireContiguousTimerange,
+     PartialDateTime,
      RequireFacets,
+     RequireTimerange,
  )
  from climate_ref_core.datasets import ExecutionDatasetCollection, FacetFilter, SourceDatasetType
  from climate_ref_core.diagnostics import DataRequirement
@@ -26,40 +27,56 @@ class SeaIceSensitivity(ESMValToolDiagnostic):
      slug = "sea-ice-sensitivity"
      base_recipe = "recipe_seaice_sensitivity.yml"

-     variables = (
-         "siconc",
-         "tas",
-     )
-
      data_requirements = (
          DataRequirement(
              source_type=SourceDatasetType.CMIP6,
              filters=(
                  FacetFilter(
                      facets={
-                         "variable_id": variables,
+                         "variable_id": "siconc",
+                         "experiment_id": "historical",
+                         "table_id": "SImon",
+                     },
+                 ),
+                 FacetFilter(
+                     facets={
+                         "variable_id": "tas",
                          "experiment_id": "historical",
+                         "table_id": "Amon",
                      },
                  ),
              ),
              group_by=("experiment_id",),  # this does nothing, but group_by cannot be empty
              constraints=(
+                 RequireTimerange(
+                     group_by=("instance_id",),
+                     start=PartialDateTime(1979, 1),
+                     end=PartialDateTime(2014, 12),
+                 ),
+                 RequireFacets(
+                     "variable_id",
+                     required_facets=("siconc", "tas"),
+                     group_by=("source_id", "member_id", "grid_label"),
+                 ),
                  AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6),
                  AddSupplementaryDataset.from_defaults("areacello", SourceDatasetType.CMIP6),
-                 RequireContiguousTimerange(group_by=("instance_id",)),
-                 RequireFacets("variable_id", variables),
-                 # TODO: Add a constraint to ensure that tas, siconc and areacello
-                 # are available for each model or alternatively filter out
-                 # incomplete models below.
+                 RequireFacets(
+                     "variable_id",
+                     required_facets=("areacello",),
+                     group_by=("source_id", "grid_label"),
+                 ),
              ),
          ),
      )
      facets = ("experiment_id", "source_id", "region", "metric")

      @staticmethod
-     def update_recipe(recipe: Recipe, input_files: pandas.DataFrame) -> None:
+     def update_recipe(
+         recipe: Recipe,
+         input_files: dict[SourceDatasetType, pandas.DataFrame],
+     ) -> None:
          """Update the recipe."""
-         recipe_variables = dataframe_to_recipe(input_files)
+         recipe_variables = dataframe_to_recipe(input_files[SourceDatasetType.CMIP6])
          datasets = recipe_variables["tas"]["additional_datasets"]
          for dataset in datasets:
              dataset.pop("mip")