climate-ref-core 0.7.0__tar.gz → 0.8.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/PKG-INFO +1 -1
  2. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/pyproject.toml +1 -1
  3. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/constraints.py +29 -1
  4. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/metric_values/typing.py +29 -7
  5. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/providers.py +51 -1
  6. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/pycmec/metric.py +12 -4
  7. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_constraints.py +53 -0
  8. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_providers.py +72 -4
  9. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/.gitignore +0 -0
  10. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/LICENCE +0 -0
  11. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/NOTICE +0 -0
  12. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/README.md +0 -0
  13. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/__init__.py +0 -0
  14. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/dataset_registry.py +0 -0
  15. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/datasets.py +0 -0
  16. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/diagnostics.py +0 -0
  17. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/env.py +0 -0
  18. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/exceptions.py +0 -0
  19. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/executor.py +0 -0
  20. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/logging.py +0 -0
  21. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/metric_values/__init__.py +0 -0
  22. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/py.typed +0 -0
  23. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/pycmec/README.md +0 -0
  24. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/pycmec/__init__.py +0 -0
  25. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/pycmec/controlled_vocabulary.py +0 -0
  26. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/pycmec/cv_cmip7_aft.yaml +0 -0
  27. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/src/climate_ref_core/pycmec/output.py +0 -0
  28. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/metric_values/test_typing.py +0 -0
  29. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/pycmec/cmec_testdata/cmec_metric_sample.json +0 -0
  30. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/pycmec/cmec_testdata/cmec_output_sample.json +0 -0
  31. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/pycmec/cmec_testdata/cv_sample.yaml +0 -0
  32. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/pycmec/cmec_testdata/test_metric_json_schema.yml +0 -0
  33. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/pycmec/cmec_testdata/test_output_json_schema.yml +0 -0
  34. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/pycmec/conftest.py +0 -0
  35. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/pycmec/test_cmec_metric.py +0 -0
  36. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/pycmec/test_cmec_output.py +0 -0
  37. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/pycmec/test_controlled_vocabulary.py +0 -0
  38. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_dataset_registry/test_dataset_registry.py +0 -0
  39. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_datasets/dataset_collection_hash.yml +0 -0
  40. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_datasets/dataset_collection_obs4mips_hash.yml +0 -0
  41. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_datasets/execution_dataset_hash.yml +0 -0
  42. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_datasets.py +0 -0
  43. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_diagnostics.py +0 -0
  44. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_exceptions.py +0 -0
  45. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_executor.py +0 -0
  46. {climate_ref_core-0.7.0 → climate_ref_core-0.8.0}/tests/unit/test_logging.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: climate-ref-core
3
- Version: 0.7.0
3
+ Version: 0.8.0
4
4
  Summary: Core library for the CMIP Rapid Evaluation Framework
5
5
  Author-email: Jared Lewis <jared.lewis@climate-resource.com>, Mika Pflueger <mika.pflueger@climate-resource.com>, Bouwe Andela <b.andela@esciencecenter.nl>, Jiwoo Lee <lee1043@llnl.gov>, Min Xu <xum1@ornl.gov>, Nathan Collier <collierno@ornl.gov>, Dora Hegedus <dora.hegedus@stfc.ac.uk>
6
6
  License-Expression: Apache-2.0
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "climate-ref-core"
3
- version = "0.7.0"
3
+ version = "0.8.0"
4
4
  description = "Core library for the CMIP Rapid Evaluation Framework"
5
5
  readme = "README.md"
6
6
  authors = [
@@ -100,7 +100,7 @@ def apply_constraint(
100
100
 
101
101
  def _to_tuple(value: None | str | tuple[str, ...]) -> tuple[str, ...]:
102
102
  """
103
- Clean the value of group_by to a tuple of strings
103
+ Normalize value to a tuple of strings.
104
104
  """
105
105
  if value is None:
106
106
  return ()
@@ -109,6 +109,13 @@ def _to_tuple(value: None | str | tuple[str, ...]) -> tuple[str, ...]:
109
109
  return tuple(value)
110
110
 
111
111
 
112
+ def _to_tuple_dict(value: dict[str, str | tuple[str, ...]]) -> dict[str, tuple[str, ...]]:
113
+ """
114
+ Normalize value to a dict of tuples of strings.
115
+ """
116
+ return {k: _to_tuple(v) for k, v in value.items()}
117
+
118
+
112
119
  @frozen
113
120
  class RequireFacets:
114
121
  """
@@ -153,6 +160,27 @@ class RequireFacets:
153
160
  return group[select]
154
161
 
155
162
 
163
+ @frozen
164
+ class IgnoreFacets:
165
+ """
166
+ A constraint that ignores certain facet values.
167
+
168
+ Datasets with these facet values are removed from the selection.
169
+ """
170
+
171
+ facets: dict[str, str | tuple[str, ...]] = field(converter=_to_tuple_dict)
172
+ """The facet values to ignore."""
173
+
174
+ def apply(self, group: pd.DataFrame, data_catalog: pd.DataFrame) -> pd.DataFrame:
175
+ """
176
+ Filter out datasets with the ignored facets.
177
+ """
178
+ mask = group[list(self.facets)].isin(self.facets).all(axis="columns")
179
+ if mask.any():
180
+ logger.debug(f"Ignoring files {', '.join(group.loc[mask, 'path'])} because of {self}")
181
+ return group[~mask]
182
+
183
+
156
184
  @frozen
157
185
  class AddSupplementaryDataset:
158
186
  """
@@ -3,7 +3,8 @@ from collections.abc import Sequence
3
3
  from pathlib import Path
4
4
  from typing import Any, Self
5
5
 
6
- from pydantic import BaseModel, model_validator
6
+ import numpy as np
7
+ from pydantic import BaseModel, field_validator, model_validator
7
8
 
8
9
  Value = float | int
9
10
 
@@ -64,20 +65,35 @@ class SeriesMetricValue(BaseModel):
64
65
  This is used for presentation purposes and is not used in the controlled vocabulary.
65
66
  """
66
67
 
67
- attributes: dict[str, str | Value] | None = None
68
+ attributes: dict[str, str | Value | None] | None = None
68
69
  """
69
70
  Additional unstructured attributes associated with the metric value
70
71
  """
71
72
 
72
73
  @model_validator(mode="after")
73
- def validate_index_length(self) -> Self:
74
- """Validate that index has the same length as values"""
74
+ def validate_index(self) -> Self:
75
+ """Validate that index has the same length as values and contains no NaNs"""
75
76
  if len(self.index) != len(self.values):
76
77
  raise ValueError(
77
78
  f"Index length ({len(self.index)}) must match values length ({len(self.values)})"
78
79
  )
80
+ for v in self.index:
81
+ if isinstance(v, float) and not np.isfinite(v):
82
+ raise ValueError("NaN or Inf values are not allowed in the index")
79
83
  return self
80
84
 
85
+ @field_validator("values", mode="before")
86
+ @classmethod
87
+ def validate_values(cls, value: Any) -> Any:
88
+ """
89
+ Transform None values to NaN in the values field
90
+ """
91
+ if not isinstance(value, (list, tuple)):
92
+ raise ValueError("`values` must be a list or tuple.")
93
+
94
+ # Transform None values to NaN
95
+ return [float("nan") if v is None else v for v in value]
96
+
81
97
  @classmethod
82
98
  def dump_to_json(cls, path: Path, series: Sequence["SeriesMetricValue"]) -> None:
83
99
  """
@@ -94,7 +110,13 @@ class SeriesMetricValue(BaseModel):
94
110
  The series values to dump.
95
111
  """
96
112
  with open(path, "w") as f:
97
- json.dump([s.model_dump() for s in series], f, indent=2)
113
+ json.dump(
114
+ [s.model_dump(mode="json") for s in series],
115
+ f,
116
+ indent=2,
117
+ allow_nan=False,
118
+ sort_keys=True,
119
+ )
98
120
 
99
121
  @classmethod
100
122
  def load_from_json(
@@ -102,7 +124,7 @@ class SeriesMetricValue(BaseModel):
102
124
  path: Path,
103
125
  ) -> list["SeriesMetricValue"]:
104
126
  """
105
- Dump a sequence of SeriesMetricValue to a JSON file.
127
+ Load a sequence of SeriesMetricValue from a JSON file.
106
128
 
107
129
  Parameters
108
130
  ----------
@@ -115,7 +137,7 @@ class SeriesMetricValue(BaseModel):
115
137
  if not isinstance(data, list):
116
138
  raise ValueError(f"Expected a list of series values, got {type(data)}")
117
139
 
118
- return [cls.model_validate(s) for s in data]
140
+ return [cls.model_validate(s, strict=True) for s in data]
119
141
 
120
142
 
121
143
  class ScalarMetricValue(BaseModel):
@@ -16,14 +16,18 @@ import os
16
16
  import stat
17
17
  import subprocess
18
18
  from abc import abstractmethod
19
- from collections.abc import Iterable
19
+ from collections.abc import Iterable, Sequence
20
20
  from contextlib import AbstractContextManager
21
21
  from pathlib import Path
22
22
  from typing import TYPE_CHECKING
23
23
 
24
24
  import requests
25
+ import yaml
26
+ from attrs import evolve
25
27
  from loguru import logger
26
28
 
29
+ from climate_ref_core.constraints import IgnoreFacets
30
+ from climate_ref_core.datasets import SourceDatasetType
27
31
  from climate_ref_core.diagnostics import Diagnostic
28
32
  from climate_ref_core.exceptions import InvalidDiagnosticException, InvalidProviderException
29
33
 
@@ -74,6 +78,51 @@ class DiagnosticProvider:
74
78
  config :
75
79
  A configuration.
76
80
  """
81
+ logger.debug(
82
+ f"Configuring provider {self.slug} using ignore_datasets_file {config.ignore_datasets_file}"
83
+ )
84
+ # The format of the configuration file is:
85
+ # provider:
86
+ # diagnostic:
87
+ # source_type:
88
+ # - facet: value
89
+ # - other_facet: [other_value1, other_value2]
90
+ ignore_datasets_all = yaml.safe_load(config.ignore_datasets_file.read_text(encoding="utf-8")) or {}
91
+ ignore_datasets = ignore_datasets_all.get(self.slug, {})
92
+ if unknown_slugs := {slug for slug in ignore_datasets} - {d.slug for d in self.diagnostics()}:
93
+ logger.warning(
94
+ f"Unknown diagnostics found in {config.ignore_datasets_file} "
95
+ f"for provider {self.slug}: {', '.join(sorted(unknown_slugs))}"
96
+ )
97
+
98
+ known_source_types = {s.value for s in iter(SourceDatasetType)}
99
+ for diagnostic in self.diagnostics():
100
+ if diagnostic.slug in ignore_datasets:
101
+ if unknown_source_types := set(ignore_datasets[diagnostic.slug]) - known_source_types:
102
+ logger.warning(
103
+ f"Unknown source types found in {config.ignore_datasets_file} for "
104
+ f"diagnostic '{diagnostic.slug}' by provider {self.slug}: "
105
+ f"{', '.join(sorted(unknown_source_types))}"
106
+ )
107
+ data_requirements = (
108
+ r if isinstance(r, Sequence) else (r,) for r in diagnostic.data_requirements
109
+ )
110
+ diagnostic.data_requirements = tuple(
111
+ tuple(
112
+ evolve(
113
+ data_requirement,
114
+ constraints=tuple(
115
+ IgnoreFacets(facets)
116
+ for facets in ignore_datasets[diagnostic.slug].get(
117
+ data_requirement.source_type.value, []
118
+ )
119
+ )
120
+ + data_requirement.constraints,
121
+ )
122
+ for data_requirement in requirement_collection
123
+ )
124
+ for requirement_collection in data_requirements
125
+ )
77
126
 
78
127
  def diagnostics(self) -> list[Diagnostic]:
79
128
  """
@@ -287,6 +336,7 @@ class CondaDiagnosticProvider(CommandLineDiagnosticProvider):
287
336
 
288
337
  def configure(self, config: Config) -> None:
289
338
  """Configure the provider."""
339
+ super().configure(config)
290
340
  self.prefix = config.paths.software / "conda"
291
341
 
292
342
  def _install_conda(self, update: bool) -> Path:
@@ -153,7 +153,7 @@ class MetricResults(RootModel[Any]):
153
153
  CMEC diagnostic bundle RESULTS object
154
154
  """
155
155
 
156
- model_config = ConfigDict(strict=True)
156
+ model_config = ConfigDict(strict=True, allow_inf_nan=False)
157
157
  root: dict[str, dict[Any, Any]]
158
158
 
159
159
  @classmethod
@@ -284,7 +284,7 @@ class CMECMetric(BaseModel):
284
284
  Contains the diagnostics calculated during a diagnostic execution, in a standardised format.
285
285
  """
286
286
 
287
- model_config = ConfigDict(strict=True, extra="allow")
287
+ model_config = ConfigDict(strict=True, extra="allow", allow_inf_nan=False)
288
288
 
289
289
  DIMENSIONS: MetricDimensions
290
290
  """
@@ -342,7 +342,15 @@ class CMECMetric(BaseModel):
342
342
  :
343
343
  None
344
344
  """
345
- pathlib.Path(json_file).write_text(self.model_dump_json(indent=2))
345
+ pathlib.Path(json_file).write_text(
346
+ json.dumps(
347
+ self.model_dump(mode="json"),
348
+ indent=2,
349
+ allow_nan=False,
350
+ sort_keys=True,
351
+ ),
352
+ encoding="utf-8",
353
+ )
346
354
 
347
355
  @classmethod
348
356
  @validate_call
@@ -360,7 +368,7 @@ class CMECMetric(BaseModel):
360
368
  :
361
369
  CMEC Diagnostic object if the file is CMEC-compatible
362
370
  """
363
- json_str = pathlib.Path(json_file).read_text()
371
+ json_str = pathlib.Path(json_file).read_text(encoding="utf-8")
364
372
  metric_obj = cls.model_validate_json(json_str)
365
373
 
366
374
  return metric_obj
@@ -9,6 +9,7 @@ from pandas.testing import assert_frame_equal
9
9
  from climate_ref_core.constraints import (
10
10
  AddSupplementaryDataset,
11
11
  GroupConstraint,
12
+ IgnoreFacets,
12
13
  PartialDateTime,
13
14
  RequireContiguousTimerange,
14
15
  RequireFacets,
@@ -97,6 +98,58 @@ class TestRequireFacets:
97
98
  assert_frame_equal(result, expected)
98
99
 
99
100
 
101
+ class TestIgnoreFacets:
102
+ @pytest.mark.parametrize(
103
+ "data, expected_rows",
104
+ [
105
+ (
106
+ pd.DataFrame(
107
+ {
108
+ "variable_id": [],
109
+ "path": [],
110
+ }
111
+ ),
112
+ [],
113
+ ),
114
+ (
115
+ pd.DataFrame(
116
+ {
117
+ "variable_id": ["tas", "pr", "tas", "rsut"],
118
+ "path": ["tas.nc", "pr.nc", "tas2.nc", "rsut.nc"],
119
+ }
120
+ ),
121
+ [1, 3],
122
+ ),
123
+ ],
124
+ )
125
+ def test_apply_single(self, data, expected_rows):
126
+ constraint = IgnoreFacets(facets={"variable_id": "tas"})
127
+ result = constraint.apply(group=data, data_catalog=data.loc[[]])
128
+ expected = data.iloc[expected_rows]
129
+ assert_frame_equal(result, expected)
130
+
131
+ @pytest.mark.parametrize(
132
+ "data, expected_rows",
133
+ [
134
+ (
135
+ pd.DataFrame(
136
+ {
137
+ "variable_id": ["tas", "pr", "tas", "rsut"],
138
+ "source_id": ["A", "B", "C", "C"],
139
+ "path": ["A_tas.nc", "B_pr.nc", "C_tas.nc", "C_rsut.nc"],
140
+ }
141
+ ),
142
+ [0, 1, 3],
143
+ ),
144
+ ],
145
+ )
146
+ def test_apply_multiple(self, data, expected_rows):
147
+ constraint = IgnoreFacets(facets={"variable_id": ("tas", "pr"), "source_id": "C"})
148
+ result = constraint.apply(group=data, data_catalog=data.loc[[]])
149
+ expected = data.iloc[expected_rows]
150
+ assert_frame_equal(result, expected)
151
+
152
+
100
153
  class TestAddSupplementaryDataset:
101
154
  constraint = AddSupplementaryDataset.from_defaults("areacella", SourceDatasetType.CMIP6)
102
155
 
@@ -1,6 +1,7 @@
1
1
  import datetime
2
2
  import logging
3
3
  import subprocess
4
+ import textwrap
4
5
  import time
5
6
  from contextlib import contextmanager
6
7
  from pathlib import Path
@@ -8,12 +9,23 @@ from pathlib import Path
8
9
  import pytest
9
10
 
10
11
  import climate_ref_core.providers
12
+ from climate_ref_core.constraints import IgnoreFacets
11
13
  from climate_ref_core.diagnostics import CommandLineDiagnostic, Diagnostic
12
14
  from climate_ref_core.exceptions import InvalidDiagnosticException, InvalidProviderException
13
15
  from climate_ref_core.providers import CondaDiagnosticProvider, DiagnosticProvider, import_provider
14
16
 
15
17
 
16
- class TestMetricsProvider:
18
+ @pytest.fixture
19
+ def mock_config(tmp_path, mocker):
20
+ """Use a mock config to avoid depending on `climate_ref.config.Config`."""
21
+ config = mocker.Mock()
22
+ config.paths.software = tmp_path / "software"
23
+ config.ignore_datasets_file = tmp_path / "ignore_datasets.yaml"
24
+ config.ignore_datasets_file.touch()
25
+ return config
26
+
27
+
28
+ class TestDiagnosticProvider:
17
29
  def test_provider(self):
18
30
  provider = DiagnosticProvider("provider_name", "v0.23")
19
31
 
@@ -51,6 +63,62 @@ class TestMetricsProvider:
51
63
  result = provider.get("mock")
52
64
  assert isinstance(result, Diagnostic)
53
65
 
66
+ def test_configure(self, provider, mock_config):
67
+ mock_config.ignore_datasets_file.write_text(
68
+ textwrap.dedent(
69
+ """
70
+ mock_provider:
71
+ mock:
72
+ cmip6:
73
+ - source_id: A
74
+ """
75
+ ),
76
+ encoding="utf-8",
77
+ )
78
+ provider.configure(mock_config)
79
+ expected_constraint = IgnoreFacets(facets={"source_id": ("A",)})
80
+ assert provider.diagnostics()[0].data_requirements[0][0].constraints[0] == expected_constraint
81
+
82
+ def test_configure_unknown_diagnostic(self, provider, mock_config, caplog):
83
+ mock_config.ignore_datasets_file.write_text(
84
+ textwrap.dedent(
85
+ """
86
+ mock_provider:
87
+ invalid_diagnostic:
88
+ cmip6:
89
+ - source_id: A
90
+ """
91
+ ),
92
+ encoding="utf-8",
93
+ )
94
+ with caplog.at_level(logging.WARNING):
95
+ provider.configure(mock_config)
96
+ expected_msg = (
97
+ f"Unknown diagnostics found in {mock_config.ignore_datasets_file} "
98
+ "for provider mock_provider: invalid_diagnostic"
99
+ )
100
+ assert expected_msg in caplog.text
101
+
102
+ def test_configure_unknown_source_type(self, provider, mock_config, caplog):
103
+ mock_config.ignore_datasets_file.write_text(
104
+ textwrap.dedent(
105
+ """
106
+ mock_provider:
107
+ mock:
108
+ invalid_source_type:
109
+ - source_id: A
110
+ """
111
+ ),
112
+ encoding="utf-8",
113
+ )
114
+ with caplog.at_level(logging.WARNING):
115
+ provider.configure(mock_config)
116
+ expected_msg = (
117
+ f"Unknown source types found in {mock_config.ignore_datasets_file} "
118
+ "for diagnostic 'mock' by provider mock_provider: invalid_source_type"
119
+ )
120
+ assert expected_msg in caplog.text
121
+
54
122
 
55
123
  @pytest.mark.parametrize("fqn", ["climate_ref_esmvaltool:provider", "climate_ref_esmvaltool"])
56
124
  def test_import_provider(fqn):
@@ -104,7 +172,7 @@ def test_get_micromamba_url(mocker, sysname, machine):
104
172
  assert "{" not in result
105
173
 
106
174
 
107
- class TestCondaMetricsProvider:
175
+ class TestCondaDiagnosticProvider:
108
176
  @pytest.fixture
109
177
  def provider(self, tmp_path):
110
178
  provider = CondaDiagnosticProvider("provider_name", "v0.23")
@@ -117,9 +185,9 @@ class TestCondaMetricsProvider:
117
185
  with pytest.raises(ValueError, match=r"No prefix for conda environments configured.*"):
118
186
  provider.prefix
119
187
 
120
- def test_configure(self, config):
188
+ def test_configure(self, mock_config):
121
189
  provider = CondaDiagnosticProvider("provider_name", "v0.23")
122
- provider.configure(config)
190
+ provider.configure(mock_config)
123
191
 
124
192
  assert isinstance(provider.prefix, Path)
125
193