coconet-python 0.4.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
coconet/__init__.py ADDED
@@ -0,0 +1,13 @@
1
+ """CoCoNet headless model package."""
2
+
3
+ from coconet.api import CoconetRunResult, load_coconet_config, run_coconet
4
+ from coconet.config import CoconetConfig
5
+ from coconet.model import CoconetModel
6
+
7
+ __all__ = [
8
+ "CoconetConfig",
9
+ "CoconetModel",
10
+ "CoconetRunResult",
11
+ "load_coconet_config",
12
+ "run_coconet",
13
+ ]
coconet/__main__.py ADDED
@@ -0,0 +1,6 @@
1
+ """Allow ``python -m coconet`` as an alternative to the ``coconet`` console script."""
2
+
3
+ from coconet.cli import main
4
+
5
+ if __name__ == "__main__":
6
+ main()
coconet/api.py ADDED
@@ -0,0 +1,110 @@
1
+ """Stable library entry points for running CoCoNet outside the bundled CLI.
2
+
3
+ Other clients (custom CLIs, orchestration, containers) should prefer this module:
4
+ load configuration via :func:`load_coconet_config`, then execute with
5
+ :func:`run_coconet`.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import logging
11
+ from collections.abc import Mapping
12
+ from dataclasses import dataclass, fields
13
+ from pathlib import Path
14
+ from typing import Any
15
+
16
+ from coconet.config import CoconetConfig
17
+ from coconet.logging_utils import configure_logging
18
+ from coconet.model import CoconetModel
19
+
20
+ __all__ = [
21
+ "CoconetRunResult",
22
+ "load_coconet_config",
23
+ "run_coconet",
24
+ ]
25
+
26
+ logger = logging.getLogger(__name__)
27
+
28
+
29
def _config_field_names() -> frozenset[str]:
    """Names of all declared ``CoconetConfig`` dataclass fields."""
    names = (field.name for field in fields(CoconetConfig))
    return frozenset(names)
31
+
32
+
33
def _apply_mapping_overrides(config: CoconetConfig, values: Mapping[str, Any]) -> None:
    """Copy every key/value pair in *values* onto *config*.

    Raises ``TypeError`` on the first key that is not a ``CoconetConfig`` field.
    """
    known_fields = _config_field_names()
    for name, new_value in values.items():
        if name in known_fields:
            setattr(config, name, new_value)
        else:
            raise TypeError(f"Unknown CoconetConfig attribute: {name!r}")
39
+
40
+
41
@dataclass(slots=True)
class CoconetRunResult:
    """Outcome of :func:`run_coconet` (extend with metrics later if needed)."""

    # Destination path the run wrote to, as given by ``config.output_file``.
    output_file: str
46
+
47
+
48
def load_coconet_config(
    *,
    config_file: str | Path | None = None,
    parameter_file: str | Path | None = None,
    env_prefix: str = "COCONET_",
    scenario: Mapping[str, Any] | None = None,
    **overrides: Any,
) -> CoconetConfig:
    """Build a :class:`~coconet.config.CoconetConfig` from files, env, and overrides.

    Resolution order:

    1. Defaults from :class:`~coconet.config.CoconetConfig`.
    2. Optional YAML ``config_file`` (same keys as the dataclass fields).
    3. Optional legacy NetLogo CSV ``parameter_file``.
    4. Environment variables ``{env_prefix}FIELD`` (see :meth:`CoconetConfig.from_file`).
    5. ``scenario`` mapping (flat dict of dataclass fields).
    6. Keyword ``overrides`` (same names as dataclass fields); ``None`` values are skipped.

    Unknown keys in ``scenario`` or ``overrides`` raise ``TypeError``.
    """
    cfg = CoconetConfig.from_file(
        config_file=config_file,
        parameter_file=parameter_file,
        env_prefix=env_prefix,
    )
    if scenario is not None:
        _apply_mapping_overrides(cfg, scenario)
    # Drop unset (None) overrides, then validate/apply the rest in a single pass
    # instead of one _apply_mapping_overrides call — and one rebuild of the
    # field-name set — per key. Dict order preserves the original "first unknown
    # key raises" behavior.
    supplied = {key: value for key, value in overrides.items() if value is not None}
    if supplied:
        _apply_mapping_overrides(cfg, supplied)
    return cfg
81
+
82
+
83
def run_coconet(
    config: CoconetConfig,
    *,
    configure_logs: bool = False,
    log_level: str | None = None,
) -> CoconetRunResult:
    """Execute the simulation described by ``config`` and return result metadata.

    When ``configure_logs`` is true, logging is (re)configured via
    :func:`~coconet.logging_utils.configure_logging` using ``log_level`` when
    given and ``config.log_level`` otherwise. Library embedders should normally
    leave ``configure_logs=False`` and set up the standard logging module (or
    their framework's logging) themselves before calling.
    """
    if configure_logs:
        configure_logging(config.log_level if log_level is None else log_level)

    logger.debug(
        "CoCoNet run starting: reefs_file=%s coastline_file=%s output_file=%s",
        config.reefs_file,
        config.coastline_file,
        config.output_file,
    )
    CoconetModel(config).run()
    logger.debug("CoCoNet run finished: output_file=%s", config.output_file)
    return CoconetRunResult(output_file=config.output_file)
coconet/cli.py ADDED
@@ -0,0 +1,251 @@
1
+ from __future__ import annotations
2
+
3
+ import argparse
4
+ import contextlib
5
+ import io
6
+ import logging
7
+ import os
8
+ from pathlib import Path
9
+ from typing import TYPE_CHECKING, Any
10
+
11
+ from coconet.api import load_coconet_config, run_coconet
12
+ from coconet.logging_utils import configure_logging
13
+
14
+ if TYPE_CHECKING:
15
+ from pyinstrument import Profiler
16
+
17
+
18
def build_parser() -> argparse.ArgumentParser:
    """Build the ``coconet`` CLI parser: run options plus an optional profiling group."""
    p = argparse.ArgumentParser(description="Run headless CoCoNet model.")
    p.add_argument("--config", type=Path, default=None,
                   help="Optional YAML config file with scenario parameters.")
    p.add_argument("--parameter-file", type=Path, default=None,
                   help="Legacy NetLogo-style parameter CSV file.")
    p.add_argument("--output-file", type=Path, default=None,
                   help="Output CSV file path.")
    p.add_argument("--reefs-file", type=Path, default=None,
                   help="Reef attribute / spatial input CSV. Overrides config and COCONET_REEFS_FILE.")
    p.add_argument("--coastline-file", type=Path, default=None,
                   help="Coastline input CSV. Overrides config and COCONET_COASTLINE_FILE.")
    p.add_argument("--log-level", type=str, default=None,
                   help="Logging level (CRITICAL, ERROR, WARNING, INFO, DEBUG).")
    p.add_argument("--ensemble-threads", type=int, default=None, metavar="N",
                   help="After ensemble-0 spinup, run simulation ensembles in parallel worker "
                        "processes (spawn): 1 = serial on the main process; 0 = auto "
                        "(min(CPU count, ensemble count)); N > 1 caps workers. "
                        "Default from config / COCONET_ENSEMBLE_THREADS.")
    # Profiling options live in their own group so --help stays scannable.
    profiling = p.add_argument_group(
        "profiling",
        "Optional CPU profiling via pyinstrument (install: uv sync --extra profile).")
    profiling.add_argument("--profile", action="store_true",
                           help="Profile the model run and write a dump to --profile-output.")
    profiling.add_argument("--profile-format", choices=("html", "text", "speedscope"),
                           default="html",
                           help="Dump format: html (self-contained viewer), text (call tree), "
                                "or speedscope (open at https://www.speedscope.app/). Default: html.")
    profiling.add_argument("--profile-output", type=Path, default=None,
                           help="Output file path. Default: coconet-profile.<html|txt|speedscope.json>.")
    profiling.add_argument("--profile-interval", type=float, default=0.01, metavar="SECONDS",
                           help="Seconds between CPU stack samples (pyinstrument default upstream is 0.001). "
                                "Coarser sampling (e.g. 0.01) keeps sessions smaller so the HTML report often "
                                "avoids heavy resampling. Use 0.001 for maximum detail.")
    profiling.add_argument("--profile-html-resample-interval", type=float, default=None,
                           metavar="SECONDS",
                           help="When using HTML output, optional resample_interval for pyinstrument's "
                                "HTMLRenderer (minimum time between retained samples in the viewer).")
    return p
106
+
107
+
108
+ def _default_profile_path(fmt: str) -> Path:
109
+ if fmt == "html":
110
+ return Path("coconet-profile.html")
111
+ if fmt == "text":
112
+ return Path("coconet-profile.txt")
113
+ return Path("coconet-profile.speedscope.json")
114
+
115
+
116
def _write_profile_dump(
    profiler: Profiler,
    path: Path,
    fmt: str,
    *,
    html_resample_interval: float | None,
    logger: logging.Logger,
) -> None:
    """Render the profiler's last session in *fmt* and write it to *path*."""
    if fmt == "html":
        session = profiler.last_session
        sample_count = 0 if session is None else len(session.frame_records)
        if sample_count > 100_000:
            logger.info(
                "Profile HTML: session has %s stack samples; the interactive viewer "
                "uses resampled data (pyinstrument limit ~100k). "
                "Use a larger --profile-interval and/or --profile-format speedscope "
                "for long runs.",
                sample_count,
            )
        # pyinstrument always prints a noisy resample notice straight to stderr
        # when it resamples; capture it and demote it to a debug log entry (the
        # info message above already covers the interesting case).
        captured = io.StringIO()
        with contextlib.redirect_stderr(captured):
            content = profiler.output_html(resample_interval=html_resample_interval)
        notice = captured.getvalue().strip()
        if notice:
            logger.debug("%s", notice)
    elif fmt == "text":
        content = profiler.output_text(unicode=True, color=False)
    else:
        from pyinstrument.renderers import SpeedscopeRenderer

        content = profiler.output(SpeedscopeRenderer())
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content, encoding="utf-8")
151
+
152
+
153
def main() -> None:
    """Console-script entry point: configure logging, load config, run the model.

    Invalid log levels or bad profiling options exit via ``parser.error``
    (``SystemExit``); otherwise the simulation runs, optionally under
    pyinstrument, and a profile dump is written afterwards.
    """
    parser = build_parser()
    args = parser.parse_args()

    # Bootstrap logging before config files are read so load errors are visible.
    # Bootstrap-level precedence: --log-level, then COCONET_LOG_LEVEL, then INFO.
    bootstrap_level = args.log_level or os.getenv("COCONET_LOG_LEVEL") or "INFO"
    try:
        configure_logging(bootstrap_level)
    except ValueError as exc:
        parser.error(str(exc))
        # parser.error raises SystemExit; this return is defensive only.
        return

    logger = logging.getLogger(__name__)
    logger.debug(
        "Loading configuration (config_file=%s, parameter_file=%s).",
        args.config,
        args.parameter_file,
    )

    # Only flags the user actually passed become overrides (None = not given),
    # so config-file and environment values are not clobbered by CLI defaults.
    cli_overrides: dict[str, Any] = {}
    if args.output_file is not None:
        cli_overrides["output_file"] = str(args.output_file)
    if args.reefs_file is not None:
        cli_overrides["reefs_file"] = str(args.reefs_file)
    if args.coastline_file is not None:
        cli_overrides["coastline_file"] = str(args.coastline_file)
    if args.log_level is not None:
        cli_overrides["log_level"] = args.log_level
    if args.ensemble_threads is not None:
        cli_overrides["ensemble_threads"] = args.ensemble_threads

    config = load_coconet_config(
        config_file=args.config,
        parameter_file=args.parameter_file,
        **cli_overrides,
    )

    # Re-configure logging with the fully resolved level from the config.
    try:
        effective_log_level = configure_logging(config.log_level)
    except ValueError as exc:
        parser.error(str(exc))
        return

    logger = logging.getLogger(__name__)
    logger.info(
        "Starting CoCoNet run: log_level=%s reefs_file=%s coastline_file=%s "
        "output_file=%s config_file=%s parameter_file=%s",
        effective_log_level,
        config.reefs_file,
        config.coastline_file,
        config.output_file,
        args.config,
        args.parameter_file,
    )

    if args.profile:
        # pyinstrument is an optional extra; fail early with an install hint.
        try:
            from pyinstrument import Profiler
        except ImportError:
            parser.error(
                "Profiling needs pyinstrument. Install the optional extra, e.g. "
                "`uv sync --extra profile` or `pip install 'coconet-python[profile]'`."
            )
        if args.profile_interval <= 0:
            parser.error("--profile-interval must be positive.")
        if (
            args.profile_html_resample_interval is not None
            and args.profile_html_resample_interval < 0
        ):
            parser.error(
                "--profile-html-resample-interval must be non-negative "
                "(0 disables HTML resampling)."
            )
        out = args.profile_output or _default_profile_path(args.profile_format)
        profiler = Profiler(interval=args.profile_interval)
        profiler.start()
        try:
            run_coconet(config, configure_logs=False)
        finally:
            # Stop and dump even if the run raised, so partial runs are inspectable.
            profiler.stop()
            _write_profile_dump(
                profiler,
                out,
                args.profile_format,
                html_resample_interval=args.profile_html_resample_interval,
                logger=logger,
            )
            logger.info(
                "Wrote %s profile to %s",
                args.profile_format,
                out.resolve(),
            )
    else:
        run_coconet(config, configure_logs=False)

    logger.info("CoCoNet run finished successfully.")
248
+
249
+
250
+ if __name__ == "__main__":
251
+ main()
coconet/config.py ADDED
@@ -0,0 +1,240 @@
1
+ from __future__ import annotations
2
+
3
+ import csv
4
+ import os
5
+ from dataclasses import dataclass, fields
6
+ from pathlib import Path
7
+ from typing import Any
8
+
9
+ import yaml
10
+
11
+
12
def effective_ensemble_workers(ensemble_threads: int, ensemble_runs: int) -> int:
    """Cap for parallel simulation worker processes (ensembles 1..ensemble_runs).

    ensemble_threads: 0 = auto (min(CPU count, simulation ensembles)), 1 = serial,
    N > 1 = use at most N workers (still capped by ensemble_runs).
    """
    simulation_count = ensemble_runs if ensemble_runs > 0 else 0
    if simulation_count == 0:
        return 1
    requested = int(ensemble_threads)
    if requested <= 0:
        # Auto mode: fall back to the CPU count (or 1 when undetectable).
        requested = os.cpu_count() or 1
    return max(1, min(requested, simulation_count))


def use_parallel_ensemble_run(ensemble_threads: int, ensemble_runs: int) -> bool:
    """True when a parallel pool (more than one worker) would actually be used."""
    if ensemble_runs <= 0:
        return False
    return effective_ensemble_workers(ensemble_threads, ensemble_runs) > 1
29
+
30
+
31
+ def _parse_scalar(text: str) -> Any:
32
+ value = text.strip()
33
+ if value == "":
34
+ return ""
35
+ lowered = value.lower()
36
+ if lowered in {"true", "false"}:
37
+ return lowered == "true"
38
+ try:
39
+ if "." in value or "e" in lowered:
40
+ number = float(value)
41
+ if number.is_integer():
42
+ return int(number)
43
+ return number
44
+ return int(value)
45
+ except ValueError:
46
+ return value
47
+
48
+
49
+ @dataclass(slots=True)
50
+ class CoconetConfig:
51
+ # Input/output
52
+ reefs_file: str = "legacy/reefs2024.csv"
53
+ coastline_file: str = "legacy/coastline.csv"
54
+ output_file: str = "output.csv"
55
+ parameter_file: str | None = None
56
+ log_level: str = "INFO"
57
+ # 0 = auto (min(CPU cores, simulation ensemble count)); 1 = serial; N > 1 = process pool cap.
58
+ ensemble_threads: int = 0
59
+
60
+ # Scenario controls (matching NetLogo globals)
61
+ SSP: float = 2.6
62
+ ensemble_runs: int = 20
63
+ start_year: int = 1956
64
+ # Years before start_year where ensemble-0 spinup begins (legacy NetLogo: 50).
65
+ spinup_backtrack_years: int = 50
66
+ save_year: int = 1986
67
+ projection_year: int = 2025
68
+ search_year: int = 9999
69
+ end_year: int = 2030
70
+
71
+ start_catchment_restore: int = 9999
72
+ restore_timeframe: float = 0.0
73
+
74
+ start_CoTS_control: int = 9999
75
+ eco_threshold: float = 8.0
76
+ CoTS_threshold: float = 999999.0
77
+ coral_threshold: float = 0.0
78
+ CoTS_vessels_GBR: int = 0
79
+ CoTS_vessels_FN: int = 0
80
+ CoTS_vessels_N: int = 0
81
+ CoTS_vessels_C: int = 0
82
+ CoTS_vessels_S: int = 0
83
+ CoTS_vessels_sector: int = 0
84
+
85
+ intervene_lon_min: float = 140.0
86
+ intervene_lon_max: float = 155.0
87
+ intervene_lat_min: float = -25.0
88
+ intervene_lat_max: float = -9.0
89
+
90
+ start_modified_zoning: int = 9999
91
+ rezoned_reefs: int = 0
92
+ start_modified_fishing: int = 9999
93
+ catch_reduction: float = 0.0
94
+ start_lower_sizelimit: int = 9999
95
+ start_upper_sizelimit: int = 9999
96
+ start_CoTSlimit: int = 9999
97
+
98
+ start_emperor_release: int = 9999
99
+ release_reefs: int = 500
100
+ release_threshold: float = 9999999.0
101
+ release_number: float = 0.0
102
+
103
+ start_regional_shading: int = 9999
104
+ regional_shading_reduction: float = 0.0
105
+ start_rubble_consolidation: int = 9999
106
+ consolidation_reefs: int = 0
107
+ consolidation_threshold: float = 0.0
108
+ consolidation_hectares: float = 0.0
109
+
110
+ start_coral_seeding: int = 9999
111
+ seed_reefs: int = 0
112
+ seed_threshold: float = 0.0
113
+ seed_hectares: float = 0.0
114
+ hybrid_fraction: float = 0.0
115
+ dominance: float = 0.0
116
+
117
+ start_coral_slick: int = 9999
118
+ slick_reefs: int = 0
119
+ slick_threshold: float = 0.0
120
+ slick_hectares: float = 0.0
121
+
122
+ start_reef_shading: int = 9999
123
+ shading_reefs: int = 0
124
+ reef_shading_reduction: float = 0.0
125
+ start_pH_protection: int = 9999
126
+ pH_reefs: int = 0
127
+ pH_protection: float = 0.0
128
+
129
+ # Legacy interface-only globals
130
+ search_mode: int = 0
131
+ perfect_intervention: str = ""
132
+ unregulated_fishing: bool = False
133
+
134
+ @classmethod
135
+ def from_file(
136
+ cls,
137
+ config_file: str | Path | None = None,
138
+ parameter_file: str | Path | None = None,
139
+ env_prefix: str = "COCONET_",
140
+ ) -> CoconetConfig:
141
+ config = cls()
142
+
143
+ if config_file is not None:
144
+ loaded = yaml.safe_load(Path(config_file).read_text()) or {}
145
+ for k, v in loaded.items():
146
+ if hasattr(config, k):
147
+ setattr(config, k, v)
148
+
149
+ if parameter_file is not None:
150
+ config.parameter_file = str(parameter_file)
151
+ config._apply_legacy_parameter_file(Path(parameter_file))
152
+
153
+ config._apply_env_overrides(env_prefix)
154
+ return config
155
+
156
+ def _apply_env_overrides(self, env_prefix: str) -> None:
157
+ field_lookup = {f.name.lower(): f.name for f in fields(self)}
158
+ for env_key, env_value in os.environ.items():
159
+ if not env_key.startswith(env_prefix):
160
+ continue
161
+ key = env_key[len(env_prefix) :].lower()
162
+ if key not in field_lookup:
163
+ continue
164
+ attr = field_lookup[key]
165
+ setattr(self, attr, _parse_scalar(env_value))
166
+
167
+ def _apply_legacy_parameter_file(self, path: Path) -> None:
168
+ label_to_attr = {
169
+ "climate scenario": "SSP",
170
+ "ensemble runs": "ensemble_runs",
171
+ "start year": "start_year",
172
+ "spinup backtrack (years)": "spinup_backtrack_years",
173
+ "save year": "save_year",
174
+ "projection year": "projection_year",
175
+ "search year": "search_year",
176
+ "end year": "end_year",
177
+ "cots control start year": "start_CoTS_control",
178
+ "cots control ecological threshold (cots per ha)": "eco_threshold",
179
+ "cots control cots threshold (cots per ha)": "CoTS_threshold",
180
+ "cots control coral threshold (cots per ha)": "coral_threshold",
181
+ "cots vessels across gbr": "CoTS_vessels_GBR",
182
+ "cots vessels in far-northern region": "CoTS_vessels_FN",
183
+ "cots vessels in northern region": "CoTS_vessels_N",
184
+ "cots vessels in central region": "CoTS_vessels_C",
185
+ "cots vessels in southern region": "CoTS_vessels_S",
186
+ "catchment restoration start year": "start_catchment_restore",
187
+ "catchment restoration timescale (years)": "restore_timeframe",
188
+ "future zoning start year": "start_modified_zoning",
189
+ "number of reefs included in future rezoning": "rezoned_reefs",
190
+ "reduction in fisheries catch start year": "start_modified_fishing",
191
+ "fractional reduction in fisheries catches": "catch_reduction",
192
+ "upper fish size limit start year": "start_upper_sizelimit",
193
+ "lower fish size limit start year": "start_lower_sizelimit",
194
+ "exclude fishing from active outbreak reefs start year": "start_CoTSlimit",
195
+ "emperor release start year": "start_emperor_release",
196
+ "number of release reefs": "release_reefs",
197
+ "maximum adult emperors (per ha) for release": "release_threshold",
198
+ "number of juvenile emperors released per reef": "release_number",
199
+ "regional shading start year": "start_regional_shading",
200
+ "absolute dhw reduction due to regional shading (dhw)": "regional_shading_reduction",
201
+ "minimum longitude of interventions": "intervene_lon_min",
202
+ "maximum longitude of interventions": "intervene_lon_max",
203
+ "minimum latitude of interventions": "intervene_lat_min",
204
+ "maximum latitude of interventions": "intervene_lat_max",
205
+ "rubble consolidation start year": "start_rubble_consolidation",
206
+ "annual number of consolidated reefs": "consolidation_reefs",
207
+ "minimum rubble cover threshold for consolidation [0 1]": "consolidation_threshold",
208
+ "total annual consolidated area (ha)": "consolidation_hectares",
209
+ "thermally tolerant coral seeding start year": "start_coral_seeding",
210
+ "annual number of reefs seeded with coral": "seed_reefs",
211
+ "maximum coral cover threshold for coral seeding [0 1]": "seed_threshold",
212
+ "total annual area of seeded corals (ha)": "seed_hectares",
213
+ "fraction of staghorn acropora corals able to hybridise with thermally tolerant corals [0 1]": "hybrid_fraction",
214
+ "dominance of thermally tolerant corals in setting thermal tolerance of hybrids [0 1]": "dominance",
215
+ "coral slicks start year": "start_coral_slick",
216
+ "annual number of reefs with coral slicks released": "slick_reefs",
217
+ "maximum coral cover threshold for coral slicks [0 1]": "slick_threshold",
218
+ "total annual area of slick corals (ha)": "slick_hectares",
219
+ "reef shading start year": "start_reef_shading",
220
+ "annual number of reefs locally shaded": "shading_reefs",
221
+ "fractional dhw reduction due to local shading [0 1]": "reef_shading_reduction",
222
+ "ocean acidification treatment start year": "start_pH_protection",
223
+ "annual number of reefs treated for ocean acidification": "pH_reefs",
224
+ "fractional protection from ocean acidification [0 1]": "pH_protection",
225
+ "cots vessels in active sector": "CoTS_vessels_sector",
226
+ }
227
+
228
+ with path.open("r", newline="") as f:
229
+ reader = csv.reader(f)
230
+ for row in reader:
231
+ if len(row) < 2:
232
+ continue
233
+ label = row[0].strip().lower()
234
+ # Header row "ensemble" marks end of config lines (not e.g. "ensemble runs").
235
+ if label.startswith("ensemble") and label == "ensemble":
236
+ break
237
+ attr = label_to_attr.get(label)
238
+ if attr is None:
239
+ continue
240
+ setattr(self, attr, _parse_scalar(row[1]))
@@ -0,0 +1,26 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+
5
VALID_LOG_LEVELS = ("CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG")


def normalize_log_level(level: str | None) -> str:
    """Return *level* trimmed and upper-cased; ``None`` defaults to ``"INFO"``.

    Raises ``ValueError`` for anything outside :data:`VALID_LOG_LEVELS`.
    """
    if level is None:
        return "INFO"
    candidate = level.strip().upper()
    if candidate in VALID_LOG_LEVELS:
        return candidate
    valid_levels = ", ".join(VALID_LOG_LEVELS)
    raise ValueError(f"Invalid log level '{level}'. Valid levels: {valid_levels}.")


def configure_logging(level: str | None) -> str:
    """Force-reconfigure root logging at *level* and return the normalized name."""
    resolved = normalize_log_level(level)
    logging.basicConfig(
        format="%(asctime)s | %(levelname)-8s | %(name)s | %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
        level=getattr(logging, resolved),
        # force=True replaces any handlers installed by an earlier basicConfig.
        force=True,
    )
    return resolved