esgvoc 0.2.1__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff covers publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.

Potentially problematic release.


This version of esgvoc might be problematic.

Files changed (70)
  1. esgvoc/__init__.py +3 -1
  2. esgvoc/api/__init__.py +23 -34
  3. esgvoc/api/_utils.py +28 -14
  4. esgvoc/api/data_descriptors/__init__.py +18 -12
  5. esgvoc/api/data_descriptors/activity.py +8 -45
  6. esgvoc/api/data_descriptors/area_label.py +6 -0
  7. esgvoc/api/data_descriptors/branded_suffix.py +5 -0
  8. esgvoc/api/data_descriptors/branded_variable.py +5 -0
  9. esgvoc/api/data_descriptors/consortium.py +16 -56
  10. esgvoc/api/data_descriptors/data_descriptor.py +106 -0
  11. esgvoc/api/data_descriptors/date.py +3 -46
  12. esgvoc/api/data_descriptors/directory_date.py +3 -46
  13. esgvoc/api/data_descriptors/experiment.py +19 -54
  14. esgvoc/api/data_descriptors/forcing_index.py +3 -45
  15. esgvoc/api/data_descriptors/frequency.py +6 -43
  16. esgvoc/api/data_descriptors/grid_label.py +6 -44
  17. esgvoc/api/data_descriptors/horizontal_label.py +6 -0
  18. esgvoc/api/data_descriptors/initialisation_index.py +3 -44
  19. esgvoc/api/data_descriptors/institution.py +11 -54
  20. esgvoc/api/data_descriptors/license.py +4 -44
  21. esgvoc/api/data_descriptors/mip_era.py +6 -44
  22. esgvoc/api/data_descriptors/model_component.py +7 -45
  23. esgvoc/api/data_descriptors/organisation.py +3 -40
  24. esgvoc/api/data_descriptors/physic_index.py +3 -45
  25. esgvoc/api/data_descriptors/product.py +4 -43
  26. esgvoc/api/data_descriptors/realisation_index.py +3 -44
  27. esgvoc/api/data_descriptors/realm.py +4 -42
  28. esgvoc/api/data_descriptors/resolution.py +6 -44
  29. esgvoc/api/data_descriptors/source.py +18 -53
  30. esgvoc/api/data_descriptors/source_type.py +3 -41
  31. esgvoc/api/data_descriptors/sub_experiment.py +3 -41
  32. esgvoc/api/data_descriptors/table.py +6 -48
  33. esgvoc/api/data_descriptors/temporal_label.py +6 -0
  34. esgvoc/api/data_descriptors/time_range.py +3 -27
  35. esgvoc/api/data_descriptors/variable.py +13 -71
  36. esgvoc/api/data_descriptors/variant_label.py +3 -47
  37. esgvoc/api/data_descriptors/vertical_label.py +5 -0
  38. esgvoc/api/projects.py +187 -171
  39. esgvoc/api/report.py +21 -12
  40. esgvoc/api/search.py +3 -1
  41. esgvoc/api/universe.py +44 -34
  42. esgvoc/apps/__init__.py +3 -4
  43. esgvoc/apps/drs/generator.py +166 -161
  44. esgvoc/apps/drs/report.py +222 -131
  45. esgvoc/apps/drs/validator.py +103 -105
  46. esgvoc/cli/drs.py +29 -19
  47. esgvoc/cli/get.py +26 -25
  48. esgvoc/cli/install.py +11 -8
  49. esgvoc/cli/main.py +0 -2
  50. esgvoc/cli/status.py +5 -5
  51. esgvoc/cli/valid.py +40 -40
  52. esgvoc/core/db/models/universe.py +3 -3
  53. esgvoc/core/db/project_ingestion.py +1 -1
  54. esgvoc/core/db/universe_ingestion.py +6 -5
  55. esgvoc/core/logging_handler.py +1 -1
  56. esgvoc/core/repo_fetcher.py +4 -3
  57. esgvoc/core/service/__init__.py +37 -5
  58. esgvoc/core/service/configuration/config_manager.py +188 -0
  59. esgvoc/core/service/configuration/setting.py +88 -0
  60. esgvoc/core/service/state.py +49 -32
  61. {esgvoc-0.2.1.dist-info → esgvoc-0.3.0.dist-info}/METADATA +34 -3
  62. esgvoc-0.3.0.dist-info/RECORD +78 -0
  63. esgvoc/cli/config.py +0 -82
  64. esgvoc/core/service/settings.py +0 -73
  65. esgvoc/core/service/settings.toml +0 -17
  66. esgvoc/core/service/settings_default.toml +0 -17
  67. esgvoc-0.2.1.dist-info/RECORD +0 -73
  68. {esgvoc-0.2.1.dist-info → esgvoc-0.3.0.dist-info}/WHEEL +0 -0
  69. {esgvoc-0.2.1.dist-info → esgvoc-0.3.0.dist-info}/entry_points.txt +0 -0
  70. {esgvoc-0.2.1.dist-info → esgvoc-0.3.0.dist-info}/licenses/LICENSE.txt +0 -0
esgvoc/cli/valid.py CHANGED
@@ -38,52 +38,52 @@ def valid(
  verbose: bool = typer.Option(False, "-v", "--verbose", help="Provide detailed validation results")
  ):
  """
- Validates one or more strings against specified Project:Collection:Term configurations.
-
- Depending on the provided key structure, the function performs different validation operations:
- - If all are None (e.g., "::"), validates the term across all projects (`valid_term_in_all_projects`).
- - If Term is None (e.g., "Project:Collection:"), validates the term in the specified collection (`valid_term_in_collection`).
- - If Term and Collection are None (e.g., "Project::"), validates the term in the specified project (`valid_term_in_project`).
- - If all are specified (e.g., "Project:Collection:Term"), validates the term exactly (`valid_term`).
-
- Parameters:
- strings_targets (List[str]): A list of validation pairs, where each pair consists of:
- - A string to validate.
- - A key in the form '<Project:Collection:Term>'.
- Usage :
- Valid one:
- esgvocab valid IPSL cmip6plus:institution_id:ipsl
- esgvocab valid IPSL cmip6plus:institution_id:
- esgvocab valid IPSL cmip6plus::
- esgvocab valid IPSL ::
-
- Unvalid one:
- esgvocab valid IPSL_invalid cmip6plus:institution_id:ipsl
- esgvocab valid IPSL cmip6plus:institution_id:isl <= term cant be found
- esgvocab valid IPSL cmip6plus:institutin_id:ispl <= collection cant be found
- esgvocab valid IPSL cmip6pls:institution_id:ispl <= project cant be found
-
- Multiple validation for all known projects:
- esgvocab valid IPSL :: IPS ::
- result will be [True, False]
-
- esgvocab valid --verbose IPS :: IPSL ::
- result will be
- ┏━━━━━━━━┳━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
- ┃ String ┃ Key ┃ Result ┃ Errors
- ┡━━━━━━━━╇━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
- │ IPS │ :: │ ❌ Invalid │ did not found matching term
- │ IPSL │ :: │ ✅ Valid │ None
- └────────┴─────┴────────────┴─────────────────────────────┘
- Returns:
- List[bool]: Validation results for each pair in the input.
+ Validates one or more strings against specified Project:Collection:Term configurations.\n
+ \n
+ Depending on the provided key structure, the function performs different validation operations:\n
+ - If all are None (e.g., "::"), validates the term across all projects (`valid_term_in_all_projects`).\n
+ - If Term is None (e.g., "Project:Collection:"), validates the term in the specified collection (`valid_term_in_collection`).\n
+ - If Term and Collection are None (e.g., "Project::"), validates the term in the specified project (`valid_term_in_project`).\n
+ - If all are specified (e.g., "Project:Collection:Term"), validates the term exactly (`valid_term`).\n
+ \n
+ Parameters:\n
+ \tstrings_targets (List[str]): A list of validation pairs, where each pair consists of:\n
+ \t\t- A string to validate.\n
+ \t\t- A key in the form '<Project:Collection:Term>'.\n
+ Usage :\n
+ \tValid one:\n
+ \tesgvocab valid IPSL cmip6plus:institution_id:ipsl\n
+ \tesgvocab valid IPSL cmip6plus:institution_id:\n
+ \tesgvocab valid IPSL cmip6plus::\n
+ \tesgvocab valid IPSL ::\n
+ \n
+ \tUnvalid one:\n
+ \tesgvocab valid IPSL_invalid cmip6plus:institution_id:ipsl\n
+ \tesgvocab valid IPSL cmip6plus:institution_id:isl <= term cant be found\n
+ \tesgvocab valid IPSL cmip6plus:institutin_id:ispl <= collection cant be found\n
+ \tesgvocab valid IPSL cmip6pls:institution_id:ispl <= project cant be found\n
+ \n
+ \tMultiple validation for all known projects: \n
+ \tesgvocab valid IPSL :: IPS :: \n
+ \t\tresult will be [True, False]\n
+ \n
+ \tesgvocab valid --verbose IPS :: IPSL ::\n
+ \tresult will be \n
+ \t\t┏━━━━━━━━┳━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓\n
+ \t\t┃ String ┃ Key ┃ Result ┃ Errors ┃\n
+ \t\t┡━━━━━━━━╇━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩\n
+ \t\t│ IPS │ :: │ ❌ Invalid │ did not found matching term │\n
+ \t\t│ IPSL │ :: │ ✅ Valid │ None │\n
+ \t\t└────────┴─────┴────────────┴─────────────────────────────┘\n
+ Returns:\n
+ \tList[bool]: Validation results for each pair in the input.\n
  """
  results = []
  detailed_results = []

  # Combine string and target into pairs
  pairs = [strings_targets[i] + " " + strings_targets[i + 1] for i in range(0, len(strings_targets), 2)]
-
+
  # Validate each string against each target
  for validation in pairs:
  match = re.match(r"(.+)\s+([^:]*):([^:]*):([^:]*)", validation)
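
As a reading aid for the hunk above, here is a minimal standalone sketch of how each "<string> <Project:Collection:Term>" pair is split, reusing the exact regular expression from valid.py; the helper name parse_validation is hypothetical and not part of the package.

    import re

    def parse_validation(pair: str) -> tuple[str, str | None, str | None, str | None]:
        # Same pattern as in valid.py: "<string> <project>:<collection>:<term>",
        # where any of the three key fields may be left empty.
        match = re.match(r"(.+)\s+([^:]*):([^:]*):([^:]*)", pair)
        if match is None:
            raise ValueError(f"malformed validation request: {pair!r}")
        string, project, collection, term = match.groups()
        # Empty fields become None, matching the "all None" cases described in the docstring.
        return string, project or None, collection or None, term or None

    print(parse_validation("IPSL cmip6plus:institution_id:ipsl"))  # ('IPSL', 'cmip6plus', 'institution_id', 'ipsl')
    print(parse_validation("IPSL ::"))                             # ('IPSL', None, None, None)
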
esgvoc/core/db/models/universe.py CHANGED
@@ -14,10 +14,10 @@ _LOGGER = logging.getLogger("universe_db_creation")
  class Universe(SQLModel, PkMixin, table=True):
  __tablename__ = "universes"
  git_hash: str
- data_descriptors: list["DataDescriptor"] = Relationship(back_populates="universe")
+ data_descriptors: list["UDataDescriptor"] = Relationship(back_populates="universe")


- class DataDescriptor(SQLModel, PkMixin, IdMixin, table=True):
+ class UDataDescriptor(SQLModel, PkMixin, IdMixin, table=True):
  __tablename__ = "data_descriptors"
  context: dict = Field(sa_column=sa.Column(JSON))
  universe_pk: int | None = Field(default=None, foreign_key="universes.pk")
@@ -33,7 +33,7 @@ class UTerm(SQLModel, PkMixin, IdMixin, table=True):
  data_descriptor_pk: int | None = Field(
  default=None, foreign_key="data_descriptors.pk"
  )
- data_descriptor: DataDescriptor = Relationship(back_populates="terms")
+ data_descriptor: UDataDescriptor = Relationship(back_populates="terms")


  def universe_create_db(db_file_path: Path) -> None:
esgvoc/core/db/project_ingestion.py CHANGED
@@ -76,7 +76,7 @@ def ingest_collection(collection_dir_path: Path,
  try:
  json_specs = DataMerger(data=JsonLdResource(uri =str(term_file_path)),
  # locally_available={"https://espri-mod.github.io/mip-cmor-tables":".cache/repos/WCRP-universe"}).merge_linked_json()[-1]
- locally_available={"https://espri-mod.github.io/mip-cmor-tables":service.service_settings.universe.local_path}).merge_linked_json()[-1]
+ locally_available={"https://espri-mod.github.io/mip-cmor-tables":service.current_state.universe.local_path}).merge_linked_json()[-1]

  term_kind = infer_term_kind(json_specs)
  term_id = json_specs["id"]
esgvoc/core/db/universe_ingestion.py CHANGED
@@ -9,7 +9,7 @@ from sqlmodel import Session, select
  import esgvoc.core.db.connection as db
  from esgvoc.core.db.connection import read_json_file
  from esgvoc.core.db.models.mixins import TermKind
- from esgvoc.core.db.models.universe import DataDescriptor, UTerm, Universe
+ from esgvoc.core.db.models.universe import UDataDescriptor, UTerm, Universe
  from esgvoc.core.db.models.universe import universe_create_db
  import esgvoc.core.service as service

@@ -63,7 +63,7 @@ def ingest_data_descriptor(data_descriptor_path: Path,
  return

  with connection.create_session() as session:
- data_descriptor = DataDescriptor(id=data_descriptor_id,
+ data_descriptor = UDataDescriptor(id=data_descriptor_id,
  context=context,
  term_kind="") # we ll know it only when we ll add a term (hypothesis all term have the same kind in a data_descriptor)
  term_kind_dd = None
@@ -72,9 +72,10 @@ def ingest_data_descriptor(data_descriptor_path: Path,
  for term_file_path in data_descriptor_path.iterdir():
  _LOGGER.debug(f"found term path : {term_file_path}, {term_file_path.suffix}")
  if term_file_path.is_file() and term_file_path.suffix == ".json":
+
  try:
  json_specs=DataMerger(data=JsonLdResource(uri=str(term_file_path)),
- locally_available={"https://espri-mod.github.io/mip-cmor-tables":service.service_settings.universe.local_path}).merge_linked_json()[-1]
+ locally_available={"https://espri-mod.github.io/mip-cmor-tables":service.current_state.universe.local_path}).merge_linked_json()[-1]
  term_kind = infer_term_kind(json_specs)
  term_id = json_specs["id"]

@@ -103,8 +104,8 @@ def get_universe_term(data_descriptor_id: str,
  universe_db_session: Session) -> tuple[TermKind, dict]:
  statement = (
  select(UTerm)
- .join(DataDescriptor)
- .where(DataDescriptor.id == data_descriptor_id, UTerm.id == term_id)
+ .join(UDataDescriptor)
+ .where(UDataDescriptor.id == data_descriptor_id, UTerm.id == term_id)
  )
  results = universe_db_session.exec(statement)
  term = results.one()
esgvoc/core/logging_handler.py CHANGED
@@ -1,4 +1,4 @@
  import logging.config
  from pathlib import Path

- logging.config.fileConfig(f"{Path(__file__).parent}/logging.conf")
+ logging.config.fileConfig(f"{Path(__file__).parent}/logging.conf", disable_existing_loggers=False)
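
For context, logging.config.fileConfig() disables every logger that already exists unless disable_existing_loggers=False is passed, so the change above keeps loggers created before esgvoc is imported alive. A minimal standalone illustration, assuming a logging.conf file sits next to the script (this is not esgvoc code):

    import logging
    import logging.config

    # A logger created before fileConfig() runs, e.g. in a module that imports esgvoc.
    early_logger = logging.getLogger("caller")

    # With the default disable_existing_loggers=True, "caller" would be silenced here.
    logging.config.fileConfig("logging.conf", disable_existing_loggers=False)
    assert early_logger.disabled is False
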
esgvoc/core/repo_fetcher.py CHANGED
@@ -153,7 +153,7 @@ class RepoFetcher:
  files = [item['name'] for item in contents if item['type'] == 'file']
  return files

- def clone_repository(self, owner: str, repo: str, branch: Optional[str] = None):
+ def clone_repository(self, owner: str, repo: str, branch: Optional[str] = None, local_path: str|None = None):
  """
  Clone a GitHub repository to a target directory.
  :param owner: Repository owner
@@ -162,15 +162,16 @@ class RepoFetcher:
  :param branch: (Optional) The branch to clone. Clones the default branch if None.
  """
  repo_url = f"https://github.com/{owner}/{repo}.git"
+ destination = local_path if local_path else f"{self.repo_dir}/{repo}"

- command = ["git", "clone", repo_url, f"{self.repo_dir}/{repo}"]
+ command = ["git", "clone", repo_url, destination]
  if branch:
  command.extend(["--branch", branch])
  with redirect_stdout_to_log():

  try:
  subprocess.run(command, check=True)
- _LOGGER.debug(f"Repository cloned successfully into {self.repo_dir}/{repo}")
+ _LOGGER.debug(f"Repository cloned successfully into {destination}")
  except subprocess.CalledProcessError:
  try:
  current_work_dir = os.getcwd()
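
A hypothetical usage sketch of the new local_path parameter; the repository values are borrowed from the DEFAULT_SETTINGS shown further down in setting.py, and the RepoFetcher constructor is assumed to take no required arguments:

    from esgvoc.core.repo_fetcher import RepoFetcher

    fetcher = RepoFetcher()
    # Clone into an explicit destination instead of the default f"{repo_dir}/{repo}".
    fetcher.clone_repository(
        owner="WCRP-CMIP",
        repo="WCRP-universe",
        branch="esgvoc",
        local_path="repos/WCRP-universe",
    )
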
esgvoc/core/service/__init__.py CHANGED
@@ -1,8 +1,40 @@
- from esgvoc.core.service.settings import ServiceSettings
+ # from esgvoc.core.service.config_register import ConfigManager
+ # from esgvoc.core.service.settings import ServiceSettings
+ # from esgvoc.core.service.state import StateService
+ #
+ # config_manager = ConfigManager()
+ # active_setting = config_manager.get_active_config()
+ # active_setting["base_dir"] = str(config_manager.config_dir / config_manager.get_active_config_name())
+ # service_settings = ServiceSettings.from_config(active_setting)
+ # state_service = StateService(service_settings)
+
+
+ from esgvoc.core.service.configuration.config_manager import ConfigManager
+ from esgvoc.core.service.configuration.setting import ServiceSettings
  from esgvoc.core.service.state import StateService
- from pathlib import Path

- settings_path = Path(__file__).parent / "settings.toml"
- service_settings = ServiceSettings.load_from_file(str(settings_path))
- state_service = StateService(service_settings)
+ config_manager : ConfigManager | None = None
+ current_state : StateService | None = None
+
+ def get_config_manager():
+ global config_manager
+ if config_manager is None:
+
+ config_manager = ConfigManager(ServiceSettings, app_name="esgvoc", app_author="ipsl", default_settings=ServiceSettings.DEFAULT_SETTINGS)
+ active_config_name= config_manager.get_active_config_name()
+ config_manager.data_config_dir = config_manager.data_dir / active_config_name
+ config_manager.data_config_dir.mkdir(parents=True, exist_ok=True)
+
+ return config_manager
+
+
+ def get_state():
+ global current_state
+ if config_manager is not None:
+ current_state = StateService(config_manager.get_active_config())
+ return current_state
+
+ # Singleton Access Function
+ config_manager = get_config_manager()
+ current_state = get_state()
 
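
A short sketch of how the new module-level singletons are meant to be consumed, mirroring universe_ingestion.py above, which reads service.current_state.universe.local_path; the commented values are assumptions for a fresh install:

    import esgvoc.core.service as service

    # config_manager and current_state are initialised at import time by
    # get_config_manager() and get_state() (bottom of the hunk above).
    cm = service.get_config_manager()
    print(cm.get_active_config_name())  # "default" on a fresh install

    state = service.current_state
    if state is not None:
        print(state.universe.local_path)
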
esgvoc/core/service/configuration/config_manager.py ADDED
@@ -0,0 +1,188 @@
+ import toml
+ import logging
+ from pathlib import Path
+ from platformdirs import PlatformDirs
+ from typing import Type, TypeVar, Generic, Protocol
+
+ # Setup logging
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+ # Define a generic type for configuration
+ T = TypeVar("T", bound="ConfigSchema")
+
+ class ConfigSchema(Protocol):
+ """Protocol for application-specific configuration classes."""
+
+ @classmethod
+ def load_from_file(cls, file_path: str): ...
+
+ def save_to_file(self, file_path: str): ...
+
+ class ConfigManager(Generic[T]):
+ def __init__(self, config_cls: Type[T], app_name: str, app_author: str, default_settings : dict | None = None ):
+ """
+ Initialize the configuration manager.
+ - config_cls: A class that implements `ConfigSchema` (e.g., ServiceSettings).
+ - app_name: Name of the application (used for directory paths).
+ - app_author: Name of the author/organization (used for directory paths).
+ """
+ self.config_cls = config_cls
+ self.dirs = PlatformDirs(app_name, app_author)
+
+ # Define standard paths
+ self.config_dir = Path(self.dirs.user_config_path).expanduser().resolve()
+ self.data_dir = Path(self.dirs.user_data_path).expanduser().resolve()
+ self.data_config_dir = None # depends on loaded settings
+
+ self.cache_dir = Path(self.dirs.user_cache_path).expanduser().resolve()
+
+
+ self.config_dir.mkdir(parents=True, exist_ok=True)
+ self.data_dir.mkdir(parents=True, exist_ok=True)
+ self.cache_dir.mkdir(parents=True, exist_ok=True)
+
+ self.registry_path = self.config_dir / "config_registry.toml"
+ self.default_config_path = self.config_dir / "default_setting.toml"
+ self.default_settings = default_settings
+ self._init_registry()
+
+ def _init_registry(self):
+ """Initialize the registry file if it doesn't exist."""
+ if not self.registry_path.exists():
+ logger.info("Initializing configuration registry...")
+ registry = {"configs": {"default": str(self.default_config_path)}, "active": "default"}
+ self._save_toml(self.registry_path, registry)
+ # Ensure the default settings file exists and save it if necessary
+ if not self.default_config_path.exists():
+ if self.default_settings:
+ logger.info("Saving default settings...")
+ self._save_toml(self.default_config_path, self.default_settings)
+ else:
+ logger.warning("No default settings provided.")
+
+ def _load_toml(self, path: Path) -> dict:
+ """Load TOML data from a file."""
+ if not path.exists():
+ logger.error(f"Configuration file not found: {path}")
+ raise FileNotFoundError(f"Configuration file not found: {path}")
+ with open(path, "r") as f:
+ return toml.load(f)
+
+ def _save_toml(self, path: Path, data: dict) -> None:
+ """Save TOML data to a file."""
+ with open(path, "w") as f:
+ toml.dump(data, f)
+
+ def _get_active_config_path(self) -> Path:
+ """Retrieve the path of the active configuration file."""
+ registry = self._load_toml(self.registry_path)
+ active_config_name = registry["active"]
+ return Path(registry["configs"][active_config_name])
+
+ def get_config(self, config_name:str) -> T:
+ """Load the configuration as an instance of the given config schema."""
+ registry = self._load_toml(self.registry_path)
+ if config_name not in registry["configs"]:
+ logger.error(f"Config '{config_name}' not found in registry.")
+ raise ValueError(f"Config '{config_name}' not found in registry.")
+ config_path = self.config_cls.load_from_file(registry["configs"][config_name])
+ return self.config_cls.load_from_file(str(config_path))
+
+ def get_active_config(self) -> T:
+ """Load the active configuration as an instance of the given config schema."""
+ active_config_path = self._get_active_config_path()
+
+ return self.config_cls.load_from_file(str(active_config_path))
+
+ def get_active_config_name(self) -> str:
+ """Retrieve the config name from the registry"""
+ registry = self._load_toml(self.registry_path)
+ return registry["active"]
+
+ def save_config(self, config_data: dict, name: str | None = None) -> None:
+ """Save the modified configuration to the corresponding file and update the registry."""
+
+ if name:
+ # If a name is provided, save the configuration with that name
+ config_path = self.config_dir / f"{name}.toml"
+ self._save_toml(config_path, config_data)
+
+ # Update the registry with the new config name
+ registry = self._load_toml(self.registry_path)
+ registry["configs"][name] = str(config_path)
+ registry["active"] = name
+ self._save_toml(self.registry_path, registry)
+
+ logger.info(f"Saved configuration to {config_path} and updated registry.")
+ else:
+ # If no name is provided, give the user a default name, like "user_config"
+ default_name = "user_config"
+ config_path = self.config_dir / f"{default_name}.toml"
+
+ # Check if the user_config already exists, if so, warn them
+ if config_path.exists():
+ logger.warning(f"{default_name}.toml already exists. Overwriting with the new config.")
+
+ # Save the configuration with the default name
+ self._save_toml(config_path, config_data)
+
+ # Update the registry with the new config name
+ registry = self._load_toml(self.registry_path)
+ registry["configs"][default_name] = str(config_path)
+ registry["active"] = default_name
+ self._save_toml(self.registry_path, registry)
+
+ logger.info(f"Saved new configuration to {config_path} and updated registry.")
+
+ def save_active_config(self, config: T):
+ """Save the current configuration to the active file."""
+ active_config_path = self._get_active_config_path()
+ config.save_to_file(str(active_config_path))
+
+ def switch_config(self, config_name: str):
+ """Switch to a different configuration."""
+ registry = self._load_toml(self.registry_path)
+ if config_name not in registry["configs"]:
+ logger.error(f"Config '{config_name}' not found in registry.")
+ raise ValueError(f"Config '{config_name}' not found in registry.")
+ registry["active"] = config_name
+
+ self._save_toml(self.registry_path, registry)
+ logger.info(f"Switched to configuration: {config_name}")
+
+ def list_configs(self) -> dict:
+ """Return a list of available configurations."""
+ return self._load_toml(self.registry_path)["configs"]
+
+ def add_config(self, config_name: str, config_data: dict):
+ """Add a new configuration."""
+ registry = self._load_toml(self.registry_path)
+ if config_name in registry["configs"]:
+ raise ValueError(f"Config '{config_name}' already exists.")
+ config_path = self.config_dir / f"{config_name}.toml"
+ self._save_toml(config_path, config_data)
+ registry["configs"][config_name] = str(config_path)
+ self._save_toml(self.registry_path, registry)
+
+ def remove_config(self, config_name: str):
+ """Remove a configuration."""
+ registry = self._load_toml(self.registry_path)
+ if config_name == "default":
+ raise ValueError("Cannot remove the default configuration.")
+ if config_name not in registry["configs"]:
+ raise ValueError(f"Config '{config_name}' not found.")
+ del registry["configs"][config_name]
+ config_path = self.config_dir / f"{config_name}.toml"
+ config_path.unlink()
+
+ self._save_toml(self.registry_path, registry)
+ logger.info(f"Removed configuration: {config_name}")
+ if registry["active"] not in registry["configs"]:
+ self.switch_config("default")
+ logger.info("active configuration doesnot exist anymore : Switch to default configuration")
+
+
+
+
+
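
To make the registry workflow above concrete, here is a usage sketch built only from the methods defined in this file; the configuration name "laptop" and the commented path are illustrative:

    from esgvoc.core.service.configuration.config_manager import ConfigManager
    from esgvoc.core.service.configuration.setting import ServiceSettings

    cm = ConfigManager(ServiceSettings, app_name="esgvoc", app_author="ipsl",
                       default_settings=ServiceSettings.DEFAULT_SETTINGS)

    print(cm.list_configs())                    # e.g. {"default": ".../default_setting.toml"}
    cm.add_config("laptop", ServiceSettings.DEFAULT_SETTINGS)  # writes laptop.toml and registers it
    cm.switch_config("laptop")                  # flips "active" in config_registry.toml
    settings = cm.get_active_config()           # ServiceSettings loaded from laptop.toml
    cm.remove_config("laptop")                  # deletes laptop.toml and falls back to "default"
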
esgvoc/core/service/configuration/setting.py ADDED
@@ -0,0 +1,88 @@
+ from typing import ClassVar, Dict, Optional
+ import toml
+ from pydantic import BaseModel, Field
+
+
+ class ProjectSettings(BaseModel):
+ project_name: str
+ github_repo: str
+ branch: Optional[str] = "main"
+ local_path: Optional[str] = None
+ db_path: Optional[str] = None
+
+
+ class UniverseSettings(BaseModel):
+ github_repo: str
+ branch: Optional[str] = None
+ local_path: Optional[str] = None
+ db_path: Optional[str] = None
+
+
+ class ServiceSettings(BaseModel):
+ universe: UniverseSettings
+ projects: Dict[str, ProjectSettings] = Field(default_factory=dict)
+
+ # 🔹 Define default settings
+ DEFAULT_SETTINGS : ClassVar[dict]= {
+ "universe": {
+ "github_repo": "https://github.com/WCRP-CMIP/WCRP-universe",
+ "branch": "esgvoc",
+ "local_path": "repos/WCRP-universe",
+ "db_path": "dbs/universe.sqlite",
+ },
+ "projects": [
+ {
+ "project_name": "cmip6",
+ "github_repo": "https://github.com/WCRP-CMIP/CMIP6_CVs",
+ "branch": "esgvoc",
+ "local_path": "repos/CMIP6_CVs",
+ "db_path": "dbs/cmip6.sqlite",
+ },
+ {
+ "project_name": "cmip6plus",
+ "github_repo": "https://github.com/WCRP-CMIP/CMIP6Plus_CVs",
+ "branch": "esgvoc",
+ "local_path": "repos/CMIP6Plus_CVs",
+ "db_path": "dbs/cmip6plus.sqlite",
+ },
+ ],
+ }
+
+ @classmethod
+ def load_from_file(cls, file_path: str) -> "ServiceSettings":
+ """Load configuration from a TOML file, falling back to defaults if necessary."""
+ try:
+ data = toml.load(file_path)
+ except FileNotFoundError:
+ data = cls.DEFAULT_SETTINGS # Use defaults if the file is missing
+
+ projects = {p["project_name"]: ProjectSettings(**p) for p in data.pop("projects", [])}
+ return cls(universe=UniverseSettings(**data["universe"]), projects=projects)
+
+ @classmethod
+ def load_default(cls) -> "ServiceSettings":
+ """Load default settings."""
+ return cls.load_from_dict(cls.DEFAULT_SETTINGS)
+
+ @classmethod
+ def load_from_dict(cls, config_data: dict) -> "ServiceSettings":
+ """Load configuration from a dictionary."""
+ projects = {p["project_name"]: ProjectSettings(**p) for p in config_data.get("projects", [])}
+ return cls(universe=UniverseSettings(**config_data["universe"]), projects=projects)
+
+ def save_to_file(self, file_path: str):
+ """Save the configuration to a TOML file."""
+ data = {
+ "universe": self.universe.model_dump(),
+ "projects": [p.model_dump() for p in self.projects.values()],
+ }
+ with open(file_path, "w") as f:
+ toml.dump(data, f)
+
+ def dump(self)->dict:
+ data = {
+ "universe": self.universe.model_dump(),
+ "projects": [p.model_dump() for p in self.projects.values()],
+ }
+ return data
+
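
Finally, a round-trip sketch of ServiceSettings using only the defaults and methods defined above; the file name settings_example.toml is arbitrary:

    from esgvoc.core.service.configuration.setting import ServiceSettings

    settings = ServiceSettings.load_default()
    settings.save_to_file("settings_example.toml")   # a [universe] table plus a [[projects]] array

    reloaded = ServiceSettings.load_from_file("settings_example.toml")
    assert set(reloaded.projects) == {"cmip6", "cmip6plus"}
    print(reloaded.universe.github_repo)             # https://github.com/WCRP-CMIP/WCRP-universe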