esgvoc 0.1.2__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of esgvoc might be problematic.
- esgvoc/__init__.py +3 -1
- esgvoc/api/__init__.py +30 -30
- esgvoc/api/_utils.py +28 -14
- esgvoc/api/data_descriptors/__init__.py +19 -10
- esgvoc/api/data_descriptors/activity.py +8 -45
- esgvoc/api/data_descriptors/area_label.py +6 -0
- esgvoc/api/data_descriptors/branded_suffix.py +5 -0
- esgvoc/api/data_descriptors/branded_variable.py +5 -0
- esgvoc/api/data_descriptors/consortium.py +16 -56
- esgvoc/api/data_descriptors/data_descriptor.py +106 -0
- esgvoc/api/data_descriptors/date.py +3 -46
- esgvoc/api/data_descriptors/directory_date.py +5 -0
- esgvoc/api/data_descriptors/experiment.py +19 -54
- esgvoc/api/data_descriptors/forcing_index.py +3 -45
- esgvoc/api/data_descriptors/frequency.py +6 -43
- esgvoc/api/data_descriptors/grid_label.py +6 -44
- esgvoc/api/data_descriptors/horizontal_label.py +6 -0
- esgvoc/api/data_descriptors/initialisation_index.py +3 -44
- esgvoc/api/data_descriptors/institution.py +11 -54
- esgvoc/api/data_descriptors/license.py +4 -44
- esgvoc/api/data_descriptors/mip_era.py +6 -44
- esgvoc/api/data_descriptors/model_component.py +7 -45
- esgvoc/api/data_descriptors/organisation.py +3 -40
- esgvoc/api/data_descriptors/physic_index.py +3 -45
- esgvoc/api/data_descriptors/product.py +4 -43
- esgvoc/api/data_descriptors/realisation_index.py +3 -44
- esgvoc/api/data_descriptors/realm.py +4 -42
- esgvoc/api/data_descriptors/resolution.py +6 -44
- esgvoc/api/data_descriptors/source.py +18 -53
- esgvoc/api/data_descriptors/source_type.py +3 -41
- esgvoc/api/data_descriptors/sub_experiment.py +3 -41
- esgvoc/api/data_descriptors/table.py +6 -48
- esgvoc/api/data_descriptors/temporal_label.py +6 -0
- esgvoc/api/data_descriptors/time_range.py +3 -27
- esgvoc/api/data_descriptors/variable.py +13 -71
- esgvoc/api/data_descriptors/variant_label.py +3 -47
- esgvoc/api/data_descriptors/vertical_label.py +5 -0
- esgvoc/api/project_specs.py +82 -0
- esgvoc/api/projects.py +284 -238
- esgvoc/api/report.py +89 -52
- esgvoc/api/search.py +31 -11
- esgvoc/api/universe.py +57 -48
- esgvoc/apps/__init__.py +6 -0
- esgvoc/apps/drs/__init__.py +0 -16
- esgvoc/apps/drs/constants.py +2 -0
- esgvoc/apps/drs/generator.py +429 -0
- esgvoc/apps/drs/report.py +492 -0
- esgvoc/apps/drs/validator.py +330 -0
- esgvoc/cli/drs.py +248 -0
- esgvoc/cli/get.py +26 -25
- esgvoc/cli/install.py +11 -8
- esgvoc/cli/main.py +4 -5
- esgvoc/cli/status.py +14 -2
- esgvoc/cli/valid.py +41 -45
- esgvoc/core/db/models/mixins.py +7 -0
- esgvoc/core/db/models/project.py +3 -8
- esgvoc/core/db/models/universe.py +3 -3
- esgvoc/core/db/project_ingestion.py +4 -1
- esgvoc/core/db/universe_ingestion.py +8 -7
- esgvoc/core/logging_handler.py +1 -1
- esgvoc/core/repo_fetcher.py +4 -3
- esgvoc/core/service/__init__.py +37 -5
- esgvoc/core/service/configuration/config_manager.py +188 -0
- esgvoc/core/service/configuration/setting.py +88 -0
- esgvoc/core/service/state.py +66 -42
- esgvoc-0.3.0.dist-info/METADATA +89 -0
- esgvoc-0.3.0.dist-info/RECORD +78 -0
- esgvoc-0.3.0.dist-info/licenses/LICENSE.txt +519 -0
- esgvoc/apps/drs/models.py +0 -43
- esgvoc/apps/drs/parser.py +0 -27
- esgvoc/cli/config.py +0 -79
- esgvoc/core/service/settings.py +0 -64
- esgvoc/core/service/settings.toml +0 -12
- esgvoc/core/service/settings_default.toml +0 -20
- esgvoc-0.1.2.dist-info/METADATA +0 -54
- esgvoc-0.1.2.dist-info/RECORD +0 -66
- {esgvoc-0.1.2.dist-info → esgvoc-0.3.0.dist-info}/WHEEL +0 -0
- {esgvoc-0.1.2.dist-info → esgvoc-0.3.0.dist-info}/entry_points.txt +0 -0
esgvoc/cli/main.py
CHANGED
@@ -1,22 +1,21 @@
-
 import typer
-from esgvoc.cli.config import app as config_app
 from esgvoc.cli.get import app as get_app
 from esgvoc.cli.status import app as status_app
 from esgvoc.cli.valid import app as valid_app
 from esgvoc.cli.install import app as install_app
-
-
+from esgvoc.cli.drs import app as drs_app
 app = typer.Typer()
 
 # Register the subcommands
-app.add_typer(config_app)
 app.add_typer(get_app)
 app.add_typer(status_app)
 app.add_typer(valid_app)
 app.add_typer(install_app)
+app.add_typer(drs_app)
 
 def main():
     app()
+
+
 if __name__ == "__main__":
     main()
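For context, the change above drops the removed config sub-app and registers a new drs sub-app through Typer. A minimal sketch of the same registration pattern, with a hypothetical hello command standing in for the real DRS commands:

import typer

# Hypothetical sub-application; the real esgvoc.cli.drs module defines its own commands.
drs_app = typer.Typer(name="drs")

@drs_app.command()
def hello(name: str = "world"):
    """Placeholder command illustrating how commands attach to the sub-app."""
    typer.echo(f"hello {name}")

app = typer.Typer()
app.add_typer(drs_app)  # same call the diff adds in esgvoc/cli/main.py

if __name__ == "__main__":
    app()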
esgvoc/cli/status.py
CHANGED
@@ -1,4 +1,5 @@
 from esgvoc.core import service
+from rich.table import Table
 import typer
 from rich.console import Console
 
@@ -19,8 +20,19 @@ def status():
     i.e summary of version of usable ressources (between remote/cached)
 
     """
+    assert(service.current_state is not None)
+    service.current_state.get_state_summary()
+    #display(service.state_service.table())
 
-
-
+
+    table = Table(show_header=False, show_lines=True)
+
+    table.add_row("","Remote github repo","Local repository","Cache Database", style = "bright_green")
+    table.add_row("Universe path",service.current_state.universe.github_repo,service.current_state.universe.local_path,service.current_state.universe.db_path, style = "white")
+    table.add_row("Version",service.current_state.universe.github_version,service.current_state.universe.local_version,service.current_state.universe.db_version, style="bright_blue")
+    for proj_name,proj in service.current_state.projects.items():
+        table.add_row(f"{proj_name} path",proj.github_repo,proj.local_path,proj.db_path, style="white")
+        table.add_row("Version",proj.github_version,proj.local_version,proj.db_version,style ="bright_blue")
+    display(table)
 
 
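The rewritten status command builds a Rich Table and hands it to a display helper. A minimal sketch of the same table layout, printing through rich.console.Console directly; the column values here are placeholders rather than real esgvoc state, and the CLI's display() helper is assumed to wrap a call like this:

from rich.console import Console
from rich.table import Table

table = Table(show_header=False, show_lines=True)
table.add_row("", "Remote github repo", "Local repository", "Cache Database", style="bright_green")
table.add_row("Universe path", "<github repo>", "<local path>", "<db path>", style="white")
table.add_row("Version", "<github version>", "<local version>", "<db version>", style="bright_blue")

Console().print(table)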
esgvoc/cli/valid.py
CHANGED
@@ -6,10 +6,8 @@ from esgvoc.api.projects import (
     valid_term_in_project,
     valid_term_in_all_projects
 )
-from esgvoc.api import BasicValidationErrorVisitor
 from requests import logging
 from rich.table import Table
-from sqlmodel import except_
 import typer
 import re
 from rich.console import Console
@@ -40,52 +38,52 @@ def valid(
     verbose: bool = typer.Option(False, "-v", "--verbose", help="Provide detailed validation results")
 ):
     """
-    Validates one or more strings against specified Project:Collection:Term configurations
-
-    Depending on the provided key structure, the function performs different validation operations
-    - If all are None (e.g., "::"), validates the term across all projects (`valid_term_in_all_projects`)
-    - If Term is None (e.g., "Project:Collection:"), validates the term in the specified collection (`valid_term_in_collection`)
-    - If Term and Collection are None (e.g., "Project::"), validates the term in the specified project (`valid_term_in_project`)
-    - If all are specified (e.g., "Project:Collection:Term"), validates the term exactly (`valid_term`)
-
-    Parameters
-
-    - A string to validate
-    - A key in the form '<Project:Collection:Term>'
-    Usage
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    Returns
-
+    Validates one or more strings against specified Project:Collection:Term configurations.\n
+    \n
+    Depending on the provided key structure, the function performs different validation operations:\n
+    - If all are None (e.g., "::"), validates the term across all projects (`valid_term_in_all_projects`).\n
+    - If Term is None (e.g., "Project:Collection:"), validates the term in the specified collection (`valid_term_in_collection`).\n
+    - If Term and Collection are None (e.g., "Project::"), validates the term in the specified project (`valid_term_in_project`).\n
+    - If all are specified (e.g., "Project:Collection:Term"), validates the term exactly (`valid_term`).\n
+    \n
+    Parameters:\n
+    \tstrings_targets (List[str]): A list of validation pairs, where each pair consists of:\n
+    \t\t- A string to validate.\n
+    \t\t- A key in the form '<Project:Collection:Term>'.\n
+    Usage :\n
+    \tValid one:\n
+    \tesgvocab valid IPSL cmip6plus:institution_id:ipsl\n
+    \tesgvocab valid IPSL cmip6plus:institution_id:\n
+    \tesgvocab valid IPSL cmip6plus::\n
+    \tesgvocab valid IPSL ::\n
+    \n
+    \tUnvalid one:\n
+    \tesgvocab valid IPSL_invalid cmip6plus:institution_id:ipsl\n
+    \tesgvocab valid IPSL cmip6plus:institution_id:isl <= term cant be found\n
+    \tesgvocab valid IPSL cmip6plus:institutin_id:ispl <= collection cant be found\n
+    \tesgvocab valid IPSL cmip6pls:institution_id:ispl <= project cant be found\n
+    \n
+    \tMultiple validation for all known projects: \n
+    \tesgvocab valid IPSL :: IPS :: \n
+    \t\tresult will be [True, False]\n
+    \n
+    \tesgvocab valid --verbose IPS :: IPSL ::\n
+    \tresult will be \n
+    \t\t┏━━━━━━━━┳━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓\n
+    \t\t┃ String ┃ Key ┃ Result ┃ Errors ┃\n
+    \t\t┡━━━━━━━━╇━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩\n
+    \t\t│ IPS │ :: │ ❌ Invalid │ did not found matching term │\n
+    \t\t│ IPSL │ :: │ ✅ Valid │ None │\n
+    \t\t└────────┴─────┴────────────┴─────────────────────────────┘\n
+    Returns:\n
+    \tList[bool]: Validation results for each pair in the input.\n
     """
     results = []
     detailed_results = []
 
     # Combine string and target into pairs
     pairs = [strings_targets[i] + " " + strings_targets[i + 1] for i in range(0, len(strings_targets), 2)]
-
+
     # Validate each string against each target
     for validation in pairs:
         match = re.match(r"(.+)\s+([^:]*):([^:]*):([^:]*)", validation)
@@ -121,11 +119,9 @@ def valid(
         # Parse and collect errors for verbose mode
         if validation_result == []:
             detailed_results.append({"validation":validation, "errors":["did not found matching term"]})
-
             results.append(False)
         if project and collection and term and exception_message is None:
-
-            errors = [error.accept(visitor) for error in validation_result.errors]
+            errors = [str(error) for error in validation_result.errors]
             detailed_results.append({"validation": validation, "errors": errors})
         if exception_message is not None:
            detailed_results.append({"validation": validation, "errors": [exception_message]})
esgvoc/core/db/models/mixins.py
CHANGED
@@ -4,10 +4,17 @@ from sqlmodel import Field
 
 
 class TermKind(Enum):
+    """
+    The kinds of term.
+    """
     PLAIN = "plain"
+    """End written term."""
     PATTERN = "pattern"
+    """Regex based terms"""
     COMPOSITE = "composite"
+    """Term composed of terms."""
     MIXED = 'mixed'
+    """To be defined."""
 
 
 class PkMixin:
esgvoc/core/db/models/project.py
CHANGED
@@ -7,7 +7,6 @@ from sqlmodel import Column, Field, Relationship, SQLModel
 
 import esgvoc.core.db.connection as db
 from esgvoc.core.db.models.mixins import IdMixin, PkMixin, TermKind
-
 _LOGGER = logging.getLogger("project_db_creation")
 
 
@@ -34,13 +33,10 @@ class PTerm(SQLModel, PkMixin, IdMixin, table=True):
     kind: TermKind = Field(sa_column=Column(sa.Enum(TermKind)))
     collection_pk: int | None = Field(default=None, foreign_key="collections.pk")
     collection: Collection = Relationship(back_populates="terms")
+    __table_args__ = (sa.Index(
+        "drs_name_index", specs.sa_column["drs_name"]
 
-
-def create_drs_name_index():
-    PTerm.__table_args__ = sa.Index(
-        "drs_name_index", PTerm.__table__.c.specs["drs_name"]
-    )
-
+    ),)
 
 def project_create_db(db_file_path: Path):
     try:
@@ -53,7 +49,6 @@ def project_create_db(db_file_path: Path):
         tables_to_be_created = [SQLModel.metadata.tables['projects'],
                                 SQLModel.metadata.tables['collections'],
                                 SQLModel.metadata.tables['pterms']]
-        create_drs_name_index()
         SQLModel.metadata.create_all(connection.get_engine(), tables=tables_to_be_created)
     except Exception as e:
         msg = f'Unable to create tables in SQLite database at {db_file_path}. Abort.'
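The change above moves the drs_name index from a post-hoc create_drs_name_index() call into the model's __table_args__, so it is emitted together with the table. A minimal standalone SQLAlchemy sketch of an expression index over a JSON column, using a Core Table and an in-memory SQLite database purely for illustration (not the actual SQLModel classes):

import sqlalchemy as sa

metadata = sa.MetaData()
pterms = sa.Table(
    "pterms",
    metadata,
    sa.Column("pk", sa.Integer, primary_key=True),
    sa.Column("specs", sa.JSON),
)

# Expression index over specs["drs_name"], analogous to the drs_name_index in the diff.
sa.Index("drs_name_index", pterms.c.specs["drs_name"])

engine = sa.create_engine("sqlite:///:memory:")
metadata.create_all(engine)  # emits CREATE TABLE and CREATE INDEX together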
esgvoc/core/db/models/universe.py
CHANGED
@@ -14,10 +14,10 @@ _LOGGER = logging.getLogger("universe_db_creation")
 class Universe(SQLModel, PkMixin, table=True):
     __tablename__ = "universes"
     git_hash: str
-    data_descriptors: list["
+    data_descriptors: list["UDataDescriptor"] = Relationship(back_populates="universe")
 
 
-class
+class UDataDescriptor(SQLModel, PkMixin, IdMixin, table=True):
     __tablename__ = "data_descriptors"
     context: dict = Field(sa_column=sa.Column(JSON))
     universe_pk: int | None = Field(default=None, foreign_key="universes.pk")
@@ -33,7 +33,7 @@ class UTerm(SQLModel, PkMixin, IdMixin, table=True):
     data_descriptor_pk: int | None = Field(
         default=None, foreign_key="data_descriptors.pk"
     )
-    data_descriptor:
+    data_descriptor: UDataDescriptor = Relationship(back_populates="terms")
 
 
 def universe_create_db(db_file_path: Path) -> None:
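The renamed UDataDescriptor model keeps the usual SQLModel one-to-many wiring between a parent and its children. A minimal sketch of that Relationship(back_populates=...) pattern with generic names, not the actual esgvoc models:

from sqlmodel import Field, Relationship, SQLModel

class Parent(SQLModel, table=True):
    pk: int | None = Field(default=None, primary_key=True)
    # One parent owns many children; SQLModel resolves the string forward reference.
    children: list["Child"] = Relationship(back_populates="parent")

class Child(SQLModel, table=True):
    pk: int | None = Field(default=None, primary_key=True)
    parent_pk: int | None = Field(default=None, foreign_key="parent.pk")
    parent: Parent | None = Relationship(back_populates="children")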
esgvoc/core/db/project_ingestion.py
CHANGED
@@ -11,6 +11,7 @@ from pydantic import BaseModel
 import esgvoc.core.db.connection as db
 from esgvoc.core.db.connection import read_json_file
 from esgvoc.core.db.models.project import Collection, Project, PTerm
+import esgvoc.core.service as service
 
 
 _LOGGER = logging.getLogger("project_ingestion")
@@ -74,7 +75,9 @@ def ingest_collection(collection_dir_path: Path,
         if term_file_path.is_file() and term_file_path.suffix==".json":
             try:
                 json_specs = DataMerger(data=JsonLdResource(uri =str(term_file_path)),
-                                        locally_available={"https://espri-mod.github.io/mip-cmor-tables":".cache/repos/
+                                        # locally_available={"https://espri-mod.github.io/mip-cmor-tables":".cache/repos/WCRP-universe"}).merge_linked_json()[-1]
+                                        locally_available={"https://espri-mod.github.io/mip-cmor-tables":service.current_state.universe.local_path}).merge_linked_json()[-1]
+
                 term_kind = infer_term_kind(json_specs)
                 term_id = json_specs["id"]
 
esgvoc/core/db/universe_ingestion.py
CHANGED
@@ -9,8 +9,9 @@ from sqlmodel import Session, select
 import esgvoc.core.db.connection as db
 from esgvoc.core.db.connection import read_json_file
 from esgvoc.core.db.models.mixins import TermKind
-from esgvoc.core.db.models.universe import
+from esgvoc.core.db.models.universe import UDataDescriptor, UTerm, Universe
 from esgvoc.core.db.models.universe import universe_create_db
+import esgvoc.core.service as service
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -32,7 +33,7 @@ def ingest_universe(universe_repo_dir_path: Path, universe_db_file_path: Path) -
         raise IOError(msg) from e
 
     for data_descriptor_dir_path in universe_repo_dir_path.iterdir():
-        if data_descriptor_dir_path.is_dir() and (data_descriptor_dir_path / "000_context.jsonld").exists(): # TODO maybe put that in setting
+        if data_descriptor_dir_path.is_dir() and (data_descriptor_dir_path / "000_context.jsonld").exists():  # TODO maybe put that in setting
             try:
                 ingest_data_descriptor(data_descriptor_dir_path, connection)
             except Exception as e:
@@ -61,9 +62,8 @@ def ingest_data_descriptor(data_descriptor_path: Path,
         _LOGGER.warning(msg)
         return
 
-
     with connection.create_session() as session:
-        data_descriptor =
+        data_descriptor = UDataDescriptor(id=data_descriptor_id,
                                           context=context,
                                           term_kind="") # we ll know it only when we ll add a term (hypothesis all term have the same kind in a data_descriptor)
         term_kind_dd = None
@@ -72,9 +72,10 @@ def ingest_data_descriptor(data_descriptor_path: Path,
         for term_file_path in data_descriptor_path.iterdir():
             _LOGGER.debug(f"found term path : {term_file_path}, {term_file_path.suffix}")
             if term_file_path.is_file() and term_file_path.suffix == ".json":
+
                 try:
                     json_specs=DataMerger(data=JsonLdResource(uri=str(term_file_path)),
-                                          locally_available={"https://espri-mod.github.io/mip-cmor-tables":
+                                          locally_available={"https://espri-mod.github.io/mip-cmor-tables":service.current_state.universe.local_path}).merge_linked_json()[-1]
                     term_kind = infer_term_kind(json_specs)
                     term_id = json_specs["id"]
 
@@ -103,8 +104,8 @@ def get_universe_term(data_descriptor_id: str,
                       universe_db_session: Session) -> tuple[TermKind, dict]:
     statement = (
         select(UTerm)
-        .join(
-        .where(
+        .join(UDataDescriptor)
+        .where(UDataDescriptor.id == data_descriptor_id, UTerm.id == term_id)
     )
     results = universe_db_session.exec(statement)
     term = results.one()
esgvoc/core/logging_handler.py
CHANGED
esgvoc/core/repo_fetcher.py
CHANGED
@@ -153,7 +153,7 @@ class RepoFetcher:
         files = [item['name'] for item in contents if item['type'] == 'file']
         return files
 
-    def clone_repository(self, owner: str, repo: str, branch: Optional[str] = None):
+    def clone_repository(self, owner: str, repo: str, branch: Optional[str] = None, local_path: str|None = None):
         """
         Clone a GitHub repository to a target directory.
         :param owner: Repository owner
@@ -162,15 +162,16 @@ class RepoFetcher:
         :param branch: (Optional) The branch to clone. Clones the default branch if None.
         """
         repo_url = f"https://github.com/{owner}/{repo}.git"
+        destination = local_path if local_path else f"{self.repo_dir}/{repo}"
 
-        command = ["git", "clone", repo_url,
+        command = ["git", "clone", repo_url, destination]
         if branch:
             command.extend(["--branch", branch])
         with redirect_stdout_to_log():
 
             try:
                 subprocess.run(command, check=True)
-                _LOGGER.debug(f"Repository cloned successfully into {
+                _LOGGER.debug(f"Repository cloned successfully into {destination}")
             except subprocess.CalledProcessError:
                 try:
                     current_work_dir = os.getcwd()
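The new local_path parameter only changes where the clone lands; the command construction stays the same. A small standalone sketch of that destination-selection logic using plain subprocess, outside the RepoFetcher class; the repo_dir default here is only an illustration:

import subprocess

def clone(owner: str, repo: str, branch: str | None = None,
          local_path: str | None = None, repo_dir: str = ".cache/repos") -> None:
    repo_url = f"https://github.com/{owner}/{repo}.git"
    # Fall back to the fetcher's repository directory when no explicit path is given.
    destination = local_path if local_path else f"{repo_dir}/{repo}"
    command = ["git", "clone", repo_url, destination]
    if branch:
        command.extend(["--branch", branch])
    subprocess.run(command, check=True)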
esgvoc/core/service/__init__.py
CHANGED
@@ -1,8 +1,40 @@
-from esgvoc.core.service.
+# from esgvoc.core.service.config_register import ConfigManager
+# from esgvoc.core.service.settings import ServiceSettings
+# from esgvoc.core.service.state import StateService
+#
+# config_manager = ConfigManager()
+# active_setting = config_manager.get_active_config()
+# active_setting["base_dir"] = str(config_manager.config_dir / config_manager.get_active_config_name())
+# service_settings = ServiceSettings.from_config(active_setting)
+# state_service = StateService(service_settings)
+
+
+from esgvoc.core.service.configuration.config_manager import ConfigManager
+from esgvoc.core.service.configuration.setting import ServiceSettings
 from esgvoc.core.service.state import StateService
-from pathlib import Path
 
-
-
-
+config_manager : ConfigManager | None = None
+current_state : StateService | None = None
+
+def get_config_manager():
+    global config_manager
+    if config_manager is None:
+
+        config_manager = ConfigManager(ServiceSettings, app_name="esgvoc", app_author="ipsl", default_settings=ServiceSettings.DEFAULT_SETTINGS)
+        active_config_name= config_manager.get_active_config_name()
+        config_manager.data_config_dir = config_manager.data_dir / active_config_name
+        config_manager.data_config_dir.mkdir(parents=True, exist_ok=True)
+
+    return config_manager
+
+
+def get_state():
+    global current_state
+    if config_manager is not None:
+        current_state = StateService(config_manager.get_active_config())
+    return current_state
+
+# Singleton Access Function
+config_manager = get_config_manager()
+current_state = get_state()
 
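Other modules rely on these module-level singletons rather than building their own state; the ingestion changes earlier in this diff import the package and read service.current_state. A minimal usage sketch, with attribute names taken from the diffs in this release:

import esgvoc.core.service as service

manager = service.get_config_manager()   # lazily-created ConfigManager
print(manager.get_active_config_name())

state = service.get_state()              # StateService built from the active config
if state is not None:
    print(state.universe.local_path)     # e.g. what universe_ingestion.py reads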
esgvoc/core/service/configuration/config_manager.py
ADDED
@@ -0,0 +1,188 @@
+import toml
+import logging
+from pathlib import Path
+from platformdirs import PlatformDirs
+from typing import Type, TypeVar, Generic, Protocol
+
+# Setup logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+# Define a generic type for configuration
+T = TypeVar("T", bound="ConfigSchema")
+
+class ConfigSchema(Protocol):
+    """Protocol for application-specific configuration classes."""
+
+    @classmethod
+    def load_from_file(cls, file_path: str): ...
+
+    def save_to_file(self, file_path: str): ...
+
+class ConfigManager(Generic[T]):
+    def __init__(self, config_cls: Type[T], app_name: str, app_author: str, default_settings : dict | None = None ):
+        """
+        Initialize the configuration manager.
+        - config_cls: A class that implements `ConfigSchema` (e.g., ServiceSettings).
+        - app_name: Name of the application (used for directory paths).
+        - app_author: Name of the author/organization (used for directory paths).
+        """
+        self.config_cls = config_cls
+        self.dirs = PlatformDirs(app_name, app_author)
+
+        # Define standard paths
+        self.config_dir = Path(self.dirs.user_config_path).expanduser().resolve()
+        self.data_dir = Path(self.dirs.user_data_path).expanduser().resolve()
+        self.data_config_dir = None # depends on loaded settings
+
+        self.cache_dir = Path(self.dirs.user_cache_path).expanduser().resolve()
+
+
+        self.config_dir.mkdir(parents=True, exist_ok=True)
+        self.data_dir.mkdir(parents=True, exist_ok=True)
+        self.cache_dir.mkdir(parents=True, exist_ok=True)
+
+        self.registry_path = self.config_dir / "config_registry.toml"
+        self.default_config_path = self.config_dir / "default_setting.toml"
+        self.default_settings = default_settings
+        self._init_registry()
+
+    def _init_registry(self):
+        """Initialize the registry file if it doesn't exist."""
+        if not self.registry_path.exists():
+            logger.info("Initializing configuration registry...")
+            registry = {"configs": {"default": str(self.default_config_path)}, "active": "default"}
+            self._save_toml(self.registry_path, registry)
+        # Ensure the default settings file exists and save it if necessary
+        if not self.default_config_path.exists():
+            if self.default_settings:
+                logger.info("Saving default settings...")
+                self._save_toml(self.default_config_path, self.default_settings)
+            else:
+                logger.warning("No default settings provided.")
+
+    def _load_toml(self, path: Path) -> dict:
+        """Load TOML data from a file."""
+        if not path.exists():
+            logger.error(f"Configuration file not found: {path}")
+            raise FileNotFoundError(f"Configuration file not found: {path}")
+        with open(path, "r") as f:
+            return toml.load(f)
+
+    def _save_toml(self, path: Path, data: dict) -> None:
+        """Save TOML data to a file."""
+        with open(path, "w") as f:
+            toml.dump(data, f)
+
+    def _get_active_config_path(self) -> Path:
+        """Retrieve the path of the active configuration file."""
+        registry = self._load_toml(self.registry_path)
+        active_config_name = registry["active"]
+        return Path(registry["configs"][active_config_name])
+
+    def get_config(self, config_name:str) -> T:
+        """Load the configuration as an instance of the given config schema."""
+        registry = self._load_toml(self.registry_path)
+        if config_name not in registry["configs"]:
+            logger.error(f"Config '{config_name}' not found in registry.")
+            raise ValueError(f"Config '{config_name}' not found in registry.")
+        config_path = self.config_cls.load_from_file(registry["configs"][config_name])
+        return self.config_cls.load_from_file(str(config_path))
+
+    def get_active_config(self) -> T:
+        """Load the active configuration as an instance of the given config schema."""
+        active_config_path = self._get_active_config_path()
+
+        return self.config_cls.load_from_file(str(active_config_path))
+
+    def get_active_config_name(self) -> str:
+        """Retrieve the config name from the registry"""
+        registry = self._load_toml(self.registry_path)
+        return registry["active"]
+
+    def save_config(self, config_data: dict, name: str | None = None) -> None:
+        """Save the modified configuration to the corresponding file and update the registry."""
+
+        if name:
+            # If a name is provided, save the configuration with that name
+            config_path = self.config_dir / f"{name}.toml"
+            self._save_toml(config_path, config_data)
+
+            # Update the registry with the new config name
+            registry = self._load_toml(self.registry_path)
+            registry["configs"][name] = str(config_path)
+            registry["active"] = name
+            self._save_toml(self.registry_path, registry)
+
+            logger.info(f"Saved configuration to {config_path} and updated registry.")
+        else:
+            # If no name is provided, give the user a default name, like "user_config"
+            default_name = "user_config"
+            config_path = self.config_dir / f"{default_name}.toml"
+
+            # Check if the user_config already exists, if so, warn them
+            if config_path.exists():
+                logger.warning(f"{default_name}.toml already exists. Overwriting with the new config.")
+
+            # Save the configuration with the default name
+            self._save_toml(config_path, config_data)
+
+            # Update the registry with the new config name
+            registry = self._load_toml(self.registry_path)
+            registry["configs"][default_name] = str(config_path)
+            registry["active"] = default_name
+            self._save_toml(self.registry_path, registry)
+
+            logger.info(f"Saved new configuration to {config_path} and updated registry.")
+
+    def save_active_config(self, config: T):
+        """Save the current configuration to the active file."""
+        active_config_path = self._get_active_config_path()
+        config.save_to_file(str(active_config_path))
+
+    def switch_config(self, config_name: str):
+        """Switch to a different configuration."""
+        registry = self._load_toml(self.registry_path)
+        if config_name not in registry["configs"]:
+            logger.error(f"Config '{config_name}' not found in registry.")
+            raise ValueError(f"Config '{config_name}' not found in registry.")
+        registry["active"] = config_name
+
+        self._save_toml(self.registry_path, registry)
+        logger.info(f"Switched to configuration: {config_name}")
+
+    def list_configs(self) -> dict:
+        """Return a list of available configurations."""
+        return self._load_toml(self.registry_path)["configs"]
+
+    def add_config(self, config_name: str, config_data: dict):
+        """Add a new configuration."""
+        registry = self._load_toml(self.registry_path)
+        if config_name in registry["configs"]:
+            raise ValueError(f"Config '{config_name}' already exists.")
+        config_path = self.config_dir / f"{config_name}.toml"
+        self._save_toml(config_path, config_data)
+        registry["configs"][config_name] = str(config_path)
+        self._save_toml(self.registry_path, registry)
+
+    def remove_config(self, config_name: str):
+        """Remove a configuration."""
+        registry = self._load_toml(self.registry_path)
+        if config_name == "default":
+            raise ValueError("Cannot remove the default configuration.")
+        if config_name not in registry["configs"]:
+            raise ValueError(f"Config '{config_name}' not found.")
+        del registry["configs"][config_name]
+        config_path = self.config_dir / f"{config_name}.toml"
+        config_path.unlink()
+
+        self._save_toml(self.registry_path, registry)
+        logger.info(f"Removed configuration: {config_name}")
+        if registry["active"] not in registry["configs"]:
+            self.switch_config("default")
+            logger.info("active configuration doesnot exist anymore : Switch to default configuration")
+
+
+
+
+
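To make the registry flow above concrete, here is a hedged usage sketch: DemoSettings is a made-up class that merely satisfies the ConfigSchema protocol (the package itself passes ServiceSettings), and the directory locations come from platformdirs, so running this writes real files under the user's config directory.

import toml
from esgvoc.core.service.configuration.config_manager import ConfigManager

class DemoSettings:
    """Toy stand-in for ServiceSettings; only load_from_file/save_to_file are required."""
    # Hypothetical default content, not the real esgvoc settings schema.
    DEFAULT_SETTINGS = {"example": {"enabled": True}}

    def __init__(self, data: dict):
        self.data = data

    @classmethod
    def load_from_file(cls, file_path: str) -> "DemoSettings":
        with open(file_path) as f:
            return cls(toml.load(f))

    def save_to_file(self, file_path: str) -> None:
        with open(file_path, "w") as f:
            toml.dump(self.data, f)

manager = ConfigManager(DemoSettings, app_name="esgvoc-demo", app_author="ipsl",
                        default_settings=DemoSettings.DEFAULT_SETTINGS)
print(manager.get_active_config_name())   # "default" on a fresh machine
print(manager.list_configs())             # {"default": ".../default_setting.toml"}
settings = manager.get_active_config()    # DemoSettings loaded from the active TOML file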