esgvoc 0.3.0__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of esgvoc might be problematic.

@@ -1,21 +1,22 @@
 import logging
 from pathlib import Path

-import esgvoc.core.constants
-from esgvoc.core.data_handler import JsonLdResource
-from esgvoc.core.db.connection import DBConnection
-from esgvoc.core.service.data_merger import DataMerger
-from esgvoc.core.db.models.mixins import TermKind
 from pydantic import BaseModel
+from sqlalchemy import text

+import esgvoc.core.constants
 import esgvoc.core.db.connection as db
-from esgvoc.core.db.connection import read_json_file
+import esgvoc.core.service as service
+from esgvoc.core.data_handler import JsonLdResource
+from esgvoc.core.db.connection import DBConnection, read_json_file
+from esgvoc.core.db.models.mixins import TermKind
 from esgvoc.core.db.models.project import Collection, Project, PTerm
-import esgvoc.core.service as service
-
+from esgvoc.core.exceptions import EsgvocDbError
+from esgvoc.core.service.data_merger import DataMerger

 _LOGGER = logging.getLogger("project_ingestion")

+
 def infer_term_kind(json_specs: dict) -> TermKind:
     if esgvoc.core.constants.PATTERN_JSON_KEY in json_specs:
         return TermKind.PATTERN
@@ -25,15 +26,16 @@ def infer_term_kind(json_specs: dict) -> TermKind:
         return TermKind.PLAIN


-def ingest_metadata_project(connection:DBConnection,git_hash):
+def ingest_metadata_project(connection: DBConnection, git_hash):
     with connection.create_session() as session:
-        project = Project(id=str(connection.file_path.stem), git_hash=git_hash,specs={})
-        session.add(project)
+        project = Project(id=str(connection.file_path.stem), git_hash=git_hash, specs={})
+        session.add(project)
         session.commit()

-###############################
+
 def get_data_descriptor_id_from_context(collection_context: dict) -> str:
-    data_descriptor_url = collection_context[esgvoc.core.constants.CONTEXT_JSON_KEY][esgvoc.core.constants.DATA_DESCRIPTOR_JSON_KEY]
+    data_descriptor_url = collection_context[esgvoc.core.constants.CONTEXT_JSON_KEY]\
+        [esgvoc.core.constants.DATA_DESCRIPTOR_JSON_KEY]  # noqa E211
     return Path(data_descriptor_url).name


@@ -50,40 +52,40 @@ def instantiate_project_term(universe_term_json_specs: dict,
 def ingest_collection(collection_dir_path: Path,
                       project: Project,
                       project_db_session) -> None:
-
-
     collection_id = collection_dir_path.name
     collection_context_file_path = collection_dir_path.joinpath(esgvoc.core.constants.CONTEXT_FILENAME)
     try:
         collection_context = read_json_file(collection_context_file_path)
         data_descriptor_id = get_data_descriptor_id_from_context(collection_context)
     except Exception as e:
-        msg = f'Unable to read project context file {collection_context_file_path}. Abort.'
+        msg = f'unable to read project context file {collection_context_file_path}'
         _LOGGER.fatal(msg)
-        raise RuntimeError(msg) from e
+        raise EsgvocDbError(msg) from e
     # [KEEP]
     collection = Collection(
         id=collection_id,
         context=collection_context,
         project=project,
         data_descriptor_id=data_descriptor_id,
-        term_kind="") # we ll know it only when we ll add a term (hypothesis all term have the same kind in a collection
+        term_kind="")  # We ll know it only when we ll add a term
+                       # (hypothesis all term have the same kind in a collection)  # noqa E116
     term_kind_collection = None

     for term_file_path in collection_dir_path.iterdir():
         _LOGGER.debug(f"found term path : {term_file_path}")
-        if term_file_path.is_file() and term_file_path.suffix==".json":
+        if term_file_path.is_file() and term_file_path.suffix == ".json":
             try:
-                json_specs = DataMerger(data=JsonLdResource(uri =str(term_file_path)),
+                locally_avail = {"https://espri-mod.github.io/mip-cmor-tables":
+                                 service.current_state.universe.local_path}
+                json_specs = DataMerger(data=JsonLdResource(uri=str(term_file_path)),
                                         # locally_available={"https://espri-mod.github.io/mip-cmor-tables":".cache/repos/WCRP-universe"}).merge_linked_json()[-1]
-                                        locally_available={"https://espri-mod.github.io/mip-cmor-tables":service.current_state.universe.local_path}).merge_linked_json()[-1]
-
+                                        locally_available=locally_avail).merge_linked_json()[-1]
                 term_kind = infer_term_kind(json_specs)
                 term_id = json_specs["id"]

                 if term_kind_collection is None:
                     term_kind_collection = term_kind
-
+
             except Exception as e:
                 _LOGGER.warning(f'Unable to read term {term_file_path}. Skip.\n{str(e)}')
                 continue
@@ -105,51 +107,63 @@ def ingest_collection(collection_dir_path: Path,
     collection.term_kind = term_kind_collection
     project_db_session.add(collection)

+
 def ingest_project(project_dir_path: Path,
                    project_db_file_path: Path,
-                   git_hash : str
+                   git_hash: str
                    ):
     try:
         project_connection = db.DBConnection(project_db_file_path)
     except Exception as e:
-        msg = f'Unable to read project SQLite file at {project_db_file_path}. Abort.'
+        msg = f'unable to read project SQLite file at {project_db_file_path}'
         _LOGGER.fatal(msg)
-        raise RuntimeError(msg) from e
-
+        raise EsgvocDbError(msg) from e
+
     with project_connection.create_session() as project_db_session:
+        project_specs_file_path = project_dir_path.joinpath(esgvoc.core.constants.PROJECT_SPECS_FILENAME)
         try:
-            project_specs_file_path = project_dir_path.joinpath(esgvoc.core.constants.PROJECT_SPECS_FILENAME)
             project_json_specs = read_json_file(project_specs_file_path)
             project_id = project_json_specs[esgvoc.core.constants.PROJECT_ID_JSON_KEY]
         except Exception as e:
-            msg = f'Unable to read project specs file {project_specs_file_path}. Abort.'
+            msg = f'unable to read project specs file {project_specs_file_path}'
             _LOGGER.fatal(msg)
-            raise RuntimeError(msg) from e
-
-        project = Project(id=project_id, specs=project_json_specs,git_hash=git_hash)
+            raise EsgvocDbError(msg) from e
+
+        project = Project(id=project_id, specs=project_json_specs, git_hash=git_hash)
         project_db_session.add(project)
-

         for collection_dir_path in project_dir_path.iterdir():
-            if collection_dir_path.is_dir() and (collection_dir_path / "000_context.jsonld").exists(): #TODO maybe put that in settings
+            # TODO maybe put that in settings
+            if collection_dir_path.is_dir() and (collection_dir_path / "000_context.jsonld").exists():
                 _LOGGER.debug(f"found collection dir : {collection_dir_path}")
                 try:
                     ingest_collection(collection_dir_path,
                                       project,
                                       project_db_session)
                 except Exception as e:
-                    msg = f'Unexpected error while ingesting collection {collection_dir_path}. Abort.'
+                    msg = f'unexpected error while ingesting collection {collection_dir_path}'
                     _LOGGER.fatal(msg)
-                    raise RuntimeError(msg) from e
+                    raise EsgvocDbError(msg) from e
         project_db_session.commit()

-
-
-
-
-
-
-
-
-
-
+        # Well, the following instructions are not data duplication. It is more building an index.
+        # Read: https://sqlite.org/fts5.html
+        try:
+            sql_query = 'INSERT INTO pterms_fts5(pk, id, specs, kind, collection_pk) ' + \
+                        'SELECT pk, id, specs, kind, collection_pk FROM pterms;'  # noqa: S608
+            project_db_session.exec(text(sql_query))  # type: ignore
+        except Exception as e:
+            msg = f'unable to insert rows into pterms_fts5 table for {project_db_file_path}'
+            _LOGGER.fatal(msg)
+            raise EsgvocDbError(msg) from e
+        project_db_session.commit()
+        try:
+            sql_query = 'INSERT INTO pcollections_fts5(pk, id, data_descriptor_id, context, ' + \
+                        'project_pk, term_kind) SELECT pk, id, data_descriptor_id, context, ' + \
+                        'project_pk, term_kind FROM collections;'  # noqa: S608
+            project_db_session.exec(text(sql_query))  # type: ignore
+        except Exception as e:
+            msg = f'unable to insert rows into pcollections_fts5 table for {project_db_file_path}'
+            _LOGGER.fatal(msg)
+            raise EsgvocDbError(msg) from e
+        project_db_session.commit()
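Both ingestion modules now copy the ingested rows into SQLite FTS5 virtual tables (pterms_fts5 and pcollections_fts5 here; uterms_fts5 and udata_descriptors_fts5 for the universe), so that term specs can be searched with full-text queries. As a minimal sketch of what querying such an index could look like with the standard sqlite3 module, assuming a hypothetical project database under .cache/dbs/ (the search API actually shipped in esgvoc/api/search.py may differ):

import sqlite3

# Hypothetical path; the diff shows esgvoc keeping its databases under .cache/dbs/.
connection = sqlite3.connect(".cache/dbs/some_project.sqlite")
# FTS5 MATCH query against the index populated by the ingestion code above.
rows = connection.execute(
    "SELECT id, collection_pk FROM pterms_fts5 WHERE specs MATCH ? ORDER BY rank",
    ("ocean",),
).fetchall()
for term_id, collection_pk in rows:
    print(term_id, collection_pk)
connection.close()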
@@ -1,20 +1,22 @@
 import logging
 from pathlib import Path

-import esgvoc.core.constants
-from esgvoc.core.data_handler import JsonLdResource
-from esgvoc.core.service.data_merger import DataMerger
+from sqlalchemy import text
 from sqlmodel import Session, select

+import esgvoc.core.constants
 import esgvoc.core.db.connection as db
+import esgvoc.core.service as service
+from esgvoc.core.data_handler import JsonLdResource
 from esgvoc.core.db.connection import read_json_file
 from esgvoc.core.db.models.mixins import TermKind
-from esgvoc.core.db.models.universe import UDataDescriptor, UTerm, Universe
-from esgvoc.core.db.models.universe import universe_create_db
-import esgvoc.core.service as service
+from esgvoc.core.db.models.universe import UDataDescriptor, Universe, UTerm, universe_create_db
+from esgvoc.core.exceptions import EsgvocDbError
+from esgvoc.core.service.data_merger import DataMerger

 _LOGGER = logging.getLogger(__name__)

+
 def infer_term_kind(json_specs: dict) -> TermKind:
     if esgvoc.core.constants.PATTERN_JSON_KEY in json_specs:
         return TermKind.PATTERN
@@ -32,25 +34,48 @@ def ingest_universe(universe_repo_dir_path: Path, universe_db_file_path: Path) -
         _LOGGER.fatal(msg)
         raise IOError(msg) from e

-    for data_descriptor_dir_path in universe_repo_dir_path.iterdir():
-        if data_descriptor_dir_path.is_dir() and (data_descriptor_dir_path / "000_context.jsonld").exists(): # TODO maybe put that in setting
+    for data_descriptor_dir_path in universe_repo_dir_path.iterdir():
+        if data_descriptor_dir_path.is_dir() and \
+           (data_descriptor_dir_path / "000_context.jsonld").exists():  # TODO may be put that in setting
             try:
                 ingest_data_descriptor(data_descriptor_dir_path, connection)
             except Exception as e:
-                msg = f'Unexpected error while processing data descriptor {data_descriptor_dir_path}. Abort.'
+                msg = f'unexpected error while processing data descriptor {data_descriptor_dir_path}'
                 _LOGGER.fatal(msg)
-                raise RuntimeError(msg) from e
-
-def ingest_metadata_universe(connection,git_hash):
+                raise EsgvocDbError(msg) from e
+
+    with connection.create_session() as session:
+        # Well, the following instructions are not data duplication. It is more building an index.
+        # Read: https://sqlite.org/fts5.html
+        try:
+            sql_query = 'INSERT INTO uterms_fts5(pk, id, specs, kind, data_descriptor_pk) ' + \
+                        'SELECT pk, id, specs, kind, data_descriptor_pk FROM uterms;'  # noqa: S608
+            session.exec(text(sql_query))  # type: ignore
+        except Exception as e:
+            msg = f'unable to insert rows into uterms_fts5 table for {universe_db_file_path}'
+            _LOGGER.fatal(msg)
+            raise EsgvocDbError(msg) from e
+        session.commit()
+        try:
+            sql_query = 'INSERT INTO udata_descriptors_fts5(pk, id, universe_pk, context, term_kind) ' + \
+                        'SELECT pk, id, universe_pk, context, term_kind FROM udata_descriptors;'  # noqa: S608
+            session.exec(text(sql_query))  # type: ignore
+        except Exception as e:
+            msg = f'unable to insert rows into udata_descriptors_fts5 table for {universe_db_file_path}'
+            _LOGGER.fatal(msg)
+            raise EsgvocDbError(msg) from e
+        session.commit()
+
+
+def ingest_metadata_universe(connection, git_hash):
     with connection.create_session() as session:
         universe = Universe(git_hash=git_hash)
-        session.add(universe)
+        session.add(universe)
         session.commit()

+
 def ingest_data_descriptor(data_descriptor_path: Path,
                            connection: db.DBConnection) -> None:
-
-
     data_descriptor_id = data_descriptor_path.name

     context_file_path = data_descriptor_path.joinpath(esgvoc.core.constants.CONTEXT_FILENAME)
@@ -60,22 +85,24 @@ def ingest_data_descriptor(data_descriptor_path: Path,
         msg = f'Unable to read the context file {context_file_path} of data descriptor \
               {data_descriptor_id}. Skip.\n{str(e)}'
         _LOGGER.warning(msg)
-        return
+        return

     with connection.create_session() as session:
+        # We ll know it only when we ll add a term (hypothesis all term have the same kind in a data_descriptor)
         data_descriptor = UDataDescriptor(id=data_descriptor_id,
-                                          context=context,
-                                          term_kind="") # we ll know it only when we ll add a term (hypothesis all term have the same kind in a data_descriptor)
+                                          context=context,
+                                          term_kind="")
         term_kind_dd = None

         _LOGGER.debug(f"add data_descriptor : {data_descriptor_id}")
         for term_file_path in data_descriptor_path.iterdir():
             _LOGGER.debug(f"found term path : {term_file_path}, {term_file_path.suffix}")
             if term_file_path.is_file() and term_file_path.suffix == ".json":
-
                 try:
-                    json_specs=DataMerger(data=JsonLdResource(uri=str(term_file_path)),
-                                          locally_available={"https://espri-mod.github.io/mip-cmor-tables":service.current_state.universe.local_path}).merge_linked_json()[-1]
+                    locally_available = {"https://espri-mod.github.io/mip-cmor-tables":
+                                         service.current_state.universe.local_path}
+                    json_specs = DataMerger(data=JsonLdResource(uri=str(term_file_path)),
+                                            locally_available=locally_available).merge_linked_json()[-1]
                     term_kind = infer_term_kind(json_specs)
                     term_id = json_specs["id"]

@@ -83,7 +110,8 @@ def ingest_data_descriptor(data_descriptor_path: Path,
                         term_kind_dd = term_kind

                 except Exception as e:
-                    _LOGGER.warning(f'Unable to read term {term_file_path} for data descriptor {data_descriptor_path}. Skip.\n{str(e)}')
+                    _LOGGER.warning(f'Unable to read term {term_file_path} for data descriptor ' +
+                                    f'{data_descriptor_path}. Skip.\n{str(e)}')
                     continue
             if term_id and json_specs and data_descriptor and term_kind:
                 _LOGGER.debug("adding {term_id}")
@@ -99,9 +127,10 @@ def ingest_data_descriptor(data_descriptor_path: Path,
         session.add(data_descriptor)
         session.commit()

+
 def get_universe_term(data_descriptor_id: str,
-                      term_id: str,
-                      universe_db_session: Session) -> tuple[TermKind, dict]:
+                      term_id: str,
+                      universe_db_session: Session) -> tuple[TermKind, dict]:
     statement = (
         select(UTerm)
         .join(UDataDescriptor)
@@ -113,9 +142,8 @@ def get_universe_term(data_descriptor_id: str,


 if __name__ == "__main__":
-    #ingest_universe(db.UNIVERSE_DIR_PATH, db.UNIVERSE_DB_FILE_PATH)
     import os
     root_dir = Path(str(os.getcwd())).parent.parent
     print(root_dir)
-    universe_create_db(root_dir / Path(".cache/dbs/universe.sqlite"))
-    ingest_universe(root_dir / Path(".cache/repos/mip-cmor-tables"),root_dir / Path(".cache/dbs/universe.sqlite"))
+    universe_create_db(root_dir / Path(".cache/dbs/universe.sqlite"))
+    ingest_universe(root_dir / Path(".cache/repos/mip-cmor-tables"), root_dir / Path(".cache/dbs/universe.sqlite"))
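In both ingestion modules, the DataMerger call now receives its locally_available mapping through an intermediate variable; the mapping redirects the published JSON-LD base URI to a local clone of the universe repository instead of fetching it over the network. A rough sketch of that call in isolation, with a hypothetical term file path and a local clone path taken from the commented-out example above:

from esgvoc.core.data_handler import JsonLdResource
from esgvoc.core.service.data_merger import DataMerger

# Hypothetical term file and local clone of the universe repository.
term_file = ".cache/repos/mip-cmor-tables/institution/ipsl.json"
local_universe = ".cache/repos/WCRP-universe"
merger = DataMerger(data=JsonLdResource(uri=term_file),
                    locally_available={"https://espri-mod.github.io/mip-cmor-tables": local_universe})
# The last element of the merged chain is the fully resolved term specification.
json_specs = merger.merge_linked_json()[-1]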
@@ -0,0 +1,33 @@
+class EsgvocException(Exception):
+    """
+    Class base of all ESGVOC errors.
+    """
+    pass
+
+
+class EsgvocNotFoundError(EsgvocException):
+    """
+    Represents the not found errors.
+    """
+    pass
+
+
+class EsgvocValueError(EsgvocException):
+    """
+    Represents value errors.
+    """
+    pass
+
+
+class EsgvocDbError(EsgvocException):
+    """
+    Represents errors relative to data base management.
+    """
+    pass
+
+
+class EsgvocNotImplementedError(EsgvocException):
+    """
+    Represents not implemented errors.
+    """
+    pass
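The new esgvoc/core/exceptions.py module introduces the EsgvocException hierarchy used above, so callers can catch database failures (EsgvocDbError) separately from other esgvoc errors instead of a bare RuntimeError. A short, hypothetical usage sketch built on ingest_project as defined in this diff (the paths and git hash are placeholders):

from pathlib import Path

from esgvoc.core.db.project_ingestion import ingest_project
from esgvoc.core.exceptions import EsgvocDbError, EsgvocException

try:
    # Hypothetical repository and database paths.
    ingest_project(Path(".cache/repos/some_project_CVs"),
                   Path(".cache/dbs/some_project.sqlite"),
                   git_hash="0000000")
except EsgvocDbError as error:
    # Database-related failures (unreadable SQLite file, failed FTS5 inserts, ...).
    print(f"ingestion failed: {error}")
except EsgvocException as error:
    # Any other esgvoc-specific error.
    print(f"esgvoc error: {error}")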
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: esgvoc
-Version: 0.3.0
+Version: 0.4.0
 Summary: python library and CLI to interact with WCRP CVs
 Project-URL: Repository, https://github.com/ESGF/esgf-vocab
 Author-email: Sébastien Gardoll <sebastien@gardoll.fr>, Guillaume Levavasseur <guillaume.levavasseur@ipsl.fr>, Laurent Troussellier <laurent.troussellier@ipsl.fr>
@@ -1,11 +1,11 @@
-esgvoc/__init__.py,sha256=D1rteIBXx1OIT6Jg9Yqo4pOud1VZTXZI8vrhJxm7Nvs,66
-esgvoc/api/__init__.py,sha256=Oe6ch-MhVnN886jQAJ-OuyCI1eO3BxOtB_zvdbkVTXk,2764
-esgvoc/api/_utils.py,sha256=pddSwnqYqLua0r2SXX6098OaCDxWJxaW7zlt2Xarl9o,2149
-esgvoc/api/project_specs.py,sha256=uX1zqvTC8Dj5bOegZQVMAqU1VOle01s_8ncPXvtp0J4,2328
-esgvoc/api/projects.py,sha256=E-Jt3vlu4vhPpVUixDGc7SHSj1HrNeIYz7n3cd9GhWI,42306
-esgvoc/api/report.py,sha256=rAqOwYTqmzMguONT54YzNU1i7ZtmjYaCRD0Yz7mJsOI,3511
-esgvoc/api/search.py,sha256=yV00GjvC9ZRodmfTp9HhOSk5OBsKq1s6ZNfpfxkbCwc,4208
-esgvoc/api/universe.py,sha256=oFgYXCwVZP4HSgdGXYMFmGolSah1uCmfCMWKhNTuhog,10581
+esgvoc/__init__.py,sha256=1U7uasmBR5VzekoxCEz4GBNgpPcsEU5BwY-QOSW37qA,66
+esgvoc/api/__init__.py,sha256=w68CdVRS553bDWezZoCTxIFq_vsP7mFluSoO4yUo_Uc,4130
+esgvoc/api/project_specs.py,sha256=a-hEL-tXsfvwFeSU5PBnQZq_tqB7L67wxpB1ACah-no,2330
+esgvoc/api/projects.py,sha256=y6qcBSuW22a4nDB9V8lU6PFUj1j2SOrj3xtu9FKVyS4,57466
+esgvoc/api/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+esgvoc/api/report.py,sha256=OlK5ApnaikMKmC6FyJ1uTSBeTezZe85yTCJwsk14uZE,3526
+esgvoc/api/search.py,sha256=Ss_-EvCQHZc6pBTDJzPUrOE_ErTcdqa-zmLkcBjsVkk,6667
+esgvoc/api/universe.py,sha256=8aEvkTHWv9PEWz9hw_JYDdjJBbHzizi_RdVg9Ua-ook,21830
 esgvoc/api/data_descriptors/__init__.py,sha256=6ldSy7x4rnol7ZykPBn9xtnKq1bT_d8G19LIyJnc_mw,3248
 esgvoc/api/data_descriptors/activity.py,sha256=HZIaFqOVeYKmMJd1B_ihbKTAAAr13wr8IFISZI_jdO8,620
 esgvoc/api/data_descriptors/area_label.py,sha256=sJqHuuLeXqDajZfEatAWX7Jot6I0xnCd1gvFJaA2s_M,161
@@ -41,38 +41,40 @@ esgvoc/api/data_descriptors/variable.py,sha256=WtWlYRO0NLPRn1qe7Dt84doJWZYNCvCCq
 esgvoc/api/data_descriptors/variant_label.py,sha256=FL8nz0BfvJgKFjMmfBgNyRb8jcHaLBDLPpOvr6mBx9A,155
 esgvoc/api/data_descriptors/vertical_label.py,sha256=g2t-38eE-FY4H_aHrOj-ScZSPHIX6m71oltLcRHOtqI,141
 esgvoc/apps/__init__.py,sha256=Kyq36qRjvTWN7gu4_iFaLOjNUYvW0k1xp8bvkgJlQ5w,269
+esgvoc/apps/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 esgvoc/apps/drs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-esgvoc/apps/drs/constants.py,sha256=kZxFo7Aa8DNy3WBq2j0FuZVEqOyxG53ZAkkjQaObE8s,95
-esgvoc/apps/drs/generator.py,sha256=2-l68Zp4XVzZ42a6x1GN5hHwqYDe--JaiVjfWIGgiBw,22996
-esgvoc/apps/drs/report.py,sha256=HywC7LXA7ER8VQu5g56izm5Z-zLxhoDekQJbhNUNkmI,17316
-esgvoc/apps/drs/validator.py,sha256=-hX5PEYeiuRyIwS6jsY9XRyvj-wPpdcUD8PJn31D9hE,14913
-esgvoc/cli/drs.py,sha256=ykisrZEOz3sl9rFuQehJfgOf4xsE9tKjPd_CmMWR08M,9133
-esgvoc/cli/get.py,sha256=NrNCb2nWftlPkdRmYysbMU_pVUksFTeYItCuVviAAqQ,5171
+esgvoc/apps/drs/constants.py,sha256=rVWq1QQwAFgISjvl3YzJDLLPNUPXHpqgv66spmjyPMQ,96
+esgvoc/apps/drs/generator.py,sha256=M0QMtpAaMU4E0142FPqdhix3n034CU_QaPuwvXsWtlA,22392
+esgvoc/apps/drs/report.py,sha256=ZRu5l6T-U-hqY7O3ZwAseYbWZPcJiMhJ2dpFKZJE3Gk,17371
+esgvoc/apps/drs/validator.py,sha256=yNijdOPhF9adgZbya5Ugvs13GbL4MvgQepCT38A66vM,13825
+esgvoc/cli/drs.py,sha256=PvVbLxef34A1IO600AFWOEWb5iLaWrBRHwwgMJ4u-PM,9237
+esgvoc/cli/get.py,sha256=zwLWkWOfAddK5onyRjWZ0zU909MtTDjEB_YrQtIwXqw,5242
 esgvoc/cli/install.py,sha256=zMssevZDrigrUlw1QHWITGUL1TvsHMZBQdYeNtHgWQA,433
 esgvoc/cli/main.py,sha256=kjB-yus-cmG9rOmVIPBhmjOr3tkwB13dHLcNqrdpYAM,483
 esgvoc/cli/status.py,sha256=hmpyrszrb5ke9l_1SgendPSeoXW1h-h7nH0zGFt_vUw,1357
 esgvoc/cli/valid.py,sha256=XrseGONeWR6gnnwZrRMJNjVBFQLT82Uzn5rHrjjM1Uk,7040
-esgvoc/core/constants.py,sha256=b9GoM3pTICki95gMCnUZbg4_mMiywKhJX5ME01pgwMs,431
+esgvoc/core/constants.py,sha256=i03VR29sQmg89DdQpGZ1fzBT-elT3-_S0bTNraGA6T4,432
 esgvoc/core/convert.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 esgvoc/core/data_handler.py,sha256=BmcD_dSvX5fCkUEGAQnadPTeuKA7IvgMmQtesMXKh5g,5004
+esgvoc/core/exceptions.py,sha256=hs2D1lRWYhFuXDRbApKyJmEZMs8HxTPlSGkDSpkeQiA,538
 esgvoc/core/logging.conf,sha256=GK52lsTu17CfC2tKiMAIvkuHkIt5tqPmkWv68COOilc,278
 esgvoc/core/logging_handler.py,sha256=VgRBWeW_xdC4HtXG0TleQFwoy_YbJR4wlpci_sFezK0,147
 esgvoc/core/repo_fetcher.py,sha256=Rg668F4NN60jd8Ydz9dXpJQxd2eQOgaO0Ziwm53mcaI,9823
-esgvoc/core/db/__init__.py,sha256=gjAT_lUJtPqxsc8WnwbG_sxDqSIFAlG8vEKxdGyH82w,150
-esgvoc/core/db/connection.py,sha256=dhnCy1mwytUUvt149WHk7eYW0KSP64IaF3kMvKOQSJE,877
-esgvoc/core/db/project_ingestion.py,sha256=c7sgl91ScPfB4n05OR6HbtDqxNELv0jTiFf77S7tqc4,6327
-esgvoc/core/db/universe_ingestion.py,sha256=kH9TryeHbahLMaF1BFfOzrakWkcllIi9KWz90IKAphI,5215
+esgvoc/core/db/__init__.py,sha256=fszGxJfRUa6uuuogrdh8_ExtdyMLZSaaVawpdgXzqKM,113
+esgvoc/core/db/connection.py,sha256=AIFo0IWznu0Alk0SK_4bqp6FL5ZqSezNrfc_AlM9Z14,882
+esgvoc/core/db/project_ingestion.py,sha256=M0Yruh7w--Q9JyRnbLynoPiVfmufp8b6h5UuUeNnnPA,7687
+esgvoc/core/db/universe_ingestion.py,sha256=vPNr_svmduX3JKuLA_-SQMTxO2FWMqCNyCWkPSDznNE,6522
 esgvoc/core/db/models/mixins.py,sha256=S4_6iuKf1kYLdUXAgqRKSTXs8H9I--43MKlEq4F-dm4,445
-esgvoc/core/db/models/project.py,sha256=tuVcwNOkv0fQLFojpQCWacpnM7ZIX3eMDq6Mnko5OI0,2284
-esgvoc/core/db/models/universe.py,sha256=_WwuqrLGrheLbSOXaFN-2rWmHmeEGseO5sN400p6BuY,2202
+esgvoc/core/db/models/project.py,sha256=hkDCPJNo3wGt-UMOfujeQYPgju_aH1oExDAnIgoe96M,4369
+esgvoc/core/db/models/universe.py,sha256=vrR1TMD9ZI6RSGRi-qnLEKHD2Qk6Mh3qz8gciPgsELQ,4199
 esgvoc/core/service/__init__.py,sha256=hveqCB4oC6gKDf_L-wZxu9iBz7RiY4x9OeJGP6S5xtU,1534
 esgvoc/core/service/data_merger.py,sha256=GNFp5DTV2jlBVJZNpILngi6jCbUvVGcqka4EMWKj_Os,3456
 esgvoc/core/service/esg_voc.py,sha256=5G0P4_xmQzoI_RG_agpq-yHoYYZx220P27v2nPrpyNs,2420
 esgvoc/core/service/state.py,sha256=f1Pb11yYRAcLyK93zJ60i5l-mifDXP8_81tJQtMAvPo,10622
 esgvoc/core/service/configuration/config_manager.py,sha256=K-gU3Kd-eJMunxDKOk4x72CRcyJ50IZXLfqQgyI9zTs,8282
 esgvoc/core/service/configuration/setting.py,sha256=WJgo9ZjZJrTGR9WEBhp1d7ab0Yb2Y6XmnO1oImTPc2s,3042
-esgvoc-0.3.0.dist-info/METADATA,sha256=nIkD0iIw2LFnD7DTYzWIml_af3Rx5Io6KkCFKmADoZE,2165
-esgvoc-0.3.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
-esgvoc-0.3.0.dist-info/entry_points.txt,sha256=ZXufSC7Jlx1lb52U6Buv9IitJMcqAAXOerR2V9DaIto,48
-esgvoc-0.3.0.dist-info/licenses/LICENSE.txt,sha256=rWJoZt3vach8ZNdLq-Ee5djzCMFnJ1gIfBeJU5RIop4,21782
-esgvoc-0.3.0.dist-info/RECORD,,
+esgvoc-0.4.0.dist-info/METADATA,sha256=rDbK18ahBLJtPMTi00NI64tEhNn8ulPlfvMoJ4TBIlc,2165
+esgvoc-0.4.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+esgvoc-0.4.0.dist-info/entry_points.txt,sha256=ZXufSC7Jlx1lb52U6Buv9IitJMcqAAXOerR2V9DaIto,48
+esgvoc-0.4.0.dist-info/licenses/LICENSE.txt,sha256=rWJoZt3vach8ZNdLq-Ee5djzCMFnJ1gIfBeJU5RIop4,21782
+esgvoc-0.4.0.dist-info/RECORD,,
esgvoc/api/_utils.py DELETED
@@ -1,53 +0,0 @@
-from typing import Iterable, MutableSequence
-
-from sqlmodel import Session
-
-import esgvoc.core.constants as api_settings
-import esgvoc.core.service as service
-from esgvoc.api.data_descriptors import DATA_DESCRIPTOR_CLASS_MAPPING
-from esgvoc.api.data_descriptors.data_descriptor import (DataDescriptor,
-                                                         DataDescriptorSubSet)
-from esgvoc.core.db.models.project import PTerm
-from esgvoc.core.db.models.universe import UTerm
-
-
-class APIException(Exception): ...
-
-
-def get_pydantic_class(data_descriptor_id_or_term_type: str) -> type[DataDescriptor]:
-    if data_descriptor_id_or_term_type in DATA_DESCRIPTOR_CLASS_MAPPING:
-        return DATA_DESCRIPTOR_CLASS_MAPPING[data_descriptor_id_or_term_type]
-    else:
-        raise RuntimeError(f"{data_descriptor_id_or_term_type} pydantic class not found")
-
-
-def get_universe_session() -> Session:
-
-    UNIVERSE_DB_CONNECTION = service.current_state.universe.db_connection
-    if UNIVERSE_DB_CONNECTION:
-        return UNIVERSE_DB_CONNECTION.create_session()
-    else:
-        raise RuntimeError('universe connection is not initialized')
-
-
-def instantiate_pydantic_term(term: UTerm|PTerm,
-                              selected_term_fields: Iterable[str]|None) -> DataDescriptor:
-    type = term.specs[api_settings.TERM_TYPE_JSON_KEY]
-    if selected_term_fields:
-        subset = DataDescriptorSubSet(id=term.id, type=type)
-        for field in selected_term_fields:
-            setattr(subset, field, term.specs.get(field, None))
-        for field in DataDescriptorSubSet.MANDATORY_TERM_FIELDS:
-            setattr(subset, field, term.specs.get(field, None))
-        return subset
-    else:
-        term_class = get_pydantic_class(type)
-        return term_class(**term.specs)
-
-
-def instantiate_pydantic_terms(db_terms: Iterable[UTerm|PTerm],
-                               list_to_populate: MutableSequence[DataDescriptor],
-                               selected_term_fields: Iterable[str]|None) -> None:
-    for db_term in db_terms:
-        term = instantiate_pydantic_term(db_term, selected_term_fields)
-        list_to_populate.append(term)
File without changes