dapla-toolbelt-metadata 0.9.0__py3-none-any.whl → 0.9.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dapla-toolbelt-metadata might be problematic.
- dapla_metadata/datasets/__init__.py +1 -1
- dapla_metadata/datasets/_merge.py +333 -0
- dapla_metadata/datasets/core.py +23 -170
- dapla_metadata/datasets/utility/constants.py +5 -4
- dapla_metadata/datasets/utility/utils.py +4 -89
- dapla_metadata/variable_definitions/__init__.py +2 -0
- dapla_metadata/variable_definitions/_utils/constants.py +14 -0
- dapla_metadata/variable_definitions/_utils/template_files.py +3 -9
- {dapla_toolbelt_metadata-0.9.0.dist-info → dapla_toolbelt_metadata-0.9.2.dist-info}/METADATA +1 -1
- {dapla_toolbelt_metadata-0.9.0.dist-info → dapla_toolbelt_metadata-0.9.2.dist-info}/RECORD +12 -11
- {dapla_toolbelt_metadata-0.9.0.dist-info → dapla_toolbelt_metadata-0.9.2.dist-info}/WHEEL +0 -0
- {dapla_toolbelt_metadata-0.9.0.dist-info → dapla_toolbelt_metadata-0.9.2.dist-info}/licenses/LICENSE +0 -0
dapla_metadata/datasets/_merge.py
ADDED
@@ -0,0 +1,333 @@
+"""Code relating to merging metadata from an existing metadata document and metadata extracted from a new dataset.
+
+This is primarily convenience functionality for users whereby they can programmatically generate metadata without
+having to manually enter it. This is primarily useful when data is sharded by time (i.e. each dataset applies for
+a particular period like a month or a year). Assuming there aren't structural changes, the metadata may be reused
+for all periods.
+
+It is important to be able to detect changes in the structure of the data and warn users about this so that they can
+make changes as appropriate.
+"""
+
+import copy
+import logging
+import warnings
+from collections.abc import Iterable
+from dataclasses import dataclass
+from dataclasses import field
+from pathlib import Path
+from typing import cast
+
+import datadoc_model
+import datadoc_model.all_optional.model as all_optional_model
+import datadoc_model.required.model as required_model
+from cloudpathlib import CloudPath
+
+from dapla_metadata.datasets.dapla_dataset_path_info import DaplaDatasetPathInfo
+from dapla_metadata.datasets.utility.constants import (
+    DATASET_FIELDS_FROM_EXISTING_METADATA,
+)
+from dapla_metadata.datasets.utility.constants import INCONSISTENCIES_MESSAGE
+from dapla_metadata.datasets.utility.utils import OptionalDatadocMetadataType
+from dapla_metadata.datasets.utility.utils import VariableListType
+
+logger = logging.getLogger(__name__)
+
+BUCKET_NAME_MESSAGE = "Bucket name"
+DATA_PRODUCT_NAME_MESSAGE = "Data product name"
+DATASET_STATE_MESSAGE = "Dataset state"
+DATASET_SHORT_NAME_MESSAGE = "Dataset short name"
+VARIABLES_ADDITIONAL_MESSAGE = (
+    "Dataset has additional variables than defined in metadata"
+)
+VARIABLE_RENAME_MESSAGE = "Variables have been renamed in the dataset"
+VARIABLE_ORDER_MESSAGE = "The order of variables in the dataset has changed"
+VARIABLE_DATATYPES_MESSAGE = "Variable datatypes differ"
+VARIABLES_FEWER_MESSAGE = "Dataset has fewer variables than defined in metadata"
+
+
+class InconsistentDatasetsWarning(UserWarning):
+    """Existing and new datasets differ significantly from one another."""
+
+
+class InconsistentDatasetsError(ValueError):
+    """Existing and new datasets differ significantly from one another."""
+
+
+@dataclass
+class DatasetConsistencyStatus:
+    """Store the status for different aspects of dataset consistency.
+
+    Attributes:
+        message: Communicates to the user what aspect is inconsistent.
+        success: False if inconsistency is detected.
+        variables: Optionally communicate which variables are affected.
+    """
+
+    message: str
+    success: bool
+    variables: Iterable[str] = field(default_factory=list)
+
+    def __str__(self) -> str:
+        """Format the user message."""
+        message = self.message
+        if self.variables:
+            message += f"\n\tVariables: {self.variables}"
+        return message
+
+
+def check_dataset_consistency(
+    new_dataset_path: Path | CloudPath,
+    existing_dataset_path: Path,
+) -> list[DatasetConsistencyStatus]:
+    """Run consistency tests.
+
+    Args:
+        new_dataset_path: Path to the dataset to be documented.
+        existing_dataset_path: Path stored in the existing metadata.
+
+    Returns:
+        List of consistency check results.
+    """
+    new_dataset_path_info = DaplaDatasetPathInfo(new_dataset_path)
+    existing_dataset_path_info = DaplaDatasetPathInfo(existing_dataset_path)
+    return [
+        DatasetConsistencyStatus(
+            message=BUCKET_NAME_MESSAGE,
+            success=(
+                new_dataset_path_info.bucket_name
+                == existing_dataset_path_info.bucket_name
+            ),
+        ),
+        DatasetConsistencyStatus(
+            message=DATA_PRODUCT_NAME_MESSAGE,
+            success=(
+                new_dataset_path_info.statistic_short_name
+                == existing_dataset_path_info.statistic_short_name
+            ),
+        ),
+        DatasetConsistencyStatus(
+            message=DATASET_STATE_MESSAGE,
+            success=(
+                new_dataset_path_info.dataset_state
+                == existing_dataset_path_info.dataset_state
+            ),
+        ),
+        DatasetConsistencyStatus(
+            message=DATASET_SHORT_NAME_MESSAGE,
+            success=(
+                new_dataset_path_info.dataset_short_name
+                == existing_dataset_path_info.dataset_short_name
+            ),
+        ),
+    ]
+
+
+def check_variables_consistency(
+    extracted_variables: VariableListType,
+    existing_variables: VariableListType,
+) -> list[DatasetConsistencyStatus]:
+    """Check for consistency in variables structure.
+
+    Compares the existing metadata and that extracted from the new dataset and provides
+    highly detailed feedback on what is different between them.
+
+    We don't return all the results because that could create conflicting messages and false positives.
+
+    Args:
+        extracted_variables (VariableListType): Variables extracted from the new dataset.
+        existing_variables (VariableListType): Variables already documented in existing metadata.
+
+    Returns:
+        list[DatasetConsistencyStatus]: The list of checks and whether they were successful.
+    """
+    extracted_names_set = {v.short_name or "" for v in extracted_variables}
+    existing_names_set = {v.short_name or "" for v in existing_variables}
+    same_length = len(extracted_variables) == len(existing_variables)
+    more_extracted_variables = extracted_names_set.difference(existing_names_set)
+    fewer_extracted_variables = existing_names_set.difference(extracted_names_set)
+    results = []
+    if same_length:
+        if more_extracted_variables:
+            results.append(
+                DatasetConsistencyStatus(
+                    message=VARIABLE_RENAME_MESSAGE,
+                    variables=more_extracted_variables,
+                    success=not bool(more_extracted_variables),
+                )
+            )
+        else:
+            results.append(
+                DatasetConsistencyStatus(
+                    message=VARIABLE_ORDER_MESSAGE,
+                    success=[v.short_name or "" for v in extracted_variables]
+                    == [v.short_name or "" for v in existing_variables],
+                )
+            )
+        results.append(
+            DatasetConsistencyStatus(
+                message=VARIABLE_DATATYPES_MESSAGE,
+                success=[v.data_type for v in extracted_variables]
+                == [v.data_type for v in existing_variables],
+            )
+        )
+    else:
+        results.extend(
+            [
+                DatasetConsistencyStatus(
+                    message=VARIABLES_ADDITIONAL_MESSAGE,
+                    variables=more_extracted_variables,
+                    success=not bool(more_extracted_variables),
+                ),
+                DatasetConsistencyStatus(
+                    message=VARIABLES_FEWER_MESSAGE,
+                    variables=fewer_extracted_variables,
+                    success=not bool(fewer_extracted_variables),
+                ),
+            ]
+        )
+    return results
+
+
+def check_ready_to_merge(
+    results: list[DatasetConsistencyStatus], *, errors_as_warnings: bool
+) -> None:
+    """Check if the datasets are consistent enough to make a successful merge of metadata.
+
+    Args:
+        results: List of consistency check results.
+        errors_as_warnings: True if failing checks should be raised as warnings, not errors.
+
+    Raises:
+        InconsistentDatasetsError: If inconsistencies are found and `errors_as_warnings == False`
+    """
+    if failures := [result for result in results if not result.success]:
+        messages_list = "\n - ".join(str(f) for f in failures)
+        msg = f"{INCONSISTENCIES_MESSAGE}\n - {messages_list}"
+        if errors_as_warnings:
+            warnings.warn(
+                message=msg,
+                category=InconsistentDatasetsWarning,
+                stacklevel=2,
+            )
+        else:
+            raise InconsistentDatasetsError(
+                msg,
+            )
+
+
+def override_dataset_fields(
+    merged_metadata: all_optional_model.DatadocMetadata,
+    existing_metadata: all_optional_model.DatadocMetadata
+    | required_model.DatadocMetadata,
+) -> None:
+    """Overrides specific fields in the dataset of `merged_metadata` with values from the dataset of `existing_metadata`.
+
+    This function iterates over a predefined list of fields, `DATASET_FIELDS_FROM_EXISTING_METADATA`,
+    and sets the corresponding fields in the `merged_metadata.dataset` object to the values
+    from the `existing_metadata.dataset` object.
+
+    Args:
+        merged_metadata: An instance of `DatadocMetadata` containing the dataset to be updated.
+        existing_metadata: An instance of `DatadocMetadata` containing the dataset whose values are used to update `merged_metadata.dataset`.
+
+    Returns:
+        `None`.
+    """
+    if merged_metadata.dataset and existing_metadata.dataset:
+        # Override the fields as defined
+        for field in DATASET_FIELDS_FROM_EXISTING_METADATA:
+            setattr(
+                merged_metadata.dataset,
+                field,
+                getattr(existing_metadata.dataset, field),
+            )
+
+
+def merge_variables(
+    existing_metadata: OptionalDatadocMetadataType,
+    extracted_metadata: all_optional_model.DatadocMetadata,
+    merged_metadata: all_optional_model.DatadocMetadata,
+) -> all_optional_model.DatadocMetadata:
+    """Merges variables from the extracted metadata into the existing metadata and updates the merged metadata.
+
+    This function compares the variables from `extracted_metadata` with those in `existing_metadata`.
+    For each variable in `extracted_metadata`, it checks if a variable with the same `short_name` exists
+    in `existing_metadata`. If a match is found, it updates the existing variable with information from
+    `extracted_metadata`. If no match is found, the variable from `extracted_metadata` is directly added to `merged_metadata`.
+
+    Args:
+        existing_metadata: The metadata object containing the current state of variables.
+        extracted_metadata: The metadata object containing new or updated variables to merge.
+        merged_metadata: The metadata object that will contain the result of the merge.
+
+    Returns:
+        all_optional_model.DatadocMetadata: The `merged_metadata` object containing variables from both `existing_metadata`
+        and `extracted_metadata`.
+    """
+    if (
+        existing_metadata is not None
+        and existing_metadata.variables is not None
+        and extracted_metadata is not None
+        and extracted_metadata.variables is not None
+        and merged_metadata.variables is not None
+    ):
+        for extracted in extracted_metadata.variables:
+            existing = next(
+                (
+                    existing
+                    for existing in existing_metadata.variables
+                    if existing.short_name == extracted.short_name
+                ),
+                None,
+            )
+            if existing:
+                existing.id = (
+                    None  # Set to None so that it will be assigned a fresh ID later
+                )
+                existing.contains_data_from = (
+                    extracted.contains_data_from or existing.contains_data_from
+                )
+                existing.contains_data_until = (
+                    extracted.contains_data_until or existing.contains_data_until
+                )
+                merged_metadata.variables.append(
+                    cast("datadoc_model.all_optional.model.Variable", existing)
+                )
+            else:
+                # If there is no existing metadata for this variable, we just use what we have extracted
+                merged_metadata.variables.append(extracted)
+    return merged_metadata
+
+
+def merge_metadata(
+    extracted_metadata: all_optional_model.DatadocMetadata | None,
+    existing_metadata: OptionalDatadocMetadataType,
+) -> all_optional_model.DatadocMetadata:
+    if not existing_metadata:
+        logger.warning(
+            "No existing metadata found, no merge to perform. Continuing with extracted metadata.",
+        )
+        return extracted_metadata or all_optional_model.DatadocMetadata()
+
+    if not extracted_metadata:
+        return cast("all_optional_model.DatadocMetadata", existing_metadata)
+
+    # Use the extracted metadata as a base
+    merged_metadata = all_optional_model.DatadocMetadata(
+        dataset=copy.deepcopy(extracted_metadata.dataset),
+        variables=[],
+    )
+
+    override_dataset_fields(
+        merged_metadata=merged_metadata,
+        existing_metadata=existing_metadata,
+    )
+
+    # Merge variables.
+    # For each extracted variable, copy existing metadata into the merged metadata
+    return merge_variables(
+        existing_metadata=existing_metadata,
+        extracted_metadata=extracted_metadata,
+        merged_metadata=merged_metadata,
+    )
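Taken together, _merge.py exposes a small pipeline: run the consistency checks, decide whether failures should warn or raise, then merge. Below is a minimal sketch of how the new public functions compose, assuming the package and datadoc_model are installed; the failing check is built by hand here rather than taken from real datasets.

import warnings

import datadoc_model.all_optional.model as all_optional_model

from dapla_metadata.datasets._merge import DatasetConsistencyStatus
from dapla_metadata.datasets._merge import InconsistentDatasetsWarning
from dapla_metadata.datasets._merge import check_ready_to_merge
from dapla_metadata.datasets._merge import merge_metadata

# Hand-built failing result; real ones come from check_dataset_consistency()
# and check_variables_consistency() above.
results = [DatasetConsistencyStatus(message="Bucket name", success=False)]

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    check_ready_to_merge(results, errors_as_warnings=True)  # warns instead of raising
assert issubclass(caught[0].category, InconsistentDatasetsWarning)

# With errors_as_warnings=False the same failures raise InconsistentDatasetsError.
# With no existing metadata, merge_metadata logs a warning and returns the
# extracted metadata unchanged.
extracted = all_optional_model.DatadocMetadata(variables=[])
assert merge_metadata(extracted, existing_metadata=None) is extracted
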
dapla_metadata/datasets/core.py
CHANGED
@@ -5,7 +5,6 @@ from __future__ import annotations
 import copy
 import json
 import logging
-import warnings
 from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
 from typing import TYPE_CHECKING
@@ -17,6 +16,11 @@ from datadoc_model.all_optional.model import DataSetStatus
 
 from dapla_metadata._shared import config
 from dapla_metadata.dapla import user_info
+from dapla_metadata.datasets._merge import DatasetConsistencyStatus
+from dapla_metadata.datasets._merge import check_dataset_consistency
+from dapla_metadata.datasets._merge import check_ready_to_merge
+from dapla_metadata.datasets._merge import check_variables_consistency
+from dapla_metadata.datasets._merge import merge_metadata
 from dapla_metadata.datasets.compatibility import is_metadata_in_container_structure
 from dapla_metadata.datasets.compatibility import upgrade_metadata
 from dapla_metadata.datasets.dapla_dataset_path_info import DaplaDatasetPathInfo
@@ -26,7 +30,6 @@ from dapla_metadata.datasets.statistic_subject_mapping import StatisticSubjectMa
 from dapla_metadata.datasets.utility.constants import (
     DEFAULT_SPATIAL_COVERAGE_DESCRIPTION,
 )
-from dapla_metadata.datasets.utility.constants import INCONSISTENCIES_MESSAGE
 from dapla_metadata.datasets.utility.constants import METADATA_DOCUMENT_FILE_SUFFIX
 from dapla_metadata.datasets.utility.constants import NUM_OBLIGATORY_DATASET_FIELDS
 from dapla_metadata.datasets.utility.constants import NUM_OBLIGATORY_VARIABLES_FIELDS
@@ -34,7 +37,6 @@ from dapla_metadata.datasets.utility.utils import OptionalDatadocMetadataType
 from dapla_metadata.datasets.utility.utils import calculate_percentage
 from dapla_metadata.datasets.utility.utils import derive_assessment_from_state
 from dapla_metadata.datasets.utility.utils import get_timestamp_now
-from dapla_metadata.datasets.utility.utils import merge_variables
 from dapla_metadata.datasets.utility.utils import normalize_path
 from dapla_metadata.datasets.utility.utils import (
     num_obligatory_dataset_fields_completed,
@@ -42,7 +44,6 @@ from dapla_metadata.datasets.utility.utils import (
 from dapla_metadata.datasets.utility.utils import (
     num_obligatory_variables_fields_completed,
 )
-from dapla_metadata.datasets.utility.utils import override_dataset_fields
 from dapla_metadata.datasets.utility.utils import set_dataset_owner
 from dapla_metadata.datasets.utility.utils import set_default_values_dataset
 from dapla_metadata.datasets.utility.utils import set_default_values_variables
@@ -53,18 +54,9 @@ if TYPE_CHECKING:
 
     from cloudpathlib import CloudPath
 
-
 logger = logging.getLogger(__name__)
 
 
-class InconsistentDatasetsWarning(UserWarning):
-    """Existing and new datasets differ significantly from one another."""
-
-
-class InconsistentDatasetsError(ValueError):
-    """Existing and new datasets differ significantly from one another."""
-
-
 class Datadoc:
     """Handle reading, updating and writing of metadata.
 
@@ -118,7 +110,7 @@ class Datadoc:
         self.variables: list = []
         self.variables_lookup: dict[str, all_optional_model.Variable] = {}
         self.explicitly_defined_metadata_document = False
-        self.dataset_consistency_status: list = []
+        self.dataset_consistency_status: list[DatasetConsistencyStatus] = []
         if metadata_document_path:
             self.metadata_document = normalize_path(metadata_document_path)
             self.explicitly_defined_metadata_document = True
@@ -169,20 +161,22 @@ class Datadoc:
         ):
            extracted_metadata = self._extract_metadata_from_dataset(self.dataset_path)
 
-            if
-                self.
+            if (
+                self.dataset_path
+                and self.metadata_document
+                and extracted_metadata
+                and existing_metadata
+            ):
+                self.dataset_consistency_status = check_dataset_consistency(
+                    self.dataset_path,
+                    Path(self.metadata_document),
+                )
+                self.dataset_consistency_status.extend(
+                    check_variables_consistency(
+                        extracted_metadata.variables or [],
+                        existing_metadata.variables or [],
                     )
+                )
 
         if (
             self.dataset_path
@@ -192,11 +186,11 @@ class Datadoc:
             and extracted_metadata is not None
             and existing_metadata is not None
         ):
-
+            check_ready_to_merge(
                 self.dataset_consistency_status,
                 errors_as_warnings=self.errors_as_warnings,
             )
-            merged_metadata =
+            merged_metadata = merge_metadata(
                 extracted_metadata,
                 existing_metadata,
             )
@@ -214,19 +208,6 @@ class Datadoc:
         set_dataset_owner(self.dataset)
         self._create_variables_lookup()
 
-    def _get_existing_file_path(
-        self,
-        extracted_metadata: all_optional_model.DatadocMetadata | None,
-    ) -> str:
-        if (
-            extracted_metadata is not None
-            and extracted_metadata.dataset is not None
-            and extracted_metadata.dataset.file_path is not None
-        ):
-            return extracted_metadata.dataset.file_path
-        msg = "Could not access existing dataset file path"
-        raise ValueError(msg)
-
     def _set_metadata(
         self,
         merged_metadata: OptionalDatadocMetadataType,
@@ -244,134 +225,6 @@ class Datadoc:
             v.short_name: v for v in self.variables if v.short_name
         }
 
-    @staticmethod
-    def _check_dataset_consistency(
-        new_dataset_path: Path | CloudPath,
-        existing_dataset_path: Path,
-        extracted_metadata: all_optional_model.DatadocMetadata,
-        existing_metadata: OptionalDatadocMetadataType,
-    ) -> list[dict[str, object]]:
-        """Run consistency tests.
-
-        Args:
-            new_dataset_path: Path to the dataset to be documented.
-            existing_dataset_path: Path stored in the existing metadata.
-            extracted_metadata: Metadata extracted from a physical dataset.
-            existing_metadata: Metadata from a previously created metadata document.
-
-        Returns:
-            List if dict with property name and boolean success flag
-        """
-        new_dataset_path_info = DaplaDatasetPathInfo(new_dataset_path)
-        existing_dataset_path_info = DaplaDatasetPathInfo(existing_dataset_path)
-        return [
-            {
-                "name": "Bucket name",
-                "success": (
-                    new_dataset_path_info.bucket_name
-                    == existing_dataset_path_info.bucket_name
-                ),
-            },
-            {
-                "name": "Data product name",
-                "success": (
-                    new_dataset_path_info.statistic_short_name
-                    == existing_dataset_path_info.statistic_short_name
-                ),
-            },
-            {
-                "name": "Dataset state",
-                "success": (
-                    new_dataset_path_info.dataset_state
-                    == existing_dataset_path_info.dataset_state
-                ),
-            },
-            {
-                "name": "Dataset short name",
-                "success": (
-                    new_dataset_path_info.dataset_short_name
-                    == existing_dataset_path_info.dataset_short_name
-                ),
-            },
-            {
-                "name": "Variable names",
-                "success": (
-                    existing_metadata is not None
-                    and {v.short_name for v in extracted_metadata.variables or []}
-                    == {v.short_name for v in existing_metadata.variables or []}
-                ),
-            },
-            {
-                "name": "Variable datatypes",
-                "success": (
-                    existing_metadata is not None
-                    and [v.data_type for v in extracted_metadata.variables or []]
-                    == [v.data_type for v in existing_metadata.variables or []]
-                ),
-            },
-        ]
-
-    @staticmethod
-    def _check_ready_to_merge(
-        results: list[dict[str, object]], *, errors_as_warnings: bool
-    ) -> None:
-        """Check if the datasets are consistent enough to make a successful merge of metadata.
-
-        Args:
-            results: List if dict with property name and boolean success flag
-            errors_as_warnings: True if failing checks should be raised as warnings, not errors.
-
-        Raises:
-            InconsistentDatasetsError: If inconsistencies are found and `errors_as_warnings == False`
-        """
-        if failures := [result for result in results if not result["success"]]:
-            msg = f"{INCONSISTENCIES_MESSAGE} {', '.join(str(f['name']) for f in failures)}"
-            if errors_as_warnings:
-                warnings.warn(
-                    message=msg,
-                    category=InconsistentDatasetsWarning,
-                    stacklevel=2,
-                )
-            else:
-                raise InconsistentDatasetsError(
-                    msg,
-                )
-
-    @staticmethod
-    def _merge_metadata(
-        extracted_metadata: all_optional_model.DatadocMetadata | None,
-        existing_metadata: OptionalDatadocMetadataType,
-    ) -> all_optional_model.DatadocMetadata:
-        if not existing_metadata:
-            logger.warning(
-                "No existing metadata found, no merge to perform. Continuing with extracted metadata.",
-            )
-            return extracted_metadata or all_optional_model.DatadocMetadata()
-
-        if not extracted_metadata:
-            return cast("all_optional_model.DatadocMetadata", existing_metadata)
-
-        # Use the extracted metadata as a base
-        merged_metadata = all_optional_model.DatadocMetadata(
-            dataset=copy.deepcopy(extracted_metadata.dataset),
-            variables=[],
-        )
-
-        override_dataset_fields(
-            merged_metadata=merged_metadata,
-            existing_metadata=cast(
-                "all_optional_model.DatadocMetadata", existing_metadata
-            ),
-        )
-
-        # Merge variables.
-        # For each extracted variable, copy existing metadata into the merged metadata
-        return merge_variables(
-            existing_metadata=existing_metadata,
-            extracted_metadata=extracted_metadata,
-            merged_metadata=merged_metadata,
-        )
-
     def _extract_metadata_from_existing_document(
         self,
         document: pathlib.Path | CloudPath,
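With the helper logic moved to _merge.py, core.py now only wires the checks and the merge together during initialisation. A hedged sketch of the caller-facing behaviour follows, assuming the Datadoc constructor accepts these keyword arguments as in earlier releases; the paths are hypothetical placeholders.

import warnings

from dapla_metadata.datasets import Datadoc
from dapla_metadata.datasets._merge import InconsistentDatasetsWarning

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    meta = Datadoc(
        dataset_path="gs://bucket/produkt/klargjorte_data/data_p2024_v1.parquet",
        metadata_document_path="gs://bucket/produkt/klargjorte_data/data_p2023_v1__DOC.json",
        errors_as_warnings=True,  # inconsistencies warn instead of raising
    )

# The per-check results are now typed DatasetConsistencyStatus objects.
failed = [s for s in meta.dataset_consistency_status if not s.success]
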
dapla_metadata/datasets/utility/constants.py
CHANGED
@@ -9,7 +9,7 @@ DATE_VALIDATION_MESSAGE = f"{VALIDATION_ERROR}contains_data_from must be the sam
 
 OBLIGATORY_METADATA_WARNING = "Obligatory metadata is missing: "
 
-INCONSISTENCIES_MESSAGE = "Inconsistencies found between extracted and existing metadata
+INCONSISTENCIES_MESSAGE = "Inconsistencies found between extracted and existing metadata! This usually means that the new dataset has a different structure and that the version number should be incremented.\nDetails:"
 
 OBLIGATORY_DATASET_METADATA_IDENTIFIERS: list = [
     "assessment",
@@ -17,12 +17,9 @@ OBLIGATORY_DATASET_METADATA_IDENTIFIERS: list = [
     "dataset_status",
     "name",
     "description",
-    "data_source",
     "population_description",
     "version",
     "version_description",
-    "unit_type",
-    "temporality_type",
     "subject_field",
     "spatial_coverage_description",
     "owner",
@@ -44,6 +41,10 @@ OBLIGATORY_VARIABLES_METADATA_IDENTIFIERS = [
     "data_type",
     "variable_role",
     "is_personal_data",
+    "unit_type",
+    "population_description",
+    "data_source",
+    "temporality_type",
 ]
 
 OBLIGATORY_VARIABLES_METADATA_IDENTIFIERS_MULTILANGUAGE = [
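The reworded INCONSISTENCIES_MESSAGE pairs with the bullet-list formatting in check_ready_to_merge shown earlier, so a failure now renders as a headline plus one line per failed check. A small sketch of the resulting text, assuming a single hand-built failed check:

from dapla_metadata.datasets._merge import DatasetConsistencyStatus
from dapla_metadata.datasets.utility.constants import INCONSISTENCIES_MESSAGE

failures = [DatasetConsistencyStatus(message="Bucket name", success=False)]
messages_list = "\n - ".join(str(f) for f in failures)
print(f"{INCONSISTENCIES_MESSAGE}\n - {messages_list}")
# Inconsistencies found between extracted and existing metadata! This usually
# means that the new dataset has a different structure and that the version
# number should be incremented.
# Details:
#  - Bucket name
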
dapla_metadata/datasets/utility/utils.py
CHANGED
@@ -5,9 +5,7 @@ import logging
 import pathlib
 import uuid
 from typing import TypeAlias
-from typing import cast
 
-import datadoc_model
 import datadoc_model.all_optional.model as all_optional_model
 import datadoc_model.required.model as required_model
 import google.auth
@@ -20,9 +18,6 @@ from datadoc_model.all_optional.model import DataSetState
 from datadoc_model.all_optional.model import VariableRole
 
 from dapla_metadata.dapla import user_info
-from dapla_metadata.datasets.utility.constants import (
-    DATASET_FIELDS_FROM_EXISTING_METADATA,
-)
 from dapla_metadata.datasets.utility.constants import NUM_OBLIGATORY_VARIABLES_FIELDS
 from dapla_metadata.datasets.utility.constants import (
     OBLIGATORY_DATASET_METADATA_IDENTIFIERS,
@@ -43,6 +38,10 @@ DatadocMetadataType: TypeAlias = (
     all_optional_model.DatadocMetadata | required_model.DatadocMetadata
 )
 DatasetType: TypeAlias = all_optional_model.Dataset | required_model.Dataset
+VariableType: TypeAlias = all_optional_model.Variable | required_model.Variable
+VariableListType: TypeAlias = (
+    list[all_optional_model.Variable] | list[required_model.Variable]
+)
 OptionalDatadocMetadataType: TypeAlias = DatadocMetadataType | None
 
 
@@ -426,87 +425,3 @@ def running_in_notebook() -> bool:
     # interpreters and will throw a NameError. Therefore we're not running
     # in Jupyter.
     return False
-
-
-def override_dataset_fields(
-    merged_metadata: all_optional_model.DatadocMetadata,
-    existing_metadata: all_optional_model.DatadocMetadata
-    | required_model.DatadocMetadata,
-) -> None:
-    """Overrides specific fields in the dataset of `merged_metadata` with values from the dataset of `existing_metadata`.
-
-    This function iterates over a predefined list of fields, `DATASET_FIELDS_FROM_EXISTING_METADATA`,
-    and sets the corresponding fields in the `merged_metadata.dataset` object to the values
-    from the `existing_metadata.dataset` object.
-
-    Args:
-        merged_metadata: An instance of `DatadocMetadata` containing the dataset to be updated.
-        existing_metadata: An instance of `DatadocMetadata` containing the dataset whose values are used to update `merged_metadata.dataset`.
-
-    Returns:
-        `None`.
-    """
-    if merged_metadata.dataset and existing_metadata.dataset:
-        # Override the fields as defined
-        for field in DATASET_FIELDS_FROM_EXISTING_METADATA:
-            setattr(
-                merged_metadata.dataset,
-                field,
-                getattr(existing_metadata.dataset, field),
-            )
-
-
-def merge_variables(
-    existing_metadata: OptionalDatadocMetadataType,
-    extracted_metadata: all_optional_model.DatadocMetadata,
-    merged_metadata: all_optional_model.DatadocMetadata,
-) -> all_optional_model.DatadocMetadata:
-    """Merges variables from the extracted metadata into the existing metadata and updates the merged metadata.
-
-    This function compares the variables from `extracted_metadata` with those in `existing_metadata`.
-    For each variable in `extracted_metadata`, it checks if a variable with the same `short_name` exists
-    in `existing_metadata`. If a match is found, it updates the existing variable with information from
-    `extracted_metadata`. If no match is found, the variable from `extracted_metadata` is directly added to `merged_metadata`.
-
-    Args:
-        existing_metadata: The metadata object containing the current state of variables.
-        extracted_metadata: The metadata object containing new or updated variables to merge.
-        merged_metadata: The metadata object that will contain the result of the merge.
-
-    Returns:
-        all_optional_model.DatadocMetadata: The `merged_metadata` object containing variables from both `existing_metadata`
-        and `extracted_metadata`.
-    """
-    if (
-        existing_metadata is not None
-        and existing_metadata.variables is not None
-        and extracted_metadata is not None
-        and extracted_metadata.variables is not None
-        and merged_metadata.variables is not None
-    ):
-        for extracted in extracted_metadata.variables:
-            existing = next(
-                (
-                    existing
-                    for existing in existing_metadata.variables
-                    if existing.short_name == extracted.short_name
-                ),
-                None,
-            )
-            if existing:
-                existing.id = (
-                    None  # Set to None so that it will be set assigned a fresh ID later
-                )
-                existing.contains_data_from = (
-                    extracted.contains_data_from or existing.contains_data_from
-                )
-                existing.contains_data_until = (
-                    extracted.contains_data_until or existing.contains_data_until
-                )
-                merged_metadata.variables.append(
-                    cast("datadoc_model.all_optional.model.Variable", existing)
-                )
-            else:
-                # If there is no existing metadata for this variable, we just use what we have extracted
-                merged_metadata.variables.append(extracted)
-    return merged_metadata
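The new aliases let callers accept variables from either model flavour without overloads. A minimal sketch of a function typed against them, assuming the short_name field exists on both Variable models as the merge helpers above imply:

from dapla_metadata.datasets.utility.utils import VariableListType


def short_names(variables: VariableListType) -> list[str]:
    # Works for list[all_optional_model.Variable] and list[required_model.Variable] alike.
    return [v.short_name or "" for v in variables]
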
dapla_metadata/variable_definitions/__init__.py
CHANGED
@@ -2,6 +2,8 @@
 
 from ._generated.vardef_client import models
 from ._generated.vardef_client.exceptions import *  # noqa: F403
+from ._utils.constants import DEFAULT_DATE
+from ._utils.constants import GENERATED_CONTACT
 from .exceptions import VardefClientError
 from .exceptions import VardefFileError
 from .exceptions import VariableNotFoundError
dapla_metadata/variable_definitions/_utils/constants.py
CHANGED
@@ -2,6 +2,13 @@
 
 from datetime import date
 
+from dapla_metadata.variable_definitions._generated.vardef_client.models.contact import (
+    Contact,
+)
+from dapla_metadata.variable_definitions._generated.vardef_client.models.language_string_type import (
+    LanguageStringType,
+)
+
 VARIABLE_DEFINITIONS_DIR = "variable_definitions"
 
 VARIABLE_STATUS_FIELD_NAME = "variable_status"
@@ -26,6 +33,13 @@ TEMPLATE_SECTION_HEADER_MACHINE_GENERATED_EN = (
 
 DEFAULT_DATE = date(1000, 1, 1)
 
+GENERATED_CONTACT = Contact(
+    title=LanguageStringType(
+        nb="generert tittel",
+    ),
+    email="generert@ssb.no",
+)
+
 MACHINE_GENERATED_FIELDS = [
     "id",
     "patch_id",
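Together with the __init__.py change above, the new constant makes the placeholder contact importable from the package root. A small sketch of what this exposes, assuming LanguageStringType carries an nb attribute as its constructor call suggests:

from dapla_metadata.variable_definitions import DEFAULT_DATE
from dapla_metadata.variable_definitions import GENERATED_CONTACT

# Placeholder values that users are expected to replace before publishing.
print(GENERATED_CONTACT.email)     # generert@ssb.no
print(GENERATED_CONTACT.title.nb)  # generert tittel
print(DEFAULT_DATE)                # 1000-01-01
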
dapla_metadata/variable_definitions/_utils/template_files.py
CHANGED
@@ -4,9 +4,6 @@ from typing import TYPE_CHECKING
 from dapla_metadata.variable_definitions._generated.vardef_client.models.complete_response import (
     CompleteResponse,
 )
-from dapla_metadata.variable_definitions._generated.vardef_client.models.contact import (
-    Contact,
-)
 from dapla_metadata.variable_definitions._generated.vardef_client.models.language_string_type import (
     LanguageStringType,
 )
@@ -17,6 +14,7 @@ from dapla_metadata.variable_definitions._generated.vardef_client.models.variabl
     VariableStatus,
 )
 from dapla_metadata.variable_definitions._utils.constants import DEFAULT_DATE
+from dapla_metadata.variable_definitions._utils.constants import GENERATED_CONTACT
 from dapla_metadata.variable_definitions._utils.constants import TEMPLATE_HEADER
 from dapla_metadata.variable_definitions._utils.files import _create_file_name
 from dapla_metadata.variable_definitions._utils.files import _get_current_time
@@ -35,6 +33,7 @@ if TYPE_CHECKING:
 
 
 def _get_default_template() -> "VariableDefinition":
+    # Import is needed here to avoid circular imports
     from dapla_metadata.variable_definitions.variable_definition import (
         VariableDefinition,
     )
@@ -52,12 +51,7 @@ def _get_default_template() -> "VariableDefinition":
         subject_fields=[""],
         contains_special_categories_of_personal_data=False,
         owner=Owner(team="default team", groups=["default group"]),
-        contact=
-            title=LanguageStringType(
-                nb="generert tittel",
-            ),
-            email="generert@ssb.no",
-        ),
+        contact=GENERATED_CONTACT,
         variable_status=VariableStatus.DRAFT.value,
         id="",
         patch_id=0,
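The net effect of the template change is that the generated default shares a single contact object with the constant instead of constructing an identical inline copy. A hedged equivalence check follows; _get_default_template is a private helper, so this is illustration only:

from dapla_metadata.variable_definitions._utils.constants import GENERATED_CONTACT
from dapla_metadata.variable_definitions._utils.template_files import (
    _get_default_template,
)

# The default template's contact field is now the shared constant.
assert _get_default_template().contact == GENERATED_CONTACT
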
{dapla_toolbelt_metadata-0.9.0.dist-info → dapla_toolbelt_metadata-0.9.2.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dapla-toolbelt-metadata
-Version: 0.9.0
+Version: 0.9.2
 Summary: Dapla Toolbelt Metadata
 Project-URL: homepage, https://github.com/statisticsnorway/dapla-toolbelt-metadata
 Project-URL: repository, https://github.com/statisticsnorway/dapla-toolbelt-metadata
{dapla_toolbelt_metadata-0.9.0.dist-info → dapla_toolbelt_metadata-0.9.2.dist-info}/RECORD
RENAMED
@@ -5,9 +5,10 @@ dapla_metadata/_shared/enums.py,sha256=WHkH1d8xw41gOly6au_izZB1_-6XTcKu5rhBWUImj
 dapla_metadata/_shared/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dapla_metadata/dapla/__init__.py,sha256=tkapF-YwmruPPrKvN3pEoCZqb7xvJx_ogBM8XyGMuJI,130
 dapla_metadata/dapla/user_info.py,sha256=bENez-ICt9ySR8orYebO68Q3_2LkIW9QTL58DTctmEQ,4833
-dapla_metadata/datasets/__init__.py,sha256=
+dapla_metadata/datasets/__init__.py,sha256=an-REJgi7N8-S1SCz-MYO_8as6fMe03WvhjRP_hWWkg,293
+dapla_metadata/datasets/_merge.py,sha256=Tk5wQz6xZGr8veUAHZb42O8HARU8ObBJ_E4afvVWdlo,12993
 dapla_metadata/datasets/code_list.py,sha256=JtCE-5Q8grAKvkn0KKjzeGhO-96O7yGsastbuoakreg,9057
-dapla_metadata/datasets/core.py,sha256=
+dapla_metadata/datasets/core.py,sha256=r_lWZPB4zETQGXyKLwr1cgDQkeIIK_id5jGMCPHHYqg,20209
 dapla_metadata/datasets/dapla_dataset_path_info.py,sha256=WPeV_mwKk2B9sXd14SaP-kTb1bOQ_8W2KtrqOG7sJIY,26867
 dapla_metadata/datasets/dataset_parser.py,sha256=3dtRXNy1C8SfG8zTYWdY26nV4l-dG25IC_0J5t2bYwI,8285
 dapla_metadata/datasets/model_validation.py,sha256=pGT-jqaQQY4z7jz-7UQd0BQoTWDxDWPYAnDoRC2vd_c,6818
@@ -20,15 +21,15 @@ dapla_metadata/datasets/compatibility/model_backwards_compatibility.py,sha256=W5
 dapla_metadata/datasets/external_sources/__init__.py,sha256=qvIdXwqyEmXNUCB94ZtZXRzifdW4hiXASFFPtC70f6E,83
 dapla_metadata/datasets/external_sources/external_sources.py,sha256=9eIcOIUbaodNX1w9Tj2wl4U4wUmr5kF1R0i01fKUzGs,2974
 dapla_metadata/datasets/utility/__init__.py,sha256=pp6tUcgUbo8iq9OPtFKQrTbLuI3uY7NHptwWSTpasOU,33
-dapla_metadata/datasets/utility/constants.py,sha256=
+dapla_metadata/datasets/utility/constants.py,sha256=Iq8_0GnBwR0Ua1tPstNPJHt5mtiSehDQsW8uLNtBv_4,2489
 dapla_metadata/datasets/utility/enums.py,sha256=SpV4xlmP1YMaJPbmX03hqRLHUOhXIk5gquTeJ8G_5OE,432
-dapla_metadata/datasets/utility/utils.py,sha256=
+dapla_metadata/datasets/utility/utils.py,sha256=f1vRy9TUbp4mtvJtXnk0Z7YTL8LZv9arcReXEm46gCg,14735
 dapla_metadata/standards/__init__.py,sha256=n8jnMrudLuScSdfQ4UMJorc-Ptg3Y1-ilT8zAaQnM70,179
 dapla_metadata/standards/name_validator.py,sha256=6-DQE_EKVd6UjL--EXpFcZDQtusVbSFaWaUY-CfOV2c,9184
 dapla_metadata/standards/standard_validators.py,sha256=tcCiCI76wUVtMzXA2oCgdauZc0uGgUi11FKu-t7KGwQ,3767
 dapla_metadata/standards/utils/__init__.py,sha256=AiM7JcpFsAgyuCyLDYZo9kI94wvIImMDGoV2lKhS4pE,42
 dapla_metadata/standards/utils/constants.py,sha256=mhWNFnS6NMsRl0c_deIdzY7_bD_wKn_oej6rzDjgwq4,2578
-dapla_metadata/variable_definitions/__init__.py,sha256=
+dapla_metadata/variable_definitions/__init__.py,sha256=z48vevGb8UuQ8mwkqCtBGoyM-Ts53vUcKo7Ag5rE_Wc,482
 dapla_metadata/variable_definitions/exceptions.py,sha256=ImB81bne-h45kX9lE5hIh80QAWkOPS52uzcOftuoouM,10118
 dapla_metadata/variable_definitions/vardef.py,sha256=WUpiKfvgFGPhMdjYSFSmdlXQKAolmRgW4-t-EocddQs,13934
 dapla_metadata/variable_definitions/vardok_id.py,sha256=8T23BUHyVQr5hovTVc2E4HVY7f7e_jdi3YL1qzMQgFw,1268
@@ -85,11 +86,11 @@ dapla_metadata/variable_definitions/_generated/vardef_client/models/variable_sta
 dapla_metadata/variable_definitions/_utils/__init__.py,sha256=qAhRLJoTBqtR3f9xRXTRhD7-5Xg0Opk1Ks5F4AUYnpA,45
 dapla_metadata/variable_definitions/_utils/_client.py,sha256=v1-9VjrdPI6-sroam5vXMPEV1dQMPsYk7KyGd48HjYw,971
 dapla_metadata/variable_definitions/_utils/config.py,sha256=cc03xlcy_pnn06_6px0taO8LopmVJ9ud75oayGge8Vk,1814
-dapla_metadata/variable_definitions/_utils/constants.py,sha256=
+dapla_metadata/variable_definitions/_utils/constants.py,sha256=zr5FNVCEz6TM9PVErQ672LNm-8CATrXyY94BGhV_ZJQ,2431
 dapla_metadata/variable_definitions/_utils/files.py,sha256=JbPgPNQ7iA38juMqGEdcg5OjZZUwCb6NQtPL0AEspD0,10933
-dapla_metadata/variable_definitions/_utils/template_files.py,sha256
+dapla_metadata/variable_definitions/_utils/template_files.py,sha256=7fcc7yEHOl5JUZ698kqj4IiikXPHBi3SrAVOk4wqQtw,3308
 dapla_metadata/variable_definitions/_utils/variable_definition_files.py,sha256=sGhcSpckR9NtYGNh2oVkiCd5SI3bbJEBhc1PA2uShs0,4701
-dapla_toolbelt_metadata-0.9.
-dapla_toolbelt_metadata-0.9.
-dapla_toolbelt_metadata-0.9.
-dapla_toolbelt_metadata-0.9.
+dapla_toolbelt_metadata-0.9.2.dist-info/METADATA,sha256=Rf7QxJjGzKIGtHWu9PDYwv3bKzC8YRFuEMuF-EPm0lk,4723
+dapla_toolbelt_metadata-0.9.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+dapla_toolbelt_metadata-0.9.2.dist-info/licenses/LICENSE,sha256=np3IfD5m0ZUofn_kVzDZqliozuiO6wrktw3LRPjyEiI,1073
+dapla_toolbelt_metadata-0.9.2.dist-info/RECORD,,
{dapla_toolbelt_metadata-0.9.0.dist-info → dapla_toolbelt_metadata-0.9.2.dist-info}/WHEEL
RENAMED
File without changes

{dapla_toolbelt_metadata-0.9.0.dist-info → dapla_toolbelt_metadata-0.9.2.dist-info}/licenses/LICENSE
RENAMED
File without changes