dapla-toolbelt-metadata 0.6.2__py3-none-any.whl → 0.6.4__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to the public registry, and is provided for informational purposes only.
- dapla_metadata/datasets/core.py +110 -10
- dapla_metadata/standards/name_validator.py +2 -2
- dapla_metadata/standards/utils/constants.py +2 -2
- dapla_metadata/variable_definitions/_utils/config.py +1 -2
- dapla_metadata/variable_definitions/_utils/constants.py +23 -0
- dapla_metadata/variable_definitions/_utils/files.py +107 -11
- dapla_metadata/variable_definitions/_utils/template_files.py +8 -15
- dapla_metadata/variable_definitions/_utils/variable_definition_files.py +32 -5
- dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml +53 -17
- dapla_metadata/variable_definitions/vardef.py +3 -3
- dapla_metadata/variable_definitions/variable_definition.py +19 -21
- {dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.4.dist-info}/METADATA +3 -3
- {dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.4.dist-info}/RECORD +15 -15
- {dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.4.dist-info}/LICENSE +0 -0
- {dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.4.dist-info}/WHEEL +0 -0
dapla_metadata/datasets/core.py
CHANGED

@@ -109,8 +109,11 @@ class Datadoc:
         self.dataset_path: pathlib.Path | CloudPath | None = None
         self.dataset = model.Dataset()
         self.variables: list = []
+        self.pseudo_variables: list[model.PseudoVariable] = []
         self.variables_lookup: dict[str, model.Variable] = {}
+        self.pseudo_variables_lookup: dict[str, model.PseudoVariable] = {}
         self.explicitly_defined_metadata_document = False
+        self.dataset_consistency_status: list = []
         if metadata_document_path:
             self.metadata_document = normalize_path(metadata_document_path)
             self.explicitly_defined_metadata_document = True
@@ -148,11 +151,19 @@ class Datadoc:
         """
         extracted_metadata: model.DatadocMetadata | None = None
         existing_metadata: model.DatadocMetadata | None = None
+        existing_pseudonymization: model.PseudonymizationMetadata | None = None
+
         if self.metadata_document and self.metadata_document.exists():
             existing_metadata = self._extract_metadata_from_existing_document(
                 self.metadata_document,
             )

+            existing_pseudonymization = (
+                self._extract_pseudonymization_from_existing_document(
+                    self.metadata_document,
+                )
+            )
+
         if (
             self.dataset_path is not None
             and self.dataset == model.Dataset()
@@ -169,11 +180,14 @@ class Datadoc:
             and existing_metadata is not None
         ):
             existing_file_path = self._get_existing_file_path(extracted_metadata)
-            self.
+            self.dataset_consistency_status = self._check_dataset_consistency(
                 self.dataset_path,
                 Path(existing_file_path),
                 extracted_metadata,
                 existing_metadata,
+            )
+            self._check_ready_to_merge(
+                self.dataset_consistency_status,
                 errors_as_warnings=self.errors_as_warnings,
             )
             merged_metadata = self._merge_metadata(
@@ -188,10 +202,15 @@ class Datadoc:
             self._set_metadata(merged_metadata)
         else:
             self._set_metadata(existing_metadata or extracted_metadata)
+
+        if existing_pseudonymization:
+            self._set_pseudonymization_metadata(existing_pseudonymization)
+
         set_default_values_variables(self.variables)
         set_default_values_dataset(self.dataset)
         set_dataset_owner(self.dataset)
         self._create_variables_lookup()
+        self._create_pseudo_variables_lookup()

     def _get_existing_file_path(
         self,
@@ -218,35 +237,48 @@ class Datadoc:
         self.dataset = merged_metadata.dataset
         self.variables = merged_metadata.variables

+    def _set_pseudonymization_metadata(
+        self,
+        existing_pseudonymization: model.PseudonymizationMetadata | None,
+    ) -> None:
+        if not existing_pseudonymization or not (
+            existing_pseudonymization.pseudo_variables
+        ):
+            msg = "Could not read pseudonymization metadata"
+            raise ValueError(msg)
+        self.pseudo_variables = existing_pseudonymization.pseudo_variables
+
     def _create_variables_lookup(self) -> None:
         self.variables_lookup = {
             v.short_name: v for v in self.variables if v.short_name
         }

+    def _create_pseudo_variables_lookup(self) -> None:
+        self.pseudo_variables_lookup = {
+            v.short_name: v for v in self.pseudo_variables if v.short_name
+        }
+
     @staticmethod
-    def
+    def _check_dataset_consistency(
         new_dataset_path: Path | CloudPath,
         existing_dataset_path: Path,
         extracted_metadata: model.DatadocMetadata,
         existing_metadata: model.DatadocMetadata,
-
-
-    ) -> None:
-        """Check if the datasets are consistent enough to make a successful merge of metadata.
+    ) -> list[dict[str, object]]:
+        """Run consistency tests.

         Args:
             new_dataset_path: Path to the dataset to be documented.
             existing_dataset_path: Path stored in the existing metadata.
             extracted_metadata: Metadata extracted from a physical dataset.
             existing_metadata: Metadata from a previously created metadata document.
-            errors_as_warnings: True if failing checks should be raised as warnings, not errors.

-
-
+        Returns:
+            List if dict with property name and boolean success flag
         """
         new_dataset_path_info = DaplaDatasetPathInfo(new_dataset_path)
         existing_dataset_path_info = DaplaDatasetPathInfo(existing_dataset_path)
-
+        return [
             {
                 "name": "Bucket name",
                 "success": (
@@ -290,6 +322,20 @@ class Datadoc:
                 ),
             },
         ]
+
+    @staticmethod
+    def _check_ready_to_merge(
+        results: list[dict[str, object]], *, errors_as_warnings: bool
+    ) -> None:
+        """Check if the datasets are consistent enough to make a successful merge of metadata.
+
+        Args:
+            results: List if dict with property name and boolean success flag
+            errors_as_warnings: True if failing checks should be raised as warnings, not errors.
+
+        Raises:
+            InconsistentDatasetsError: If inconsistencies are found and `errors_as_warnings == False`
+        """
         if failures := [result for result in results if not result["success"]]:
             msg = f"{INCONSISTENCIES_MESSAGE} {', '.join(str(f['name']) for f in failures)}"
             if errors_as_warnings:
@@ -384,6 +430,42 @@ class Datadoc:
         )
         return None

+    def _extract_pseudonymization_from_existing_document(
+        self,
+        document: pathlib.Path | CloudPath,
+    ) -> model.PseudonymizationMetadata | None:
+        """Read pseudo metadata from an existing metadata document.
+
+        If there is pseudo metadata in the document supplied, the method validates and returns the pseudonymization structure.
+
+        Args:
+            document: A path to the existing metadata document.
+
+        Raises:
+            json.JSONDecodeError: If the metadata document cannot be parsed.
+        """
+        try:
+            with document.open(mode="r", encoding="utf-8") as file:
+                fresh_metadata = json.load(file)
+        except json.JSONDecodeError:
+            logger.warning(
+                "Could not open existing metadata file %s.",
+                document,
+                exc_info=True,
+            )
+            return None
+
+        if not is_metadata_in_container_structure(fresh_metadata):
+            return None
+
+        pseudonymization_metadata = fresh_metadata.get("pseudonymization")
+        if pseudonymization_metadata is None:
+            return None
+
+        return model.PseudonymizationMetadata.model_validate_json(
+            json.dumps(pseudonymization_metadata),
+        )
+
     def _extract_subject_field_from_path(
         self,
         dapla_dataset_path_info: DaplaDatasetPathInfo,
@@ -501,6 +583,11 @@ class Datadoc:
         )
         if self.container:
             self.container.datadoc = datadoc
+            if not self.container.pseudonymization:
+                self.container.pseudonymization = model.PseudonymizationMetadata(
+                    pseudo_dataset=model.PseudoDataset()
+                )
+            self.container.pseudonymization.pseudo_variables = self.pseudo_variables
         else:
             self.container = model.MetadataContainer(datadoc=datadoc)
         if self.metadata_document:
@@ -530,3 +617,16 @@ class Datadoc:
             self.dataset,
         ) + num_obligatory_variables_fields_completed(self.variables)
         return calculate_percentage(num_set_fields, num_all_fields)
+
+    def add_pseudo_variable(self, variable_short_name: str) -> None:
+        """Adds a new pseudo variable to the list of pseudonymized variables."""
+        if self.variables_lookup[variable_short_name] is not None:
+            pseudo_variable = model.PseudoVariable(short_name=variable_short_name)
+            self.pseudo_variables.append(pseudo_variable)
+            self.pseudo_variables_lookup[variable_short_name] = pseudo_variable
+
+    def get_pseudo_variable(
+        self, variable_short_name: str
+    ) -> model.PseudoVariable | None:
+        """Finds a pseudo variable by shortname."""
+        return self.pseudo_variables_lookup.get(variable_short_name)
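The new pseudonymization surface on `Datadoc` is small: a list of `PseudoVariable` objects, a matching lookup dict, and the two public methods added at the end of the file. A minimal usage sketch, assuming the package's usual `Datadoc` import path; the dataset path is hypothetical, and note that `add_pseudo_variable` indexes `variables_lookup` directly, so an unknown short name raises a `KeyError`:

```python
from dapla_metadata.datasets import Datadoc

# Hypothetical dataset path; metadata is extracted from it as before.
meta = Datadoc(dataset_path="gs://ssb-bucket/produkt/person_data_p2021_v1.parquet")

# Mark an existing variable (by short name) as pseudonymized.
meta.add_pseudo_variable("fnr")

# Look it up again by short name; returns None if it was never registered.
pseudo = meta.get_pseudo_variable("fnr")
```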
dapla_metadata/standards/name_validator.py
CHANGED

@@ -128,9 +128,9 @@ class NamingStandardReport:
         """Returns an appropriate message based on the success rate."""
         rate = self.success_rate()
         if rate is not None:
-            if rate
+            if 95 <= rate <= 100:
                 return SSB_NAMING_STANDARD_REPORT_RESULT_BEST
-            if 70 < rate <
+            if 70 < rate < 95:
                 return SSB_NAMING_STANDARD_REPORT_RESULT_GOOD
             if 40 <= rate <= 70:
                 return SSB_NAMING_STANDARD_REPORT_RESULT_AVERAGE
dapla_metadata/standards/utils/constants.py
CHANGED

@@ -9,7 +9,7 @@ NAME_STANDARD_SUCCESS = "Filene dine er i samsvar med SSB-navnestandarden"
 NAME_STANDARD_VIOLATION = "Det er oppdaget brudd på SSB-navnestandard:"

 MISSING_BUCKET_NAME = "Filnavn mangler bøttenavn ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#obligatoriske-mapper"
-MISSING_VERSION = "Filnavn mangler versjon ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#
+MISSING_VERSION = "Filnavn mangler versjon, hvis ikke filen er nyeste versjon kan dette være brudd på navnestandarden ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#versjonering-av-datasett"
 MISSING_PERIOD = "Filnavn mangler gyldighetsperiode ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#filnavn"
 MISSING_SHORT_NAME = "Kortnavn for statistikk mangler ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#obligatoriske-mapper"
 MISSING_DATA_STATE = "Mappe for datatilstand mangler ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#obligatoriske-mapper"
@@ -26,7 +26,7 @@ BUCKET_NAME_UNKNOWN = "Kan ikke validere bøttenavn"

 SSB_NAMING_STANDARD_REPORT = "SSB navnestandard rapport"
 SSB_NAMING_STANDARD_REPORT_SUCCESS_RATE = "Suksess rate"
-SSB_NAMING_STANDARD_REPORT_RESULT_BEST = "🚀 Fantastisk!
+SSB_NAMING_STANDARD_REPORT_RESULT_BEST = "🚀 Fantastisk! 🎉\n"
 SSB_NAMING_STANDARD_REPORT_RESULT_GOOD = (
     "✅ Bra jobba! Fortsatt litt rom for forbedring. 😊\n"
 )
dapla_metadata/variable_definitions/_utils/config.py
CHANGED

@@ -59,8 +59,7 @@ def get_vardef_host() -> str:
         case DaplaEnvironment.TEST:
             return VARDEF_HOST_TEST
         case DaplaEnvironment.DEV:
-
-            raise NotImplementedError(msg)
+            return VARDEF_HOST_TEST
         case _:
             return get_config_item("VARDEF_HOST") or "http://localhost:8080"

dapla_metadata/variable_definitions/_utils/constants.py
CHANGED

@@ -39,3 +39,26 @@ MACHINE_GENERATED_FIELDS = [

 OPTIONAL_FIELD = "~ Valgfritt felt ~"
 REQUIRED_FIELD = "! Obligatorisk felt !"
+
+YAML_STR_TAG = "tag:yaml.org,2002:str"
+
+BLOCK_FIELDS = [
+    "definition",
+    "name",
+    "contact.title",
+    "comment",
+]
+
+DOUBLE_QUOTE_FIELDS = [
+    "unit_types",
+    "subject_fields",
+    "related_variable_definition_uris",
+    "owner",
+    "short_name",
+    "classification_reference",
+    "measurement_type",
+    "external_reference_uri",
+    "created_by",
+    "id",
+    "last_updated_by",
+]
dapla_metadata/variable_definitions/_utils/files.py
CHANGED

@@ -4,12 +4,16 @@ import logging
 from datetime import datetime
 from pathlib import Path
 from typing import TYPE_CHECKING
+from typing import Any
 from typing import cast

 import pytz
 from pydantic.config import JsonDict
 from ruamel.yaml import YAML
 from ruamel.yaml import CommentedMap
+from ruamel.yaml import RoundTripRepresenter
+from ruamel.yaml.scalarstring import DoubleQuotedScalarString
+from ruamel.yaml.scalarstring import LiteralScalarString

 from dapla_metadata.variable_definitions._generated.vardef_client.models.complete_response import (
     CompleteResponse,
@@ -18,6 +22,8 @@ from dapla_metadata.variable_definitions._generated.vardef_client.models.variabl
     VariableStatus,
 )
 from dapla_metadata.variable_definitions._utils import config
+from dapla_metadata.variable_definitions._utils.constants import BLOCK_FIELDS
+from dapla_metadata.variable_definitions._utils.constants import DOUBLE_QUOTE_FIELDS
 from dapla_metadata.variable_definitions._utils.constants import (
     MACHINE_GENERATED_FIELDS,
 )
@@ -40,6 +46,7 @@ from dapla_metadata.variable_definitions._utils.constants import (
 from dapla_metadata.variable_definitions._utils.constants import (
     VARIABLE_STATUS_FIELD_NAME,
 )
+from dapla_metadata.variable_definitions._utils.constants import YAML_STR_TAG
 from dapla_metadata.variable_definitions._utils.descriptions import (
     apply_norwegian_descriptions_to_model,
 )
@@ -112,6 +119,15 @@ def _get_variable_definitions_dir():
     return folder_path


+def _set_field_requirement(field_name: str, field: Any) -> str | None:
+    """Determine the field requirement status."""
+    if field_name not in MACHINE_GENERATED_FIELDS:
+        if field.is_required() or field_name == VARIABLE_STATUS_FIELD_NAME:
+            return REQUIRED_FIELD
+        return OPTIONAL_FIELD
+    return None
+
+
 def _populate_commented_map(
     field_name: str,
     value: str,
@@ -120,16 +136,17 @@ def _populate_commented_map(
 ) -> None:
     """Add data to a CommentedMap."""
     commented_map[field_name] = value
-    field = model_instance.model_fields[field_name]
+    field = type(model_instance).model_fields[field_name]
     description: JsonValue = cast(
         JsonDict,
         field.json_schema_extra,
     )[NORWEGIAN_DESCRIPTIONS]
+    field_requirement: str | None = _set_field_requirement(field_name, field)
     if description is not None:
         new_description = (
-            (
-
-            + str(description)
+            ("\n" + field_requirement + "\n" + str(description))
+            if field_requirement
+            else ("\n" + str(description))
         )
         commented_map.yaml_set_comment_before_after_key(
             field_name,
@@ -174,14 +191,22 @@ def _validate_and_create_directory(custom_directory: Path) -> Path:
     return custom_directory


-def _configure_yaml() -> YAML:
-
-    yaml.
-
+def configure_yaml(yaml: YAML) -> YAML:
+    """Common Yaml config for variable definitions."""
+    yaml.Representer = RoundTripRepresenter  # Preserve the order of keys etc.
+    yaml.default_flow_style = False  # Ensures pretty YAML formatting block style
+    yaml.allow_unicode = True  # Support special characters
+    yaml.preserve_quotes = True
+    yaml.width = 4096  # prevent wrapping lines
+    yaml.indent(
+        mapping=4,
+        sequence=6,
+        offset=4,
+    )  # Ensure indentation for nested keys and lists
     yaml.representer.add_representer(
         VariableStatus,
         lambda dumper, data: dumper.represent_scalar(
-
+            YAML_STR_TAG,
             data.value,
         ),
     )
@@ -189,6 +214,76 @@ def _configure_yaml() -> YAML:
     return yaml


+def _safe_get(data: dict, keys: list):
+    """Safely navigate nested dictionaries."""
+    for key in keys:
+        if not isinstance(data, dict) or key not in data or data[key] is None:
+            return None
+        data = data[key]
+    return data
+
+
+def _apply_literal_scalars(field: dict):
+    """Helper function to wrap `LanguageStringType` values in `LiteralScalarString`.
+
+    This function wraps each non-`None` language value in a `LanguageStringType` field
+    in the `LiteralScalarString` YAML type, ensuring proper YAML formatting with block style.
+    """
+    for lang, value in field.items():
+        if value is not None:
+            field[lang] = LiteralScalarString(value)
+
+
+def _apply_double_quotes_to_dict_values(field: dict):
+    """Helper function to wrap dictionary values in `DoubleQuotedScalarString`.
+
+    This function wraps each non-`None` value in a dictionary, including values inside lists,
+    in the `DoubleQuotedScalarString` YAML type, ensuring proper YAML formatting with double quotes.
+    """
+    for sub_key, sub_value in field.items():
+        if isinstance(sub_value, list):
+            field[sub_key] = [
+                DoubleQuotedScalarString(item) for item in sub_value if item is not None
+            ]
+        elif sub_value is not None:
+            field[sub_key] = DoubleQuotedScalarString(sub_value)
+
+
+def pre_process_data(data: dict) -> dict:
+    """Format variable definition model fields with ruamel YAML scalar string types.
+
+    This method sets the appropriate scalar string type (either `LiteralScalarString` or `DoubleQuotedScalarString`)
+    for fields of the variable definition model, based on predefined lists of fields.
+
+    It processes both nested dictionaries and lists, ensuring each element is formatted with the correct YAML string type.
+
+    Args:
+        data (dict): A dictionary containing the variable definition data.
+
+    Returns:
+        dict: The updated dictionary with model fields formatted as ruamel.yaml scalar string types.
+    """
+    for key in BLOCK_FIELDS:
+        keys = key.split(".")
+        field = _safe_get(data, keys)
+        if isinstance(field, dict):
+            _apply_literal_scalars(field)
+
+    for key in DOUBLE_QUOTE_FIELDS:
+        keys = key.split(".")
+        field = _safe_get(data, keys)
+        if isinstance(field, list):
+            data[key] = [
+                DoubleQuotedScalarString(item) for item in field if item is not None
+            ]
+        elif isinstance(field, str):
+            data[key] = DoubleQuotedScalarString(data[key])
+        elif isinstance(field, dict):
+            _apply_double_quotes_to_dict_values(field)
+
+    return data
+
+
 def _model_to_yaml_with_comments(
     model_instance: CompleteResponse,
     file_name: str,
@@ -209,7 +304,8 @@ def _model_to_yaml_with_comments(
     Returns:
         Path: The file path of the generated YAML file.
     """
-    yaml =
+    yaml = YAML()
+    configure_yaml(yaml)

     from dapla_metadata.variable_definitions.variable_definition import (
         VariableDefinition,
@@ -223,7 +319,7 @@ def _model_to_yaml_with_comments(
         serialize_as_any=True,
         warnings="error",
     )
-
+    data = pre_process_data(data)
     # One CommentMap for each section in the yaml file
     machine_generated_map = CommentedMap()
     commented_map = CommentedMap()
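A small sketch of what `pre_process_data` does to a payload, based on the field lists in constants.py; the dict below is hypothetical (`short_name` and `unit_types` are in `DOUBLE_QUOTE_FIELDS`, `definition` is in `BLOCK_FIELDS`):

```python
from ruamel.yaml.scalarstring import DoubleQuotedScalarString
from ruamel.yaml.scalarstring import LiteralScalarString

from dapla_metadata.variable_definitions._utils.files import pre_process_data

data = pre_process_data(
    {
        "short_name": "wlonn",
        "unit_types": ["20"],
        "definition": {"nb": "Lønnsinntekter omfatter ..."},
    }
)
assert isinstance(data["short_name"], DoubleQuotedScalarString)  # rendered as "wlonn"
assert isinstance(data["unit_types"][0], DoubleQuotedScalarString)  # rendered as "20"
assert isinstance(data["definition"]["nb"], LiteralScalarString)  # rendered as a |- block
```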
dapla_metadata/variable_definitions/_utils/template_files.py
CHANGED

@@ -41,31 +41,24 @@ def _get_default_template() -> "VariableDefinition":

     return VariableDefinition(
         name=LanguageStringType(
-            nb="
-            nn="default namn",
-            en="default name",
+            nb="Navn",
         ),
-        short_name="
+        short_name="generert_kortnavn",
         definition=LanguageStringType(
-            nb="
-            nn="default definisjon",
-            en="default definition",
+            nb="Definisjonstekst",
         ),
-        classification_reference="class_id",
         valid_from=DEFAULT_DATE,
-        unit_types=["
-        subject_fields=["
+        unit_types=[""],
+        subject_fields=[""],
         contains_special_categories_of_personal_data=False,
-        variable_status=VariableStatus.DRAFT.value,
         owner=Owner(team="default team", groups=["default group"]),
         contact=Contact(
             title=LanguageStringType(
-                nb="
-                nn="default tittel",
-                en="default title",
+                nb="generert tittel",
             ),
-            email="
+            email="generert@ssb.no",
         ),
+        variable_status=VariableStatus.DRAFT.value,
         id="",
         patch_id=0,
         created_at=DEFAULT_DATE,
dapla_metadata/variable_definitions/_utils/variable_definition_files.py
CHANGED

@@ -3,6 +3,7 @@
 import logging
 from os import PathLike
 from pathlib import Path
+from typing import Any
 from typing import TypeVar

 from pydantic import BaseModel
@@ -17,6 +18,7 @@ from dapla_metadata.variable_definitions._utils.files import _get_current_time
 from dapla_metadata.variable_definitions._utils.files import (
     _model_to_yaml_with_comments,
 )
+from dapla_metadata.variable_definitions._utils.files import configure_yaml

 logger = logging.getLogger(__name__)

@@ -46,13 +48,38 @@ def create_variable_yaml(

 def _read_variable_definition_file(file_path: Path) -> dict:
     yaml = YAML()
-
+    configure_yaml(yaml)
     logger.debug("Full path to variable definition file %s", file_path)
     logger.info("Reading from '%s'", file_path.name)
     with file_path.open(encoding="utf-8") as f:
         return yaml.load(f)


+def _strip_strings_recursively(data: Any) -> Any:
+    """Recursively strip leading and trailing whitespace from string values in nested dicts/lists.
+
+    This function traverses the provided data, which may be a dictionary, list, or other types,
+    and applies the following logic:
+    - If the data is a dictionary, it recursively strips string values in all key-value pairs.
+    - If the data is a list, it recursively strips string values in all list elements.
+    - If the data is a string, it strips leading and trailing whitespace.
+    - Any other data types are returned unchanged.
+
+    Args:
+        data: The input data, which may include nested dictionaries, lists, or other types.
+
+    Returns:
+        Any: The processed data, with strings stripped of whitespace or unchanged if not a string.
+    """
+    if isinstance(data, dict):
+        return {k: _strip_strings_recursively(v) for k, v in data.items()}
+    if isinstance(data, list):
+        return [_strip_strings_recursively(item) for item in data]
+    if isinstance(data, str):
+        return data.strip()
+    return data
+
+
 def _read_file_to_model(
     file_path: PathLike[str] | None,
     model_class: type[T],
@@ -80,14 +107,14 @@ def _read_file_to_model(
         raise FileNotFoundError(
             msg,
         ) from e
+    raw_data = _read_variable_definition_file(file_path)
+    cleaned_data = _strip_strings_recursively(raw_data)
+
     model = model_class.from_dict(  # type:ignore [attr-defined]
-
-        file_path,
-        ),
+        cleaned_data
    )

     if model is None:
         msg = f"Could not read data from {file_path}"
         raise FileNotFoundError(msg)
-
     return model
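An illustrative sketch of the stripping behavior described in the docstring above, using hypothetical values:

```python
from dapla_metadata.variable_definitions._utils.variable_definition_files import (
    _strip_strings_recursively,
)

raw = {"short_name": " wlonn \n", "unit_types": [" 20 "], "patch_id": 1}
cleaned = _strip_strings_recursively(raw)
# Strings are stripped at every nesting level; non-strings pass through.
assert cleaned == {"short_name": "wlonn", "unit_types": ["20"], "patch_id": 1}
```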
dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml
CHANGED

@@ -1,50 +1,86 @@
 # --- Variabel definisjoner ---
 # ref: https://statistics-norway.atlassian.net/wiki/spaces/MPD/pages/3009839199/VarDef+-+Krav+til+dokumentasjon+av+variabler
 name: |
-  Variabelens navn. Dette skal ikke være en mer “teknisk” forkortelse, men et navn som er forståelig for mennesker
+  Variabelens navn. Dette skal ikke være en mer “teknisk” forkortelse, men et navn som er forståelig for mennesker.
+  -------------------------
+  >>> EKSEMPEL:
+  name:
+      nb: |-
+          Lønnsinntekter
 short_name: |
   Dette er variabelens kortnavn, som kan være en mer “teknisk” forkortelse, f.eks. wlonn (kortnavnet til Lønnsinntekter). Kortnavnet til en variabel i Vardef skal være unikt.
-  Kravet til kortnavnet er at det kan inneholde a-z (kun små bokstaver), 0-9 og _ (understrek). Minimumslengden på kortnavnet er 2 tegn.
+  Kravet til kortnavnet er at det kan inneholde a-z (kun små bokstaver), 0-9 og _ (understrek). Minimumslengden på kortnavnet er 2 tegn.
+  Bokstavene “æ”, “ø” og “å” kan ikke brukes. Disse anbefales erstattet med hhv. “ae”, “oe” og “aa"
 definition: |
   En definisjon skal beskrive hva variabelen betyr og være så kort og presis som mulig. Mer utfyllende opplysninger kan legges i Merknad-feltet.
+  -------------------------
+  >>> EKSEMPEL:
+  definition:
+      nb: |-
+          Yrkesinntekter, kapitalinntekter, skattepliktige og skattefrie overføringer, i løpet av kalenderåret.
 classification_reference: |
   ID av en klassifikasjon eller kodeliste fra KLASS som beskriver verdiene variabelen kan anta.
   For eksempel vil variabelen 'Sivilstand' ha klassifikasjon 'Standard for sivilstand' (kan vises på https://www.ssb.no/klass/klassifikasjoner/19 ) som har ID 19.
+  -------------------------
+  >>> EKSEMPEL: "19"
 unit_types: |
-  Enhetstyper - enhetene som beskrives av denne variabelen. Variabelen “sivilstand” vil f.eks. ha enhetstypen person,
+  Enhetstyper - enhetene som beskrives av denne variabelen. Variabelen “sivilstand” vil f.eks. ha enhetstypen person,
+  mens f.eks. “Produsentpris for tjenester” vil ha både foretak og bedrift som enhetstyper siden variabelen kan beskrive begge.
   Verdier skal være koder fra: https://www.ssb.no/klass/klassifikasjoner/702.
+  -------------------------
+  >>> EKSEMPEL:
+      - "20"
 subject_fields: |
   Statistikkområder som variabelen brukes innenfor. For eksempel tilhører variabelen “Sivilstand” statistikkområdet “Befolkning”.
   Verdier skal være koder fra https://www.ssb.no/klass/klassifikasjoner/618.
+  -------------------------
+  >>> EKSEMPEL:
+      - "bf"
+      - "be"
 contains_special_categories_of_personal_data: |
   Viser om variabelen inneholder spesielt sensitive personopplysninger.
-
-
-  - opplysninger om politisk oppfatning
-  - opplysninger om religion
-  - opplysninger om filosofisk overbevisning
-  - opplysninger om fagforeningsmedlemskap
-  - genetiske opplysninger
-  - biometriske opplysninger med det formål å entydig identifisere noen
-  - helseopplysninger
-  - opplysninger om seksuelle forhold
-  - opplysninger om seksuell legning
-  ref: https://lovdata.no/dokument/NL/lov/2018-06-15-38/KAPITTEL_gdpr-2#gdpr/a9
+  -------------------------
+  >>> EKSEMPEL: true
 measurement_type: |
   Måletype som en kvantitativ variabelen tilhører, f.eks. valuta, areal osv.
   Verdien skal være en kode fra: https://www.ssb.no/klass/klassifikasjoner/303
+  -------------------------
+  >>> EKSEMPEL: "03"
 valid_from: |
   Datoen variabeldefinisjonen er gyldig f.o.m.
+  -------------------------
+  >>> EKSEMPEL: 1999-01-30
 valid_until: |
   Datoen variabeldefinisjonens var gyldig t.o.m. Settes hvis definisjonen skal erstattet av en ny definisjon (med en ny gyldighetsperiode), eller variabelen ikke lenger skal brukes.
+  -------------------------
+  >>> EKSEMPEL: 2024-10-23
 external_reference_uri: |
   En peker (URI) til ekstern definisjon/dokumentasjon, f.eks. ei webside som er relevant for variabelen.
+  -----------------------------------------------------
+  >>> EKSEMPEL: "https://www.landbruksdirektoratet.com"
 comment: |
-  Her kan en sette inn eventuelle tilleggsopplysninger som ikke hører hjemme i selve definisjonen.
+  Her kan en sette inn eventuelle tilleggsopplysninger som ikke hører hjemme i selve definisjonen.
+  Variabelen “Landbakgrunn” har f.eks. merknaden “Fra og med 1.1.2003 ble definisjon endret til også å trekke inn besteforeldrenes fødeland”.
+  -----------------------------------------------------------------------------------------------
+  >>> EKSEMPEL:
+  comment:
+      nb: |-
+          Fra og med 1.1.2003 ble definisjon endret til også å trekke inn besteforeldrenes fødeland.
 related_variable_definition_uris: |
-  Her kan en legge inn URIer til andre variabler som er relevante. Eksempelvis er variabelen “Inntekt etter skatt” en beregnet variabel der “Yrkesinntekter” og “Kapitalinntekter” inngår i beregningen.
+  Her kan en legge inn URIer til andre variabler som er relevante. Eksempelvis er variabelen “Inntekt etter skatt” en beregnet variabel der “Yrkesinntekter” og “Kapitalinntekter” inngår i beregningen.
+  En kan da legge inn deres URI-er i dette feltet.
+  -------------------------
+  >>> EKSEMPEL:
+      - "https://example.com/"
 contact: |
   Her dokumenterer en navn og epost for person eller gruppe som kan svare på spørsmål.
+  -------------------------
+  >>> EKSEMPEL:
+  contact:
+      title:
+          nb: |-
+              Seksjonsleder
+      email: leder@ssb.no
 variable_status: |
   Livssyklus for variabelen.
 id: |
dapla_metadata/variable_definitions/vardef.py
CHANGED

@@ -108,7 +108,7 @@ class Vardef:
         )

         logger.info(
-            "Successfully created variable definition '%s' with ID '%s'",
+            "✅ Successfully created variable definition '%s' with ID '%s'",
             new_variable.short_name,
             new_variable.id,
         )
@@ -165,7 +165,7 @@ class Vardef:
         )

         logger.info(
-            "Successfully migrated variable definition '%s' with ID '%s'",
+            "✅ Successfully migrated variable definition '%s' with ID '%s'",
             migrated_variable.short_name,
             migrated_variable.id,
         )
@@ -273,7 +273,7 @@ class Vardef:
             custom_directory=Path(custom_file_path) if custom_file_path else None,
         )
         logger.info(
-            f"Created editable variable definition template file at {file_path}",  # noqa: G004
+            f"✅ Created editable variable definition template file at {file_path}",  # noqa: G004
         )
         return file_path

dapla_metadata/variable_definitions/variable_definition.py
CHANGED

@@ -34,6 +34,8 @@ from dapla_metadata.variable_definitions._generated.vardef_client.models.variabl
 )
 from dapla_metadata.variable_definitions._utils import config
 from dapla_metadata.variable_definitions._utils._client import VardefClient
+from dapla_metadata.variable_definitions._utils.files import configure_yaml
+from dapla_metadata.variable_definitions._utils.files import pre_process_data
 from dapla_metadata.variable_definitions._utils.variable_definition_files import (
     _read_file_to_model,
 )
@@ -126,7 +128,7 @@ class VariableDefinition(CompleteResponse):
         self.__dict__.update(updated)

         logger.info(
-            "Successfully updated variable definition '%s' with ID '%s'",
+            "✅ Successfully updated variable definition '%s' with ID '%s'",
             updated.short_name,
             updated.id,
         )
@@ -176,7 +178,7 @@ class VariableDefinition(CompleteResponse):
             variable_definition_id=self.id,
             active_group=config.get_active_group(),
         )
-        return f"Variable {self.id} safely deleted"
+        return f"✅ Variable {self.id} safely deleted"

     @vardef_exception_handler
     def get_patch(self, patch_id: int) -> "VariableDefinition":
@@ -205,9 +207,9 @@ class VariableDefinition(CompleteResponse):

         Patches are to be used for minor changes which don't require a new Validity Period.
         Examples of reasons for creating a new Patch:
-
-
-
+        - Correcting a typo
+        - Adding a translation
+        - Adding a subject field

         Supply only the fields to be changed. Other fields will retain their current values.

@@ -234,7 +236,7 @@ class VariableDefinition(CompleteResponse):
         self.__dict__.update(new_patch)

         logger.info(
-            "Successfully created patch with patch ID '%s' for variable definition '%s' with ID '%s'",
+            "✅ Successfully created patch with patch ID '%s' for variable definition '%s' with ID '%s'",
             new_patch.patch_id,
             new_patch.short_name,
             new_patch.id,
@@ -309,7 +311,7 @@ class VariableDefinition(CompleteResponse):
         self.__dict__.update(new_validity_period)

         logger.info(
-            "Successfully created validity period that is valid from '%s' for variable definition '%s' with ID '%s'",
+            "✅ Successfully created validity period that is valid from '%s' for variable definition '%s' with ID '%s'",
             new_validity_period.valid_from,
             new_validity_period.short_name,
             new_validity_period.id,
@@ -350,7 +352,7 @@ class VariableDefinition(CompleteResponse):
             UpdateDraft(variable_status=VariableStatus.PUBLISHED_INTERNAL),
         )
         logger.info(
-            "Variable definition '%s' with ID '%s' successfully published, new status: %s",
+            "✅ Variable definition '%s' with ID '%s' successfully published, new status: %s",
             update.short_name,
             update.id,
             update.variable_status,
@@ -373,7 +375,7 @@ class VariableDefinition(CompleteResponse):
             Patch(variable_status=VariableStatus.PUBLISHED_EXTERNAL),
         )
         logger.info(
-            "Variable definition '%s' with ID '%s' successfully published, new status: %s",
+            "✅ Variable definition '%s' with ID '%s' successfully published, new status: %s",
             update.short_name,
             update.id,
             update.variable_status,
@@ -387,7 +389,7 @@ class VariableDefinition(CompleteResponse):
         )
         self.set_file_path(file_path)
         logger.info(
-            f"Created editable variable definition file at {file_path}",  # noqa: G004
+            f"✅ Created editable variable definition file at {file_path}",  # noqa: G004
         )
         return self

@@ -405,16 +407,12 @@ class VariableDefinition(CompleteResponse):

     def _convert_to_yaml_output(self) -> str:
         stream = StringIO()
-        with ruamel.yaml.YAML(
-
-
-
-
-
-            self.model_dump(
-                mode="json",
-                serialize_as_any=True,
-                warnings="error",
-            ),
+        with ruamel.yaml.YAML(output=stream) as yaml:
+            configure_yaml(yaml)
+            data = self.model_dump(
+                mode="json",
+                serialize_as_any=True,
+                warnings="error",
             )
+            yaml.dump(pre_process_data(data))
         return stream.getvalue()
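Since the in-memory dump now shares `configure_yaml` and `pre_process_data` with the file writers, the same pattern works standalone. A minimal sketch with a hypothetical payload:

```python
from io import StringIO

import ruamel.yaml

from dapla_metadata.variable_definitions._utils.files import configure_yaml
from dapla_metadata.variable_definitions._utils.files import pre_process_data

stream = StringIO()
with ruamel.yaml.YAML(output=stream) as yaml:
    configure_yaml(yaml)
    yaml.dump(pre_process_data({"short_name": "wlonn"}))  # hypothetical payload
print(stream.getvalue())  # -> short_name: "wlonn"
```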
{dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.4.dist-info}/METADATA
RENAMED

@@ -1,11 +1,11 @@
 Metadata-Version: 2.3
 Name: dapla-toolbelt-metadata
-Version: 0.6.2
+Version: 0.6.4
 Summary: Dapla Toolbelt Metadata
 License: MIT
 Author: Team Metadata
 Author-email: metadata@ssb.no
-Requires-Python: >=3.10
+Requires-Python: >=3.10
 Classifier: Development Status :: 4 - Beta
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
@@ -24,7 +24,7 @@ Requires-Dist: pyjwt (>=2.8.0)
 Requires-Dist: python-dotenv (>=1.0.1)
 Requires-Dist: requests (>=2.31.0)
 Requires-Dist: ruamel-yaml (>=0.18.10)
-Requires-Dist: ssb-datadoc-model (
+Requires-Dist: ssb-datadoc-model (==6.0.0)
 Requires-Dist: ssb-klass-python (>=1.0.1)
 Requires-Dist: typing-extensions (>=4.12.2)
 Project-URL: Changelog, https://github.com/statisticsnorway/dapla-toolbelt-metadata/releases
{dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.4.dist-info}/RECORD
RENAMED

@@ -7,7 +7,7 @@ dapla_metadata/dapla/__init__.py,sha256=tkapF-YwmruPPrKvN3pEoCZqb7xvJx_ogBM8XyGM
 dapla_metadata/dapla/user_info.py,sha256=bENez-ICt9ySR8orYebO68Q3_2LkIW9QTL58DTctmEQ,4833
 dapla_metadata/datasets/__init__.py,sha256=TvzskpdFC6hGcC9_55URT5jr5wNAPzXuISd2UjJWM_8,280
 dapla_metadata/datasets/code_list.py,sha256=kp1O6sUiUAP9WKlWY8IgHWx_1IOzJA63WveHqolgKmg,9082
-dapla_metadata/datasets/core.py,sha256=
+dapla_metadata/datasets/core.py,sha256=WfBIkNWxXH_WdCyZCsO3o-CRjb2LbsMJp-P01-b8nYw,25706
 dapla_metadata/datasets/dapla_dataset_path_info.py,sha256=zdkVjxlqXMBe7eTAneUrTDP0_fx7JsEQ_0JrKjREhfU,26854
 dapla_metadata/datasets/dataset_parser.py,sha256=bc3KOIDQGgdZMPh3XVHhiKMsY6FxIY9glvGlwTM4g7I,8233
 dapla_metadata/datasets/external_sources/__init__.py,sha256=qvIdXwqyEmXNUCB94ZtZXRzifdW4hiXASFFPtC70f6E,83
@@ -21,10 +21,10 @@ dapla_metadata/datasets/utility/constants.py,sha256=SqZMc1v8rO2b_nRFJR7frVd0TAGv
 dapla_metadata/datasets/utility/enums.py,sha256=SpV4xlmP1YMaJPbmX03hqRLHUOhXIk5gquTeJ8G_5OE,432
 dapla_metadata/datasets/utility/utils.py,sha256=fAevz9X0PHw-JL0_4V0geTkoBV31qoO7-dVLFJaIfbo,18370
 dapla_metadata/standards/__init__.py,sha256=n8jnMrudLuScSdfQ4UMJorc-Ptg3Y1-ilT8zAaQnM70,179
-dapla_metadata/standards/name_validator.py,sha256=
+dapla_metadata/standards/name_validator.py,sha256=6-DQE_EKVd6UjL--EXpFcZDQtusVbSFaWaUY-CfOV2c,9184
 dapla_metadata/standards/standard_validators.py,sha256=tcCiCI76wUVtMzXA2oCgdauZc0uGgUi11FKu-t7KGwQ,3767
 dapla_metadata/standards/utils/__init__.py,sha256=AiM7JcpFsAgyuCyLDYZo9kI94wvIImMDGoV2lKhS4pE,42
-dapla_metadata/standards/utils/constants.py,sha256=
+dapla_metadata/standards/utils/constants.py,sha256=mhWNFnS6NMsRl0c_deIdzY7_bD_wKn_oej6rzDjgwq4,2578
 dapla_metadata/variable_definitions/__init__.py,sha256=j_Nn5mnlZ2uio9moDFLE2xpALqrYpupIZMlvwbLuEuA,391
 dapla_metadata/variable_definitions/_generated/.openapi-generator/FILES,sha256=hfNllHEkFODP0XbgqZB5Tz2mmEBFeAeMplXXslczo1E,634
 dapla_metadata/variable_definitions/_generated/.openapi-generator/VERSION,sha256=Y6lrqS2bXoujk5K-DCAwRFdRmkCKuTgvlngEx6FY5So,7
@@ -73,17 +73,17 @@ dapla_metadata/variable_definitions/_generated/vardef_client/py.typed,sha256=47D
 dapla_metadata/variable_definitions/_generated/vardef_client/rest.py,sha256=x4PWmg3IYQBr8OgnrWr3l4Ke2rElHP3zAEVxk2U-mOc,12022
 dapla_metadata/variable_definitions/_utils/__init__.py,sha256=qAhRLJoTBqtR3f9xRXTRhD7-5Xg0Opk1Ks5F4AUYnpA,45
 dapla_metadata/variable_definitions/_utils/_client.py,sha256=v1-9VjrdPI6-sroam5vXMPEV1dQMPsYk7KyGd48HjYw,971
-dapla_metadata/variable_definitions/_utils/config.py,sha256=
-dapla_metadata/variable_definitions/_utils/constants.py,sha256=
+dapla_metadata/variable_definitions/_utils/config.py,sha256=BpLrnuqgtqz_kxBc_Kd-I1QNL7y2RxRXgX-IVbMIclQ,2416
+dapla_metadata/variable_definitions/_utils/constants.py,sha256=BGITkRNYtRDySM-anDMQDvO2JrXm3lDjw7ZmYfhFlXU,1884
 dapla_metadata/variable_definitions/_utils/descriptions.py,sha256=bB5QHNc4eOhmpLQHCty-CP5_aA82chkICifXw430suI,2746
-dapla_metadata/variable_definitions/_utils/files.py,sha256=
-dapla_metadata/variable_definitions/_utils/template_files.py,sha256
-dapla_metadata/variable_definitions/_utils/variable_definition_files.py,sha256=
+dapla_metadata/variable_definitions/_utils/files.py,sha256=qdO9D0l-6FnSGZImTtyMsrFfauFqvQyCWz0knLSklbo,13193
+dapla_metadata/variable_definitions/_utils/template_files.py,sha256=-PgYs4TG4vrXLQgk47pow9ZsqlZqhtO755LnEmvN4MA,3405
+dapla_metadata/variable_definitions/_utils/variable_definition_files.py,sha256=PbqsFdHxsq0EWBg9s2Y57LqVP7aPmGD5-FZfnzuOw2Q,4078
 dapla_metadata/variable_definitions/exceptions.py,sha256=z6Gtd84FboDu7vWjC3wathIF7I0gF0imtRhwMkr16lY,7851
-dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml,sha256=
-dapla_metadata/variable_definitions/vardef.py,sha256=
-dapla_metadata/variable_definitions/variable_definition.py,sha256=
-dapla_toolbelt_metadata-0.6.
-dapla_toolbelt_metadata-0.6.
-dapla_toolbelt_metadata-0.6.
-dapla_toolbelt_metadata-0.6.
+dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml,sha256=z-P9q0yVk8mcKIMPByEhdF3q-OQzd26jPgrpLep4cU0,5223
+dapla_metadata/variable_definitions/vardef.py,sha256=KYd31nCGhxuzC0hpKR6foQjO39Tlb3vu9IDqUoMvTeY,11352
+dapla_metadata/variable_definitions/variable_definition.py,sha256=sj49uot0e4UJW4QJ3dEJGgjY4yfCHOkxS2NdD2t60b8,14883
+dapla_toolbelt_metadata-0.6.4.dist-info/LICENSE,sha256=np3IfD5m0ZUofn_kVzDZqliozuiO6wrktw3LRPjyEiI,1073
+dapla_toolbelt_metadata-0.6.4.dist-info/METADATA,sha256=Bq3nNW2h1USHM-xn1eLag-kHCqYn6l7PtwZqqEE4_bk,4905
+dapla_toolbelt_metadata-0.6.4.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+dapla_toolbelt_metadata-0.6.4.dist-info/RECORD,,

{dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.4.dist-info}/LICENSE
RENAMED
File without changes

{dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.4.dist-info}/WHEEL
RENAMED
File without changes