dapla-toolbelt-metadata 0.6.2__py3-none-any.whl → 0.6.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
The registry flags this version of dapla-toolbelt-metadata as potentially problematic.
- dapla_metadata/datasets/core.py +25 -10
- dapla_metadata/standards/name_validator.py +2 -2
- dapla_metadata/standards/utils/constants.py +2 -2
- dapla_metadata/variable_definitions/_utils/config.py +1 -2
- dapla_metadata/variable_definitions/_utils/constants.py +2 -0
- dapla_metadata/variable_definitions/_utils/files.py +89 -9
- dapla_metadata/variable_definitions/_utils/template_files.py +18 -14
- dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml +17 -0
- dapla_metadata/variable_definitions/vardef.py +3 -3
- dapla_metadata/variable_definitions/variable_definition.py +19 -21
- {dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.3.dist-info}/METADATA +1 -1
- {dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.3.dist-info}/RECORD +14 -14
- {dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.3.dist-info}/LICENSE +0 -0
- {dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.3.dist-info}/WHEEL +0 -0
dapla_metadata/datasets/core.py
CHANGED

@@ -111,6 +111,7 @@ class Datadoc:
         self.variables: list = []
         self.variables_lookup: dict[str, model.Variable] = {}
         self.explicitly_defined_metadata_document = False
+        self.dataset_consistency_status: list = []
         if metadata_document_path:
             self.metadata_document = normalize_path(metadata_document_path)
             self.explicitly_defined_metadata_document = True

@@ -169,11 +170,14 @@ class Datadoc:
             and existing_metadata is not None
         ):
             existing_file_path = self._get_existing_file_path(extracted_metadata)
-            self.
+            self.dataset_consistency_status = self._check_dataset_consistency(
                 self.dataset_path,
                 Path(existing_file_path),
                 extracted_metadata,
                 existing_metadata,
+            )
+            self._check_ready_to_merge(
+                self.dataset_consistency_status,
                 errors_as_warnings=self.errors_as_warnings,
             )
             merged_metadata = self._merge_metadata(

@@ -224,29 +228,26 @@ class Datadoc:
         }
 
     @staticmethod
-    def
+    def _check_dataset_consistency(
         new_dataset_path: Path | CloudPath,
         existing_dataset_path: Path,
         extracted_metadata: model.DatadocMetadata,
         existing_metadata: model.DatadocMetadata,
-
-
-    ) -> None:
-        """Check if the datasets are consistent enough to make a successful merge of metadata.
+    ) -> list[dict[str, object]]:
+        """Run consistency tests.
 
         Args:
             new_dataset_path: Path to the dataset to be documented.
             existing_dataset_path: Path stored in the existing metadata.
             extracted_metadata: Metadata extracted from a physical dataset.
             existing_metadata: Metadata from a previously created metadata document.
-            errors_as_warnings: True if failing checks should be raised as warnings, not errors.
 
-
-
+        Returns:
+            List if dict with property name and boolean success flag
         """
         new_dataset_path_info = DaplaDatasetPathInfo(new_dataset_path)
         existing_dataset_path_info = DaplaDatasetPathInfo(existing_dataset_path)
-
+        return [
             {
                 "name": "Bucket name",
                 "success": (

@@ -290,6 +291,20 @@ class Datadoc:
                 ),
             },
         ]
+
+    @staticmethod
+    def _check_ready_to_merge(
+        results: list[dict[str, object]], *, errors_as_warnings: bool
+    ) -> None:
+        """Check if the datasets are consistent enough to make a successful merge of metadata.
+
+        Args:
+            results: List if dict with property name and boolean success flag
+            errors_as_warnings: True if failing checks should be raised as warnings, not errors.
+
+        Raises:
+            InconsistentDatasetsError: If inconsistencies are found and `errors_as_warnings == False`
+        """
         if failures := [result for result in results if not result["success"]]:
             msg = f"{INCONSISTENCIES_MESSAGE} {', '.join(str(f['name']) for f in failures)}"
             if errors_as_warnings:
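
The consistency check is now split in two steps: _check_dataset_consistency returns a list of {"name": ..., "success": ...} dicts (stored on self.dataset_consistency_status), and _check_ready_to_merge turns any failures into warnings or an InconsistentDatasetsError. A minimal sketch of how the two static methods compose; the result values below are illustrative, not taken from a real dataset:

    # Illustrative only: the result dicts follow the shape shown in the diff above,
    # the check names and success values are made up.
    results = [
        {"name": "Bucket name", "success": True},
        {"name": "Dataset state", "success": False},
    ]
    # errors_as_warnings=True reports failures as warnings;
    # errors_as_warnings=False raises InconsistentDatasetsError.
    Datadoc._check_ready_to_merge(results, errors_as_warnings=True)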

dapla_metadata/standards/name_validator.py
CHANGED

@@ -128,9 +128,9 @@ class NamingStandardReport:
         """Returns an appropriate message based on the success rate."""
         rate = self.success_rate()
         if rate is not None:
-            if rate
+            if 95 <= rate <= 100:
                 return SSB_NAMING_STANDARD_REPORT_RESULT_BEST
-            if 70 < rate <
+            if 70 < rate < 95:
                 return SSB_NAMING_STANDARD_REPORT_RESULT_GOOD
             if 40 <= rate <= 70:
                 return SSB_NAMING_STANDARD_REPORT_RESULT_AVERAGE
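
The report bands now read: 95 to 100 gives the "best" message, strictly between 70 and 95 gives "good", and 40 to 70 gives "average". A small standalone sketch of the same band logic; the message strings and the branch below 40 are placeholders, since that part of the file is not shown in this hunk:

    def report_message(rate: float) -> str:
        # Bands mirror the hunk above; the return strings are placeholders.
        if 95 <= rate <= 100:
            return "best"
        if 70 < rate < 95:
            return "good"
        if 40 <= rate <= 70:
            return "average"
        # The below-40 branch is not shown in the diff; this fallback is an assumption.
        return "needs attention"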

dapla_metadata/standards/utils/constants.py
CHANGED

@@ -9,7 +9,7 @@ NAME_STANDARD_SUCCESS = "Filene dine er i samsvar med SSB-navnestandarden"
 NAME_STANDARD_VIOLATION = "Det er oppdaget brudd på SSB-navnestandard:"
 
 MISSING_BUCKET_NAME = "Filnavn mangler bøttenavn ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#obligatoriske-mapper"
-MISSING_VERSION = "Filnavn mangler versjon ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#
+MISSING_VERSION = "Filnavn mangler versjon, hvis ikke filen er nyeste versjon kan dette være brudd på navnestandarden ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#versjonering-av-datasett"
 MISSING_PERIOD = "Filnavn mangler gyldighetsperiode ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#filnavn"
 MISSING_SHORT_NAME = "Kortnavn for statistikk mangler ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#obligatoriske-mapper"
 MISSING_DATA_STATE = "Mappe for datatilstand mangler ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#obligatoriske-mapper"

@@ -26,7 +26,7 @@ BUCKET_NAME_UNKNOWN = "Kan ikke validere bøttenavn"
 
 SSB_NAMING_STANDARD_REPORT = "SSB navnestandard rapport"
 SSB_NAMING_STANDARD_REPORT_SUCCESS_RATE = "Suksess rate"
-SSB_NAMING_STANDARD_REPORT_RESULT_BEST = "🚀 Fantastisk!
+SSB_NAMING_STANDARD_REPORT_RESULT_BEST = "🚀 Fantastisk! 🎉\n"
 SSB_NAMING_STANDARD_REPORT_RESULT_GOOD = (
     "✅ Bra jobba! Fortsatt litt rom for forbedring. 😊\n"
 )

dapla_metadata/variable_definitions/_utils/config.py
CHANGED

@@ -59,8 +59,7 @@ def get_vardef_host() -> str:
         case DaplaEnvironment.TEST:
             return VARDEF_HOST_TEST
         case DaplaEnvironment.DEV:
-
-            raise NotImplementedError(msg)
+            return VARDEF_HOST_TEST
         case _:
             return get_config_item("VARDEF_HOST") or "http://localhost:8080"
 
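
With this change get_vardef_host() no longer raises NotImplementedError in the DEV environment; it falls back to the TEST host. A hedged usage sketch (the import path follows the file listing above; how the environment is detected is internal to the module):

    from dapla_metadata.variable_definitions._utils.config import get_vardef_host

    # In DEV this now resolves to the TEST host instead of raising NotImplementedError.
    host = get_vardef_host()
    print(host)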

dapla_metadata/variable_definitions/_utils/files.py
CHANGED

@@ -10,6 +10,9 @@ import pytz
 from pydantic.config import JsonDict
 from ruamel.yaml import YAML
 from ruamel.yaml import CommentedMap
+from ruamel.yaml import RoundTripRepresenter
+from ruamel.yaml.scalarstring import DoubleQuotedScalarString
+from ruamel.yaml.scalarstring import FoldedScalarString
 
 from dapla_metadata.variable_definitions._generated.vardef_client.models.complete_response import (
     CompleteResponse,

@@ -40,6 +43,7 @@ from dapla_metadata.variable_definitions._utils.constants import (
 from dapla_metadata.variable_definitions._utils.constants import (
     VARIABLE_STATUS_FIELD_NAME,
 )
+from dapla_metadata.variable_definitions._utils.constants import YAML_STR_TAG
 from dapla_metadata.variable_definitions._utils.descriptions import (
     apply_norwegian_descriptions_to_model,
 )

@@ -120,14 +124,15 @@ def _populate_commented_map(
 ) -> None:
     """Add data to a CommentedMap."""
     commented_map[field_name] = value
-    field = model_instance.model_fields[field_name]
+    field = type(model_instance).model_fields[field_name]
     description: JsonValue = cast(
         JsonDict,
         field.json_schema_extra,
     )[NORWEGIAN_DESCRIPTIONS]
     if description is not None:
         new_description = (
-
+            "\n"
+            + (REQUIRED_FIELD if field.is_required() else OPTIONAL_FIELD)
             + "\n"
             + str(description)
         )

@@ -174,14 +179,20 @@ def _validate_and_create_directory(custom_directory: Path) -> Path:
     return custom_directory
 
 
-def
-
-    yaml.
-
+def configure_yaml(yaml: YAML) -> YAML:
+    """Common Yaml config for variable definitions."""
+    yaml.Representer = RoundTripRepresenter  # Preserve the order of keys etc.
+    yaml.default_flow_style = False  # Ensures pretty YAML formatting block style
+    yaml.allow_unicode = True  # Support special characters
+    yaml.preserve_quotes = True
+    yaml.width = 180  # wrap long lines
+    yaml.indent(
+        mapping=4, sequence=2, offset=0
+    )  # Ensure indentation for nested keys and lists
     yaml.representer.add_representer(
         VariableStatus,
         lambda dumper, data: dumper.represent_scalar(
-
+            YAML_STR_TAG,
             data.value,
         ),
     )
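
configure_yaml now centralizes the ruamel.yaml settings (round-trip representer, block style, unicode, 180-character line width, indentation) so that template generation and VariableDefinition._convert_to_yaml_output share one configuration. A minimal sketch of the helper in use; the sample data is made up:

    import sys

    from ruamel.yaml import YAML

    from dapla_metadata.variable_definitions._utils.files import configure_yaml

    yaml = YAML()
    configure_yaml(yaml)
    # Dumps in block style with 4-space mapping indent and long lines wrapped at 180 chars.
    yaml.dump({"short_name": "eksempel", "unit_types": ["20"]}, sys.stdout)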

@@ -189,6 +200,72 @@ def _configure_yaml() -> YAML:
     return yaml
 
 
+def _safe_get(data: dict, keys: list):
+    """Safely navigate nested dictionaries."""
+    for key in keys:
+        if not isinstance(data, dict) or key not in data or data[key] is None:
+            return None
+        data = data[key]
+    return data
+
+
+def _safe_set_folded(data: dict, path: str, lang: str):
+    keys = path.split(".")
+    parent = _safe_get(data, keys)
+    if isinstance(parent, dict) and lang in parent and parent[lang] is not None:
+        parent[lang] = FoldedScalarString(parent[lang])
+
+
+def pre_process_data(data: dict) -> dict:
+    """Format Variable definition model fields with ruamel yaml scalar string types."""
+    folded_fields = [
+        ("definition", ["nb", "nn", "en"]),
+        ("name", ["nb", "nn", "en"]),
+        ("comment", ["nb", "nn", "en"]),
+        ("contact.title", ["nb", "nn", "en"]),
+    ]
+    for field_path, langs in folded_fields:
+        for lang in langs:
+            _safe_set_folded(data, field_path, lang)
+
+    list_fields = [
+        "unit_types",
+        "subject_fields",
+        "related_variable_definition_uris",
+    ]
+    for key in list_fields:
+        if isinstance(data.get(key), list):
+            data[key] = [
+                DoubleQuotedScalarString(item) for item in data[key] if item is not None
+            ]
+
+    single_line_fields = [
+        "short_name",
+        "classification_reference",
+        "measurement_type",
+        "external_reference_uri",
+        "created_by",
+        "id",
+        "last_updated_by",
+    ]
+    for key in single_line_fields:
+        if data.get(key) is not None:
+            data[key] = DoubleQuotedScalarString(data[key])
+    # Special case due to complex structure
+    owner = data.get("owner")
+    if isinstance(owner, dict):
+        if owner.get("team") is not None:
+            owner["team"] = DoubleQuotedScalarString(owner["team"])
+        if isinstance(owner.get("groups"), list):
+            owner["groups"] = [
+                DoubleQuotedScalarString(item)
+                for item in owner["groups"]
+                if item is not None
+            ]
+
+    return data
+
+
 def _model_to_yaml_with_comments(
     model_instance: CompleteResponse,
     file_name: str,
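
pre_process_data decorates selected fields with ruamel.yaml scalar-string types before dumping: multilingual free-text fields become FoldedScalarString (rendered as folded blocks) and known list and scalar fields become DoubleQuotedScalarString. A small illustrative sketch of the effect; the input dict is invented and only meant to match the shapes handled above:

    from dapla_metadata.variable_definitions._utils.files import pre_process_data

    data = {
        "short_name": "eksempel_kortnavn",
        "definition": {"nb": "En lengre definisjonstekst ...", "nn": None, "en": None},
        "unit_types": ["20"],
        "owner": {"team": "et-team", "groups": ["en-gruppe"]},
    }
    processed = pre_process_data(data)
    # processed["definition"]["nb"] is now a FoldedScalarString;
    # "short_name", the "unit_types" items and the owner fields are DoubleQuotedScalarString.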

@@ -209,7 +286,8 @@ def _model_to_yaml_with_comments(
     Returns:
         Path: The file path of the generated YAML file.
     """
-    yaml =
+    yaml = YAML()
+    configure_yaml(yaml)
 
     from dapla_metadata.variable_definitions.variable_definition import (
         VariableDefinition,

@@ -223,7 +301,7 @@ def _model_to_yaml_with_comments(
         serialize_as_any=True,
         warnings="error",
     )
-
+    data = pre_process_data(data)
     # One CommentMap for each section in the yaml file
     machine_generated_map = CommentedMap()
     commented_map = CommentedMap()

@@ -244,6 +322,8 @@ def _model_to_yaml_with_comments(
             model_instance,
         )
     elif field_name not in {VARIABLE_STATUS_FIELD_NAME, OWNER_FIELD_NAME}:
+        if isinstance(value, str):
+            value.strip()
         _populate_commented_map(field_name, value, commented_map, model_instance)
 
     base_path = (

dapla_metadata/variable_definitions/_utils/template_files.py
CHANGED

@@ -41,30 +41,34 @@ def _get_default_template() -> "VariableDefinition":
 
     return VariableDefinition(
         name=LanguageStringType(
-            nb="
-            nn="
-            en="
+            nb="navn",
+            nn="namn",
+            en="name",
         ),
-        short_name="
+        short_name="generert_kortnavn",
         definition=LanguageStringType(
-            nb="
-            nn="
-            en="
+            nb="definisjonstekst",
+            nn="definisjonstekst",
+            en="definition text",
         ),
-        classification_reference="class_id",
         valid_from=DEFAULT_DATE,
-        unit_types=["
-        subject_fields=["
+        unit_types=[""],
+        subject_fields=[""],
         contains_special_categories_of_personal_data=False,
         variable_status=VariableStatus.DRAFT.value,
         owner=Owner(team="default team", groups=["default group"]),
         contact=Contact(
             title=LanguageStringType(
-                nb="
-                nn="
-                en="
+                nb="generert tittel",
+                nn="generert tittel",
+                en="generert title",
             ),
-            email="
+            email="generert@ssb.no",
+        ),
+        comment=LanguageStringType(
+            nb="",
+            nn="",
+            en="",
         ),
         id="",
         patch_id=0,

dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml
CHANGED

@@ -10,12 +10,18 @@ definition: |
 classification_reference: |
     ID av en klassifikasjon eller kodeliste fra KLASS som beskriver verdiene variabelen kan anta.
     For eksempel vil variabelen 'Sivilstand' ha klassifikasjon 'Standard for sivilstand' (kan vises på https://www.ssb.no/klass/klassifikasjoner/19 ) som har ID 19.
+    Eksempel: "19"
 unit_types: |
     Enhetstyper - enhetene som beskrives av denne variabelen. Variabelen “sivilstand” vil f.eks. ha enhetstypen person, mens f.eks. “Produsentpris for tjenester” vil ha både foretak og bedrift som enhetstyper siden variabelen kan beskrive begge.
     Verdier skal være koder fra: https://www.ssb.no/klass/klassifikasjoner/702.
+    Eksempel:
+      - "20"
 subject_fields: |
     Statistikkområder som variabelen brukes innenfor. For eksempel tilhører variabelen “Sivilstand” statistikkområdet “Befolkning”.
     Verdier skal være koder fra https://www.ssb.no/klass/klassifikasjoner/618.
+    Eksempel:
+      - "bf"
+      - "be"
 contains_special_categories_of_personal_data: |
     Viser om variabelen inneholder spesielt sensitive personopplysninger.
     Kategorier:

@@ -30,21 +36,32 @@ contains_special_categories_of_personal_data: |
     - opplysninger om seksuelle forhold
     - opplysninger om seksuell legning
     ref: https://lovdata.no/dokument/NL/lov/2018-06-15-38/KAPITTEL_gdpr-2#gdpr/a9
+    Eksempel: true
 measurement_type: |
     Måletype som en kvantitativ variabelen tilhører, f.eks. valuta, areal osv.
     Verdien skal være en kode fra: https://www.ssb.no/klass/klassifikasjoner/303
+    Eksempel: "03"
 valid_from: |
     Datoen variabeldefinisjonen er gyldig f.o.m.
+    Eksempel: 1999-01-11
 valid_until: |
     Datoen variabeldefinisjonens var gyldig t.o.m. Settes hvis definisjonen skal erstattet av en ny definisjon (med en ny gyldighetsperiode), eller variabelen ikke lenger skal brukes.
+    Eksempel: 2025-10-03
 external_reference_uri: |
     En peker (URI) til ekstern definisjon/dokumentasjon, f.eks. ei webside som er relevant for variabelen.
+    Eksempel: "https://www.landbruksdirektoratet.com"
 comment: |
     Her kan en sette inn eventuelle tilleggsopplysninger som ikke hører hjemme i selve definisjonen. Variabelen “Landbakgrunn” har f.eks. merknaden “Fra og med 1.1.2003 ble definisjon endret til også å trekke inn besteforeldrenes fødeland”.
 related_variable_definition_uris: |
     Her kan en legge inn URIer til andre variabler som er relevante. Eksempelvis er variabelen “Inntekt etter skatt” en beregnet variabel der “Yrkesinntekter” og “Kapitalinntekter” inngår i beregningen. En kan da legge inn deres URI-er i dette feltet.
+    Eksempel: "https://example.com/"
 contact: |
     Her dokumenterer en navn og epost for person eller gruppe som kan svare på spørsmål.
+    Eksempel:
+      contact:
+        title:
+          nb: "Seksjonsleder"
+        email: leder@ssb.no
 variable_status: |
     Livssyklus for variabelen.
 id: |

dapla_metadata/variable_definitions/vardef.py
CHANGED

@@ -108,7 +108,7 @@ class Vardef:
         )
 
         logger.info(
-            "Successfully created variable definition '%s' with ID '%s'",
+            "✅ Successfully created variable definition '%s' with ID '%s'",
             new_variable.short_name,
             new_variable.id,
         )

@@ -165,7 +165,7 @@ class Vardef:
         )
 
         logger.info(
-            "Successfully migrated variable definition '%s' with ID '%s'",
+            "✅ Successfully migrated variable definition '%s' with ID '%s'",
             migrated_variable.short_name,
             migrated_variable.id,
         )

@@ -273,7 +273,7 @@ class Vardef:
             custom_directory=Path(custom_file_path) if custom_file_path else None,
         )
         logger.info(
-            f"Created editable variable definition template file at {file_path}",  # noqa: G004
+            f"✅ Created editable variable definition template file at {file_path}",  # noqa: G004
         )
         return file_path
 

dapla_metadata/variable_definitions/variable_definition.py
CHANGED

@@ -34,6 +34,8 @@ from dapla_metadata.variable_definitions._generated.vardef_client.models.variabl
 )
 from dapla_metadata.variable_definitions._utils import config
 from dapla_metadata.variable_definitions._utils._client import VardefClient
+from dapla_metadata.variable_definitions._utils.files import configure_yaml
+from dapla_metadata.variable_definitions._utils.files import pre_process_data
 from dapla_metadata.variable_definitions._utils.variable_definition_files import (
     _read_file_to_model,
 )

@@ -126,7 +128,7 @@ class VariableDefinition(CompleteResponse):
         self.__dict__.update(updated)
 
         logger.info(
-            "Successfully updated variable definition '%s' with ID '%s'",
+            "✅ Successfully updated variable definition '%s' with ID '%s'",
             updated.short_name,
             updated.id,
         )

@@ -176,7 +178,7 @@ class VariableDefinition(CompleteResponse):
             variable_definition_id=self.id,
             active_group=config.get_active_group(),
         )
-        return f"Variable {self.id} safely deleted"
+        return f"✅ Variable {self.id} safely deleted"
 
     @vardef_exception_handler
     def get_patch(self, patch_id: int) -> "VariableDefinition":

@@ -205,9 +207,9 @@ class VariableDefinition(CompleteResponse):
 
         Patches are to be used for minor changes which don't require a new Validity Period.
         Examples of reasons for creating a new Patch:
-
-
-
+        - Correcting a typo
+        - Adding a translation
+        - Adding a subject field
 
         Supply only the fields to be changed. Other fields will retain their current values.
 

@@ -234,7 +236,7 @@ class VariableDefinition(CompleteResponse):
         self.__dict__.update(new_patch)
 
         logger.info(
-            "Successfully created patch with patch ID '%s' for variable definition '%s' with ID '%s'",
+            "✅ Successfully created patch with patch ID '%s' for variable definition '%s' with ID '%s'",
             new_patch.patch_id,
             new_patch.short_name,
             new_patch.id,

@@ -309,7 +311,7 @@ class VariableDefinition(CompleteResponse):
         self.__dict__.update(new_validity_period)
 
         logger.info(
-            "Successfully created validity period that is valid from '%s' for variable definition '%s' with ID '%s'",
+            "✅ Successfully created validity period that is valid from '%s' for variable definition '%s' with ID '%s'",
             new_validity_period.valid_from,
             new_validity_period.short_name,
             new_validity_period.id,

@@ -350,7 +352,7 @@ class VariableDefinition(CompleteResponse):
             UpdateDraft(variable_status=VariableStatus.PUBLISHED_INTERNAL),
         )
         logger.info(
-            "Variable definition '%s' with ID '%s' successfully published, new status: %s",
+            "✅ Variable definition '%s' with ID '%s' successfully published, new status: %s",
             update.short_name,
             update.id,
             update.variable_status,

@@ -373,7 +375,7 @@ class VariableDefinition(CompleteResponse):
             Patch(variable_status=VariableStatus.PUBLISHED_EXTERNAL),
         )
         logger.info(
-            "Variable definition '%s' with ID '%s' successfully published, new status: %s",
+            "✅ Variable definition '%s' with ID '%s' successfully published, new status: %s",
             update.short_name,
             update.id,
             update.variable_status,

@@ -387,7 +389,7 @@ class VariableDefinition(CompleteResponse):
         )
         self.set_file_path(file_path)
         logger.info(
-            f"Created editable variable definition file at {file_path}",  # noqa: G004
+            f"✅ Created editable variable definition file at {file_path}",  # noqa: G004
         )
         return self
 

@@ -405,16 +407,12 @@ class VariableDefinition(CompleteResponse):
 
     def _convert_to_yaml_output(self) -> str:
         stream = StringIO()
-        with ruamel.yaml.YAML(
-
-
-
-
-
-            self.model_dump(
-                mode="json",
-                serialize_as_any=True,
-                warnings="error",
-            ),
+        with ruamel.yaml.YAML(output=stream) as yaml:
+            configure_yaml(yaml)
+            data = self.model_dump(
+                mode="json",
+                serialize_as_any=True,
+                warnings="error",
             )
+            yaml.dump(pre_process_data(data))
         return stream.getvalue()
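
_convert_to_yaml_output now reuses the shared configure_yaml settings and runs the dumped model through pre_process_data before serializing to a string. A standalone sketch of the same dump pattern, using an invented payload instead of a real VariableDefinition:

    from io import StringIO

    import ruamel.yaml

    from dapla_metadata.variable_definitions._utils.files import configure_yaml
    from dapla_metadata.variable_definitions._utils.files import pre_process_data

    stream = StringIO()
    with ruamel.yaml.YAML(output=stream) as yaml:
        configure_yaml(yaml)
        # The payload below is made up; the real method dumps self.model_dump(mode="json").
        yaml.dump(pre_process_data({"short_name": "eksempel", "unit_types": ["20"]}))
    print(stream.getvalue())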

{dapla_toolbelt_metadata-0.6.2.dist-info → dapla_toolbelt_metadata-0.6.3.dist-info}/RECORD
CHANGED

@@ -7,7 +7,7 @@ dapla_metadata/dapla/__init__.py,sha256=tkapF-YwmruPPrKvN3pEoCZqb7xvJx_ogBM8XyGM
 dapla_metadata/dapla/user_info.py,sha256=bENez-ICt9ySR8orYebO68Q3_2LkIW9QTL58DTctmEQ,4833
 dapla_metadata/datasets/__init__.py,sha256=TvzskpdFC6hGcC9_55URT5jr5wNAPzXuISd2UjJWM_8,280
 dapla_metadata/datasets/code_list.py,sha256=kp1O6sUiUAP9WKlWY8IgHWx_1IOzJA63WveHqolgKmg,9082
-dapla_metadata/datasets/core.py,sha256=
+dapla_metadata/datasets/core.py,sha256=LgOyDDLV8vxUrticcgWPORlX19egm2epCerg5IcZQPs,22298
 dapla_metadata/datasets/dapla_dataset_path_info.py,sha256=zdkVjxlqXMBe7eTAneUrTDP0_fx7JsEQ_0JrKjREhfU,26854
 dapla_metadata/datasets/dataset_parser.py,sha256=bc3KOIDQGgdZMPh3XVHhiKMsY6FxIY9glvGlwTM4g7I,8233
 dapla_metadata/datasets/external_sources/__init__.py,sha256=qvIdXwqyEmXNUCB94ZtZXRzifdW4hiXASFFPtC70f6E,83

@@ -21,10 +21,10 @@ dapla_metadata/datasets/utility/constants.py,sha256=SqZMc1v8rO2b_nRFJR7frVd0TAGv
 dapla_metadata/datasets/utility/enums.py,sha256=SpV4xlmP1YMaJPbmX03hqRLHUOhXIk5gquTeJ8G_5OE,432
 dapla_metadata/datasets/utility/utils.py,sha256=fAevz9X0PHw-JL0_4V0geTkoBV31qoO7-dVLFJaIfbo,18370
 dapla_metadata/standards/__init__.py,sha256=n8jnMrudLuScSdfQ4UMJorc-Ptg3Y1-ilT8zAaQnM70,179
-dapla_metadata/standards/name_validator.py,sha256=
+dapla_metadata/standards/name_validator.py,sha256=6-DQE_EKVd6UjL--EXpFcZDQtusVbSFaWaUY-CfOV2c,9184
 dapla_metadata/standards/standard_validators.py,sha256=tcCiCI76wUVtMzXA2oCgdauZc0uGgUi11FKu-t7KGwQ,3767
 dapla_metadata/standards/utils/__init__.py,sha256=AiM7JcpFsAgyuCyLDYZo9kI94wvIImMDGoV2lKhS4pE,42
-dapla_metadata/standards/utils/constants.py,sha256=
+dapla_metadata/standards/utils/constants.py,sha256=mhWNFnS6NMsRl0c_deIdzY7_bD_wKn_oej6rzDjgwq4,2578
 dapla_metadata/variable_definitions/__init__.py,sha256=j_Nn5mnlZ2uio9moDFLE2xpALqrYpupIZMlvwbLuEuA,391
 dapla_metadata/variable_definitions/_generated/.openapi-generator/FILES,sha256=hfNllHEkFODP0XbgqZB5Tz2mmEBFeAeMplXXslczo1E,634
 dapla_metadata/variable_definitions/_generated/.openapi-generator/VERSION,sha256=Y6lrqS2bXoujk5K-DCAwRFdRmkCKuTgvlngEx6FY5So,7

@@ -73,17 +73,17 @@ dapla_metadata/variable_definitions/_generated/vardef_client/py.typed,sha256=47D
 dapla_metadata/variable_definitions/_generated/vardef_client/rest.py,sha256=x4PWmg3IYQBr8OgnrWr3l4Ke2rElHP3zAEVxk2U-mOc,12022
 dapla_metadata/variable_definitions/_utils/__init__.py,sha256=qAhRLJoTBqtR3f9xRXTRhD7-5Xg0Opk1Ks5F4AUYnpA,45
 dapla_metadata/variable_definitions/_utils/_client.py,sha256=v1-9VjrdPI6-sroam5vXMPEV1dQMPsYk7KyGd48HjYw,971
-dapla_metadata/variable_definitions/_utils/config.py,sha256=
-dapla_metadata/variable_definitions/_utils/constants.py,sha256=
+dapla_metadata/variable_definitions/_utils/config.py,sha256=BpLrnuqgtqz_kxBc_Kd-I1QNL7y2RxRXgX-IVbMIclQ,2416
+dapla_metadata/variable_definitions/_utils/constants.py,sha256=M9aF0P8iAQaUpjGsUWaWpOphIKxisfJmRPEf9vBB1Tc,1523
 dapla_metadata/variable_definitions/_utils/descriptions.py,sha256=bB5QHNc4eOhmpLQHCty-CP5_aA82chkICifXw430suI,2746
-dapla_metadata/variable_definitions/_utils/files.py,sha256=
-dapla_metadata/variable_definitions/_utils/template_files.py,sha256=
+dapla_metadata/variable_definitions/_utils/files.py,sha256=d6gY7X-lQ4QAsy0YjRF5SmDmWbIfWElD2awwe5idQaM,12066
+dapla_metadata/variable_definitions/_utils/template_files.py,sha256=FSI8hTgW_ADA5r9eoQXoMQw0gYNdquN2ePGbs9bBZI0,3699
 dapla_metadata/variable_definitions/_utils/variable_definition_files.py,sha256=ePlbsrVl1JNMDUomS-ldYOeOilmcjQy0I5RhorShE2o,2785
 dapla_metadata/variable_definitions/exceptions.py,sha256=z6Gtd84FboDu7vWjC3wathIF7I0gF0imtRhwMkr16lY,7851
-dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml,sha256=
-dapla_metadata/variable_definitions/vardef.py,sha256=
-dapla_metadata/variable_definitions/variable_definition.py,sha256=
-dapla_toolbelt_metadata-0.6.
-dapla_toolbelt_metadata-0.6.
-dapla_toolbelt_metadata-0.6.
-dapla_toolbelt_metadata-0.6.
+dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml,sha256=Bl8B8Gv1SAnuEzq4XzCfjVPQzfPsueCnigTeNHszHoE,4890
+dapla_metadata/variable_definitions/vardef.py,sha256=KYd31nCGhxuzC0hpKR6foQjO39Tlb3vu9IDqUoMvTeY,11352
+dapla_metadata/variable_definitions/variable_definition.py,sha256=sj49uot0e4UJW4QJ3dEJGgjY4yfCHOkxS2NdD2t60b8,14883
+dapla_toolbelt_metadata-0.6.3.dist-info/LICENSE,sha256=np3IfD5m0ZUofn_kVzDZqliozuiO6wrktw3LRPjyEiI,1073
+dapla_toolbelt_metadata-0.6.3.dist-info/METADATA,sha256=DpjGArukPeGhp9Af_3je3M0ezZuRTrfX5BhS4juVoxQ,4917
+dapla_toolbelt_metadata-0.6.3.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+dapla_toolbelt_metadata-0.6.3.dist-info/RECORD,,
File without changes
|
|
File without changes
|