dapla-toolbelt-metadata 0.6.3__py3-none-any.whl → 0.6.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dapla-toolbelt-metadata might be problematic.

@@ -109,7 +109,9 @@ class Datadoc:
         self.dataset_path: pathlib.Path | CloudPath | None = None
         self.dataset = model.Dataset()
         self.variables: list = []
+        self.pseudo_variables: list[model.PseudoVariable] = []
         self.variables_lookup: dict[str, model.Variable] = {}
+        self.pseudo_variables_lookup: dict[str, model.PseudoVariable] = {}
         self.explicitly_defined_metadata_document = False
         self.dataset_consistency_status: list = []
         if metadata_document_path:
@@ -149,11 +151,19 @@ class Datadoc:
         """
         extracted_metadata: model.DatadocMetadata | None = None
         existing_metadata: model.DatadocMetadata | None = None
+        existing_pseudonymization: model.PseudonymizationMetadata | None = None
+
         if self.metadata_document and self.metadata_document.exists():
             existing_metadata = self._extract_metadata_from_existing_document(
                 self.metadata_document,
             )

+            existing_pseudonymization = (
+                self._extract_pseudonymization_from_existing_document(
+                    self.metadata_document,
+                )
+            )
+
         if (
             self.dataset_path is not None
             and self.dataset == model.Dataset()
@@ -192,10 +202,15 @@ class Datadoc:
             self._set_metadata(merged_metadata)
         else:
             self._set_metadata(existing_metadata or extracted_metadata)
+
+        if existing_pseudonymization:
+            self._set_pseudonymization_metadata(existing_pseudonymization)
+
         set_default_values_variables(self.variables)
         set_default_values_dataset(self.dataset)
         set_dataset_owner(self.dataset)
         self._create_variables_lookup()
+        self._create_pseudo_variables_lookup()

     def _get_existing_file_path(
         self,
@@ -222,11 +237,27 @@ class Datadoc:
         self.dataset = merged_metadata.dataset
         self.variables = merged_metadata.variables

+    def _set_pseudonymization_metadata(
+        self,
+        existing_pseudonymization: model.PseudonymizationMetadata | None,
+    ) -> None:
+        if not existing_pseudonymization or not (
+            existing_pseudonymization.pseudo_variables
+        ):
+            msg = "Could not read pseudonymization metadata"
+            raise ValueError(msg)
+        self.pseudo_variables = existing_pseudonymization.pseudo_variables
+
     def _create_variables_lookup(self) -> None:
         self.variables_lookup = {
             v.short_name: v for v in self.variables if v.short_name
         }

+    def _create_pseudo_variables_lookup(self) -> None:
+        self.pseudo_variables_lookup = {
+            v.short_name: v for v in self.pseudo_variables if v.short_name
+        }
+
     @staticmethod
     def _check_dataset_consistency(
         new_dataset_path: Path | CloudPath,
@@ -399,6 +430,42 @@ class Datadoc:
             )
         return None

+    def _extract_pseudonymization_from_existing_document(
+        self,
+        document: pathlib.Path | CloudPath,
+    ) -> model.PseudonymizationMetadata | None:
+        """Read pseudo metadata from an existing metadata document.
+
+        If there is pseudo metadata in the document supplied, the method validates and returns the pseudonymization structure.
+
+        Args:
+            document: A path to the existing metadata document.
+
+        Raises:
+            json.JSONDecodeError: If the metadata document cannot be parsed.
+        """
+        try:
+            with document.open(mode="r", encoding="utf-8") as file:
+                fresh_metadata = json.load(file)
+        except json.JSONDecodeError:
+            logger.warning(
+                "Could not open existing metadata file %s.",
+                document,
+                exc_info=True,
+            )
+            return None
+
+        if not is_metadata_in_container_structure(fresh_metadata):
+            return None
+
+        pseudonymization_metadata = fresh_metadata.get("pseudonymization")
+        if pseudonymization_metadata is None:
+            return None
+
+        return model.PseudonymizationMetadata.model_validate_json(
+            json.dumps(pseudonymization_metadata),
+        )
+
     def _extract_subject_field_from_path(
         self,
         dapla_dataset_path_info: DaplaDatasetPathInfo,
@@ -516,6 +583,11 @@ class Datadoc:
         )
         if self.container:
             self.container.datadoc = datadoc
+            if not self.container.pseudonymization:
+                self.container.pseudonymization = model.PseudonymizationMetadata(
+                    pseudo_dataset=model.PseudoDataset()
+                )
+            self.container.pseudonymization.pseudo_variables = self.pseudo_variables
         else:
             self.container = model.MetadataContainer(datadoc=datadoc)
         if self.metadata_document:
@@ -545,3 +617,16 @@ class Datadoc:
             self.dataset,
         ) + num_obligatory_variables_fields_completed(self.variables)
         return calculate_percentage(num_set_fields, num_all_fields)
+
+    def add_pseudo_variable(self, variable_short_name: str) -> None:
+        """Adds a new pseudo variable to the list of pseudonymized variables."""
+        if self.variables_lookup[variable_short_name] is not None:
+            pseudo_variable = model.PseudoVariable(short_name=variable_short_name)
+            self.pseudo_variables.append(pseudo_variable)
+            self.pseudo_variables_lookup[variable_short_name] = pseudo_variable
+
+    def get_pseudo_variable(
+        self, variable_short_name: str
+    ) -> model.PseudoVariable | None:
+        """Finds a pseudo variable by shortname."""
+        return self.pseudo_variables_lookup.get(variable_short_name)
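
The pseudonymization support above mirrors the existing variable handling with a parallel pseudo_variables list and pseudo_variables_lookup dict, plus a small public API (add_pseudo_variable / get_pseudo_variable). A minimal usage sketch, assuming Datadoc is importable from dapla_metadata.datasets; the path and the "lonn" short name below are purely illustrative:

from dapla_metadata.datasets import Datadoc

# Illustrative dataset path; real datasets follow the Dapla naming conventions.
meta = Datadoc(dataset_path="gs://bucket/produkt/person_data_p2021_v1.parquet")

# add_pseudo_variable first looks the short name up in variables_lookup,
# then records a model.PseudoVariable and indexes it in pseudo_variables_lookup.
meta.add_pseudo_variable("lonn")

pseudo = meta.get_pseudo_variable("lonn")
print(pseudo.short_name if pseudo else "not pseudonymized")
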
@@ -41,3 +41,24 @@ OPTIONAL_FIELD = "~ Valgfritt felt ~"
 REQUIRED_FIELD = "! Obligatorisk felt !"

 YAML_STR_TAG = "tag:yaml.org,2002:str"
+
+BLOCK_FIELDS = [
+    "definition",
+    "name",
+    "contact.title",
+    "comment",
+]
+
+DOUBLE_QUOTE_FIELDS = [
+    "unit_types",
+    "subject_fields",
+    "related_variable_definition_uris",
+    "owner",
+    "short_name",
+    "classification_reference",
+    "measurement_type",
+    "external_reference_uri",
+    "created_by",
+    "id",
+    "last_updated_by",
+]
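
BLOCK_FIELDS mixes flat keys ("definition", "name", "comment") with the dotted path "contact.title", which addresses a nested mapping; the formatting code later in this diff resolves such paths with its _safe_get helper before wrapping values. An illustrative sketch of the path semantics (not package code):

# Illustrative only: how a dotted field path addresses nested variable definition data.
data = {"contact": {"title": {"nb": "Seksjonsleder"}}, "definition": {"nb": "..."}}

def lookup(data: dict, dotted_path: str):
    node = data
    for part in dotted_path.split("."):
        node = node.get(part) if isinstance(node, dict) else None
    return node

print(lookup(data, "contact.title"))  # {'nb': 'Seksjonsleder'}
print(lookup(data, "definition"))  # {'nb': '...'}
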
@@ -4,6 +4,7 @@ import logging
 from datetime import datetime
 from pathlib import Path
 from typing import TYPE_CHECKING
+from typing import Any
 from typing import cast

 import pytz
@@ -12,7 +13,7 @@ from ruamel.yaml import YAML
 from ruamel.yaml import CommentedMap
 from ruamel.yaml import RoundTripRepresenter
 from ruamel.yaml.scalarstring import DoubleQuotedScalarString
-from ruamel.yaml.scalarstring import FoldedScalarString
+from ruamel.yaml.scalarstring import LiteralScalarString

 from dapla_metadata.variable_definitions._generated.vardef_client.models.complete_response import (
     CompleteResponse,
@@ -21,6 +22,8 @@ from dapla_metadata.variable_definitions._generated.vardef_client.models.variabl
     VariableStatus,
 )
 from dapla_metadata.variable_definitions._utils import config
+from dapla_metadata.variable_definitions._utils.constants import BLOCK_FIELDS
+from dapla_metadata.variable_definitions._utils.constants import DOUBLE_QUOTE_FIELDS
 from dapla_metadata.variable_definitions._utils.constants import (
     MACHINE_GENERATED_FIELDS,
 )
@@ -116,6 +119,15 @@ def _get_variable_definitions_dir():
     return folder_path


+def _set_field_requirement(field_name: str, field: Any) -> str | None:
+    """Determine the field requirement status."""
+    if field_name not in MACHINE_GENERATED_FIELDS:
+        if field.is_required() or field_name == VARIABLE_STATUS_FIELD_NAME:
+            return REQUIRED_FIELD
+        return OPTIONAL_FIELD
+    return None
+
+
 def _populate_commented_map(
     field_name: str,
     value: str,
@@ -129,12 +141,12 @@ def _populate_commented_map(
         JsonDict,
         field.json_schema_extra,
     )[NORWEGIAN_DESCRIPTIONS]
+    field_requirement: str | None = _set_field_requirement(field_name, field)
     if description is not None:
         new_description = (
-            "\n"
-            + (REQUIRED_FIELD if field.is_required() else OPTIONAL_FIELD)
-            + "\n"
-            + str(description)
+            ("\n" + field_requirement + "\n" + str(description))
+            if field_requirement
+            else ("\n" + str(description))
         )
         commented_map.yaml_set_comment_before_after_key(
             field_name,
@@ -185,9 +197,11 @@ def configure_yaml(yaml: YAML) -> YAML:
     yaml.default_flow_style = False # Ensures pretty YAML formatting block style
     yaml.allow_unicode = True # Support special characters
     yaml.preserve_quotes = True
-    yaml.width = 180 # wrap long lines
+    yaml.width = 4096 # prevent wrapping lines
     yaml.indent(
-        mapping=4, sequence=2, offset=0
+        mapping=4,
+        sequence=6,
+        offset=4,
     ) # Ensure indentation for nested keys and lists
     yaml.representer.add_representer(
         VariableStatus,
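
The configure_yaml change disables line wrapping (width raised from 180 to 4096) and deepens indentation for nested keys and sequences. A rough, self-contained sketch of the same ruamel.yaml settings on invented data (exact rendering may vary with the ruamel.yaml version):

import sys
from ruamel.yaml import YAML

yaml = YAML()
yaml.default_flow_style = False
yaml.allow_unicode = True
yaml.width = 4096  # effectively no line wrapping
yaml.indent(mapping=4, sequence=6, offset=4)  # deeper indentation for nested keys and list items

yaml.dump(
    {"contact": {"title": {"nb": "Seksjonsleder"}, "email": "leder@ssb.no"}, "unit_types": ["20"]},
    sys.stdout,
)
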
@@ -209,59 +223,63 @@ def _safe_get(data: dict, keys: list):
     return data


-def _safe_set_folded(data: dict, path: str, lang: str):
-    keys = path.split(".")
-    parent = _safe_get(data, keys)
-    if isinstance(parent, dict) and lang in parent and parent[lang] is not None:
-        parent[lang] = FoldedScalarString(parent[lang])
+def _apply_literal_scalars(field: dict):
+    """Helper function to wrap `LanguageStringType` values in `LiteralScalarString`.
+
+    This function wraps each non-`None` language value in a `LanguageStringType` field
+    in the `LiteralScalarString` YAML type, ensuring proper YAML formatting with block style.
+    """
+    for lang, value in field.items():
+        if value is not None:
+            field[lang] = LiteralScalarString(value)
+
+
+def _apply_double_quotes_to_dict_values(field: dict):
+    """Helper function to wrap dictionary values in `DoubleQuotedScalarString`.
+
+    This function wraps each non-`None` value in a dictionary, including values inside lists,
+    in the `DoubleQuotedScalarString` YAML type, ensuring proper YAML formatting with double quotes.
+    """
+    for sub_key, sub_value in field.items():
+        if isinstance(sub_value, list):
+            field[sub_key] = [
+                DoubleQuotedScalarString(item) for item in sub_value if item is not None
+            ]
+        elif sub_value is not None:
+            field[sub_key] = DoubleQuotedScalarString(sub_value)


 def pre_process_data(data: dict) -> dict:
-    """Format Variable definition model fields with ruamel yaml scalar string types."""
-    folded_fields = [
-        ("definition", ["nb", "nn", "en"]),
-        ("name", ["nb", "nn", "en"]),
-        ("comment", ["nb", "nn", "en"]),
-        ("contact.title", ["nb", "nn", "en"]),
-    ]
-    for field_path, langs in folded_fields:
-        for lang in langs:
-            _safe_set_folded(data, field_path, lang)
-
-    list_fields = [
-        "unit_types",
-        "subject_fields",
-        "related_variable_definition_uris",
-    ]
-    for key in list_fields:
-        if isinstance(data.get(key), list):
+    """Format variable definition model fields with ruamel YAML scalar string types.
+
+    This method sets the appropriate scalar string type (either `LiteralScalarString` or `DoubleQuotedScalarString`)
+    for fields of the variable definition model, based on predefined lists of fields.
+
+    It processes both nested dictionaries and lists, ensuring each element is formatted with the correct YAML string type.
+
+    Args:
+        data (dict): A dictionary containing the variable definition data.
+
+    Returns:
+        dict: The updated dictionary with model fields formatted as ruamel.yaml scalar string types.
+    """
+    for key in BLOCK_FIELDS:
+        keys = key.split(".")
+        field = _safe_get(data, keys)
+        if isinstance(field, dict):
+            _apply_literal_scalars(field)
+
+    for key in DOUBLE_QUOTE_FIELDS:
+        keys = key.split(".")
+        field = _safe_get(data, keys)
+        if isinstance(field, list):
             data[key] = [
-                DoubleQuotedScalarString(item) for item in data[key] if item is not None
+                DoubleQuotedScalarString(item) for item in field if item is not None
             ]
-
-    single_line_fields = [
-        "short_name",
-        "classification_reference",
-        "measurement_type",
-        "external_reference_uri",
-        "created_by",
-        "id",
-        "last_updated_by",
-    ]
-    for key in single_line_fields:
-        if data.get(key) is not None:
+        elif isinstance(field, str):
             data[key] = DoubleQuotedScalarString(data[key])
-    # Special case due to complex structure
-    owner = data.get("owner")
-    if isinstance(owner, dict):
-        if owner.get("team") is not None:
-            owner["team"] = DoubleQuotedScalarString(owner["team"])
-        if isinstance(owner.get("groups"), list):
-            owner["groups"] = [
-                DoubleQuotedScalarString(item)
-                for item in owner["groups"]
-                if item is not None
-            ]
+        elif isinstance(field, dict):
+            _apply_double_quotes_to_dict_values(field)

     return data
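
To see what the two field categories become in the written YAML, here is a stand-alone sketch of the scalar string wrapping with invented values (the package applies the same types via pre_process_data above):

import sys
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import DoubleQuotedScalarString, LiteralScalarString

yaml = YAML()
data = {
    # BLOCK_FIELDS values become literal block scalars (|-), preserving line breaks.
    "definition": {"nb": LiteralScalarString("Definisjonstekst\nover flere linjer")},
    # DOUBLE_QUOTE_FIELDS values are forced into "double quoted" strings.
    "unit_types": [DoubleQuotedScalarString("20")],
    "short_name": DoubleQuotedScalarString("generert_kortnavn"),
}
yaml.dump(data, sys.stdout)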
@@ -322,8 +340,6 @@ def _model_to_yaml_with_comments(
                 model_instance,
             )
         elif field_name not in {VARIABLE_STATUS_FIELD_NAME, OWNER_FIELD_NAME}:
-            if isinstance(value, str):
-                value.strip()
             _populate_commented_map(field_name, value, commented_map, model_instance)

     base_path = (
@@ -41,35 +41,24 @@ def _get_default_template() -> "VariableDefinition":

     return VariableDefinition(
         name=LanguageStringType(
-            nb="navn",
-            nn="namn",
-            en="name",
+            nb="Navn",
         ),
         short_name="generert_kortnavn",
         definition=LanguageStringType(
-            nb="definisjonstekst",
-            nn="definisjonstekst",
-            en="definition text",
+            nb="Definisjonstekst",
         ),
         valid_from=DEFAULT_DATE,
         unit_types=[""],
         subject_fields=[""],
         contains_special_categories_of_personal_data=False,
-        variable_status=VariableStatus.DRAFT.value,
         owner=Owner(team="default team", groups=["default group"]),
         contact=Contact(
             title=LanguageStringType(
                 nb="generert tittel",
-                nn="generert tittel",
-                en="generert title",
             ),
             email="generert@ssb.no",
         ),
-        comment=LanguageStringType(
-            nb="",
-            nn="",
-            en="",
-        ),
+        variable_status=VariableStatus.DRAFT.value,
         id="",
         patch_id=0,
         created_at=DEFAULT_DATE,
@@ -3,6 +3,7 @@
 import logging
 from os import PathLike
 from pathlib import Path
+from typing import Any
 from typing import TypeVar

 from pydantic import BaseModel
@@ -17,6 +18,7 @@ from dapla_metadata.variable_definitions._utils.files import _get_current_time
 from dapla_metadata.variable_definitions._utils.files import (
     _model_to_yaml_with_comments,
 )
+from dapla_metadata.variable_definitions._utils.files import configure_yaml

 logger = logging.getLogger(__name__)

@@ -46,13 +48,38 @@ def create_variable_yaml(

 def _read_variable_definition_file(file_path: Path) -> dict:
     yaml = YAML()
-
+    configure_yaml(yaml)
     logger.debug("Full path to variable definition file %s", file_path)
     logger.info("Reading from '%s'", file_path.name)
     with file_path.open(encoding="utf-8") as f:
         return yaml.load(f)


+def _strip_strings_recursively(data: Any) -> Any:
+    """Recursively strip leading and trailing whitespace from string values in nested dicts/lists.
+
+    This function traverses the provided data, which may be a dictionary, list, or other types,
+    and applies the following logic:
+    - If the data is a dictionary, it recursively strips string values in all key-value pairs.
+    - If the data is a list, it recursively strips string values in all list elements.
+    - If the data is a string, it strips leading and trailing whitespace.
+    - Any other data types are returned unchanged.
+
+    Args:
+        data: The input data, which may include nested dictionaries, lists, or other types.
+
+    Returns:
+        Any: The processed data, with strings stripped of whitespace or unchanged if not a string.
+    """
+    if isinstance(data, dict):
+        return {k: _strip_strings_recursively(v) for k, v in data.items()}
+    if isinstance(data, list):
+        return [_strip_strings_recursively(item) for item in data]
+    if isinstance(data, str):
+        return data.strip()
+    return data
+
+
 def _read_file_to_model(
     file_path: PathLike[str] | None,
     model_class: type[T],
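
A small illustration of what _strip_strings_recursively (defined above) does to freshly loaded YAML data, with invented values:

raw = {
    "short_name": "  wlonn  ",
    "definition": {"nb": "Definisjonstekst\n"},
    "unit_types": ["  20", "21  "],
    "patch_id": 0,
}
assert _strip_strings_recursively(raw) == {
    "short_name": "wlonn",
    "definition": {"nb": "Definisjonstekst"},
    "unit_types": ["20", "21"],
    "patch_id": 0,
}
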
@@ -80,14 +107,14 @@ def _read_file_to_model(
         raise FileNotFoundError(
             msg,
         ) from e
+    raw_data = _read_variable_definition_file(file_path)
+    cleaned_data = _strip_strings_recursively(raw_data)
+
     model = model_class.from_dict( # type:ignore [attr-defined]
-        _read_variable_definition_file(
-            file_path,
-        ),
+        cleaned_data
     )

     if model is None:
         msg = f"Could not read data from {file_path}"
         raise FileNotFoundError(msg)
-
     return model
@@ -1,67 +1,86 @@
  # --- Variabel definisjoner ---
  # ref: https://statistics-norway.atlassian.net/wiki/spaces/MPD/pages/3009839199/VarDef+-+Krav+til+dokumentasjon+av+variabler
  name: |
- Variabelens navn. Dette skal ikke være en mer “teknisk” forkortelse, men et navn som er forståelig for mennesker, f.eks. “Lønnsinntekter”.
+ Variabelens navn. Dette skal ikke være en mer “teknisk” forkortelse, men et navn som er forståelig for mennesker.
+ -------------------------
+ >>> EKSEMPEL:
+ name:
+ nb: |-
+ Lønnsinntekter
  short_name: |
  Dette er variabelens kortnavn, som kan være en mer “teknisk” forkortelse, f.eks. wlonn (kortnavnet til Lønnsinntekter). Kortnavnet til en variabel i Vardef skal være unikt.
- Kravet til kortnavnet er at det kan inneholde a-z (kun små bokstaver), 0-9 og _ (understrek). Minimumslengden på kortnavnet er 2 tegn. Bokstavene “æ”, “ø” og “å” kan ikke brukes. Disse anbefales erstattet med hhv. “ae”, “oe” og “aa"
+ Kravet til kortnavnet er at det kan inneholde a-z (kun små bokstaver), 0-9 og _ (understrek). Minimumslengden på kortnavnet er 2 tegn.
+ Bokstavene “æ”, “ø” og “å” kan ikke brukes. Disse anbefales erstattet med hhv. “ae”, “oe” og “aa"
  definition: |
  En definisjon skal beskrive hva variabelen betyr og være så kort og presis som mulig. Mer utfyllende opplysninger kan legges i Merknad-feltet.
+ -------------------------
+ >>> EKSEMPEL:
+ definition:
+ nb: |-
+ Yrkesinntekter, kapitalinntekter, skattepliktige og skattefrie overføringer, i løpet av kalenderåret.
  classification_reference: |
  ID av en klassifikasjon eller kodeliste fra KLASS som beskriver verdiene variabelen kan anta.
  For eksempel vil variabelen 'Sivilstand' ha klassifikasjon 'Standard for sivilstand' (kan vises på https://www.ssb.no/klass/klassifikasjoner/19 ) som har ID 19.
- Eksempel: "19"
+ -------------------------
+ >>> EKSEMPEL: "19"
  unit_types: |
- Enhetstyper - enhetene som beskrives av denne variabelen. Variabelen “sivilstand” vil f.eks. ha enhetstypen person, mens f.eks. “Produsentpris for tjenester” vil ha både foretak og bedrift som enhetstyper siden variabelen kan beskrive begge.
+ Enhetstyper - enhetene som beskrives av denne variabelen. Variabelen “sivilstand” vil f.eks. ha enhetstypen person,
+ mens f.eks. “Produsentpris for tjenester” vil ha både foretak og bedrift som enhetstyper siden variabelen kan beskrive begge.
  Verdier skal være koder fra: https://www.ssb.no/klass/klassifikasjoner/702.
- Eksempel:
+ -------------------------
+ >>> EKSEMPEL:
  - "20"
  subject_fields: |
  Statistikkområder som variabelen brukes innenfor. For eksempel tilhører variabelen “Sivilstand” statistikkområdet “Befolkning”.
  Verdier skal være koder fra https://www.ssb.no/klass/klassifikasjoner/618.
- Eksempel:
+ -------------------------
+ >>> EKSEMPEL:
  - "bf"
  - "be"
  contains_special_categories_of_personal_data: |
  Viser om variabelen inneholder spesielt sensitive personopplysninger.
- Kategorier:
- - opplysninger om etnisk opprinnelse
- - opplysninger om politisk oppfatning
- - opplysninger om religion
- - opplysninger om filosofisk overbevisning
- - opplysninger om fagforeningsmedlemskap
- - genetiske opplysninger
- - biometriske opplysninger med det formål å entydig identifisere noen
- - helseopplysninger
- - opplysninger om seksuelle forhold
- - opplysninger om seksuell legning
- ref: https://lovdata.no/dokument/NL/lov/2018-06-15-38/KAPITTEL_gdpr-2#gdpr/a9
- Eksempel: true
+ -------------------------
+ >>> EKSEMPEL: true
  measurement_type: |
  Måletype som en kvantitativ variabelen tilhører, f.eks. valuta, areal osv.
  Verdien skal være en kode fra: https://www.ssb.no/klass/klassifikasjoner/303
- Eksempel: "03"
+ -------------------------
+ >>> EKSEMPEL: "03"
  valid_from: |
  Datoen variabeldefinisjonen er gyldig f.o.m.
- Eksempel: 1999-01-11
+ -------------------------
+ >>> EKSEMPEL: 1999-01-30
  valid_until: |
  Datoen variabeldefinisjonens var gyldig t.o.m. Settes hvis definisjonen skal erstattet av en ny definisjon (med en ny gyldighetsperiode), eller variabelen ikke lenger skal brukes.
- Eksempel: 2025-10-03
+ -------------------------
+ >>> EKSEMPEL: 2024-10-23
  external_reference_uri: |
  En peker (URI) til ekstern definisjon/dokumentasjon, f.eks. ei webside som er relevant for variabelen.
- Eksempel: "https://www.landbruksdirektoratet.com"
+ -----------------------------------------------------
+ >>> EKSEMPEL: "https://www.landbruksdirektoratet.com"
  comment: |
- Her kan en sette inn eventuelle tilleggsopplysninger som ikke hører hjemme i selve definisjonen. Variabelen “Landbakgrunn” har f.eks. merknaden “Fra og med 1.1.2003 ble definisjon endret til også å trekke inn besteforeldrenes fødeland”.
+ Her kan en sette inn eventuelle tilleggsopplysninger som ikke hører hjemme i selve definisjonen.
+ Variabelen “Landbakgrunn” har f.eks. merknaden “Fra og med 1.1.2003 ble definisjon endret til også å trekke inn besteforeldrenes fødeland”.
+ -----------------------------------------------------------------------------------------------
+ >>> EKSEMPEL:
+ comment:
+ nb: |-
+ Fra og med 1.1.2003 ble definisjon endret til også å trekke inn besteforeldrenes fødeland.
  related_variable_definition_uris: |
- Her kan en legge inn URIer til andre variabler som er relevante. Eksempelvis er variabelen “Inntekt etter skatt” en beregnet variabel der “Yrkesinntekter” og “Kapitalinntekter” inngår i beregningen. En kan da legge inn deres URI-er i dette feltet.
- Eksempel: "https://example.com/"
+ Her kan en legge inn URIer til andre variabler som er relevante. Eksempelvis er variabelen “Inntekt etter skatt” en beregnet variabel der “Yrkesinntekter” og “Kapitalinntekter” inngår i beregningen.
+ En kan da legge inn deres URI-er i dette feltet.
+ -------------------------
+ >>> EKSEMPEL:
+ - "https://example.com/"
  contact: |
  Her dokumenterer en navn og epost for person eller gruppe som kan svare på spørsmål.
- Eksempel:
- contact:
- title:
- nb: "Seksjonsleder"
- email: leder@ssb.no
+ -------------------------
+ >>> EKSEMPEL:
+ contact:
+ title:
+ nb: |-
+ Seksjonsleder
+ email: leder@ssb.no
  variable_status: |
  Livssyklus for variabelen.
  id: |
@@ -1,11 +1,11 @@
 Metadata-Version: 2.3
 Name: dapla-toolbelt-metadata
-Version: 0.6.3
+Version: 0.6.4
 Summary: Dapla Toolbelt Metadata
 License: MIT
 Author: Team Metadata
 Author-email: metadata@ssb.no
-Requires-Python: >=3.10,<4.0
+Requires-Python: >=3.10
 Classifier: Development Status :: 4 - Beta
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
@@ -24,7 +24,7 @@ Requires-Dist: pyjwt (>=2.8.0)
 Requires-Dist: python-dotenv (>=1.0.1)
 Requires-Dist: requests (>=2.31.0)
 Requires-Dist: ruamel-yaml (>=0.18.10)
-Requires-Dist: ssb-datadoc-model (>=6.0.0,<7.0.0)
+Requires-Dist: ssb-datadoc-model (==6.0.0)
 Requires-Dist: ssb-klass-python (>=1.0.1)
 Requires-Dist: typing-extensions (>=4.12.2)
 Project-URL: Changelog, https://github.com/statisticsnorway/dapla-toolbelt-metadata/releases
@@ -7,7 +7,7 @@ dapla_metadata/dapla/__init__.py,sha256=tkapF-YwmruPPrKvN3pEoCZqb7xvJx_ogBM8XyGM
 dapla_metadata/dapla/user_info.py,sha256=bENez-ICt9ySR8orYebO68Q3_2LkIW9QTL58DTctmEQ,4833
 dapla_metadata/datasets/__init__.py,sha256=TvzskpdFC6hGcC9_55URT5jr5wNAPzXuISd2UjJWM_8,280
 dapla_metadata/datasets/code_list.py,sha256=kp1O6sUiUAP9WKlWY8IgHWx_1IOzJA63WveHqolgKmg,9082
-dapla_metadata/datasets/core.py,sha256=LgOyDDLV8vxUrticcgWPORlX19egm2epCerg5IcZQPs,22298
+dapla_metadata/datasets/core.py,sha256=WfBIkNWxXH_WdCyZCsO3o-CRjb2LbsMJp-P01-b8nYw,25706
 dapla_metadata/datasets/dapla_dataset_path_info.py,sha256=zdkVjxlqXMBe7eTAneUrTDP0_fx7JsEQ_0JrKjREhfU,26854
 dapla_metadata/datasets/dataset_parser.py,sha256=bc3KOIDQGgdZMPh3XVHhiKMsY6FxIY9glvGlwTM4g7I,8233
 dapla_metadata/datasets/external_sources/__init__.py,sha256=qvIdXwqyEmXNUCB94ZtZXRzifdW4hiXASFFPtC70f6E,83
@@ -74,16 +74,16 @@ dapla_metadata/variable_definitions/_generated/vardef_client/rest.py,sha256=x4PW
 dapla_metadata/variable_definitions/_utils/__init__.py,sha256=qAhRLJoTBqtR3f9xRXTRhD7-5Xg0Opk1Ks5F4AUYnpA,45
 dapla_metadata/variable_definitions/_utils/_client.py,sha256=v1-9VjrdPI6-sroam5vXMPEV1dQMPsYk7KyGd48HjYw,971
 dapla_metadata/variable_definitions/_utils/config.py,sha256=BpLrnuqgtqz_kxBc_Kd-I1QNL7y2RxRXgX-IVbMIclQ,2416
-dapla_metadata/variable_definitions/_utils/constants.py,sha256=M9aF0P8iAQaUpjGsUWaWpOphIKxisfJmRPEf9vBB1Tc,1523
+dapla_metadata/variable_definitions/_utils/constants.py,sha256=BGITkRNYtRDySM-anDMQDvO2JrXm3lDjw7ZmYfhFlXU,1884
 dapla_metadata/variable_definitions/_utils/descriptions.py,sha256=bB5QHNc4eOhmpLQHCty-CP5_aA82chkICifXw430suI,2746
-dapla_metadata/variable_definitions/_utils/files.py,sha256=d6gY7X-lQ4QAsy0YjRF5SmDmWbIfWElD2awwe5idQaM,12066
-dapla_metadata/variable_definitions/_utils/template_files.py,sha256=FSI8hTgW_ADA5r9eoQXoMQw0gYNdquN2ePGbs9bBZI0,3699
-dapla_metadata/variable_definitions/_utils/variable_definition_files.py,sha256=ePlbsrVl1JNMDUomS-ldYOeOilmcjQy0I5RhorShE2o,2785
+dapla_metadata/variable_definitions/_utils/files.py,sha256=qdO9D0l-6FnSGZImTtyMsrFfauFqvQyCWz0knLSklbo,13193
+dapla_metadata/variable_definitions/_utils/template_files.py,sha256=-PgYs4TG4vrXLQgk47pow9ZsqlZqhtO755LnEmvN4MA,3405
+dapla_metadata/variable_definitions/_utils/variable_definition_files.py,sha256=PbqsFdHxsq0EWBg9s2Y57LqVP7aPmGD5-FZfnzuOw2Q,4078
 dapla_metadata/variable_definitions/exceptions.py,sha256=z6Gtd84FboDu7vWjC3wathIF7I0gF0imtRhwMkr16lY,7851
-dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml,sha256=Bl8B8Gv1SAnuEzq4XzCfjVPQzfPsueCnigTeNHszHoE,4890
+dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml,sha256=z-P9q0yVk8mcKIMPByEhdF3q-OQzd26jPgrpLep4cU0,5223
 dapla_metadata/variable_definitions/vardef.py,sha256=KYd31nCGhxuzC0hpKR6foQjO39Tlb3vu9IDqUoMvTeY,11352
 dapla_metadata/variable_definitions/variable_definition.py,sha256=sj49uot0e4UJW4QJ3dEJGgjY4yfCHOkxS2NdD2t60b8,14883
-dapla_toolbelt_metadata-0.6.3.dist-info/LICENSE,sha256=np3IfD5m0ZUofn_kVzDZqliozuiO6wrktw3LRPjyEiI,1073
-dapla_toolbelt_metadata-0.6.3.dist-info/METADATA,sha256=DpjGArukPeGhp9Af_3je3M0ezZuRTrfX5BhS4juVoxQ,4917
-dapla_toolbelt_metadata-0.6.3.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
-dapla_toolbelt_metadata-0.6.3.dist-info/RECORD,,
+dapla_toolbelt_metadata-0.6.4.dist-info/LICENSE,sha256=np3IfD5m0ZUofn_kVzDZqliozuiO6wrktw3LRPjyEiI,1073
+dapla_toolbelt_metadata-0.6.4.dist-info/METADATA,sha256=Bq3nNW2h1USHM-xn1eLag-kHCqYn6l7PtwZqqEE4_bk,4905
+dapla_toolbelt_metadata-0.6.4.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+dapla_toolbelt_metadata-0.6.4.dist-info/RECORD,,