dapla-toolbelt-metadata 0.6.1__tar.gz → 0.6.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dapla-toolbelt-metadata might be problematic. See the registry advisory page for more details.

Files changed (89)
  1. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/PKG-INFO +3 -2
  2. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/pyproject.toml +3 -3
  3. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/code_list.py +1 -1
  4. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/core.py +25 -10
  5. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/model_backwards_compatibility.py +3 -4
  6. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/model_validation.py +1 -1
  7. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/utility/utils.py +1 -1
  8. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/standards/name_validator.py +2 -2
  9. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/standards/utils/constants.py +2 -2
  10. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/exceptions.py +1 -1
  11. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_utils/config.py +1 -2
  12. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_utils/constants.py +2 -0
  13. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_utils/files.py +89 -9
  14. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_utils/template_files.py +18 -14
  15. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml +45 -27
  16. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/vardef.py +3 -3
  17. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/variable_definition.py +19 -21
  18. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/LICENSE +0 -0
  19. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/README.md +0 -0
  20. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/__init__.py +0 -0
  21. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/_shared/__init__.py +0 -0
  22. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/_shared/config.py +0 -0
  23. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/_shared/enums.py +0 -0
  24. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/_shared/py.typed +0 -0
  25. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/dapla/__init__.py +0 -0
  26. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/dapla/user_info.py +0 -0
  27. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/__init__.py +0 -0
  28. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/dapla_dataset_path_info.py +0 -0
  29. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/dataset_parser.py +0 -0
  30. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/external_sources/__init__.py +0 -0
  31. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/external_sources/external_sources.py +0 -0
  32. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/py.typed +0 -0
  33. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/statistic_subject_mapping.py +0 -0
  34. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/utility/__init__.py +0 -0
  35. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/utility/constants.py +0 -0
  36. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/datasets/utility/enums.py +0 -0
  37. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/standards/__init__.py +0 -0
  38. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/standards/standard_validators.py +0 -0
  39. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/standards/utils/__init__.py +0 -0
  40. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/__init__.py +0 -0
  41. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/.openapi-generator/FILES +0 -0
  42. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/.openapi-generator/VERSION +0 -0
  43. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/.openapi-generator-ignore +0 -0
  44. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/README.md +0 -0
  45. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/__init__.py +0 -0
  46. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/__init__.py +0 -0
  47. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/__init__.py +0 -0
  48. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/data_migration_api.py +0 -0
  49. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/draft_variable_definitions_api.py +0 -0
  50. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/patches_api.py +0 -0
  51. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/validity_periods_api.py +0 -0
  52. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/variable_definitions_api.py +0 -0
  53. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api_client.py +0 -0
  54. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api_response.py +0 -0
  55. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/configuration.py +0 -0
  56. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/CompleteResponse.md +0 -0
  57. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/Contact.md +0 -0
  58. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/DataMigrationApi.md +0 -0
  59. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/Draft.md +0 -0
  60. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/DraftVariableDefinitionsApi.md +0 -0
  61. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/LanguageStringType.md +0 -0
  62. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/Owner.md +0 -0
  63. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/Patch.md +0 -0
  64. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/PatchesApi.md +0 -0
  65. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/PublicApi.md +0 -0
  66. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/SupportedLanguages.md +0 -0
  67. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/UpdateDraft.md +0 -0
  68. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/ValidityPeriod.md +0 -0
  69. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/ValidityPeriodsApi.md +0 -0
  70. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/VariableDefinitionsApi.md +0 -0
  71. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/VariableStatus.md +0 -0
  72. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/__init__.py +0 -0
  73. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/complete_response.py +0 -0
  74. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/contact.py +0 -0
  75. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/draft.py +0 -0
  76. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/language_string_type.py +0 -0
  77. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/owner.py +0 -0
  78. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/patch.py +0 -0
  79. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/problem.py +0 -0
  80. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/update_draft.py +0 -0
  81. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/validity_period.py +0 -0
  82. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/variable_status.py +0 -0
  83. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/py.typed +0 -0
  84. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_generated/vardef_client/rest.py +0 -0
  85. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_utils/__init__.py +0 -0
  86. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_utils/_client.py +0 -0
  87. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_utils/descriptions.py +0 -0
  88. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/_utils/variable_definition_files.py +0 -0
  89. {dapla_toolbelt_metadata-0.6.1 → dapla_toolbelt_metadata-0.6.3}/src/dapla_metadata/variable_definitions/exceptions.py +0 -0
@@ -1,14 +1,15 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: dapla-toolbelt-metadata
3
- Version: 0.6.1
3
+ Version: 0.6.3
4
4
  Summary: Dapla Toolbelt Metadata
5
5
  License: MIT
6
6
  Author: Team Metadata
7
7
  Author-email: metadata@ssb.no
8
- Requires-Python: >=3.11,<4.0
8
+ Requires-Python: >=3.10,<4.0
9
9
  Classifier: Development Status :: 4 - Beta
10
10
  Classifier: License :: OSI Approved :: MIT License
11
11
  Classifier: Programming Language :: Python :: 3
12
+ Classifier: Programming Language :: Python :: 3.10
12
13
  Classifier: Programming Language :: Python :: 3.11
13
14
  Classifier: Programming Language :: Python :: 3.12
14
15
  Classifier: Programming Language :: Python :: 3.13
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "dapla-toolbelt-metadata"
3
- version = "0.6.1"
3
+ version = "0.6.3"
4
4
  description = "Dapla Toolbelt Metadata"
5
5
  authors = ["Team Metadata <metadata@ssb.no>"]
6
6
  license = "MIT"
@@ -16,7 +16,7 @@ packages = [{ include = "dapla_metadata", from = "src" }]
16
16
  Changelog = "https://github.com/statisticsnorway/dapla-toolbelt-metadata/releases"
17
17
 
18
18
  [tool.poetry.dependencies]
19
- python = ">=3.11,<4.0"
19
+ python = ">=3.10,<4.0"
20
20
  pyarrow = ">=8.0.0"
21
21
  pydantic = ">=2.5.2"
22
22
  arrow = ">=1.3.0"
@@ -138,7 +138,7 @@ disable_error_code = [
138
138
  force-exclude = true # Apply excludes to pre-commit
139
139
  show-fixes = true
140
140
  src = ["src", "tests"]
141
- target-version = "py311" # Minimum Python version supported
141
+ target-version = "py310" # Minimum Python version supported
142
142
  include = ["*.py", "*.pyi", "**/pyproject.toml", "*.ipynb"]
143
143
  extend-exclude = [
144
144
  "__pycache__",
@@ -127,7 +127,7 @@ class CodeList(GetExternalSource):
127
127
  .get_codes()
128
128
  .data
129
129
  )
130
- except Exception:
130
+ except Exception: # noqa: PERF203
131
131
  logger.exception(
132
132
  "Exception while getting classifications from Klass",
133
133
  )
@@ -111,6 +111,7 @@ class Datadoc:
111
111
  self.variables: list = []
112
112
  self.variables_lookup: dict[str, model.Variable] = {}
113
113
  self.explicitly_defined_metadata_document = False
114
+ self.dataset_consistency_status: list = []
114
115
  if metadata_document_path:
115
116
  self.metadata_document = normalize_path(metadata_document_path)
116
117
  self.explicitly_defined_metadata_document = True
@@ -169,11 +170,14 @@ class Datadoc:
169
170
  and existing_metadata is not None
170
171
  ):
171
172
  existing_file_path = self._get_existing_file_path(extracted_metadata)
172
- self._check_ready_to_merge(
173
+ self.dataset_consistency_status = self._check_dataset_consistency(
173
174
  self.dataset_path,
174
175
  Path(existing_file_path),
175
176
  extracted_metadata,
176
177
  existing_metadata,
178
+ )
179
+ self._check_ready_to_merge(
180
+ self.dataset_consistency_status,
177
181
  errors_as_warnings=self.errors_as_warnings,
178
182
  )
179
183
  merged_metadata = self._merge_metadata(
@@ -224,29 +228,26 @@ class Datadoc:
224
228
  }
225
229
 
226
230
  @staticmethod
227
- def _check_ready_to_merge(
231
+ def _check_dataset_consistency(
228
232
  new_dataset_path: Path | CloudPath,
229
233
  existing_dataset_path: Path,
230
234
  extracted_metadata: model.DatadocMetadata,
231
235
  existing_metadata: model.DatadocMetadata,
232
- *,
233
- errors_as_warnings: bool,
234
- ) -> None:
235
- """Check if the datasets are consistent enough to make a successful merge of metadata.
236
+ ) -> list[dict[str, object]]:
237
+ """Run consistency tests.
236
238
 
237
239
  Args:
238
240
  new_dataset_path: Path to the dataset to be documented.
239
241
  existing_dataset_path: Path stored in the existing metadata.
240
242
  extracted_metadata: Metadata extracted from a physical dataset.
241
243
  existing_metadata: Metadata from a previously created metadata document.
242
- errors_as_warnings: True if failing checks should be raised as warnings, not errors.
243
244
 
244
- Raises:
245
- InconsistentDatasetsError: If inconsistencies are found and `errors_as_warnings == False`
245
+ Returns:
246
+ List of dicts with property name and boolean success flag
246
247
  """
247
248
  new_dataset_path_info = DaplaDatasetPathInfo(new_dataset_path)
248
249
  existing_dataset_path_info = DaplaDatasetPathInfo(existing_dataset_path)
249
- results = [
250
+ return [
250
251
  {
251
252
  "name": "Bucket name",
252
253
  "success": (
@@ -290,6 +291,20 @@ class Datadoc:
290
291
  ),
291
292
  },
292
293
  ]
294
+
295
+ @staticmethod
296
+ def _check_ready_to_merge(
297
+ results: list[dict[str, object]], *, errors_as_warnings: bool
298
+ ) -> None:
299
+ """Check if the datasets are consistent enough to make a successful merge of metadata.
300
+
301
+ Args:
302
+ results: List of dicts with property name and boolean success flag
303
+ errors_as_warnings: True if failing checks should be raised as warnings, not errors.
304
+
305
+ Raises:
306
+ InconsistentDatasetsError: If inconsistencies are found and `errors_as_warnings == False`
307
+ """
293
308
  if failures := [result for result in results if not result["success"]]:
294
309
  msg = f"{INCONSISTENCIES_MESSAGE} {', '.join(str(f['name']) for f in failures)}"
295
310
  if errors_as_warnings:
@@ -15,8 +15,8 @@ from __future__ import annotations
15
15
 
16
16
  from collections import OrderedDict
17
17
  from dataclasses import dataclass
18
- from datetime import UTC
19
18
  from datetime import datetime
19
+ from datetime import timezone
20
20
  from typing import TYPE_CHECKING
21
21
  from typing import Any
22
22
 
@@ -160,8 +160,7 @@ def _remove_element_from_model(
160
160
  element_to_remove: The key of the element to be removed from the metadata
161
161
  dictionary.
162
162
  """
163
- if element_to_remove in supplied_metadata:
164
- del supplied_metadata[element_to_remove]
163
+ supplied_metadata.pop(element_to_remove, None)
165
164
 
166
165
 
167
166
  def _cast_to_date_type(value_to_update: str | None) -> str | None:
@@ -384,7 +383,7 @@ def handle_version_1_0_0(supplied_metadata: dict[str, Any]) -> dict[str, Any]:
384
383
  if supplied_metadata["dataset"][field]:
385
384
  supplied_metadata["dataset"][field] = datetime.isoformat(
386
385
  datetime.fromisoformat(supplied_metadata["dataset"][field]).astimezone(
387
- tz=UTC,
386
+ tz=timezone.utc,
388
387
  ),
389
388
  timespec="seconds",
390
389
  )
@@ -5,11 +5,11 @@ from __future__ import annotations
5
5
  import logging
6
6
  import warnings
7
7
  from typing import TYPE_CHECKING
8
- from typing import Self
9
8
  from typing import TextIO
10
9
 
11
10
  from datadoc_model import model
12
11
  from pydantic import model_validator
12
+ from typing_extensions import Self
13
13
 
14
14
  from dapla_metadata.datasets.utility.constants import DATE_VALIDATION_MESSAGE
15
15
  from dapla_metadata.datasets.utility.constants import NUM_OBLIGATORY_DATASET_FIELDS
@@ -37,7 +37,7 @@ logger = logging.getLogger(__name__)
37
37
 
38
38
  def get_timestamp_now() -> datetime.datetime:
39
39
  """Return a timestamp for the current moment."""
40
- return datetime.datetime.now(tz=datetime.UTC)
40
+ return datetime.datetime.now(tz=datetime.timezone.utc)
41
41
 
42
42
 
43
43
  def normalize_path(path: str) -> pathlib.Path | CloudPath:
@@ -128,9 +128,9 @@ class NamingStandardReport:
128
128
  """Returns an appropriate message based on the success rate."""
129
129
  rate = self.success_rate()
130
130
  if rate is not None:
131
- if rate == 100:
131
+ if 95 <= rate <= 100:
132
132
  return SSB_NAMING_STANDARD_REPORT_RESULT_BEST
133
- if 70 < rate < 100:
133
+ if 70 < rate < 95:
134
134
  return SSB_NAMING_STANDARD_REPORT_RESULT_GOOD
135
135
  if 40 <= rate <= 70:
136
136
  return SSB_NAMING_STANDARD_REPORT_RESULT_AVERAGE
@@ -9,7 +9,7 @@ NAME_STANDARD_SUCCESS = "Filene dine er i samsvar med SSB-navnestandarden"
9
9
  NAME_STANDARD_VIOLATION = "Det er oppdaget brudd på SSB-navnestandard:"
10
10
 
11
11
  MISSING_BUCKET_NAME = "Filnavn mangler bøttenavn ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#obligatoriske-mapper"
12
- MISSING_VERSION = "Filnavn mangler versjon ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#filnavn"
12
+ MISSING_VERSION = "Filnavn mangler versjon, hvis ikke filen er nyeste versjon kan dette være brudd på navnestandarden ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#versjonering-av-datasett"
13
13
  MISSING_PERIOD = "Filnavn mangler gyldighetsperiode ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#filnavn"
14
14
  MISSING_SHORT_NAME = "Kortnavn for statistikk mangler ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#obligatoriske-mapper"
15
15
  MISSING_DATA_STATE = "Mappe for datatilstand mangler ref: https://manual.dapla.ssb.no/statistikkere/navnestandard.html#obligatoriske-mapper"
@@ -26,7 +26,7 @@ BUCKET_NAME_UNKNOWN = "Kan ikke validere bøttenavn"
26
26
 
27
27
  SSB_NAMING_STANDARD_REPORT = "SSB navnestandard rapport"
28
28
  SSB_NAMING_STANDARD_REPORT_SUCCESS_RATE = "Suksess rate"
29
- SSB_NAMING_STANDARD_REPORT_RESULT_BEST = "🚀 Fantastisk! Alt bestått! 🎉\n"
29
+ SSB_NAMING_STANDARD_REPORT_RESULT_BEST = "🚀 Fantastisk! 🎉\n"
30
30
  SSB_NAMING_STANDARD_REPORT_RESULT_GOOD = (
31
31
  "✅ Bra jobba! Fortsatt litt rom for forbedring. 😊\n"
32
32
  )
@@ -152,7 +152,7 @@ class ApiException(OpenApiException):
152
152
 
153
153
  def __str__(self):
154
154
  """Custom error messages for exception"""
155
- error_message = f"({self.status})\n" f"Reason: {self.reason}\n"
155
+ error_message = f"({self.status})\nReason: {self.reason}\n"
156
156
  if self.headers:
157
157
  error_message += f"HTTP response headers: {self.headers}\n"
158
158
 
@@ -59,8 +59,7 @@ def get_vardef_host() -> str:
59
59
  case DaplaEnvironment.TEST:
60
60
  return VARDEF_HOST_TEST
61
61
  case DaplaEnvironment.DEV:
62
- msg = "Vardef is not available in dev."
63
- raise NotImplementedError(msg)
62
+ return VARDEF_HOST_TEST
64
63
  case _:
65
64
  return get_config_item("VARDEF_HOST") or "http://localhost:8080"
66
65
 
@@ -39,3 +39,5 @@ MACHINE_GENERATED_FIELDS = [
39
39
 
40
40
  OPTIONAL_FIELD = "~ Valgfritt felt ~"
41
41
  REQUIRED_FIELD = "! Obligatorisk felt !"
42
+
43
+ YAML_STR_TAG = "tag:yaml.org,2002:str"
@@ -10,6 +10,9 @@ import pytz
10
10
  from pydantic.config import JsonDict
11
11
  from ruamel.yaml import YAML
12
12
  from ruamel.yaml import CommentedMap
13
+ from ruamel.yaml import RoundTripRepresenter
14
+ from ruamel.yaml.scalarstring import DoubleQuotedScalarString
15
+ from ruamel.yaml.scalarstring import FoldedScalarString
13
16
 
14
17
  from dapla_metadata.variable_definitions._generated.vardef_client.models.complete_response import (
15
18
  CompleteResponse,
@@ -40,6 +43,7 @@ from dapla_metadata.variable_definitions._utils.constants import (
40
43
  from dapla_metadata.variable_definitions._utils.constants import (
41
44
  VARIABLE_STATUS_FIELD_NAME,
42
45
  )
46
+ from dapla_metadata.variable_definitions._utils.constants import YAML_STR_TAG
43
47
  from dapla_metadata.variable_definitions._utils.descriptions import (
44
48
  apply_norwegian_descriptions_to_model,
45
49
  )
@@ -120,14 +124,15 @@ def _populate_commented_map(
120
124
  ) -> None:
121
125
  """Add data to a CommentedMap."""
122
126
  commented_map[field_name] = value
123
- field = model_instance.model_fields[field_name]
127
+ field = type(model_instance).model_fields[field_name]
124
128
  description: JsonValue = cast(
125
129
  JsonDict,
126
130
  field.json_schema_extra,
127
131
  )[NORWEGIAN_DESCRIPTIONS]
128
132
  if description is not None:
129
133
  new_description = (
130
- (REQUIRED_FIELD if field.is_required() else OPTIONAL_FIELD)
134
+ "\n"
135
+ + (REQUIRED_FIELD if field.is_required() else OPTIONAL_FIELD)
131
136
  + "\n"
132
137
  + str(description)
133
138
  )
@@ -174,14 +179,20 @@ def _validate_and_create_directory(custom_directory: Path) -> Path:
174
179
  return custom_directory
175
180
 
176
181
 
177
- def _configure_yaml() -> YAML:
178
- yaml = YAML() # Use ruamel.yaml library
179
- yaml.default_flow_style = False # Ensures pretty YAML formatting
180
-
182
+ def configure_yaml(yaml: YAML) -> YAML:
183
+ """Common Yaml config for variable definitions."""
184
+ yaml.Representer = RoundTripRepresenter # Preserve the order of keys etc.
185
+ yaml.default_flow_style = False # Ensures pretty YAML formatting block style
186
+ yaml.allow_unicode = True # Support special characters
187
+ yaml.preserve_quotes = True
188
+ yaml.width = 180 # wrap long lines
189
+ yaml.indent(
190
+ mapping=4, sequence=2, offset=0
191
+ ) # Ensure indentation for nested keys and lists
181
192
  yaml.representer.add_representer(
182
193
  VariableStatus,
183
194
  lambda dumper, data: dumper.represent_scalar(
184
- "tag:yaml.org,2002:str",
195
+ YAML_STR_TAG,
185
196
  data.value,
186
197
  ),
187
198
  )
@@ -189,6 +200,72 @@ def _configure_yaml() -> YAML:
189
200
  return yaml
190
201
 
191
202
 
203
+ def _safe_get(data: dict, keys: list):
204
+ """Safely navigate nested dictionaries."""
205
+ for key in keys:
206
+ if not isinstance(data, dict) or key not in data or data[key] is None:
207
+ return None
208
+ data = data[key]
209
+ return data
210
+
211
+
212
+ def _safe_set_folded(data: dict, path: str, lang: str):
213
+ keys = path.split(".")
214
+ parent = _safe_get(data, keys)
215
+ if isinstance(parent, dict) and lang in parent and parent[lang] is not None:
216
+ parent[lang] = FoldedScalarString(parent[lang])
217
+
218
+
219
+ def pre_process_data(data: dict) -> dict:
220
+ """Format Variable definition model fields with ruamel yaml scalar string types."""
221
+ folded_fields = [
222
+ ("definition", ["nb", "nn", "en"]),
223
+ ("name", ["nb", "nn", "en"]),
224
+ ("comment", ["nb", "nn", "en"]),
225
+ ("contact.title", ["nb", "nn", "en"]),
226
+ ]
227
+ for field_path, langs in folded_fields:
228
+ for lang in langs:
229
+ _safe_set_folded(data, field_path, lang)
230
+
231
+ list_fields = [
232
+ "unit_types",
233
+ "subject_fields",
234
+ "related_variable_definition_uris",
235
+ ]
236
+ for key in list_fields:
237
+ if isinstance(data.get(key), list):
238
+ data[key] = [
239
+ DoubleQuotedScalarString(item) for item in data[key] if item is not None
240
+ ]
241
+
242
+ single_line_fields = [
243
+ "short_name",
244
+ "classification_reference",
245
+ "measurement_type",
246
+ "external_reference_uri",
247
+ "created_by",
248
+ "id",
249
+ "last_updated_by",
250
+ ]
251
+ for key in single_line_fields:
252
+ if data.get(key) is not None:
253
+ data[key] = DoubleQuotedScalarString(data[key])
254
+ # Special case due to complex structure
255
+ owner = data.get("owner")
256
+ if isinstance(owner, dict):
257
+ if owner.get("team") is not None:
258
+ owner["team"] = DoubleQuotedScalarString(owner["team"])
259
+ if isinstance(owner.get("groups"), list):
260
+ owner["groups"] = [
261
+ DoubleQuotedScalarString(item)
262
+ for item in owner["groups"]
263
+ if item is not None
264
+ ]
265
+
266
+ return data
267
+
268
+
192
269
  def _model_to_yaml_with_comments(
193
270
  model_instance: CompleteResponse,
194
271
  file_name: str,
@@ -209,7 +286,8 @@ def _model_to_yaml_with_comments(
209
286
  Returns:
210
287
  Path: The file path of the generated YAML file.
211
288
  """
212
- yaml = _configure_yaml()
289
+ yaml = YAML()
290
+ configure_yaml(yaml)
213
291
 
214
292
  from dapla_metadata.variable_definitions.variable_definition import (
215
293
  VariableDefinition,
@@ -223,7 +301,7 @@ def _model_to_yaml_with_comments(
223
301
  serialize_as_any=True,
224
302
  warnings="error",
225
303
  )
226
-
304
+ data = pre_process_data(data)
227
305
  # One CommentMap for each section in the yaml file
228
306
  machine_generated_map = CommentedMap()
229
307
  commented_map = CommentedMap()
@@ -244,6 +322,8 @@ def _model_to_yaml_with_comments(
244
322
  model_instance,
245
323
  )
246
324
  elif field_name not in {VARIABLE_STATUS_FIELD_NAME, OWNER_FIELD_NAME}:
325
+ if isinstance(value, str):
326
+ value.strip()
247
327
  _populate_commented_map(field_name, value, commented_map, model_instance)
248
328
 
249
329
  base_path = (
@@ -41,30 +41,34 @@ def _get_default_template() -> "VariableDefinition":
41
41
 
42
42
  return VariableDefinition(
43
43
  name=LanguageStringType(
44
- nb="default navn",
45
- nn="default namn",
46
- en="default name",
44
+ nb="navn",
45
+ nn="namn",
46
+ en="name",
47
47
  ),
48
- short_name="default_kortnavn",
48
+ short_name="generert_kortnavn",
49
49
  definition=LanguageStringType(
50
- nb="default definisjon",
51
- nn="default definisjon",
52
- en="default definition",
50
+ nb="definisjonstekst",
51
+ nn="definisjonstekst",
52
+ en="definition text",
53
53
  ),
54
- classification_reference="class_id",
55
54
  valid_from=DEFAULT_DATE,
56
- unit_types=["00"],
57
- subject_fields=["aa"],
55
+ unit_types=[""],
56
+ subject_fields=[""],
58
57
  contains_special_categories_of_personal_data=False,
59
58
  variable_status=VariableStatus.DRAFT.value,
60
59
  owner=Owner(team="default team", groups=["default group"]),
61
60
  contact=Contact(
62
61
  title=LanguageStringType(
63
- nb="default tittel",
64
- nn="default tittel",
65
- en="default title",
62
+ nb="generert tittel",
63
+ nn="generert tittel",
64
+ en="generert title",
66
65
  ),
67
- email="default@ssb.no",
66
+ email="generert@ssb.no",
67
+ ),
68
+ comment=LanguageStringType(
69
+ nb="",
70
+ nn="",
71
+ en="",
68
72
  ),
69
73
  id="",
70
74
  patch_id=0,
@@ -8,41 +8,62 @@ short_name: |
8
8
  definition: |
9
9
  En definisjon skal beskrive hva variabelen betyr og være så kort og presis som mulig. Mer utfyllende opplysninger kan legges i Merknad-feltet.
10
10
  classification_reference: |
11
- Lenke (URI) til kodeverk (klassifikasjon eller kodeliste) i KLASS som beskriver verdiene variabelen kan anta. F.eks. vil variabelen “Sivilstand” ha kodeverks-URI Standard for [sivilstand](https://www.ssb.no/klass/klassifikasjoner/19).
11
+ ID av en klassifikasjon eller kodeliste fra KLASS som beskriver verdiene variabelen kan anta.
12
+ For eksempel vil variabelen 'Sivilstand' ha klassifikasjon 'Standard for sivilstand' (kan vises på https://www.ssb.no/klass/klassifikasjoner/19 ) som har ID 19.
13
+ Eksempel: "19"
12
14
  unit_types: |
13
- Enhetstype(r) - enheten(e) som beskrives av denne variabelen. Variabelen “sivilstand” vil f.eks. ha enhetstypen person, mens f.eks. “Produsentpris for tjenester” vil ha både foretak og bedrift som enhetstyper siden variabelen kan beskrive begge.
14
- ref: https://www.ssb.no/klass/klassifikasjoner/702.
15
+ Enhetstyper - enhetene som beskrives av denne variabelen. Variabelen “sivilstand” vil f.eks. ha enhetstypen person, mens f.eks. “Produsentpris for tjenester” vil ha både foretak og bedrift som enhetstyper siden variabelen kan beskrive begge.
16
+ Verdier skal være koder fra: https://www.ssb.no/klass/klassifikasjoner/702.
17
+ Eksempel:
18
+ - "20"
15
19
  subject_fields: |
16
- Statistikkområde(r) som variabelen brukes innenfor, hentet fra [Kodeliste for statistikkområder i Statistikkbanken](https://www.ssb.no/klass/klassifikasjoner/618).
17
- F.eks. tilhører variabelen “Sivilstand” statistikkområdet “Befolkning”.
20
+ Statistikkområder som variabelen brukes innenfor. For eksempel tilhører variabelen “Sivilstand” statistikkområdet “Befolkning”.
21
+ Verdier skal være koder fra https://www.ssb.no/klass/klassifikasjoner/618.
22
+ Eksempel:
23
+ - "bf"
24
+ - "be"
18
25
  contains_special_categories_of_personal_data: |
19
- Viser om variabelen inneholder spesielt sensitive personopplysninger [Lov om behandling av personopplysninger(personopplysningsloven)-KAPITTEL || Prinsipper - Lovdata](https://lovdata.no/dokument/NL/lov/2018-06-15-38/KAPITTEL_gdpr-2#gdpr/a9)
20
- - opplysninger om etnisk opprinnelse
21
- - opplysninger om politisk oppfatning
22
- - opplysninger om religion
23
- - opplysninger om filosofisk overbevisning
24
- - opplysninger om fagforeningsmedlemskap
25
- - genetiske opplysninger
26
- - biometriske opplysninger med det formål å entydig identifisere noen
27
- - helseopplysninger
28
- - opplysninger om seksuelle forhold
29
- - opplysninger om seksuell legning
26
+ Viser om variabelen inneholder spesielt sensitive personopplysninger.
27
+ Kategorier:
28
+ - opplysninger om etnisk opprinnelse
29
+ - opplysninger om politisk oppfatning
30
+ - opplysninger om religion
31
+ - opplysninger om filosofisk overbevisning
32
+ - opplysninger om fagforeningsmedlemskap
33
+ - genetiske opplysninger
34
+ - biometriske opplysninger med det formål å entydig identifisere noen
35
+ - helseopplysninger
36
+ - opplysninger om seksuelle forhold
37
+ - opplysninger om seksuell legning
38
+ ref: https://lovdata.no/dokument/NL/lov/2018-06-15-38/KAPITTEL_gdpr-2#gdpr/a9
39
+ Eksempel: true
30
40
  measurement_type: |
31
- Måletype som en kvantitativ variabelen tilhører, f.eks. valuta, areal osv. Disse ligger i kodeverket [SSB måletyper/måleenheter](https://www.ssb.no/klass/klassifikasjoner/303/koder)
41
+ Måletype som en kvantitativ variabelen tilhører, f.eks. valuta, areal osv.
42
+ Verdien skal være en kode fra: https://www.ssb.no/klass/klassifikasjoner/303
43
+ Eksempel: "03"
32
44
  valid_from: |
33
- Datoen variabeldefinisjonen er gyldig f.o.m.
45
+ Datoen variabeldefinisjonen er gyldig f.o.m.
46
+ Eksempel: 1999-01-11
34
47
  valid_until: |
35
48
  Datoen variabeldefinisjonens var gyldig t.o.m. Settes hvis definisjonen skal erstattet av en ny definisjon (med en ny gyldighetsperiode), eller variabelen ikke lenger skal brukes.
49
+ Eksempel: 2025-10-03
36
50
  external_reference_uri: |
37
51
  En peker (URI) til ekstern definisjon/dokumentasjon, f.eks. ei webside som er relevant for variabelen.
52
+ Eksempel: "https://www.landbruksdirektoratet.com"
38
53
  comment: |
39
54
  Her kan en sette inn eventuelle tilleggsopplysninger som ikke hører hjemme i selve definisjonen. Variabelen “Landbakgrunn” har f.eks. merknaden “Fra og med 1.1.2003 ble definisjon endret til også å trekke inn besteforeldrenes fødeland”.
40
55
  related_variable_definition_uris: |
41
- Her kan en legge inn URI(er) til andre variabler som er relevante. Eksempelvis er variabelen “Inntekt etter skatt” en beregnet variabel der “Yrkesinntekter” og “Kapitalinntekter” inngår i beregningen. En kan da legge inn deres URI-er i dette feltet.
56
+ Her kan en legge inn URIer til andre variabler som er relevante. Eksempelvis er variabelen “Inntekt etter skatt” en beregnet variabel der “Yrkesinntekter” og “Kapitalinntekter” inngår i beregningen. En kan da legge inn deres URI-er i dette feltet.
57
+ Eksempel: "https://example.com/"
42
58
  contact: |
43
59
  Her dokumenterer en navn og epost for person eller gruppe som kan svare på spørsmål.
60
+ Eksempel:
61
+ contact:
62
+ title:
63
+ nb: "Seksjonsleder"
64
+ email: leder@ssb.no
44
65
  variable_status: |
45
- Livssyklus for variabelen. Denne har tre kategorier: Utkast, Publisert internt og Publisert eksternt.
66
+ Livssyklus for variabelen.
46
67
  id: |
47
68
  Unik SSB identifikator for variabeldefinisjonen. Denne blir maskingenerert.
48
69
  Variabeldefinisjoner med ulike gyldighetsperioder har samme ID (og samme kortnavn).
@@ -50,14 +71,11 @@ patch_id: |
50
71
  Løpenummer som identifiserer en patch, endring, for en variabeldefinisjon.
51
72
  owner: |
52
73
  Eier av variabelen dvs. ansvarlig Dapla-team (statistikk-team) og informasjon om tilgangsstyringsgrupper. Team-tilhørighet settes automatisk til det samme som teamtilhørigheten til den som oppretter variabelen.
53
- Eksempel:
54
- team: ledstil
55
- groups: [developers]
56
74
  created_at: |
57
- Datoen variabelen ble opprettet. Denne er maskingenerert.
75
+ Tidsstempelet da variabelen ble opprettet. Denne er maskingenerert.
58
76
  created_by: |
59
- Personen som har opprettet variabelen (initialer). Dette er maskingenerert.
77
+ Personen som har opprettet variabelen. Dette er maskingenerert.
60
78
  last_updated_at: |
61
- Dato da variabelen sist ble oppdatert. Denne er maskingenerert.
79
+ Tidsstempelet da variabelen sist ble oppdatert. Denne er maskingenerert.
62
80
  last_updated_by: |
63
- Personen (initialer) som sist utførte en endring i variabelen. Denne er maskingenerert.
81
+ Personen som sist utførte en endring i variabelen. Denne er maskingenerert.
@@ -108,7 +108,7 @@ class Vardef:
108
108
  )
109
109
 
110
110
  logger.info(
111
- "Successfully created variable definition '%s' with ID '%s'",
111
+ "Successfully created variable definition '%s' with ID '%s'",
112
112
  new_variable.short_name,
113
113
  new_variable.id,
114
114
  )
@@ -165,7 +165,7 @@ class Vardef:
165
165
  )
166
166
 
167
167
  logger.info(
168
- "Successfully migrated variable definition '%s' with ID '%s'",
168
+ "Successfully migrated variable definition '%s' with ID '%s'",
169
169
  migrated_variable.short_name,
170
170
  migrated_variable.id,
171
171
  )
@@ -273,7 +273,7 @@ class Vardef:
273
273
  custom_directory=Path(custom_file_path) if custom_file_path else None,
274
274
  )
275
275
  logger.info(
276
- f"Created editable variable definition template file at {file_path}", # noqa: G004
276
+ f"Created editable variable definition template file at {file_path}", # noqa: G004
277
277
  )
278
278
  return file_path
279
279
 
@@ -34,6 +34,8 @@ from dapla_metadata.variable_definitions._generated.vardef_client.models.variabl
34
34
  )
35
35
  from dapla_metadata.variable_definitions._utils import config
36
36
  from dapla_metadata.variable_definitions._utils._client import VardefClient
37
+ from dapla_metadata.variable_definitions._utils.files import configure_yaml
38
+ from dapla_metadata.variable_definitions._utils.files import pre_process_data
37
39
  from dapla_metadata.variable_definitions._utils.variable_definition_files import (
38
40
  _read_file_to_model,
39
41
  )
@@ -126,7 +128,7 @@ class VariableDefinition(CompleteResponse):
126
128
  self.__dict__.update(updated)
127
129
 
128
130
  logger.info(
129
- "Successfully updated variable definition '%s' with ID '%s'",
131
+ "Successfully updated variable definition '%s' with ID '%s'",
130
132
  updated.short_name,
131
133
  updated.id,
132
134
  )
@@ -176,7 +178,7 @@ class VariableDefinition(CompleteResponse):
176
178
  variable_definition_id=self.id,
177
179
  active_group=config.get_active_group(),
178
180
  )
179
- return f"Variable {self.id} safely deleted"
181
+ return f"Variable {self.id} safely deleted"
180
182
 
181
183
  @vardef_exception_handler
182
184
  def get_patch(self, patch_id: int) -> "VariableDefinition":
@@ -205,9 +207,9 @@ class VariableDefinition(CompleteResponse):
205
207
 
206
208
  Patches are to be used for minor changes which don't require a new Validity Period.
207
209
  Examples of reasons for creating a new Patch:
208
- - Correcting a typo
209
- - Adding a translation
210
- - Adding a subject field
210
+ - Correcting a typo
211
+ - Adding a translation
212
+ - Adding a subject field
211
213
 
212
214
  Supply only the fields to be changed. Other fields will retain their current values.
213
215
 
@@ -234,7 +236,7 @@ class VariableDefinition(CompleteResponse):
234
236
  self.__dict__.update(new_patch)
235
237
 
236
238
  logger.info(
237
- "Successfully created patch with patch ID '%s' for variable definition '%s' with ID '%s'",
239
+ "Successfully created patch with patch ID '%s' for variable definition '%s' with ID '%s'",
238
240
  new_patch.patch_id,
239
241
  new_patch.short_name,
240
242
  new_patch.id,
@@ -309,7 +311,7 @@ class VariableDefinition(CompleteResponse):
309
311
  self.__dict__.update(new_validity_period)
310
312
 
311
313
  logger.info(
312
- "Successfully created validity period that is valid from '%s' for variable definition '%s' with ID '%s'",
314
+ "Successfully created validity period that is valid from '%s' for variable definition '%s' with ID '%s'",
313
315
  new_validity_period.valid_from,
314
316
  new_validity_period.short_name,
315
317
  new_validity_period.id,
@@ -350,7 +352,7 @@ class VariableDefinition(CompleteResponse):
350
352
  UpdateDraft(variable_status=VariableStatus.PUBLISHED_INTERNAL),
351
353
  )
352
354
  logger.info(
353
- "Variable definition '%s' with ID '%s' successfully published, new status: %s",
355
+ "Variable definition '%s' with ID '%s' successfully published, new status: %s",
354
356
  update.short_name,
355
357
  update.id,
356
358
  update.variable_status,
@@ -373,7 +375,7 @@ class VariableDefinition(CompleteResponse):
373
375
  Patch(variable_status=VariableStatus.PUBLISHED_EXTERNAL),
374
376
  )
375
377
  logger.info(
376
- "Variable definition '%s' with ID '%s' successfully published, new status: %s",
378
+ "Variable definition '%s' with ID '%s' successfully published, new status: %s",
377
379
  update.short_name,
378
380
  update.id,
379
381
  update.variable_status,
@@ -387,7 +389,7 @@ class VariableDefinition(CompleteResponse):
387
389
  )
388
390
  self.set_file_path(file_path)
389
391
  logger.info(
390
- f"Created editable variable definition file at {file_path}", # noqa: G004
392
+ f"Created editable variable definition file at {file_path}", # noqa: G004
391
393
  )
392
394
  return self
393
395
 
@@ -405,16 +407,12 @@ class VariableDefinition(CompleteResponse):
405
407
 
406
408
  def _convert_to_yaml_output(self) -> str:
407
409
  stream = StringIO()
408
- with ruamel.yaml.YAML(
409
- output=stream,
410
- ) as yaml:
411
- yaml.default_flow_style = False
412
- yaml.allow_unicode = True
413
- yaml.dump(
414
- self.model_dump(
415
- mode="json",
416
- serialize_as_any=True,
417
- warnings="error",
418
- ),
410
+ with ruamel.yaml.YAML(output=stream) as yaml:
411
+ configure_yaml(yaml)
412
+ data = self.model_dump(
413
+ mode="json",
414
+ serialize_as_any=True,
415
+ warnings="error",
419
416
  )
417
+ yaml.dump(pre_process_data(data))
420
418
  return stream.getvalue()