dapla-toolbelt-metadata 0.2.1__py3-none-any.whl → 0.9.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dapla-toolbelt-metadata might be problematic. Click here for more details.
- dapla_metadata/__init__.py +11 -1
- dapla_metadata/_shared/__init__.py +1 -0
- dapla_metadata/_shared/config.py +109 -0
- dapla_metadata/_shared/enums.py +27 -0
- dapla_metadata/_shared/py.typed +0 -0
- dapla_metadata/dapla/__init__.py +4 -0
- dapla_metadata/dapla/user_info.py +138 -0
- dapla_metadata/datasets/__init__.py +1 -1
- dapla_metadata/datasets/_merge.py +333 -0
- dapla_metadata/datasets/code_list.py +5 -6
- dapla_metadata/datasets/compatibility/__init__.py +10 -0
- dapla_metadata/datasets/compatibility/_handlers.py +363 -0
- dapla_metadata/datasets/compatibility/_utils.py +259 -0
- dapla_metadata/datasets/compatibility/model_backwards_compatibility.py +135 -0
- dapla_metadata/datasets/core.py +136 -182
- dapla_metadata/datasets/dapla_dataset_path_info.py +145 -19
- dapla_metadata/datasets/dataset_parser.py +41 -28
- dapla_metadata/datasets/model_validation.py +29 -20
- dapla_metadata/datasets/statistic_subject_mapping.py +5 -1
- dapla_metadata/datasets/utility/constants.py +22 -15
- dapla_metadata/datasets/utility/enums.py +8 -20
- dapla_metadata/datasets/utility/urn.py +234 -0
- dapla_metadata/datasets/utility/utils.py +183 -111
- dapla_metadata/standards/__init__.py +4 -0
- dapla_metadata/standards/name_validator.py +250 -0
- dapla_metadata/standards/standard_validators.py +98 -0
- dapla_metadata/standards/utils/__init__.py +1 -0
- dapla_metadata/standards/utils/constants.py +49 -0
- dapla_metadata/variable_definitions/__init__.py +11 -0
- dapla_metadata/variable_definitions/_generated/.openapi-generator/FILES +20 -0
- dapla_metadata/variable_definitions/_generated/.openapi-generator/VERSION +1 -0
- dapla_metadata/variable_definitions/_generated/.openapi-generator-ignore +6 -0
- dapla_metadata/variable_definitions/_generated/README.md +148 -0
- dapla_metadata/variable_definitions/_generated/__init__.py +0 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/__init__.py +47 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/api/__init__.py +8 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/api/data_migration_api.py +766 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/api/draft_variable_definitions_api.py +888 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/api/patches_api.py +888 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/api/validity_periods_api.py +583 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/api/variable_definitions_api.py +613 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/api_client.py +779 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/api_response.py +27 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/configuration.py +474 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/CompleteResponse.md +51 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/Contact.md +30 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/DataMigrationApi.md +90 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/Draft.md +42 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/DraftVariableDefinitionsApi.md +259 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/LanguageStringType.md +31 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/Owner.md +31 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/Patch.md +43 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/PatchesApi.md +249 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/PublicApi.md +218 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/SupportedLanguages.md +15 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/UpdateDraft.md +44 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/ValidityPeriod.md +42 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/ValidityPeriodsApi.md +236 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/VariableDefinitionsApi.md +304 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/docs/VariableStatus.md +17 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/exceptions.py +193 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/__init__.py +31 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/complete_response.py +260 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/contact.py +94 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/draft.py +228 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/get_vardok_vardef_mapping_by_id200_response.py +158 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/language_string_type.py +101 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/owner.py +87 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/patch.py +244 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/problem.py +118 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/update_draft.py +274 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/validity_period.py +225 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/vardok_id_response.py +81 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/vardok_vardef_id_pair_response.py +84 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/models/variable_status.py +33 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/py.typed +0 -0
- dapla_metadata/variable_definitions/_generated/vardef_client/rest.py +249 -0
- dapla_metadata/variable_definitions/_utils/__init__.py +1 -0
- dapla_metadata/variable_definitions/_utils/_client.py +32 -0
- dapla_metadata/variable_definitions/_utils/config.py +54 -0
- dapla_metadata/variable_definitions/_utils/constants.py +80 -0
- dapla_metadata/variable_definitions/_utils/files.py +309 -0
- dapla_metadata/variable_definitions/_utils/template_files.py +99 -0
- dapla_metadata/variable_definitions/_utils/variable_definition_files.py +143 -0
- dapla_metadata/variable_definitions/exceptions.py +255 -0
- dapla_metadata/variable_definitions/vardef.py +372 -0
- dapla_metadata/variable_definitions/vardok_id.py +48 -0
- dapla_metadata/variable_definitions/vardok_vardef_id_pair.py +47 -0
- dapla_metadata/variable_definitions/variable_definition.py +422 -0
- {dapla_toolbelt_metadata-0.2.1.dist-info → dapla_toolbelt_metadata-0.9.11.dist-info}/METADATA +34 -36
- dapla_toolbelt_metadata-0.9.11.dist-info/RECORD +97 -0
- {dapla_toolbelt_metadata-0.2.1.dist-info → dapla_toolbelt_metadata-0.9.11.dist-info}/WHEEL +1 -1
- dapla_metadata/datasets/config.py +0 -80
- dapla_metadata/datasets/model_backwards_compatibility.py +0 -520
- dapla_metadata/datasets/user_info.py +0 -88
- dapla_toolbelt_metadata-0.2.1.dist-info/RECORD +0 -22
- {dapla_toolbelt_metadata-0.2.1.dist-info → dapla_toolbelt_metadata-0.9.11.dist-info/licenses}/LICENSE +0 -0
|
@@ -0,0 +1,309 @@
|
|
|
1
|
+
"""Lower level file utilities."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
import pytz
|
|
8
|
+
from ruamel.yaml import YAML
|
|
9
|
+
from ruamel.yaml import CommentedMap
|
|
10
|
+
from ruamel.yaml import RoundTripRepresenter
|
|
11
|
+
from ruamel.yaml.scalarstring import DoubleQuotedScalarString
|
|
12
|
+
from ruamel.yaml.scalarstring import LiteralScalarString
|
|
13
|
+
|
|
14
|
+
from dapla_metadata.variable_definitions._generated.vardef_client.models.complete_response import (
|
|
15
|
+
CompleteResponse,
|
|
16
|
+
)
|
|
17
|
+
from dapla_metadata.variable_definitions._generated.vardef_client.models.variable_status import (
|
|
18
|
+
VariableStatus,
|
|
19
|
+
)
|
|
20
|
+
from dapla_metadata.variable_definitions._utils import config
|
|
21
|
+
from dapla_metadata.variable_definitions._utils.constants import BLOCK_FIELDS
|
|
22
|
+
from dapla_metadata.variable_definitions._utils.constants import DOUBLE_QUOTE_FIELDS
|
|
23
|
+
from dapla_metadata.variable_definitions._utils.constants import (
|
|
24
|
+
MACHINE_GENERATED_FIELDS,
|
|
25
|
+
)
|
|
26
|
+
from dapla_metadata.variable_definitions._utils.constants import OWNER_FIELD_NAME
|
|
27
|
+
from dapla_metadata.variable_definitions._utils.constants import (
|
|
28
|
+
TEMPLATE_SECTION_HEADER_MACHINE_GENERATED,
|
|
29
|
+
)
|
|
30
|
+
from dapla_metadata.variable_definitions._utils.constants import (
|
|
31
|
+
TEMPLATE_SECTION_HEADER_OWNER,
|
|
32
|
+
)
|
|
33
|
+
from dapla_metadata.variable_definitions._utils.constants import (
|
|
34
|
+
TEMPLATE_SECTION_HEADER_STATUS,
|
|
35
|
+
)
|
|
36
|
+
from dapla_metadata.variable_definitions._utils.constants import (
|
|
37
|
+
VARIABLE_DEFINITIONS_DIR,
|
|
38
|
+
)
|
|
39
|
+
from dapla_metadata.variable_definitions._utils.constants import (
|
|
40
|
+
VARIABLE_STATUS_FIELD_NAME,
|
|
41
|
+
)
|
|
42
|
+
from dapla_metadata.variable_definitions._utils.constants import YAML_STR_TAG
|
|
43
|
+
from dapla_metadata.variable_definitions.exceptions import VardefFileError
|
|
44
|
+
|
|
45
|
+
logger = logging.getLogger(__name__)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def _create_file_name(
|
|
49
|
+
base_name: str,
|
|
50
|
+
time_object: str,
|
|
51
|
+
short_name: str | None = None,
|
|
52
|
+
variable_definition_id: str | None = None,
|
|
53
|
+
) -> str:
|
|
54
|
+
"""Return file name with dynamic timestamp, and shortname and id if available."""
|
|
55
|
+
return (
|
|
56
|
+
"_".join(
|
|
57
|
+
filter(
|
|
58
|
+
None,
|
|
59
|
+
[
|
|
60
|
+
base_name,
|
|
61
|
+
short_name,
|
|
62
|
+
variable_definition_id,
|
|
63
|
+
time_object,
|
|
64
|
+
],
|
|
65
|
+
),
|
|
66
|
+
)
|
|
67
|
+
+ ".yaml"
|
|
68
|
+
)
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _get_current_time() -> str:
|
|
72
|
+
"""Return a string format date now for filename."""
|
|
73
|
+
timezone = pytz.timezone("Europe/Oslo")
|
|
74
|
+
current_datetime = datetime.now(timezone).strftime("%Y-%m-%dT%H-%M-%S")
|
|
75
|
+
return str(current_datetime)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _get_workspace_dir() -> Path:
    """Determine the workspace directory.

    Reads ``WORKSPACE_DIR`` from config and validates that it points at an
    existing directory.

    Returns:
        Path: The resolved workspace directory.

    Raises:
        VardefFileError: If ``WORKSPACE_DIR`` is not set.
        FileNotFoundError: If the configured path does not exist.
        NotADirectoryError: If the configured path exists but is not a directory.
    """
    workspace_dir = config.get_workspace_dir()

    if workspace_dir is None:
        msg = "WORKSPACE_DIR is not set. Check your configuration or provide a custom directory."
        raise VardefFileError(msg)

    # Path.resolve() returns a new Path; the original code discarded the
    # result, so the resolution was a no-op. Bind it so it takes effect.
    # (The redundant `if workspace_dir is not None` guard after the raise
    # above has also been removed — it could never be False there.)
    workspace_dir_path = Path(workspace_dir).resolve()

    if not workspace_dir_path.exists():
        msg = f"Directory '{workspace_dir_path}' does not exist."
        raise FileNotFoundError(msg)

    if not workspace_dir_path.is_dir():
        msg = f"'{workspace_dir_path}' is not a directory."
        raise NotADirectoryError(msg)
    logger.debug("'WORKSPACE_DIR' value: %s", workspace_dir)
    return workspace_dir_path
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def _get_variable_definitions_dir() -> Path:
    """Get or create the variable definitions directory inside the workspace.

    Returns:
        Path: The ``VARIABLE_DEFINITIONS_DIR`` subdirectory of the workspace
        directory, created (including parents) if it did not already exist.
    """
    workspace_dir = _get_workspace_dir()
    folder_path = workspace_dir / VARIABLE_DEFINITIONS_DIR
    # exist_ok makes this idempotent: repeated calls reuse the same directory.
    folder_path.mkdir(parents=True, exist_ok=True)
    return folder_path
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def _validate_and_create_directory(custom_directory: Path) -> Path:
|
|
110
|
+
"""Ensure that the given path is a valid directory, creating it if necessary.
|
|
111
|
+
|
|
112
|
+
Args:
|
|
113
|
+
custom_directory (Path): The target directory path.
|
|
114
|
+
|
|
115
|
+
Returns:
|
|
116
|
+
Path: The resolved absolute path of the directory.
|
|
117
|
+
|
|
118
|
+
Raises:
|
|
119
|
+
ValueError: If the provided path has a file suffix, indicating a file name instead of a directory.
|
|
120
|
+
NotADirectoryError: If the path exists but is not a directory.
|
|
121
|
+
PermissionError: If there are insufficient permissions to create the directory.
|
|
122
|
+
OSError: If an OS-related error occurs while creating the directory.
|
|
123
|
+
"""
|
|
124
|
+
custom_directory = Path(custom_directory).resolve()
|
|
125
|
+
|
|
126
|
+
if custom_directory.suffix:
|
|
127
|
+
msg = f"Expected a directory but got a file name: %{custom_directory.name}"
|
|
128
|
+
raise ValueError(msg)
|
|
129
|
+
|
|
130
|
+
if custom_directory.exists() and not custom_directory.is_dir():
|
|
131
|
+
msg = f"Path exists but is not a directory: {custom_directory}"
|
|
132
|
+
raise NotADirectoryError(msg)
|
|
133
|
+
|
|
134
|
+
try:
|
|
135
|
+
custom_directory.mkdir(parents=True, exist_ok=True)
|
|
136
|
+
except PermissionError as e:
|
|
137
|
+
msg = f"Insufficient permissions to create directory: {custom_directory}"
|
|
138
|
+
raise PermissionError(msg) from e
|
|
139
|
+
except OSError as e:
|
|
140
|
+
msg = f"Failed to create directory {custom_directory}: {e!s}"
|
|
141
|
+
raise OSError(msg) from e
|
|
142
|
+
|
|
143
|
+
return custom_directory
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def configure_yaml(yaml: YAML) -> YAML:
    """Apply the shared YAML configuration for variable definition files."""
    # Keep key order and round-trip formatting intact.
    yaml.Representer = RoundTripRepresenter
    # Block style, unicode support, preserved quotes, and a very wide line
    # width so long values are never wrapped.
    yaml.default_flow_style = False
    yaml.allow_unicode = True
    yaml.preserve_quotes = True
    yaml.width = 4096
    # Consistent indentation for nested mappings and sequences.
    yaml.indent(mapping=4, sequence=6, offset=4)

    def _represent_variable_status(dumper, data):
        # Serialize the enum as its plain string value.
        return dumper.represent_scalar(YAML_STR_TAG, data.value)

    yaml.representer.add_representer(VariableStatus, _represent_variable_status)
    return yaml
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def _safe_get(data: dict, keys: list):
|
|
170
|
+
"""Safely navigate nested dictionaries."""
|
|
171
|
+
for key in keys:
|
|
172
|
+
if not isinstance(data, dict) or key not in data or data[key] is None:
|
|
173
|
+
return None
|
|
174
|
+
data = data[key]
|
|
175
|
+
return data
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def _apply_literal_scalars(field: dict):
    """Wrap `LanguageStringType` values in `LiteralScalarString`.

    Replaces, in place, every non-`None` language value in the mapping with
    the `LiteralScalarString` YAML type so it is emitted in block style.
    """
    field.update(
        {
            lang: LiteralScalarString(text)
            for lang, text in field.items()
            if text is not None
        },
    )
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def _apply_double_quotes_to_dict_values(field: dict):
    """Wrap dictionary values in `DoubleQuotedScalarString`.

    Replaces, in place, every non-`None` value in the mapping — including
    items inside list values — with the `DoubleQuotedScalarString` YAML type
    so it is emitted with double quotes.
    """
    for name in list(field):
        current = field[name]
        if isinstance(current, list):
            field[name] = [
                DoubleQuotedScalarString(entry)
                for entry in current
                if entry is not None
            ]
        elif current is not None:
            field[name] = DoubleQuotedScalarString(current)
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def pre_process_data(data: dict) -> dict:
    """Format variable definition model fields with ruamel YAML scalar string types.

    This method sets the appropriate scalar string type (either `LiteralScalarString`
    or `DoubleQuotedScalarString`) for fields of the variable definition model,
    based on predefined lists of fields.

    It processes both nested dictionaries and lists, ensuring each element is
    formatted with the correct YAML string type.

    Args:
        data (dict): A dictionary containing the variable definition data.

    Returns:
        dict: The updated dictionary with model fields formatted as ruamel.yaml
        scalar string types.
    """
    for dotted_key in BLOCK_FIELDS:
        keys = dotted_key.split(".")
        field = _safe_get(data, keys)
        if isinstance(field, dict):
            _apply_literal_scalars(field)

    for dotted_key in DOUBLE_QUOTE_FIELDS:
        keys = dotted_key.split(".")
        field = _safe_get(data, keys)
        # Bug fix: write back through the *parent* container. The original
        # assigned to data[dotted_key], which for nested paths ("a.b") would
        # create a bogus top-level key (list case) or raise KeyError (str
        # case) instead of updating the nested value. For non-dotted keys
        # this is identical to the original behavior.
        parent = data if len(keys) == 1 else _safe_get(data, keys[:-1])
        if not isinstance(parent, dict):
            continue
        leaf = keys[-1]
        if isinstance(field, list):
            parent[leaf] = [
                DoubleQuotedScalarString(item) for item in field if item is not None
            ]
        elif isinstance(field, str):
            parent[leaf] = DoubleQuotedScalarString(field)
        elif isinstance(field, dict):
            _apply_double_quotes_to_dict_values(field)

    return data
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
def _model_to_yaml_with_comments(
    model_instance: CompleteResponse,
    file_name: str,
    start_comment: str,
    custom_directory: Path | None = None,
) -> Path:
    """Convert a model instance to a structured YAML file.

    Organizes fields into sections with headers and saves
    the YAML file with a structured format and timestamped filename.
    The editable fields are written first, followed by the status, owner,
    and machine-generated sections, each introduced by its own header comment.

    Args:
        model_instance: The model instance to convert.
        file_name: Name of the generated YAML file.
        start_comment: Comment at the top of the file.
        custom_directory: Optional directory to save the file. When omitted,
            the variable definitions directory inside the workspace is used.

    Returns:
        Path: The file path of the generated YAML file.
    """
    yaml = YAML()
    configure_yaml(yaml)

    # Convert the Pydantic model instance to a plain dictionary, then apply
    # the ruamel scalar-string formatting (block/double-quoted styles).
    data = model_instance.model_dump(
        serialize_as_any=True,
        warnings="error",
    )
    data = pre_process_data(data)
    # One CommentedMap per section of the yaml file.
    machine_generated_map = CommentedMap()
    commented_map = CommentedMap()
    status_map = CommentedMap()
    owner_map = CommentedMap()

    # Route each model field into the commented map for its section;
    # anything not specially classified goes into the main (editable) map.
    for field_name, value in data.items():
        if field_name == VARIABLE_STATUS_FIELD_NAME:
            status_map[field_name] = value
        elif field_name == OWNER_FIELD_NAME:
            owner_map[field_name] = value
        elif field_name in MACHINE_GENERATED_FIELDS:
            machine_generated_map[field_name] = value
        else:
            commented_map[field_name] = value

    base_path = (
        _get_variable_definitions_dir()
        if custom_directory is None
        else _validate_and_create_directory(custom_directory)
    )

    file_path = base_path / file_name

    # It is important to preserve the order of the yaml dump operations when
    # writing to file so that the file is predictable for the user.
    with file_path.open("w", encoding="utf-8") as file:
        commented_map.yaml_set_start_comment(start_comment)
        yaml.dump(commented_map, file)

        status_map.yaml_set_start_comment(TEMPLATE_SECTION_HEADER_STATUS)
        yaml.dump(status_map, file)

        owner_map.yaml_set_start_comment(TEMPLATE_SECTION_HEADER_OWNER)
        yaml.dump(owner_map, file)

        machine_generated_map.yaml_set_start_comment(
            TEMPLATE_SECTION_HEADER_MACHINE_GENERATED,
        )
        yaml.dump(machine_generated_map, file)
    return file_path
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
from typing import TYPE_CHECKING
|
|
3
|
+
|
|
4
|
+
from dapla_metadata.variable_definitions._generated.vardef_client.models.complete_response import (
|
|
5
|
+
CompleteResponse,
|
|
6
|
+
)
|
|
7
|
+
from dapla_metadata.variable_definitions._generated.vardef_client.models.language_string_type import (
|
|
8
|
+
LanguageStringType,
|
|
9
|
+
)
|
|
10
|
+
from dapla_metadata.variable_definitions._generated.vardef_client.models.owner import (
|
|
11
|
+
Owner,
|
|
12
|
+
)
|
|
13
|
+
from dapla_metadata.variable_definitions._generated.vardef_client.models.variable_status import (
|
|
14
|
+
VariableStatus,
|
|
15
|
+
)
|
|
16
|
+
from dapla_metadata.variable_definitions._utils.constants import DEFAULT_DATE
|
|
17
|
+
from dapla_metadata.variable_definitions._utils.constants import GENERATED_CONTACT
|
|
18
|
+
from dapla_metadata.variable_definitions._utils.constants import TEMPLATE_HEADER
|
|
19
|
+
from dapla_metadata.variable_definitions._utils.files import _create_file_name
|
|
20
|
+
from dapla_metadata.variable_definitions._utils.files import _get_current_time
|
|
21
|
+
from dapla_metadata.variable_definitions._utils.files import (
|
|
22
|
+
_get_variable_definitions_dir,
|
|
23
|
+
)
|
|
24
|
+
from dapla_metadata.variable_definitions._utils.files import (
|
|
25
|
+
_model_to_yaml_with_comments,
|
|
26
|
+
)
|
|
27
|
+
from dapla_metadata.variable_definitions._utils.files import logger
|
|
28
|
+
|
|
29
|
+
if TYPE_CHECKING:
|
|
30
|
+
from dapla_metadata.variable_definitions.variable_definition import (
|
|
31
|
+
VariableDefinition,
|
|
32
|
+
)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _get_default_template() -> "VariableDefinition":
    """Return a placeholder `VariableDefinition` used to seed the template YAML file.

    All fields carry dummy values (Norwegian placeholder texts, empty strings,
    a default date and owner) that the user is expected to replace.
    """
    # Import is needed here to avoid circular imports
    from dapla_metadata.variable_definitions.variable_definition import (
        VariableDefinition,
    )

    return VariableDefinition(
        # "Navn" / "Definisjonstekst" are Norwegian placeholders ("Name" /
        # "Definition text") meant to be overwritten by the user.
        name=LanguageStringType(
            nb="Navn",
        ),
        short_name="generert_kortnavn",
        definition=LanguageStringType(
            nb="Definisjonstekst",
        ),
        valid_from=DEFAULT_DATE,
        unit_types=[""],
        subject_fields=[""],
        contains_special_categories_of_personal_data=False,
        owner=Owner(team="default team", groups=["default group"]),
        contact=GENERATED_CONTACT,
        variable_status=VariableStatus.DRAFT.value,
        # Machine-generated fields are filled with neutral defaults; the
        # server assigns real values on creation.
        id="",
        patch_id=0,
        created_at=DEFAULT_DATE,
        created_by="",
        last_updated_at=DEFAULT_DATE,
        last_updated_by="",
    )
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def create_template_yaml(
    model_instance: CompleteResponse | None = None,
    custom_directory: Path | None = None,
) -> Path:
    """Creates a template yaml file for a new variable definition.

    When no ``model_instance`` is supplied, a default placeholder template is
    used. The generated file name embeds the current timestamp.
    """
    instance = model_instance if model_instance is not None else _get_default_template()
    template_name = _create_file_name(
        "variable_definition_template",
        _get_current_time(),
    )
    result_path = _model_to_yaml_with_comments(
        instance,
        template_name,
        TEMPLATE_HEADER,
        custom_directory=custom_directory,
    )
    logger.debug("Created %s", result_path)
    return result_path
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def _find_latest_template_file(directory: Path | None = None) -> Path | None:
|
|
88
|
+
def _filter_template_file(path: Path) -> bool:
|
|
89
|
+
return "variable_definition_template" in path.stem and path.suffix == ".yaml"
|
|
90
|
+
|
|
91
|
+
try:
|
|
92
|
+
return sorted(
|
|
93
|
+
filter(
|
|
94
|
+
_filter_template_file,
|
|
95
|
+
(directory or _get_variable_definitions_dir()).iterdir(),
|
|
96
|
+
),
|
|
97
|
+
)[-1]
|
|
98
|
+
except IndexError:
|
|
99
|
+
return None
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
"""Utilities for writing and reading existing variable definition files."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from io import StringIO
|
|
5
|
+
from os import PathLike
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
from typing import TypeVar
|
|
9
|
+
|
|
10
|
+
from pydantic import BaseModel
|
|
11
|
+
from ruamel.yaml import YAML
|
|
12
|
+
|
|
13
|
+
from dapla_metadata.variable_definitions._generated.vardef_client.models.complete_response import (
|
|
14
|
+
CompleteResponse,
|
|
15
|
+
)
|
|
16
|
+
from dapla_metadata.variable_definitions._utils.constants import HEADER
|
|
17
|
+
from dapla_metadata.variable_definitions._utils.files import _create_file_name
|
|
18
|
+
from dapla_metadata.variable_definitions._utils.files import _get_current_time
|
|
19
|
+
from dapla_metadata.variable_definitions._utils.files import (
|
|
20
|
+
_model_to_yaml_with_comments,
|
|
21
|
+
)
|
|
22
|
+
from dapla_metadata.variable_definitions._utils.files import configure_yaml
|
|
23
|
+
from dapla_metadata.variable_definitions._utils.files import pre_process_data
|
|
24
|
+
|
|
25
|
+
logger = logging.getLogger(__name__)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
T = TypeVar("T", bound=BaseModel)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def create_variable_yaml(
    model_instance: CompleteResponse,
    custom_directory: Path | None = None,
) -> Path:
    """Creates a yaml file for an existing variable definition.

    The file name includes the variable's short name and id along with a
    timestamp; the file itself is written with section headers and comments.
    """
    timestamp = _get_current_time()
    target_name = _create_file_name(
        "variable_definition",
        timestamp,
        model_instance.short_name,
        model_instance.id,
    )
    return _model_to_yaml_with_comments(
        model_instance,
        target_name,
        HEADER,
        custom_directory=custom_directory,
    )
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def _read_variable_definition_file(file_path: Path) -> dict:
    """Load a variable definition YAML file into a mapping."""
    parser = configure_yaml(YAML())
    logger.debug("Full path to variable definition file %s", file_path)
    logger.info("Reading from '%s'", file_path.name)
    with file_path.open(encoding="utf-8") as source:
        return parser.load(source)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def _strip_strings_recursively(data: Any) -> Any:
|
|
61
|
+
"""Recursively strip leading and trailing whitespace from string values in nested dicts/lists.
|
|
62
|
+
|
|
63
|
+
This function traverses the provided data, which may be a dictionary, list, or other types,
|
|
64
|
+
and applies the following logic:
|
|
65
|
+
- If the data is a dictionary, it recursively strips string values in all key-value pairs.
|
|
66
|
+
- If the data is a list, it recursively strips string values in all list elements.
|
|
67
|
+
- If the data is a string, it strips leading and trailing whitespace.
|
|
68
|
+
- Any other data types are returned unchanged.
|
|
69
|
+
|
|
70
|
+
Args:
|
|
71
|
+
data: The input data, which may include nested dictionaries, lists, or other types.
|
|
72
|
+
|
|
73
|
+
Returns:
|
|
74
|
+
Any: The processed data, with strings stripped of whitespace or unchanged if not a string.
|
|
75
|
+
"""
|
|
76
|
+
if isinstance(data, dict):
|
|
77
|
+
return {k: _strip_strings_recursively(v) for k, v in data.items()}
|
|
78
|
+
if isinstance(data, list):
|
|
79
|
+
return [_strip_strings_recursively(item) for item in data]
|
|
80
|
+
if isinstance(data, str):
|
|
81
|
+
return data.strip()
|
|
82
|
+
return data
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def _read_file_to_model(
    file_path: PathLike[str] | None,
    model_class: type[T],
) -> T:
    """Read from a variable definition file into the given Pydantic model.

    Args:
        file_path (PathLike[str]): The path to the file to read in.
        model_class (type[T]): The model to instantiate. Must inherit from Pydantic's BaseModel.

    Raises:
        FileNotFoundError: If no file path could be deduced (``file_path`` is
            ``None``), or if the file's contents could not be read into the
            model. (The internal ``TypeError`` from an unusable path is
            converted into this ``FileNotFoundError``.)

    Returns:
        T: BaseModel: The instantiated Pydantic model
    """
    try:
        file_path = Path(
            # type incongruence (i.e. None) is handled by catching the exception
            file_path,  # type: ignore [arg-type]
        )
    except TypeError as e:
        msg = "Could not deduce a path to the file. Please supply a path to the yaml file you wish to submit with the `file_path` parameter."
        raise FileNotFoundError(
            msg,
        ) from e
    raw_data = _read_variable_definition_file(file_path)
    cleaned_data = _strip_strings_recursively(raw_data)

    # NOTE(review): `from_dict` is not part of the Pydantic BaseModel
    # interface — presumably provided by the generated client models
    # (hence the type: ignore); confirm against the model classes used here.
    model = model_class.from_dict(  # type:ignore [attr-defined]
        cleaned_data
    )

    if model is None:
        msg = f"Could not read data from {file_path}"
        raise FileNotFoundError(msg)
    return model
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def _convert_to_yaml_output(model: BaseModel) -> str:
    """Convert a Pydantic model to YAML format.

    Args:
        model: A Pydantic model instance

    Returns:
        YAML string representation of the model
    """
    stream = StringIO()
    # `output=stream` directs yaml.dump() into the in-memory buffer;
    # NOTE(review): the context manager appears to ensure output is finalized
    # before the buffer is read back — confirm against ruamel.yaml docs.
    with YAML(output=stream) as yaml:
        configure_yaml(yaml)
        # mode="json" dumps with JSON-compatible scalar types before the
        # ruamel scalar-string formatting is applied.
        data = model.model_dump(
            mode="json",
            serialize_as_any=True,
            warnings="error",
        )
        yaml.dump(pre_process_data(data))
    return stream.getvalue()
|