dapla-toolbelt-metadata 0.8.5__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dapla-toolbelt-metadata might be problematic. Click here for more details.

@@ -0,0 +1,135 @@
1
+ """Upgrade old metadata files to be compatible with new versions.
2
+
3
+ An important principle of Datadoc is that we ALWAYS guarantee backwards
4
+ compatibility of existing metadata documents. This means that we guarantee
5
+ that a user will never lose data, even if their document is decades old.
6
+
7
+ For each document version we release with breaking changes, we implement a
8
+ handler and register the version by defining a BackwardsCompatibleVersion
9
+ instance. These documents will then be upgraded when they're opened in Datadoc.
10
+
11
+ A test must also be implemented for each new version.
12
+ """
13
+
14
+ from __future__ import annotations
15
+
16
+ import logging
17
+ from collections import OrderedDict
18
+ from dataclasses import dataclass
19
+ from typing import TYPE_CHECKING
20
+ from typing import Any
21
+
22
+ from dapla_metadata.datasets.compatibility._handlers import handle_current_version
23
+ from dapla_metadata.datasets.compatibility._handlers import handle_version_0_1_1
24
+ from dapla_metadata.datasets.compatibility._handlers import handle_version_1_0_0
25
+ from dapla_metadata.datasets.compatibility._handlers import handle_version_2_1_0
26
+ from dapla_metadata.datasets.compatibility._handlers import handle_version_2_2_0
27
+ from dapla_metadata.datasets.compatibility._handlers import handle_version_3_1_0
28
+ from dapla_metadata.datasets.compatibility._handlers import handle_version_3_2_0
29
+ from dapla_metadata.datasets.compatibility._handlers import handle_version_3_3_0
30
+ from dapla_metadata.datasets.compatibility._handlers import handle_version_4_0_0
31
+ from dapla_metadata.datasets.compatibility._handlers import handle_version_5_0_1
32
+ from dapla_metadata.datasets.compatibility._handlers import handle_version_6_0_0
33
+ from dapla_metadata.datasets.compatibility._utils import DATADOC_KEY
34
+ from dapla_metadata.datasets.compatibility._utils import DOCUMENT_VERSION_KEY
35
+ from dapla_metadata.datasets.compatibility._utils import UnknownModelVersionError
36
+ from dapla_metadata.datasets.compatibility._utils import (
37
+ is_metadata_in_container_structure,
38
+ )
39
+
40
+ logger = logging.getLogger(__name__)
41
+
42
+ if TYPE_CHECKING:
43
+ from collections.abc import Callable
44
+
45
# Registry of every document version we can upgrade from, keyed by version string.
# Populated as a side effect of BackwardsCompatibleVersion.__post_init__; insertion
# order matters because upgrade_metadata runs the handlers in registration order.
SUPPORTED_VERSIONS: OrderedDict[str, BackwardsCompatibleVersion] = OrderedDict()
46
+
47
+
48
@dataclass()
class BackwardsCompatibleVersion:
    """A metadata document version supported with backwards compatibility.

    Instantiating this class registers the version together with the handler
    function that migrates documents of the *previous* version into this one.
    """

    # Version string, e.g. "2.1.0"; used as the registry key.
    version: str
    # Migration function: takes metadata in the previous version's format and
    # returns it in this version's format.
    handler: Callable[[dict[str, Any]], dict[str, Any]]

    def __post_init__(self) -> None:
        """Add this instance to `SUPPORTED_VERSIONS`, keyed by its version string."""
        SUPPORTED_VERSIONS[self.version] = self

    def upgrade(self, metadata: dict[str, Any]) -> dict[str, Any]:
        """Migrate metadata from the previous version's format into this version's.

        The Document Version field is bumped here, so individual handler
        functions never need to set it themselves.

        Args:
            metadata (dict[str, Any]): Metadata in the previous version's format.

        Returns:
            dict[str, Any]: The metadata upgraded to this version.
        """
        upgraded = self.handler(metadata)
        # The version field lives inside the container when present, otherwise
        # at the top level of the document.
        if is_metadata_in_container_structure(upgraded):
            target = upgraded[DATADOC_KEY]
        else:
            target = upgraded
        target[DOCUMENT_VERSION_KEY] = self.version
        return upgraded
85
+
86
+
87
# Register all the supported versions and their handlers, oldest first.
# Registration order defines the order in which upgrade handlers are applied.
for _version, _handler in (
    ("0.1.1", handle_version_0_1_1),
    ("1.0.0", handle_version_1_0_0),
    ("2.1.0", handle_version_2_1_0),
    ("2.2.0", handle_version_2_2_0),
    ("3.1.0", handle_version_3_1_0),
    ("3.2.0", handle_version_3_2_0),
    ("3.3.0", handle_version_3_3_0),
    ("4.0.0", handle_version_4_0_0),
    ("5.0.1", handle_version_5_0_1),
    ("6.0.0", handle_version_6_0_0),
    ("6.1.0", handle_current_version),
):
    BackwardsCompatibleVersion(version=_version, handler=_handler)
# Don't leak the loop temporaries as module attributes.
del _version, _handler
99
+
100
+
101
def upgrade_metadata(fresh_metadata: dict[str, Any]) -> dict[str, Any]:
    """Upgrade the metadata to the latest version using registered handlers.

    Reads the document version from the supplied metadata, then applies the
    handler for that version and every subsequently registered version, in
    registration order. Metadata in the container structure with an empty
    datadoc section is returned unchanged.

    Args:
        fresh_metadata: The metadata dictionary to be upgraded. This dictionary
            must include version information that determines which handlers to apply.

    Returns:
        The upgraded metadata dictionary, after applying all necessary handlers.

    Raises:
        UnknownModelVersionError: If the metadata's version is unknown or unsupported.
    """
    if is_metadata_in_container_structure(fresh_metadata):
        datadoc_section = fresh_metadata[DATADOC_KEY]
        if datadoc_section is None:
            # Nothing to upgrade when the container has no datadoc content.
            return fresh_metadata
        supplied_version = datadoc_section[DOCUMENT_VERSION_KEY]
    else:
        supplied_version = fresh_metadata[DOCUMENT_VERSION_KEY]

    if supplied_version not in SUPPORTED_VERSIONS:
        # Equivalent to the original no-handler-matched case: no upgrades run.
        raise UnknownModelVersionError(supplied_version)

    # Apply the handler for the supplied version and all later versions, in order.
    ordered_versions = list(SUPPORTED_VERSIONS)
    for version in ordered_versions[ordered_versions.index(supplied_version):]:
        fresh_metadata = SUPPORTED_VERSIONS[version].upgrade(fresh_metadata)
    return fresh_metadata
@@ -17,12 +17,10 @@ from datadoc_model.all_optional.model import DataSetStatus
17
17
 
18
18
  from dapla_metadata._shared import config
19
19
  from dapla_metadata.dapla import user_info
20
+ from dapla_metadata.datasets.compatibility import is_metadata_in_container_structure
21
+ from dapla_metadata.datasets.compatibility import upgrade_metadata
20
22
  from dapla_metadata.datasets.dapla_dataset_path_info import DaplaDatasetPathInfo
21
23
  from dapla_metadata.datasets.dataset_parser import DatasetParser
22
- from dapla_metadata.datasets.model_backwards_compatibility import (
23
- is_metadata_in_container_structure,
24
- )
25
- from dapla_metadata.datasets.model_backwards_compatibility import upgrade_metadata
26
24
  from dapla_metadata.datasets.model_validation import ValidateDatadocMetadata
27
25
  from dapla_metadata.datasets.statistic_subject_mapping import StatisticSubjectMapping
28
26
  from dapla_metadata.datasets.utility.constants import (
@@ -606,7 +604,6 @@ class Datadoc:
606
604
  variable.pseudonymization = (
607
605
  pseudonymization or all_optional_model.Pseudonymization()
608
606
  )
609
- variable.is_personal_data = True
610
607
 
611
608
  def remove_pseudonymization(self, variable_short_name: str) -> None:
612
609
  """Removes a pseudo variable by using the shortname.
@@ -619,4 +616,3 @@ class Datadoc:
619
616
  """
620
617
  if self.variables_lookup[variable_short_name].pseudonymization is not None:
621
618
  self.variables_lookup[variable_short_name].pseudonymization = None
622
- self.variables_lookup[variable_short_name].is_personal_data = False
@@ -75,16 +75,11 @@ DATASET_FIELDS_FROM_EXISTING_METADATA = [
75
75
  "dataset_status",
76
76
  "name",
77
77
  "description",
78
- "data_source",
79
78
  "population_description",
80
- "unit_type",
81
- "temporality_type",
82
79
  "subject_field",
83
80
  "keyword",
84
81
  "spatial_coverage_description",
85
- "contains_personal_data",
86
- "use_restriction",
87
- "use_restriction_date",
82
+ "use_restrictions",
88
83
  "custom_type",
89
84
  "owner",
90
85
  "version_description",
@@ -4,6 +4,7 @@ import datetime # import is needed in xdoctest
4
4
  import logging
5
5
  import pathlib
6
6
  import uuid
7
+ from typing import TypeAlias
7
8
  from typing import cast
8
9
 
9
10
  import datadoc_model
@@ -38,11 +39,11 @@ from dapla_metadata.datasets.utility.constants import (
38
39
 
39
40
  logger = logging.getLogger(__name__)
40
41
 
41
- DatadocMetadataType = (
42
+ DatadocMetadataType: TypeAlias = (
42
43
  all_optional_model.DatadocMetadata | required_model.DatadocMetadata
43
44
  )
44
- DatasetType = all_optional_model.Dataset | required_model.Dataset
45
- OptionalDatadocMetadataType = DatadocMetadataType | None
45
+ DatasetType: TypeAlias = all_optional_model.Dataset | required_model.Dataset
46
+ OptionalDatadocMetadataType: TypeAlias = DatadocMetadataType | None
46
47
 
47
48
 
48
49
  def get_timestamp_now() -> datetime.datetime:
@@ -138,18 +139,13 @@ def set_default_values_dataset(
138
139
  dataset: The dataset object to set default values on.
139
140
 
140
141
  Example:
141
- >>> dataset = model.Dataset(id=None, contains_personal_data=None)
142
+ >>> dataset = model.Dataset(id=None)
142
143
  >>> set_default_values_dataset(dataset)
143
144
  >>> dataset.id is not None
144
145
  True
145
-
146
- >>> dataset.contains_personal_data == False
147
- True
148
146
  """
149
147
  if not dataset.id:
150
148
  dataset.id = uuid.uuid4()
151
- if dataset.contains_personal_data is None:
152
- dataset.contains_personal_data = False
153
149
 
154
150
 
155
151
  def set_dataset_owner(
@@ -181,14 +177,9 @@ def set_variables_inherit_from_dataset(
181
177
  variables: A list of variable objects to update with dataset values.
182
178
 
183
179
  Example:
184
- >>> dataset = model.Dataset(short_name='person_data_v1',data_source='01',temporality_type='STATUS',id='9662875c-c245-41de-b667-12ad2091a1ee',contains_data_from="2010-09-05",contains_data_until="2022-09-05")
185
- >>> variables = [model.Variable(short_name="pers",data_source =None,temporality_type = None, contains_data_from = None,contains_data_until = None)]
180
+ >>> dataset = model.Dataset(short_name='person_data_v1', id='9662875c-c245-41de-b667-12ad2091a1ee', contains_data_from="2010-09-05", contains_data_until="2022-09-05")
181
+ >>> variables = [model.Variable(short_name="pers", data_source=None, temporality_type=None, contains_data_from=None, contains_data_until=None)]
186
182
  >>> set_variables_inherit_from_dataset(dataset, variables)
187
- >>> variables[0].data_source == dataset.data_source
188
- True
189
-
190
- >>> variables[0].temporality_type is None
191
- False
192
183
 
193
184
  >>> variables[0].contains_data_from == dataset.contains_data_from
194
185
  True
@@ -199,8 +190,6 @@ def set_variables_inherit_from_dataset(
199
190
  for v in variables:
200
191
  v.contains_data_from = v.contains_data_from or dataset.contains_data_from
201
192
  v.contains_data_until = v.contains_data_until or dataset.contains_data_until
202
- v.temporality_type = v.temporality_type or dataset.temporality_type
203
- v.data_source = v.data_source or dataset.data_source
204
193
 
205
194
 
206
195
  def incorrect_date_order(
@@ -1,36 +1,30 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.4
2
2
  Name: dapla-toolbelt-metadata
3
- Version: 0.8.5
3
+ Version: 0.9.0
4
4
  Summary: Dapla Toolbelt Metadata
5
- License: MIT
6
- Author: Team Metadata
7
- Author-email: metadata@ssb.no
8
- Requires-Python: >=3.10
9
- Classifier: Development Status :: 4 - Beta
10
- Classifier: License :: OSI Approved :: MIT License
11
- Classifier: Programming Language :: Python :: 3
12
- Classifier: Programming Language :: Python :: 3.10
13
- Classifier: Programming Language :: Python :: 3.11
14
- Classifier: Programming Language :: Python :: 3.12
15
- Classifier: Programming Language :: Python :: 3.13
16
- Requires-Dist: arrow (>=1.3.0)
17
- Requires-Dist: beautifulsoup4 (>=4.12.3)
18
- Requires-Dist: cloudpathlib[gs] (>=0.17.0)
19
- Requires-Dist: google-auth (>=2.38.0)
20
- Requires-Dist: lxml (>=5.3.1)
21
- Requires-Dist: pyarrow (>=8.0.0)
22
- Requires-Dist: pydantic (>=2.5.2)
23
- Requires-Dist: pyjwt (>=2.8.0)
24
- Requires-Dist: python-dotenv (>=1.0.1)
25
- Requires-Dist: requests (>=2.31.0)
26
- Requires-Dist: ruamel-yaml (>=0.18.10)
27
- Requires-Dist: ssb-datadoc-model (==7.0.2)
28
- Requires-Dist: ssb-klass-python (>=1.0.1)
29
- Requires-Dist: typing-extensions (>=4.12.2)
5
+ Project-URL: homepage, https://github.com/statisticsnorway/dapla-toolbelt-metadata
6
+ Project-URL: repository, https://github.com/statisticsnorway/dapla-toolbelt-metadata
7
+ Project-URL: documentation, https://statisticsnorway.github.io/dapla-toolbelt-metadata
30
8
  Project-URL: Changelog, https://github.com/statisticsnorway/dapla-toolbelt-metadata/releases
31
- Project-URL: Documentation, https://statisticsnorway.github.io/dapla-toolbelt-metadata
32
- Project-URL: Homepage, https://github.com/statisticsnorway/dapla-toolbelt-metadata
33
- Project-URL: Repository, https://github.com/statisticsnorway/dapla-toolbelt-metadata
9
+ Author-email: Statistics Norway <metadata@ssb.no>
10
+ License-Expression: MIT
11
+ License-File: LICENSE
12
+ Classifier: Development Status :: 4 - Beta
13
+ Requires-Python: >=3.10
14
+ Requires-Dist: arrow>=1.3.0
15
+ Requires-Dist: beautifulsoup4>=4.12.3
16
+ Requires-Dist: cloudpathlib[gs]>=0.17.0
17
+ Requires-Dist: google-auth>=2.38.0
18
+ Requires-Dist: lxml>=5.3.1
19
+ Requires-Dist: pyarrow>=8.0.0
20
+ Requires-Dist: pydantic>=2.5.2
21
+ Requires-Dist: pyjwt>=2.8.0
22
+ Requires-Dist: python-dotenv>=1.0.1
23
+ Requires-Dist: requests>=2.31.0
24
+ Requires-Dist: ruamel-yaml>=0.18.10
25
+ Requires-Dist: ssb-datadoc-model<9.0.0,>=8.0.0
26
+ Requires-Dist: ssb-klass-python>=1.0.1
27
+ Requires-Dist: typing-extensions>=4.12.2
34
28
  Description-Content-Type: text/markdown
35
29
 
36
30
  # Dapla Toolbelt Metadata
@@ -47,27 +41,28 @@ Description-Content-Type: text/markdown
47
41
 
48
42
  [![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)][pre-commit]
49
43
  [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
50
- [![Poetry](https://img.shields.io/endpoint?url=https://python-poetry.org/badge/v0.json)][poetry]
44
+ [![uv](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/uv/main/assets/badge/v0.json)][uv]
51
45
 
52
46
  [pypi status]: https://pypi.org/project/dapla-toolbelt-metadata/
53
47
  [documentation]: https://statisticsnorway.github.io/dapla-toolbelt-metadata
54
48
  [tests]: https://github.com/statisticsnorway/dapla-toolbelt-metadata/actions?workflow=Tests
55
-
56
49
  [sonarcov]: https://sonarcloud.io/summary/overall?id=statisticsnorway_dapla-toolbelt-metadata
57
50
  [sonarquality]: https://sonarcloud.io/summary/overall?id=statisticsnorway_dapla-toolbelt-metadata
58
51
  [pre-commit]: https://github.com/pre-commit/pre-commit
59
- [poetry]: https://python-poetry.org/
52
+ [uv]: https://docs.astral.sh/uv/
60
53
 
61
54
  Tools and clients for working with the Dapla Metadata system.
62
55
 
63
56
  ## Features
64
57
 
65
- - Create and update metadata for datasets (Datadoc).
58
+ - Create and update metadata for datasets (aka Datadoc).
59
+ - Read, create and update variable definitions (aka Vardef).
60
+ - Check compliance with SSBs naming standard.
66
61
 
67
62
  ### Coming
68
63
 
69
- - Read, create and update variable definitions.
70
64
  - Publish dataset metadata to Statistics Norway's data catalogue.
65
+ - Maintain classifications and code lists.
71
66
 
72
67
  ## Installation
73
68
 
@@ -111,4 +106,3 @@ This project was generated from [Statistics Norway]'s [SSB PyPI Template].
111
106
  [license]: https://github.com/statisticsnorway/dapla-toolbelt-metadata/blob/main/LICENSE
112
107
  [contributor guide]: https://github.com/statisticsnorway/dapla-toolbelt-metadata/blob/main/CONTRIBUTING.md
113
108
  [reference guide]: https://statisticsnorway.github.io/dapla-toolbelt-metadata/reference.html
114
-
@@ -6,41 +6,52 @@ dapla_metadata/_shared/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
6
6
  dapla_metadata/dapla/__init__.py,sha256=tkapF-YwmruPPrKvN3pEoCZqb7xvJx_ogBM8XyGMuJI,130
7
7
  dapla_metadata/dapla/user_info.py,sha256=bENez-ICt9ySR8orYebO68Q3_2LkIW9QTL58DTctmEQ,4833
8
8
  dapla_metadata/datasets/__init__.py,sha256=TvzskpdFC6hGcC9_55URT5jr5wNAPzXuISd2UjJWM_8,280
9
- dapla_metadata/datasets/code_list.py,sha256=6lrPVhXW97kHxhmszuKSQ8kOHERCpVSqX1s7ogefBuI,9054
10
- dapla_metadata/datasets/core.py,sha256=R_FJbU7y4hWe9jjqKUChe1MyNftt0ZxWeFnWeiUfLNM,25936
9
+ dapla_metadata/datasets/code_list.py,sha256=JtCE-5Q8grAKvkn0KKjzeGhO-96O7yGsastbuoakreg,9057
10
+ dapla_metadata/datasets/core.py,sha256=jwnV6kqS7GpS_9kVWbFz7J0TE-TiSSiqq_gSV4sE628,25774
11
11
  dapla_metadata/datasets/dapla_dataset_path_info.py,sha256=WPeV_mwKk2B9sXd14SaP-kTb1bOQ_8W2KtrqOG7sJIY,26867
12
12
  dapla_metadata/datasets/dataset_parser.py,sha256=3dtRXNy1C8SfG8zTYWdY26nV4l-dG25IC_0J5t2bYwI,8285
13
- dapla_metadata/datasets/external_sources/__init__.py,sha256=qvIdXwqyEmXNUCB94ZtZXRzifdW4hiXASFFPtC70f6E,83
14
- dapla_metadata/datasets/external_sources/external_sources.py,sha256=9eIcOIUbaodNX1w9Tj2wl4U4wUmr5kF1R0i01fKUzGs,2974
15
- dapla_metadata/datasets/model_backwards_compatibility.py,sha256=2uj_LpqumGOkVbZzoeWfLZKp6vNhLrhuiO1msm7NqW8,23543
16
13
  dapla_metadata/datasets/model_validation.py,sha256=pGT-jqaQQY4z7jz-7UQd0BQoTWDxDWPYAnDoRC2vd_c,6818
17
14
  dapla_metadata/datasets/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
18
15
  dapla_metadata/datasets/statistic_subject_mapping.py,sha256=ovT-bZv6eGPD3L0UIs5nIw4AjJrfZn0hyWyD72JBmhs,6395
16
+ dapla_metadata/datasets/compatibility/__init__.py,sha256=hKoLOIhF-BMS8EZQUaAI_S-rf6QXufyI0tr9LB3ly74,400
17
+ dapla_metadata/datasets/compatibility/_handlers.py,sha256=8ITKPeaNRpg2ACX-IKvu764z52yAHYMkQUx-52SjzVQ,13858
18
+ dapla_metadata/datasets/compatibility/_utils.py,sha256=bPhI0D78Le12IO3fOJhzijYrSBYbf8Lj3_YxJzupS7U,9030
19
+ dapla_metadata/datasets/compatibility/model_backwards_compatibility.py,sha256=W5AgCu1CGQgQh3-enN5_4Syrs12LPCF9dTUpULxY3_g,6038
20
+ dapla_metadata/datasets/external_sources/__init__.py,sha256=qvIdXwqyEmXNUCB94ZtZXRzifdW4hiXASFFPtC70f6E,83
21
+ dapla_metadata/datasets/external_sources/external_sources.py,sha256=9eIcOIUbaodNX1w9Tj2wl4U4wUmr5kF1R0i01fKUzGs,2974
19
22
  dapla_metadata/datasets/utility/__init__.py,sha256=pp6tUcgUbo8iq9OPtFKQrTbLuI3uY7NHptwWSTpasOU,33
20
- dapla_metadata/datasets/utility/constants.py,sha256=YEs2ECLNJMM1SSORPTDnzNep_Qut5YbJ5JJx_oP3ios,2470
23
+ dapla_metadata/datasets/utility/constants.py,sha256=f9TfBN5aJbiKBQVpu8Whc0X-EMpXv43-Yu2L4KUDA4U,2353
21
24
  dapla_metadata/datasets/utility/enums.py,sha256=SpV4xlmP1YMaJPbmX03hqRLHUOhXIk5gquTeJ8G_5OE,432
22
- dapla_metadata/datasets/utility/utils.py,sha256=VhriEFJVz_ed10BayZevmrfxiWKpas65omvBIo0wHcg,18945
25
+ dapla_metadata/datasets/utility/utils.py,sha256=AjPWlg_8DJpqFQ8B2MaVJrB62EZ3dilvfRfc0DImjQI,18499
23
26
  dapla_metadata/standards/__init__.py,sha256=n8jnMrudLuScSdfQ4UMJorc-Ptg3Y1-ilT8zAaQnM70,179
24
27
  dapla_metadata/standards/name_validator.py,sha256=6-DQE_EKVd6UjL--EXpFcZDQtusVbSFaWaUY-CfOV2c,9184
25
28
  dapla_metadata/standards/standard_validators.py,sha256=tcCiCI76wUVtMzXA2oCgdauZc0uGgUi11FKu-t7KGwQ,3767
26
29
  dapla_metadata/standards/utils/__init__.py,sha256=AiM7JcpFsAgyuCyLDYZo9kI94wvIImMDGoV2lKhS4pE,42
27
30
  dapla_metadata/standards/utils/constants.py,sha256=mhWNFnS6NMsRl0c_deIdzY7_bD_wKn_oej6rzDjgwq4,2578
28
31
  dapla_metadata/variable_definitions/__init__.py,sha256=j_Nn5mnlZ2uio9moDFLE2xpALqrYpupIZMlvwbLuEuA,391
29
- dapla_metadata/variable_definitions/_generated/.openapi-generator/FILES,sha256=262V-LIx_NsddtK6OZVH1rRr6S7AQPhPO6g4scZiypY,800
30
- dapla_metadata/variable_definitions/_generated/.openapi-generator/VERSION,sha256=Y6lrqS2bXoujk5K-DCAwRFdRmkCKuTgvlngEx6FY5So,7
32
+ dapla_metadata/variable_definitions/exceptions.py,sha256=ImB81bne-h45kX9lE5hIh80QAWkOPS52uzcOftuoouM,10118
33
+ dapla_metadata/variable_definitions/vardef.py,sha256=WUpiKfvgFGPhMdjYSFSmdlXQKAolmRgW4-t-EocddQs,13934
34
+ dapla_metadata/variable_definitions/vardok_id.py,sha256=8T23BUHyVQr5hovTVc2E4HVY7f7e_jdi3YL1qzMQgFw,1268
35
+ dapla_metadata/variable_definitions/vardok_vardef_id_pair.py,sha256=8MDdd2-9L30MXkoQrk7NDcueaoxdeYie-TJhgoskTzk,1389
36
+ dapla_metadata/variable_definitions/variable_definition.py,sha256=uSWvSuVDh5zmSXGUb7vitiNd0VThU5DzFV3Rd6gumYE,14620
31
37
  dapla_metadata/variable_definitions/_generated/.openapi-generator-ignore,sha256=x9lryVB5wtVEuKQ5GcZ94b10RgtkVXbtvWXOArO1XsM,169
32
38
  dapla_metadata/variable_definitions/_generated/README.md,sha256=Y4et1oAhZTCr7a-CZfLbIpyYnhKzpygNg-gj7qJ09Eg,7650
33
39
  dapla_metadata/variable_definitions/_generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
40
+ dapla_metadata/variable_definitions/_generated/.openapi-generator/FILES,sha256=262V-LIx_NsddtK6OZVH1rRr6S7AQPhPO6g4scZiypY,800
41
+ dapla_metadata/variable_definitions/_generated/.openapi-generator/VERSION,sha256=Y6lrqS2bXoujk5K-DCAwRFdRmkCKuTgvlngEx6FY5So,7
34
42
  dapla_metadata/variable_definitions/_generated/vardef_client/__init__.py,sha256=7WZfJtGCTCWX-IDogKS1V3BQl1HzaIfWzm5Xykrv930,4211
43
+ dapla_metadata/variable_definitions/_generated/vardef_client/api_client.py,sha256=D2Jd8RBNbrhGusN5TGz0JVF9euIwd9GgDrbD1Gbseog,29531
44
+ dapla_metadata/variable_definitions/_generated/vardef_client/api_response.py,sha256=FDrkcrL60n7sBj5kEEfbhGehtIsBcnKV9XsQUF-vZw0,738
45
+ dapla_metadata/variable_definitions/_generated/vardef_client/configuration.py,sha256=bzzTunhv7xUk_rKxipGopP9khWyTCD4-wu4TrJIyZNU,17979
46
+ dapla_metadata/variable_definitions/_generated/vardef_client/exceptions.py,sha256=YiEZFepEuBfg2aBZcuDSp7NXjk6VL06ecySIOJch13o,8444
47
+ dapla_metadata/variable_definitions/_generated/vardef_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
48
+ dapla_metadata/variable_definitions/_generated/vardef_client/rest.py,sha256=x4PWmg3IYQBr8OgnrWr3l4Ke2rElHP3zAEVxk2U-mOc,12022
35
49
  dapla_metadata/variable_definitions/_generated/vardef_client/api/__init__.py,sha256=LsTxGiOxCBqPeztRvoCANeVr-t0ApkHUqWiouWM8YS8,343
36
50
  dapla_metadata/variable_definitions/_generated/vardef_client/api/data_migration_api.py,sha256=P6eY76I0xayOxnqt8e6RS6jPk509qXv7FX6V_1km20s,34811
37
51
  dapla_metadata/variable_definitions/_generated/vardef_client/api/draft_variable_definitions_api.py,sha256=C3ZBBNQbinZjVAkNUKg-XvK1tfHxU7JXKEGx5NUt7B8,39930
38
52
  dapla_metadata/variable_definitions/_generated/vardef_client/api/patches_api.py,sha256=oAedyr4BaJzIKy2s1x9KsmS6gMaJB4GQrUVhsqxhniQ,39788
39
53
  dapla_metadata/variable_definitions/_generated/vardef_client/api/validity_periods_api.py,sha256=MbAHYpKpSiNzSi0c1qrzIlVJ6-L4PYevGKQ7kfzo-z4,27016
40
54
  dapla_metadata/variable_definitions/_generated/vardef_client/api/variable_definitions_api.py,sha256=BF6nw3wxw1FzIaQ6ShzPG3Ag7qIrAp2qhrtcD9o2y2o,27768
41
- dapla_metadata/variable_definitions/_generated/vardef_client/api_client.py,sha256=D2Jd8RBNbrhGusN5TGz0JVF9euIwd9GgDrbD1Gbseog,29531
42
- dapla_metadata/variable_definitions/_generated/vardef_client/api_response.py,sha256=FDrkcrL60n7sBj5kEEfbhGehtIsBcnKV9XsQUF-vZw0,738
43
- dapla_metadata/variable_definitions/_generated/vardef_client/configuration.py,sha256=bzzTunhv7xUk_rKxipGopP9khWyTCD4-wu4TrJIyZNU,17979
44
55
  dapla_metadata/variable_definitions/_generated/vardef_client/docs/CompleteResponse.md,sha256=CpB_PB8BkLAM5pwDPHb9AjMtk5Lxkhrf3bpZZhtFGeo,4092
45
56
  dapla_metadata/variable_definitions/_generated/vardef_client/docs/Contact.md,sha256=UrWzrB2oK38fWCV2XRQ8SBfBz9yRRKcNuO6h-A8ZOdw,847
46
57
  dapla_metadata/variable_definitions/_generated/vardef_client/docs/DataMigrationApi.md,sha256=_PSmVqGcyHipdn5dtv3gKpzLkYnZnD6r56n07jN3LEY,3269
@@ -57,7 +68,6 @@ dapla_metadata/variable_definitions/_generated/vardef_client/docs/ValidityPeriod
57
68
  dapla_metadata/variable_definitions/_generated/vardef_client/docs/ValidityPeriodsApi.md,sha256=oKw2fbxeUqdR4JcDSFA0nwX5Emroog8OuoaVt7FORJo,8172
58
69
  dapla_metadata/variable_definitions/_generated/vardef_client/docs/VariableDefinitionsApi.md,sha256=Gm4oBYTzOx0-pZiocAhLWcWukMzk9980pmH38nGCdYE,10812
59
70
  dapla_metadata/variable_definitions/_generated/vardef_client/docs/VariableStatus.md,sha256=UL4LmoMEdHA9Nv1XARIe8YMm6e5sA9KOtxkOU-Ujh74,418
60
- dapla_metadata/variable_definitions/_generated/vardef_client/exceptions.py,sha256=YiEZFepEuBfg2aBZcuDSp7NXjk6VL06ecySIOJch13o,8444
61
71
  dapla_metadata/variable_definitions/_generated/vardef_client/models/__init__.py,sha256=AXEkxN7a89KC1oPZ_mZUp8MRJF8o-vXOd5HiL7bH99w,3788
62
72
  dapla_metadata/variable_definitions/_generated/vardef_client/models/complete_response.py,sha256=EGYcmV_QRqoSwhWyKcNKP5V2QdLbljgpa6Fc9fu9hY8,13411
63
73
  dapla_metadata/variable_definitions/_generated/vardef_client/models/contact.py,sha256=QQbiI34m467sFWV4LpN7vLxualKfOq9lns2QlaXPLkc,5551
@@ -72,8 +82,6 @@ dapla_metadata/variable_definitions/_generated/vardef_client/models/validity_per
72
82
  dapla_metadata/variable_definitions/_generated/vardef_client/models/vardok_id_response.py,sha256=ArX_-UcvXaPBt1MGdCwaB-n5wRyzFsnCeEEV_u_zGzE,5149
73
83
  dapla_metadata/variable_definitions/_generated/vardef_client/models/vardok_vardef_id_pair_response.py,sha256=7GWZPE63_JdNb3vsecyieEtMPK8qD2995z5yxOS5GjY,5284
74
84
  dapla_metadata/variable_definitions/_generated/vardef_client/models/variable_status.py,sha256=x9PZJMkE4rFAPm_d0e1PWdVoa8x049y16Po9iP7npP0,3541
75
- dapla_metadata/variable_definitions/_generated/vardef_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
76
- dapla_metadata/variable_definitions/_generated/vardef_client/rest.py,sha256=x4PWmg3IYQBr8OgnrWr3l4Ke2rElHP3zAEVxk2U-mOc,12022
77
85
  dapla_metadata/variable_definitions/_utils/__init__.py,sha256=qAhRLJoTBqtR3f9xRXTRhD7-5Xg0Opk1Ks5F4AUYnpA,45
78
86
  dapla_metadata/variable_definitions/_utils/_client.py,sha256=v1-9VjrdPI6-sroam5vXMPEV1dQMPsYk7KyGd48HjYw,971
79
87
  dapla_metadata/variable_definitions/_utils/config.py,sha256=cc03xlcy_pnn06_6px0taO8LopmVJ9ud75oayGge8Vk,1814
@@ -81,12 +89,7 @@ dapla_metadata/variable_definitions/_utils/constants.py,sha256=HI_XprHnuXWd4ias5
81
89
  dapla_metadata/variable_definitions/_utils/files.py,sha256=JbPgPNQ7iA38juMqGEdcg5OjZZUwCb6NQtPL0AEspD0,10933
82
90
  dapla_metadata/variable_definitions/_utils/template_files.py,sha256=-PgYs4TG4vrXLQgk47pow9ZsqlZqhtO755LnEmvN4MA,3405
83
91
  dapla_metadata/variable_definitions/_utils/variable_definition_files.py,sha256=sGhcSpckR9NtYGNh2oVkiCd5SI3bbJEBhc1PA2uShs0,4701
84
- dapla_metadata/variable_definitions/exceptions.py,sha256=ImB81bne-h45kX9lE5hIh80QAWkOPS52uzcOftuoouM,10118
85
- dapla_metadata/variable_definitions/vardef.py,sha256=WUpiKfvgFGPhMdjYSFSmdlXQKAolmRgW4-t-EocddQs,13934
86
- dapla_metadata/variable_definitions/vardok_id.py,sha256=8T23BUHyVQr5hovTVc2E4HVY7f7e_jdi3YL1qzMQgFw,1268
87
- dapla_metadata/variable_definitions/vardok_vardef_id_pair.py,sha256=8MDdd2-9L30MXkoQrk7NDcueaoxdeYie-TJhgoskTzk,1389
88
- dapla_metadata/variable_definitions/variable_definition.py,sha256=uSWvSuVDh5zmSXGUb7vitiNd0VThU5DzFV3Rd6gumYE,14620
89
- dapla_toolbelt_metadata-0.8.5.dist-info/LICENSE,sha256=np3IfD5m0ZUofn_kVzDZqliozuiO6wrktw3LRPjyEiI,1073
90
- dapla_toolbelt_metadata-0.8.5.dist-info/METADATA,sha256=h1embOpjLgmdlQ-diKeK62uuTdhNHIFWKryhf7perAc,4905
91
- dapla_toolbelt_metadata-0.8.5.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
92
- dapla_toolbelt_metadata-0.8.5.dist-info/RECORD,,
92
+ dapla_toolbelt_metadata-0.9.0.dist-info/METADATA,sha256=kbbNm1D7RWsZqqNxPaebH4nILqnXpUvcppqLZ82RJGg,4723
93
+ dapla_toolbelt_metadata-0.9.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
94
+ dapla_toolbelt_metadata-0.9.0.dist-info/licenses/LICENSE,sha256=np3IfD5m0ZUofn_kVzDZqliozuiO6wrktw3LRPjyEiI,1073
95
+ dapla_toolbelt_metadata-0.9.0.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 2.1.3
2
+ Generator: hatchling 1.27.0
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any