dapla-toolbelt-metadata 0.5.0__tar.gz → 0.6.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dapla-toolbelt-metadata might be problematic.
Files changed (90)
  1. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/PKG-INFO +5 -5
  2. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/pyproject.toml +11 -6
  3. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/__init__.py +2 -0
  4. dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/dapla/__init__.py +4 -0
  5. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/dapla/user_info.py +15 -16
  6. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/dapla_dataset_path_info.py +128 -14
  7. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/dataset_parser.py +20 -14
  8. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/utility/utils.py +2 -2
  9. dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/standards/__init__.py +4 -0
  10. dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/standards/name_validator.py +250 -0
  11. dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/standards/standard_validators.py +98 -0
  12. dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/standards/utils/__init__.py +1 -0
  13. dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/standards/utils/constants.py +49 -0
  14. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_utils/descriptions.py +7 -4
  15. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/variable_definition.py +21 -11
  16. dapla_toolbelt_metadata-0.5.0/src/dapla_metadata/dapla/__init__.py +0 -1
  17. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/LICENSE +0 -0
  18. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/README.md +0 -0
  19. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/_shared/__init__.py +0 -0
  20. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/_shared/config.py +0 -0
  21. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/_shared/enums.py +0 -0
  22. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/_shared/py.typed +0 -0
  23. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/__init__.py +0 -0
  24. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/code_list.py +0 -0
  25. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/core.py +0 -0
  26. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/external_sources/__init__.py +0 -0
  27. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/external_sources/external_sources.py +0 -0
  28. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/model_backwards_compatibility.py +0 -0
  29. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/model_validation.py +0 -0
  30. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/py.typed +0 -0
  31. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/statistic_subject_mapping.py +0 -0
  32. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/utility/__init__.py +0 -0
  33. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/utility/constants.py +0 -0
  34. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/datasets/utility/enums.py +0 -0
  35. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/__init__.py +0 -0
  36. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/.openapi-generator/FILES +0 -0
  37. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/.openapi-generator/VERSION +0 -0
  38. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/.openapi-generator-ignore +0 -0
  39. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/README.md +0 -0
  40. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/__init__.py +0 -0
  41. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/__init__.py +0 -0
  42. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/__init__.py +0 -0
  43. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/data_migration_api.py +0 -0
  44. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/draft_variable_definitions_api.py +0 -0
  45. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/patches_api.py +0 -0
  46. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/validity_periods_api.py +0 -0
  47. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api/variable_definitions_api.py +0 -0
  48. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api_client.py +0 -0
  49. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/api_response.py +0 -0
  50. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/configuration.py +0 -0
  51. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/CompleteResponse.md +0 -0
  52. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/Contact.md +0 -0
  53. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/DataMigrationApi.md +0 -0
  54. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/Draft.md +0 -0
  55. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/DraftVariableDefinitionsApi.md +0 -0
  56. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/LanguageStringType.md +0 -0
  57. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/Owner.md +0 -0
  58. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/Patch.md +0 -0
  59. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/PatchesApi.md +0 -0
  60. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/PublicApi.md +0 -0
  61. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/SupportedLanguages.md +0 -0
  62. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/UpdateDraft.md +0 -0
  63. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/ValidityPeriod.md +0 -0
  64. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/ValidityPeriodsApi.md +0 -0
  65. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/VariableDefinitionsApi.md +0 -0
  66. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/docs/VariableStatus.md +0 -0
  67. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/exceptions.py +0 -0
  68. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/__init__.py +0 -0
  69. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/complete_response.py +0 -0
  70. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/contact.py +0 -0
  71. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/draft.py +0 -0
  72. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/language_string_type.py +0 -0
  73. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/owner.py +0 -0
  74. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/patch.py +0 -0
  75. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/problem.py +0 -0
  76. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/update_draft.py +0 -0
  77. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/validity_period.py +0 -0
  78. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/models/variable_status.py +0 -0
  79. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/py.typed +0 -0
  80. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_generated/vardef_client/rest.py +0 -0
  81. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_utils/__init__.py +0 -0
  82. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_utils/_client.py +0 -0
  83. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_utils/config.py +0 -0
  84. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_utils/constants.py +0 -0
  85. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_utils/files.py +0 -0
  86. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_utils/template_files.py +0 -0
  87. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/_utils/variable_definition_files.py +0 -0
  88. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/exceptions.py +0 -0
  89. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/resources/vardef_model_descriptions_nb.yaml +0 -0
  90. {dapla_toolbelt_metadata-0.5.0 → dapla_toolbelt_metadata-0.6.0}/src/dapla_metadata/variable_definitions/vardef.py +0 -0
--- dapla_toolbelt_metadata-0.5.0/PKG-INFO
+++ dapla_toolbelt_metadata-0.6.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: dapla-toolbelt-metadata
-Version: 0.5.0
+Version: 0.6.0
 Summary: Dapla Toolbelt Metadata
 License: MIT
 Author: Team Metadata
@@ -15,16 +15,16 @@ Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: arrow (>=1.3.0)
 Requires-Dist: beautifulsoup4 (>=4.12.3)
 Requires-Dist: cloudpathlib[gs] (>=0.17.0)
-Requires-Dist: dapla-toolbelt (>=3.0.1)
-Requires-Dist: pandas (>=1.4.2)
+Requires-Dist: google-auth (>=2.38.0)
+Requires-Dist: lxml (>=5.3.1)
 Requires-Dist: pyarrow (>=8.0.0)
 Requires-Dist: pydantic (>=2.5.2)
 Requires-Dist: pyjwt (>=2.8.0)
 Requires-Dist: python-dotenv (>=1.0.1)
 Requires-Dist: requests (>=2.31.0)
-Requires-Dist: ruamel-yaml (>=0.18.10,<0.19.0)
+Requires-Dist: ruamel-yaml (>=0.18.10)
 Requires-Dist: ssb-datadoc-model (>=6.0.0,<7.0.0)
-Requires-Dist: ssb-klass-python (>=0.0.9)
+Requires-Dist: ssb-klass-python (>=1.0.1)
 Requires-Dist: typing-extensions (>=4.12.2)
 Project-URL: Changelog, https://github.com/statisticsnorway/dapla-toolbelt-metadata/releases
 Project-URL: Documentation, https://statisticsnorway.github.io/dapla-toolbelt-metadata

--- dapla_toolbelt_metadata-0.5.0/pyproject.toml
+++ dapla_toolbelt_metadata-0.6.0/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "dapla-toolbelt-metadata"
-version = "0.5.0"
+version = "0.6.0"
 description = "Dapla Toolbelt Metadata"
 authors = ["Team Metadata <metadata@ssb.no>"]
 license = "MIT"
@@ -19,18 +19,18 @@ Changelog = "https://github.com/statisticsnorway/dapla-toolbelt-metadata/release
 python = ">=3.11,<4.0"
 pyarrow = ">=8.0.0"
 pydantic = ">=2.5.2"
-pandas = ">=1.4.2"
-dapla-toolbelt = ">=3.0.1"
 arrow = ">=1.3.0"
 python-dotenv = ">=1.0.1"
 requests = ">=2.31.0"
 beautifulsoup4 = ">=4.12.3"
 cloudpathlib = { extras = ["gs"], version = ">=0.17.0" }
 pyjwt = ">=2.8.0"
-ssb-klass-python = ">=0.0.9"
+ssb-klass-python = ">=1.0.1"
 ssb-datadoc-model = "^6.0.0"
 typing-extensions = ">=4.12.2"
-ruamel-yaml = "^0.18.10"
+ruamel-yaml = ">=0.18.10"
+google-auth = ">=2.38.0"
+lxml = ">=5.3.1"
 
 [tool.poetry.group.dev.dependencies]
 pygments = ">=2.18.0"
@@ -48,6 +48,7 @@ typeguard = ">=2.13.3"
 xdoctest = { extras = ["colors"], version = ">=0.15.10" }
 myst-parser = { version = ">=0.16.1" }
 mypy = ">=0.950"
+pandas = ">=1.4.2"
 pytest-cov = ">=3.0.0"
 python-kacl = "*"
 pytest-mock = ">=3.14.0"
@@ -58,11 +59,13 @@ requests-mock = ">=1.12.1"
 types-Pygments = "*"
 types-colorama = "*"
 types-setuptools = "*"
+types-requests = "*"
 types-beautifulsoup4 = ">=4.12.0.20240511"
 ipykernel = "^6.29.5"
 rich = "^13.9.4"
 bpython = "^0.24"
 testcontainers = { version = "^4.8.2", extras = ["generic"] }
+pytest-asyncio = "^0.26.0"
 
 [tool.pytest.ini_options]
 pythonpath = ["src"]
@@ -115,8 +118,8 @@ module = [
     "datadoc_model.model",
     "pytest_mock",
     "testcontainers.*",
-    "yaml",
     "httpx",
+    "ruamel.*",
 ]
 ignore_missing_imports = true
 
@@ -152,9 +155,11 @@ select = ["ALL"]
 ignore = [
     "ANN202", # Don't requiere return type annotation for private functions.
     "ANN401", # Allow type annotation with type Any.
+    "COM812", # Suggested to ignore when using ruff format
     "D100", # Supress undocumented-public-module. Only doc of public api required.
     "FBT001", # Allow boolean positional arguments in a function.
     "FBT002", # Allow boolean default positional arguments in a function.
+    "FIX002", # Don't fail on TODO as long as it's documented correctly.
     "E402", # Supress module-import-not-at-top-of-file, needed in jupyter notebooks.
     "E501", # Supress line-too-long warnings: trust black's judgement on this one.
     "PLR2004", # Allow to compare with unnamed numerical constants.

--- dapla_toolbelt_metadata-0.5.0/src/dapla_metadata/__init__.py
+++ dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/__init__.py
@@ -9,5 +9,7 @@ warnings.filterwarnings(
 
 import datadoc_model.model as datadoc_model
 
+from . import dapla
 from . import datasets
+from . import standards
 from . import variable_definitions

--- /dev/null
+++ dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/dapla/__init__.py
@@ -0,0 +1,4 @@
+"""Expose information specific to the Dapla platform."""
+
+from .user_info import DaplaLabUserInfo
+from .user_info import UserInfo

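The new subpackage re-exports the user-info classes so they can be imported directly from `dapla_metadata.dapla`. A minimal sketch of how this fits together, using the `TestUserInfo` stub from the `user_info.py` diff below (the stub itself is not re-exported, so it is imported from the module):

    from dapla_metadata.dapla import UserInfo  # re-exported by the new __init__
    from dapla_metadata.dapla.user_info import TestUserInfo

    # TestUserInfo satisfies the UserInfo protocol with fixed placeholder
    # values, as shown in the user_info.py diff below.
    user: UserInfo = TestUserInfo()
    assert user.short_email == "default_user@ssb.no"
    assert user.current_team == "default-team"
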
--- dapla_toolbelt_metadata-0.5.0/src/dapla_metadata/dapla/user_info.py
+++ dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/dapla/user_info.py
@@ -13,11 +13,6 @@ from dapla_metadata._shared.enums import DaplaService
 logger = logging.getLogger(__name__)
 
 
-PLACEHOLDER_EMAIL_ADDRESS = "default_user@ssb.no"
-PLACEHOLDER_GROUP = "default-team-developers"
-PLACEHOLDER_TEAM = "default-team"
-
-
 class UserInfo(Protocol):
     """Information about the current user.
 
@@ -62,20 +57,24 @@ class UnknownUserInfo:
 class TestUserInfo:
     """Information about the current user for local development and testing."""
 
+    PLACEHOLDER_EMAIL_ADDRESS = "default_user@ssb.no"
+    PLACEHOLDER_GROUP = "default-team-developers"
+    PLACEHOLDER_TEAM = "default-team"
+
     @property
     def short_email(self) -> str | None:
         """Get the short email address."""
-        return PLACEHOLDER_EMAIL_ADDRESS
+        return TestUserInfo.PLACEHOLDER_EMAIL_ADDRESS
 
     @property
     def current_group(self) -> str | None:
         """Get the group which the user is currently representing."""
-        return PLACEHOLDER_GROUP
+        return TestUserInfo.PLACEHOLDER_GROUP
 
     @property
     def current_team(self) -> str | None:
         """Get the team which the user is currently representing."""
-        return PLACEHOLDER_TEAM
+        return TestUserInfo.PLACEHOLDER_TEAM
 
 
 class DaplaLabUserInfo:
@@ -146,17 +145,17 @@ def get_user_info_for_current_platform() -> UserInfo:
 def parse_team_name(group: str) -> str:
     """Parses the group to get the current team.
 
-    >>> parse_team_name(dapla-metadata-developers)
-    (dapla-metadata)
+    >>> parse_team_name("dapla-metadata-developers")
+    'dapla-metadata'
 
-    >>> parse_team_name(dapla-metadata-data-admins)
-    (dapla-metadata)
+    >>> parse_team_name("dapla-metadata-data-admins")
+    'dapla-metadata'
 
-    >>> parse_team_name(dapla-metadata)
-    (dapla)
+    >>> parse_team_name("dapla-metadata")
+    'dapla'
 
-    >>> parse_team_name(dapla-metadata-not-real-name)
-    (dapla-metadata-not-real)
+    >>> parse_team_name("dapla-metadata-not-real-name")
+    'dapla-metadata-not-real'
     """
     parts = group.split("-")
     return "-".join(parts[:-2] if group.endswith("data-admins") else parts[:-1])

--- dapla_toolbelt_metadata-0.5.0/src/dapla_metadata/datasets/dapla_dataset_path_info.py
+++ dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/datasets/dapla_dataset_path_info.py
@@ -478,7 +478,7 @@ class DaplaDatasetPathInfo:
         """Extract the bucket name from the dataset path.
 
         Returns:
-            The bucket name or None if the dataset path is not a GCS path.
+            The bucket name or None if the dataset path is not a GCS path nor ssb bucketeer path.
 
         Examples:
         >>> DaplaDatasetPathInfo('gs://ssb-staging-dapla-felles-data-delt/datadoc/utdata/person_data_p2021_v2.parquet').bucket_name
@@ -492,17 +492,35 @@ class DaplaDatasetPathInfo:
 
         >>> DaplaDatasetPathInfo('ssb-staging-dapla-felles-data-delt/datadoc/utdata/person_data_p2021_v2.parquet').bucket_name
         None
+
+        >>> DaplaDatasetPathInfo('ssb-staging-dapla-felles-data-delt/datadoc/utdata/person_data_p2021_v2.parquet').bucket_name
+        None
+
+        >>> DaplaDatasetPathInfo('buckets/ssb-staging-dapla-felles-data-delt/stat/utdata/person_data_p2021_v2.parquet').bucket_name
+        ssb-staging-dapla-felles-data-delt
+
+        >>> DaplaDatasetPathInfo('buckets/ssb-staging-dapla-felles-data-delt/person_data_p2021_v2.parquet').bucket_name
+        ssb-staging-dapla-felles-data-delt
+
+        >>> DaplaDatasetPathInfo('home/work/buckets/ssb-staging-dapla-felles-produkt/stat/utdata/person_data_p2021_v2.parquet').bucket_name
+        ssb-staging-dapla-felles-produkt
         """
         prefix: str | None = None
-        if self.dataset_string.startswith(GSPath.cloud_prefix):
+        dataset_string = str(self.dataset_string)
+        if GSPath.cloud_prefix in self.dataset_string:
             prefix = GSPath.cloud_prefix
-        elif self.dataset_string.startswith(GS_PREFIX_FROM_PATHLIB):
+            _, bucket_and_rest = dataset_string.split(prefix, 1)
+        elif GS_PREFIX_FROM_PATHLIB in self.dataset_string:
             prefix = GS_PREFIX_FROM_PATHLIB
+            _, bucket_and_rest = self.dataset_string.split(prefix, 1)
+        elif "buckets/" in self.dataset_string:
+            prefix = "buckets/"
+            _, bucket_and_rest = self.dataset_string.split(prefix, 1)
         else:
             return None
 
         return pathlib.Path(
-            self.dataset_string.removeprefix(prefix),
+            bucket_and_rest,
         ).parts[0]
 
@@ -528,6 +546,15 @@ class DaplaDatasetPathInfo:
 
         >>> DaplaDatasetPathInfo('my_data/simple_dataset_name.parquet').dataset_short_name
         simple_dataset_name
+
+        >>> DaplaDatasetPathInfo('gs:/ssb-staging-dapla-felles-data-delt/datadoc/utdata/person_data_p2021_v2.parquet').dataset_short_name
+        person_data
+
+        >>> DaplaDatasetPathInfo('buckets/ssb-staging-dapla-felles-data-delt/stat/utdata/folk_data_p2021_v2.parquet').dataset_short_name
+        folk_data
+
+        >>> DaplaDatasetPathInfo('buckets/ssb-staging-dapla-felles-data-delt/stat/utdata/dapla/bus_p2021_v2.parquet').dataset_short_name
+        bus
         """
         if self.contains_data_from or self.contains_data_until:
             short_name_sections = self.dataset_name_sections[
@@ -601,6 +628,9 @@ class DaplaDatasetPathInfo:
         >>> DaplaDatasetPathInfo('utdata/min_statistikk/person_data_v1.parquet').dataset_state
         <DataSetState.OUTPUT_DATA: 'OUTPUT_DATA'>
 
+        >>> DaplaDatasetPathInfo('buckets/bucket_name/stat_name/inndata/min_statistikk/person_data_v1.parquet').dataset_state
+        <DataSetState.INPUT_DATA: 'INPUT_DATA'>
+
         >>> DaplaDatasetPathInfo('my_special_data/person_data_v1.parquet').dataset_state
         None
         """
@@ -632,6 +662,12 @@ class DaplaDatasetPathInfo:
 
         >>> DaplaDatasetPathInfo('person_data.parquet').dataset_version
         None
+
+        >>> DaplaDatasetPathInfo('buckets/bucket_name/stat_name/inndata/min_statistikk/person_data_v1.parquet').dataset_version
+        '1'
+
+        >>> DaplaDatasetPathInfo('buckets/bucket_name/stat_name/inndata/min_statistikk/person_data.parquet').dataset_version
+        None
         """
         minimum_elements_in_file_name: Final[int] = 2
         minimum_characters_in_version_string: Final[int] = 2
@@ -645,13 +681,37 @@ class DaplaDatasetPathInfo:
             return last_filename_element[1:]
         return None
 
+    def _get_left_parts(
+        self,
+        dataset_path_parts: list[str],
+        state_index: int,
+    ) -> list[str]:
+        """Retrieve the path parts before the dataset state, considering bucket prefixes."""
+        bucket_prefix = {"gs:", "buckets"}
+        left_parts = dataset_path_parts[:state_index]
+
+        # Stop checking beyond the bucket prefix
+        prefix_intersection = bucket_prefix & set(left_parts)
+        if prefix_intersection:
+            first_prefix = min(
+                left_parts.index(prefix) for prefix in prefix_intersection
+            )
+            left_parts = left_parts[first_prefix:]
+
+        return (
+            []
+            if left_parts == ["/"]
+            or (left_parts[0] in bucket_prefix and len(left_parts) <= 2)
+            else left_parts
+        )
+
     @property
     def statistic_short_name(
         self,
     ) -> str | None:
         """Extract the statistical short name from the filepath.
 
-        Extract the statistical short name from the filepath right before the
+        Extract the statistical short name from the filepath either after bucket name or right before the
         dataset state based on the Dapla filepath naming convention.
 
         Returns:
@@ -662,21 +722,75 @@ class DaplaDatasetPathInfo:
         >>> DaplaDatasetPathInfo('prosjekt/befolkning/klargjorte_data/person_data_v1.parquet').statistic_short_name
         befolkning
 
+        >>> DaplaDatasetPathInfo('buckets/prosjekt/befolkning/person_data_v1.parquet').statistic_short_name
+        befolkning
+
         >>> DaplaDatasetPathInfo('befolkning/inndata/person_data_v1.parquet').statistic_short_name
         befolkning
 
+        >>> DaplaDatasetPathInfo('buckets/bucket_name/stat_name/inndata/min_statistikk/person_data.parquet').statistic_short_name
+        stat_name
+
+        >>> DaplaDatasetPathInfo('buckets/stat_name/utdata/person_data.parquet').statistic_short_name
+        None
+
         >>> DaplaDatasetPathInfo('befolkning/person_data.parquet').statistic_short_name
         None
+
+        >>> DaplaDatasetPathInfo('buckets/produkt/befolkning/utdata/person_data.parquet').statistic_short_name
+        befolkning
+
+        >>> DaplaDatasetPathInfo('resources/buckets/produkt/befolkning/utdata/person_data.parquet').statistic_short_name
+        befolkning
+
+        >>> DaplaDatasetPathInfo('gs://statistikk/produkt/klargjorte-data/persondata_p1990-Q1_p2023-Q4_v1/aar=2019/data.parquet').statistic_short_name
+        produkt
+
+        >>> DaplaDatasetPathInfo('gs://statistikk/produkt/persondata_p1990-Q1_p2023-Q4_v1/aar=2019/data.parquet').statistic_short_name
+        None
+
+        >>> DaplaDatasetPathInfo('buckets/ssb-staging-dapla-felles-data-delt/person_data_p2021_v2.parquet').statistic_short_name
+        None
         """
-        dataset_state = self.dataset_state
-        if dataset_state is not None:
-            dataset_state_names = self._extract_norwegian_dataset_state_path_part(
-                dataset_state,
-            )
-            dataset_path_parts = list(self.dataset_path.parts)
-            for i in dataset_state_names:
-                if i in dataset_path_parts and dataset_path_parts.index(i) != 0:
-                    return dataset_path_parts[dataset_path_parts.index(i) - 1]
+        if not self.dataset_state:
+            if self.bucket_name:
+                parts = self.dataset_path.parent.parts
+
+                if self.bucket_name not in parts:
+                    return None
+
+                # Find the index of bucket_name in the path
+                bucket_name_index = self.dataset_path.parent.parts.index(
+                    self.bucket_name,
+                )
+
+                # If there are parts after bucket_name, return the part immediately after it
+                if len(self.dataset_path.parent.parts) > bucket_name_index + 1:
+                    return self.dataset_path.parent.parts[bucket_name_index + 1]
+
+            return None
+
+        dataset_state_names = self._extract_norwegian_dataset_state_path_part(
+            self.dataset_state,
+        )
+        dataset_path_parts = list(self.dataset_path.parts)
+
+        for state in dataset_state_names:
+            if state not in dataset_path_parts:
+                continue
+
+            index = dataset_path_parts.index(state)
+
+            if index == 0:
+                continue
+
+            left_parts = self._get_left_parts(dataset_path_parts, index)
+
+            if not left_parts:
+                return None
+
+            return dataset_path_parts[index - 1]
+
         return None
 
     def path_complies_with_naming_standard(self) -> bool:

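Taken together, these changes teach `DaplaDatasetPathInfo` to recognise the `buckets/` mount convention alongside `gs://` URIs. A usage sketch assembled from the doctests above:

    from dapla_metadata.datasets.dapla_dataset_path_info import DaplaDatasetPathInfo

    # A path under a "buckets/" mount is now parsed like a gs:// URI.
    info = DaplaDatasetPathInfo(
        "buckets/ssb-staging-dapla-felles-data-delt/stat/utdata/person_data_p2021_v2.parquet",
    )
    print(info.bucket_name)           # ssb-staging-dapla-felles-data-delt
    print(info.statistic_short_name)  # stat
    print(info.dataset_short_name)    # person_data
    print(info.dataset_version)       # 2
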
--- dapla_toolbelt_metadata-0.5.0/src/dapla_metadata/datasets/dataset_parser.py
+++ dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/datasets/dataset_parser.py
@@ -89,7 +89,7 @@ TYPE_CORRESPONDENCE: list[tuple[tuple[str, ...], DataType]] = [
 ]
 TYPE_MAP: dict[str, DataType] = {}
 for concrete_type, abstract_type in TYPE_CORRESPONDENCE:
-    TYPE_MAP.update({c: abstract_type for c in concrete_type})
+    TYPE_MAP.update(dict.fromkeys(concrete_type, abstract_type))
 
 TDatasetParser = t.TypeVar("TDatasetParser", bound="DatasetParser")
 
@@ -112,31 +112,23 @@ class DatasetParser(ABC):
     @staticmethod
     def for_file(dataset: pathlib.Path | CloudPath) -> DatasetParser:
         """Return the correct subclass based on the given dataset file."""
-        supported_file_types: dict[
-            str,
-            type[DatasetParser],
-        ] = {
-            ".parquet": DatasetParserParquet,
-            ".sas7bdat": DatasetParserSas7Bdat,
-            ".parquet.gzip": DatasetParserParquet,
-        }
         file_type = "Unknown"
         try:
             file_type = dataset.suffix
             # Gzipped parquet files can be read with DatasetParserParquet
-            match = re.search(r"(.parquet.gzip)", str(dataset).lower())
-            file_type = ".parquet.gzip" if match else file_type
+            match = re.search(PARQUET_GZIP_FILE_SUFFIX, str(dataset).lower())
+            file_type = PARQUET_GZIP_FILE_SUFFIX if match else file_type
             # Extract the appropriate reader class from the SUPPORTED_FILE_TYPES dict and return an instance of it
-            reader = supported_file_types[file_type](dataset)
+            reader = SUPPORTED_DATASET_FILE_SUFFIXES[file_type](dataset)
         except IndexError as e:
             # Thrown when just one element is returned from split, meaning there is no file extension supplied
-            msg = f"Could not recognise file type for provided {dataset = }. Supported file types are: {', '.join(supported_file_types.keys())}"
+            msg = f"Could not recognise file type for provided {dataset = }. Supported file types are: {', '.join(SUPPORTED_DATASET_FILE_SUFFIXES.keys())}"
             raise FileNotFoundError(
                 msg,
             ) from e
         except KeyError as e:
             # In this case the file type is not supported, so we throw a helpful exception
-            msg = f"{file_type = } is not supported. Please open one of the following supported files types: {', '.join(supported_file_types.keys())} or contact the maintainers to request support."
+            msg = f"{file_type = } is not supported. Please open one of the following supported files types: {', '.join(SUPPORTED_DATASET_FILE_SUFFIXES.keys())} or contact the maintainers to request support."
             raise NotImplementedError(
                 msg,
             ) from e
@@ -239,3 +231,17 @@ class DatasetParserSas7Bdat(DatasetParser):
         )
 
         return fields
+
+
+PARQUET_FILE_SUFFIX = ".parquet"
+PARQUET_GZIP_FILE_SUFFIX = ".parquet.gzip"
+SAS7BDAT_FILE_SUFFIX = ".sas7bdat"
+
+SUPPORTED_DATASET_FILE_SUFFIXES: dict[
+    str,
+    type[DatasetParser],
+] = {
+    PARQUET_FILE_SUFFIX: DatasetParserParquet,
+    PARQUET_GZIP_FILE_SUFFIX: DatasetParserParquet,
+    SAS7BDAT_FILE_SUFFIX: DatasetParserSas7Bdat,
+}

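With the suffix-to-parser mapping promoted to the module-level `SUPPORTED_DATASET_FILE_SUFFIXES` constant, callers can inspect supported formats without triggering dispatch. A short sketch (the `.csv` path is hypothetical and never read, since the unsupported suffix fails before any I/O):

    import pathlib

    from dapla_metadata.datasets.dataset_parser import (
        SUPPORTED_DATASET_FILE_SUFFIXES,
        DatasetParser,
    )

    # The mapping is now importable, so support can be checked up front.
    print(sorted(SUPPORTED_DATASET_FILE_SUFFIXES))  # ['.parquet', '.parquet.gzip', '.sas7bdat']

    try:
        DatasetParser.for_file(pathlib.Path("person_data_v1.csv"))
    except NotImplementedError as exc:
        print(exc)  # names the supported suffixes, per the message in for_file
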
--- dapla_toolbelt_metadata-0.5.0/src/dapla_metadata/datasets/utility/utils.py
+++ dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/datasets/utility/utils.py
@@ -5,6 +5,7 @@ import logging
 import pathlib
 import uuid
 
+import google.auth
 from cloudpathlib import CloudPath
 from cloudpathlib import GSClient
 from cloudpathlib import GSPath
@@ -13,7 +14,6 @@ from datadoc_model.model import Assessment
 from datadoc_model.model import DataSetState
 from datadoc_model.model import VariableRole
 
-from dapla import AuthClient
 from dapla_metadata.dapla import user_info
 from dapla_metadata.datasets.utility.constants import (
     DATASET_FIELDS_FROM_EXISTING_METADATA,
@@ -52,7 +52,7 @@ def normalize_path(path: str) -> pathlib.Path | CloudPath:
         Pathlib compatible object.
     """
     if path.startswith(GSPath.cloud_prefix):
-        client = GSClient(credentials=AuthClient.fetch_google_credentials())
+        client = GSClient(credentials=google.auth.default()[0])
         return GSPath(path, client=client)
     return pathlib.Path(path)

--- /dev/null
+++ dapla_toolbelt_metadata-0.6.0/src/dapla_metadata/standards/__init__.py
@@ -0,0 +1,4 @@
+"""Expose information specific to validating ssb standards."""
+
+from .standard_validators import check_naming_standard
+from .standard_validators import generate_validation_report

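The new standards subpackage exposes two validators; only their names are visible in this diff, so the call shapes below are assumptions (the new `pytest-asyncio` dev dependency hints that the underlying API may be asynchronous):

    from dapla_metadata.standards import check_naming_standard
    from dapla_metadata.standards import generate_validation_report

    # Hypothetical usage: argument and return types are not shown in this diff.
    results = check_naming_standard("buckets/produkt/befolkning/utdata/person_data_v1.parquet")
    print(generate_validation_report(results))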