junifer 0.0.5.dev219__py3-none-any.whl → 0.0.5.dev242__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/_version.py +2 -2
- junifer/datagrabber/__init__.py +2 -0
- junifer/datagrabber/base.py +10 -6
- junifer/datagrabber/hcp1200/hcp1200.py +1 -1
- junifer/datagrabber/multiple.py +42 -6
- junifer/datagrabber/pattern.py +33 -10
- junifer/datagrabber/pattern_validation_mixin.py +388 -0
- junifer/datagrabber/tests/test_multiple.py +161 -84
- junifer/datagrabber/tests/{test_datagrabber_utils.py → test_pattern_validation_mixin.py} +133 -108
- junifer/utils/__init__.py +2 -1
- junifer/utils/helpers.py +30 -2
- junifer/utils/logging.py +18 -1
- junifer/utils/tests/test_logging.py +8 -0
- {junifer-0.0.5.dev219.dist-info → junifer-0.0.5.dev242.dist-info}/METADATA +1 -1
- {junifer-0.0.5.dev219.dist-info → junifer-0.0.5.dev242.dist-info}/RECORD +20 -20
- {junifer-0.0.5.dev219.dist-info → junifer-0.0.5.dev242.dist-info}/WHEEL +1 -1
- junifer/datagrabber/utils.py +0 -317
- {junifer-0.0.5.dev219.dist-info → junifer-0.0.5.dev242.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.5.dev219.dist-info → junifer-0.0.5.dev242.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.5.dev219.dist-info → junifer-0.0.5.dev242.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.5.dev219.dist-info → junifer-0.0.5.dev242.dist-info}/top_level.txt +0 -0
junifer/utils/helpers.py
CHANGED
@@ -3,13 +3,14 @@
 # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
+import collections.abc
 import subprocess
-from typing import List
+from typing import Dict, List
 
 from .logging import logger, raise_error
 
 
-__all__ = ["run_ext_cmd"]
+__all__ = ["run_ext_cmd", "deep_update"]
 
 
 def run_ext_cmd(name: str, cmd: List[str]) -> None:
@@ -54,3 +55,30 @@ def run_ext_cmd(name: str, cmd: List[str]) -> None:
             ),
             klass=RuntimeError,
         )
+
+
+def deep_update(d: Dict, u: Dict) -> Dict:
+    """Deep update `d` with `u`.
+
+    From: "https://stackoverflow.com/questions/3232943/update-value-of-a-nested
+    -dictionary-of-varying-depth"
+
+    Parameters
+    ----------
+    d : dict
+        The dictionary to deep-update.
+    u : dict
+        The dictionary to deep-update `d` with.
+
+    Returns
+    -------
+    dict
+        The updated dictionary.
+
+    """
+    for k, v in u.items():
+        if isinstance(v, collections.abc.Mapping):
+            d[k] = deep_update(d.get(k, {}), v)
+        else:
+            d[k] = v
+    return d
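For reference, the new `deep_update` merges nested mappings recursively instead of replacing them wholesale the way `dict.update` does, and it mutates and returns its first argument. A minimal sketch of that behaviour (the example dictionaries are illustrative only, not taken from junifer):

```python
from junifer.utils.helpers import deep_update

# Only the leaf value "pattern" is overwritten; the sibling key "space"
# in the nested dict survives the merge. `base` is updated in place.
base = {"BOLD": {"pattern": "old", "space": "MNI152NLin6Asym"}}
update = {"BOLD": {"pattern": "new"}}

merged = deep_update(base, update)
print(merged)  # {'BOLD': {'pattern': 'new', 'space': 'MNI152NLin6Asym'}}
```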
junifer/utils/logging.py
CHANGED
@@ -13,6 +13,7 @@ else:  # pragma: no cover
     from looseversion import LooseVersion
 
 import logging
+import warnings
 from pathlib import Path
 from subprocess import PIPE, Popen, TimeoutExpired
 from typing import Dict, NoReturn, Optional, Type, Union
@@ -44,6 +45,23 @@ _logging_types = {
 }
 
 
+# Copied over from stdlib and tweaked to our use-case.
+def _showwarning(message, category, filename, lineno, file=None, line=None):
+    s = warnings.formatwarning(message, category, filename, lineno, line)
+    logger.warning(str(s))
+
+
+# Overwrite warnings display to integrate with logging
+
+
+def capture_warnings():
+    """Capture warnings and log them."""
+    warnings.showwarning = _showwarning
+
+
+capture_warnings()
+
+
 class WrapStdOut(logging.StreamHandler):
     """Dynamically wrap to sys.stdout.
 
@@ -325,5 +343,4 @@ def warn_with_log(
         The warning subclass (default RuntimeWarning).
 
     """
-    logger.warning(msg)
     warn(msg, category=category, stacklevel=2)
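Taken together, these changes route Python warnings through junifer's logger via `warnings.showwarning`, instead of `warn_with_log` calling `logger.warning` itself. A minimal sketch of the resulting behaviour, assuming the default logging set-up (the warning messages are illustrative):

```python
import warnings

from junifer.utils.logging import capture_warnings, warn_with_log

# The module already installs the hook at import time; calling
# capture_warnings() again simply re-assigns warnings.showwarning
# to the logging-aware _showwarning.
capture_warnings()

# Issued as a RuntimeWarning and, through the hook, also written to the log.
warn_with_log("Subject sub-01 has no confounds file")

# Plain warnings from third-party code now end up in the log as well.
warnings.warn("deprecated option", UserWarning)
```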
junifer/utils/tests/test_logging.py
CHANGED
@@ -145,8 +145,16 @@ def test_log_file(tmp_path: Path) -> None:
     assert any("Warn3 message" in line for line in lines)
     assert any("Error3 message" in line for line in lines)
 
+    # This should raise a warning (test that it was raised)
     with pytest.warns(RuntimeWarning, match=r"Warn raised"):
         warn_with_log("Warn raised")
+
+    # This should log the warning (workaround for pytest messing with logging)
+    from junifer.utils.logging import capture_warnings
+
+    capture_warnings()
+
+    warn_with_log("Warn raised 2")
     with pytest.raises(ValueError, match=r"Error raised"):
         raise_error("Error raised")
     with open(tmp_path / "test4.log") as f:
{junifer-0.0.5.dev219.dist-info → junifer-0.0.5.dev242.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: junifer
-Version: 0.0.5.dev219
+Version: 0.0.5.dev242
 Summary: JUelich NeuroImaging FEature extractoR
 Author-email: Fede Raimondo <f.raimondo@fz-juelich.de>, Synchon Mandal <s.mandal@fz-juelich.de>
 Maintainer-email: Fede Raimondo <f.raimondo@fz-juelich.de>, Synchon Mandal <s.mandal@fz-juelich.de>
{junifer-0.0.5.dev219.dist-info → junifer-0.0.5.dev242.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 junifer/__init__.py,sha256=-T9XmiCCL0j3YLx-0Pph15sPfL5FlcBDscajjJ-V4sU,604
-junifer/_version.py,sha256=
+junifer/_version.py,sha256=5ar8KTPD2riEkS9RKe7tYuw4gZesZ5Gonw6vdJ43Z_8,428
 junifer/stats.py,sha256=BjQb2lfTGDP9l4UuQYmJFcJJNRfbJDGlNvC06SJaDDE,6237
 junifer/api/__init__.py,sha256=lwyIF0hPc7fICuSoddJfay0LPqlTRxHJ_xbtizgFYZA,312
 junifer/api/cli.py,sha256=53pews3mXkJ7DUDSkV51PbitYnuVAdQRkWG-gjO08Uw,16142
@@ -90,14 +90,14 @@ junifer/data/tests/test_data_utils.py,sha256=_DaiC8K79gs9HFHxr-udNeE2YTM6JA0-1i-
 junifer/data/tests/test_masks.py,sha256=pL42xTzrvy0qLCqpG5p5CdCCqjJ9n5nz7BCUofydfag,15723
 junifer/data/tests/test_parcellations.py,sha256=ZEU1VHIK0AyxpclcJhG_0rQU0phaBU_dHP7Erfi3mN8,38222
 junifer/data/tests/test_template_spaces.py,sha256=PJulN7xHpAcSOTY-UzTG_WPywZEBSlAZGiNG4gzk1_8,3144
-junifer/datagrabber/__init__.py,sha256=
-junifer/datagrabber/base.py,sha256=
+junifer/datagrabber/__init__.py,sha256=kYvlrRS6f64fwntAuyk_fL2c_vyw9m9mZpMWpm_cxEM,1058
+junifer/datagrabber/base.py,sha256=fFPIt6p3SdZ6vzReGQxK2qJnQzh8HTwBe87A2WYArVI,6538
 junifer/datagrabber/datalad_base.py,sha256=6B7XMIMFlBw3uixDnfoaH4gBU9EzJIO5gwmc0iHniRo,11044
 junifer/datagrabber/dmcc13_benchmark.py,sha256=se9F6QVw9WX22MNld33OQv_BtdW-yPvXXu6kYykxLNw,12225
-junifer/datagrabber/multiple.py,sha256=
-junifer/datagrabber/pattern.py,sha256=
+junifer/datagrabber/multiple.py,sha256=IjBcFN-KegIad9bwopsfAQ9b_WRRUHmCbKRX2gmus0Q,6487
+junifer/datagrabber/pattern.py,sha256=iSubnHOJjYck1Zhk_JAYRZjRPKYmfoO81tG1zowd3B4,17039
 junifer/datagrabber/pattern_datalad.py,sha256=QPWXIToYHDU4mvm9lz_hy8BjdqqoCXiGiJKCcATrT-w,4568
-junifer/datagrabber/
+junifer/datagrabber/pattern_validation_mixin.py,sha256=SSnTdUA7elaTh9HF7syvW-lTBS1VgdSkyOJiw5mT2Vw,13469
 junifer/datagrabber/aomic/__init__.py,sha256=lRezU9dyIEoL4tJYglMX01P-F5_hrnZrJxuaQxF_z2w,358
 junifer/datagrabber/aomic/id1000.py,sha256=wJpZiSZrsfil5w-506bOtKMWm3FllnbB8-cMvGDPiLM,7219
 junifer/datagrabber/aomic/piop1.py,sha256=AcjIueSUmhepuIfmbMEpocV7grUbJ2xKXG94O1dq2FA,9637
@@ -107,15 +107,15 @@ junifer/datagrabber/aomic/tests/test_piop1.py,sha256=J9ei2HLzdJPciysWjRo33cbZsqP
 junifer/datagrabber/aomic/tests/test_piop2.py,sha256=Bk23KvRse4clMTuC88YntSfJnJyTunafC79Y1OJwJI0,4166
 junifer/datagrabber/hcp1200/__init__.py,sha256=yKINewhzkPRcqVpBd6DG02jrs3qLsUbUsi93YJZOUIk,231
 junifer/datagrabber/hcp1200/datalad_hcp1200.py,sha256=hngQYLv4b8tC9Ep2X5A5R_L2sFM3ZJ8dmWTr_OlRLAA,2463
-junifer/datagrabber/hcp1200/hcp1200.py,sha256=
+junifer/datagrabber/hcp1200/hcp1200.py,sha256=AfVPd44CdyMcrUTOfps2PSpTQrXde68QeZaLGkXUTn4,6116
 junifer/datagrabber/hcp1200/tests/test_hcp1200.py,sha256=KJ-Jq01l0a6TaboG898qjBdPTHG1E3PZtHCjJ7n-1X0,10765
 junifer/datagrabber/tests/test_base.py,sha256=fZdVhNhvfht9lpTHrAUf5E6mAfNNUP7OTQ5KLaBQ1gI,3506
-junifer/datagrabber/tests/test_datagrabber_utils.py,sha256=x1nqFiHI9xHBQFwXji0DPUG8crVoEzw6zmi3pKdhJQk,6513
 junifer/datagrabber/tests/test_datalad_base.py,sha256=71erxpAECuy8iLtkmq_SRqfP4sKQf4uEb3O8CThBHT0,16285
 junifer/datagrabber/tests/test_dmcc13_benchmark.py,sha256=DcqkDXXBoabHFVbxekGR2NZyGeugGlxpOwXIwy38Ofg,9109
-junifer/datagrabber/tests/test_multiple.py,sha256=
+junifer/datagrabber/tests/test_multiple.py,sha256=gdekgSHyRx_EtcMNQpJsGEyo56xSxH5-XSQRQ5P2zt4,8288
 junifer/datagrabber/tests/test_pattern.py,sha256=H55jYRPfT3rMsoIQOAnWJgw3nGrkU7m2xFa3-ed6NQE,9527
 junifer/datagrabber/tests/test_pattern_datalad.py,sha256=5lA4hkYNaIAVy3GjcVqBXj1d-3qd8-14Pv0z6QGqgtI,6483
+junifer/datagrabber/tests/test_pattern_validation_mixin.py,sha256=4diiRduuqMxOZpfWBoe-O9AhDPYb7DLQU8-JaWAfTrg,7494
 junifer/datareader/__init__.py,sha256=9TXyHpy5o-yfs-q8QHGfjUCcWdORp7qv_jrokAOPBP8,293
 junifer/datareader/default.py,sha256=peNQTYHx9x3ZqGjm2Uj5yCLlsJ6X86r0f2XiUgnpw1M,6745
 junifer/datareader/tests/test_default_reader.py,sha256=9dPZSkba1YQjFsA0XwdUbx5sq8DVIEZoy_WfMAcvRus,5220
@@ -259,17 +259,17 @@ junifer/testing/tests/test_spmauditory_datagrabber.py,sha256=1G1emk-Ze59HiNLaYsy
 junifer/testing/tests/test_testing_registry.py,sha256=oerticBaPRaRZm3yANzInLac0Mqph3Y0aZPQFayu7xA,827
 junifer/tests/test_main.py,sha256=GMff7jlisGM9_FsiUwWDte43j-KQJGFRYZpwRRqTkd8,373
 junifer/tests/test_stats.py,sha256=3vPMgYYpWxk8ECDFOMm3-dFBlh4XxjL83SwRBSBAHok,4155
-junifer/utils/__init__.py,sha256=
+junifer/utils/__init__.py,sha256=F_I7WXtZMrBGGNLN09LvzBRwWKopL2k1z0UgCZvpwj0,471
 junifer/utils/fs.py,sha256=M3CKBLh4gPS6s9giyopgb1hHMXzLb6k3cung2wHVBjs,492
-junifer/utils/helpers.py,sha256=
-junifer/utils/logging.py,sha256=
+junifer/utils/helpers.py,sha256=D17zdq1y92fOc-5nwnhaXfgbk8o5yHocLSct9E05YUk,1996
+junifer/utils/logging.py,sha256=ardaiJkDfZMYvak5UIL5Etxg5Ii7inmVQSBdFLdgtb8,9781
 junifer/utils/tests/test_fs.py,sha256=WQS7cKlKEZ742CIuiOYYpueeAhY9PqlastfDVpVVtvE,923
 junifer/utils/tests/test_helpers.py,sha256=k5qqfxK8dFyuewTJyR1Qn6-nFaYNuVr0ysc18bfPjyU,929
-junifer/utils/tests/test_logging.py,sha256=
-junifer-0.0.5.
-junifer-0.0.5.
-junifer-0.0.5.
-junifer-0.0.5.
-junifer-0.0.5.
-junifer-0.0.5.
-junifer-0.0.5.
+junifer/utils/tests/test_logging.py,sha256=duO4ou365hxwa_kwihFtKPLaL6LC5XHiyhOijrrngbA,8009
+junifer-0.0.5.dev242.dist-info/AUTHORS.rst,sha256=rmULKpchpSol4ExWFdm-qu4fkpSZPYqIESVJBZtGb6E,163
+junifer-0.0.5.dev242.dist-info/LICENSE.md,sha256=MqCnOBu8uXsEOzRZWh9EBVfVz-kE9NkXcLCrtGXo2yU,34354
+junifer-0.0.5.dev242.dist-info/METADATA,sha256=xrjlsQCL9WQaZXNlEaFdd13Wn30Ff1NogZdJDtgMpGM,8280
+junifer-0.0.5.dev242.dist-info/WHEEL,sha256=Wyh-_nZ0DJYolHNn1_hMa4lM7uDedD_RGVwbmTjyItk,91
+junifer-0.0.5.dev242.dist-info/entry_points.txt,sha256=DxFvKq0pOqRunAK0FxwJcoDfV1-dZvsFDpD5HRqSDhw,48
+junifer-0.0.5.dev242.dist-info/top_level.txt,sha256=4bAq1R2QFQ4b3hohjys2JBvxrl0GKk5LNFzYvz9VGcA,8
+junifer-0.0.5.dev242.dist-info/RECORD,,
junifer/datagrabber/utils.py
DELETED
@@ -1,317 +0,0 @@
-"""Provide utility functions for the datagrabber sub-package."""
-
-# Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
-#          Synchon Mandal <s.mandal@fz-juelich.de>
-# License: AGPL
-
-from typing import Dict, List
-
-from ..utils import logger, raise_error
-
-
-__all__ = ["validate_types", "validate_replacements", "validate_patterns"]
-
-
-# Define schema for pattern-based datagrabber's patterns
-PATTERNS_SCHEMA = {
-    "T1w": {
-        "mandatory": ["pattern", "space"],
-        "optional": {
-            "mask": {"mandatory": ["pattern", "space"], "optional": []},
-        },
-    },
-    "T2w": {
-        "mandatory": ["pattern", "space"],
-        "optional": {
-            "mask": {"mandatory": ["pattern", "space"], "optional": []},
-        },
-    },
-    "BOLD": {
-        "mandatory": ["pattern", "space"],
-        "optional": {
-            "mask": {"mandatory": ["pattern", "space"], "optional": []},
-            "confounds": {
-                "mandatory": ["pattern", "format"],
-                "optional": ["mappings"],
-            },
-        },
-    },
-    "Warp": {
-        "mandatory": ["pattern", "src", "dst"],
-        "optional": {},
-    },
-    "VBM_GM": {
-        "mandatory": ["pattern", "space"],
-        "optional": {},
-    },
-    "VBM_WM": {
-        "mandatory": ["pattern", "space"],
-        "optional": {},
-    },
-    "VBM_CSF": {
-        "mandatory": ["pattern", "space"],
-        "optional": {},
-    },
-    "DWI": {
-        "mandatory": ["pattern"],
-        "optional": {},
-    },
-    "FreeSurfer": {
-        "mandatory": ["pattern"],
-        "optional": {
-            "aseg": {"mandatory": ["pattern"], "optional": []},
-            "norm": {"mandatory": ["pattern"], "optional": []},
-            "lh_white": {"mandatory": ["pattern"], "optional": []},
-            "rh_white": {"mandatory": ["pattern"], "optional": []},
-            "lh_pial": {"mandatory": ["pattern"], "optional": []},
-            "rh_pial": {"mandatory": ["pattern"], "optional": []},
-        },
-    },
-}
-
-
-def validate_types(types: List[str]) -> None:
-    """Validate the types.
-
-    Parameters
-    ----------
-    types : list of str
-        The object to validate.
-
-    Raises
-    ------
-    TypeError
-        If ``types`` is not a list or if the values are not string.
-
-    """
-    if not isinstance(types, list):
-        raise_error(msg="`types` must be a list", klass=TypeError)
-    if any(not isinstance(x, str) for x in types):
-        raise_error(msg="`types` must be a list of strings", klass=TypeError)
-
-
-def validate_replacements(
-    replacements: List[str], patterns: Dict[str, Dict[str, str]]
-) -> None:
-    """Validate the replacements.
-
-    Parameters
-    ----------
-    replacements : list of str
-        The object to validate.
-    patterns : dict
-        The patterns to validate against.
-
-    Raises
-    ------
-    TypeError
-        If ``replacements`` is not a list or if the values are not string.
-    ValueError
-        If a value in ``replacements`` is not part of a data type pattern or
-        if no data type patterns contain all values in ``replacements``.
-
-    """
-    if not isinstance(replacements, list):
-        raise_error(msg="`replacements` must be a list.", klass=TypeError)
-
-    if any(not isinstance(x, str) for x in replacements):
-        raise_error(
-            msg="`replacements` must be a list of strings.", klass=TypeError
-        )
-
-    for x in replacements:
-        if all(
-            x not in y
-            for y in [
-                data_type_val["pattern"] for data_type_val in patterns.values()
-            ]
-        ):
-            raise_error(msg=f"Replacement: {x} is not part of any pattern.")
-
-    # Check that at least one pattern has all the replacements
-    at_least_one = False
-    for data_type_val in patterns.values():
-        if all(x in data_type_val["pattern"] for x in replacements):
-            at_least_one = True
-    if at_least_one is False:
-        raise_error(msg="At least one pattern must contain all replacements.")
-
-
-def _validate_mandatory_keys(
-    keys: List[str], schema: List[str], data_type: str
-) -> None:
-    """Validate mandatory keys.
-
-    Parameters
-    ----------
-    keys : list of str
-        The keys to validate.
-    schema : list of str
-        The schema to validate against.
-    data_type : str
-        The data type being validated.
-
-    Raises
-    ------
-    KeyError
-        If any mandatory key is missing for a data type.
-
-    """
-    for key in schema:
-        if key not in keys:
-            raise_error(
-                msg=f"Mandatory key: `{key}` missing for {data_type}",
-                klass=KeyError,
-            )
-        else:
-            logger.debug(f"Mandatory key: `{key}` found for {data_type}")
-
-
-def _identify_stray_keys(
-    keys: List[str], schema: List[str], data_type: str
-) -> None:
-    """Identify stray keys.
-
-    Parameters
-    ----------
-    keys : list of str
-        The keys to check.
-    schema : list of str
-        The schema to check against.
-    data_type : str
-        The data type being checked.
-
-    Raises
-    ------
-    RuntimeError
-        If an unknown key is found for a data type.
-
-    """
-    for key in keys:
-        if key not in schema:
-            raise_error(
-                msg=(
-                    f"Key: {key} not accepted for {data_type} "
-                    "pattern, remove it to proceed"
-                ),
-                klass=RuntimeError,
-            )
-
-
-def validate_patterns(
-    types: List[str], patterns: Dict[str, Dict[str, str]]
-) -> None:
-    """Validate the patterns.
-
-    Parameters
-    ----------
-    types : list of str
-        The types list.
-    patterns : dict
-        The object to validate.
-
-    Raises
-    ------
-    TypeError
-        If ``patterns`` is not a dictionary.
-    ValueError
-        If length of ``types`` and ``patterns`` are different or
-        if ``patterns`` is missing entries from ``types`` or
-        if unknown data type is found in ``patterns`` or
-        if data type pattern key contains '*' as value.
-
-    """
-    # Validate the types
-    validate_types(types)
-    if not isinstance(patterns, dict):
-        raise_error(msg="`patterns` must be a dict.", klass=TypeError)
-    # Unequal length of objects
-    if len(types) > len(patterns):
-        raise_error(
-            msg="Length of `types` more than that of `patterns`.",
-            klass=ValueError,
-        )
-    # Missing type in patterns
-    if any(x not in patterns for x in types):
-        raise_error(
-            msg="`patterns` must contain all `types`", klass=ValueError
-        )
-    # Check against schema
-    for data_type_key, data_type_val in patterns.items():
-        # Check if valid data type is provided
-        if data_type_key not in PATTERNS_SCHEMA:
-            raise_error(
-                f"Unknown data type: {data_type_key}, "
-                f"should be one of: {list(PATTERNS_SCHEMA.keys())}"
-            )
-        # Check mandatory keys for data type
-        _validate_mandatory_keys(
-            keys=list(data_type_val),
-            schema=PATTERNS_SCHEMA[data_type_key]["mandatory"],
-            data_type=data_type_key,
-        )
-        # Check optional keys for data type
-        for optional_key, optional_val in PATTERNS_SCHEMA[data_type_key][
-            "optional"
-        ].items():
-            if optional_key not in data_type_val:
-                logger.debug(
-                    f"Optional key: `{optional_key}` missing for "
-                    f"{data_type_key}"
-                )
-            else:
-                logger.debug(
-                    f"Optional key: `{optional_key}` found for "
-                    f"{data_type_key}"
-                )
-                # Set nested type name for easier access
-                nested_data_type = f"{data_type_key}.{optional_key}"
-                nested_mandatory_keys_schema = PATTERNS_SCHEMA[data_type_key][
-                    "optional"
-                ][optional_key]["mandatory"]
-                nested_optional_keys_schema = PATTERNS_SCHEMA[data_type_key][
-                    "optional"
-                ][optional_key]["optional"]
-                # Check mandatory keys for nested type
-                _validate_mandatory_keys(
-                    keys=list(optional_val["mandatory"]),
-                    schema=nested_mandatory_keys_schema,
-                    data_type=nested_data_type,
-                )
-                # Check optional keys for nested type
-                for nested_optional_key in nested_optional_keys_schema:
-                    if nested_optional_key not in optional_val["optional"]:
-                        logger.debug(
-                            f"Optional key: `{nested_optional_key}` missing "
-                            f"for {nested_data_type}"
-                        )
-                    else:
-                        logger.debug(
-                            f"Optional key: `{nested_optional_key}` found for "
-                            f"{nested_data_type}"
-                        )
-                # Check stray key for nested data type
-                _identify_stray_keys(
-                    keys=optional_val["mandatory"] + optional_val["optional"],
-                    schema=nested_mandatory_keys_schema
-                    + nested_optional_keys_schema,
-                    data_type=nested_data_type,
-                )
-        # Check stray key for data type
-        _identify_stray_keys(
-            keys=list(data_type_val.keys()),
-            schema=(
-                PATTERNS_SCHEMA[data_type_key]["mandatory"]
-                + list(PATTERNS_SCHEMA[data_type_key]["optional"].keys())
-            ),
-            data_type=data_type_key,
-        )
-        # Wildcard check in patterns
-        if "}*" in data_type_val["pattern"]:
-            raise_error(
-                msg=(
-                    f"`{data_type_key}.pattern` must not contain `*` "
-                    "following a replacement"
-                ),
-                klass=ValueError,
-            )
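For context, the helpers removed here are superseded by `PatternValidationMixin` (added in `pattern_validation_mixin.py`, see the file list above). A minimal sketch of how the old free functions were called, with an illustrative pattern dictionary rather than one taken from junifer:

```python
# Old API, removed in this release; shown only for comparison with the mixin.
from junifer.datagrabber.utils import validate_patterns, validate_types

patterns = {
    "BOLD": {
        "pattern": "{subject}/func/{subject}_task-rest_bold.nii.gz",
        "space": "MNI152NLin6Asym",
    },
}
types = list(patterns.keys())

validate_types(types)               # TypeError if not a list of str
validate_patterns(types, patterns)  # KeyError / RuntimeError / ValueError on schema violations
```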