ChessAnalysisPipeline 0.0.17.dev3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHAP/TaskManager.py +216 -0
- CHAP/__init__.py +27 -0
- CHAP/common/__init__.py +57 -0
- CHAP/common/models/__init__.py +8 -0
- CHAP/common/models/common.py +124 -0
- CHAP/common/models/integration.py +659 -0
- CHAP/common/models/map.py +1291 -0
- CHAP/common/processor.py +2869 -0
- CHAP/common/reader.py +658 -0
- CHAP/common/utils.py +110 -0
- CHAP/common/writer.py +730 -0
- CHAP/edd/__init__.py +23 -0
- CHAP/edd/models.py +876 -0
- CHAP/edd/processor.py +3069 -0
- CHAP/edd/reader.py +1023 -0
- CHAP/edd/select_material_params_gui.py +348 -0
- CHAP/edd/utils.py +1572 -0
- CHAP/edd/writer.py +26 -0
- CHAP/foxden/__init__.py +19 -0
- CHAP/foxden/models.py +71 -0
- CHAP/foxden/processor.py +124 -0
- CHAP/foxden/reader.py +224 -0
- CHAP/foxden/utils.py +80 -0
- CHAP/foxden/writer.py +168 -0
- CHAP/giwaxs/__init__.py +11 -0
- CHAP/giwaxs/models.py +491 -0
- CHAP/giwaxs/processor.py +776 -0
- CHAP/giwaxs/reader.py +8 -0
- CHAP/giwaxs/writer.py +8 -0
- CHAP/inference/__init__.py +7 -0
- CHAP/inference/processor.py +69 -0
- CHAP/inference/reader.py +8 -0
- CHAP/inference/writer.py +8 -0
- CHAP/models.py +227 -0
- CHAP/pipeline.py +479 -0
- CHAP/processor.py +125 -0
- CHAP/reader.py +124 -0
- CHAP/runner.py +277 -0
- CHAP/saxswaxs/__init__.py +7 -0
- CHAP/saxswaxs/processor.py +8 -0
- CHAP/saxswaxs/reader.py +8 -0
- CHAP/saxswaxs/writer.py +8 -0
- CHAP/server.py +125 -0
- CHAP/sin2psi/__init__.py +7 -0
- CHAP/sin2psi/processor.py +8 -0
- CHAP/sin2psi/reader.py +8 -0
- CHAP/sin2psi/writer.py +8 -0
- CHAP/tomo/__init__.py +15 -0
- CHAP/tomo/models.py +210 -0
- CHAP/tomo/processor.py +3862 -0
- CHAP/tomo/reader.py +9 -0
- CHAP/tomo/writer.py +59 -0
- CHAP/utils/__init__.py +6 -0
- CHAP/utils/converters.py +188 -0
- CHAP/utils/fit.py +2947 -0
- CHAP/utils/general.py +2655 -0
- CHAP/utils/material.py +274 -0
- CHAP/utils/models.py +595 -0
- CHAP/utils/parfile.py +224 -0
- CHAP/writer.py +122 -0
- MLaaS/__init__.py +0 -0
- MLaaS/ktrain.py +205 -0
- MLaaS/mnist_img.py +83 -0
- MLaaS/tfaas_client.py +371 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/LICENSE +60 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/METADATA +29 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/RECORD +70 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/WHEEL +5 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/entry_points.txt +2 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/top_level.txt +2 -0
CHAP/TaskManager.py
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Python thread pool, see
|
|
3
|
+
http://code.activestate.com/recipes/577187-python-thread-pool/
|
|
4
|
+
Author: Valentin Kuznetsov <vkuznet [AT] gmail [DOT] com>
|
|
5
|
+
"""
|
|
6
|
+
from builtins import range
|
|
7
|
+
|
|
8
|
+
# System modules
|
|
9
|
+
import time
|
|
10
|
+
import json
|
|
11
|
+
import hashlib
|
|
12
|
+
import logging
|
|
13
|
+
import threading
|
|
14
|
+
from queue import Queue
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def genkey(query):
    """Generate a new key-hash for a given query. We use md5 hash for
    the query and key is just hex representation of this hash.

    :param query: query string, or dict to be canonicalized and hashed
    :return: hex digest of the md5 hash of the query
    :rtype: str
    """
    if isinstance(query, dict):
        # Canonicalize dicts so logically-equal queries hash the same
        # regardless of key insertion order. json.dumps(sort_keys=True)
        # produces output identical to JSONEncoder(sort_keys=True).
        query = json.dumps(query, sort_keys=True)
    # md5 is used purely as a fast fingerprint, not for security.
    keyhash = hashlib.md5()
    keyhash.update(query.encode('utf-8', 'strict'))
    return keyhash.hexdigest()
|
|
27
|
+
|
|
28
|
+
def set_thread_name(ident, name):
    """Set thread name for a given thread identifier.

    Renames the first live thread whose ``ident`` matches; does
    nothing if no such thread exists.
    """
    candidates = (thr for thr in threading.enumerate()
                  if thr.ident == ident)
    target = next(candidates, None)
    if target is not None:
        target.name = name
|
|
34
|
+
|
|
35
|
+
class StoppableThread(threading.Thread):
    """Thread class with a stop() method. The thread itself has to
    check regularly for the stopped() condition.
    """

    def __init__(self, target, name, args):
        """Create the thread and its associated stop event."""
        super().__init__(target=target, name=name, args=args)
        # Event that flags a stop request; the target must poll it.
        self._stop_event = threading.Event()

    def stop(self):
        """Request the thread to stop by setting the stop event."""
        self._stop_event.set()

    def stopped(self):
        """Return True once stop() has been called."""
        return self._stop_event.is_set()

    def running(self):
        """Return True while stop() has not been called."""
        return not self.stopped()
|
|
55
|
+
|
|
56
|
+
def start_new_thread(name, func, args, unique=False):
    """Wrapper around standard thread start call.

    :param name: name to assign to the new thread
    :param func: target callable
    :param args: positional arguments passed to *func*
    :param unique: if True, return an already running thread with the
        same name (if any) instead of starting a duplicate
    :return: the started (or pre-existing) thread object
    """
    if unique:
        # BUGFIX: the previous code called sorted(threading.enumerate()),
        # which raises TypeError in Python 3 because Thread objects are
        # not orderable; a plain scan is all the name lookup needs.
        for thr in threading.enumerate():
            if name == thr.name:
                return thr
    thr = StoppableThread(target=func, name=name, args=args)
    thr.daemon = True
    thr.start()
    return thr
|
|
68
|
+
|
|
69
|
+
class UidSet():
    """UID holder keeps track of uid frequency."""

    def __init__(self):
        # Mapping of uid -> number of times it was added.
        self.set = {}

    def add(self, uid):
        """Add given uid or increment uid occurrence in a set.

        Falsy uids (None, '', 0) are ignored.
        """
        if not uid:
            return
        self.set[uid] = self.set.get(uid, 0) + 1

    def discard(self, uid):
        """Either discard or downgrade uid occurrence in a set."""
        if uid in self.set:
            self.set[uid] -= 1
            # Drop the entry entirely once its count reaches zero.
            if not self.set[uid]:
                del self.set[uid]

    def __contains__(self, uid):
        """Check if uid is present in the set."""
        return uid in self.set

    def get(self, uid):
        """Get occurrence count for given uid (0 if absent)."""
        return self.set.get(uid, 0)
|
|
99
|
+
|
|
100
|
+
class Worker(threading.Thread):
    """Thread executing worker from a given tasks queue."""

    def __init__(self, name, taskq, pidq, uidq, logger=None):
        """Create the worker and immediately start its daemon thread.

        :param name: thread name
        :param taskq: queue the worker consumes tasks from
        :param pidq: container of pids of in-flight tasks
        :param uidq: uid tracker (held, not used by the run loop)
        :param logger: logger for task failures; root logger if None
        """
        self.logger = logging.getLogger() if logger is None else logger
        super().__init__(name=name)
        self.exit = 0
        self.tasks = taskq
        self.pids = pidq
        self.uids = uidq
        self.daemon = True
        self.start()

    def force_exit(self):
        """Force run loop to exit in a hard way."""
        self.exit = 1

    def run(self):
        """Consume tasks from the queue until told to quit."""
        while not self.exit:
            task = self.tasks.get()
            # None is the poison pill; also honor a force_exit request
            # that arrived while we were blocked on the queue.
            if task is None or self.exit:
                return
            if isinstance(task, str):
                print(f'Worker daemon run {task}')
            elif isinstance(task, tuple) and len(task) == 5:
                evt, pid, func, args, kwargs = task
                try:
                    func(*args, **kwargs)
                except Exception as exc:
                    self.pids.discard(pid)
                    msg = f'func={func} args={args} kwargs={kwargs}'
                    self.logger.error(f'error {str(exc)}, call {msg}')
                else:
                    self.pids.discard(pid)
                # Signal completion whether the call succeeded or not.
                evt.set()
            else:
                print(f'Unsupported task {task}')
|
|
140
|
+
|
|
141
|
+
class TaskManager():
    """Task manager class based on thread module which executes
    assigned tasks concurrently. It uses a pool of thread workers,
    queue of tasks and pid set to monitor jobs execution.

    .. doctest::

        Use case:
        mgr = TaskManager()
        jobs = []
        jobs.append(mgr.spawn(func, args))
        mgr.joinall(jobs)
    """

    def __init__(self, nworkers=10, name='TaskManager'):
        """Initialize the manager and start its pool of workers.

        :param nworkers: number of worker threads, defaults to 10
        :param name: manager (and worker thread) name
        """
        self.logger = logging.getLogger()
        self.name = name
        self.pids = set()
        self.uids = UidSet()
        self.tasks = Queue()
        self.workers = [
            Worker(name, self.tasks, self.pids, self.uids, self.logger)
            for _ in range(nworkers)]

    def status(self):
        """Return status of task manager queue."""
        info = {'qsize': self.tasks.qsize(), 'full': self.tasks.full(),
                'unfinished': self.tasks.unfinished_tasks,
                'nworkers': len(self.workers)}
        return {self.name: info}

    def nworkers(self):
        """Return number of workers associated with this manager."""
        return len(self.workers)

    def spawn(self, func, *args, **kwargs):
        """Spawn a new task for the given function.

        :param func: callable to execute
        :return: pair (event, pid); the event is set when the task
            completes, or immediately if the pid is already running
        """
        # Only compute the hash-based pid when the caller did not
        # provide one (the original always evaluated genkey, even
        # when an explicit pid was passed).
        if 'pid' in kwargs:
            pid = kwargs['pid']
        else:
            pid = genkey(str(args) + str(kwargs))
        evt = threading.Event()
        if pid not in self.pids:
            self.pids.add(pid)
            self.tasks.put((evt, pid, func, args, kwargs))
        else:
            # the event was not added to task list, invoke set()
            # to pass it in wait() call, see joinall
            evt.set()
        return evt, pid

    def remove(self, pid):
        """Remove pid and associative process from the queue."""
        self.pids.discard(pid)

    def is_alive(self, pid):
        """Check worker queue if given pid of the process is still
        running.
        """
        return pid in self.pids

    def clear(self, tasks):
        """Clear all tasks in a queue. It allows current jobs to run,
        but will block all new requests till workers event flag is set
        again.
        """
        # Each task is a return of spawn, i.e. a pair (evt, pid);
        # plain loop instead of a side-effect list comprehension.
        for task in tasks:
            task[0].clear()

    def joinall(self, tasks):
        """Join all tasks in a queue and quit."""
        # Each task is a return of spawn, i.e. a pair (evt, pid)
        for task in tasks:
            task[0].wait()

    def quit(self):
        """Put None task to all workers and let them quit."""
        for _ in self.workers:
            self.tasks.put(None)
        # Let workers threads cool-off and quit
        time.sleep(1)
|
CHAP/__init__.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"""The ChessAnalysisPipeline (CHAP) provides infrastructure to
|
|
2
|
+
construct and run X-ray data processing / analysis workflows using a
|
|
3
|
+
set of modular components. We call these components `PipelineItem`s
|
|
4
|
+
(subclassed into `Reader`s, `Processor`s, and `Writer`s). A `Pipeline`
|
|
5
|
+
uses a sequence of `PipelineItem`s to execute a data processing
|
|
6
|
+
workflow where the data returned by one `PipelineItem` becomes input
|
|
7
|
+
for the next one.
|
|
8
|
+
|
|
9
|
+
Many `PipelineItem`s can be shared by data processing workflows for
|
|
10
|
+
multiple different X-ray techniques, while others may be unique to
|
|
11
|
+
just a single technique. The `PipelineItem`s that are shared by many
|
|
12
|
+
techniques are organized in the `CHAP.common` subpackage.
|
|
13
|
+
`PipelineItem`s unique to a tomography workflow, for instance, are
|
|
14
|
+
organized in the `CHAP.tomo` subpackage.
|
|
15
|
+
|
|
16
|
+
[`CHAP.utils`](CHAP.utils.md) contains a
|
|
17
|
+
broad selection of utilities to assist in some common tasks that
|
|
18
|
+
appear in specific `Processor` implementations.
|
|
19
|
+
"""
|
|
20
|
+
|
|
21
|
+
from CHAP.models import CHAPBaseModel
|
|
22
|
+
from CHAP.reader import Reader
|
|
23
|
+
from CHAP.processor import Processor
|
|
24
|
+
from CHAP.writer import Writer
|
|
25
|
+
|
|
26
|
+
version = 'v0.0.17.dev3'
|
|
27
|
+
#version = 'v0.0.17'
|
CHAP/common/__init__.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
"""This subpackage of `CHAP` contains `PipelineItem`\ s that are or can
|
|
2
|
+
be used in workflows for processing data from multiple different X-ray
|
|
3
|
+
techniques.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from CHAP.common.processor import (
|
|
7
|
+
# AnimationProcessor,
|
|
8
|
+
AsyncProcessor,
|
|
9
|
+
BinarizeProcessor,
|
|
10
|
+
ConvertStructuredProcessor,
|
|
11
|
+
ImageProcessor,
|
|
12
|
+
MapProcessor,
|
|
13
|
+
MPICollectProcessor,
|
|
14
|
+
MPIMapProcessor,
|
|
15
|
+
MPISpawnMapProcessor,
|
|
16
|
+
NexusToNumpyProcessor,
|
|
17
|
+
# NexusToTiffsprocessor,
|
|
18
|
+
NexusToXarrayProcessor,
|
|
19
|
+
NormalizeNexusProcessor,
|
|
20
|
+
NormalizeMapProcessor,
|
|
21
|
+
PrintProcessor,
|
|
22
|
+
PyfaiAzimuthalIntegrationProcessor,
|
|
23
|
+
RawDetectorDataMapProcessor,
|
|
24
|
+
SetupNXdataProcessor,
|
|
25
|
+
UpdateNXvalueProcessor,
|
|
26
|
+
UpdateNXdataProcessor,
|
|
27
|
+
UnstructuredToStructuredProcessor,
|
|
28
|
+
NXdataToDataPointsProcessor,
|
|
29
|
+
XarrayToNexusProcessor,
|
|
30
|
+
XarrayToNumpyProcessor,
|
|
31
|
+
# SumProcessor,
|
|
32
|
+
)
|
|
33
|
+
from CHAP.common.reader import (
|
|
34
|
+
BinaryFileReader,
|
|
35
|
+
ConfigReader,
|
|
36
|
+
FabioImageReader,
|
|
37
|
+
H5Reader,
|
|
38
|
+
LinkamReader,
|
|
39
|
+
NexusReader,
|
|
40
|
+
NXdataReader,
|
|
41
|
+
NXfieldReader,
|
|
42
|
+
SpecReader,
|
|
43
|
+
URLReader,
|
|
44
|
+
YAMLReader,
|
|
45
|
+
)
|
|
46
|
+
from CHAP.common.writer import (
|
|
47
|
+
ExtractArchiveWriter,
|
|
48
|
+
FileTreeWriter,
|
|
49
|
+
H5Writer,
|
|
50
|
+
ImageWriter,
|
|
51
|
+
MatplotlibAnimationWriter,
|
|
52
|
+
MatplotlibFigureWriter,
|
|
53
|
+
NexusWriter,
|
|
54
|
+
PyfaiResultsWriter,
|
|
55
|
+
YAMLWriter,
|
|
56
|
+
TXTWriter,
|
|
57
|
+
)
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
"""Common Pydantic model classes."""
|
|
2
|
+
|
|
3
|
+
# System modules
|
|
4
|
+
from typing import (
|
|
5
|
+
Literal,
|
|
6
|
+
Optional,
|
|
7
|
+
Union,
|
|
8
|
+
)
|
|
9
|
+
|
|
10
|
+
# Third party modules
|
|
11
|
+
from pydantic import (
|
|
12
|
+
confloat,
|
|
13
|
+
conint,
|
|
14
|
+
conlist,
|
|
15
|
+
constr,
|
|
16
|
+
field_validator,
|
|
17
|
+
)
|
|
18
|
+
#from typing_extensions import Annotated
|
|
19
|
+
|
|
20
|
+
# Local modules
|
|
21
|
+
from CHAP.models import CHAPBaseModel
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class BinarizeProcessorConfig(CHAPBaseModel):
    """Configuration class to binarize a dataset in a 2D or 3D
    array-like object or a NeXus NXdata or NXfield object.

    :param method: Binarization method, defaults to `'CHAP'`
        (CHAP's internal implementation of Otsu's method).
    :type method: Literal['CHAP', 'isodata', 'minimum', 'otsu', 'yen']
    :param num_bin: The number of bins used to calculate the
        histogram in the binarization algorithms, defaults to `256`.
    :type num_bin: int, optional
    :param nxpath: The path to a specific NeXus NXdata or NXfield
        object in the NeXus file tree to read the input data from
        (ignored for non-NeXus input objects).
    :type nxpath: str, optional
    :param remove_original_data: Removes the original data field
        (ignored for non-NeXus input objects), defaults to `False`.
    :type remove_original_data: bool, optional
    """
    # Thresholding algorithm; non-'CHAP' choices presumably map to the
    # scikit-image filters of the same names -- verify in the processor.
    method: Optional[Literal[
        'CHAP', 'isodata', 'minimum', 'otsu', 'yen']] = 'CHAP'
    # Histogram resolution for the thresholding algorithms.
    num_bin: Optional[conint(ge=0)] = 256
    # NeXus tree path to the input data; ignored for non-NeXus input.
    nxpath: Optional[str] = None
    # When True, the original (pre-binarization) data field is dropped
    # from the NeXus output; ignored for non-NeXus input.
    remove_original_data: Optional[bool] = False
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class ImageProcessorConfig(CHAPBaseModel):
    """Class representing the configuration of various image selection
    and visualization types of processors.

    :param animation: Create an animation for an image stack
        (ignored for a single image), defaults to `False`.
    :type animation: bool, optional
    :param axis: Axis direction or name for the image slice(s),
        defaults to `0`.
    :type axis: Union[int, str], optional
    :param coord_range: Coordinate value range of the selected image
        slice(s), up to three floating point numbers (start, end,
        step), defaults to `None`, which enables index_range to select
        the image slice(s). Include only `coord_range` or
        `index_range`, not both.
    :type coord_range: Union[float, list[float]], optional
    :param index_range: Array index range of the selected image
        slice(s), up to three integers (start, end, step).
        Set index_range to -1 to select the center image slice
        of an image stack in the `axis` direction. Only used when
        coord_range = `None`. Defaults to `None`, which will include
        all slices.
    :type index_range: Union[int, list[int]], optional
    :ivar fileformat: Image (stack) return file type, defaults to
        'png' for a single image, 'tif' for an image stack, or
        'gif' for an animation.
    :type fileformat: Literal['gif', 'jpeg', 'png', 'tif'], optional
    :param vrange: Data value range in image slice(s), defaults to
        `None`, which uses the full data value range in the slice(s).
    :type vrange: list[float, float]
    """
    animation: Optional[bool] = False
    # Axis as a non-negative index or a non-empty axis name.
    axis: Optional[Union[conint(ge=0), constr(min_length=1)]] = 0
    # Single coordinate value or (start, end[, step]); finite floats only.
    coord_range: Optional[Union[
        confloat(allow_inf_nan=False),
        conlist(min_length=2, max_length=3,
                item_type=confloat(allow_inf_nan=False))]] = None
    # Single index or (start, end[, step]); entries may be None
    # (open-ended), see validate_index_range below.
    index_range: Optional[Union[
        int,
        conlist(
            min_length=2, max_length=3, item_type=Union[None, int])]] = None
    fileformat: Optional[Literal['gif', 'jpeg', 'png', 'tif']] = None
    # Exactly two finite floats; normalized to [min, max] by
    # validate_vrange below.
    vrange: Optional[
        conlist(min_length=2, max_length=2,
                item_type=confloat(allow_inf_nan=False))] = None

    @field_validator('index_range', mode='before')
    @classmethod
    def validate_index_range(cls, index_range):
        """Validate the index_range.

        Runs before pydantic's own validation: converts string
        placeholders 'none'/'None' (e.g. from YAML/CLI input) into
        real `None` entries; integers pass through unchanged.

        :ivar index_range: Array index range of the selected image
            slice(s), defaults to `None`.
        :type index_range: Union[int, list[int]], optional
        :return: Validated index_range.
        :rtype: Union[int, list[int]]
        """
        if isinstance(index_range, int):
            return index_range
        return [None if isinstance(i, str) and i.lower() == 'none' else i
                for i in index_range]

    @field_validator('vrange', mode='before')
    @classmethod
    def validate_vrange(cls, vrange):
        """Validate the vrange.

        Runs before pydantic's own validation: reorders a two-element
        range so it is always [min, max]; anything else is passed
        through for pydantic to reject or accept.

        :ivar vrange: Data value range in image slice(s),
            defaults to `None`.
        :type vrange: list[float, float], optional
        :return: Validated vrange.
        :rtype: list[float, float]
        """
        if isinstance(vrange, (list, tuple)) and len(vrange) == 2:
            return [min(vrange), max(vrange)]
        return vrange