ChessAnalysisPipeline 0.0.17.dev3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHAP/TaskManager.py +216 -0
- CHAP/__init__.py +27 -0
- CHAP/common/__init__.py +57 -0
- CHAP/common/models/__init__.py +8 -0
- CHAP/common/models/common.py +124 -0
- CHAP/common/models/integration.py +659 -0
- CHAP/common/models/map.py +1291 -0
- CHAP/common/processor.py +2869 -0
- CHAP/common/reader.py +658 -0
- CHAP/common/utils.py +110 -0
- CHAP/common/writer.py +730 -0
- CHAP/edd/__init__.py +23 -0
- CHAP/edd/models.py +876 -0
- CHAP/edd/processor.py +3069 -0
- CHAP/edd/reader.py +1023 -0
- CHAP/edd/select_material_params_gui.py +348 -0
- CHAP/edd/utils.py +1572 -0
- CHAP/edd/writer.py +26 -0
- CHAP/foxden/__init__.py +19 -0
- CHAP/foxden/models.py +71 -0
- CHAP/foxden/processor.py +124 -0
- CHAP/foxden/reader.py +224 -0
- CHAP/foxden/utils.py +80 -0
- CHAP/foxden/writer.py +168 -0
- CHAP/giwaxs/__init__.py +11 -0
- CHAP/giwaxs/models.py +491 -0
- CHAP/giwaxs/processor.py +776 -0
- CHAP/giwaxs/reader.py +8 -0
- CHAP/giwaxs/writer.py +8 -0
- CHAP/inference/__init__.py +7 -0
- CHAP/inference/processor.py +69 -0
- CHAP/inference/reader.py +8 -0
- CHAP/inference/writer.py +8 -0
- CHAP/models.py +227 -0
- CHAP/pipeline.py +479 -0
- CHAP/processor.py +125 -0
- CHAP/reader.py +124 -0
- CHAP/runner.py +277 -0
- CHAP/saxswaxs/__init__.py +7 -0
- CHAP/saxswaxs/processor.py +8 -0
- CHAP/saxswaxs/reader.py +8 -0
- CHAP/saxswaxs/writer.py +8 -0
- CHAP/server.py +125 -0
- CHAP/sin2psi/__init__.py +7 -0
- CHAP/sin2psi/processor.py +8 -0
- CHAP/sin2psi/reader.py +8 -0
- CHAP/sin2psi/writer.py +8 -0
- CHAP/tomo/__init__.py +15 -0
- CHAP/tomo/models.py +210 -0
- CHAP/tomo/processor.py +3862 -0
- CHAP/tomo/reader.py +9 -0
- CHAP/tomo/writer.py +59 -0
- CHAP/utils/__init__.py +6 -0
- CHAP/utils/converters.py +188 -0
- CHAP/utils/fit.py +2947 -0
- CHAP/utils/general.py +2655 -0
- CHAP/utils/material.py +274 -0
- CHAP/utils/models.py +595 -0
- CHAP/utils/parfile.py +224 -0
- CHAP/writer.py +122 -0
- MLaaS/__init__.py +0 -0
- MLaaS/ktrain.py +205 -0
- MLaaS/mnist_img.py +83 -0
- MLaaS/tfaas_client.py +371 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/LICENSE +60 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/METADATA +29 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/RECORD +70 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/WHEEL +5 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/entry_points.txt +2 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/top_level.txt +2 -0
CHAP/giwaxs/reader.py
ADDED
CHAP/giwaxs/writer.py
ADDED
CHAP/inference/__init__.py
ADDED
@@ -0,0 +1,7 @@
+"""This subpackage contains `PipelineItem`s used to interact with a
+[TFaaS inference server](https://github.com/vkuznet/TFaaS/).
+"""
+
+from CHAP.inference.processor import TFaaSImageProcessor
+# from CHAP.inference.reader import
+# from CHAP.inference.writer import
CHAP/inference/processor.py
ADDED
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+"""
+File : processor.py
+Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
+Description: Processor module
+"""
+
+# System modules
+from time import time
+
+# Local modules
+from CHAP import Processor
+
+
+class TFaaSImageProcessor(Processor):
+    """A Processor to get predictions from TFaaS inference server."""
+    def process(self, data, url, model, verbose=False):
+        """process data API"""
+        t0 = time()
+        self.logger.info(f'Executing "process" with url {url} model {model}')
+        data = self._process(data, url, model, verbose)
+        self.logger.info(f'Finished "process" in {time()-t0:.3f} seconds\n')
+        return data
+
+    def _process(self, data, url, model, verbose):
+        """Print and return the input data.
+
+        :param data: Input image data, either file name or actual
+            image data.
+        :type data: object
+        :return: The input data.
+        :rtype: object
+        """
+        # System modules
+        from pathlib import Path
+
+        # Local modules
+        from MLaaS.tfaas_client import predictImage
+
+        self.logger.info(f'input data {type(data)}')
+        if isinstance(data, str) and Path(data).is_file():
+            img_file = data
+            data = predictImage(url, img_file, model, verbose)
+        else:
+            # Third party modules
+            from requests import Session
+
+            rdict = data[0]
+            img = rdict['data']
+            session = Session()
+            rurl = url + '/predict/image'
+            payload = {'model': model}
+            files = {'image': img}
+            self.logger.info(
+                f'HTTP request {rurl} with image file and {payload} payload')
+            req = session.post(rurl, files=files, data=payload)
+            data = req.content
+            data = data.decode('utf-8').replace('\n', '')
+            self.logger.info(f'HTTP response {data}')
+
+        return data
+
+
+if __name__ == '__main__':
+    # Local modules
+    from CHAP.processor import main
+
+    main()
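For orientation, the following standalone sketch reproduces the HTTP call that `_process` makes in its non-file branch: an image is POSTed to the TFaaS `/predict/image` endpoint together with a model name. The server URL, model name, and image path are placeholders (they do not come from this package), and a running TFaaS instance with that model registered is assumed.

    # Hypothetical standalone sketch of the request issued by
    # TFaaSImageProcessor._process(); the URL, model name, and image path
    # are placeholders, not values taken from the package.
    from requests import Session

    url = 'http://localhost:8083'              # TFaaS server (assumed)
    with open('sample.png', 'rb') as img:      # image file (placeholder)
        response = Session().post(
            url + '/predict/image',
            files={'image': img},              # image payload
            data={'model': 'mnist'})           # model registered with TFaaS
    # Mirror the processor's handling of the response body
    print(response.content.decode('utf-8').replace('\n', ''))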
CHAP/inference/reader.py
ADDED
CHAP/inference/writer.py
ADDED
CHAP/models.py
ADDED
@@ -0,0 +1,227 @@
+"""Common Pydantic model classes."""
+
+# System modules
+import os
+from pathlib import PosixPath
+from typing import (
+    Literal,
+    Optional,
+)
+
+# Third party modules
+from pydantic import (
+    BaseModel,
+    DirectoryPath,
+    PrivateAttr,
+    field_validator,
+    model_validator,
+)
+
+
+class CHAPBaseModel(BaseModel):
+    """Base CHAP configuration class implementing robust
+    serialization tools.
+    """
+
+    def dict(self, *args, **kwargs):
+        return self.model_dump(*args, **kwargs)
+
+    def model_dump(self, *args, **kwargs):
+        """Dump the class implementation to a dictionary
+
+        :return: Class implementation.
+        :rtype: dict
+        """
+
+        if hasattr(self, '_exclude'):
+            kwargs['exclude'] = self._merge_exclude(
+                None if kwargs is None else kwargs.get('exclude'))
+        if 'by_alias' not in kwargs:
+            kwargs['by_alias'] = True
+        return self._serialize(super().model_dump(*args, **kwargs))
+
+    def model_dump_json(self, *args, **kwargs):
+        """Dump the class implementation to a JSON string
+
+        :return: Class implementation.
+        :rtype: str
+        """
+
+        # Third party modules
+        from json import dumps
+
+        return dumps(self.model_dump(*args, **kwargs))
+
+    def _merge_exclude(self, exclude):
+        if exclude is None:
+            exclude = self._exclude
+        elif isinstance(exclude, set):
+            if isinstance(self._exclude, set):
+                exclude |= self._exclude
+            elif isinstance(self._exclude, dict):
+                exclude = {**{v:True for v in exclude}, **self._exclude}
+        elif isinstance(exclude, dict):
+            if isinstance(self._exclude, set):
+                exclude = {**exclude, **{v:True for v in self._exclude}}
+            elif isinstance(self._exclude, dict):
+                exclude = {**exclude, **self._exclude}
+        return exclude
+
+    def _serialize(self, value):
+        if isinstance(value, dict):
+            value = {k:self._serialize(v) for k, v in value.items()}
+        elif isinstance(value, (tuple, list)):
+            value = [self._serialize(v) for v in value]
+        elif isinstance(value, PosixPath):
+            value = str(value)
+        else:
+            try:
+                # For np.array, np.ndarray, any np scalar, or native types
+                value = getattr(value, "tolist", lambda: value)()
+            except Exception:
+                pass
+        return value
+
+
+class RunConfig(CHAPBaseModel):
+    """Pipeline run configuration class.
+
+    :ivar root: Default work directory, defaults to the current run
+        directory.
+    :type root: str, optional
+    :ivar inputdir: Input directory, used only if any input file in the
+        pipeline is not an absolute path, defaults to `'root'`.
+    :type inputdir: str, optional
+    :ivar outputdir: Output directory, used only if any output file in
+        the pipeline is not an absolute path, defaults to `'root'`.
+    :type outputdir: str, optional
+    :ivar interactive: Allows for user interactions,
+        defaults to `False`.
+    :type interactive: bool, optional
+    :ivar log_level: Logger level (not case sensitive),
+        defaults to `'INFO'`.
+    :type log_level: Literal[
+        'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], optional
+    """
+
+    root: Optional[DirectoryPath] = os.getcwd()
+    inputdir: Optional[DirectoryPath] = None
+    outputdir: Optional[DirectoryPath] = None
+    interactive: Optional[bool] = False
+    log_level: Optional[Literal[
+        'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']] = 'INFO'
+
+    # Internal flags, only set them during object construction
+    # For code profiling
+    _profile: bool = PrivateAttr(default=False)
+    # To determine if a pipeline is executed from a spawned worker
+    _spawn: int = PrivateAttr(default=0)
+
+    def __init__(self, **data):
+        super().__init__(**data)
+        if 'profile' in data:
+            self._profile = data.pop('profile')
+            if not isinstance(self._profile, bool):
+                raise ValueError(
+                    f'Invalid private attribute profile {self._profile}')
+        if 'spawn' in data:
+            self._spawn = data.pop('spawn')
+            if not (isinstance(self._spawn, int) and -1 <= self._spawn <= 1):
+                raise ValueError(
+                    f'Invalid private attribute spawn {self._spawn}')
+
+    @model_validator(mode='before')
+    @classmethod
+    def validate_runconfig_before(cls, data):
+        """Ensure that valid directory paths are provided.
+
+        :param data: Pydantic validator data object.
+        :type data: RunConfig,
+            pydantic_core._pydantic_core.ValidationInfo
+        :return: The currently validated list of class properties.
+        :rtype: dict
+        """
+
+        if isinstance(data, dict):
+            # System modules
+            from tempfile import NamedTemporaryFile
+
+            # Make sure os.makedirs is only called from the root node
+            comm = data.get('comm')
+            if comm is None:
+                rank = 0
+            else:
+                rank = comm.Get_rank()
+
+            # Check if root exists (create it if not) and is readable
+            root = data.get('root')
+            if root is None:
+                root = os.getcwd()
+            if not rank:
+                if not os.path.isdir(root):
+                    os.makedirs(root)
+                if not os.access(root, os.R_OK):
+                    raise OSError('root directory is not accessible for '
+                                  f'reading ({root})')
+            data['root'] = os.path.realpath(root)
+
+            # Check if inputdir exists and is readable
+            inputdir = data.get('inputdir', '.')
+            if not os.path.isabs(inputdir):
+                inputdir = os.path.normpath(os.path.realpath(
+                    os.path.join(root, inputdir)))
+            if not rank:
+                if not os.path.isdir(inputdir):
+                    raise OSError(
+                        f'input directory does not exist ({inputdir})')
+                if not os.access(inputdir, os.R_OK):
+                    raise OSError(
+                        'input directory is not accessible for reading '
+                        f'({inputdir})')
+            data['inputdir'] = inputdir
+
+            # Check if outputdir exists (create it if not) and is writable
+            outputdir = data.get('outputdir', '.')
+            if not os.path.isabs(outputdir):
+                outputdir = os.path.normpath(os.path.realpath(
+                    os.path.join(root, outputdir)))
+            if not rank:
+                if not os.path.isdir(outputdir):
+                    os.makedirs(outputdir)
+                try:
+                    NamedTemporaryFile(dir=outputdir)
+                except Exception as exc:
+                    raise OSError('output directory is not accessible for '
+                                  f'writing ({outputdir})') from exc
+            data['outputdir'] = outputdir
+
+            # Make sure os.makedirs completes before continuing
+            # Make sure barrier() is also called on the main node if
+            # this is called from a spawned slave node
+            if comm is not None:
+                comm.barrier()
+
+        return data
+
+    @field_validator('log_level', mode='before')
+    @classmethod
+    def validate_log_level(cls, log_level):
+        """Capitalize log_level."""
+
+        return log_level.upper()
+
+    @property
+    def profile(self):
+        """Return the profiling flag."""
+
+        if hasattr(self, '_profile'):
+            return self._profile
+        return False
+
+    @property
+    def spawn(self):
+        """Return the spawned worker flag."""
+
+        if hasattr(self, '_spawn'):
+            return self._spawn
+        return 0