ChessAnalysisPipeline 0.0.17.dev3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHAP/TaskManager.py +216 -0
- CHAP/__init__.py +27 -0
- CHAP/common/__init__.py +57 -0
- CHAP/common/models/__init__.py +8 -0
- CHAP/common/models/common.py +124 -0
- CHAP/common/models/integration.py +659 -0
- CHAP/common/models/map.py +1291 -0
- CHAP/common/processor.py +2869 -0
- CHAP/common/reader.py +658 -0
- CHAP/common/utils.py +110 -0
- CHAP/common/writer.py +730 -0
- CHAP/edd/__init__.py +23 -0
- CHAP/edd/models.py +876 -0
- CHAP/edd/processor.py +3069 -0
- CHAP/edd/reader.py +1023 -0
- CHAP/edd/select_material_params_gui.py +348 -0
- CHAP/edd/utils.py +1572 -0
- CHAP/edd/writer.py +26 -0
- CHAP/foxden/__init__.py +19 -0
- CHAP/foxden/models.py +71 -0
- CHAP/foxden/processor.py +124 -0
- CHAP/foxden/reader.py +224 -0
- CHAP/foxden/utils.py +80 -0
- CHAP/foxden/writer.py +168 -0
- CHAP/giwaxs/__init__.py +11 -0
- CHAP/giwaxs/models.py +491 -0
- CHAP/giwaxs/processor.py +776 -0
- CHAP/giwaxs/reader.py +8 -0
- CHAP/giwaxs/writer.py +8 -0
- CHAP/inference/__init__.py +7 -0
- CHAP/inference/processor.py +69 -0
- CHAP/inference/reader.py +8 -0
- CHAP/inference/writer.py +8 -0
- CHAP/models.py +227 -0
- CHAP/pipeline.py +479 -0
- CHAP/processor.py +125 -0
- CHAP/reader.py +124 -0
- CHAP/runner.py +277 -0
- CHAP/saxswaxs/__init__.py +7 -0
- CHAP/saxswaxs/processor.py +8 -0
- CHAP/saxswaxs/reader.py +8 -0
- CHAP/saxswaxs/writer.py +8 -0
- CHAP/server.py +125 -0
- CHAP/sin2psi/__init__.py +7 -0
- CHAP/sin2psi/processor.py +8 -0
- CHAP/sin2psi/reader.py +8 -0
- CHAP/sin2psi/writer.py +8 -0
- CHAP/tomo/__init__.py +15 -0
- CHAP/tomo/models.py +210 -0
- CHAP/tomo/processor.py +3862 -0
- CHAP/tomo/reader.py +9 -0
- CHAP/tomo/writer.py +59 -0
- CHAP/utils/__init__.py +6 -0
- CHAP/utils/converters.py +188 -0
- CHAP/utils/fit.py +2947 -0
- CHAP/utils/general.py +2655 -0
- CHAP/utils/material.py +274 -0
- CHAP/utils/models.py +595 -0
- CHAP/utils/parfile.py +224 -0
- CHAP/writer.py +122 -0
- MLaaS/__init__.py +0 -0
- MLaaS/ktrain.py +205 -0
- MLaaS/mnist_img.py +83 -0
- MLaaS/tfaas_client.py +371 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/LICENSE +60 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/METADATA +29 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/RECORD +70 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/WHEEL +5 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/entry_points.txt +2 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/top_level.txt +2 -0
CHAP/tomo/reader.py
ADDED
CHAP/tomo/writer.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""Tomography command line writer."""
|
|
3
|
+
|
|
4
|
+
# Local modules
|
|
5
|
+
from CHAP import Writer
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class TomoWriter(Writer):
    """Writer for saving tomo data."""
    def write(self, data):
        """Write the results of the (partial) tomographic
        reconstruction and add provenance data to the data pipeline.

        :param data: Input data.
        :type data: list[PipelineData]
        :return: Output data.
        :rtype: list[PipelineData]
        """
        # System modules
        from os import path as os_path

        # Local modules
        from CHAP.pipeline import PipelineData

        # Pull the (partial) tomographic reconstruction result out of
        # the data pipeline
        result = self.get_data(data, schema='tomodata', remove=self.remove)

        # A dict result is serialized as YAML, anything else as NeXus
        if isinstance(result, dict):
            from CHAP.common.writer import YAMLWriter as writer_cls
        else:
            from CHAP.common.writer import NexusWriter as writer_cls

        # Delegate the actual writing to the selected common writer
        #RV FIX make class methods from the Writer.write functions?
        # or create write function that also accept some default type
        # other than list[PipelineData]?
        delegate = writer_cls(**self.model_dump())
        delegate.write([PipelineData(name=self.__name__, data=result)])

        # Record provenance (input/output files) on the data pipeline
        meta = self.get_data(data, schema='metadata', remove=False)
        provenance = {
            'did': meta['did'],
            'input_files': [{'name': 'todo.fix: pipeline.yaml'}],
            'output_files': [{'name': os_path.realpath(self.filename)}],
        }
        data.append(PipelineData(
            name=self.__name__, data=provenance, schema='provenance'))

        return data
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
if __name__ == '__main__':
    # Run this writer from the command line via the generic CHAP
    # writer entry point
    from CHAP import writer as _cli_writer

    _cli_writer.main()
|
CHAP/utils/__init__.py
ADDED
CHAP/utils/converters.py
ADDED
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
"""Functions for converting between some commonly used data
|
|
2
|
+
formats."""
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def convert_sparse_dense(data):
    """Converts between dense and sparse representations for NumPy
    arrays, xarray DataArrays, and xarray Datasets.

    - If input is a NumPy array, converts to a SciPy sparse CSR matrix.
    - If input is a SciPy sparse matrix, converts to a dense NumPy array.
    - If input is an xarray DataArray or Dataset containing sparse
      arrays, converts to dense.
    - If input is an xarray DataArray or Dataset containing dense
      arrays, converts to sparse.

    :param data: The input data.
    :type data: Union[numpy.ndarray, scipy.sparse.spmatrix,
        xarray.DataArray, xarray.Dataset]
    :raises TypeError: If `data` is not one of the supported types.
    :return: Converted object -- sparse if input is dense, dense if
        input is sparse.
    """
    import numpy as np
    import scipy.sparse as sp

    if isinstance(data, np.ndarray):
        # NOTE(review): csr_matrix only accepts 2D input; a 1D or ND
        # array will raise inside SciPy -- confirm callers pass 2D
        return sp.csr_matrix(data)  # Convert dense NumPy array to sparse

    if sp.issparse(data):
        return data.toarray()  # Convert sparse matrix to dense NumPy array

    # Defer the xarray import so plain NumPy/SciPy conversions do not
    # require xarray to be installed
    import xarray as xr

    if isinstance(data, xr.DataArray):
        # Convert DataArray values while preserving metadata
        if sp.issparse(data.data):
            return xr.DataArray(
                data.data.toarray(), dims=data.dims,
                coords=data.coords, attrs=data.attrs)
        return xr.DataArray(
            sp.csr_matrix(data.data), dims=data.dims,
            coords=data.coords, attrs=data.attrs)

    if isinstance(data, xr.Dataset):
        # Convert each variable in the Dataset, keeping dims/coords/attrs
        def convert_var(var):
            if sp.issparse(var.data):
                return xr.DataArray(var.data.toarray(), dims=var.dims,
                                    coords=var.coords, attrs=var.attrs)
            return xr.DataArray(sp.csr_matrix(var.data), dims=var.dims,
                                coords=var.coords, attrs=var.attrs)

        return data.map(convert_var)

    raise TypeError(f'Unsupported data type: {type(data)}. '
                    'Input must be a NumPy array, SciPy sparse matrix, '
                    'xarray DataArray, or xarray Dataset.')
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def convert_xarray_nexus(data):
    """Convert an `xarray.DataArray` or `xarray.Dataset` into an
    `nexusformat.nexus.NXdata` or vice versa.

    :param data: Input data.
    :type data: Union[xarray.DataArray, xarray.Dataset,
        nexusformat.nexus.NXdata]
    :raises TypeError: If `data` is not one of the supported types.
    :return: Converted data.
    :rtype: Union[xarray.DataArray, xarray.Dataset,
        nexusformat.nexus.NXdata]
    """
    import xarray as xr
    from nexusformat.nexus import NXdata, NXfield

    if isinstance(data, xr.DataArray):
        # DataArray -> NXdata: values become the signal, each dimension
        # coordinate becomes an axis field
        return NXdata(
            value=data.values,
            name=data.name,
            attrs=data.attrs,
            axes=tuple(
                NXfield(
                    value=data[dim].values,
                    name=dim,
                    attrs=data[dim].attrs,
                )
                for dim in data.dims),
        )
    if isinstance(data, xr.Dataset):
        # Dataset -> NXdata: one NXfield per data variable, shared axes
        return NXdata(
            **{var:
               NXfield(
                   value=data[var].values,
                   name=var,
                   attrs=data[var].attrs,
               )
               for var in data.data_vars},
            name=data.name,
            attrs=data.attrs,
            axes=tuple(
                NXfield(
                    value=data[dim].values,
                    name=dim,
                    attrs=data[dim].attrs,
                )
                for dim in data.dims),
        )
    if isinstance(data, NXdata):
        nxaxes = data.nxaxes
        if nxaxes is None:
            # Fall back to the CHAP convention for unstructured data
            if 'unstructured_axes' in data.attrs:
                nxaxes = data.unstructured_axes
        if isinstance(nxaxes, str):
            nxaxes = [nxaxes]
        if nxaxes is None:
            # No axes at all: treat every field as a data variable
            # (without this, `name not in nxaxes` below raises TypeError)
            nxaxes = []
        return xr.Dataset(
            data_vars={
                name: xr.DataArray(
                    data=field.nxdata,
                    name=name,
                    attrs=field.attrs,
                )
                for name, field in data.items()
                if isinstance(field, NXfield)
                and name not in nxaxes
            },
            coords={
                axis: xr.DataArray(
                    data=data[axis].nxdata,
                    name=data[axis].nxname,
                    attrs=data[axis].attrs,
                )
                for axis in nxaxes
            },
            attrs=data.attrs,
        )
    raise TypeError(f'Unsupported data type: {type(data)}. '
                    'Must be xarray.DataArray, xarray.Dataset, or NXdata.')
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def convert_structured_unstructured(data):
    """Convert an `nexusformat.nexus.NXdata` between unstructured
    (flat point-list axes, marked by an `unstructured_axes` attribute)
    and structured (gridded `axes`) layouts.

    Currently only the unstructured-to-structured direction is
    implemented: each signal is scattered onto the regular grid formed
    by the unique values of every unstructured axis.

    :param data: Input data.
    :type data: nexusformat.nexus.NXdata
    :raises NotImplementedError: For structured-to-unstructured input.
    :raises TypeError: If `data` is not an `NXdata`.
    :return: Converted data.
    :rtype: nexusformat.nexus.NXdata
    """
    from copy import deepcopy
    from nexusformat.nexus import NXdata, NXfield
    import numpy as np

    if isinstance(data, NXdata):
        if 'unstructured_axes' in data.attrs:
            # Convert unstructured to structured
            nxaxes = data.attrs['unstructured_axes']
            if isinstance(nxaxes, str):
                # A single axis may be stored as a plain string:
                # normalize to a list so the loops below iterate over
                # axis names, not characters (consistent with
                # convert_xarray_nexus)
                nxaxes = [nxaxes]
            attrs = deepcopy(data.attrs)
            attrs['axes'] = attrs['unstructured_axes']
            attrs.pop('unstructured_axes')
            signals = [name for name, child in data.items()
                       if name not in nxaxes]
            # Grid coordinates: sorted unique values per axis
            structured_axes = {a: np.unique(data[a].nxdata) for a in nxaxes}
            dataset_shape = tuple(len(v) for a, v in structured_axes.items())
            # Pre-allocate one gridded array per signal; trailing signal
            # dimensions (beyond the point index) are preserved
            structured_signals = {s: np.empty(
                (*dataset_shape, *data[s].shape[1:]),
                dtype=data[s].nxdata.dtype,
            ) for s in signals}
            # Number of points = length of the first (point) dimension
            npts = len(data[signals[0]].nxdata)
            print(f'converting {npts} data points')
            # Map each point's axis value to its index on the grid
            indices = {
                a: np.searchsorted(structured_axes[a], data[a].nxdata)
                for a in nxaxes
            }
            for s, value in structured_signals.items():
                value[tuple(indices[a] for a in nxaxes)] = data[s]
            structured_data = NXdata(
                **{s: NXfield(
                    value=structured_signals[s],
                    name=s,
                    attrs=data[s].attrs,
                ) for s in signals},
                name=data.nxname,
                attrs=attrs,
                axes=tuple(
                    NXfield(
                        value=value,
                        name=a,
                        # Drop 'target' so the new field is not treated
                        # as a link to the original
                        attrs={k: v for k, v in data[a].attrs.items()
                               if not k == 'target'},
                    )
                    for a, value in structured_axes.items()
                )
            )
            return structured_data

        if 'axes' in data.attrs:
            # Convert structured to unstructured
            raise NotImplementedError(
                'Conversion from structured to unstructured not implemented.')
    raise TypeError(f'Unsupported data type: {type(data)}')
|