ChessAnalysisPipeline 0.0.17.dev3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70) hide show
  1. CHAP/TaskManager.py +216 -0
  2. CHAP/__init__.py +27 -0
  3. CHAP/common/__init__.py +57 -0
  4. CHAP/common/models/__init__.py +8 -0
  5. CHAP/common/models/common.py +124 -0
  6. CHAP/common/models/integration.py +659 -0
  7. CHAP/common/models/map.py +1291 -0
  8. CHAP/common/processor.py +2869 -0
  9. CHAP/common/reader.py +658 -0
  10. CHAP/common/utils.py +110 -0
  11. CHAP/common/writer.py +730 -0
  12. CHAP/edd/__init__.py +23 -0
  13. CHAP/edd/models.py +876 -0
  14. CHAP/edd/processor.py +3069 -0
  15. CHAP/edd/reader.py +1023 -0
  16. CHAP/edd/select_material_params_gui.py +348 -0
  17. CHAP/edd/utils.py +1572 -0
  18. CHAP/edd/writer.py +26 -0
  19. CHAP/foxden/__init__.py +19 -0
  20. CHAP/foxden/models.py +71 -0
  21. CHAP/foxden/processor.py +124 -0
  22. CHAP/foxden/reader.py +224 -0
  23. CHAP/foxden/utils.py +80 -0
  24. CHAP/foxden/writer.py +168 -0
  25. CHAP/giwaxs/__init__.py +11 -0
  26. CHAP/giwaxs/models.py +491 -0
  27. CHAP/giwaxs/processor.py +776 -0
  28. CHAP/giwaxs/reader.py +8 -0
  29. CHAP/giwaxs/writer.py +8 -0
  30. CHAP/inference/__init__.py +7 -0
  31. CHAP/inference/processor.py +69 -0
  32. CHAP/inference/reader.py +8 -0
  33. CHAP/inference/writer.py +8 -0
  34. CHAP/models.py +227 -0
  35. CHAP/pipeline.py +479 -0
  36. CHAP/processor.py +125 -0
  37. CHAP/reader.py +124 -0
  38. CHAP/runner.py +277 -0
  39. CHAP/saxswaxs/__init__.py +7 -0
  40. CHAP/saxswaxs/processor.py +8 -0
  41. CHAP/saxswaxs/reader.py +8 -0
  42. CHAP/saxswaxs/writer.py +8 -0
  43. CHAP/server.py +125 -0
  44. CHAP/sin2psi/__init__.py +7 -0
  45. CHAP/sin2psi/processor.py +8 -0
  46. CHAP/sin2psi/reader.py +8 -0
  47. CHAP/sin2psi/writer.py +8 -0
  48. CHAP/tomo/__init__.py +15 -0
  49. CHAP/tomo/models.py +210 -0
  50. CHAP/tomo/processor.py +3862 -0
  51. CHAP/tomo/reader.py +9 -0
  52. CHAP/tomo/writer.py +59 -0
  53. CHAP/utils/__init__.py +6 -0
  54. CHAP/utils/converters.py +188 -0
  55. CHAP/utils/fit.py +2947 -0
  56. CHAP/utils/general.py +2655 -0
  57. CHAP/utils/material.py +274 -0
  58. CHAP/utils/models.py +595 -0
  59. CHAP/utils/parfile.py +224 -0
  60. CHAP/writer.py +122 -0
  61. MLaaS/__init__.py +0 -0
  62. MLaaS/ktrain.py +205 -0
  63. MLaaS/mnist_img.py +83 -0
  64. MLaaS/tfaas_client.py +371 -0
  65. chessanalysispipeline-0.0.17.dev3.dist-info/LICENSE +60 -0
  66. chessanalysispipeline-0.0.17.dev3.dist-info/METADATA +29 -0
  67. chessanalysispipeline-0.0.17.dev3.dist-info/RECORD +70 -0
  68. chessanalysispipeline-0.0.17.dev3.dist-info/WHEEL +5 -0
  69. chessanalysispipeline-0.0.17.dev3.dist-info/entry_points.txt +2 -0
  70. chessanalysispipeline-0.0.17.dev3.dist-info/top_level.txt +2 -0
CHAP/tomo/reader.py ADDED
@@ -0,0 +1,9 @@
1
#!/usr/bin/env python
"""Tomography specific reader."""


if __name__ == '__main__':
    # Local modules
    # This module defines no tomography-specific Reader yet; running it
    # simply delegates to the generic CHAP reader command-line entry point.
    from CHAP.reader import main

    main()
CHAP/tomo/writer.py ADDED
@@ -0,0 +1,59 @@
1
+ #!/usr/bin/env python
2
+ """Tomography command line writer."""
3
+
4
+ # Local modules
5
+ from CHAP import Writer
6
+
7
+
8
class TomoWriter(Writer):
    """Writer for saving tomo data."""
    def write(self, data):
        """Save the (partial) tomographic reconstruction carried on
        the pipeline and append a provenance record to it.

        :param data: Input data.
        :type data: list[PipelineData]
        :return: Output data.
        :rtype: list[PipelineData]
        """
        # System modules
        from os import path as os_path

        # Local modules
        from CHAP.pipeline import PipelineData

        # Pull the (partial) reconstruction result off the pipeline
        result = self.get_data(data, schema='tomodata', remove=self.remove)

        # Select the concrete writer from the payload type:
        # dicts are written as YAML, anything else as NeXus
        if isinstance(result, dict):
            from CHAP.common.writer import YAMLWriter as writer_cls
        else:
            from CHAP.common.writer import NexusWriter as writer_cls

        # Persist the reconstruction result
        #RV FIX make class methods from the Writer.write functions?
        #   or create write function that also accept some default type
        #   other than list[PipelineData]?
        delegate = writer_cls(**self.model_dump())
        delegate.write([PipelineData(name=self.__name__, data=result)])

        # Append provenance info to the data pipeline
        pipeline_metadata = self.get_data(
            data, schema='metadata', remove=False)
        provenance = {
            'did': pipeline_metadata['did'],
            'input_files': [{'name': 'todo.fix: pipeline.yaml'}],
            'output_files': [{'name': os_path.realpath(self.filename)}],
        }
        data.append(PipelineData(
            name=self.__name__, data=provenance, schema='provenance'))

        return data
53
+
54
+
55
if __name__ == '__main__':
    # Local modules
    # Delegate to the generic CHAP writer command-line entry point.
    from CHAP.writer import main

    main()
CHAP/utils/__init__.py ADDED
@@ -0,0 +1,6 @@
1
+ """This subpackage contains generic utilities for fitting, parsing
2
+ CHESS scan data, collecting interactive user input, and finding
3
+ lattice properties of materials (among others).
4
+ """
5
+
6
+ from CHAP.utils.fit import FitProcessor
@@ -0,0 +1,188 @@
1
+ """Functions for converting between some commonly used data
2
+ formats."""
3
+
4
+
5
def convert_sparse_dense(data):
    """Converts between dense and sparse representations for NumPy
    arrays, xarray DataArrays, and xarray Datasets.

    - If input is a NumPy array, converts to a SciPy sparse CSR matrix.
    - If input is a SciPy sparse matrix, converts to a dense NumPy array.
    - If input is an xarray DataArray or Dataset containing sparse
      arrays, converts to dense.
    - If input is an xarray DataArray or Dataset containing dense
      arrays, converts to sparse.

    :param data: The input data.
    :type data: Union[numpy.ndarray, scipy.sparse.spmatrix,
        xarray.DataArray, xarray.Dataset]
    :return: Converted object -- sparse if input is dense, dense if
        input is sparse.
    :raises TypeError: If `data` is not one of the supported types.
    """
    import numpy as np
    import scipy.sparse as sp

    if isinstance(data, np.ndarray):
        # Convert dense NumPy array to sparse (csr_matrix is
        # inherently 2D, so a 1D input comes back as a single row)
        return sp.csr_matrix(data)

    if sp.issparse(data):
        # Convert sparse matrix to dense NumPy array
        return data.toarray()

    # Defer the xarray import until it is actually needed, so the
    # NumPy/SciPy conversions above work without xarray installed
    import xarray as xr

    if isinstance(data, xr.DataArray):
        # Convert DataArray values while preserving metadata
        if sp.issparse(data.data):
            return xr.DataArray(
                data.data.toarray(), dims=data.dims,
                coords=data.coords, attrs=data.attrs)
        return xr.DataArray(
            sp.csr_matrix(data.data), dims=data.dims,
            coords=data.coords, attrs=data.attrs)

    if isinstance(data, xr.Dataset):
        # Convert each variable in the Dataset, preserving its metadata
        def convert_var(var):
            if sp.issparse(var.data):
                return xr.DataArray(var.data.toarray(), dims=var.dims,
                                    coords=var.coords, attrs=var.attrs)
            return xr.DataArray(sp.csr_matrix(var.data), dims=var.dims,
                                coords=var.coords, attrs=var.attrs)

        return data.map(convert_var)

    raise TypeError(f'Unsupported data type: {type(data)}. '
                    'Input must be a NumPy array, SciPy sparse matrix, '
                    'xarray DataArray, or xarray Dataset.')
56
+
57
+
58
def convert_xarray_nexus(data):
    """Convert an `xarray.DataArray` or `xarray.Dataset` into an
    `nexusformat.nexus.NXdata` or vice versa.

    :param data: Input data.
    :type data: Union[xarray.DataArray, xarray.Dataset,
        nexusformat.nexus.NXdata]
    :return: Converted data.
    :rtype: Union[xarray.DataArray, xarray.Dataset,
        nexusformat.nexus.NXdata]
    :raises TypeError: If `data` is not one of the supported types.
    """
    import xarray as xr
    from nexusformat.nexus import NXdata, NXfield

    if isinstance(data, xr.DataArray):
        # One signal field, one axis field per dimension
        return NXdata(
            value=data.values,
            name=data.name,
            attrs=data.attrs,
            axes=tuple(
                NXfield(
                    value=data[dim].values,
                    name=dim,
                    attrs=data[dim].attrs,
                )
                for dim in data.dims),
        )
    if isinstance(data, xr.Dataset):
        # One field per data variable, one axis field per dimension
        # NOTE(review): current xarray Datasets expose no `.name`
        # attribute -- confirm this branch works against the xarray
        # version pinned by this package
        return NXdata(
            **{var:
                NXfield(
                    value=data[var].values,
                    name=var,
                    attrs=data[var].attrs,
                )
                for var in data.data_vars},
            name=data.name,
            attrs=data.attrs,
            axes=tuple(
                NXfield(
                    value=data[dim].values,
                    name=dim,
                    attrs=data[dim].attrs,
                )
                for dim in data.dims),
        )
    if isinstance(data, NXdata):
        # Resolve the axes: fall back to `unstructured_axes` when no
        # regular `axes` attribute is set
        nxaxes = data.nxaxes
        if nxaxes is None:
            if 'unstructured_axes' in data.attrs:
                nxaxes = data.unstructured_axes
        if isinstance(nxaxes, str):
            nxaxes = [nxaxes]
        if nxaxes is None:
            # No axes at all: treat every field as a data variable
            # rather than failing on the `name not in nxaxes` test below
            nxaxes = []
        return xr.Dataset(
            data_vars={
                name: xr.DataArray(
                    data=field.nxdata,
                    name=name,
                    attrs=field.attrs,
                )
                for name, field in data.items()
                if isinstance(field, NXfield)
                and name not in nxaxes
            },
            coords={
                axis: xr.DataArray(
                    data=data[axis].nxdata,
                    name=data[axis].nxname,
                    attrs=data[axis].attrs,
                )
                for axis in nxaxes
            },
            attrs=data.attrs,
        )
    raise TypeError(f'Unsupported data type: {type(data)}. '
                    'Must be xarray.DataArray, xarray.Dataset, or NXdata.')
134
+
135
+
136
def convert_structured_unstructured(data):
    """Convert an `nexusformat.nexus.NXdata` between unstructured and
    structured layouts.

    An unstructured NXdata (marked by an ``unstructured_axes``
    attribute) stores one flat entry per scan point, with the axis
    coordinates held in sibling fields. This function rebuilds each
    signal as a dense N-dimensional grid over the sorted unique values
    of every axis, and returns a new NXdata with a regular ``axes``
    attribute. The reverse direction (structured to unstructured) is
    not implemented.

    :param data: Input data.
    :type data: nexusformat.nexus.NXdata
    :return: The structured equivalent of the input.
    :rtype: nexusformat.nexus.NXdata
    :raises NotImplementedError: If `data` is already structured
        (has an ``axes`` attribute).
    :raises TypeError: If `data` is not an NXdata or carries neither
        ``axes`` nor ``unstructured_axes``.
    """
    from copy import deepcopy
    from nexusformat.nexus import NXdata, NXfield
    import numpy as np

    if isinstance(data, NXdata):
        if 'unstructured_axes' in data.attrs:
            # Convert unstructured to structured
            nxaxes = data.attrs['unstructured_axes']
            # Rewrite the attributes so the output advertises regular
            # structured axes instead of unstructured ones
            attrs = deepcopy(data.attrs)
            attrs['axes'] = attrs['unstructured_axes']
            attrs.pop('unstructured_axes')
            # Every child that is not an axis is a signal to regrid
            signals = [name for name, child in data.items()
                       if name not in nxaxes]
            # Grid coordinates: sorted unique values of each axis
            structured_axes = {a: np.unique(data[a].nxdata) for a in nxaxes}
            dataset_shape = tuple(len(v) for a, v in structured_axes.items())
            # NOTE: np.empty leaves cells uninitialized -- any grid
            # point absent from the unstructured input stays garbage
            structured_signals = {s: np.empty(
                (*dataset_shape, *data[s].shape[1:]),
                dtype=data[s].nxdata.dtype,
            ) for s in signals}
            npts = len(data[signals[0]].nxdata.tolist())
            print(f'converting {npts} data points')
            # Map each scan point's axis value to its grid index
            # (searchsorted is exact here because structured_axes holds
            # the sorted unique values of the same data)
            indices = {
                a: np.searchsorted(structured_axes[a], data[a].nxdata)
                for a in nxaxes
            }
            # Scatter each signal's flat values onto the dense grid
            for s, value in structured_signals.items():
                value[tuple(indices[a] for a in nxaxes)] = data[s]
            structured_data = NXdata(
                **{s: NXfield(
                    value=structured_signals[s],
                    name=s,
                    attrs=data[s].attrs,
                ) for s in signals},
                name=data.nxname,
                attrs=attrs,
                axes=tuple(
                    NXfield(
                        value=value,
                        name=a,
                        # Drop 'target' so the new fields are not tied
                        # to the original tree's link targets
                        attrs={k: v for k, v in data[a].attrs.items()
                               if not k == 'target'},
                    )
                    for a, value in structured_axes.items()
                )
            )
            return structured_data

        if 'axes' in data.attrs:
            # Convert structured to unstructured
            raise NotImplementedError(
                'Conversion from structured to unstructured not implemented.')
    raise TypeError(f'Unsupported data type: {type(data)}')