nxs-analysis-tools 0.1.4__tar.gz → 0.1.6__tar.gz
This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release: this version of nxs-analysis-tools has been flagged as potentially problematic.
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/PKG-INFO +2 -24
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/pyproject.toml +4 -4
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/_meta/__init__.py +1 -1
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools/chess.py +113 -8
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools/datareduction.py +142 -56
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools/fitting.py +51 -9
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools/pairdistribution.py +113 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools.egg-info/SOURCES.txt +2 -0
- nxs_analysis_tools-0.1.6/tests/test_ellipsoidal_window.py +15 -0
- nxs_analysis_tools-0.1.6/tests/test_plot_slice_axes_types.py +40 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/LICENSE +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/MANIFEST.in +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/README.md +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/setup.cfg +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/setup.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools/__init__.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_accurate_highlight.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_chess.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_chess_fitting.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_datareduction.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_fitting.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_lmfit.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_mask_plotting.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_pairdistribution.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_plot_slice_with_ndarray.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_rotate_data.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_sum_axis.py +0 -0
- {nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/tests/test_symmetrizer_rectangular_plane.py +0 -0
{nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/PKG-INFO
RENAMED
@@ -1,30 +1,9 @@
 Metadata-Version: 2.4
 Name: nxs-analysis-tools
-Version: 0.1.4
+Version: 0.1.6
 Summary: Reduce and transform nexus format (.nxs) scattering data.
 Author-email: "Steven J. Gomez Alvarado" <stevenjgomez@ucsb.edu>
-License: MIT
-
-Copyright (c) 2023-2025 Steven J. Gomez Alvarado
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
+License-Expression: MIT
 Project-URL: Download, https://pypi.org/project/nxs-analysis-tools/
 Project-URL: Bug Tracker, https://github.com/stevenjgomez/nxs_analysis_tools/issues
 Project-URL: Documentation, https://nxs-analysis-tools.readthedocs.io/en/stable/
@@ -33,7 +12,6 @@ Project-URL: DOI, https://doi.org/10.5281/zenodo.15186359
 Keywords: diffraction,xrd,nexusformat,nexus,nxs,scattering
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Science/Research
-Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
{nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/pyproject.toml
RENAMED
@@ -6,11 +6,12 @@ build-backend = 'setuptools.build_meta'
 
 [project]
 name = 'nxs-analysis-tools'
-version = '0.1.4'
+version = '0.1.6'
 description = 'Reduce and transform nexus format (.nxs) scattering data.'
 readme = 'README.md'
 requires-python = '>=3.7'
-license =
+license = "MIT"
+license-files = ['LICENSE']
 authors = [{ name = 'Steven J. Gomez Alvarado', email = 'stevenjgomez@ucsb.edu' }]
 
 keywords = [
@@ -24,7 +25,6 @@ keywords = [
 classifiers = [
     'Development Status :: 5 - Production/Stable',
     'Intended Audience :: Science/Research',
-    'License :: OSI Approved :: MIT License',
     'Programming Language :: Python',
     'Programming Language :: Python :: 3',
     'Programming Language :: Python :: 3.9',
@@ -71,7 +71,7 @@ dev = [
 'DOI' = 'https://doi.org/10.5281/zenodo.15186359'
 
 [tool.bumpver]
-current_version = "0.1.4"
+current_version = "0.1.6"
 version_pattern = "MAJOR.MINOR.PATCH[-TAG]"
 tag_pattern = "vMAJOR.MINOR.PATCH[-TAG]"
 commit_message = "Bump version {old_version} -> {new_version}"
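The packaging change above replaces the embedded MIT license text and the license classifier with an SPDX license expression plus license-files (Metadata-Version 2.4). As a quick sanity check, a sketch like the following reads the new fields back from an installed copy (assuming 0.1.6 is installed; the commented values are the expected ones, not captured output):

from importlib.metadata import metadata

# Read the installed distribution metadata (the PKG-INFO shown above)
meta = metadata("nxs-analysis-tools")
print(meta["Version"])             # expected: 0.1.6
print(meta["License-Expression"])  # expected: MIT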
{nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools/chess.py
RENAMED
@@ -14,6 +14,7 @@ from IPython.display import display, Markdown
 from nxs_analysis_tools import load_data, Scissors
 from nxs_analysis_tools.fitting import LinecutModel
 from nxs_analysis_tools.datareduction import load_transform, reciprocal_lattice_params
+from lmfit.models import PseudoVoigtModel, LinearModel
 
 
 class TempDependence:
@@ -98,7 +99,11 @@ class TempDependence:
         Fit the line cut models for each temperature.
     plot_fit(mdheadings=False, **kwargs):
         Plot the fit results for each temperature.
-
+    overlay_fits(numpoints=None, vertical_offset=0, cmap='viridis', ax=ax):
+        Plot raw data and fitted models for each temperature.
+    fit_peak_simple():
+        Perform a basic fit using a pseudo-Voigt peak shape, linear background, and no constraints.
+    plot_order_parameter():
         Plot the temperature dependence of the peakheight parameter.
     print_fit_report():
         Print the fit report for each temperature.
@@ -188,7 +193,7 @@ class TempDependence:
         """
         self.datasets[temperature] = data
 
-    def load_transforms(self, temperatures_list=None, print_tree=True):
+    def load_transforms(self, temperatures_list=None, print_tree=True, use_nxlink=False):
         """
         Load transform datasets (from nxrefine) based on temperature.
 
@@ -196,8 +201,14 @@ class TempDependence:
         ----------
         temperatures_list : list of int or None, optional
             List of temperatures to load. If None, all available temperatures are loaded.
+
         print_tree : bool, optional
             Whether to print the data tree upon loading. Default True.
+
+        use_nxlink : bool, optional
+            If True, maintains the NXlink defined in the data file, which references
+            the raw data in the transform.nxs file. This saves memory when working with
+            many datasets. In this case, the axes are in reverse order. Default is False.
         """
         # Convert all temperatures to strings
         if temperatures_list:
@@ -240,7 +251,7 @@ class TempDependence:
 
             # Save dataset
             try:
-                self.datasets[self.temperatures[i]] = load_transform(path, print_tree)
+                self.datasets[self.temperatures[i]] = load_transform(path, print_tree=print_tree, use_nxlink=use_nxlink)
             except Exception as e:
                 # Report temperature that was unable to load, then raise exception.
                 temp_failed = self.temperatures[i]
@@ -463,7 +474,7 @@ class TempDependence:
         y = np.array([int(t) for t in self.temperatures])
 
         # Collect counts from each temperature and ensure they are numpy arrays
-        v = [self.linecuts[T].
+        v = [self.linecuts[T].nxsignal.nxdata for T in self.temperatures]
 
         # Convert list of arrays to a 2D array for the heatmap
        v_2d = np.array(v)
@@ -542,7 +553,7 @@ class TempDependence:
 
         Parameters
         ----------
-        model_components : Model or iterable of Model
+        model_components : Model, CompositeModel, or iterable of Model
             The model components to set for all line cut models.
 
         """
@@ -555,7 +566,8 @@ class TempDependence:
 
         This method sets the parameter hints for all line cut models in the analysis.
         It iterates over each line cut model and calls their respective `set_param_hint` method
-        with the provided arguments and keyword arguments.
+        with the provided arguments and keyword arguments. These are implemented when the
+        .make_params() method is called.
 
         Parameters
         ----------
@@ -567,10 +579,40 @@ class TempDependence:
         """
         [linecutmodel.set_param_hint(*args, **kwargs)
          for linecutmodel in self.linecutmodels.values()]
+
+    def params_set(self, name, **kwargs):
+        """
+        Set constraints on a parameter for all line cut models.
+
+        This method updates the specified parameter across all models in
+        `self.linecutmodels` using the keyword arguments provided. These
+        keyword arguments are passed to the `set()` method of the parameter,
+        which comes from a `lmfit.Parameters` object.
+
+        Parameters
+        ----------
+        name : str
+            Name of the parameter to modify (must exist in each model).
+        **kwargs
+            Constraint arguments passed to `Parameter.set()`, such as `value`,
+            `min`, `max`, `vary`, etc.
+
+        Raises
+        ------
+        KeyError
+            If the parameter `name` does not exist in one of the models.
+
+        Example
+        -------
+        >>> sample.params_set('peakamplitude', value=5, min=0, vary=True)
+        """
+
+        for linecutmodel in self.linecutmodels.values():
+            linecutmodel.params[name].set(**kwargs)
 
     def make_params(self):
         """
-
+        Create and initialize the parameters for all models.
 
         This method creates the parameters for all line cut models in the analysis.
         It iterates over each line cut model and calls their respective `make_params` method.
@@ -579,7 +621,8 @@ class TempDependence:
 
     def guess(self):
         """
-        Make initial parameter guesses for all line cut models.
+        Make initial parameter guesses for all line cut models. This overwrites any prior initial
+        values and constraints.
 
         This method generates initial parameter guesses for all line cut models in the analysis.
         It iterates over each line cut model and calls their respective `guess` method.
@@ -651,6 +694,68 @@ class TempDependence:
                        title=f"{T} K",
                        **kwargs)
 
+    def overlay_fits(self, numpoints=None, vertical_offset=0, cmap='viridis', ax=None):
+        """
+        Plot raw data and fitted models for each temperature with optional vertical offsets.
+
+        Parameters:
+        -----------
+        numpoints : int or None, default=None
+            Number of points to evaluate for the fitted model curves.
+            If None, uses the number of raw data points for each linecut.
+        vertical_offset : float, default=0
+            Amount to vertically offset each linecut for clarity.
+        cmap : str, default='viridis'
+            Name of the matplotlib colormap used to distinguish different temperatures.
+        ax : matplotlib.axes.Axes or None, default=None
+            Axis object to plot on. If None, a new figure and axis are created.
+
+        The function:
+        - Uses a colormap to assign unique colors to each temperature.
+        - Plots raw data alongside evaluated fit models for each linecut.
+        - Vertically offsets each trace by a constant value for visual separation.
+        - Displays a legend in reverse order to match top-to-bottom visual stacking.
+        - Automatically labels the x- and y-axes based on NeXus-style data metadata.
+        """
+
+        # Create a figure and axes if an axis is not already provided
+        _, ax = plt.subplots() if ax is None else (None, ax)
+
+        # Generate a color palette for the various temperatures
+        cmap = plt.get_cmap(cmap)
+        colors = [cmap(i / len(self.temperatures)) for i, _ in enumerate(self.temperatures)]
+
+        for i, lm in enumerate(self.linecutmodels.values()):
+            # Plot the raw data
+            ax.plot(lm.x, lm.y + vertical_offset * i, '.', c=colors[i])
+
+            # Evaluate the fit
+            numpoints = len(lm.x) if numpoints is None else numpoints
+            x_eval = np.linspace(lm.x.min(), lm.x.max(), numpoints)
+            y_eval = lm.modelresult.eval(x=x_eval)
+            ax.plot(x_eval, y_eval + vertical_offset * i, '-', c=colors[i], label=self.temperatures[i])
+
+        # Reverse legend entries to match top-to-bottom stacking
+        handles, labels = ax.get_legend_handles_labels()
+        ax.legend(handles[::-1], labels[::-1])
+
+        # Add axis labels
+        ax.set(xlabel=lm.data.nxaxes[0].nxname, ylabel=lm.data.nxsignal.nxname)
+
+    def fit_peak_simple(self):
+        """
+        Fit all linecuts in the temperature series using a pseudo-Voigt peak shape and linear
+        background, with no constraints.
+        """
+
+        for T in self.temperatures:
+            linecutmodel = self.linecutmodels[T]
+            linecutmodel.set_model_components([PseudoVoigtModel(prefix='peak'),
+                                               LinearModel(prefix='background')])
+            linecutmodel.make_params()
+            linecutmodel.guess()
+            linecutmodel.fit()
+
     def plot_order_parameter(self):
         """
         Plot the temperature dependence of the peak height (order parameter).
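Taken together, the chess.py additions extend the TempDependence workflow. The sketch below is a hedged outline only: the bare TempDependence() construction, the sample/file layout, and the point at which linecuts are taken are assumptions not shown in this diff.

from nxs_analysis_tools.chess import TempDependence

td = TempDependence()                  # hypothetical setup; sample paths omitted
td.load_transforms(use_nxlink=True)    # new in 0.1.6: keep the NXlink to save memory (axes reversed)
# ... take linecuts for each temperature here (e.g., with Scissors) ...
td.fit_peak_simple()                   # new: pseudo-Voigt peak + linear background, no constraints
# td.params_set('peakamplitude', min=0)  # new: constrain one parameter across all models (for manual fits)
td.overlay_fits(vertical_offset=10)    # new: stacked raw data and fits, one color per temperature
td.plot_order_parameter()              # temperature dependence of the fitted peak height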
{nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools/datareduction.py
RENAMED
@@ -49,7 +49,7 @@ def load_data(path, print_tree=True):
     return g.entry.data
 
 
-def load_transform(path, print_tree=True):
+def load_transform(path, print_tree=True, use_nxlink=False):
     """
     Load transform data from an nxrefine output file.
 
@@ -61,16 +61,24 @@ def load_transform(path, print_tree=True):
     print_tree : bool, optional
         If True, prints the NeXus data tree upon loading. Default is True.
 
+    use_nxlink : bool, optional
+        If True, maintains the NXlink defined in the data file, which references
+        the raw data in the transform.nxs file. This saves memory when working with
+        many datasets. In this case, the axes are in reverse order. Default is False.
+
     Returns
     -------
     data : NXdata
         The loaded transform data as an NXdata object.
     """
-
+    root = nxload(path)
 
-
-
+    if use_nxlink:
+        data = root.entry.transform
+    else:
+        data = NXdata(NXfield(root.entry.transform.data.nxdata.transpose(2, 1, 0), name='counts'),
+                      (root.entry.transform.Qh, root.entry.transform.Qk, root.entry.transform.Ql))
 
     print(data.tree) if print_tree else None
 
@@ -138,6 +146,36 @@ def rebin_3d(array):
 
     return rebinned
 
+def rebin_2d(array):
+    """
+    Rebins a 2D NumPy array by a factor of 2 along each dimension.
+
+    This function reduces the size of the input array by averaging over non-overlapping
+    2x2 blocks. Each dimension of the input array must be divisible by 2.
+
+    Parameters
+    ----------
+    array : np.ndarray
+        A 2-dimensional NumPy array to be rebinned.
+
+    Returns
+    -------
+    np.ndarray
+        A rebinned array with shape (N//2, M//2) if the original shape was (N, M).
+    """
+
+    # Ensure the array shape is divisible by 2 in each dimension
+    shape = array.shape
+    if any(dim % 2 != 0 for dim in shape):
+        raise ValueError("Each dimension of the array must be divisible by 2 to rebin.")
+
+    # Reshape the array to group the data into 2x2 blocks
+    reshaped = array.reshape(shape[0] // 2, 2, shape[1] // 2, 2)
+
+    # Average over the 2x2 blocks
+    rebinned = reshaped.mean(axis=(1, 3))
+
+    return rebinned
 
 def rebin_1d(array):
     """
@@ -179,8 +217,7 @@ def rebin_nxdata(data):
     - Then, each axis is rebinned using `rebin_1d`.
 
     The signal array is similarly cropped to remove the last element along any dimension
-    with an odd shape, and then the data is averaged over 2x2x... blocks
-    `rebin_1d` method (assumed to apply across 1D slices).
+    with an odd shape, and then the data is averaged over 2x2x... blocks.
 
     Parameters
     ----------
@@ -224,7 +261,12 @@ def rebin_nxdata(data):
     data_arr = data_arr[tuple(slice_obj)]
 
     # Perform actual rebinning
-
+    if data.ndim == 3:
+        data_arr = rebin_3d(data_arr)
+    elif data.ndim == 2:
+        data_arr = rebin_2d(data_arr)
+    elif data.ndim == 1:
+        data_arr = rebin_1d(data_arr)
 
     return NXdata(NXfield(data_arr, name=data.signal),
                   tuple([axis for axis in new_axes])
@@ -246,13 +288,15 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
     data : :class:`nexusformat.nexus.NXdata` or ndarray
         The dataset to plot. Can be an `NXdata` object or a `numpy` array.
 
-    X : NXfield, optional
-        The X axis
-
+    X : ndarray or NXfield, optional
+        The values for the X axis. If `data` is an NXdata object and `X` is None, the X axis is
+        inherited from the NXdata object. If `data` is a NumPy ndarray and `X` is None, a default
+        range from 0 to the number of columns in `data` is used.
 
-    Y : NXfield, optional
-        The Y axis
-
+    Y : ndarray or NXfield, optional
+        The values for the Y axis. If `data` is an NXdata object and `Y` is None, the Y axis is
+        inherited from the NXdata object. If `data` is a NumPy ndarray and `Y` is None, a default
+        range from 0 to the number of rows in `data` is used.
 
     sum_axis : int, optional
         If the input data is 3D, this specifies the axis to sum over in order
@@ -327,9 +371,17 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
     p : :class:`matplotlib.collections.QuadMesh`
         The `matplotlib` QuadMesh object representing the plotted data.
     """
+
+    # Some logic to control the processing of the arrays
     is_array = False
     is_nxdata = False
+    no_xy_provided = True
+
+    # If X,Y not provided by user
+    if X is not None and Y is not None:
+        no_xy_provided = False
 
+    # Examine data type to be plotted
     if isinstance(data, np.ndarray):
         is_array = True
     elif isinstance(data, (NXdata, NXfield)):
@@ -339,43 +391,72 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
                         f"Supported types are np.ndarray and NXdata.")
 
     # If three-dimensional, demand sum_axis to reduce to two dimensions.
-    if
-    assert sum_axis is not None, "sum_axis must be specified when data
-
-    data = data.sum(axis=sum_axis)
+    if data.ndim == 3:
+        assert sum_axis is not None, "sum_axis must be specified when data.ndim == 3."
 
-
-
+        if is_array:
+            data = data.sum(axis=sum_axis)
+        elif is_nxdata:
+            arr = data.nxsignal.nxdata
+            arr = arr.sum(axis=sum_axis)
 
-
-
+            # Create a 2D template from the original nxdata
+            slice_obj = [slice(None)] * len(data.shape)
+            slice_obj[sum_axis] = 0
 
-
-
-    slice_obj[sum_axis] = 0
-
-    # Use the 2D template to create a new nxdata
-    data = array_to_nxdata(arr, data[slice_obj])
+            # Use the 2D template to create a new nxdata
+            data = array_to_nxdata(arr, data[slice_obj])
 
+    # If the data is of type ndarray, then convert to NXdata
     if is_array:
+        # Convert X to NXfield if it is not already
         if X is None:
-            X = NXfield(np.
+            X = NXfield(np.arange(data.shape[0]), name='x')
+        elif isinstance(X, np.ndarray):
+            X = NXfield(X, name='x')
+        elif isinstance(X, NXfield):
+            pass
+        else:
+            raise TypeError("X must be of type np.ndarray or NXdata")
+
+        # Convert Y to NXfield if it is not already
         if Y is None:
-            Y = NXfield(np.
+            Y = NXfield(np.arange(data.shape[1]), name='y')
+        elif isinstance(Y, np.ndarray):
+            Y = NXfield(Y, name='y')
+        elif isinstance(Y, NXfield):
+            pass
+        else:
+            raise TypeError("Y must be of type np.ndarray or NXdata")
+
         if transpose:
             X, Y = Y, X
             data = data.transpose()
+
         data = NXdata(NXfield(data, name='value'), (X, Y))
-        data_arr = data
+        data_arr = data.nxsignal.nxdata.transpose()
+    # Otherwise, if data is of type NXdata, then decide whether to create axes,
+    # use provided axes, or inherit axes.
     elif is_nxdata:
         if X is None:
-            X = data
+            X = data.nxaxes[0]
+        elif isinstance(X, np.ndarray):
+            X = NXfield(X, name='x')
+        elif isinstance(X, NXdata):
+            pass
         if Y is None:
-            Y = data
+            Y = data.nxaxes[1]
+        elif isinstance(Y, np.ndarray):
+            Y = NXfield(Y, name='y')
+        elif isinstance(Y, NXdata):
+            pass
+
+        # Transpose axes and data if specified
         if transpose:
             X, Y = Y, X
             data = data.transpose()
-
+
+        data_arr = data.nxsignal.nxdata.transpose()
 
     # Display Markdown heading
     if mdheading is None:
@@ -407,6 +488,7 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
     elif logscale:
         norm = colors.LogNorm(vmin=vmin, vmax=vmax)
 
+
     # Plot data
     p = ax.pcolormesh(X.nxdata, Y.nxdata, data_arr, shading='auto', norm=norm, cmap=cmap, **kwargs)
 
@@ -448,14 +530,15 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
     # Correct aspect ratio for the x/y axes after transformation
     ax.set(aspect=np.cos(skew_angle_adj * np.pi / 180))
 
-    # Add tick marks all around
-    ax.tick_params(direction='in', top=True, right=True, which='both')
 
     # Automatically set tick locations, only if NXdata or if X,Y axes are provided for an array
-    if is_nxdata or (is_array and (
+    if is_nxdata or (is_array and (no_xy_provided == False)):
         # Add default minor ticks on x
         ax.xaxis.set_minor_locator(MultipleLocator(1))
 
+        # Add tick marks all around
+        ax.tick_params(direction='in', top=True, right=True, which='both')
+
         if xticks is not None:
             # Use user provided values
             ax.xaxis.set_major_locator(MultipleLocator(xticks))
@@ -466,6 +549,9 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
         if yticks is not None:
             # Use user provided values
             ax.yaxis.set_major_locator(MultipleLocator(yticks))
+    else:
+        # Add tick marks all around
+        ax.tick_params(direction='in', top=True, right=True, which='major')
 
     # Apply transform to tick marks
     for i in range(0, len(ax.xaxis.get_ticklines())):
@@ -753,8 +839,8 @@ class Scissors:
         slice_obj[2] = center[2]
 
         p1 = plot_slice(data[slice_obj],
-                        X=data
-                        Y=data
+                        X=data.nxaxes[0],
+                        Y=data.nxaxes[1],
                         ax=axes[0],
                         **kwargs)
         ax = axes[0]
@@ -777,8 +863,8 @@ class Scissors:
         slice_obj[1] = center[1]
 
         p2 = plot_slice(data[slice_obj],
-                        X=data
-                        Y=data
+                        X=data.nxaxes[0],
+                        Y=data.nxaxes[2],
                         ax=axes[1],
                         **kwargs)
         ax = axes[1]
@@ -801,8 +887,8 @@ class Scissors:
         slice_obj[0] = center[0]
 
         p3 = plot_slice(data[slice_obj],
-                        X=data
-                        Y=data
+                        X=data.nxaxes[1],
+                        Y=data.nxaxes[2],
                         ax=axes[2],
                         **kwargs)
         ax = axes[2]
@@ -849,31 +935,31 @@ class Scissors:
         slice_obj = [slice(None)] * data.ndim
         slice_obj[2] = center[2]
         p1 = plot_slice(data[slice_obj],
-                        X=data
-                        Y=data
+                        X=data.nxaxes[0],
+                        Y=data.nxaxes[1],
                         ax=axes[0],
                         **kwargs)
-        axes[0].set_aspect(len(data
+        axes[0].set_aspect(len(data.nxaxes[0].nxdata) / len(data.nxaxes[1].nxdata))
 
         # Plot cross section 2
         slice_obj = [slice(None)] * data.ndim
         slice_obj[1] = center[1]
         p3 = plot_slice(data[slice_obj],
-                        X=data
-                        Y=data
+                        X=data.nxaxes[0],
+                        Y=data.nxaxes[2],
                         ax=axes[1],
                         **kwargs)
-        axes[1].set_aspect(len(data
+        axes[1].set_aspect(len(data.nxaxes[0].nxdata) / len(data.nxaxes[2].nxdata))
 
         # Plot cross-section 3
         slice_obj = [slice(None)] * data.ndim
         slice_obj[0] = center[0]
         p2 = plot_slice(data[slice_obj],
-                        X=data
-                        Y=data
+                        X=data.nxaxes[1],
+                        Y=data.nxaxes[2],
                         ax=axes[2],
                         **kwargs)
-        axes[2].set_aspect(len(data
+        axes[2].set_aspect(len(data.nxaxes[1].nxdata) / len(data.nxaxes[2].nxdata))
 
         # Adjust subplot padding
         fig.subplots_adjust(wspace=0.3)
@@ -984,7 +1070,7 @@ def rotate_data(data, lattice_angle, rotation_angle, rotation_axis, printout=Fal
     Rotated data as an NXdata object.
     """
     # Define output array
-    output_array = np.zeros(data
+    output_array = np.zeros(data.nxsignal.shape)
 
     # Define shear transformation
     skew_angle_adj = 90 - lattice_angle
@@ -1085,7 +1171,7 @@ def rotate_data(data, lattice_angle, rotation_angle, rotation_axis, printout=Fal
         output_array[:, :, i] = counts_unpadded
     print('\nDone.')
     return NXdata(NXfield(output_array, name=p.padded.signal),
-                  (data
+                  (data.nxaxes[0], data.nxaxes[1], data.nxaxes[2]))
 
 
 def rotate_data_2D(data, lattice_angle, rotation_angle):
@@ -1180,7 +1266,7 @@ def rotate_data_2D(data, lattice_angle, rotation_angle):
 
     print('\nDone.')
     return NXdata(NXfield(counts_unpadded, name=p.padded.signal),
-                  (data
+                  (data.nxaxes[0], data.nxaxes[1]))
 
 
 class Padder:
@@ -1265,7 +1351,7 @@ class Padder:
         data = self.data
         self.padding = padding
 
-        padded_shape = tuple(data
+        padded_shape = tuple(data.nxsignal.nxdata.shape[i]
                              + self.padding[i] * 2 for i in range(data.ndim))
 
         # Create padded dataset
@@ -1275,7 +1361,7 @@ class Padder:
         for i, _ in enumerate(slice_obj):
             slice_obj[i] = slice(self.padding[i], -self.padding[i], None)
         slice_obj = tuple(slice_obj)
-        padded[slice_obj] = data
+        padded[slice_obj] = data.nxsignal.nxdata
 
         padmaxes = tuple(self.maxes[i] + self.padding[i] * self.steps[i]
                          for i in range(data.ndim))
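For orientation, here is a short sketch exercising the datareduction.py additions above ('transform.nxs' is a placeholder path and the arrays are synthetic):

import numpy as np
from nxs_analysis_tools.datareduction import load_transform, rebin_2d, plot_slice

data = load_transform('transform.nxs', use_nxlink=True)  # new flag: keep the NXlink (axes in reverse order)

small = rebin_2d(np.ones((8, 6)))                        # averages 2x2 blocks -> shape (4, 3)

# plot_slice now accepts a plain ndarray and optional ndarray axes
p = plot_slice(np.random.rand(50, 40),
               X=np.linspace(-1, 1, 50),
               Y=np.linspace(-2, 2, 40))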
{nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools/fitting.py
RENAMED
@@ -3,8 +3,9 @@ Module for fitting of linecuts using the lmfit package.
 """
 
 import operator
-from lmfit
-from lmfit.model import CompositeModel
+from lmfit import Parameters
+from lmfit.model import Model, CompositeModel
+from lmfit.models import PseudoVoigtModel, LinearModel
 import matplotlib.pyplot as plt
 import numpy as np
 
@@ -66,6 +67,8 @@ class LinecutModel:
         Fit the model to the data.
     plot_fit(self, numpoints=None, fit_report=True, **kwargs)
         Plot the fitted model.
+    fit_peak_simple():
+        Perform a basic fit using a pseudo-Voigt peak shape, linear background, and no constraints.
     print_fit_report(self)
         Print the fit report.
     """
@@ -110,15 +113,25 @@ class LinecutModel:
 
         Parameters
         ----------
-        model_components : Model or
-            The model component(s) to be used for fitting
-            which will be combined into a CompositeModel.
+        model_components : Model, CompositeModel, or iterable of Model
+            The model component(s) to be used for fitting.
         """
 
         # If the model only has one component, then use it as the model
         if isinstance(model_components, Model):
             self.model = model_components
-
+            self.params = self.model.make_params()
+
+        # If the model is a composite model, then use it as the model
+        elif isinstance(model_components, CompositeModel):
+            self.model = model_components
+            self.model_components = self.model.components
+            # Make params for each component of the model
+            self.params = Parameters()
+            for component in self.model.components:
+                self.params.update(component.make_params())
+
+        # Else, combine the components into a composite model and use that as the model
         else:
             self.model_components = model_components
             self.model = model_components[0]
@@ -127,9 +140,15 @@ class LinecutModel:
             for component in model_components[1:]:
                 self.model = CompositeModel(self.model, component, operator.add)
 
+            # Make params for each component of the model
+            self.params = Parameters()
+            for component in self.model.components:
+                self.params.update(component.make_params())
+
     def set_param_hint(self, *args, **kwargs):
         """
-        Set parameter hints for the model.
+        Set parameter hints for the model. These are implemented when the .make_params() method
+        is called.
 
         Parameters
         ----------
@@ -159,10 +178,22 @@ class LinecutModel:
 
     def guess(self):
         """
-        Perform initial guesses for each model component.
+        Perform initial guesses for each model component and update params. This overwrites any
+        prior initial values and constraints.
+
+        Returns
+        -------
+        components_params : list
+            A list containing params objects for each component of the model.
         """
-
+
+        components_params = []
+
+        for model_component in self.model.components:
             self.params.update(model_component.guess(self.y, x=self.x))
+            components_params.append(model_component.guess(self.y, x=self.x))
+
+        return components_params
 
     def print_initial_params(self):
         """
@@ -251,6 +282,17 @@ class LinecutModel:
         if fit_report:
             print(self.modelresult.fit_report())
         return ax
+
+    def fit_peak_simple(self):
+        """
+        Fit all linecuts in the temperature series using a pseudo-Voigt peak shape and linear
+        background, with no constraints.
+        """
+        self.set_model_components([PseudoVoigtModel(prefix='peak'),
+                                   LinearModel(prefix='background')])
+        self.make_params()
+        self.guess()
+        self.fit()
 
     def print_fit_report(self):
         """
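A minimal sketch of the new composite-model handling in LinecutModel follows; the bare constructor and the direct assignment of x and y are assumptions, since this diff does not show how a LinecutModel is normally populated.

import numpy as np
from lmfit.models import PseudoVoigtModel, LinearModel
from nxs_analysis_tools.fitting import LinecutModel

lm = LinecutModel()                              # assumed construction
lm.x = np.linspace(-1, 1, 201)                   # assumed: synthetic linecut
lm.y = np.exp(-lm.x**2 / 0.01) + 0.1 * lm.x + 1

# A CompositeModel can now be passed directly; params are built per component.
lm.set_model_components(PseudoVoigtModel(prefix='peak') + LinearModel(prefix='background'))
lm.make_params()
lm.guess()   # overwrites any prior initial values and constraints
lm.fit()
# For this particular model choice, lm.fit_peak_simple() performs the same steps in one call.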
{nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools/pairdistribution.py
RENAMED
@@ -1039,6 +1039,8 @@ class Interpolator:
         The dataset containing the data to be interpolated.
         """
         self.data = data
+        self.interpolated = data
+        self.tapered = data
 
     def set_kernel(self, kernel):
         """
@@ -1198,6 +1200,82 @@ class Interpolator:
 
         self.window = window
 
+    def set_ellipsoidal_tukey_window(self, tukey_alpha=1.0, coeffs=None):
+        """
+        Set an ellipsoidal Tukey window function for data tapering.
+
+        The Tukey window smoothly tapers the data to zero near the edges of the
+        elliptical region defined by quadratic form coefficients. This helps reduce
+        artifacts in Fourier transforms and other operations sensitive to boundary effects.
+
+        Parameters
+        ----------
+        tukey_alpha : float, optional
+            Tapering parameter for the Tukey window, between 0 and 1.
+            - `tukey_alpha = 0` results in a ellipsoidal window (no tapering).
+            - `tukey_alpha = 1` results in a full cosine taper.
+            Default is 1.0.
+
+        coeffs : tuple of float, optional
+            Coefficients `(c0, c1, c2, c3, c4, c5)` defining the ellipsoidal
+            quadratic form:
+                R^2 = c0*H^2 + c1*H*K + c2*K^2 + c3*K*L + c4*L^2 + c5*L*H
+            If None, coefficients are automatically set to match the edges of the
+            reciprocal space axes (H, K, L), which should be appropriate in cases
+            where H, K, and L are orthogonal.
+
+        Notes
+        -----
+        - The maximum allowed radius `Qmax` is determined from the minimum radius
+          value along the edges of reciprocal space.
+        - The Tukey window is applied radially as a function of the distance `R`
+          from the center, defined by the ellipsoidal quadratic form.
+
+        Sets
+        ----
+        self.window : ndarray
+            A 3D array of the same shape as the data, containing the Tukey window
+            values between 0 and 1.
+        """
+
+        # Initialize axes
+        H,K,L = [axis for axis in self.data.nxaxes]
+
+        # Initialize coeffs (default to window reaching edge of array)
+        smallest_extent = np.min([H.max(), K.max(), L.max()])
+        c = coeffs if coeffs is not None else ((smallest_extent / H.max()) ** 2,
+                                               0,
+                                               (smallest_extent / K.max()) ** 2,
+                                               0,
+                                               (smallest_extent / L.max()) ** 2,
+                                               0
+                                               )
+
+        # Create meshgrid
+        HH, KK, LL = np.meshgrid(H,K,L, indexing='ij')
+
+        # Create radius array
+        RR = np.sqrt(
+            c[0] * HH ** 2 +
+            c[1] * HH * KK +
+            c[2] * KK ** 2 +
+            c[3] * KK * LL +
+            c[4] * LL ** 2 +
+            c[5] * LL * HH
+        )
+
+        # Check the edges of reciprocal space to verify Qmax
+        # Create list of pixels where H = H.max() or K = K.max() or L = L.max()
+        edges = np.where(np.logical_or(np.logical_or(HH == H.max(), KK == K.max()), LL == L.max()), RR, RR.max())
+        Qmax = edges.min()
+        alpha = tukey_alpha
+        period = (Qmax * alpha) / np.pi
+
+        window = np.where(RR > Qmax * (1 - alpha), (np.cos((RR - Qmax * (1 - alpha)) / period) + 1) / 2, 1)
+        window = np.where(RR > Qmax, 0, window)
+
+        self.window = window
+
     def set_window(self, window):
         """
         Set a custom window function for data tapering.
@@ -1578,6 +1656,41 @@ class DeltaPDF:
         self.interpolator.set_hexagonal_tukey_window(tukey_alphas)
         self.window = self.interpolator.window
 
+    def set_ellipsoidal_tukey_window(self, tukey_alpha=1.0, coeffs=None):
+        """
+        Set an ellipsoidal Tukey window function for data tapering.
+
+        The Tukey window smoothly tapers the data to zero near the edges of the
+        elliptical region defined by quadratic form coefficients. This helps reduce
+        artifacts in Fourier transforms and other operations sensitive to boundary effects.
+
+        Parameters
+        ----------
+        tukey_alpha : float, optional
+            Tapering parameter for the Tukey window, between 0 and 1.
+            - `tukey_alpha = 0` results in a ellipsoidal window (no tapering).
+            - `tukey_alpha = 1` results in a full cosine taper.
+            Default is 1.0.
+
+        coeffs : tuple of float, optional
+            Coefficients `(c0, c1, c2, c3, c4, c5)` defining the ellipsoidal
+            quadratic form:
+                R^2 = c0*H^2 + c1*H*K + c2*K^2 + c3*K*L + c4*L^2 + c5*L*H
+            If None, coefficients are automatically set to match the edges of the
+            reciprocal space axes (H, K, L), which should be appropriate in cases
+            where H, K, and L are orthogonal.
+
+        Notes
+        -----
+        - The maximum allowed radius `Qmax` is determined from the minimum radius
+          value along the edges of reciprocal space.
+        - The Tukey window is applied radially as a function of the distance `R`
+          from the center, defined by the ellipsoidal quadratic form.
+        """
+        self.interpolator.set_ellipsoidal_tukey_window(tukey_alpha=tukey_alpha, coeffs=coeffs)
+        self.window = self.interpolator.window
+
+
     def set_window(self, window):
         """
         Set a custom window function for data tapering.
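Reading the new set_ellipsoidal_tukey_window code above, the radial taper it applies is a piecewise Tukey-style profile, with R the ellipsoidal radius, alpha = tukey_alpha, and Qmax the smallest edge radius:

$$
w(R) =
\begin{cases}
1, & R \le Q_{\max}(1-\alpha), \\
\tfrac{1}{2}\left[1 + \cos\!\left(\dfrac{\pi\,(R - Q_{\max}(1-\alpha))}{\alpha\,Q_{\max}}\right)\right], & Q_{\max}(1-\alpha) < R \le Q_{\max}, \\
0, & R > Q_{\max},
\end{cases}
$$

i.e. the window is 1 inside the untapered core, falls as a half-cosine over the outer alpha fraction of the radius, and is 0 beyond Qmax.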
{nxs_analysis_tools-0.1.4 → nxs_analysis_tools-0.1.6}/src/nxs_analysis_tools.egg-info/SOURCES.txt
RENAMED
@@ -13,10 +13,12 @@ tests/test_accurate_highlight.py
 tests/test_chess.py
 tests/test_chess_fitting.py
 tests/test_datareduction.py
+tests/test_ellipsoidal_window.py
 tests/test_fitting.py
 tests/test_lmfit.py
 tests/test_mask_plotting.py
 tests/test_pairdistribution.py
+tests/test_plot_slice_axes_types.py
 tests/test_plot_slice_with_ndarray.py
 tests/test_rotate_data.py
 tests/test_sum_axis.py
nxs_analysis_tools-0.1.6/tests/test_ellipsoidal_window.py
@@ -0,0 +1,15 @@
+import numpy as np
+import matplotlib.pyplot as plt
+from nxs_analysis_tools import *
+
+from nxs_analysis_tools.pairdistribution import Interpolator
+
+data = load_data('docs/source/examples/example_data/plot_slice_data/cubic_hkli.nxs')
+h = Interpolator()
+h.set_data(data)
+h.set_ellipsoidal_tukey_window(tukey_alpha=0.0)
+h.apply_window()
+fig,axs = plt.subplots(2,1, figsize=(4,8), dpi=100)
+plot_slice(h.tapered[:,:,0.0], vmin=0, vmax=0.01, ax=axs[0])
+plot_slice(h.tapered[:,0.0,:], vmin=0, vmax=0.01, ax=axs[1])
+plt.show()
nxs_analysis_tools-0.1.6/tests/test_plot_slice_axes_types.py
@@ -0,0 +1,40 @@
+from src.nxs_analysis_tools import *
+import matplotlib.pyplot as plt
+import numpy as np
+
+data = load_data('../docs/source/examples/example_data/plot_slice_data/cubic_hkli.nxs')
+
+# NXdata with inherent axes
+_,ax = plt.subplots()
+plot_slice(data[:,:,0.0], ax=ax)
+# plt.show()
+
+# NXdata with NXfield axes
+_,ax = plt.subplots()
+plot_slice(data[:,:,0.0],X=data.nxaxes[0]*2,Y=data.nxaxes[1]*2, ax=ax)
+# plt.show()
+
+# NXdata with ndarray axes
+_,ax = plt.subplots()
+plot_slice(data[:,:,0.0],X=np.linspace(-0.1,0.1,len(data.nxaxes[0])),Y=np.linspace(-0.3,0.3,len(data.nxaxes[1])), ax=ax)
+# plt.show()
+
+# ndarray with inherent axes
+_,ax = plt.subplots()
+plot_slice(data[:,:,0.0].counts.nxdata, ax=ax)
+# plt.show()
+
+# ndarray with NXfield axes
+_,ax = plt.subplots()
+plot_slice(data[:,:,0.0].counts.nxdata,X=data.nxaxes[0]*2,Y=data.nxaxes[1]*2, ax=ax)
+# plt.show()
+
+# ndarray with ndarray axes
+_,ax = plt.subplots()
+plot_slice(data[:,:,0.0].counts.nxdata,X=np.linspace(-0.1,0.1,len(data.nxaxes[0])),Y=np.linspace(-0.3,0.3,len(data.nxaxes[1])), ax=ax)
+# plt.show()
+
+# ndarray with invalid axes
+# _,ax = plt.subplots()
+# plot_slice(data[:,:,0.0].counts.nxdata,X='test',Y='test', ax=ax)
+# plt.show()
All remaining files listed above with +0 -0 (LICENSE, MANIFEST.in, README.md, setup.cfg, setup.py, src/nxs_analysis_tools/__init__.py, and the unchanged test modules, including tests/test_symmetrizer_rectangular_plane.py) contain no changes between 0.1.4 and 0.1.6.