nxs-analysis-tools 0.1.9__py3-none-any.whl → 0.1.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- _meta/__init__.py +1 -1
- nxs_analysis_tools/__init__.py +1 -1
- nxs_analysis_tools/chess.py +77 -48
- nxs_analysis_tools/datareduction.py +85 -195
- nxs_analysis_tools/datasets.py +108 -0
- nxs_analysis_tools/lineartransformations.py +51 -0
- nxs_analysis_tools/pairdistribution.py +35 -110
- {nxs_analysis_tools-0.1.9.dist-info → nxs_analysis_tools-0.1.11.dist-info}/METADATA +15 -5
- nxs_analysis_tools-0.1.11.dist-info/RECORD +13 -0
- nxs_analysis_tools-0.1.9.dist-info/RECORD +0 -11
- {nxs_analysis_tools-0.1.9.dist-info → nxs_analysis_tools-0.1.11.dist-info}/WHEEL +0 -0
- {nxs_analysis_tools-0.1.9.dist-info → nxs_analysis_tools-0.1.11.dist-info}/licenses/LICENSE +0 -0
- {nxs_analysis_tools-0.1.9.dist-info → nxs_analysis_tools-0.1.11.dist-info}/top_level.txt +0 -0
_meta/__init__.py
CHANGED
nxs_analysis_tools/__init__.py
CHANGED
@@ -9,7 +9,7 @@ from .chess import TempDependence
 
 # What to import when running "from nxs_analysis_tools import *"
 __all__ = ['load_data', 'load_transform', 'plot_slice', 'Scissors',
-           'reciprocal_lattice_params', 'rotate_data',
+           'reciprocal_lattice_params', 'rotate_data',
           'convert_to_inverse_angstroms', 'array_to_nxdata', 'Padder',
           'rebin_nxdata', 'rebin_3d', 'rebin_1d', 'TempDependence',
           'animate_slice_temp', 'animate_slice_axis']
nxs_analysis_tools/chess.py
CHANGED
@@ -1,7 +1,7 @@
 """
 This module provides classes and functions for analyzing scattering datasets collected at CHESS
-(ID4B) with temperature dependence. It includes functions for loading
-
+(ID4B) with temperature dependence. It includes functions for loading temperature series and
+performing operations on all datasets in the series at once (e.g., cutting, fitting).
 """
 import os
 import re
@@ -59,10 +59,11 @@ class TempDependence:
         Initialize Scissors and LinecutModel objects for each temperature.
     set_data(temperature, data):
         Set the dataset for a specific temperature.
-    load_transforms(temperatures_list=None, print_tree=True):
+    load_transforms(temperatures_list=None, exclude_temperatures=None, print_tree=True):
         Load transform datasets (from nxrefine) based on temperature.
-    load_datasets(file_ending='hkli.nxs', temperatures_list=None,
-
+    load_datasets(file_ending='hkli.nxs', temperatures_list=None, exclude_temperatures=None,
+                  print_tree=True):
+        Load datasets (legacy CHESS format) from the specified folder.
     get_sample_directory():
         Get the folder path where the datasets are located.
     clear_datasets():
@@ -99,11 +100,12 @@ class TempDependence:
         Fit the line cut models for each temperature.
     plot_fit(mdheadings=False, **kwargs):
         Plot the fit results for each temperature.
-    overlay_fits(numpoints=None, vertical_offset=0, cmap='viridis', ax=ax
+    overlay_fits(numpoints=None, vertical_offset=0, cmap='viridis', ax=ax,
+                 data_kwargs=None, fit_kwargs=None):
        Plot raw data and fitted models for each temperature.
    fit_peak_simple():
        Perform a basic fit using a pseudo-Voigt peak shape, linear background, and no constraints.
-    plot_order_parameter(ax, **kwargs):
+    plot_order_parameter(ax, param_name='peakheight', **kwargs):
        Plot the temperature dependence of the peakheight parameter.
    print_fit_report():
        Print the fit report for each temperature.
@@ -188,6 +190,8 @@ class TempDependence:
         """
         for temperature in self.temperatures:
             self.scissors[temperature] = Scissors()
+            if temperature in self.datasets.keys():
+                self.scissors[temperature].set_data(self.datasets[temperature])
             self.linecutmodels[temperature] = LinecutModel()
 
     def set_data(self, temperature, data):
@@ -203,7 +207,7 @@ class TempDependence:
         """
         self.datasets[temperature] = data
 
-    def load_transforms(self, temperatures_list=None, print_tree=True, use_nxlink=False):
+    def load_transforms(self, temperatures_list=None, exclude_temperatures=None, print_tree=True, use_nxlink=False):
         """
         Load transform datasets (from nxrefine) based on temperature.
 
@@ -211,6 +215,9 @@ class TempDependence:
         ----------
         temperatures_list : list of int or None, optional
             List of temperatures to load. If None, all available temperatures are loaded.
+
+        exclude_temperatures : int, str, optional
+            Temperatures to skip. Applied after filtering with `temperatures_list`, if provided.
 
         print_tree : bool, optional
             Whether to print the data tree upon loading. Default True.
@@ -223,6 +230,10 @@ class TempDependence:
         # Convert all temperatures to strings
         if temperatures_list:
             temperatures_list = [str(t) for t in temperatures_list]
+        if exclude_temperatures:
+            if isinstance(exclude_temperatures, str):
+                exclude_temperatures = [exclude_temperatures]
+            exclude_temperatures = [str(t) for t in list(exclude_temperatures)]
 
         # Clear existing temperatures before loading files
         self.temperatures = []
@@ -237,7 +248,15 @@ class TempDependence:
                 # Identify temperature
                 temperature = match.group(1)
                 # print(f'Temperature = {temperature}')
-                if
+                if temperatures_list is not None:
+                    incl_temp = temperature in temperatures_list
+                else:
+                    incl_temp = True
+                if exclude_temperatures is not None:
+                    not_excl_temp = temperature not in exclude_temperatures
+                else:
+                    not_excl_temp = True
+                if incl_temp and not_excl_temp:
                     # Prepare file to be loaded
                     self.temperatures.append(temperature)
                     items_to_load.append(item)
@@ -269,40 +288,38 @@ class TempDependence:
                       f" Error: {e}")
                 raise  # Re-raise the exception
 
-
-            self.scissors[self.temperatures[i]] = Scissors()
-            self.scissors[self.temperatures[i]].set_data(self.datasets[self.temperatures[i]])
+        self.initialize()
 
-
-            self.linecutmodels[self.temperatures[i]] = LinecutModel()
-
-    def load_datasets(self, file_ending='hkli.nxs', temperatures_list=None, print_tree=True):
+    def load_datasets(self, file_ending='hkli.nxs', temperatures_list=None, exclude_temperatures=None, print_tree=True):
         """
         Load datasets (CHESS format) from the specified folder.
 
         Parameters
         ----------
         file_ending : str, optional
-
-        temperatures_list : list of int or
-
-
+            File extension of datasets to load. Default is 'hkli.nxs'.
+        temperatures_list : list of int or str, optional
+            Specific temperatures to load. If None, all temperatures are loaded.
+        exclude_temperatures : list of int or str, optional
+            Temperatures to skip. Applied after filtering with `temperatures_list`, if provided.
         print_tree : bool, optional
-
+            If True, prints the NeXus tree structure for each file. Default is True.
         """
-        temperature_folders = []  # Empty list to store temperature folder names
-        for item in os.listdir(self.sample_directory):
-            try:
-                temperature_folders.append(int(item))  # If folder name can be int, add it
-            except ValueError:
-                pass  # Otherwise don't add it
-        temperature_folders.sort()  # Sort from low to high T
-        temperature_folders = [str(i) for i in temperature_folders]  # Convert to strings
-
-        self.temperatures = temperature_folders
 
         if temperatures_list is not None:
             self.temperatures = [str(t) for t in temperatures_list]
+        else:
+            self.temperatures = []  # Empty list to store temperature folder names
+            for item in os.listdir(self.sample_directory):
+                try:
+                    self.temperatures.append(int(item))  # If folder name can be int, add it
+                except ValueError:
+                    pass  # Otherwise don't add it
+            self.temperatures.sort()  # Sort from low to high T
+            self.temperatures = [str(i) for i in self.temperatures]  # Convert to strings
+
+        if exclude_temperatures is not None:
+            [self.temperatures.remove(str(t)) for t in exclude_temperatures]
 
         # Load .nxs files
         for T in self.temperatures:
@@ -313,12 +330,7 @@ class TempDependence:
             # Load dataset at each temperature
             self.datasets[T] = load_data(filepath, print_tree)
 
-
-            self.scissors[T] = Scissors()
-            self.scissors[T].set_data(self.datasets[T])
-
-            # Initialize linecutmodel object at each temperature
-            self.linecutmodels[T] = LinecutModel()
+        self.initialize()
 
     def get_sample_directory(self):
         """
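
Both loaders now funnel per-temperature setup through `initialize()` and accept an `exclude_temperatures` filter. A minimal sketch of the new keyword; how the `TempDependence` object is pointed at a sample directory is not shown in this diff, so the constructor argument below is an assumption:

```python
from nxs_analysis_tools import TempDependence

# Hypothetical setup: a path argument at construction is one possibility.
td = TempDependence('/path/to/sample')

# Load every temperature folder except the 15 K and 300 K datasets (legacy CHESS format)
td.load_datasets(file_ending='hkli.nxs', exclude_temperatures=[15, 300], print_tree=False)

# Or keep only two temperatures from the nxrefine transform files
td.load_transforms(temperatures_list=[100, 200], print_tree=False)
```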
@@ -437,11 +449,16 @@ class TempDependence:
         # Get the Viridis colormap
         cmap = mpl.colormaps.get_cmap('viridis')
 
+        # Reverse zorder
+        zorder = 0
+
         for i, linecut in enumerate(self.linecuts.values()):
+
             x_data = linecut[linecut.axes].nxdata
             y_data = linecut[linecut.signal].nxdata + i * vertical_offset
-            ax.plot(x_data, y_data, color=cmap(i / len(self.linecuts)), label=self.temperatures[i],
-                    **kwargs)
+            ax.plot(x_data, y_data, color=cmap(i / len(self.linecuts)), label=self.temperatures[i],
+                    zorder=zorder, **kwargs)
+            zorder -= 1
 
         ax.set(xlabel=self.xlabel,
                ylabel=self.linecuts[self.temperatures[0]].signal)
@@ -704,7 +721,8 @@ class TempDependence:
                        title=f"{T} K",
                        **kwargs)
 
-    def overlay_fits(self, numpoints=None, vertical_offset=0, cmap='viridis', ax=None
+    def overlay_fits(self, numpoints=None, vertical_offset=0, cmap='viridis', ax=None,
+                     data_kwargs=None, fit_kwargs=None):
         """
         Plot raw data and fitted models for each temperature with optional vertical offsets.
 
@@ -719,6 +737,11 @@ class TempDependence:
             Name of the matplotlib colormap used to distinguish different temperatures.
         ax : :class:`matplotlib.axes.Axes` or None, default=None
             Axis object to plot on. If None, a new figure and axis are created.
+        data_kwargs : dict
+            Keyword arguments to be passed to the data plot function.
+        fit_kwargs : dict
+            Keyword arguments to be passed to the fit plot function.
+
 
         The function:
         - Uses a colormap to assign unique colors to each temperature.
@@ -731,19 +754,24 @@ class TempDependence:
         # Create a figure and axes if an axis is not already provided
         _, ax = plt.subplots() if ax is None else (None, ax)
 
+        if data_kwargs is None:
+            data_kwargs = {}
+        if fit_kwargs is None:
+            fit_kwargs = {}
+
         # Generate a color palette for the various temperatures
         cmap = plt.get_cmap(cmap)
         colors = [cmap(i / len(self.temperatures)) for i, _ in enumerate(self.temperatures)]
 
         for i, lm in enumerate(self.linecutmodels.values()):
             # Plot the raw data
-            ax.plot(lm.x, lm.y + vertical_offset * i, '.', c=colors[i])
+            ax.plot(lm.x, lm.y + vertical_offset * i, '.', c=colors[i], **data_kwargs)
 
             # Evaluate the fit
             numpoints = len(lm.x) if numpoints is None else numpoints
             x_eval = np.linspace(lm.x.min(), lm.x.max(), numpoints)
             y_eval = lm.modelresult.eval(x=x_eval)
-            ax.plot(x_eval, y_eval + vertical_offset * i, '-', c=colors[i], label=self.temperatures[i])
+            ax.plot(x_eval, y_eval + vertical_offset * i, '-', c=colors[i], label=self.temperatures[i], **fit_kwargs)
 
         # Reverse legend entries to match top-to-bottom stacking
         handles, labels = ax.get_legend_handles_labels()
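
With the new `data_kwargs` and `fit_kwargs` dictionaries, the raw points and the fitted curves can be styled independently. A hypothetical call, continuing the `td` sketch above and assuming the line cuts have already been cut and fit:

```python
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
td.overlay_fits(vertical_offset=50, cmap='plasma', ax=ax,
                data_kwargs={'markersize': 3, 'alpha': 0.5},  # styles the '.' data points
                fit_kwargs={'linewidth': 1.5})                # styles the fit curves
plt.show()
```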
@@ -767,18 +795,19 @@ class TempDependence:
         linecutmodel.params['peakamplitude'].set(min=0)
         linecutmodel.fit()
 
-    def plot_order_parameter(self, ax=None, **kwargs):
+    def plot_order_parameter(self, param_name='peakheight', ax=None, **kwargs):
         """
         Plot the temperature dependence of the peak height (order parameter).
 
-        This method extracts the
-        line cut fit stored in `linecutmodels` and plots it as a function
-        of temperature using matplotlib.
+        This method extracts the values of a chosen parameter from each temperature-dependent
+        line cut fit stored in `linecutmodels` and plots it as a function of temperature.
 
         Parameters
         ----------
         ax : :class:`matplotlib.axes.Axes`, optional
             Axis object to plot on. If None, a new figure and axis are created.
+        param_name : str, optional
+            The name of the lmfit parameter to extract. Default is 'peakheight'.
         **kwargs
             Keyword arguments to be passed to the plot function.
 
@@ -810,7 +839,7 @@ class TempDependence:
             if self.linecutmodels[T].modelresult is None:
                 raise AttributeError("Model result is empty. Have you fit the data to a model?")
 
-            peakheights.append(self.linecutmodels[T].modelresult.params[
+            peakheights.append(self.linecutmodels[T].modelresult.params[param_name].value)
 
         # Plot the peakheights vs. temperature
         if ax is None:
@@ -818,7 +847,7 @@ class TempDependence:
         else:
             fig = ax.figure
         ax.plot(temperatures, peakheights, **kwargs)
-        ax.set(xlabel='$T$ (K)', ylabel=
+        ax.set(xlabel='$T$ (K)', ylabel=param_name)
         return fig, ax
 
     def print_fit_report(self):
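
Because `param_name` now selects which lmfit parameter is tracked, quantities other than the peak height can be plotted against temperature. A short sketch; the alternative parameter name below is an assumption about the model's naming, not taken from the diff:

```python
# Default behaviour: track the fitted peak height vs. temperature
fig, ax = td.plot_order_parameter(marker='o')

# Track a different fit parameter (name assumed; it must exist in the lmfit result)
fig, ax = td.plot_order_parameter(param_name='peakfwhm', marker='o')
```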
nxs_analysis_tools/datareduction.py
CHANGED
@@ -1,5 +1,6 @@
 """
-
+Tools for reducing data into 2D and 1D, and visualization functions for plotting and animating
+data.
 """
 import os
 import io
@@ -14,12 +15,14 @@ from matplotlib import colors
 from matplotlib import patches
 from IPython.display import display, Markdown, HTML, Image
 from nexusformat.nexus import NXfield, NXdata, nxload, NeXusError, NXroot, NXentry, nxsave
-from scipy import
+from scipy.ndimage import rotate
+
+from .lineartransformations import ShearTransformer
 
 
 # Specify items on which users are allowed to perform standalone imports
 __all__ = ['load_data', 'load_transform', 'plot_slice', 'Scissors',
-           'reciprocal_lattice_params', 'rotate_data',
+           'reciprocal_lattice_params', 'rotate_data',
           'convert_to_inverse_angstroms', 'array_to_nxdata', 'Padder',
           'rebin_nxdata', 'rebin_3d', 'rebin_1d', 'animate_slice_temp',
           'animate_slice_axis']
@@ -397,7 +400,8 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
 
     # If three-dimensional, demand sum_axis to reduce to two dimensions.
     if data.ndim == 3:
-
+        if sum_axis is None:
+            raise ValueError("sum_axis must be specified when data.ndim == 3.")
 
         if is_array:
             data = data.sum(axis=sum_axis)
@@ -501,17 +505,8 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
     p = ax.pcolormesh(X.nxdata, Y.nxdata, data_arr, shading='auto', norm=norm, cmap=cmap, **kwargs)
 
     ## Transform data to new coordinate system if necessary
-
-
-    # Create blank 2D affine transformation
-    t = Affine2D()
-    # Scale y-axis to preserve norm while shearing
-    t += Affine2D().scale(1, np.cos(skew_angle_adj * np.pi / 180))
-    # Shear along x-axis
-    t += Affine2D().skew_deg(skew_angle_adj, 0)
-    # Return to original y-axis scaling
-    t += Affine2D().scale(1, np.cos(skew_angle_adj * np.pi / 180)).inverted()
-    ## Correct for x-displacement after shearing
+    t = ShearTransformer(skew_angle)
+
     # If ylims provided, use those
     if ylim is not None:
         # Set ylims
@@ -521,8 +516,8 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
     else:
         ymin, ymax = ax.get_ylim()
     # Use ylims to calculate translation (necessary to display axes in correct position)
-    p.set_transform(t
-                    + Affine2D().translate(-ymin * np.sin(
+    p.set_transform(t.t
+                    + Affine2D().translate(-ymin * np.sin(t.shear_angle * np.pi / 180), 0)
                     + ax.transData)
 
     # Set x limits
@@ -531,12 +526,12 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
     else:
         xmin, xmax = ax.get_xlim()
     if skew_angle <= 90:
-        ax.set(xlim=(xmin, xmax + (ymax - ymin) / np.tan((90 -
+        ax.set(xlim=(xmin, xmax + (ymax - ymin) / np.tan((90 - t.shear_angle) * np.pi / 180)))
     else:
-        ax.set(xlim=(xmin - (ymax - ymin) / np.tan((
+        ax.set(xlim=(xmin - (ymax - ymin) / np.tan((t.shear_angle - 90) * np.pi / 180), xmax))
 
     # Correct aspect ratio for the x/y axes after transformation
-    ax.set(aspect=np.cos(
+    ax.set(aspect=np.cos(t.shear_angle * np.pi / 180))
 
 
     # Automatically set tick locations, only if NXdata or if X,Y axes are provided for an array
@@ -568,7 +563,7 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
             line = ax.xaxis.get_majorticklines()[i]
             if i % 2:
                 # Top ticks (translation here makes their direction="in")
-                m._transform.set(Affine2D().translate(0, -1) + Affine2D().skew_deg(
+                m._transform.set(Affine2D().translate(0, -1) + Affine2D().skew_deg(t.shear_angle, 0))
                 # This first method shifts the top ticks horizontally to match the skew angle.
                 # This does not look good in all cases.
                 # line.set_transform(Affine2D().translate((ymax-ymin)*np.sin(skew_angle*np.pi/180),0) +
@@ -578,7 +573,7 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
                 line.set_transform(line.get_transform())  # This does nothing
             else:
                 # Bottom ticks
-                m._transform.set(Affine2D().skew_deg(
+                m._transform.set(Affine2D().skew_deg(t.shear_angle, 0))
 
             line.set_marker(m)
 
@@ -586,9 +581,9 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
             m = MarkerStyle(2)
             line = ax.xaxis.get_minorticklines()[i]
             if i % 2:
-                m._transform.set(Affine2D().translate(0, -1) + Affine2D().skew_deg(
+                m._transform.set(Affine2D().translate(0, -1) + Affine2D().skew_deg(t.shear_angle, 0))
             else:
-                m._transform.set(Affine2D().skew_deg(
+                m._transform.set(Affine2D().skew_deg(t.shear_angle, 0))
 
             line.set_marker(m)
 
@@ -773,7 +768,7 @@ def animate_slice_axis(data, axis, axis_values, ax=None, interval=500, save_gif=
 
         if title:
            axis_label = data.axes[axis]
-            ax.set(title=f'
+            ax.set(title=f'{axis_label}={parameter:{title_fmt}}')
 
     ani = animation.FuncAnimation(fig, update, frames=axis_values, interval=interval, repeat=False)
 
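
Internally, `plot_slice` now builds its skew transform with the shared `ShearTransformer`; the public keyword is still `skew_angle`. A hypothetical call for a slice from a hexagonal lattice (file name and color limits are made up):

```python
import matplotlib.pyplot as plt
from nxs_analysis_tools import load_data, plot_slice

data = load_data('transform.nxs')   # hypothetical 3D reciprocal-space volume
plot_slice(data, sum_axis=2,        # sum over the third axis to get a 2D slice
           skew_angle=120,          # shear the in-plane axes to the lattice angle
           vmin=0, vmax=50)
plt.show()
```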
@@ -1232,9 +1227,9 @@ def convert_to_inverse_angstroms(data, lattice_params):
     return NXdata(new_data, (a_star, b_star, c_star))
 
 
-def rotate_data(data, lattice_angle, rotation_angle, rotation_axis, printout=False):
+def rotate_data(data, lattice_angle, rotation_angle, rotation_axis=None, printout=False):
     """
-    Rotates
+    Rotates slices of data around the normal axis.
 
     Parameters
     ----------
@@ -1244,13 +1239,12 @@ def rotate_data(data, lattice_angle, rotation_angle, rotation_axis, printout=Fal
        Angle between the two in-plane lattice axes in degrees.
    rotation_angle : float
        Angle of rotation in degrees.
-    rotation_axis : int
-        Axis of rotation (0, 1, or 2).
+    rotation_axis : int, optional
+        Axis of rotation (0, 1, or 2). Only necessary when data is three-dimensional.
    printout : bool, optional
-        Enables printout of rotation progress. If set to True,
-        about each rotation slice will be printed to the console, indicating
-        the axis being rotated and the corresponding coordinate value.
-        Defaults to False.
+        Enables printout of rotation progress for three-dimensional data. If set to True,
+        information about each rotation slice will be printed to the console, indicating
+        the axis being rotated and the corresponding coordinate value. Defaults to False.
 
 
     Returns
@@ -1258,36 +1252,36 @@ def rotate_data(data, lattice_angle, rotation_angle, rotation_axis, printout=Fal
    rotated_data : :class:`nexusformat.nexus.NXdata`
        Rotated data as an NXdata object.
    """
+
+    if data.ndim == 3 and rotation_axis is None:
+        raise ValueError('rotation_axis must be specified for three-dimensional datasets.')
+
+    if not((data.ndim == 2) or (data.ndim == 3)):
+        raise ValueError('Data must be 2 or 3 dimensional.')
+
    # Define output array
    output_array = np.zeros(data.nxsignal.shape)
 
-    # Define shear transformation
-    skew_angle_adj = 90 - lattice_angle
-    t = Affine2D()
-    # Scale y-axis to preserve norm while shearing
-    t += Affine2D().scale(1, np.cos(skew_angle_adj * np.pi / 180))
-    # Shear along x-axis
-    t += Affine2D().skew_deg(skew_angle_adj, 0)
-    # Return to original y-axis scaling
-    t += Affine2D().scale(1, np.cos(skew_angle_adj * np.pi / 180)).inverted()
-
    # Iterate over all layers perpendicular to the rotation axis
-    (fifteen removed lines are not preserved in this diff view)
-        sliced_data =
+    if data.ndim == 3:
+        num_slices = len(data.nxaxes[rotation_axis])
+    elif data.ndim == 2:
+        num_slices = 1
+
+    for i in range(num_slices):
+
+        if data.ndim == 3:
+            # Print progress
+            if printout:
+                print(f'\rRotating {data.axes[rotation_axis]}'
+                      f'={data.nxaxes[rotation_axis][i]}... ',
+                      end='', flush=True)
+            index = [slice(None)] * 3
+            index[rotation_axis] = i
+            sliced_data = data[tuple(index)]
+
+        elif data.ndim == 2:
+            sliced_data = data
 
        # Add padding to avoid data cutoff during rotation
        p = Padder(sliced_data)
@@ -1295,76 +1289,38 @@ def rotate_data(data, lattice_angle, rotation_angle, rotation_axis, printout=Fal
        counts = p.pad(padding)
        counts = p.padded[p.padded.signal]
 
-        #
-
-
-                                                 offset=[counts.shape[0] / 2
-                                                         * np.sin(skew_angle_adj * np.pi / 180),
-                                                         0],
-                                                 order=0,
-                                                 )
-        # Scale data based on skew angle
-        scale1 = np.cos(skew_angle_adj * np.pi / 180)
-        counts_scaled1 = ndimage.affine_transform(counts_skewed,
-                                                  Affine2D().scale(scale1, 1).get_matrix()[:2, :2],
-                                                  offset=[(1 - scale1) * counts.shape[0] / 2, 0],
-                                                  order=0,
-                                                  )
-        # Scale data based on ratio of array dimensions
-        scale2 = counts.shape[0] / counts.shape[1]
-        counts_scaled2 = ndimage.affine_transform(counts_scaled1,
-                                                  Affine2D().scale(scale2, 1).get_matrix()[:2, :2],
-                                                  offset=[(1 - scale2) * counts.shape[0] / 2, 0],
-                                                  order=0,
-                                                  )
+        # Skew data to match lattice angle
+        t = ShearTransformer(lattice_angle)
+        counts = t.apply(counts)
 
        # Perform rotation
-
-
-        # Undo
-
-
-                                                    scale2, 1
-                                                    ).inverted().get_matrix()[:2, :2],
-                                                    offset=[-(1 - scale2) * counts.shape[
-                                                        0] / 2 / scale2, 0],
-                                                    order=0,
-                                                    )
-        # Undo scaling 1
-        counts_unscaled1 = ndimage.affine_transform(counts_unscaled2,
-                                                    Affine2D().scale(
-                                                        scale1, 1
-                                                    ).inverted().get_matrix()[:2, :2],
-                                                    offset=[-(1 - scale1) * counts.shape[
-                                                        0] / 2 / scale1, 0],
-                                                    order=0,
-                                                    )
-        # Undo shear operation
-        counts_unskewed = ndimage.affine_transform(counts_unscaled1,
-                                                   t.get_matrix()[:2, :2],
-                                                   offset=[
-                                                       (-counts.shape[0] / 2
-                                                        * np.sin(skew_angle_adj * np.pi / 180)),
-                                                       0],
-                                                   order=0,
-                                                   )
+        counts = rotate(counts, rotation_angle, reshape=False, order=0)
+
+        # Undo skew transformation
+        counts = t.invert(counts)
+
        # Remove padding
-
-
-        # Write
-        if
-
-
-            output_array[
-        elif
-            output_array
-
+        counts = p.unpad(counts)
+
+        # Write slice
+        if data.ndim == 3:
+            index = [slice(None)] * 3
+            index[rotation_axis] = i
+            output_array[tuple(index)] = counts
+        elif data.ndim == 2:
+            output_array = counts
+
+    print('\nRotation completed.')
+
    return NXdata(NXfield(output_array, name=p.padded.signal),
-                  (
+                  ([axis for axis in data.nxaxes]))
+
 
 
 def rotate_data_2D(data, lattice_angle, rotation_angle):
    """
+    DEPRECATED: Use `rotate_data` instead.
+
    Rotates 2D data.
 
    Parameters
@@ -1376,86 +1332,20 @@ def rotate_data_2D(data, lattice_angle, rotation_angle):
    rotation_angle : float
        Angle of rotation in degrees.
 
-
    Returns
    -------
    rotated_data : :class:`nexusformat.nexus.NXdata`
        Rotated data as an NXdata object.
    """
+    warnings.warn(
+        "rotate_data_2D is deprecated and will be removed in a future release. "
+        "Use rotate_data instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
 
-    #
-
-    t = Affine2D()
-    # Scale y-axis to preserve norm while shearing
-    t += Affine2D().scale(1, np.cos(skew_angle_adj * np.pi / 180))
-    # Shear along x-axis
-    t += Affine2D().skew_deg(skew_angle_adj, 0)
-    # Return to original y-axis scaling
-    t += Affine2D().scale(1, np.cos(skew_angle_adj * np.pi / 180)).inverted()
-
-    # Add padding to avoid data cutoff during rotation
-    p = Padder(data)
-    padding = tuple(len(data[axis]) for axis in data.axes)
-    counts = p.pad(padding)
-    counts = p.padded[p.padded.signal]
-
-    # Perform shear operation
-    counts_skewed = ndimage.affine_transform(counts,
-                                             t.inverted().get_matrix()[:2, :2],
-                                             offset=[counts.shape[0] / 2
-                                                     * np.sin(skew_angle_adj * np.pi / 180), 0],
-                                             order=0,
-                                             )
-    # Scale data based on skew angle
-    scale1 = np.cos(skew_angle_adj * np.pi / 180)
-    counts_scaled1 = ndimage.affine_transform(counts_skewed,
-                                              Affine2D().scale(scale1, 1).get_matrix()[:2, :2],
-                                              offset=[(1 - scale1) * counts.shape[0] / 2, 0],
-                                              order=0,
-                                              )
-    # Scale data based on ratio of array dimensions
-    scale2 = counts.shape[0] / counts.shape[1]
-    counts_scaled2 = ndimage.affine_transform(counts_scaled1,
-                                              Affine2D().scale(scale2, 1).get_matrix()[:2, :2],
-                                              offset=[(1 - scale2) * counts.shape[0] / 2, 0],
-                                              order=0,
-                                              )
-    # Perform rotation
-    counts_rotated = ndimage.rotate(counts_scaled2, rotation_angle, reshape=False, order=0)
-
-    # Undo scaling 2
-    counts_unscaled2 = ndimage.affine_transform(counts_rotated,
-                                                Affine2D().scale(
-                                                    scale2, 1
-                                                ).inverted().get_matrix()[:2, :2],
-                                                offset=[-(1 - scale2) * counts.shape[
-                                                    0] / 2 / scale2, 0],
-                                                order=0,
-                                                )
-    # Undo scaling 1
-    counts_unscaled1 = ndimage.affine_transform(counts_unscaled2,
-                                                Affine2D().scale(
-                                                    scale1, 1
-                                                ).inverted().get_matrix()[:2, :2],
-                                                offset=[-(1 - scale1) * counts.shape[
-                                                    0] / 2 / scale1, 0],
-                                                order=0,
-                                                )
-    # Undo shear operation
-    counts_unskewed = ndimage.affine_transform(counts_unscaled1,
-                                               t.get_matrix()[:2, :2],
-                                               offset=[
-                                                   (-counts.shape[0] / 2
-                                                    * np.sin(skew_angle_adj * np.pi / 180)),
-                                                   0],
-                                               order=0,
-                                               )
-    # Remove padding
-    counts_unpadded = p.unpad(counts_unskewed)
-
-    print('\nDone.')
-    return NXdata(NXfield(counts_unpadded, name=p.padded.signal),
-                  (data.nxaxes[0], data.nxaxes[1]))
+    # Call the new general function
+    return rotate_data(data, lattice_angle=lattice_angle, rotation_angle=rotation_angle)
 
 
 class Padder:
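
With this rewrite, `rotate_data` handles both 2D and 3D NXdata directly (the per-slice shear/rotate/unshear now goes through `ShearTransformer`), and `rotate_data_2D` simply forwards to it with a `DeprecationWarning`. A hedged sketch; file name and angles are hypothetical:

```python
from nxs_analysis_tools import load_data, rotate_data

slice_2d = load_data('hk0_slice.nxs')   # hypothetical 2D in-plane slice

# 2D data: rotation_axis may now be omitted
rotated = rotate_data(slice_2d, lattice_angle=120, rotation_angle=30)

# 3D data: the axis perpendicular to the rotation plane is still required
# rotated_vol = rotate_data(volume, lattice_angle=120, rotation_angle=30,
#                           rotation_axis=2, printout=True)
```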
nxs_analysis_tools/datasets.py
ADDED
@@ -0,0 +1,108 @@
+import os
+import pooch
+
+GOODBOY = pooch.create(
+    path=pooch.os_cache("nxs_analysis_tools/cubic"),
+    base_url="https://raw.githubusercontent.com/stevenjgomez/dataset-cubic/main/data/",
+    registry={
+        "cubic_15.nxs": None,
+        "15/transform.nxs": None,
+        "cubic_25.nxs": None,
+        "25/transform.nxs": None,
+        "cubic_35.nxs": None,
+        "35/transform.nxs": None,
+        "cubic_45.nxs": None,
+        "45/transform.nxs": None,
+        "cubic_55.nxs": None,
+        "55/transform.nxs": None,
+        "cubic_65.nxs": None,
+        "65/transform.nxs": None,
+        "cubic_75.nxs": None,
+        "75/transform.nxs": None,
+        "cubic_80.nxs": None,
+        "80/transform.nxs": None,
+        "cubic_104.nxs": None,
+        "104/transform.nxs": None,
+        "cubic_128.nxs": None,
+        "128/transform.nxs": None,
+        "cubic_153.nxs": None,
+        "153/transform.nxs": None,
+        "cubic_177.nxs": None,
+        "177/transform.nxs": None,
+        "cubic_202.nxs": None,
+        "202/transform.nxs": None,
+        "cubic_226.nxs": None,
+        "226/transform.nxs": None,
+        "cubic_251.nxs": None,
+        "251/transform.nxs": None,
+        "cubic_275.nxs": None,
+        "275/transform.nxs": None,
+        "cubic_300.nxs": None,
+        "300/transform.nxs": None,
+    }
+)
+
+def fetch_cubic(temperatures=None):
+    """
+    Load the cubic dataset.
+    """
+    fnames = []
+    temperatures = [15, 25, 35, 45, 55, 65, 75, 80, 104, 128,
+                    153, 177, 202, 226, 251, 275, 300] if temperatures is None else temperatures
+    for T in temperatures:
+        fnames.append(GOODBOY.fetch(f"cubic_{T}.nxs"))
+        fnames.append(GOODBOY.fetch(f"{T}/transform.nxs"))
+    return fnames
+
+def cubic(temperatures=None):
+    fnames = fetch_cubic(temperatures)
+    dirname = os.path.dirname(fnames[0])
+    return dirname
+
+POOCH = pooch.create(
+    path=pooch.os_cache("nxs_analysis_tools/hexagonal"),
+    base_url="https://raw.githubusercontent.com/stevenjgomez/dataset-hexagonal/main/data/",
+    registry={
+        "hexagonal_15.nxs": "850d666d6fb0c7bbf7f7159fed952fbd53355c3c0bfb40410874d3918a3cca49",
+        "15/transform.nxs": "45c089be295e0a5b927e963540a90b41f567edb75f283811dbc6bb4a26f2fba5",
+        "hexagonal_300.nxs": "c6a9ff704d1e42d9576d007a92a333f529e3ddf605e3f76a82ff15557b7d4a43",
+        "300/transform.nxs": "e665ba59debe8e60c90c3181e2fb1ebbce668a3d3918a89a6bf31e3563ebf32e",
+    }
+)
+
+def fetch_hexagonal(temperatures=None):
+    """
+    Load the hexagonal dataset.
+    """
+    fnames = []
+    temperatures = [15, 300] if temperatures is None else temperatures
+    for T in temperatures:
+        fnames.append(POOCH.fetch(f"hexagonal_{T}.nxs"))
+        fnames.append(POOCH.fetch(f"{T}/transform.nxs"))
+    return fnames
+
+def hexagonal(temperatures=None):
+    fnames = fetch_hexagonal(temperatures)
+    dirname = os.path.dirname(fnames[0])
+    return dirname
+
+BONES = pooch.create(
+    path=pooch.os_cache("nxs_analysis_tools/vacancies"),
+    base_url="https://raw.githubusercontent.com/stevenjgomez/dataset-vacancies/main/",
+    registry={
+        "vacancies.nxs": "39eaf8df84a0dbcacbe6ce7c6017da4da578fbf68a6218ee18ade3953c26efb5",
+        "fft.nxs": "c81178eda0ec843502935f29fcb2b0b878f7413e461612c731d37ea9e5e414a9",
+    }
+)
+
+def vacancies():
+    """
+    Load the vacancies dataset.
+    """
+    return BONES.fetch(f"vacancies.nxs")
+
+def vacanciesfft():
+    """
+    Load the vacancies dataset.
+    """
+    return BONES.fetch(f"fft.nxs")
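
The new `datasets` module wraps pooch so the example data are downloaded once and then served from a local cache: `cubic()` and `hexagonal()` return the cache directory, while `vacancies()` and `vacanciesfft()` return single file paths. Note that pooch only appears under the dev extra in the new METADATA, so it must be installed separately to use this module. A short sketch of how these fetchers could be used:

```python
from nxs_analysis_tools import load_data
from nxs_analysis_tools import datasets

# Download (or reuse cached copies of) the hexagonal example data
sample_dir = datasets.hexagonal()        # directory containing hexagonal_15.nxs, etc.

# Fetch a single example file and load it
vac = load_data(datasets.vacancies())
```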
nxs_analysis_tools/lineartransformations.py
ADDED
@@ -0,0 +1,51 @@
+import numpy as np
+from scipy.ndimage import affine_transform
+from matplotlib.transforms import Affine2D
+
+def shear_transformation(angle):
+    # Define shear transformation
+    t = Affine2D()
+
+    # Scale y-axis to preserve norm while shearing
+    t += Affine2D().scale(1, np.cos(angle * np.pi / 180))
+
+    # Shear along x-axis
+    t += Affine2D().skew_deg(angle, 0)
+
+    # Return to original y-axis scaling
+    t += Affine2D().scale(1, np.cos(angle * np.pi / 180)).inverted()
+
+    return t
+
+class ShearTransformer():
+    def __init__(self, angle):
+        self.shear_angle = 90 - angle
+        self.t = shear_transformation(self.shear_angle)
+        self.scale = np.cos(self.shear_angle * np.pi / 180)
+
+    def apply(self, image):
+        # Perform shear operation
+        image_skewed = affine_transform(image, self.t.inverted().get_matrix()[:2, :2],
+                                        offset=[image.shape[0] / 2 * np.sin(self.shear_angle * np.pi / 180), 0],
+                                        order=0
+                                        )
+        # Scale data based on skew angle
+        image_scaled = affine_transform(image_skewed, Affine2D().scale(self.scale, 1).get_matrix()[:2, :2],
+                                        offset=[(1 - self.scale) * image.shape[0] / 2, 0],
+                                        order=0
+                                        )
+        return image_scaled
+
+    def invert(self, image):
+
+        # Undo scaling
+        image_unscaled = affine_transform(image, Affine2D().scale(self.scale, 1).inverted().get_matrix()[:2, :2],
+                                          offset=[-(1 - self.scale) * image.shape[0] / 2 / self.scale, 0],
+                                          order=0
+                                          )
+        # Undo shear operation
+        image_unskewed = affine_transform(image_unscaled, self.t.get_matrix()[:2, :2],
+                                          offset=[(-image.shape[0] / 2 * np.sin(self.shear_angle * np.pi / 180)), 0],
+                                          order=0
+                                          )
+        return image_unskewed
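
A minimal round-trip sketch of the new helper on a plain NumPy array: `apply` resamples an image into the sheared lattice basis and `invert` maps it back. The array contents are made up, and because the resampling uses `order=0` the round trip is only approximate:

```python
import numpy as np
from nxs_analysis_tools.lineartransformations import ShearTransformer

image = np.random.default_rng(0).random((64, 64))

t = ShearTransformer(120)      # e.g., hexagonal in-plane lattice angle
skewed = t.apply(image)        # image resampled onto the sheared grid
restored = t.invert(skewed)    # approximately recovers the original array
```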
nxs_analysis_tools/pairdistribution.py
CHANGED
@@ -5,7 +5,7 @@ import time
 import os
 import gc
 import math
-from scipy import
+from scipy.ndimage import rotate, affine_transform
 import scipy
 import matplotlib.pyplot as plt
 from matplotlib.transforms import Affine2D
@@ -15,6 +15,7 @@ from astropy.convolution import Kernel, convolve_fft
 import pyfftw
 from .datareduction import plot_slice, reciprocal_lattice_params, Padder, \
     array_to_nxdata
+from .lineartransformations import ShearTransformer
 
 __all__ = ['Symmetrizer2D', 'Symmetrizer3D', 'Puncher', 'Interpolator',
            'fourier_transform_nxdata', 'Gaussian3DKernel', 'DeltaPDF',
@@ -37,9 +38,6 @@ class Symmetrizer2D:
    symmetrized : NXdata or None
        The symmetrized dataset after applying the symmetrization operations.
        Default is None until symmetrization is performed.
-    wedges : NXdata or None
-        The wedges extracted from the dataset based on the angular limits.
-        Default is None until symmetrization is performed.
    rotations : int or None
        The number of rotations needed to reconstruct the full dataset from
        a single wedge. Default is None until parameters are set.
@@ -93,7 +91,6 @@ class Symmetrizer2D:
        """
        self.mirror_axis = None
        self.symmetrized = None
-        self.wedges = None
        self.rotations = None
        self.transform = None
        self.mirror = None
@@ -129,16 +126,8 @@ class Symmetrizer2D:
        self.mirror = mirror
        self.mirror_axis = mirror_axis
 
-
-
-        t = Affine2D()
-        # Scale y-axis to preserve norm while shearing
-        t += Affine2D().scale(1, np.cos(skew_angle_adj * np.pi / 180))
-        # Shear along x-axis
-        t += Affine2D().skew_deg(skew_angle_adj, 0)
-        # Return to original y-axis scaling
-        t += Affine2D().scale(1, np.cos(skew_angle_adj * np.pi / 180)).inverted()
-        self.transform = t
+        self.transformer = ShearTransformer(lattice_angle)
+        self.transform = self.transformer.t
 
        # Calculate number of rotations needed to reconstruct the dataset
        if mirror:
@@ -172,7 +161,6 @@ class Symmetrizer2D:
        theta_max = self.theta_max
        mirror = self.mirror
        mirror_axis = self.mirror_axis
-        t = self.transform
        rotations = self.rotations
 
        # Pad the dataset so that rotations don't get cutoff if they extend
@@ -191,37 +179,9 @@ class Symmetrizer2D:
        symmetrization_mask = np.logical_and(theta >= theta_min * np.pi / 180,
                                             theta <= theta_max * np.pi / 180)
 
-        #
-
-
-        # Scale and skew counts
-        skew_angle_adj = 90 - self.skew_angle
-
-        scale2 = 1  # q1.max()/q2.max()  # TODO: Need to double check this
-        counts_unscaled2 = ndimage.affine_transform(counts,
-                                                    Affine2D().scale(scale2, 1).inverted().get_matrix()[:2, :2],
-                                                    offset=[-(1 - scale2) * counts.shape[
-                                                        0] / 2 / scale2, 0],
-                                                    order=0,
-                                                    )
-
-        scale1 = np.cos(skew_angle_adj * np.pi / 180)
-        counts_unscaled1 = ndimage.affine_transform(counts_unscaled2,
-                                                    Affine2D().scale(scale1, 1).inverted().get_matrix()[:2, :2],
-                                                    offset=[-(1 - scale1) * counts.shape[
-                                                        0] / 2 / scale1, 0],
-                                                    order=0,
-                                                    )
-
-        mask = ndimage.affine_transform(counts_unscaled1,
-                                        t.get_matrix()[:2, :2],
-                                        offset=[-counts.shape[0] / 2
-                                                * np.sin(skew_angle_adj * np.pi / 180), 0],
-                                        order=0,
-                                        )
-
-        # Convert mask to nxdata
-        mask = array_to_nxdata(mask, data_padded)
+        # Bring mask from skewed basis to data array basis
+        mask = array_to_nxdata(self.transformer.invert(symmetrization_mask), data_padded)
+
 
        # Save mask for user interaction
        self.symmetrization_mask = p.unpad(mask)
@@ -235,84 +195,49 @@ class Symmetrizer2D:
        # Convert wedge back to array for further transformations
        wedge = wedge[data.signal].nxdata
 
-        #
-
+        # Bring wedge from data array basis to skewed basis for reconstruction
+        wedge = self.transformer.apply(wedge)
 
-        #
-
-
-
-
-                                         * np.sin(skew_angle_adj * np.pi / 180), 0],
-                                         order=0,
-                                         )
-        scale1 = np.cos(skew_angle_adj * np.pi / 180)
-        wedge = ndimage.affine_transform(counts_skew,
-                                         Affine2D().scale(scale1, 1).get_matrix()[:2, :2],
-                                         offset=[(1 - scale1) * counts.shape[0] / 2, 0],
-                                         order=0,
-                                         )
-
-        scale2 = counts.shape[0]/counts.shape[1]
-        wedge = ndimage.affine_transform(wedge,
-                                         Affine2D().scale(scale2, 1).get_matrix()[:2, :2],
-                                         offset=[(1 - scale2) * counts.shape[0] / 2, 0],
+        # Apply additional scaling before rotations
+        scale = wedge.shape[0]/wedge.shape[1]
+        wedge = affine_transform(wedge,
+                                 Affine2D().scale(scale, 1).get_matrix()[:2, :2],
+                                 offset=[(1 - scale) * wedge.shape[0] / 2, 0],
                                  order=0,
                                  )
 
        # Reconstruct full dataset from wedge
-        reconstructed = np.zeros(
+        reconstructed = np.zeros(wedge.shape)
+
        for _ in range(0, rotations):
-            # The following are attempts to combine images with minimal overlapping pixels
            reconstructed += wedge
-
-
-            wedge = ndimage.rotate(wedge, 360 / rotations, reshape=False, order=0)
-
-            # self.rotated_only = NXdata(NXfield(reconstructed, name=data.signal),
-            #                            (q1, q2))
+            wedge = rotate(wedge, 360 / rotations, reshape=False, order=0)
 
        if mirror:
-            # The following are attempts to combine images with minimal overlapping pixels
            reconstructed = np.where(reconstructed == 0,
                                     reconstructed + np.flip(reconstructed, axis=mirror_axis),
                                     reconstructed)
-
-
-            # self.rotated_and_mirrored = NXdata(NXfield(reconstructed, name=data.signal),
-            #                                    (q1, q2))
+
 
-
+        # Undo scaling transformation
+        reconstructed = affine_transform(reconstructed,
                                         Affine2D().scale(
-
+                                             scale, 1
                                         ).inverted().get_matrix()[:2, :2],
-                                         offset=[-(1 -
-                                         0] / 2 /
-                                         order=0,
-                                         )
-        reconstructed = ndimage.affine_transform(reconstructed,
-                                                 Affine2D().scale(
-                                                     scale1, 1
-                                                 ).inverted().get_matrix()[:2, :2],
-                                                 offset=[-(1 - scale1) * counts.shape[
-                                                     0] / 2 / scale1, 0],
-                                                 order=0,
-                                                 )
-        reconstructed = ndimage.affine_transform(reconstructed,
-                                                 t.get_matrix()[:2, :2],
-                                                 offset=[(-counts.shape[0] / 2
-                                                          * np.sin(skew_angle_adj * np.pi / 180)),
-                                                         0],
+                                         offset=[-(1 - scale) * wedge.shape[
+                                             0] / 2 / scale, 0],
                                         order=0,
                                         )
+
+        reconstructed = self.transformer.invert(reconstructed)
 
-
+        reconstructed = p.unpad(reconstructed)
 
        # Fix any overlapping pixels by truncating counts to max
-
+        reconstructed[reconstructed > data[data.signal].nxdata.max()] \
            = data[data.signal].nxdata.max()
 
-        symmetrized = NXdata(NXfield(
+        symmetrized = NXdata(NXfield(reconstructed, name=data.signal),
                             (data[data.axes[0]],
                              data[data.axes[1]]))
 
@@ -349,12 +274,11 @@ class Symmetrizer2D:
        - Subplot 3: The wedge slice used for reconstruction of the full symmetrized dataset.
        - Subplot 4: The symmetrized dataset.
 
-        Example
-
-        s = Symmetrizer2D()
-        s.set_parameters(theta_min, theta_max, skew_angle, mirror)
-        s.test(data)
-        ```
+        Example
+        -------
+        >>> s = Symmetrizer2D()
+        >>> s.set_parameters(theta_min, theta_max, skew_angle, mirror)
+        >>> s.test(data)
        """
        s = self
        symm_test = s.symmetrize_2d(data)
@@ -400,7 +324,8 @@ class Symmetrizer3D:
            The input 3D dataset to be symmetrized.
        """
 
-
+        if data is None:
+            raise ValueError("Symmetrizer3D requires a 3D NXdata object for initialization.")
 
        self.a, self.b, self.c, self.al, self.be, self.ga = [None] * 6
        self.a_star, self.b_star, self.c_star, self.al_star, self.be_star, self.ga_star = [None] * 6
{nxs_analysis_tools-0.1.9.dist-info → nxs_analysis_tools-0.1.11.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nxs-analysis-tools
-Version: 0.1.9
+Version: 0.1.11
 Summary: Reduce and transform nexus format (.nxs) scattering data.
 Author-email: "Steven J. Gomez Alvarado" <stevenjgomez@ucsb.edu>
 License-Expression: MIT
@@ -14,11 +14,10 @@ Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Science/Research
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Topic :: Scientific/Engineering :: Image Processing
 Classifier: Topic :: Scientific/Engineering
-Requires-Python: >=3.9
+Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: matplotlib>=3.10.0
@@ -46,6 +45,7 @@ Requires-Dist: sphinx-autobuild>=2021.3.14; extra == "dev"
 Requires-Dist: sphinx-copybutton>=0.5.0; extra == "dev"
 Requires-Dist: sphinxext-opengraph>=0.6.3; extra == "dev"
 Requires-Dist: twine>=4.0.1; extra == "dev"
+Requires-Dist: pooch>=1.8.2; extra == "dev"
 Dynamic: license-file
 
 # nxs-analysis-tools
@@ -56,7 +56,7 @@ Dynamic: license-file
 
 ## Overview
 
-nxs-analysis-tools provides a suite of tools for
+nxs-analysis-tools provides a suite of tools for performing slices (2D), cuts (1D), and transformations (_e.g._, symmetrization, interpolation, delta-PDF) on nexus format (.nxs) scattering data.
 
 View the documentation [here](https://nxs-analysis-tools.readthedocs.io/en/stable/).
 
@@ -76,4 +76,14 @@ git clone https://github.com/stevenjgomez/nxs-analysis-tools.git
 
 ## License
 
-This project is licensed under the MIT License.
+This project is licensed under the MIT License. If you find the nxs-analysis-tools package useful in your scientific publications, please cite the relevant DOI.
+
+To cite the most recent version, use:
+
+[](https://doi.org/10.5281/zenodo.15186359)
+
+To cite all versions, use:
+
+```
+10.5281/zenodo.15186359
+```
nxs_analysis_tools-0.1.11.dist-info/RECORD
ADDED
@@ -0,0 +1,13 @@
+_meta/__init__.py,sha256=O-Csyugy_6ZYl7nR_6FgU1-D1-64dSBtRSRzDF1aiq4,347
+nxs_analysis_tools/__init__.py,sha256=Gs61l3FrgmhVRv77oqz58W6vmxFhNARfcioYA0FZbqU,604
+nxs_analysis_tools/chess.py,sha256=_gFkLZnoYSCOKPZHm0b1CileCveHJpOszLufeg4QKFg,34086
+nxs_analysis_tools/datareduction.py,sha256=Ky8Q5izEXmOUj3OKOlvZ8A-YeKRdAOD22luggiZtLoY,52181
+nxs_analysis_tools/datasets.py,sha256=KnJBdxuCV7n2Q6MOM0Cv5-Dq6x1hvrrEXIc3Jn7Xsos,3497
+nxs_analysis_tools/fitting.py,sha256=kRMhjObetGqmZ5-Jk1OHKGrXW4qI4D37s8VeC2ygJV8,10275
+nxs_analysis_tools/lineartransformations.py,sha256=-Ce2RzcRcUgg8_kM1o0kO9lOpS_nC2AxRyZTUA1tAe8,2144
+nxs_analysis_tools/pairdistribution.py,sha256=u4WyOfK_nBX78QJr7QO8QWGKV9bH_sBWtdzVaqs2wWo,61238
+nxs_analysis_tools-0.1.11.dist-info/licenses/LICENSE,sha256=bE6FnYixueAGAnEfUuumbkSeMgdBguAAkheVgjv47Jo,1086
+nxs_analysis_tools-0.1.11.dist-info/METADATA,sha256=sdphx8gVih0ZeQOPEHw4_R-sS6D6XlQNZWc6JOxAB5g,3493
+nxs_analysis_tools-0.1.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+nxs_analysis_tools-0.1.11.dist-info/top_level.txt,sha256=8U000GNPzo6T6pOMjRdgOSO5heMzLMGjkxa1CDtyMHM,25
+nxs_analysis_tools-0.1.11.dist-info/RECORD,,

nxs_analysis_tools-0.1.9.dist-info/RECORD
REMOVED
@@ -1,11 +0,0 @@
-_meta/__init__.py,sha256=_6YTHnZTh1hFtcZstSxlck37JDEvSrKKPTDaeiNi2mc,346
-nxs_analysis_tools/__init__.py,sha256=lutfLk7oBaMpKq2G2hf6V59SNqAhzSUyKLXGwTI_iDg,622
-nxs_analysis_tools/chess.py,sha256=mTl3-hfKG6xUtuzqWJa63t1qg975Iv8ND6C6B1dxPio,32699
-nxs_analysis_tools/datareduction.py,sha256=PMSV-_f66NK0Sf70z_xp7eQqcOe5krRkbQdVo6mTX3E,59233
-nxs_analysis_tools/fitting.py,sha256=kRMhjObetGqmZ5-Jk1OHKGrXW4qI4D37s8VeC2ygJV8,10275
-nxs_analysis_tools/pairdistribution.py,sha256=BDJdPiQ-XEk8vZKiFQnCotaWeS5cDDGqmSyhzC3fwrQ,65586
-nxs_analysis_tools-0.1.9.dist-info/licenses/LICENSE,sha256=bE6FnYixueAGAnEfUuumbkSeMgdBguAAkheVgjv47Jo,1086
-nxs_analysis_tools-0.1.9.dist-info/METADATA,sha256=Z0pCGXvg06-cD0iZhuIzMtam_j8EAJGRL3K3NAgNiCI,3180
-nxs_analysis_tools-0.1.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-nxs_analysis_tools-0.1.9.dist-info/top_level.txt,sha256=8U000GNPzo6T6pOMjRdgOSO5heMzLMGjkxa1CDtyMHM,25
-nxs_analysis_tools-0.1.9.dist-info/RECORD,,

{nxs_analysis_tools-0.1.9.dist-info → nxs_analysis_tools-0.1.11.dist-info}/WHEEL
File without changes

{nxs_analysis_tools-0.1.9.dist-info → nxs_analysis_tools-0.1.11.dist-info}/licenses/LICENSE
File without changes

{nxs_analysis_tools-0.1.9.dist-info → nxs_analysis_tools-0.1.11.dist-info}/top_level.txt
File without changes