nxs-analysis-tools 0.1.8__tar.gz → 0.1.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of nxs-analysis-tools has been flagged as potentially problematic.

Files changed (32)
  1. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/PKG-INFO +13 -2
  2. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/README.md +11 -1
  3. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/pyproject.toml +3 -2
  4. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/src/_meta/__init__.py +1 -1
  5. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/src/nxs_analysis_tools/chess.py +77 -48
  6. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/src/nxs_analysis_tools/datareduction.py +22 -8
  7. nxs_analysis_tools-0.1.10/src/nxs_analysis_tools/datasets.py +108 -0
  8. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/src/nxs_analysis_tools/pairdistribution.py +7 -7
  9. nxs_analysis_tools-0.1.10/src/nxs_analysis_tools.egg-info/SOURCES.txt +14 -0
  10. nxs_analysis_tools-0.1.10/tests/test_loading.py +46 -0
  11. nxs_analysis_tools-0.1.10/tests/test_plot_slice.py +112 -0
  12. nxs_analysis_tools-0.1.8/src/nxs_analysis_tools.egg-info/SOURCES.txt +0 -25
  13. nxs_analysis_tools-0.1.8/tests/test_accurate_highlight.py +0 -388
  14. nxs_analysis_tools-0.1.8/tests/test_chess.py +0 -16
  15. nxs_analysis_tools-0.1.8/tests/test_chess_fitting.py +0 -21
  16. nxs_analysis_tools-0.1.8/tests/test_datareduction.py +0 -19
  17. nxs_analysis_tools-0.1.8/tests/test_ellipsoidal_window.py +0 -15
  18. nxs_analysis_tools-0.1.8/tests/test_fitting.py +0 -28
  19. nxs_analysis_tools-0.1.8/tests/test_lmfit.py +0 -95
  20. nxs_analysis_tools-0.1.8/tests/test_mask_plotting.py +0 -388
  21. nxs_analysis_tools-0.1.8/tests/test_pairdistribution.py +0 -40
  22. nxs_analysis_tools-0.1.8/tests/test_plot_slice_axes_types.py +0 -40
  23. nxs_analysis_tools-0.1.8/tests/test_plot_slice_with_ndarray.py +0 -277
  24. nxs_analysis_tools-0.1.8/tests/test_rotate_data.py +0 -23
  25. nxs_analysis_tools-0.1.8/tests/test_sum_axis.py +0 -299
  26. nxs_analysis_tools-0.1.8/tests/test_symmetrizer_rectangular_plane.py +0 -382
  27. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/LICENSE +0 -0
  28. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/MANIFEST.in +0 -0
  29. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/setup.cfg +0 -0
  30. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/setup.py +0 -0
  31. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/src/nxs_analysis_tools/__init__.py +0 -0
  32. {nxs_analysis_tools-0.1.8 → nxs_analysis_tools-0.1.10}/src/nxs_analysis_tools/fitting.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nxs-analysis-tools
- Version: 0.1.8
+ Version: 0.1.10
  Summary: Reduce and transform nexus format (.nxs) scattering data.
  Author-email: "Steven J. Gomez Alvarado" <stevenjgomez@ucsb.edu>
  License-Expression: MIT
@@ -46,6 +46,7 @@ Requires-Dist: sphinx-autobuild>=2021.3.14; extra == "dev"
  Requires-Dist: sphinx-copybutton>=0.5.0; extra == "dev"
  Requires-Dist: sphinxext-opengraph>=0.6.3; extra == "dev"
  Requires-Dist: twine>=4.0.1; extra == "dev"
+ Requires-Dist: pooch>=1.8.2; extra == "dev"
  Dynamic: license-file

  # nxs-analysis-tools
@@ -76,4 +77,14 @@ git clone https://github.com/stevenjgomez/nxs-analysis-tools.git

  ## License

- This project is licensed under the MIT License.
+ This project is licensed under the MIT License. If you find the nxs-analysis-tools package useful in your scientific publications, please cite the relevant DOI.
+
+ To cite the most recent version, use:
+
+ [![DOI](https://zenodo.org/badge/644189683.svg)](https://doi.org/10.5281/zenodo.15186359)
+
+ To cite all versions, use:
+
+ ```
+ 10.5281/zenodo.15186359
+ ```
@@ -26,4 +26,14 @@ git clone https://github.com/stevenjgomez/nxs-analysis-tools.git

  ## License

- This project is licensed under the MIT License.
+ This project is licensed under the MIT License. If you find the nxs-analysis-tools package useful in your scientific publications, please cite the relevant DOI.
+
+ To cite the most recent version, use:
+
+ [![DOI](https://zenodo.org/badge/644189683.svg)](https://doi.org/10.5281/zenodo.15186359)
+
+ To cite all versions, use:
+
+ ```
+ 10.5281/zenodo.15186359
+ ```
@@ -6,7 +6,7 @@ build-backend = 'setuptools.build_meta'

  [project]
  name = 'nxs-analysis-tools'
- version = '0.1.8'
+ version = '0.1.10'
  description = 'Reduce and transform nexus format (.nxs) scattering data.'
  readme = 'README.md'
  requires-python = '>=3.7'
@@ -61,6 +61,7 @@ dev = [
  'sphinx-copybutton >= 0.5.0',
  'sphinxext-opengraph >= 0.6.3',
  'twine >= 4.0.1',
+ 'pooch >= 1.8.2',
  ]

  [project.urls]
@@ -71,7 +72,7 @@ dev = [
  'DOI' = 'https://doi.org/10.5281/zenodo.15186359'

  [tool.bumpver]
- current_version = "0.1.8"
+ current_version = "0.1.10"
  version_pattern = "MAJOR.MINOR.PATCH[-TAG]"
  tag_pattern = "vMAJOR.MINOR.PATCH[-TAG]"
  commit_message = "Bump version {old_version} -> {new_version}"
@@ -6,5 +6,5 @@ __author__ = 'Steven J. Gomez Alvarado'
  __email__ = 'stevenjgomez@ucsb.edu'
  __copyright__ = f"2023-2025, {__author__}"
  __license__ = 'MIT'
- __version__ = '0.1.8'
+ __version__ = '0.1.10'
  __repo_url__ = 'https://github.com/stevenjgomez/nxs_analysis_tools'
@@ -1,7 +1,7 @@
  """
  This module provides classes and functions for analyzing scattering datasets collected at CHESS
- (ID4B) with temperature dependence. It includes functions for loading data, cutting data, and
- plotting linecuts.
+ (ID4B) with temperature dependence. It includes functions for loading temperature series and
+ performing operations on all datasets in the series at once (e.g., cutting, fitting).
  """
  import os
  import re
@@ -59,10 +59,11 @@ class TempDependence:
  Initialize Scissors and LinecutModel objects for each temperature.
  set_data(temperature, data):
  Set the dataset for a specific temperature.
- load_transforms(temperatures_list=None, print_tree=True):
+ load_transforms(temperatures_list=None, exclude_temperatures=None, print_tree=True):
  Load transform datasets (from nxrefine) based on temperature.
- load_datasets(file_ending='hkli.nxs', temperatures_list=None, print_tree=True):
- Load datasets (CHESS format) from the specified folder.
+ load_datasets(file_ending='hkli.nxs', temperatures_list=None, exclude_temperatures=None,
+ print_tree=True):
+ Load datasets (legacy CHESS format) from the specified folder.
  get_sample_directory():
  Get the folder path where the datasets are located.
  clear_datasets():
@@ -99,11 +100,12 @@ class TempDependence:
  Fit the line cut models for each temperature.
  plot_fit(mdheadings=False, **kwargs):
  Plot the fit results for each temperature.
- overlay_fits(numpoints=None, vertical_offset=0, cmap='viridis', ax=ax):
+ overlay_fits(numpoints=None, vertical_offset=0, cmap='viridis', ax=ax,
+ data_kwargs=None, fit_kwargs=None):
  Plot raw data and fitted models for each temperature.
  fit_peak_simple():
  Perform a basic fit using a pseudo-Voigt peak shape, linear background, and no constraints.
- plot_order_parameter(ax, **kwargs):
+ plot_order_parameter(ax, param_name='peakheight', **kwargs):
  Plot the temperature dependence of the peakheight parameter.
  print_fit_report():
  Print the fit report for each temperature.
@@ -188,6 +190,8 @@ class TempDependence:
  """
  for temperature in self.temperatures:
  self.scissors[temperature] = Scissors()
+ if temperature in self.datasets.keys():
+ self.scissors[temperature].set_data(self.datasets[temperature])
  self.linecutmodels[temperature] = LinecutModel()

  def set_data(self, temperature, data):
@@ -203,7 +207,7 @@
  """
  self.datasets[temperature] = data

- def load_transforms(self, temperatures_list=None, print_tree=True, use_nxlink=False):
+ def load_transforms(self, temperatures_list=None, exclude_temperatures=None, print_tree=True, use_nxlink=False):
  """
  Load transform datasets (from nxrefine) based on temperature.

@@ -211,6 +215,9 @@
  ----------
  temperatures_list : list of int or None, optional
  List of temperatures to load. If None, all available temperatures are loaded.
+
+ exclude_temperatures : int, str, optional
+ Temperatures to skip. Applied after filtering with `temperatures_list`, if provided.

  print_tree : bool, optional
  Whether to print the data tree upon loading. Default True.
@@ -223,6 +230,10 @@
  # Convert all temperatures to strings
  if temperatures_list:
  temperatures_list = [str(t) for t in temperatures_list]
+ if exclude_temperatures:
+ if isinstance(exclude_temperatures, str):
+ exclude_temperatures = [exclude_temperatures]
+ exclude_temperatures = [str(t) for t in list(exclude_temperatures)]

  # Clear existing temperatures before loading files
  self.temperatures = []
@@ -237,7 +248,15 @@
  # Identify temperature
  temperature = match.group(1)
  # print(f'Temperature = {temperature}')
- if (temperatures_list is None) or (temperature in temperatures_list):
+ if temperatures_list is not None:
+ incl_temp = temperature in temperatures_list
+ else:
+ incl_temp = True
+ if exclude_temperatures is not None:
+ not_excl_temp = temperature not in exclude_temperatures
+ else:
+ not_excl_temp = True
+ if incl_temp and not_excl_temp:
  # Prepare file to be loaded
  self.temperatures.append(temperature)
  items_to_load.append(item)
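The block above replaces the single include test with separate include/exclude flags. A minimal usage sketch, assuming `td` is an already-constructed `TempDependence` instance pointed at a sample directory (the temperature values below are placeholders):

```python
from nxs_analysis_tools.chess import TempDependence

def load_transforms_excluding(td: TempDependence) -> None:
    """Load nxrefine transforms while skipping selected temperatures (placeholders)."""
    # exclude_temperatures is applied after the optional temperatures_list filter,
    # so this keeps every available transform except the 104 K and 128 K files.
    td.load_transforms(exclude_temperatures=[104, 128], print_tree=False)
```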
@@ -269,40 +288,38 @@
  f" Error: {e}")
  raise # Re-raise the exception

- # Initialize scissors object
- self.scissors[self.temperatures[i]] = Scissors()
- self.scissors[self.temperatures[i]].set_data(self.datasets[self.temperatures[i]])
+ self.initialize()

- # Initialize linecutmodel object
- self.linecutmodels[self.temperatures[i]] = LinecutModel()
-
- def load_datasets(self, file_ending='hkli.nxs', temperatures_list=None, print_tree=True):
+ def load_datasets(self, file_ending='hkli.nxs', temperatures_list=None, exclude_temperatures=None, print_tree=True):
  """
  Load datasets (CHESS format) from the specified folder.

  Parameters
  ----------
  file_ending : str, optional
- The file extension of the datasets to be loaded. The default is 'hkli.nxs'.
- temperatures_list : list of int or None, optional
- The list of specific temperatures to load. If None, all available temperatures are
- loaded. The default is None.
+ File extension of datasets to load. Default is 'hkli.nxs'.
+ temperatures_list : list of int or str, optional
+ Specific temperatures to load. If None, all temperatures are loaded.
+ exclude_temperatures : list of int or str, optional
+ Temperatures to skip. Applied after filtering with `temperatures_list`, if provided.
  print_tree : bool, optional
- Whether to print the data tree upon loading. Default True.
+ If True, prints the NeXus tree structure for each file. Default is True.
  """
- temperature_folders = [] # Empty list to store temperature folder names
- for item in os.listdir(self.sample_directory):
- try:
- temperature_folders.append(int(item)) # If folder name can be int, add it
- except ValueError:
- pass # Otherwise don't add it
- temperature_folders.sort() # Sort from low to high T
- temperature_folders = [str(i) for i in temperature_folders] # Convert to strings
-
- self.temperatures = temperature_folders

  if temperatures_list is not None:
  self.temperatures = [str(t) for t in temperatures_list]
+ else:
+ self.temperatures = [] # Empty list to store temperature folder names
+ for item in os.listdir(self.sample_directory):
+ try:
+ self.temperatures.append(int(item)) # If folder name can be int, add it
+ except ValueError:
+ pass # Otherwise don't add it
+ self.temperatures.sort() # Sort from low to high T
+ self.temperatures = [str(i) for i in self.temperatures] # Convert to strings
+
+ if exclude_temperatures is not None:
+ [self.temperatures.remove(str(t)) for t in exclude_temperatures]

  # Load .nxs files
  for T in self.temperatures:
@@ -313,12 +330,7 @@
  # Load dataset at each temperature
  self.datasets[T] = load_data(filepath, print_tree)

- # Initialize scissors object at each temperature
- self.scissors[T] = Scissors()
- self.scissors[T].set_data(self.datasets[T])
-
- # Initialize linecutmodel object at each temperature
- self.linecutmodels[T] = LinecutModel()
+ self.initialize()

  def get_sample_directory(self):
  """
@@ -437,11 +449,16 @@
  # Get the Viridis colormap
  cmap = mpl.colormaps.get_cmap('viridis')

+ # Reverse zorder
+ zorder = 0
+
  for i, linecut in enumerate(self.linecuts.values()):
+
  x_data = linecut[linecut.axes].nxdata
  y_data = linecut[linecut.signal].nxdata + i * vertical_offset
- ax.plot(x_data, y_data, color=cmap(i / len(self.linecuts)), label=self.temperatures[i],
- **kwargs)
+ ax.plot(x_data, y_data, color=cmap(i / len(self.linecuts)), label=self.temperatures[i],
+ zorder=zorder, **kwargs)
+ zorder -= 1

  ax.set(xlabel=self.xlabel,
  ylabel=self.linecuts[self.temperatures[0]].signal)
@@ -704,7 +721,8 @@
  title=f"{T} K",
  **kwargs)

- def overlay_fits(self, numpoints=None, vertical_offset=0, cmap='viridis', ax=None):
+ def overlay_fits(self, numpoints=None, vertical_offset=0, cmap='viridis', ax=None,
+ data_kwargs=None, fit_kwargs=None):
  """
  Plot raw data and fitted models for each temperature with optional vertical offsets.

@@ -719,6 +737,11 @@
  Name of the matplotlib colormap used to distinguish different temperatures.
  ax : :class:`matplotlib.axes.Axes` or None, default=None
  Axis object to plot on. If None, a new figure and axis are created.
+ data_kwargs : dict
+ Keyword arguments to be passed to the data plot function.
+ fit_kwargs : dict
+ Keyword arguments to be passed to the fit plot function.
+

  The function:
  - Uses a colormap to assign unique colors to each temperature.
@@ -731,19 +754,24 @@
  # Create a figure and axes if an axis is not already provided
  _, ax = plt.subplots() if ax is None else (None, ax)

+ if data_kwargs is None:
+ data_kwargs = {}
+ if fit_kwargs is None:
+ fit_kwargs = {}
+
  # Generate a color palette for the various temperatures
  cmap = plt.get_cmap(cmap)
  colors = [cmap(i / len(self.temperatures)) for i, _ in enumerate(self.temperatures)]

  for i, lm in enumerate(self.linecutmodels.values()):
  # Plot the raw data
- ax.plot(lm.x, lm.y + vertical_offset * i, '.', c=colors[i])
+ ax.plot(lm.x, lm.y + vertical_offset * i, '.', c=colors[i], **data_kwargs)

  # Evaluate the fit
  numpoints = len(lm.x) if numpoints is None else numpoints
  x_eval = np.linspace(lm.x.min(), lm.x.max(), numpoints)
  y_eval = lm.modelresult.eval(x=x_eval)
- ax.plot(x_eval, y_eval + vertical_offset * i, '-', c=colors[i], label=self.temperatures[i])
+ ax.plot(x_eval, y_eval + vertical_offset * i, '-', c=colors[i], label=self.temperatures[i], **fit_kwargs)

  # Reverse legend entries to match top-to-bottom stacking
  handles, labels = ax.get_legend_handles_labels()
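The new `data_kwargs` and `fit_kwargs` dictionaries are forwarded to the two `ax.plot` calls above, so marker and line styling can be set independently. A minimal sketch, assuming `td` already holds fitted linecut models (the offset and style values are arbitrary):

```python
import matplotlib.pyplot as plt
from nxs_analysis_tools.chess import TempDependence

def overlay_styled_fits(td: TempDependence) -> None:
    """Overlay data points and fit curves with separate styling for each."""
    fig, ax = plt.subplots()
    td.overlay_fits(vertical_offset=10, cmap='viridis', ax=ax,
                    data_kwargs={'markersize': 3, 'alpha': 0.5},
                    fit_kwargs={'linewidth': 1.5})
    fig.tight_layout()
```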
@@ -767,18 +795,19 @@
  linecutmodel.params['peakamplitude'].set(min=0)
  linecutmodel.fit()

- def plot_order_parameter(self, ax=None, **kwargs):
+ def plot_order_parameter(self, param_name='peakheight', ax=None, **kwargs):
  """
  Plot the temperature dependence of the peak height (order parameter).

- This method extracts the peak height from each temperature-dependent
- line cut fit stored in `linecutmodels` and plots it as a function
- of temperature using matplotlib.
+ This method extracts the values of a chosen parameter from each temperature-dependent
+ line cut fit stored in `linecutmodels` and plots it as a function of temperature.

  Parameters
  ----------
  ax : :class:`matplotlib.axes.Axes`, optional
  Axis object to plot on. If None, a new figure and axis are created.
+ param_name : str, optional
+ The name of the lmfit parameter to extract. Default is 'peakheight'.
  **kwargs
  Keyword arguments to be passed to the plot function.

@@ -810,7 +839,7 @@
  if self.linecutmodels[T].modelresult is None:
  raise AttributeError("Model result is empty. Have you fit the data to a model?")

- peakheights.append(self.linecutmodels[T].modelresult.params['peakheight'].value)
+ peakheights.append(self.linecutmodels[T].modelresult.params[param_name].value)

  # Plot the peakheights vs. temperature
  if ax is None:
@@ -818,7 +847,7 @@
  else:
  fig = ax.figure
  ax.plot(temperatures, peakheights, **kwargs)
- ax.set(xlabel='$T$ (K)', ylabel='peakheight')
+ ax.set(xlabel='$T$ (K)', ylabel=param_name)
  return fig, ax

  def print_fit_report(self):
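With `param_name`, any value stored in the lmfit result can be tracked against temperature, not just the default peak height. A sketch assuming the fits expose a `'peakfwhm'` parameter (plausible for a `'peak'`-prefixed pseudo-Voigt model such as the one `fit_peak_simple` uses, but verify against your own model):

```python
import matplotlib.pyplot as plt
from nxs_analysis_tools.chess import TempDependence

def plot_peak_width(td: TempDependence) -> None:
    """Plot the fitted peak width versus temperature ('peakfwhm' is an assumed name)."""
    fig, ax = plt.subplots()
    td.plot_order_parameter(param_name='peakfwhm', ax=ax, marker='o')
    fig.tight_layout()
```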
@@ -1,5 +1,6 @@
  """
- Reduces scattering data into 2D and 1D datasets.
+ Tools for reducing data into 2D and 1D, and visualization functions for plotting and animating
+ data.
  """
  import os
  import io
@@ -397,7 +398,8 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,

  # If three-dimensional, demand sum_axis to reduce to two dimensions.
  if data.ndim == 3:
- assert sum_axis is not None, "sum_axis must be specified when data.ndim == 3."
+ if sum_axis is None:
+ raise ValueError("sum_axis must be specified when data.ndim == 3.")

  if is_array:
  data = data.sum(axis=sum_axis)
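Because the `assert` is now an explicit `ValueError`, the check also survives `python -O`, and callers can catch it cleanly. A small sketch with a synthetic array standing in for real data (for plain arrays the axes are assumed to default to index coordinates):

```python
import numpy as np
from nxs_analysis_tools.datareduction import plot_slice

volume = np.random.rand(20, 20, 20)        # synthetic 3D stand-in for an (H, K, L) volume

quadmesh = plot_slice(volume, sum_axis=2)  # summed along the third axis -> 2D map

try:
    plot_slice(volume)                     # 3D input without sum_axis
except ValueError as err:
    print(err)                             # "sum_axis must be specified when data.ndim == 3."
```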
@@ -608,8 +610,9 @@
  # Return the quadmesh object
  return p

- def animate_slice_temp(temp_dependence, slice_obj, ax=None, interval=500, save_gif=False, filename='animation',
- title=True, title_fmt='d', plot_slice_kwargs=None, ax_kwargs=None):
+ def animate_slice_temp(temp_dependence, slice_obj, ax=None, reverse_temps=False, interval=500,
+ save_gif=False, filename='animation', title=True, title_fmt='d',
+ plot_slice_kwargs=None, ax_kwargs=None):
  """
  Animate 2D slices from a temperature-dependent dataset.

@@ -625,6 +628,8 @@
  Slice object to apply to each dataset; None entries are treated as ':'.
  ax : matplotlib.axes.Axes, optional
  The axes object to plot on. If None, a new figure and axes will be created.
+ reverse_temps : bool, optional
+ If True, animates datasets with increasing temperature. Default is False.
  interval : int, optional
  Delay between frames in milliseconds. Default is 500.
  save_gif : bool, optional
@@ -679,8 +684,17 @@
  raise ValueError(f"Invalid title_fmt '{title_fmt}' for temperature value '{temp}'")
  ax.set(title=f'$T$={formatted_temp}')

+ # Animate frames upon warming
+ if reverse_temps:
+ frames = temp_dependence.temperatures.copy()
+ # Animate frames upon cooling (default)
+ else:
+ frames = temp_dependence.temperatures.copy()
+ frames.reverse()
+
+
  ani = animation.FuncAnimation(fig, update,
- frames=temp_dependence.temperatures,
+ frames=frames,
  interval=interval, repeat=False)

  display(HTML(ani.to_jshtml()))
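`reverse_temps` only changes the frame order: by default the stored temperature list is copied and reversed (a cooling sequence), while `reverse_temps=True` plays the frames with increasing temperature. A hedged sketch, assuming `animate_slice_temp` is importable from `nxs_analysis_tools.datareduction` (where this diff defines it), a loaded `TempDependence` instance, and the documented convention that `None` entries in the slice tuple mean `:` (the slice values and kwargs are placeholders):

```python
from nxs_analysis_tools.chess import TempDependence
from nxs_analysis_tools.datareduction import animate_slice_temp

def animate_warming(td: TempDependence) -> None:
    """Animate an (H, K) plane at a fixed index along L, frames ordered low T -> high T."""
    animate_slice_temp(td, (None, None, 0),
                       reverse_temps=True, interval=300,
                       plot_slice_kwargs={'vmin': 0})
```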
@@ -761,7 +775,7 @@ def animate_slice_axis(data, axis, axis_values, ax=None, interval=500, save_gif=

  if title:
  axis_label = data.axes[axis]
- ax.set(title=f'${axis_label}$={parameter:{title_fmt}}')
+ ax.set(title=f'{axis_label}={parameter:{title_fmt}}')

  ani = animation.FuncAnimation(fig, update, frames=axis_values, interval=interval, repeat=False)

@@ -1299,7 +1313,7 @@ def rotate_data(data, lattice_angle, rotation_angle, rotation_axis, printout=Fal
  order=0,
  )
  # Scale data based on ratio of array dimensions
- scale2 = counts.shape[0] / counts.shape[1]
+ scale2 = 1 # counts.shape[0] / counts.shape[1]
  counts_scaled2 = ndimage.affine_transform(counts_scaled1,
  Affine2D().scale(scale2, 1).get_matrix()[:2, :2],
  offset=[(1 - scale2) * counts.shape[0] / 2, 0],
@@ -1402,7 +1416,7 @@ def rotate_data_2D(data, lattice_angle, rotation_angle):
  order=0,
  )
  # Scale data based on ratio of array dimensions
- scale2 = counts.shape[0] / counts.shape[1]
+ scale2 = 1 # counts.shape[0] / counts.shape[1]
  counts_scaled2 = ndimage.affine_transform(counts_scaled1,
  Affine2D().scale(scale2, 1).get_matrix()[:2, :2],
  offset=[(1 - scale2) * counts.shape[0] / 2, 0],
@@ -0,0 +1,108 @@
+ import os
+ import pooch
+
+ GOODBOY = pooch.create(
+ path=pooch.os_cache("nxs_analysis_tools/cubic"),
+ base_url="https://raw.githubusercontent.com/stevenjgomez/dataset-cubic/main/data/",
+ registry={
+ "cubic_15.nxs": None,
+ "15/transform.nxs": None,
+ "cubic_25.nxs": None,
+ "25/transform.nxs": None,
+ "cubic_35.nxs": None,
+ "35/transform.nxs": None,
+ "cubic_45.nxs": None,
+ "45/transform.nxs": None,
+ "cubic_55.nxs": None,
+ "55/transform.nxs": None,
+ "cubic_65.nxs": None,
+ "65/transform.nxs": None,
+ "cubic_75.nxs": None,
+ "75/transform.nxs": None,
+ "cubic_80.nxs": None,
+ "80/transform.nxs": None,
+ "cubic_104.nxs": None,
+ "104/transform.nxs": None,
+ "cubic_128.nxs": None,
+ "128/transform.nxs": None,
+ "cubic_153.nxs": None,
+ "153/transform.nxs": None,
+ "cubic_177.nxs": None,
+ "177/transform.nxs": None,
+ "cubic_202.nxs": None,
+ "202/transform.nxs": None,
+ "cubic_226.nxs": None,
+ "226/transform.nxs": None,
+ "cubic_251.nxs": None,
+ "251/transform.nxs": None,
+ "cubic_275.nxs": None,
+ "275/transform.nxs": None,
+ "cubic_300.nxs": None,
+ "300/transform.nxs": None,
+ }
+ )
+
+ def fetch_cubic(temperatures=None):
+ """
+ Load the cubic dataset.
+ """
+ fnames = []
+ temperatures = [15, 25, 35, 45, 55, 65, 75, 80, 104, 128,
+ 153, 177, 202, 226, 251, 275, 300] if temperatures is None else temperatures
+ for T in temperatures:
+ fnames.append(GOODBOY.fetch(f"cubic_{T}.nxs"))
+ fnames.append(GOODBOY.fetch(f"{T}/transform.nxs"))
+ return fnames
+
+ def cubic(temperatures=None):
+ fnames = fetch_cubic(temperatures)
+ dirname = os.path.dirname(fnames[0])
+ return dirname
+
+ POOCH = pooch.create(
+ path=pooch.os_cache("nxs_analysis_tools/hexagonal"),
+ base_url="https://raw.githubusercontent.com/stevenjgomez/dataset-hexagonal/main/data/",
+ registry={
+ "hexagonal_15.nxs": "850d666d6fb0c7bbf7f7159fed952fbd53355c3c0bfb40410874d3918a3cca49",
+ "15/transform.nxs": "45c089be295e0a5b927e963540a90b41f567edb75f283811dbc6bb4a26f2fba5",
+ "hexagonal_300.nxs": "c6a9ff704d1e42d9576d007a92a333f529e3ddf605e3f76a82ff15557b7d4a43",
+ "300/transform.nxs": "e665ba59debe8e60c90c3181e2fb1ebbce668a3d3918a89a6bf31e3563ebf32e",
+ }
+ )
+
+ def fetch_hexagonal(temperatures=None):
+ """
+ Load the hexagonal dataset.
+ """
+ fnames = []
+ temperatures = [15, 300] if temperatures is None else temperatures
+ for T in temperatures:
+ fnames.append(POOCH.fetch(f"hexagonal_{T}.nxs"))
+ fnames.append(POOCH.fetch(f"{T}/transform.nxs"))
+ return fnames
+
+ def hexagonal(temperatures=None):
+ fnames = fetch_hexagonal(temperatures)
+ dirname = os.path.dirname(fnames[0])
+ return dirname
+
+ BONES = pooch.create(
+ path=pooch.os_cache("nxs_analysis_tools/vacancies"),
+ base_url="https://raw.githubusercontent.com/stevenjgomez/dataset-vacancies/main/",
+ registry={
+ "vacancies.nxs": "39eaf8df84a0dbcacbe6ce7c6017da4da578fbf68a6218ee18ade3953c26efb5",
+ "fft.nxs": "c81178eda0ec843502935f29fcb2b0b878f7413e461612c731d37ea9e5e414a9",
+ }
+ )
+
+ def vacancies():
+ """
+ Load the vacancies dataset.
+ """
+ return BONES.fetch(f"vacancies.nxs")
+
+ def vacanciesfft():
+ """
+ Load the vacancies dataset.
+ """
+ return BONES.fetch(f"fft.nxs")
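The new `datasets` module wraps `pooch` to download small example datasets on demand and cache them locally; the `cubic()`/`hexagonal()` helpers return the cache directory holding the fetched files, while `vacancies()`/`vacanciesfft()` return individual file paths. A sketch of how it can be used (requires `pooch`, which this release adds to the dev extra, and network access on the first call):

```python
from nxs_analysis_tools.datasets import hexagonal, fetch_hexagonal, vacancies

sample_dir = hexagonal()            # fetches hexagonal_15/300 files and their transforms
files_300 = fetch_hexagonal([300])  # just the 300 K pair of files
vac_path = vacancies()              # path to the cached vacancies.nxs

print(sample_dir)
print(files_300)
print(vac_path)
```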
@@ -349,12 +349,11 @@
  - Subplot 3: The wedge slice used for reconstruction of the full symmetrized dataset.
  - Subplot 4: The symmetrized dataset.

- Example usage:
- ```
- s = Symmetrizer2D()
- s.set_parameters(theta_min, theta_max, skew_angle, mirror)
- s.test(data)
- ```
+ Example
+ -------
+ >>> s = Symmetrizer2D()
+ >>> s.set_parameters(theta_min, theta_max, skew_angle, mirror)
+ >>> s.test(data)
  """
  s = self
  symm_test = s.symmetrize_2d(data)
@@ -400,7 +399,8 @@ class Symmetrizer3D:
  The input 3D dataset to be symmetrized.
  """

- assert data is not None, "Symmetrizer3D requires a 3D NXdata object for initialization."
+ if data is None:
+ raise ValueError("Symmetrizer3D requires a 3D NXdata object for initialization.")

  self.a, self.b, self.c, self.al, self.be, self.ga = [None] * 6
  self.a_star, self.b_star, self.c_star, self.al_star, self.be_star, self.ga_star = [None] * 6
@@ -0,0 +1,14 @@
+ LICENSE
+ MANIFEST.in
+ README.md
+ pyproject.toml
+ setup.py
+ src/_meta/__init__.py
+ src/nxs_analysis_tools/__init__.py
+ src/nxs_analysis_tools/chess.py
+ src/nxs_analysis_tools/datareduction.py
+ src/nxs_analysis_tools/datasets.py
+ src/nxs_analysis_tools/fitting.py
+ src/nxs_analysis_tools/pairdistribution.py
+ tests/test_loading.py
+ tests/test_plot_slice.py
@@ -0,0 +1,46 @@
+ import pytest
+ from nexusformat.nexus import NXdata, NXfield, NXentry, NXroot, NXlink
+ from nexusformat.nexus import nxsave
+ import numpy as np
+
+ from nxs_analysis_tools.datareduction import load_transform
+ from nxs_analysis_tools.chess import TempDependence
+
+ # @pytest.fixture
+ # def nxrefine_nxs_file(tmp_path):
+ #     # Load original data
+ #     x = NXfield(np.linspace(0, 1, 10))
+ #     y = NXfield(np.linspace(0, 1, 10))
+ #     z = NXfield(np.linspace(0, 1, 10))
+ #     v = NXfield(np.random.rand(10, 10))
+ #     data = NXdata(v, (x, y, z))
+ #     raw_data = data.nxsignal.nxdata
+
+ #     # Save intermediate transformed data
+ #     out_data = NXdata(NXfield(raw_data.transpose(2, 1, 0), name='v'))
+ #     # Create subfolder '15' under the temp directory
+ #     transform_dir = tmp_path / '15'
+ #     transform_dir.mkdir()
+ #     transform_path = transform_dir / 'transform.nxs'
+ #     nxsave(str(transform_path), out_data)
+
+ #     # Construct final NXroot structure
+ #     main_file = NXroot()
+ #     main_file['entry'] = NXentry()
+ #     newH = NXfield(data.nxaxes[0].nxdata, name='Qh')
+ #     newK = NXfield(data.nxaxes[1].nxdata, name='Qk')
+ #     newL = NXfield(data.nxaxes[2].nxdata, name='Ql')
+
+ #     main_file['entry']['transform'] = NXdata(
+ #         NXlink(name='data', target='/entry/data/v', file=transform_path.name),
+ #         [newL, newK, newH]
+ #     )
+ #     main_file.entry.transform.attrs['angles'] = [90., 90., 90.]
+ #     main_file.entry.transform.attrs['signal'] = 'data'
+ #     main_file.entry.transform['title'] = '15.000K Transform'
+
+ #     final_file_path = tmp_path / 'CsV3Sb5_15.nxs'
+ #     nxsave(str(final_file_path), main_file)
+
+ #     # Return path for test usage
+ #     return final_file_path