nxs-analysis-tools 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of nxs-analysis-tools might be problematic. Click here for more details.

_meta/__init__.py CHANGED
@@ -6,5 +6,5 @@ __author__ = 'Steven J. Gomez Alvarado'
6
6
  __email__ = 'stevenjgomez@ucsb.edu'
7
7
  __copyright__ = f"2023-2025, {__author__}"
8
8
  __license__ = 'MIT'
9
- __version__ = '0.1.4'
9
+ __version__ = '0.1.6'
10
10
  __repo_url__ = 'https://github.com/stevenjgomez/nxs_analysis_tools'
@@ -14,6 +14,7 @@ from IPython.display import display, Markdown
14
14
  from nxs_analysis_tools import load_data, Scissors
15
15
  from nxs_analysis_tools.fitting import LinecutModel
16
16
  from nxs_analysis_tools.datareduction import load_transform, reciprocal_lattice_params
17
+ from lmfit.models import PseudoVoigtModel, LinearModel
17
18
 
18
19
 
19
20
  class TempDependence:
@@ -98,7 +99,11 @@ class TempDependence:
98
99
  Fit the line cut models for each temperature.
99
100
  plot_fit(mdheadings=False, **kwargs):
100
101
  Plot the fit results for each temperature.
101
- plot_order_parameter(self):
102
+ overlay_fits(numpoints=None, vertical_offset=0, cmap='viridis', ax=ax):
103
+ Plot raw data and fitted models for each temperature.
104
+ fit_peak_simple():
105
+ Perform a basic fit using a pseudo-Voigt peak shape, linear background, and no constraints.
106
+ plot_order_parameter():
102
107
  Plot the temperature dependence of the peakheight parameter.
103
108
  print_fit_report():
104
109
  Print the fit report for each temperature.
@@ -188,7 +193,7 @@ class TempDependence:
188
193
  """
189
194
  self.datasets[temperature] = data
190
195
 
191
- def load_transforms(self, temperatures_list=None, print_tree=True):
196
+ def load_transforms(self, temperatures_list=None, print_tree=True, use_nxlink=False):
192
197
  """
193
198
  Load transform datasets (from nxrefine) based on temperature.
194
199
 
@@ -196,8 +201,14 @@ class TempDependence:
196
201
  ----------
197
202
  temperatures_list : list of int or None, optional
198
203
  List of temperatures to load. If None, all available temperatures are loaded.
204
+
199
205
  print_tree : bool, optional
200
206
  Whether to print the data tree upon loading. Default True.
207
+
208
+ use_nxlink : bool, optional
209
+ If True, maintains the NXlink defined in the data file, which references
210
+ the raw data in the transform.nxs file. This saves memory when working with
211
+ many datasets. In this case, the axes are in reverse order. Default is False.
201
212
  """
202
213
  # Convert all temperatures to strings
203
214
  if temperatures_list:
@@ -240,7 +251,7 @@ class TempDependence:
240
251
 
241
252
  # Save dataset
242
253
  try:
243
- self.datasets[self.temperatures[i]] = load_transform(path, print_tree)
254
+ self.datasets[self.temperatures[i]] = load_transform(path, print_tree=print_tree, use_nxlink=use_nxlink)
244
255
  except Exception as e:
245
256
  # Report temperature that was unable to load, then raise exception.
246
257
  temp_failed = self.temperatures[i]
@@ -463,7 +474,7 @@ class TempDependence:
463
474
  y = np.array([int(t) for t in self.temperatures])
464
475
 
465
476
  # Collect counts from each temperature and ensure they are numpy arrays
466
- v = [self.linecuts[T].counts.nxdata for T in self.temperatures]
477
+ v = [self.linecuts[T].nxsignal.nxdata for T in self.temperatures]
467
478
 
468
479
  # Convert list of arrays to a 2D array for the heatmap
469
480
  v_2d = np.array(v)
@@ -542,7 +553,7 @@ class TempDependence:
542
553
 
543
554
  Parameters
544
555
  ----------
545
- model_components : Model or iterable of Model
556
+ model_components : Model, CompositeModel, or iterable of Model
546
557
  The model components to set for all line cut models.
547
558
 
548
559
  """
@@ -555,7 +566,8 @@ class TempDependence:
555
566
 
556
567
  This method sets the parameter hints for all line cut models in the analysis.
557
568
  It iterates over each line cut model and calls their respective `set_param_hint` method
558
- with the provided arguments and keyword arguments.
569
+ with the provided arguments and keyword arguments. These are implemented when the
570
+ .make_params() method is called.
559
571
 
560
572
  Parameters
561
573
  ----------
@@ -567,10 +579,40 @@ class TempDependence:
567
579
  """
568
580
  [linecutmodel.set_param_hint(*args, **kwargs)
569
581
  for linecutmodel in self.linecutmodels.values()]
582
+
583
+ def params_set(self, name, **kwargs):
584
+ """
585
+ Set constraints on a parameter for all line cut models.
586
+
587
+ This method updates the specified parameter across all models in
588
+ `self.linecutmodels` using the keyword arguments provided. These
589
+ keyword arguments are passed to the `set()` method of the parameter,
590
+ which comes from a `lmfit.Parameters` object.
591
+
592
+ Parameters
593
+ ----------
594
+ name : str
595
+ Name of the parameter to modify (must exist in each model).
596
+ **kwargs
597
+ Constraint arguments passed to `Parameter.set()`, such as `value`,
598
+ `min`, `max`, `vary`, etc.
599
+
600
+ Raises
601
+ ------
602
+ KeyError
603
+ If the parameter `name` does not exist in one of the models.
604
+
605
+ Example
606
+ -------
607
+ >>> sample.params_set('peakamplitude', value=5, min=0, vary=True)
608
+ """
609
+
610
+ for linecutmodel in self.linecutmodels.values():
611
+ linecutmodel.params[name].set(**kwargs)
570
612
 
571
613
  def make_params(self):
572
614
  """
573
- Make parameters for all line cut models.
615
+ Create and initialize the parameters for all models.
574
616
 
575
617
  This method creates the parameters for all line cut models in the analysis.
576
618
  It iterates over each line cut model and calls their respective `make_params` method.
@@ -579,7 +621,8 @@ class TempDependence:
579
621
 
580
622
  def guess(self):
581
623
  """
582
- Make initial parameter guesses for all line cut models.
624
+ Make initial parameter guesses for all line cut models. This overwrites any prior initial
625
+ values and constraints.
583
626
 
584
627
  This method generates initial parameter guesses for all line cut models in the analysis.
585
628
  It iterates over each line cut model and calls their respective `guess` method.
@@ -651,6 +694,68 @@ class TempDependence:
651
694
  title=f"{T} K",
652
695
  **kwargs)
653
696
 
697
+ def overlay_fits(self, numpoints=None, vertical_offset=0, cmap='viridis', ax=None):
698
+ """
699
+ Plot raw data and fitted models for each temperature with optional vertical offsets.
700
+
701
+ Parameters:
702
+ -----------
703
+ numpoints : int or None, default=None
704
+ Number of points to evaluate for the fitted model curves.
705
+ If None, uses the number of raw data points for each linecut.
706
+ vertical_offset : float, default=0
707
+ Amount to vertically offset each linecut for clarity.
708
+ cmap : str, default='viridis'
709
+ Name of the matplotlib colormap used to distinguish different temperatures.
710
+ ax : matplotlib.axes.Axes or None, default=None
711
+ Axis object to plot on. If None, a new figure and axis are created.
712
+
713
+ The function:
714
+ - Uses a colormap to assign unique colors to each temperature.
715
+ - Plots raw data alongside evaluated fit models for each linecut.
716
+ - Vertically offsets each trace by a constant value for visual separation.
717
+ - Displays a legend in reverse order to match top-to-bottom visual stacking.
718
+ - Automatically labels the x- and y-axes based on NeXus-style data metadata.
719
+ """
720
+
721
+ # Create a figure and axes if an axis is not already provided
722
+ _, ax = plt.subplots() if ax is None else (None, ax)
723
+
724
+ # Generate a color palette for the various temperatures
725
+ cmap = plt.get_cmap(cmap)
726
+ colors = [cmap(i / len(self.temperatures)) for i, _ in enumerate(self.temperatures)]
727
+
728
+ for i, lm in enumerate(self.linecutmodels.values()):
729
+ # Plot the raw data
730
+ ax.plot(lm.x, lm.y + vertical_offset * i, '.', c=colors[i])
731
+
732
+ # Evaluate the fit
733
+ numpoints = len(lm.x) if numpoints is None else numpoints
734
+ x_eval = np.linspace(lm.x.min(), lm.x.max(), numpoints)
735
+ y_eval = lm.modelresult.eval(x=x_eval)
736
+ ax.plot(x_eval, y_eval + vertical_offset * i, '-', c=colors[i], label=self.temperatures[i])
737
+
738
+ # Reverse legend entries to match top-to-bottom stacking
739
+ handles, labels = ax.get_legend_handles_labels()
740
+ ax.legend(handles[::-1], labels[::-1])
741
+
742
+ # Add axis labels
743
+ ax.set(xlabel=lm.data.nxaxes[0].nxname, ylabel=lm.data.nxsignal.nxname)
744
+
745
+ def fit_peak_simple(self):
746
+ """
747
+ Fit all linecuts in the temperature series using a pseudo-Voigt peak shape and linear
748
+ background, with no constraints.
749
+ """
750
+
751
+ for T in self.temperatures:
752
+ linecutmodel = self.linecutmodels[T]
753
+ linecutmodel.set_model_components([PseudoVoigtModel(prefix='peak'),
754
+ LinearModel(prefix='background')])
755
+ linecutmodel.make_params()
756
+ linecutmodel.guess()
757
+ linecutmodel.fit()
758
+
654
759
  def plot_order_parameter(self):
655
760
  """
656
761
  Plot the temperature dependence of the peak height (order parameter).
@@ -49,7 +49,7 @@ def load_data(path, print_tree=True):
49
49
  return g.entry.data
50
50
 
51
51
 
52
- def load_transform(path, print_tree=True):
52
+ def load_transform(path, print_tree=True, use_nxlink=False):
53
53
  """
54
54
  Load transform data from an nxrefine output file.
55
55
 
@@ -61,16 +61,24 @@ def load_transform(path, print_tree=True):
61
61
  print_tree : bool, optional
62
62
  If True, prints the NeXus data tree upon loading. Default is True.
63
63
 
64
+ use_nxlink : bool, optional
65
+ If True, maintains the NXlink defined in the data file, which references
66
+ the raw data in the transform.nxs file. This saves memory when working with
67
+ many datasets. In this case, the axes are in reverse order. Default is False.
68
+
64
69
  Returns
65
70
  -------
66
71
  data : NXdata
67
72
  The loaded transform data as an NXdata object.
68
73
  """
69
74
 
70
- g = nxload(path)
75
+ root = nxload(path)
71
76
 
72
- data = NXdata(NXfield(g.entry.transform.data.nxdata.transpose(2, 1, 0), name='counts'),
73
- (g.entry.transform.Qh, g.entry.transform.Qk, g.entry.transform.Ql))
77
+ if use_nxlink:
78
+ data = root.entry.transform
79
+ else:
80
+ data = NXdata(NXfield(root.entry.transform.data.nxdata.transpose(2, 1, 0), name='counts'),
81
+ (root.entry.transform.Qh, root.entry.transform.Qk, root.entry.transform.Ql))
74
82
 
75
83
  print(data.tree) if print_tree else None
76
84
 
@@ -138,6 +146,36 @@ def rebin_3d(array):
138
146
 
139
147
  return rebinned
140
148
 
149
+ def rebin_2d(array):
150
+ """
151
+ Rebins a 2D NumPy array by a factor of 2 along each dimension.
152
+
153
+ This function reduces the size of the input array by averaging over non-overlapping
154
+ 2x2 blocks. Each dimension of the input array must be divisible by 2.
155
+
156
+ Parameters
157
+ ----------
158
+ array : np.ndarray
159
+ A 2-dimensional NumPy array to be rebinned.
160
+
161
+ Returns
162
+ -------
163
+ np.ndarray
164
+ A rebinned array with shape (N//2, M//2) if the original shape was (N, M).
165
+ """
166
+
167
+ # Ensure the array shape is divisible by 2 in each dimension
168
+ shape = array.shape
169
+ if any(dim % 2 != 0 for dim in shape):
170
+ raise ValueError("Each dimension of the array must be divisible by 2 to rebin.")
171
+
172
+ # Reshape the array to group the data into 2x2 blocks
173
+ reshaped = array.reshape(shape[0] // 2, 2, shape[1] // 2, 2)
174
+
175
+ # Average over the 2x2 blocks
176
+ rebinned = reshaped.mean(axis=(1, 3))
177
+
178
+ return rebinned
141
179
 
142
180
  def rebin_1d(array):
143
181
  """
@@ -179,8 +217,7 @@ def rebin_nxdata(data):
179
217
  - Then, each axis is rebinned using `rebin_1d`.
180
218
 
181
219
  The signal array is similarly cropped to remove the last element along any dimension
182
- with an odd shape, and then the data is averaged over 2x2x... blocks using the same
183
- `rebin_1d` method (assumed to apply across 1D slices).
220
+ with an odd shape, and then the data is averaged over 2x2x... blocks.
184
221
 
185
222
  Parameters
186
223
  ----------
@@ -224,7 +261,12 @@ def rebin_nxdata(data):
224
261
  data_arr = data_arr[tuple(slice_obj)]
225
262
 
226
263
  # Perform actual rebinning
227
- data_arr = rebin_3d(data_arr)
264
+ if data.ndim == 3:
265
+ data_arr = rebin_3d(data_arr)
266
+ elif data.ndim == 2:
267
+ data_arr = rebin_2d(data_arr)
268
+ elif data.ndim == 1:
269
+ data_arr = rebin_1d(data_arr)
228
270
 
229
271
  return NXdata(NXfield(data_arr, name=data.signal),
230
272
  tuple([axis for axis in new_axes])
@@ -246,13 +288,15 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
246
288
  data : :class:`nexusformat.nexus.NXdata` or ndarray
247
289
  The dataset to plot. Can be an `NXdata` object or a `numpy` array.
248
290
 
249
- X : NXfield, optional
250
- The X axis values. If None, a default range from 0 to the number of
251
- columns in `data` is used.
291
+ X : ndarray or NXfield, optional
292
+ The values for the X axis. If `data` is an NXdata object and `X` is None, the X axis is
293
+ inherited from the NXdata object. If `data` is a NumPy ndarray and `X` is None, a default
294
+ range from 0 to the number of columns in `data` is used.
252
295
 
253
- Y : NXfield, optional
254
- The Y axis values. If None, a default range from 0 to the number of
255
- rows in `data` is used.
296
+ Y : ndarray or NXfield, optional
297
+ The values for the Y axis. If `data` is an NXdata object and `Y` is None, the Y axis is
298
+ inherited from the NXdata object. If `data` is a NumPy ndarray and `Y` is None, a default
299
+ range from 0 to the number of rows in `data` is used.
256
300
 
257
301
  sum_axis : int, optional
258
302
  If the input data is 3D, this specifies the axis to sum over in order
@@ -327,9 +371,17 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
327
371
  p : :class:`matplotlib.collections.QuadMesh`
328
372
  The `matplotlib` QuadMesh object representing the plotted data.
329
373
  """
374
+
375
+ # Some logic to control the processing of the arrays
330
376
  is_array = False
331
377
  is_nxdata = False
378
+ no_xy_provided = True
379
+
380
+ # If X,Y not provided by user
381
+ if X is not None and Y is not None:
382
+ no_xy_provided = False
332
383
 
384
+ # Examine data type to be plotted
333
385
  if isinstance(data, np.ndarray):
334
386
  is_array = True
335
387
  elif isinstance(data, (NXdata, NXfield)):
@@ -339,43 +391,72 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
339
391
  f"Supported types are np.ndarray and NXdata.")
340
392
 
341
393
  # If three-dimensional, demand sum_axis to reduce to two dimensions.
342
- if is_array and len(data.shape) == 3:
343
- assert sum_axis is not None, "sum_axis must be specified when data is 3D."
344
-
345
- data = data.sum(axis=sum_axis)
394
+ if data.ndim == 3:
395
+ assert sum_axis is not None, "sum_axis must be specified when data.ndim == 3."
346
396
 
347
- if is_nxdata and len(data.shape) == 3:
348
- assert sum_axis is not None, "sum_axis must be specified when data is 3D."
397
+ if is_array:
398
+ data = data.sum(axis=sum_axis)
399
+ elif is_nxdata:
400
+ arr = data.nxsignal.nxdata
401
+ arr = arr.sum(axis=sum_axis)
349
402
 
350
- arr = data.nxsignal.nxdata
351
- arr = arr.sum(axis=sum_axis)
403
+ # Create a 2D template from the original nxdata
404
+ slice_obj = [slice(None)] * len(data.shape)
405
+ slice_obj[sum_axis] = 0
352
406
 
353
- # Create a 2D template from the original nxdata
354
- slice_obj = [slice(None)] * len(data.shape)
355
- slice_obj[sum_axis] = 0
356
-
357
- # Use the 2D template to create a new nxdata
358
- data = array_to_nxdata(arr, data[slice_obj])
407
+ # Use the 2D template to create a new nxdata
408
+ data = array_to_nxdata(arr, data[slice_obj])
359
409
 
410
+ # If the data is of type ndarray, then convert to NXdata
360
411
  if is_array:
412
+ # Convert X to NXfield if it is not already
361
413
  if X is None:
362
- X = NXfield(np.linspace(0, data.shape[0], data.shape[0]), name='x')
414
+ X = NXfield(np.arange(data.shape[0]), name='x')
415
+ elif isinstance(X, np.ndarray):
416
+ X = NXfield(X, name='x')
417
+ elif isinstance(X, NXfield):
418
+ pass
419
+ else:
420
+ raise TypeError("X must be of type np.ndarray or NXdata")
421
+
422
+ # Convert Y to NXfield if it is not already
363
423
  if Y is None:
364
- Y = NXfield(np.linspace(0, data.shape[1], data.shape[1]), name='y')
424
+ Y = NXfield(np.arange(data.shape[1]), name='y')
425
+ elif isinstance(Y, np.ndarray):
426
+ Y = NXfield(Y, name='y')
427
+ elif isinstance(Y, NXfield):
428
+ pass
429
+ else:
430
+ raise TypeError("Y must be of type np.ndarray or NXdata")
431
+
365
432
  if transpose:
366
433
  X, Y = Y, X
367
434
  data = data.transpose()
435
+
368
436
  data = NXdata(NXfield(data, name='value'), (X, Y))
369
- data_arr = data[data.signal].nxdata.transpose()
437
+ data_arr = data.nxsignal.nxdata.transpose()
438
+ # Otherwise, if data is of type NXdata, then decide whether to create axes,
439
+ # use provided axes, or inherit axes.
370
440
  elif is_nxdata:
371
441
  if X is None:
372
- X = data[data.axes[0]]
442
+ X = data.nxaxes[0]
443
+ elif isinstance(X, np.ndarray):
444
+ X = NXfield(X, name='x')
445
+ elif isinstance(X, NXdata):
446
+ pass
373
447
  if Y is None:
374
- Y = data[data.axes[1]]
448
+ Y = data.nxaxes[1]
449
+ elif isinstance(Y, np.ndarray):
450
+ Y = NXfield(Y, name='y')
451
+ elif isinstance(Y, NXdata):
452
+ pass
453
+
454
+ # Transpose axes and data if specified
375
455
  if transpose:
376
456
  X, Y = Y, X
377
457
  data = data.transpose()
378
- data_arr = data[data.signal].nxdata.transpose()
458
+
459
+ data_arr = data.nxsignal.nxdata.transpose()
379
460
 
380
461
  # Display Markdown heading
381
462
  if mdheading is None:
@@ -407,6 +488,7 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
407
488
  elif logscale:
408
489
  norm = colors.LogNorm(vmin=vmin, vmax=vmax)
409
490
 
491
+
410
492
  # Plot data
411
493
  p = ax.pcolormesh(X.nxdata, Y.nxdata, data_arr, shading='auto', norm=norm, cmap=cmap, **kwargs)
412
494
 
@@ -448,14 +530,15 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
448
530
  # Correct aspect ratio for the x/y axes after transformation
449
531
  ax.set(aspect=np.cos(skew_angle_adj * np.pi / 180))
450
532
 
451
- # Add tick marks all around
452
- ax.tick_params(direction='in', top=True, right=True, which='both')
453
533
 
454
534
  # Automatically set tick locations, only if NXdata or if X,Y axes are provided for an array
455
- if is_nxdata or (is_array and (X is not None and Y is not None)):
535
+ if is_nxdata or (is_array and (no_xy_provided == False)):
456
536
  # Add default minor ticks on x
457
537
  ax.xaxis.set_minor_locator(MultipleLocator(1))
458
538
 
539
+ # Add tick marks all around
540
+ ax.tick_params(direction='in', top=True, right=True, which='both')
541
+
459
542
  if xticks is not None:
460
543
  # Use user provided values
461
544
  ax.xaxis.set_major_locator(MultipleLocator(xticks))
@@ -466,6 +549,9 @@ def plot_slice(data, X=None, Y=None, sum_axis=None, transpose=False, vmin=None,
466
549
  if yticks is not None:
467
550
  # Use user provided values
468
551
  ax.yaxis.set_major_locator(MultipleLocator(yticks))
552
+ else:
553
+ # Add tick marks all around
554
+ ax.tick_params(direction='in', top=True, right=True, which='major')
469
555
 
470
556
  # Apply transform to tick marks
471
557
  for i in range(0, len(ax.xaxis.get_ticklines())):
@@ -753,8 +839,8 @@ class Scissors:
753
839
  slice_obj[2] = center[2]
754
840
 
755
841
  p1 = plot_slice(data[slice_obj],
756
- X=data[data.axes[0]],
757
- Y=data[data.axes[1]],
842
+ X=data.nxaxes[0],
843
+ Y=data.nxaxes[1],
758
844
  ax=axes[0],
759
845
  **kwargs)
760
846
  ax = axes[0]
@@ -777,8 +863,8 @@ class Scissors:
777
863
  slice_obj[1] = center[1]
778
864
 
779
865
  p2 = plot_slice(data[slice_obj],
780
- X=data[data.axes[0]],
781
- Y=data[data.axes[2]],
866
+ X=data.nxaxes[0],
867
+ Y=data.nxaxes[2],
782
868
  ax=axes[1],
783
869
  **kwargs)
784
870
  ax = axes[1]
@@ -801,8 +887,8 @@ class Scissors:
801
887
  slice_obj[0] = center[0]
802
888
 
803
889
  p3 = plot_slice(data[slice_obj],
804
- X=data[data.axes[1]],
805
- Y=data[data.axes[2]],
890
+ X=data.nxaxes[1],
891
+ Y=data.nxaxes[2],
806
892
  ax=axes[2],
807
893
  **kwargs)
808
894
  ax = axes[2]
@@ -849,31 +935,31 @@ class Scissors:
849
935
  slice_obj = [slice(None)] * data.ndim
850
936
  slice_obj[2] = center[2]
851
937
  p1 = plot_slice(data[slice_obj],
852
- X=data[data.axes[0]],
853
- Y=data[data.axes[1]],
938
+ X=data.nxaxes[0],
939
+ Y=data.nxaxes[1],
854
940
  ax=axes[0],
855
941
  **kwargs)
856
- axes[0].set_aspect(len(data[data.axes[0]].nxdata) / len(data[data.axes[1]].nxdata))
942
+ axes[0].set_aspect(len(data.nxaxes[0].nxdata) / len(data.nxaxes[1].nxdata))
857
943
 
858
944
  # Plot cross section 2
859
945
  slice_obj = [slice(None)] * data.ndim
860
946
  slice_obj[1] = center[1]
861
947
  p3 = plot_slice(data[slice_obj],
862
- X=data[data.axes[0]],
863
- Y=data[data.axes[2]],
948
+ X=data.nxaxes[0],
949
+ Y=data.nxaxes[2],
864
950
  ax=axes[1],
865
951
  **kwargs)
866
- axes[1].set_aspect(len(data[data.axes[0]].nxdata) / len(data[data.axes[2]].nxdata))
952
+ axes[1].set_aspect(len(data.nxaxes[0].nxdata) / len(data.nxaxes[2].nxdata))
867
953
 
868
954
  # Plot cross-section 3
869
955
  slice_obj = [slice(None)] * data.ndim
870
956
  slice_obj[0] = center[0]
871
957
  p2 = plot_slice(data[slice_obj],
872
- X=data[data.axes[1]],
873
- Y=data[data.axes[2]],
958
+ X=data.nxaxes[1],
959
+ Y=data.nxaxes[2],
874
960
  ax=axes[2],
875
961
  **kwargs)
876
- axes[2].set_aspect(len(data[data.axes[1]].nxdata) / len(data[data.axes[2]].nxdata))
962
+ axes[2].set_aspect(len(data.nxaxes[1].nxdata) / len(data.nxaxes[2].nxdata))
877
963
 
878
964
  # Adjust subplot padding
879
965
  fig.subplots_adjust(wspace=0.3)
@@ -984,7 +1070,7 @@ def rotate_data(data, lattice_angle, rotation_angle, rotation_axis, printout=Fal
984
1070
  Rotated data as an NXdata object.
985
1071
  """
986
1072
  # Define output array
987
- output_array = np.zeros(data[data.signal].shape)
1073
+ output_array = np.zeros(data.nxsignal.shape)
988
1074
 
989
1075
  # Define shear transformation
990
1076
  skew_angle_adj = 90 - lattice_angle
@@ -1085,7 +1171,7 @@ def rotate_data(data, lattice_angle, rotation_angle, rotation_axis, printout=Fal
1085
1171
  output_array[:, :, i] = counts_unpadded
1086
1172
  print('\nDone.')
1087
1173
  return NXdata(NXfield(output_array, name=p.padded.signal),
1088
- (data[data.axes[0]], data[data.axes[1]], data[data.axes[2]]))
1174
+ (data.nxaxes[0], data.nxaxes[1], data.nxaxes[2]))
1089
1175
 
1090
1176
 
1091
1177
  def rotate_data_2D(data, lattice_angle, rotation_angle):
@@ -1180,7 +1266,7 @@ def rotate_data_2D(data, lattice_angle, rotation_angle):
1180
1266
 
1181
1267
  print('\nDone.')
1182
1268
  return NXdata(NXfield(counts_unpadded, name=p.padded.signal),
1183
- (data[data.axes[0]], data[data.axes[1]]))
1269
+ (data.nxaxes[0], data.nxaxes[1]))
1184
1270
 
1185
1271
 
1186
1272
  class Padder:
@@ -1265,7 +1351,7 @@ class Padder:
1265
1351
  data = self.data
1266
1352
  self.padding = padding
1267
1353
 
1268
- padded_shape = tuple(data[data.signal].nxdata.shape[i]
1354
+ padded_shape = tuple(data.nxsignal.nxdata.shape[i]
1269
1355
  + self.padding[i] * 2 for i in range(data.ndim))
1270
1356
 
1271
1357
  # Create padded dataset
@@ -1275,7 +1361,7 @@ class Padder:
1275
1361
  for i, _ in enumerate(slice_obj):
1276
1362
  slice_obj[i] = slice(self.padding[i], -self.padding[i], None)
1277
1363
  slice_obj = tuple(slice_obj)
1278
- padded[slice_obj] = data[data.signal].nxdata
1364
+ padded[slice_obj] = data.nxsignal.nxdata
1279
1365
 
1280
1366
  padmaxes = tuple(self.maxes[i] + self.padding[i] * self.steps[i]
1281
1367
  for i in range(data.ndim))
@@ -3,8 +3,9 @@ Module for fitting of linecuts using the lmfit package.
3
3
  """
4
4
 
5
5
  import operator
6
- from lmfit.model import Model
7
- from lmfit.model import CompositeModel
6
+ from lmfit import Parameters
7
+ from lmfit.model import Model, CompositeModel
8
+ from lmfit.models import PseudoVoigtModel, LinearModel
8
9
  import matplotlib.pyplot as plt
9
10
  import numpy as np
10
11
 
@@ -66,6 +67,8 @@ class LinecutModel:
66
67
  Fit the model to the data.
67
68
  plot_fit(self, numpoints=None, fit_report=True, **kwargs)
68
69
  Plot the fitted model.
70
+ fit_peak_simple():
71
+ Perform a basic fit using a pseudo-Voigt peak shape, linear background, and no constraints.
69
72
  print_fit_report(self)
70
73
  Print the fit report.
71
74
  """
@@ -110,15 +113,25 @@ class LinecutModel:
110
113
 
111
114
  Parameters
112
115
  ----------
113
- model_components : Model or list of Models
114
- The model component(s) to be used for fitting,
115
- which will be combined into a CompositeModel.
116
+ model_components : Model, CompositeModel, or iterable of Model
117
+ The model component(s) to be used for fitting.
116
118
  """
117
119
 
118
120
  # If the model only has one component, then use it as the model
119
121
  if isinstance(model_components, Model):
120
122
  self.model = model_components
121
- # Else, combine the components into a composite model and use that as the
123
+ self.params = self.model.make_params()
124
+
125
+ # If the model is a composite model, then use it as the model
126
+ elif isinstance(model_components, CompositeModel):
127
+ self.model = model_components
128
+ self.model_components = self.model.components
129
+ # Make params for each component of the model
130
+ self.params = Parameters()
131
+ for component in self.model.components:
132
+ self.params.update(component.make_params())
133
+
134
+ # Else, combine the components into a composite model and use that as the model
122
135
  else:
123
136
  self.model_components = model_components
124
137
  self.model = model_components[0]
@@ -127,9 +140,15 @@ class LinecutModel:
127
140
  for component in model_components[1:]:
128
141
  self.model = CompositeModel(self.model, component, operator.add)
129
142
 
143
+ # Make params for each component of the model
144
+ self.params = Parameters()
145
+ for component in self.model.components:
146
+ self.params.update(component.make_params())
147
+
130
148
  def set_param_hint(self, *args, **kwargs):
131
149
  """
132
- Set parameter hints for the model.
150
+ Set parameter hints for the model. These are implemented when the .make_params() method
151
+ is called.
133
152
 
134
153
  Parameters
135
154
  ----------
@@ -159,10 +178,22 @@ class LinecutModel:
159
178
 
160
179
  def guess(self):
161
180
  """
162
- Perform initial guesses for each model component.
181
+ Perform initial guesses for each model component and update params. This overwrites any
182
+ prior initial values and constraints.
183
+
184
+ Returns
185
+ -------
186
+ components_params : list
187
+ A list containing params objects for each component of the model.
163
188
  """
164
- for model_component in list(self.model_components):
189
+
190
+ components_params = []
191
+
192
+ for model_component in self.model.components:
165
193
  self.params.update(model_component.guess(self.y, x=self.x))
194
+ components_params.append(model_component.guess(self.y, x=self.x))
195
+
196
+ return components_params
166
197
 
167
198
  def print_initial_params(self):
168
199
  """
@@ -251,6 +282,17 @@ class LinecutModel:
251
282
  if fit_report:
252
283
  print(self.modelresult.fit_report())
253
284
  return ax
285
+
286
+ def fit_peak_simple(self):
287
+ """
288
+ Fit this linecut using a pseudo-Voigt peak shape and linear
289
+ background, with no constraints.
290
+ """
291
+ self.set_model_components([PseudoVoigtModel(prefix='peak'),
292
+ LinearModel(prefix='background')])
293
+ self.make_params()
294
+ self.guess()
295
+ self.fit()
254
296
 
255
297
  def print_fit_report(self):
256
298
  """
@@ -1039,6 +1039,8 @@ class Interpolator:
1039
1039
  The dataset containing the data to be interpolated.
1040
1040
  """
1041
1041
  self.data = data
1042
+ self.interpolated = data
1043
+ self.tapered = data
1042
1044
 
1043
1045
  def set_kernel(self, kernel):
1044
1046
  """
@@ -1198,6 +1200,82 @@ class Interpolator:
1198
1200
 
1199
1201
  self.window = window
1200
1202
 
1203
+ def set_ellipsoidal_tukey_window(self, tukey_alpha=1.0, coeffs=None):
1204
+ """
1205
+ Set an ellipsoidal Tukey window function for data tapering.
1206
+
1207
+ The Tukey window smoothly tapers the data to zero near the edges of the
1208
+ elliptical region defined by quadratic form coefficients. This helps reduce
1209
+ artifacts in Fourier transforms and other operations sensitive to boundary effects.
1210
+
1211
+ Parameters
1212
+ ----------
1213
+ tukey_alpha : float, optional
1214
+ Tapering parameter for the Tukey window, between 0 and 1.
1215
+ - `tukey_alpha = 0` results in an ellipsoidal window (no tapering).
1216
+ - `tukey_alpha = 1` results in a full cosine taper.
1217
+ Default is 1.0.
1218
+
1219
+ coeffs : tuple of float, optional
1220
+ Coefficients `(c0, c1, c2, c3, c4, c5)` defining the ellipsoidal
1221
+ quadratic form:
1222
+ R^2 = c0*H^2 + c1*H*K + c2*K^2 + c3*K*L + c4*L^2 + c5*L*H
1223
+ If None, coefficients are automatically set to match the edges of the
1224
+ reciprocal space axes (H, K, L), which should be appropriate in cases
1225
+ where H, K, and L are orthogonal.
1226
+
1227
+ Notes
1228
+ -----
1229
+ - The maximum allowed radius `Qmax` is determined from the minimum radius
1230
+ value along the edges of reciprocal space.
1231
+ - The Tukey window is applied radially as a function of the distance `R`
1232
+ from the center, defined by the ellipsoidal quadratic form.
1233
+
1234
+ Sets
1235
+ ----
1236
+ self.window : ndarray
1237
+ A 3D array of the same shape as the data, containing the Tukey window
1238
+ values between 0 and 1.
1239
+ """
1240
+
1241
+ # Initialize axes
1242
+ H,K,L = [axis for axis in self.data.nxaxes]
1243
+
1244
+ # Initialize coeffs (default to window reaching edge of array)
1245
+ smallest_extent = np.min([H.max(), K.max(), L.max()])
1246
+ c = coeffs if coeffs is not None else ((smallest_extent / H.max()) ** 2,
1247
+ 0,
1248
+ (smallest_extent / K.max()) ** 2,
1249
+ 0,
1250
+ (smallest_extent / L.max()) ** 2,
1251
+ 0
1252
+ )
1253
+
1254
+ # Create meshgrid
1255
+ HH, KK, LL = np.meshgrid(H,K,L, indexing='ij')
1256
+
1257
+ # Create radius array
1258
+ RR = np.sqrt(
1259
+ c[0] * HH ** 2 +
1260
+ c[1] * HH * KK +
1261
+ c[2] * KK ** 2 +
1262
+ c[3] * KK * LL +
1263
+ c[4] * LL ** 2 +
1264
+ c[5] * LL * HH
1265
+ )
1266
+
1267
+ # Check the edges of reciprocal space to verify Qmax
1268
+ # Create list of pixels where H = H.max() or K = K.max() or L = L.max()
1269
+ edges = np.where(np.logical_or(np.logical_or(HH == H.max(), KK == K.max()), LL == L.max()), RR, RR.max())
1270
+ Qmax = edges.min()
1271
+ alpha = tukey_alpha
1272
+ period = (Qmax * alpha) / np.pi
1273
+
1274
+ window = np.where(RR > Qmax * (1 - alpha), (np.cos((RR - Qmax * (1 - alpha)) / period) + 1) / 2, 1)
1275
+ window = np.where(RR > Qmax, 0, window)
1276
+
1277
+ self.window = window
1278
+
1201
1279
  def set_window(self, window):
1202
1280
  """
1203
1281
  Set a custom window function for data tapering.
@@ -1578,6 +1656,41 @@ class DeltaPDF:
1578
1656
  self.interpolator.set_hexagonal_tukey_window(tukey_alphas)
1579
1657
  self.window = self.interpolator.window
1580
1658
 
1659
+ def set_ellipsoidal_tukey_window(self, tukey_alpha=1.0, coeffs=None):
1660
+ """
1661
+ Set an ellipsoidal Tukey window function for data tapering.
1662
+
1663
+ The Tukey window smoothly tapers the data to zero near the edges of the
1664
+ elliptical region defined by quadratic form coefficients. This helps reduce
1665
+ artifacts in Fourier transforms and other operations sensitive to boundary effects.
1666
+
1667
+ Parameters
1668
+ ----------
1669
+ tukey_alpha : float, optional
1670
+ Tapering parameter for the Tukey window, between 0 and 1.
1671
+ - `tukey_alpha = 0` results in a ellipsoidal window (no tapering).
1672
+ - `tukey_alpha = 1` results in a full cosine taper.
1673
+ Default is 1.0.
1674
+
1675
+ coeffs : tuple of float, optional
1676
+ Coefficients `(c0, c1, c2, c3, c4, c5)` defining the ellipsoidal
1677
+ quadratic form:
1678
+ R^2 = c0*H^2 + c1*H*K + c2*K^2 + c3*K*L + c4*L^2 + c5*L*H
1679
+ If None, coefficients are automatically set to match the edges of the
1680
+ reciprocal space axes (H, K, L), which should be appropriate in cases
1681
+ where H, K, and L are orthogonal.
1682
+
1683
+ Notes
1684
+ -----
1685
+ - The maximum allowed radius `Qmax` is determined from the minimum radius
1686
+ value along the edges of reciprocal space.
1687
+ - The Tukey window is applied radially as a function of the distance `R`
1688
+ from the center, defined by the ellipsoidal quadratic form.
1689
+ """
1690
+ self.interpolator.set_ellipsoidal_tukey_window(tukey_alpha=tukey_alpha, coeffs=coeffs)
1691
+ self.window = self.interpolator.window
1692
+
1693
+
1581
1694
  def set_window(self, window):
1582
1695
  """
1583
1696
  Set a custom window function for data tapering.
@@ -1,30 +1,9 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: nxs-analysis-tools
3
- Version: 0.1.4
3
+ Version: 0.1.6
4
4
  Summary: Reduce and transform nexus format (.nxs) scattering data.
5
5
  Author-email: "Steven J. Gomez Alvarado" <stevenjgomez@ucsb.edu>
6
- License: MIT License
7
-
8
- Copyright (c) 2023-2025 Steven J. Gomez Alvarado
9
-
10
- Permission is hereby granted, free of charge, to any person obtaining a copy
11
- of this software and associated documentation files (the "Software"), to deal
12
- in the Software without restriction, including without limitation the rights
13
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14
- copies of the Software, and to permit persons to whom the Software is
15
- furnished to do so, subject to the following conditions:
16
-
17
- The above copyright notice and this permission notice shall be included in all
18
- copies or substantial portions of the Software.
19
-
20
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
23
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
26
- SOFTWARE.
27
-
6
+ License-Expression: MIT
28
7
  Project-URL: Download, https://pypi.org/project/nxs-analysis-tools/
29
8
  Project-URL: Bug Tracker, https://github.com/stevenjgomez/nxs_analysis_tools/issues
30
9
  Project-URL: Documentation, https://nxs-analysis-tools.readthedocs.io/en/stable/
@@ -33,7 +12,6 @@ Project-URL: DOI, https://doi.org/10.5281/zenodo.15186359
33
12
  Keywords: diffraction,xrd,nexusformat,nexus,nxs,scattering
34
13
  Classifier: Development Status :: 5 - Production/Stable
35
14
  Classifier: Intended Audience :: Science/Research
36
- Classifier: License :: OSI Approved :: MIT License
37
15
  Classifier: Programming Language :: Python
38
16
  Classifier: Programming Language :: Python :: 3
39
17
  Classifier: Programming Language :: Python :: 3.9
@@ -0,0 +1,11 @@
1
+ _meta/__init__.py,sha256=KHPI9g5HZm4SkX7sGgxiwCtGNi1EE_Wxga18Nnq74T4,346
2
+ nxs_analysis_tools/__init__.py,sha256=3UFf4nxseTCfsPDSluxupmpd0Es55F9_du5T8-z4CsE,570
3
+ nxs_analysis_tools/chess.py,sha256=Wq5mbLOOdVJPz7e9v9Ao4G2nvpvfZRd_so4ePnrGDOQ,31668
4
+ nxs_analysis_tools/datareduction.py,sha256=yb0L6Y48z72JUpYg5GOjmJxvDfbOYus0WKhjZNMY4rA,52110
5
+ nxs_analysis_tools/fitting.py,sha256=kRMhjObetGqmZ5-Jk1OHKGrXW4qI4D37s8VeC2ygJV8,10275
6
+ nxs_analysis_tools/pairdistribution.py,sha256=BDJdPiQ-XEk8vZKiFQnCotaWeS5cDDGqmSyhzC3fwrQ,65586
7
+ nxs_analysis_tools-0.1.6.dist-info/licenses/LICENSE,sha256=bE6FnYixueAGAnEfUuumbkSeMgdBguAAkheVgjv47Jo,1086
8
+ nxs_analysis_tools-0.1.6.dist-info/METADATA,sha256=e_zGdrs-JbBrUDr6CtjCJpQT5aFt2sVZybxDhKXraDc,3180
9
+ nxs_analysis_tools-0.1.6.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
10
+ nxs_analysis_tools-0.1.6.dist-info/top_level.txt,sha256=8U000GNPzo6T6pOMjRdgOSO5heMzLMGjkxa1CDtyMHM,25
11
+ nxs_analysis_tools-0.1.6.dist-info/RECORD,,
@@ -1,11 +0,0 @@
1
- _meta/__init__.py,sha256=o4CUg4WvH362OKDTpP79g3gJNh8m8f690oZznkfuXoc,346
2
- nxs_analysis_tools/__init__.py,sha256=3UFf4nxseTCfsPDSluxupmpd0Es55F9_du5T8-z4CsE,570
3
- nxs_analysis_tools/chess.py,sha256=Yi33jFtsuBieadG5X-Sbip9ioyDzjrIBhTq2a_Ia4ao,26930
4
- nxs_analysis_tools/datareduction.py,sha256=EVluTVGedr4R6PxZHTreyweN8IUuKPOIcx0ZDPL9SDE,49229
5
- nxs_analysis_tools/fitting.py,sha256=xkx66AJiJJO_kC1OBa3bcSDwVV20MEowRa59K-yFlqg,8551
6
- nxs_analysis_tools/pairdistribution.py,sha256=wDH5qGN3jpo4gSKnUBxNkGbX-qUOEQaSOyvOQ_V4IbE,60720
7
- nxs_analysis_tools-0.1.4.dist-info/licenses/LICENSE,sha256=bE6FnYixueAGAnEfUuumbkSeMgdBguAAkheVgjv47Jo,1086
8
- nxs_analysis_tools-0.1.4.dist-info/METADATA,sha256=6VSwbPQAPARxE2Tlgot70lUcNRCKbY-TNviyO2R6_JM,4471
9
- nxs_analysis_tools-0.1.4.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
10
- nxs_analysis_tools-0.1.4.dist-info/top_level.txt,sha256=8U000GNPzo6T6pOMjRdgOSO5heMzLMGjkxa1CDtyMHM,25
11
- nxs_analysis_tools-0.1.4.dist-info/RECORD,,