pyreduce-astro 0.7a6-cp313-cp313-win_amd64.whl → 0.7b1-cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. pyreduce/__main__.py +136 -29
  2. pyreduce/clib/Release/_slitfunc_2d.obj +0 -0
  3. pyreduce/clib/Release/_slitfunc_bd.obj +0 -0
  4. pyreduce/clib/_slitfunc_2d.cp313-win_amd64.pyd +0 -0
  5. pyreduce/clib/_slitfunc_bd.cp313-win_amd64.pyd +0 -0
  6. pyreduce/clib/build_extract.py +60 -55
  7. pyreduce/configuration.py +20 -0
  8. pyreduce/cwrappers.py +26 -40
  9. pyreduce/datasets.py +125 -173
  10. pyreduce/estimate_background_scatter.py +8 -8
  11. pyreduce/extract.py +180 -181
  12. pyreduce/{extraction_width.py → extraction_height.py} +4 -4
  13. pyreduce/instruments/common.py +36 -19
  14. pyreduce/instruments/crires_plus.py +25 -0
  15. pyreduce/instruments/jwst_niriss.py +2 -10
  16. pyreduce/pipeline.py +1 -5
  17. pyreduce/rectify.py +14 -14
  18. pyreduce/reduce.py +93 -68
  19. pyreduce/settings/settings_AJ.json +1 -1
  20. pyreduce/settings/settings_ANDES.json +7 -7
  21. pyreduce/settings/settings_CRIRES_PLUS.json +11 -13
  22. pyreduce/settings/settings_HARPN.json +7 -7
  23. pyreduce/settings/settings_HARPS.json +9 -8
  24. pyreduce/settings/settings_JWST_MIRI.json +6 -6
  25. pyreduce/settings/settings_JWST_NIRISS.json +6 -6
  26. pyreduce/settings/settings_LICK_APF.json +12 -11
  27. pyreduce/settings/settings_MCDONALD.json +6 -6
  28. pyreduce/settings/settings_METIS_IFU.json +9 -9
  29. pyreduce/settings/settings_METIS_LSS.json +9 -9
  30. pyreduce/settings/settings_MICADO.json +9 -9
  31. pyreduce/settings/settings_NEID.json +7 -7
  32. pyreduce/settings/settings_NIRSPEC.json +6 -6
  33. pyreduce/settings/settings_NTE.json +7 -7
  34. pyreduce/settings/settings_UVES.json +4 -4
  35. pyreduce/settings/settings_XSHOOTER.json +6 -6
  36. pyreduce/settings/settings_pyreduce.json +12 -11
  37. pyreduce/settings/settings_schema.json +25 -11
  38. pyreduce/slit_curve.py +739 -0
  39. {pyreduce_astro-0.7a6.dist-info → pyreduce_astro-0.7b1.dist-info}/METADATA +2 -3
  40. {pyreduce_astro-0.7a6.dist-info → pyreduce_astro-0.7b1.dist-info}/RECORD +43 -43
  41. pyreduce_astro-0.7b1.dist-info/entry_points.txt +4 -0
  42. pyreduce/make_shear.py +0 -607
  43. pyreduce_astro-0.7a6.dist-info/entry_points.txt +0 -2
  44. {pyreduce_astro-0.7a6.dist-info → pyreduce_astro-0.7b1.dist-info}/WHEEL +0 -0
  45. {pyreduce_astro-0.7a6.dist-info → pyreduce_astro-0.7b1.dist-info}/licenses/LICENSE +0 -0
pyreduce/__main__.py CHANGED
@@ -38,7 +38,7 @@ def cli():
 
 @cli.command()
 @click.argument("instrument")
-@click.argument("target")
+@click.option("--target", "-t", default=None, help="Target star name or regex pattern")
 @click.option("--night", "-n", default=None, help="Observation night (YYYY-MM-DD)")
 @click.option("--channel", "-c", default=None, help="Instrument channel")
 @click.option(
@@ -54,7 +54,7 @@ def cli():
     help="Base directory for data (default: $REDUCE_DATA or ~/REDUCE_DATA)",
 )
 @click.option(
-    "--input-dir", "-i", default="raw", help="Input directory relative to base"
+    "--input-dir", "-i", default=None, help="Input directory relative to base"
 )
 @click.option(
     "--output-dir", "-o", default="reduced", help="Output directory relative to base"
@@ -67,6 +67,12 @@ def cli():
     default=None,
     help="Order range to process (e.g., '1,21')",
 )
+@click.option(
+    "--settings",
+    default=None,
+    type=click.Path(exists=True),
+    help="JSON file with settings overrides",
+)
 def run(
     instrument,
     target,
@@ -78,13 +84,13 @@ def run(
     output_dir,
     plot,
     order_range,
+    settings,
 ):
     """Run the reduction pipeline.
 
     INSTRUMENT: Name of the instrument (e.g., UVES, HARPS, XSHOOTER)
-    TARGET: Target star name or regex pattern
     """
-    from .configuration import get_configuration_for_instrument
+    from .configuration import get_configuration_for_instrument, load_settings_override
     from .reduce import main as reduce_main
 
     # Parse steps
@@ -100,21 +106,26 @@ def run(
 
     # Load configuration
     config = get_configuration_for_instrument(instrument)
+    if settings:
+        config = load_settings_override(config, settings)
 
     # Run reduction
-    reduce_main(
-        instrument=instrument,
-        target=target,
-        night=night,
-        channels=channel,
-        steps=steps,
-        base_dir=base_dir or "",
-        input_dir=input_dir,
-        output_dir=output_dir,
-        configuration=config,
-        order_range=order_range,
-        plot=plot,
-    )
+    try:
+        reduce_main(
+            instrument=instrument,
+            target=target,
+            night=night,
+            channels=channel,
+            steps=steps,
+            base_dir=base_dir,
+            input_dir=input_dir,
+            output_dir=output_dir,
+            configuration=config,
+            order_range=order_range,
+            plot=plot,
+        )
+    except FileNotFoundError as e:
+        raise click.ClickException(str(e)) from None
 
 
 @cli.command()
@@ -292,10 +303,28 @@ def make_step_command(step_name):
         default=None,
         help="Specific input file (bypasses file discovery)",
     )
+    @click.option(
+        "--settings",
+        default=None,
+        type=click.Path(exists=True),
+        help="JSON file with settings overrides",
+    )
     def cmd(
-        instrument, target, night, channel, base_dir, input_dir, output_dir, plot, file
+        instrument,
+        target,
+        night,
+        channel,
+        base_dir,
+        input_dir,
+        output_dir,
+        plot,
+        file,
+        settings,
     ):
-        from .configuration import get_configuration_for_instrument
+        from .configuration import (
+            get_configuration_for_instrument,
+            load_settings_override,
+        )
         from .reduce import main as reduce_main
 
         if file:
@@ -305,7 +334,6 @@ def make_step_command(step_name):
             import numpy as np
 
             from . import reduce as reduce_module
-            from .configuration import get_configuration_for_instrument
             from .instruments.instrument_info import load_instrument
 
             inst = load_instrument(instrument)
@@ -317,31 +345,98 @@ def make_step_command(step_name):
 
             # Load configuration for this step
            config = get_configuration_for_instrument(instrument)
-            step_config = config.get(step_name, {})
-            step_config["plot"] = plot
+            if settings:
+                config = load_settings_override(config, settings)
 
-            # Get the step class
+            # Step classes that support --file (take raw files as input)
             step_classes = {
                 "bias": reduce_module.Bias,
                 "flat": reduce_module.Flat,
                 "trace": reduce_module.OrderTracing,
                 "curvature": reduce_module.SlitCurvatureDetermination,
                 "scatter": reduce_module.BackgroundScatter,
-                "norm_flat": reduce_module.NormalizeFlatField,
                 "wavecal_master": reduce_module.WavelengthCalibrationMaster,
-                "wavecal_init": reduce_module.WavelengthCalibrationInitialize,
-                "wavecal": reduce_module.WavelengthCalibrationFinalize,
                 "freq_comb_master": reduce_module.LaserFrequencyCombMaster,
-                "freq_comb": reduce_module.LaserFrequencyCombFinalize,
                 "science": reduce_module.ScienceExtraction,
-                "continuum": reduce_module.ContinuumNormalization,
             }
 
+            # Steps that don't take raw files - --file makes no sense
+            no_file_steps = {
+                "mask",
+                "norm_flat",
+                "wavecal_init",
+                "wavecal",
+                "freq_comb",
+                "continuum",
+                "finalize",
+                "rectify",
+            }
+
+            if step_name in no_file_steps:
+                raise click.ClickException(
+                    f"Step '{step_name}' does not accept raw files. "
+                    f"Run without --file to use the pipeline."
+                )
+
             if step_name not in step_classes:
                 raise click.ClickException(
                     f"Step '{step_name}' does not support --file option"
                 )
 
+            def make_step(name):
+                """Create a step instance."""
+                step_class = {
+                    "mask": reduce_module.Mask,
+                    "bias": reduce_module.Bias,
+                    "flat": reduce_module.Flat,
+                    "trace": reduce_module.OrderTracing,
+                    "curvature": reduce_module.SlitCurvatureDetermination,
+                    "scatter": reduce_module.BackgroundScatter,
+                    "norm_flat": reduce_module.NormalizeFlatField,
+                    "wavecal_master": reduce_module.WavelengthCalibrationMaster,
+                    "freq_comb_master": reduce_module.LaserFrequencyCombMaster,
+                    "science": reduce_module.ScienceExtraction,
+                }.get(name)
+                if not step_class:
+                    return None
+                step_config = config.get(name, {}).copy()
+                step_config["plot"] = 0  # No plots for dependency loading
+                return step_class(
+                    inst,
+                    channel,
+                    target=target or "",
+                    night=night,
+                    output_dir=output_dir_full,
+                    order_range=None,
+                    **step_config,
+                )
+
+            def load_dependency(name, loaded):
+                """Load a dependency from disk."""
+                if name in loaded:
+                    return loaded[name]
+                if name == "config":
+                    loaded["config"] = config
+                    return config
+                dep_step = make_step(name)
+                if dep_step is None:
+                    return None
+                # Load this step's dependencies first
+                for sub_dep in dep_step.loadDependsOn:
+                    load_dependency(sub_dep, loaded)
+                # Build kwargs for load()
+                load_kwargs = {
+                    d: loaded[d] for d in dep_step.loadDependsOn if d in loaded
+                }
+                try:
+                    loaded[name] = dep_step.load(**load_kwargs)
+                except FileNotFoundError:
+                    loaded[name] = None
+                return loaded[name]
+
+            # Create the target step
+            step_config = config.get(step_name, {}).copy()
+            step_config["plot"] = plot
             step_class = step_classes[step_name]
             step = step_class(
                 inst,
@@ -352,9 +447,21 @@ def make_step_command(step_name):
                 order_range=None,
                 **step_config,
             )
-            step.run(files=np.array([file]), mask=None, bias=None)
+
+            # Load all dependencies
+            loaded = {}
+            for dep in step.dependsOn:
+                load_dependency(dep, loaded)
+
+            # Build kwargs for run()
+            run_kwargs = {d: loaded.get(d) for d in step.dependsOn}
+            run_kwargs["files"] = np.array([file])
+
+            step.run(**run_kwargs)
         else:
             config = get_configuration_for_instrument(instrument)
+            if settings:
+                config = load_settings_override(config, settings)
             reduce_main(
                 instrument=instrument,
                 target=target,
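
With these changes, `run` keeps only the instrument as a positional argument, the target moves to `--target`, and both `run` and the generated per-step commands accept a `--settings` override file. A minimal sketch of exercising the new option surface through click's standard test runner; the instrument, target, and override file name are placeholders, and `--settings` must point to an existing file because of `click.Path(exists=True)`:

    # Sketch only: drives the new CLI in-process instead of from a shell.
    from click.testing import CliRunner

    from pyreduce.__main__ import cli

    runner = CliRunner()
    result = runner.invoke(
        cli,
        ["run", "UVES", "--target", "HD132205", "--settings", "overrides.json"],
    )
    print(result.exit_code, result.output)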
Binary files under pyreduce/clib (the compiled .obj/.pyd extensions listed above) changed; contents not shown.
pyreduce/clib/build_extract.py CHANGED
@@ -1,75 +1,80 @@
 #!/usr/bin/env python3
-"""Builds the C library that contains the extraction algorithm
+"""Build script for CFFI C extensions (development only).
 
-This module prepares and builds the C libary containing the curved
-(and vertical) extraction algorithm using CFFI.
-It also prepares the ffibuilder objects for setup.py,
-so that the library is compiled on installation.
+Usage:
+    uv run reduce-build   # compile extensions
+    uv run reduce-clean   # remove compiled files
 
-
-The user can also call the Module as a script to compile the
-C libraries again.
-
-Attributes
-----------
-ffi_builder_vertical : FFI
-    CFFI Builder for the vertical extraction algorithm
-ffi_builder_curved : FFI
-    CFFI Builder for the curved extraction algorithm
+This compiles the C extraction libraries in-place for development.
+For production, extensions are built automatically during wheel creation
+via hatch_build.py.
 """
 
-import logging
+import glob
 import os
 
 from cffi import FFI
 
-logger = logging.getLogger(__name__)
-
-
 CWD = os.path.dirname(__file__)
 CWD = os.path.abspath(CWD)
 release_path = os.path.join(CWD, "Release")
 
-print("Include dir: ", CWD)
-print("Release dir: ", release_path)
-
 
-ffibuilder_vertical = FFI()
-with open(os.path.join(CWD, "slit_func_bd.h")) as f:
-    ffibuilder_vertical.cdef(f.read())
-with open(os.path.join(CWD, "slit_func_bd.c")) as f:
-    ffibuilder_vertical.set_source(
-        "_slitfunc_bd",
-        f.read(),
-        include_dirs=[CWD, release_path],
-        depends=["slit_func_bd.h"],
-    )
-
-ffibuilder_curved = FFI()
-with open(os.path.join(CWD, "slit_func_2d_xi_zeta_bd.h")) as f:
-    ffibuilder_curved.cdef(f.read())
-with open(os.path.join(CWD, "slit_func_2d_xi_zeta_bd.c")) as f:
-    ffibuilder_curved.set_source(
-        "_slitfunc_2d",
-        f.read(),
-        include_dirs=[CWD, release_path],
-        depends=["slit_func_2d_xi_zeta_bd.h"],
-    )
+def clean():
+    """Remove compiled extension files."""
+    patterns = ["*.so", "*.o", "*.pyd"]
+    removed = []
+    for pattern in patterns:
+        for f in glob.glob(os.path.join(CWD, pattern)):
+            os.remove(f)
+            removed.append(os.path.basename(f))
+    if removed:
+        print(f"Removed: {', '.join(removed)}")
+    else:
+        print("Nothing to clean.")
 
 
 def build():
-    """Builds the C slitfunc library"""
-    logger.info("Building required C libraries, this might take a few seconds")
+    """Build the C slitfunc libraries in-place."""
+    print("Building CFFI extensions for development...")
+    print(f" Source dir: {CWD}")
 
     old_cwd = os.getcwd()
-    path = os.path.abspath(CWD)
-    os.chdir(path)
-
-    ffibuilder_vertical.compile(verbose=True, debug=False)
-    ffibuilder_curved.compile(verbose=True, debug=False)
-
-    os.chdir(old_cwd)
-
-
-if __name__ == "__main__":  # pragma: no cover
+    os.chdir(CWD)
+
+    try:
+        # Vertical extraction
+        ffibuilder_vertical = FFI()
+        with open("slit_func_bd.h") as f:
+            ffibuilder_vertical.cdef(f.read())
+        with open("slit_func_bd.c") as f:
+            ffibuilder_vertical.set_source(
+                "_slitfunc_bd",
+                f.read(),
+                include_dirs=[CWD, release_path],
+                depends=["slit_func_bd.h"],
+            )
+        ffibuilder_vertical.compile(verbose=True)
+        print("[OK] _slitfunc_bd")
+
+        # Curved extraction
+        ffibuilder_curved = FFI()
+        with open("slit_func_2d_xi_zeta_bd.h") as f:
+            ffibuilder_curved.cdef(f.read())
+        with open("slit_func_2d_xi_zeta_bd.c") as f:
+            ffibuilder_curved.set_source(
+                "_slitfunc_2d",
+                f.read(),
+                include_dirs=[CWD, release_path],
+                depends=["slit_func_2d_xi_zeta_bd.h"],
+            )
+        ffibuilder_curved.compile(verbose=True)
+        print("[OK] _slitfunc_2d")
+
+        print("Done.")
+    finally:
+        os.chdir(old_cwd)
+
+
+if __name__ == "__main__":
     build()
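
The module now exposes `build()` plus a new `clean()` helper instead of module-level FFI builders, so compilation only happens when explicitly requested. A minimal sketch of calling the helpers directly (requires a local C toolchain; presumably what the `reduce-build` / `reduce-clean` commands in the docstring invoke):

    # Sketch: run the development build helpers in-process.
    from pyreduce.clib import build_extract

    build_extract.clean()  # remove previously compiled *.so / *.o / *.pyd files
    build_extract.build()  # recompile _slitfunc_bd and _slitfunc_2d in-place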
pyreduce/configuration.py CHANGED
@@ -43,6 +43,26 @@ def get_configuration_for_instrument(instrument, **kwargs):
     return config
 
 
+def load_settings_override(config, settings_file):
+    """Apply settings overrides from a JSON file.
+
+    Parameters
+    ----------
+    config : dict
+        Base configuration to override
+    settings_file : str
+        Path to JSON file with override settings
+
+    Returns
+    -------
+    config : dict
+        Updated configuration
+    """
+    with open(settings_file) as f:
+        overrides = json.load(f)
+    return update(config, overrides, check=False)
+
+
 def load_config(configuration, instrument, j=0):
     if configuration is None:
         logger.info(
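
The new `load_settings_override` hook layers a JSON file on top of an instrument's defaults. A short usage sketch; the file name and the override keys are placeholders, and it assumes `update()` merges the override dict into the nested configuration:

    # Sketch: apply a partial JSON override on top of the instrument defaults.
    import json

    from pyreduce.configuration import (
        get_configuration_for_instrument,
        load_settings_override,
    )

    # Hypothetical override file containing only the keys to change.
    with open("my_overrides.json", "w") as f:
        json.dump({"science": {"plot": 0}}, f)

    config = get_configuration_for_instrument("UVES")
    config = load_settings_override(config, "my_overrides.json")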
pyreduce/cwrappers.py CHANGED
@@ -14,23 +14,9 @@ from scipy.ndimage import median_filter
 
 logger = logging.getLogger(__name__)
 
-try:
-    from .clib._slitfunc_2d import ffi
-    from .clib._slitfunc_2d import lib as slitfunc_2dlib
-    from .clib._slitfunc_bd import lib as slitfunclib
-except ImportError:  # pragma: no cover
-    logger.error(
-        "C libraries could not be found. Compiling them by running build_extract.py"
-    )
-    from .clib import build_extract
-
-    build_extract.build()
-    del build_extract
-
-    from .clib._slitfunc_2d import ffi
-    from .clib._slitfunc_2d import lib as slitfunc_2dlib
-    from .clib._slitfunc_bd import lib as slitfunclib
-
+from .clib._slitfunc_2d import ffi
+from .clib._slitfunc_2d import lib as slitfunc_2dlib
+from .clib._slitfunc_bd import lib as slitfunclib
 
 c_double = ctypes.c_double
 c_int = ctypes.c_int
@@ -129,7 +115,7 @@ def slitfunc(img, ycen, lambda_sp=0, lambda_sf=0.1, osample=1):
 
 
 def slitfunc_curved(
-    img, ycen, tilt, shear, lambda_sp, lambda_sf, osample, yrange, maxiter=20, gain=1
+    img, ycen, p1, p2, lambda_sp, lambda_sf, osample, yrange, maxiter=20, gain=1
 ):
     """Decompose an image into a spectrum and a slitfunction, image may be curved
 
@@ -139,10 +125,10 @@ def slitfunc_curved(
         input image
     ycen : array[n]
         traces the center of the order
-    tilt : array[n]
-        tilt (1st order curvature) of the order along the image, set to 0 if order straight
-    shear : array[n]
-        shear (2nd order curvature) of the order along the image, set to 0 if order straight
+    p1 : array[n]
+        1st order curvature of the order along the image, set to 0 if order straight
+    p2 : array[n]
+        2nd order curvature of the order along the image, set to 0 if order straight
     osample : int
         Subpixel ovsersampling factor (the default is 1, no oversampling)
     lambda_sp : float
@@ -175,23 +161,23 @@ def slitfunc_curved(
     assert ycen.ndim == 1, "Ycen must be 1 dimensional"
     assert maxiter > 0, "Maximum iterations must be positive"
 
-    if np.isscalar(tilt):
-        tilt = np.full(img.shape[1], tilt, dtype=c_double)
+    if np.isscalar(p1):
+        p1 = np.full(img.shape[1], p1, dtype=c_double)
     else:
-        tilt = np.asarray(tilt, dtype=c_double)
-    if np.isscalar(shear):
-        shear = np.full(img.shape[1], shear, dtype=c_double)
+        p1 = np.asarray(p1, dtype=c_double)
+    if np.isscalar(p2):
+        p2 = np.full(img.shape[1], p2, dtype=c_double)
     else:
-        shear = np.asarray(shear, dtype=c_double)
+        p2 = np.asarray(p2, dtype=c_double)
 
     assert img.shape[1] == ycen.size, (
         f"Image and Ycen shapes are incompatible, got {img.shape} and {ycen.shape}"
     )
-    assert img.shape[1] == tilt.size, (
-        f"Image and Tilt shapes are incompatible, got {img.shape} and {tilt.shape}"
+    assert img.shape[1] == p1.size, (
+        f"Image and p1 shapes are incompatible, got {img.shape} and {p1.shape}"
     )
-    assert img.shape[1] == shear.size, (
-        f"Image and Shear shapes are incompatible, got {img.shape} and {shear.shape}"
+    assert img.shape[1] == p2.size, (
+        f"Image and p2 shapes are incompatible, got {img.shape} and {p2.shape}"
     )
 
     assert osample > 0, f"Oversample rate must be positive, but got {osample}"
@@ -202,8 +188,8 @@ def slitfunc_curved(
 
     # assert np.ma.all(np.isfinite(img)), "All values in the image must be finite"
     assert np.all(np.isfinite(ycen)), "All values in ycen must be finite"
-    assert np.all(np.isfinite(tilt)), "All values in tilt must be finite"
-    assert np.all(np.isfinite(shear)), "All values in shear must be finite"
+    assert np.all(np.isfinite(p1)), "All values in p1 must be finite"
+    assert np.all(np.isfinite(p2)), "All values in p2 must be finite"
 
     assert yrange.ndim == 1, "Yrange must be 1 dimensional"
     assert yrange.size == 2, "Yrange must have 2 elements"
@@ -251,8 +237,8 @@ def slitfunc_curved(
     pix_unc[pix_unc < 1] = 1
 
     psf_curve = np.zeros((ncols, 3), dtype=c_double)
-    psf_curve[:, 1] = tilt
-    psf_curve[:, 2] = shear
+    psf_curve[:, 1] = p1
+    psf_curve[:, 2] = p2
 
     # Initialize arrays and ensure the correct datatype for C
     requirements = ["C", "A", "W", "O"]
@@ -333,8 +319,8 @@ def xi_zeta_tensors(
     ycen: np.ndarray,
     yrange,  # (int, int)
     osample: int,
-    tilt: np.ndarray,
-    shear: np.ndarray,
+    p1: np.ndarray,
+    p2: np.ndarray,
 ):
     ncols = int(ncols)
     nrows = int(nrows)
@@ -346,8 +332,8 @@ def xi_zeta_tensors(
    y_lower_lim = int(yrange[0])
 
     psf_curve = np.zeros((ncols, 3), dtype=c_double)
-    psf_curve[:, 1] = tilt
-    psf_curve[:, 2] = shear
+    psf_curve[:, 1] = p1
+    psf_curve[:, 2] = p2
 
     requirements = ["C", "A", "W", "O"]
     ycen_int = np.require(ycen_int, dtype=c_double, requirements=requirements)
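
Callers that passed the curvature arrays by keyword now need `p1=`/`p2=` instead of `tilt=`/`shear=`. A rough sketch of a zero-curvature call with the renamed arguments; it assumes the cut-out height is `yrange[0] + yrange[1] + 1`, treats `ycen` as the per-column trace position inside the cut-out, uses dummy data, and leaves the return value unpacked since it is not shown in this diff:

    # Sketch: straight (zero-curvature) call using the renamed p1/p2 arguments.
    import numpy as np

    from pyreduce.cwrappers import slitfunc_curved

    ncols = 100
    yrange = np.array([10, 10])               # rows below / above the trace center
    img = np.ones((yrange.sum() + 1, ncols))  # dummy cut-out, 21 x 100 pixels
    ycen = np.full(ncols, 0.5)                # trace center in each column
    p1 = np.zeros(ncols)                      # 1st-order curvature (0 = straight)
    p2 = np.zeros(ncols)                      # 2nd-order curvature (0 = straight)

    result = slitfunc_curved(
        img, ycen, p1, p2, lambda_sp=0, lambda_sf=0.1, osample=1, yrange=yrange
    )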