yippy 0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,93 @@
1
# Workflow: build the package on every push; publish to PyPI and create a
# signed GitHub Release only when the push is a tag (see the `if:` guard on
# publish-to-pypi; github-release is gated transitively via `needs:`).
name: Publish to PyPI

on: push

jobs:
  build:
    name: Build distribution
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.x"
      - name: Install pypa/build
        run: >-
          python3 -m
          pip install
          build
          --user
      - name: Build a binary wheel and a source tarball
        run: python3 -m build
      # Artifacts carry the built dists to the downstream jobs.
      - name: Store the distribution packages
        uses: actions/upload-artifact@v3
        with:
          name: python-package-distributions
          path: dist/

  publish-to-pypi:
    name: >-
      Publish Python distribution to PyPI
    # Only publish on tag pushes.
    if: startsWith(github.ref, 'refs/tags/')
    needs:
      - build
    runs-on: ubuntu-latest
    environment:
      name: pypi
      url: https://pypi.org/p/yippy
    permissions:
      # Required for PyPI trusted publishing (OIDC).
      id-token: write

    steps:
      - name: Download all the dists
        uses: actions/download-artifact@v3
        with:
          name: python-package-distributions
          path: dist/
      - name: Publish distribution to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1

  github-release:
    name: >-
      Sign the Python distribution with Sigstore
      and upload them to GitHub Release
    needs:
      - publish-to-pypi
    runs-on: ubuntu-latest

    permissions:
      # contents: write is needed to create the release; id-token for Sigstore.
      contents: write
      id-token: write

    steps:
      - name: Download all the dists
        uses: actions/download-artifact@v3
        with:
          name: python-package-distributions
          path: dist/
      - name: Sign the dists with Sigstore
        uses: sigstore/gh-action-sigstore-python@v1.2.3
        with:
          inputs: >-
            ./dist/*.tar.gz
            ./dist/*.whl
      - name: Create GitHub Release
        env:
          GITHUB_TOKEN: ${{ github.token }}
        run: >-
          gh release create
          '${{ github.ref_name }}'
          --repo '${{ github.repository }}'
          --notes ""
      - name: Upload artifact signatures to GitHub Release
        env:
          GITHUB_TOKEN: ${{ github.token }}
        # Upload to GitHub Release using the `gh` CLI.
        # `dist/` contains the built packages, and the
        # sigstore-produced signatures and certificates.
        run: >-
          gh release upload
          '${{ github.ref_name }}' dist/**
          --repo '${{ github.repository }}'
yippy-0.1/.gitignore ADDED
@@ -0,0 +1,160 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py,cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # poetry
98
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102
+ #poetry.lock
103
+
104
+ # pdm
105
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106
+ #pdm.lock
107
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108
+ # in version control.
109
+ # https://pdm.fming.dev/#use-with-ide
110
+ .pdm.toml
111
+
112
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113
+ __pypackages__/
114
+
115
+ # Celery stuff
116
+ celerybeat-schedule
117
+ celerybeat.pid
118
+
119
+ # SageMath parsed files
120
+ *.sage.py
121
+
122
+ # Environments
123
+ .env
124
+ .venv
125
+ env/
126
+ venv/
127
+ ENV/
128
+ env.bak/
129
+ venv.bak/
130
+
131
+ # Spyder project settings
132
+ .spyderproject
133
+ .spyproject
134
+
135
+ # Rope project settings
136
+ .ropeproject
137
+
138
+ # mkdocs documentation
139
+ /site
140
+
141
+ # mypy
142
+ .mypy_cache/
143
+ .dmypy.json
144
+ dmypy.json
145
+
146
+ # Pyre type checker
147
+ .pyre/
148
+
149
+ # pytype static type analyzer
150
+ .pytype/
151
+
152
+ # Cython debug symbols
153
+ cython_debug/
154
+
155
+ # PyCharm
156
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
159
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160
+ #.idea/
@@ -0,0 +1,15 @@
1
+ repos:
2
+ - repo: https://github.com/pre-commit/pre-commit-hooks
3
+ rev: "v4.5.0"
4
+ hooks:
5
+ - id: trailing-whitespace
6
+ - id: name-tests-test
7
+ - id: end-of-file-fixer
8
+ - repo: https://github.com/astral-sh/ruff-pre-commit
9
+ rev: v0.3.0
10
+ hooks:
11
+ # Run the linter.
12
+ - id: ruff
13
+ args: [ --fix ]
14
+ # Run the formatter.
15
+ - id: ruff-format
yippy-0.1/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024 Corey Spohn
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
yippy-0.1/PKG-INFO ADDED
@@ -0,0 +1,42 @@
1
+ Metadata-Version: 2.3
2
+ Name: yippy
3
+ Version: 0.1
4
+ Summary: A minimal wrapper to create a coronagraph object from a yield input package
5
+ Project-URL: Homepage, https://github.com/CoreySpohn/yippy
6
+ Project-URL: Issues, https://github.com/CoreySpohn/yippy/issues
7
+ Author-email: Corey Spohn <corey.a.spohn@nasa.gov>
8
+ License: MIT License
9
+
10
+ Copyright (c) 2024 Corey Spohn
11
+
12
+ Permission is hereby granted, free of charge, to any person obtaining a copy
13
+ of this software and associated documentation files (the "Software"), to deal
14
+ in the Software without restriction, including without limitation the rights
15
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
16
+ copies of the Software, and to permit persons to whom the Software is
17
+ furnished to do so, subject to the following conditions:
18
+
19
+ The above copyright notice and this permission notice shall be included in all
20
+ copies or substantial portions of the Software.
21
+
22
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
23
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
24
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
25
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
26
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
27
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
28
+ SOFTWARE.
29
+ License-File: LICENSE
30
+ Classifier: Intended Audience :: Science/Research
31
+ Classifier: License :: OSI Approved :: MIT License
32
+ Classifier: Programming Language :: Python :: 3
33
+ Classifier: Topic :: Scientific/Engineering :: Astronomy
34
+ Requires-Dist: astropy
35
+ Requires-Dist: lod-unit
36
+ Requires-Dist: numpy
37
+ Requires-Dist: scipy
38
+ Requires-Dist: tqdm
39
+ Description-Content-Type: text/markdown
40
+
41
+ # yippy
42
+ A minimal wrapper to create a coronagraph object from a yield input package
yippy-0.1/README.md ADDED
@@ -0,0 +1,2 @@
1
+ # yippy
2
+ A minimal wrapper to create a coronagraph object from a yield input package
@@ -0,0 +1,28 @@
1
+ [build-system]
2
+ requires = ['hatchling', "hatch-fancy-pypi-readme"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "yippy"
7
+ version = "0.1"
8
+ authors = [{ name = "Corey Spohn", email = "corey.a.spohn@nasa.gov" }]
9
+ description = "A minimal wrapper to create a coronagraph object from a yield input package"
10
+ dependencies = ["numpy", "astropy", "scipy", "lod_unit", "tqdm"]
11
+ license = { file = "LICENSE" }
12
+ dynamic = ['readme']
13
+ classifiers = [
14
+ "Programming Language :: Python :: 3",
15
+ "License :: OSI Approved :: MIT License",
16
+ "Intended Audience :: Science/Research",
17
+ "Topic :: Scientific/Engineering :: Astronomy",
18
+ ]
19
+
20
+ [project.urls]
21
+ Homepage = "https://github.com/CoreySpohn/yippy"
22
+ Issues = "https://github.com/CoreySpohn/yippy/issues"
23
+
24
+ [tool.hatch.metadata.hooks.fancy-pypi-readme]
25
+ content-type = "text/markdown"
26
+
27
+ [[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
28
+ path = "README.md"
@@ -0,0 +1,4 @@
1
"""Public API for yippy.

Exposes the :class:`Coronagraph` wrapper plus the package logging helpers.
"""

__all__ = ["Coronagraph", "setup_logger", "logger"]

from .coronagraph import Coronagraph

# Bug fix: `logger` is listed in __all__ but was never imported, so
# `from yippy import *` raised AttributeError and `yippy.logger` failed.
from .logger import logger, setup_logger
@@ -0,0 +1,456 @@
1
+ from pathlib import Path
2
+
3
+ import astropy.io.fits as pyfits
4
+ import astropy.units as u
5
+ import numpy as np
6
+ import xarray as xr
7
+ from lod_unit import lod
8
+ from scipy.interpolate import RegularGridInterpolator, interp1d
9
+ from scipy.ndimage import rotate, zoom
10
+ from tqdm import tqdm
11
+
12
+ from yippy.logger import setup_logger
13
+
14
+
15
class Coronagraph:
    """Wrapper around a yield input package (YIP) directory.

    Loads the stellar intensity map, off-axis PSF data, and offset lists from
    the YIP FITS files, classifies the coronagraph's symmetry, and builds
    log-space interpolators for the stellar and off-axis responses.
    """

    def __init__(self, yip_path, logging_level="INFO"):
        """
        Args:
            yip_path (str):
                Yield input package directory. Must have fits files
                stellar_intens.fits - Stellar intensity map
                    Unitless 3d array of the stellar intensity function I,
                    as a function of (x, y) pixel coordinates and the
                    stellar angular diameter theta_star. Values in the map
                    are equal to the stellar count rate in a given pixel
                    divided by the total stellar count rate entering the
                    coronagraph. Does not include reductions such as QE, as
                    in without a coronagraph the total of I is unity.
                stellar_intens_diam_list.fits - Stellar diameter list
                    A vector of stellar diameter values (lam/D) corresponding
                    to the theta_star values in stellar_intens.
                offax_psf_offset_list - The off-axis PSF list
                offax_psf - PSF of off-axis sources
                sky_trans - Sky transmission data
            logging_level (str):
                Logging level for the logger (e.g. INFO, DEBUG, WARNING, ERROR,
                CRITICAL), use to suppress logging if used as part of a larger
                workflow. Default is INFO.

        Raises:
            UserWarning: if the offset list does not have 2 columns, or the
                YIP directory is not one of the models with verified centering.
        """

        self.logger = setup_logger(logging_level)
        ###################
        # Read input data #
        ###################
        self.logger.info("Creating coronagraph")

        yip_path = Path(yip_path)
        self.name = yip_path.stem
        # Get header and calculate the lambda/D value
        stellar_intens_header = pyfits.getheader(
            Path(yip_path, "stellar_intens.fits"), 0
        )

        # Stellar intensity of the star being observed as function of stellar
        # angular diameter (unitless)
        self.stellar_intens = pyfits.getdata(Path(yip_path, "stellar_intens.fits"), 0)
        # the stellar angular diameters in stellar_intens_1 in units of lambda/D
        self.stellar_intens_diam_list = (
            pyfits.getdata(Path(yip_path, "stellar_intens_diam_list.fits"), 0) * lod
        )

        # Get pixel scale with units (lambda/D per pixel)
        self.pixel_scale = stellar_intens_header["PIXSCALE"] * lod / u.pixel

        # Load off-axis data (e.g. the planet) (unitless intensity maps)
        self.offax_psf = pyfits.getdata(Path(yip_path, "offax_psf.fits"), 0)

        # The offset list here is in units of lambda/D
        self.offax_psf_offset_list = (
            pyfits.getdata(Path(yip_path, "offax_psf_offset_list.fits"), 0) * lod
        )

        ########################################################################
        # Determine the format of the input coronagraph files so we can handle #
        # the coronagraph correctly (e.g. radially symmetric in x direction)   #
        ########################################################################
        if len(self.offax_psf_offset_list.shape) > 1:
            if (self.offax_psf_offset_list.shape[1] != 2) and (
                self.offax_psf_offset_list.shape[0] == 2
            ):
                # This condition occurs when the offax_psf_offset_list is transposed
                # from the expected format for radially symmetric coronagraphs
                self.offax_psf_offset_list = self.offax_psf_offset_list.T

        # Check that we have both x and y offset information (even if there
        # is only one axis with multiple values)
        if self.offax_psf_offset_list.shape[1] != 2:
            raise UserWarning("Array offax_psf_offset_list should have 2 columns")

        # Get the unique values of the offset list so that we can format the
        # data into a regular grid later
        self.offax_psf_offset_x = np.unique(self.offax_psf_offset_list[:, 0])
        self.offax_psf_offset_y = np.unique(self.offax_psf_offset_list[:, 1])

        # Classify the symmetry of the YIP:
        #   "1d"    - radially symmetric, offsets along one axis including 0
        #   "1dno0" - radially symmetric but with no zero offset sample
        #   "2dq"   - quarterly symmetric (all offsets non-negative)
        #   "2df"   - full 2D sampling
        if (len(self.offax_psf_offset_x) == 1) and (
            self.offax_psf_offset_x[0] == 0 * lod
        ):
            self.type = "1d"
            # Instead of handling angles for 1dy, swap the x and y
            self.offax_psf_offset_x, self.offax_psf_offset_y = (
                self.offax_psf_offset_y,
                self.offax_psf_offset_x,
            )

            # self.offax_psf_base_angle = 90.0 * u.deg
            self.logger.info("Coronagraph is radially symmetric")
        elif (len(self.offax_psf_offset_y) == 1) and (
            self.offax_psf_offset_y[0] == 0 * lod
        ):
            self.type = "1d"
            # self.offax_psf_base_angle = 0.0 * u.deg
            self.logger.info("Coronagraph is radially symmetric")
        elif len(self.offax_psf_offset_x) == 1:
            # 1 dimensional with offset (e.g. no offset=0)
            self.type = "1dno0"
            self.offax_psf_offset_x, self.offax_psf_offset_y = (
                self.offax_psf_offset_y,
                self.offax_psf_offset_x,
            )
            # self.offax_psf_base_angle = 90.0 * u.deg
            self.logger.info("Coronagraph is radially symmetric")
        elif len(self.offax_psf_offset_y) == 1:
            self.type = "1dno0"
            # self.offax_psf_base_angle = 0.0 * u.deg
            self.logger.info("Coronagraph is radially symmetric")
        elif np.min(self.offax_psf_offset_list) >= 0 * lod:
            self.type = "2dq"
            # self.offax_psf_base_angle = 0.0 * u.deg
            # self.logger.info(
            #     f"Quarterly symmetric response --> reflecting PSFs ({self.type})"
            # )
            self.logger.info("Coronagraph is quarterly symmetric")
        else:
            self.type = "2df"
            # self.offax_psf_base_angle = 0.0 * u.deg
            self.logger.info("Coronagraph response is full 2D")

        ############
        # Clean up #
        ############
        # Center coronagraph model so that image size is odd and central pixel is center
        # TODO: Automate this process
        verified_coronagraph_models = [
            "LUVOIR-A_APLC_10bw_smallFPM_2021-05-05_Dyn10pm-nostaticabb",
            "LUVOIR-A_APLC_18bw_medFPM_2021-05-07_Dyn10pm-nostaticabb",
            "LUVOIR-B-VC6_timeseries",
            "LUVOIR-B_VC6_timeseries",
        ]
        # NOTE(review): any YIP not in this hard-coded list is rejected outright;
        # confirm this restriction is intended rather than a warning-only check.
        if yip_path.parts[-1] in verified_coronagraph_models:
            self.stellar_intens = self.stellar_intens[:, 1:, 1:]
            self.offax_psf = self.offax_psf[:, :-1, 1:]
        else:
            raise UserWarning(
                "Please validate centering for this unknown coronagraph model"
            )

        # Simulation parameters
        self.yip_path = yip_path

        #########################################################################
        # Interpolate coronagraph model (in log space to avoid negative values) #
        #########################################################################
        # Fill value for interpolation (log of ~0 so exp() recovers ~0)
        fill = np.log(1e-100)

        # interpolate stellar data
        self.ln_stellar_intens_interp = interp1d(
            self.stellar_intens_diam_list,
            np.log(self.stellar_intens),
            kind="cubic",
            axis=0,
            bounds_error=False,
            fill_value=fill,
        )
        # Convenience wrapper returning the linear-space stellar intensity
        self.stellar_intens_interp = lambda stellar_diam: np.exp(
            self.ln_stellar_intens_interp(stellar_diam)
        )

        # interpolate planet data depending on type
        if "1" in self.type:
            # Always set up to interpolate along the x axis
            self.ln_offax_psf_interp = interp1d(
                self.offax_psf_offset_list[:, 0],
                np.log(self.offax_psf),
                kind="cubic",
                axis=0,
                bounds_error=False,
                fill_value=fill,
            )
        else:
            # Reshape the flat PSF stack onto the (x, y) offset grid
            zz_temp = self.offax_psf.reshape(
                self.offax_psf_offset_x.shape[0],
                self.offax_psf_offset_y.shape[0],
                self.offax_psf.shape[1],
                self.offax_psf.shape[2],
            )
            if self.type == "2dq":
                # Reflect PSFs to cover the x = 0 and y = 0 axes.
                offax_psf_offset_x = np.append(
                    -self.offax_psf_offset_x[0], self.offax_psf_offset_x
                )
                offax_psf_offset_y = np.append(
                    -self.offax_psf_offset_y[0], self.offax_psf_offset_y
                )
                zz = np.pad(zz_temp, ((1, 0), (1, 0), (0, 0), (0, 0)))
                zz[0, 1:] = zz_temp[0, :, ::-1, :]
                zz[1:, 0] = zz_temp[:, 0, :, ::-1]
                zz[0, 0] = zz_temp[0, 0, ::-1, ::-1]

                self.ln_offax_psf_interp = RegularGridInterpolator(
                    (offax_psf_offset_x, offax_psf_offset_y),
                    np.log(zz),
                    method="linear",
                    bounds_error=False,
                    fill_value=fill,
                )
            else:
                # This section included references to non-class attributes for
                # offax_psf_offset_x and offax_psf_offset_y. I think it meant
                # to be the class attributes
                self.ln_offax_psf_interp = RegularGridInterpolator(
                    (self.offax_psf_offset_x, self.offax_psf_offset_y),
                    np.log(zz_temp),
                    method="linear",
                    bounds_error=False,
                    fill_value=fill,
                )
        # Convenience wrapper returning the linear-space off-axis PSF
        self.offax_psf_interp = lambda coordinate: np.exp(
            self.ln_offax_psf_interp(coordinate)
        )

        ##################################################
        # Get remaining parameters and throughput values #
        ##################################################

        # Gets the number of pixels in the image
        self.img_pixels = self.stellar_intens.shape[1] * u.pixel
        self.npixels = self.img_pixels.value.astype(int)

        # Photometric parameters.
        head = pyfits.getheader(Path(yip_path, "stellar_intens.fits"), 0)

        # fractional obscuration
        self.frac_obscured = head["OBSCURED"]

        # fractional bandpass
        self.frac_bandwidth = (head["MAXLAM"] - head["MINLAM"]) / head["LAMBDA"]

        # PSF datacube info; set True once get_disk_psfs() has run
        self.has_psf_datacube = False

    def get_coro_thruput(self, aperture_radius_lod=0.8, oversample=100, plot=True):
        """
        Get coronagraph throughput

        Args:
            aperture_radius_lod (float):
                Circular aperture radius, in lambda/D (I think)
            oversample (int):
                Oversampling factor for interpolation
            plot (Boolean):
                Whether to plot the coronagraph throughput
                NOTE(review): `plot` is currently unused in the body — confirm
                whether plotting was intended here.
        Returns:
            coro_thruput (float):
                Coronagraph throughput
        """
        # Add units
        aperture_radius = aperture_radius_lod * lod

        # Compute off-axis PSF at the median separation value
        # Previously was labeled half max, but there is no guarantee the
        # separations are equally spaced
        if len(self.offax_psf_offset_x) != 1:
            med_offset = self.offax_psf_offset_x[self.offax_psf_offset_x.shape[0] // 2]
        elif len(self.offax_psf_offset_y) != 1:
            med_offset = self.offax_psf_offset_y[self.offax_psf_offset_y.shape[0] // 2]
        else:
            raise UserWarning(
                (
                    "Array offax_psf_offset_list should have more than 1"
                    " unique element for at least one axis"
                )
            )
        # Create (x, y) coordiantes of the aperture in lam/D
        # if self.type in ["1dx", "1dxo"]:
        aperture_pos = u.Quantity([med_offset, self.offax_psf_offset_y[0]])
        # elif self.type in ["1dy", "1dyo"]:
        #     aperture_pos = u.Quantity([self.offax_psf_offset_x[0], med_offset])

        # Create image
        imgs = self.offax_psf_interp(med_offset)

        # Compute aperture position and radius on subarray in pixels.
        # This was 3 times the aperture radius in pixels, I don't know why 3
        # Npix = int(np.ceil(3 * aperture_radius / self.pixel_scale))
        aperture_radius_pix = np.ceil(
            3 * aperture_radius / self.pixel_scale
        ).value.astype(int)

        aperture_pos_pix = (
            (aperture_pos / self.pixel_scale).value + (imgs.shape[0] - 1) / 2
        ).astype(int)
        # Cut a square subarray centered on the aperture position
        subarr = imgs[
            aperture_pos_pix[1] - aperture_radius_pix : aperture_pos_pix[1]
            + aperture_radius_pix
            + 1,
            aperture_pos_pix[0] - aperture_radius_pix : aperture_pos_pix[0]
            + aperture_radius_pix
            + 1,
        ]
        # (aperture_pos / self.pixel_scale + (imgs.shape[0] - 1) / 2.0)
        # This doesn't make sense to me
        # Aperture center within the subarray (broadcasts to [r, r])
        pos_subarr = [0, 0] + aperture_radius_pix
        rad_subarr = aperture_radius / self.pixel_scale

        # Compute aperture position and radius on oversampled subarray in pixels.
        # Renormalize after zoom so total flux is preserved.
        norm = np.sum(subarr)
        subarr_zoom = zoom(subarr, oversample, mode="nearest", order=5)
        subarr_zoom *= norm / np.sum(subarr_zoom)
        pos_subarr_zoom = pos_subarr * oversample + (oversample - 1.0) / 2.0
        rad_subarr_zoom = rad_subarr * oversample

        # Compute aperture on oversampled subarray in pixels.
        ramp = np.arange(subarr_zoom.shape[0])
        offax_psf_offset_x, yy = np.meshgrid(ramp, ramp)
        # Boolean mask of oversampled pixels inside the circular aperture
        aptr = (
            np.sqrt(
                (offax_psf_offset_x - pos_subarr_zoom[0]) ** 2
                + (yy - pos_subarr_zoom[1]) ** 2
            )
            <= rad_subarr_zoom.value
        )

        # Compute coronagraph throughput
        coro_thruput = np.sum(subarr_zoom[aptr])

        return coro_thruput

    def get_disk_psfs(self):
        """
        Load the disk image from a file or generate it if it doesn't exist

        Side effects: caches the datacube as a NetCDF file under
        `.cache/disks/` and sets `self.psf_datacube` / `self.has_psf_datacube`.
        """
        # Load data cube of spatially dependent PSFs.
        disk_dir = Path(".cache/disks/")
        if not disk_dir.exists():
            disk_dir.mkdir(parents=True, exist_ok=True)
        path = Path(
            disk_dir,
            self.yip_path.name + ".nc",
        )

        coords = {
            "x psf offset (pix)": np.arange(self.npixels),
            "y psf offset (pix)": np.arange(self.npixels),
            "x (pix)": np.arange(self.npixels),
            "y (pix)": np.arange(self.npixels),
        }
        dims = ["x psf offset (pix)", "y psf offset (pix)", "x (pix)", "y (pix)"]
        if path.exists():
            self.logger.info(
                "Loading data cube of spatially dependent PSFs, please hold..."
            )
            psfs_xr = xr.open_dataarray(path)
        else:
            self.logger.info(
                "Calculating data cube of spatially dependent PSFs, please hold..."
            )
            # Compute pixel grid.
            # lambda/D
            pixel_lod = (
                (np.arange(self.npixels) - ((self.npixels - 1) // 2))
                * u.pixel
                * self.pixel_scale
            )

            x_lod, y_lod = np.meshgrid(pixel_lod, pixel_lod, indexing="xy")

            # lambda/D
            pixel_dist_lod = np.sqrt(x_lod**2 + y_lod**2)

            # deg
            pixel_angle = np.arctan2(y_lod, x_lod)

            # Compute pixel grid contrast.
            psfs_shape = (
                pixel_dist_lod.shape[0],
                pixel_dist_lod.shape[1],
                self.npixels,
                self.npixels,
            )
            psfs = np.zeros(psfs_shape, dtype=np.float32)
            npsfs = np.prod(pixel_dist_lod.shape)

            pbar = tqdm(
                total=npsfs, desc="Computing datacube of PSFs at every pixel", delay=0.5
            )

            radially_symmetric_psf = "1d" in self.type
            # Get the PSF (npixel, npixel) of a source at every pixel

            # Note: intention is that i value maps to x offset and j value maps
            # to y offset
            for i in range(pixel_dist_lod.shape[0]):
                for j in range(pixel_dist_lod.shape[1]):
                    # Basic structure here is to get the distance in lambda/D,
                    # determine whether the psf has to be rotated (if the
                    # coronagraph is defined in 1 dimension), evaluate
                    # the offaxis psf at the distance, then rotate the
                    # image
                    if self.type == "1d":
                        psf_eval_dists = pixel_dist_lod[i, j]
                        rotate_angle = pixel_angle[i, j]
                    elif self.type == "1dno0":
                        psf_eval_dists = np.sqrt(
                            pixel_dist_lod[i, j] ** 2 - self.offax_psf_offset_x[0] ** 2
                        )
                        rotate_angle = pixel_angle[i, j] + np.arcsin(
                            self.offax_psf_offset_x[0] / pixel_dist_lod[i, j]
                        )
                    elif self.type == "2dq":
                        # lambda/D
                        temp = np.array([y_lod[i, j], x_lod[i, j]])
                        psf = self.offax_psf_interp(np.abs(temp))[0]
                        if y_lod[i, j] < 0.0:
                            # lambda/D
                            psf = psf[::-1, :]
                        if x_lod[i, j] < 0.0:
                            # lambda/D
                            psf = psf[:, ::-1]
                    else:
                        # lambda/D
                        temp = np.array([y_lod[i, j], x_lod[i, j]])
                        psf = self.offax_psf_interp(temp)[0]

                    if radially_symmetric_psf:
                        psf = self.ln_offax_psf_interp(psf_eval_dists)
                        temp = np.exp(
                            rotate(
                                psf,
                                -rotate_angle.to(u.deg).value,
                                reshape=False,
                                mode="nearest",
                                order=5,
                            )
                        )
                    # NOTE(review): in the "2dq"/"2df" branches `temp` still
                    # holds the 2-element coordinate pair at this point while
                    # the computed PSF is in `psf` — this looks like it was
                    # meant to store `psf`; confirm against the 2-D code path.
                    psfs[i, j] = temp
                    pbar.update(1)

            # Save data cube of spatially dependent PSFs.
            psfs_xr = xr.DataArray(
                psfs,
                coords=coords,
                dims=dims,
            )
            psfs_xr.to_netcdf(path)
        self.has_psf_datacube = True
        self.psf_datacube = np.ascontiguousarray(psfs_xr)
@@ -0,0 +1,45 @@
1
+ import logging
2
+
3
+
4
def setup_logger(shell_level="INFO", file_level="DEBUG", disable_shell_logging=False):
    """Configure and return the shared package logger.

    Attaches a ``FileHandler`` writing to ``debug.log`` in the current
    working directory and, unless disabled, a ``StreamHandler`` for the
    shell. Safe to call repeatedly; each call replaces the handlers.

    Args:
        shell_level (str):
            Level name for the shell handler (e.g. "INFO", "DEBUG").
            Unrecognized names fall back to INFO.
        file_level (str):
            Level name for the file handler. Unrecognized names fall
            back to DEBUG.
        disable_shell_logging (bool):
            If True, no shell handler is attached (file logging only).

    Returns:
        logging.Logger: the configured logger.
    """
    # Map string level names to logging levels
    level_mapping = {
        "CRITICAL": logging.CRITICAL,
        "ERROR": logging.ERROR,
        "WARNING": logging.WARNING,
        "INFO": logging.INFO,
        "DEBUG": logging.DEBUG,
        "NOTSET": logging.NOTSET,
    }

    logger = logging.getLogger(__name__)

    # Bug fix: the old `logger.handlers = []` discarded previous handlers
    # without closing them, leaking an open file descriptor for debug.log
    # on every repeated call. Close before clearing.
    for handler in logger.handlers:
        handler.close()
    logger.handlers = []  # Clear existing handlers

    logger.setLevel(logging.DEBUG)  # Set the lowest level to capture all logs

    # File Handler
    file_handler = logging.FileHandler("debug.log")
    file_handler.setLevel(level_mapping.get(file_level.upper(), logging.DEBUG))
    file_fmt = (
        "%(levelname)s %(asctime)s [%(filename)s:%(funcName)s:%(lineno)d] %(message)s"
    )
    file_formatter = logging.Formatter(file_fmt)
    file_handler.setFormatter(file_formatter)
    logger.addHandler(file_handler)

    # Shell Handler
    if not disable_shell_logging:
        shell_handler = logging.StreamHandler()
        shell_handler.setLevel(level_mapping.get(shell_level.upper(), logging.INFO))
        shell_fmt = "%(levelname)s [%(asctime)s] %(message)s"
        shell_formatter = logging.Formatter(shell_fmt)
        shell_handler.setFormatter(shell_formatter)
        logger.addHandler(shell_handler)

    # Don't forward records to the root logger (avoids duplicate output)
    logger.propagate = False
    return logger
42
+
43
+
44
# Initialize with default settings (INFO to shell, DEBUG to debug.log).
# NOTE(review): importing this module opens/creates debug.log in the current
# working directory as a side effect — confirm this is intended.
logger = setup_logger()