tglc 0.6.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tglc/mast.py ADDED
@@ -0,0 +1,116 @@
+ import ftplib
+ import getpass
+ from glob import glob
+ import numpy as np
+ from tqdm import trange
+ from multiprocessing import Pool
+ from functools import partial
+ from astropy.io import fits
+ import shutil
+ import time
+ import os
+ from astropy.io import ascii
+ import zipfile
+
+
+ def filter_no_tic_(i, sector=1):
+     # Move light curves without a TIC ID out of the lc/ tree for one camera/CCD.
+     time.sleep(i)
+     cam = 1 + i // 4
+     ccd = 1 + i % 4
+     files = glob(f'/home/tehan/data/sector{sector:04d}/lc/{cam}-{ccd}/*.fits')
+     for j in range(len(files)):
+         with fits.open(files[j]) as hdul:
+             no_tic = hdul[0].header['TICID'] == ''
+         if no_tic:
+             shutil.move(files[j], f'/home/tehan/data/sector{sector:04d}/extra_lc/{os.path.basename(files[j])}')
+
+
+ def filter_no_tic(sector=1):
+     os.makedirs(f'/home/tehan/data/sector{sector:04d}/extra_lc/', exist_ok=True)
+     with Pool(16) as p:
+         p.map(partial(filter_no_tic_, sector=sector), range(16))
+
+
+ def zip_folder(i, sector=1, do_zip=True, lc_num_per_zip=1e6):
+     # do_zip=True: build one zip archive per camera/CCD;
+     # do_zip=False: upload the existing archives to the MAST HLSP area over FTPS.
+     time.sleep(i)
+     cam = 1 + i // 4
+     ccd = 1 + i % 4
+     zip_file = f'/home/tehan/data/mast/sector{sector:04d}/sector_{sector}_cam_{cam}_ccd_{ccd}'
+     original_file = f'/home/tehan/data/sector{sector:04d}/lc/{cam}-{ccd}/'
+     files = glob(f'{original_file}*.fits')
+     if do_zip:
+         # num_zips = int(len(files) // lc_num_per_zip + 1)
+         # for i in range(num_zips):
+         #     with zipfile.ZipFile(f'{zip_file}_{i:02d}.zip', 'w') as zipMe:
+         #         for file in files[int(i * lc_num_per_zip):int((i + 1) * lc_num_per_zip)]:
+         #             zipMe.write(file, compress_type=zipfile.ZIP_DEFLATED)
+         shutil.make_archive(zip_file, 'zip', original_file)
+         return
+     else:
+         ftps = ftplib.FTP_TLS('archive.stsci.edu')
+         ftps.login('tehanhunter@gmail.com', getpass.getpass())
+         ftps.prot_p()
+         ftps.cwd('pub/hlsp/tglc/')
+         print(f"Sector {sector}")
+         sector_dir = f"s{sector:04d}"
+         # list the current directory
+         dir_list = []
+         ftps.retrlines('LIST', dir_list.append)
+         dir_list = [d.split()[-1] for d in dir_list]
+         # check if sector_dir already exists
+         if sector_dir in dir_list:
+             pass
+             # print(f"Directory {sector_dir}/ already exists.")
+         # if not, mkdir the new sector directory (use a relative path, NOT an absolute path)
+         else:
+             print(ftps.mkd(sector_dir))
+         # cd into the sector directory (use a relative path, NOT an absolute path)
+         ftps.cwd(sector_dir)
+         # print('\n')
+         with open(f'{zip_file}.zip', 'rb') as f:
+             ftps.storbinary(f"STOR sector_{sector}_cam_{cam}_ccd_{ccd}.zip", f)
+
+
+ def hlsp_transfer(sector=1, do_zip=True):
+     with Pool(16) as p:
+         p.map(partial(zip_folder, sector=sector, do_zip=do_zip), range(16))
+
+
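+ # A minimal sketch of how the worker index maps to a camera/CCD pair and how a two-phase
+ # transfer might look (zip first, then upload). The sector number is a placeholder taken
+ # from the defaults above, not a prescribed workflow.
+ #
+ #     for i in range(16):        # i = 0..15
+ #         cam = 1 + i // 4       # cameras 1-4
+ #         ccd = 1 + i % 4        # CCDs 1-4
+ #     hlsp_transfer(sector=1, do_zip=True)    # phase 1: build sector_1_cam_*_ccd_*.zip
+ #     hlsp_transfer(sector=1, do_zip=False)   # phase 2: upload the zips via FTPS
+
+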
+ def search_stars(i, sector=1, star_list=None):
+     cam = 1 + i // 4
+     ccd = 1 + i % 4
+     files = glob(f'/home/tehan/data/sector{sector:04d}/lc/{cam}-{ccd}/hlsp_*.fits')
+     for j in trange(len(files)):
+         with fits.open(files[j], mode='denywrite') as hdul:
+             try:
+                 if int(hdul[0].header['TICID']) in star_list:
+                     hdul.writeto(f"/home/tehan/data/cosmos/dominic_EB/sector{sector:04d}/{files[j].split('/')[-1]}",
+                                  overwrite=True)
+             except ValueError:
+                 pass
+
+
+ def star_spliter(server=1,  # or 2
+                  star_list='/home/tehan/data/cosmos/dominic_EB/eb_cat.txt'):
+     prsa_ebs = ascii.read(star_list)['ID'].data
+     # sector_list = tuple([] for _ in range(55))  ##1 extended mission
+     # for j in range(len(prsa_ebs)):
+     #     try:
+     #         sectors = prsa_ebs['sectors'][j].split(',')
+     #         for k in range(len(sectors)):
+     #             sector_list[int(sectors[k]) - 1].append(prsa_ebs['tess_id'][j])
+     #     except AttributeError:
+     #         pass
+     for i in range(server, 27, 2):
+         os.makedirs(f'/home/tehan/data/cosmos/dominic_EB/sector{i:04d}/', exist_ok=True)
+         with Pool(16) as p:
+             p.map(partial(search_stars, sector=i, star_list=prsa_ebs), range(16))
+     return
+
+
+ if __name__ == '__main__':
+     sector = 1
+     filter_no_tic(sector=sector)
+     hlsp_transfer(sector=sector, do_zip=True)
+     # hlsp_transfer(sector=sector, do_zip=False)
+     # star_spliter(server=1)
+     # star_list='/home/tehan/Documents/tglc/dominic_EB/eb_cat.txt'
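+
+ # A hedged sketch of extracting light curves for a specific target list with the helpers
+ # above. star_spliter expects an ascii table with an integer 'ID' column (the TIC IDs to
+ # keep); the path below is the module's default and may need to be adapted, and server=1/2
+ # splits the work between two machines (odd/even sectors via range(server, 27, 2)).
+ #
+ #     star_spliter(server=1, star_list='/home/tehan/data/cosmos/dominic_EB/eb_cat.txt')
+ #     # or, for a single sector/camera/CCD without multiprocessing:
+ #     # search_stars(0, sector=1, star_list=ascii.read('eb_cat.txt')['ID'].data)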
tglc/quick_lc.py ADDED
@@ -0,0 +1,526 @@
+ import os
+ import pickle
+ from glob import glob
+ from tqdm import trange
+ from wotan import flatten
+ from astropy.io import ascii
+ from astropy.io import fits
+ import matplotlib.pyplot as plt
+ from multiprocessing import Pool
+ from functools import partial
+ from tglc.target_lightcurve import epsf
+ from tglc.ffi_cut import ffi_cut
+ from astroquery.mast import Catalogs
+ import astropy.units as u
+ from astropy.coordinates import SkyCoord
+ from astroquery.mast import Tesscut
+ # Tesscut._service_api_connection.TIMEOUT = 6000
+
+ # warnings.simplefilter('ignore', UserWarning)
+ from threadpoolctl import ThreadpoolController, threadpool_limits
+ import numpy as np
+ import seaborn as sns
+ import itertools
+ controller = ThreadpoolController()
+
+
+ @controller.wrap(limits=1, user_api='blas')
+ def tglc_lc(target='TIC 264468702', local_directory='', size=90, save_aper=True, limit_mag=16, get_all_lc=False,
+             first_sector_only=False, last_sector_only=False, sector=None, prior=None, transient=None):
+     '''
+     Generate a light curve for a single target.
+
+     :param target: target identifier
+     :type target: str, required
+     :param local_directory: output directory
+     :type local_directory: str, required
+     :param size: side length of the FFI cut in pixels. Defaults to 90; a larger cut generally gives
+         better quality, but the size cannot exceed 100.
+     :type size: int, optional
+     '''
+     os.makedirs(local_directory + f'logs/', exist_ok=True)
+     os.makedirs(local_directory + f'lc/', exist_ok=True)
+     os.makedirs(local_directory + f'epsf/', exist_ok=True)
+     os.makedirs(local_directory + f'source/', exist_ok=True)
+     print(f'Target: {target}')
+     target_ = Catalogs.query_object(target, radius=42 * 0.707 / 3600, catalog="Gaia", version=2)
+     if len(target_) == 0:
+         # retry with a larger search radius if the first Gaia query returns nothing
+         target_ = Catalogs.query_object(target, radius=5 * 21 * 0.707 / 3600, catalog="Gaia", version=2)
+     ra = target_[0]['ra']
+     dec = target_[0]['dec']
+     coord = SkyCoord(ra=ra, dec=dec, unit=(u.degree, u.degree), frame='icrs')
+     sector_table = Tesscut.get_sectors(coordinates=coord)
+     print(sector_table)
+     if get_all_lc:
+         name = None
+     else:
+         catalogdata = Catalogs.query_object(str(target), radius=0.02, catalog="TIC")
+         if target[0:3] == 'TIC':
+             name = int(target[4:])
+         elif transient is not None:
+             name = transient[0]
+         else:
+             name = int(np.array(catalogdata['ID'])[0])
+             print("Since the provided target is not a TIC ID, the resulting light curve with get_all_lc=False "
+                   "cannot be guaranteed to be the target's light curve. Please check the TIC ID of the output "
+                   "file before using the light curve, or pass a TIC ID as the target in the format 'TIC 12345678'.")
+     if isinstance(sector, int):
+         print(f'Only processing Sector {sector}.')
+         print('Downloading Data from MAST and Gaia ...')
+         source = ffi_cut(target=target, size=size, local_directory=local_directory, sector=sector,
+                          limit_mag=limit_mag, transient=transient)  # sector
+         source.select_sector(sector=sector)
+         epsf(source, factor=2, sector=source.sector, target=target, local_directory=local_directory,
+              name=name, limit_mag=limit_mag, save_aper=save_aper, prior=prior)
+     elif first_sector_only:
+         print(f'Only processing the first sector the target is observed in: Sector {sector_table["sector"][0]}.')
+         print('Downloading Data from MAST and Gaia ...')
+         sector = sector_table["sector"][0]
+         source = ffi_cut(target=target, size=size, local_directory=local_directory, sector=sector,
+                          limit_mag=limit_mag, transient=transient)  # sector
+         source.select_sector(sector=source.sector_table['sector'][0])
+         epsf(source, factor=2, sector=source.sector, target=target, local_directory=local_directory,
+              name=name, limit_mag=limit_mag, save_aper=save_aper, prior=prior)
+     elif last_sector_only:
+         print(f'Only processing the last sector the target is observed in: Sector {sector_table["sector"][-1]}.')
+         print('Downloading Data from MAST and Gaia ...')
+         sector = sector_table["sector"][-1]
+         source = ffi_cut(target=target, size=size, local_directory=local_directory, sector=sector,
+                          limit_mag=limit_mag, transient=transient)  # sector
+         source.select_sector(sector=source.sector_table['sector'][-1])
+         epsf(source, factor=2, sector=source.sector, target=target, local_directory=local_directory,
+              name=name, limit_mag=limit_mag, save_aper=save_aper, prior=prior)
+     elif sector is None:
+         print('Processing all available sectors of the target.')
+         print('Downloading Data from MAST and Gaia ...')
+         for j in range(len(sector_table)):
+             print('################################################')
+             print(f'Downloading Sector {sector_table["sector"][j]}.')
+             source = ffi_cut(target=target, size=size, local_directory=local_directory,
+                              sector=sector_table['sector'][j],
+                              limit_mag=limit_mag, transient=transient)
+             epsf(source, factor=2, sector=source.sector, target=target, local_directory=local_directory,
+                  name=name, limit_mag=limit_mag, save_aper=save_aper, prior=prior)
+     else:
+         print(
+             'Processing all available sectors of the target in a single run. Note that if the number of sectors '
+             'is large, the download might cause a timeout error from MAST.')
+         print('Downloading Data from MAST and Gaia ...')
+         source = ffi_cut(target=target, size=size, local_directory=local_directory, sector=sector,
+                          limit_mag=limit_mag, transient=transient)  # sector
+         for j in range(len(source.sector_table)):
+             source.select_sector(sector=source.sector_table['sector'][j])
+             epsf(source, factor=2, sector=source.sector, target=target, local_directory=local_directory,
+                  name=name, limit_mag=limit_mag, save_aper=save_aper, prior=prior)
+
+
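+ # A minimal, hedged usage sketch of tglc_lc for one target. The output directory below is
+ # a placeholder (any path ending with a slash); the remaining keywords mirror the defaults
+ # documented above.
+ #
+ #     tglc_lc(target='TIC 264468702', local_directory='/path/to/output/',
+ #             size=90, save_aper=True, limit_mag=16, get_all_lc=False,
+ #             first_sector_only=False, last_sector_only=False, sector=None)
+
+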
+ def search_stars(i, sector=1, tics=None, local_directory=None):
+     cam = 1 + i // 4
+     ccd = 1 + i % 4
+     files = glob(f'/home/tehan/data/sector{sector:04d}/lc/{cam}-{ccd}/hlsp_*.fits')
+     for j in trange(len(files)):
+         with fits.open(files[j], mode='denywrite') as hdul:
+             try:
+                 if int(hdul[0].header['TICID']) in tics:
+                     hdul.writeto(f"{local_directory}{files[j].split('/')[-1]}",
+                                  overwrite=True)
+             except (ValueError, KeyError):
+                 # skip files without a usable integer TIC ID
+                 pass
+
+
+ def timebin(time, meas, meas_err, binsize):
+     ind_order = np.argsort(time)
+     time = time[ind_order]
+     meas = meas[ind_order]
+     meas_err = meas_err[ind_order]
+     ct = 0
+     while ct < len(time):
+         ind = np.where((time >= time[ct]) & (time < time[ct] + binsize))[0]
+         num = len(ind)
+         wt = (1. / meas_err[ind]) ** 2.  # weights based on errors
+         wt = wt / np.sum(wt)  # normalized weights
+         if ct == 0:
+             time_out = [np.sum(wt * time[ind])]
+             meas_out = [np.sum(wt * meas[ind])]
+             meas_err_out = [1. / np.sqrt(np.sum(1. / (meas_err[ind]) ** 2))]
+         else:
+             time_out.append(np.sum(wt * time[ind]))
+             meas_out.append(np.sum(wt * meas[ind]))
+             meas_err_out.append(1. / np.sqrt(np.sum(1. / (meas_err[ind]) ** 2)))
+         ct += num
+
+     return time_out, meas_out, meas_err_out
+
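+ # A small, hedged example of timebin: inverse-variance-weighted means within bins of width
+ # `binsize` (same units as `time`), walking through the sorted points. The arrays below are
+ # made up purely for illustration.
+ #
+ #     t = np.array([0.0, 0.01, 0.02, 1.0, 1.01])
+ #     f = np.array([1.0, 1.2, 0.8, 2.0, 2.2])
+ #     e = np.array([0.1, 0.1, 0.1, 0.2, 0.2])
+ #     tb, fb, eb = timebin(t, f, e, binsize=0.5)   # two bins: around t~0 and t~1
+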
+
+ def star_spliter(server=1,  # or 2
+                  tics=None, local_directory=None):
+     # range(server, 27, 2): server 1 handles the odd sectors, server 2 the even sectors
+     for i in range(server, 27, 2):
+         with Pool(16) as p:
+             p.map(partial(search_stars, sector=i, tics=tics, local_directory=local_directory), range(16))
+     return
+
+
+ def plot_lc(local_directory=None, kind='cal_aper_flux', xlow=None, xhigh=None, ylow=None, yhigh=None):
+     files = glob(f'{local_directory}lc/*.fits')
+     os.makedirs(f'{local_directory}plots/', exist_ok=True)
+     for i in range(len(files)):
+         with fits.open(files[i], mode='denywrite') as hdul:
+             q = [a and b for a, b in zip(list(hdul[1].data['TESS_flags'] == 0), list(hdul[1].data['TGLC_flags'] == 0))]
+             plt.figure(constrained_layout=False, figsize=(8, 4))
+             plt.plot(hdul[1].data['time'], hdul[1].data[kind], '.', c='silver', label=kind)
+             plt.plot(hdul[1].data['time'][q], hdul[1].data[kind][q], '.k', label=f'{kind}_flagged')
+             plt.xlim(xlow, xhigh)
+             plt.ylim(ylow, yhigh)
+             plt.title(f'TIC_{hdul[0].header["TICID"]}_sector_{hdul[0].header["SECTOR"]:04d}_{kind}')
+             plt.legend()
+             # plt.show()
+             plt.savefig(
+                 f'{local_directory}plots/TIC_{hdul[0].header["TICID"]}_sector_{hdul[0].header["SECTOR"]:04d}_{kind}.png',
+                 dpi=300)
+             plt.close()
+
+
+ def plot_aperture(local_directory=None, kind='cal_aper_flux'):
+     # Hard-coded for a specific two-file TOI-5344 reduction: `portion` holds one aperture
+     # fraction per input file, and the output csv name is fixed below.
+     files = glob(f'{local_directory}*.fits')
+     os.makedirs(f'{local_directory}plots/', exist_ok=True)
+     portion = [0.9361215204370542, 0.9320709087810205]
+     data = np.empty((3, 0))
+
+     for i in range(len(files)):
+         with fits.open(files[i], mode='denywrite') as hdul:
+             print(files[i], portion[i])
+             q = [a and b for a, b in zip(list(hdul[1].data['TESS_flags'] == 0), list(hdul[1].data['TGLC_flags'] == 0))]
+             plt.figure(constrained_layout=False, figsize=(8, 4))
+             plt.plot(hdul[1].data['time'] % 3.79262026, hdul[1].data[kind], '.', c='silver', label=kind)
+             plt.plot(hdul[1].data['time'][q] % 3.79262026, hdul[1].data[kind][q], '.k', label=f'{kind}_flagged')
+             aperture_bar = 709.5512462444653 * portion[i]
+             aper_lc = np.nansum(hdul[0].data, axis=(1, 2))
+             local_bg = np.nanmedian(aper_lc) - aperture_bar
+             aper_lc = (aper_lc - local_bg) / portion[i]
+             cal_aper_lc = aper_lc / np.nanmedian(aper_lc)
+             cal_aper_lc[np.where(cal_aper_lc > 100)] = np.nan
+             _, trend = flatten(hdul[1].data['time'], cal_aper_lc - np.nanmin(cal_aper_lc) + 1000,
+                                window_length=1, method='biweight', return_trend=True)
+             cal_aper_lc = (cal_aper_lc - np.nanmin(cal_aper_lc) + 1000 - trend) / np.nanmedian(cal_aper_lc) + 1
+             non_outliers = np.where(cal_aper_lc[q] > 0.6)[0]
+             plt.plot(hdul[1].data['time'][q][non_outliers] % 3.79262026, cal_aper_lc[q][non_outliers], '.r',
+                      label=f'5_5_pixel_flagged')
+             plt.xlim(0.5, 1.0)
+             plt.ylim(0.95, 1.1)
+             plt.title(f'TIC_{hdul[0].header["TICID"]}_sector_{hdul[0].header["SECTOR"]:04d}')
+             plt.legend()
+             # plt.show()
+             plt.savefig(
+                 f'{local_directory}plots/TIC_{hdul[0].header["TICID"]}_sector_{hdul[0].header["SECTOR"]:04d}.png',
+                 dpi=300)
+             time = hdul[1].data['time'][q][non_outliers]
+             flux = cal_aper_lc[q][non_outliers]
+             f_err = 1.4826 * np.nanmedian(np.abs(flux - np.nanmedian(flux)))
+             not_nan = np.invert(np.isnan(flux))
+             data_ = np.array([time[not_nan],
+                               flux[not_nan],
+                               np.array([f_err] * len(time[not_nan]))
+                               ])
+             data = np.append(data, data_, axis=1)
+     np.savetxt(f'{local_directory}TESS_TOI-5344_5_5_aper.csv', data, delimiter=',')
+
+
+ def plot_pf_lc(local_directory=None, period=None, mid_transit_tbjd=None, kind='cal_aper_flux'):
+     files = glob(f'{local_directory}*.fits')
+     os.makedirs(f'{local_directory}plots/', exist_ok=True)
+     fig = plt.figure(figsize=(13, 5))
+     t_all = np.array([])
+     f_all = np.array([])
+     f_err_all = np.array([])
+     not_plotted_num = 0
+     for j in range(len(files)):
+         with fits.open(files[j], mode='denywrite') as hdul:
+             q = [a and b for a, b in
+                  zip(list(hdul[1].data['TESS_flags'] == 0), list(hdul[1].data['TGLC_flags'] == 0))]
+             # q = [a and b for a, b in zip(q, list(hdul[1].data[kind] > 0.85))]
+             # if hdul[0].header['sector'] == 15:
+             #     q = [a and b for a, b in zip(q, list(hdul[1].data['time'] < 1736))]
+             if len(hdul[1].data['cal_aper_flux']) == len(hdul[1].data['time']):
+                 if hdul[0].header["SECTOR"] <= 26:
+                     t = hdul[1].data['time'][q]
+                     f = hdul[1].data[kind][q]
+                 elif hdul[0].header["SECTOR"] <= 55:
+                     t = np.mean(hdul[1].data['time'][q][:len(hdul[1].data['time'][q]) // 3 * 3].reshape(-1, 3), axis=1)
+                     f = np.mean(
+                         hdul[1].data[kind][q][:len(hdul[1].data[kind][q]) // 3 * 3].reshape(-1, 3), axis=1)
+                 else:
+                     t = np.mean(hdul[1].data['time'][q][:len(hdul[1].data['time'][q]) // 9 * 9].reshape(-1, 9), axis=1)
+                     f = np.mean(
+                         hdul[1].data[kind][q][:len(hdul[1].data[kind][q]) // 9 * 9].reshape(-1, 9), axis=1)
+                 t_all = np.append(t_all, t)
+                 f_all = np.append(f_all, f)
+                 f_err_all = np.append(f_err_all, np.array([hdul[1].header['CAPE_ERR']] * len(t)))
+
+                 # plt.plot(hdul[1].data['time'] % period / period, hdul[1].data[kind], '.', c='silver', ms=3)
+                 plt.errorbar(t % period / period, f, hdul[1].header['CAPE_ERR'], c='silver', ls='', elinewidth=0.1,
+                              marker='.', ms=3, zorder=2)
+                 # time_out, meas_out, meas_err_out = timebin(time=t % period, meas=f,
+                 #                                            meas_err=np.array([hdul[1].header['CAPE_ERR']] * len(t)),
+                 #                                            binsize=600 / 86400)
+                 # plt.errorbar(np.array(time_out) / period, meas_out, meas_err_out, c=f'C{j}', ls='', elinewidth=1.5,
+                 #              marker='.', ms=8, zorder=3, label=f'Sector {hdul[0].header["sector"]}')
+             else:
+                 not_plotted_num += 1
+             title = f'TIC_{hdul[0].header["TICID"]} with {len(files) - not_plotted_num} sector(s) of data, {kind}'
+     # PDCSAP_files = glob('/home/tehan/Documents/GEMS/TIC 172370679/PDCSAP/*.txt')
+     # for i in range(len(files)):
+     #     PDCSAP = ascii.read(PDCSAP_files[i])
+     #     t = np.mean(PDCSAP['col1'][:len(PDCSAP['col1']) // 15 * 15].reshape(-1, 15), axis=1)
+     #     f = np.mean(PDCSAP['col2'][:len(PDCSAP['col2']) // 15 * 15].reshape(-1, 15), axis=1)
+     #     ferr = np.mean(PDCSAP['col3'][:len(PDCSAP['col3']) // 15 * 15].reshape(-1, 15), axis=1)
+     #     plt.errorbar((t - 2457000) % period / period, f, ferr, c='C0', ls='', elinewidth=0, marker='.', ms=2, zorder=1)
+     time_out, meas_out, meas_err_out = timebin(time=t_all % period, meas=f_all,
+                                                meas_err=f_err_all,
+                                                binsize=300 / 86400)
+     plt.errorbar(np.array(time_out) / period, meas_out, meas_err_out, c='r', ls='', elinewidth=1.5,
+                  marker='.', ms=8, zorder=3, label='All sectors')
+
+     plt.ylim(0.998, 1.001)
+     # plt.xlim(0.3, 0.43)
+     plt.legend()
+     plt.title(title)
+     # plt.xlim(mid_transit_tbjd % period - 0.1 * period, mid_transit_tbjd % period + 0.1 * period)
+     # plt.ylim(0.9, 1.1)
+     # plt.hlines(y=0.92, xmin=0, xmax=1, ls='dotted', colors='k')
+     # plt.hlines(y=0.93, xmin=0, xmax=1, ls='dotted', colors='k')
+     plt.vlines(x=(mid_transit_tbjd % period), ymin=0, ymax=2, ls='dotted', colors='grey')
+     plt.xlabel('Phase')
+     plt.ylabel('Normalized flux')
+     plt.savefig(f'{local_directory}/plots/{title}.png', dpi=300)
+     plt.close(fig)
+
+
+ def plot_contamination(local_directory=None, gaia_dr3=None, ymin=None, ymax=None, pm_years=3000):
+     sns.set(rc={'font.family': 'serif', 'font.serif': 'DejaVu Serif', 'font.size': 12,
+                 'axes.edgecolor': '0.2', 'axes.labelcolor': '0.', 'xtick.color': '0.', 'ytick.color': '0.',
+                 'axes.facecolor': '0.95', "axes.grid": False})
+
+     files = glob(f'{local_directory}lc/*{gaia_dr3}*.fits')
+     os.makedirs(f'{local_directory}plots/', exist_ok=True)
+     for i in range(len(files)):
+         with fits.open(files[i], mode='denywrite') as hdul:
+             sector = hdul[0].header['SECTOR']
+             q = [a and b for a, b in
+                  zip(list(hdul[1].data['TESS_flags'] == 0), list(hdul[1].data['TGLC_flags'] == 0))]
+             if ymin is None and ymax is None:
+                 ymin = np.nanmin(hdul[1].data['cal_aper_flux'][q]) - 0.05
+                 ymax = np.nanmax(hdul[1].data['cal_aper_flux'][q]) + 0.05
+             with open(glob(f'{local_directory}source/*_{sector}.pkl')[0], 'rb') as input_:
+                 source = pickle.load(input_)
+             source.select_sector(sector=sector)
+             star_num = np.where(source.gaia['DESIGNATION'] == f'Gaia DR3 {gaia_dr3}')
+
+             distances = np.sqrt(
+                 (source.gaia[f'sector_{sector}_x'][:500] - source.gaia[star_num][f'sector_{sector}_x']) ** 2 +
+                 (source.gaia[f'sector_{sector}_y'][:500] - source.gaia[star_num][f'sector_{sector}_y']) ** 2)
+
+             # Find the closest 5 stars (6 minus the target itself) or those within 5 pixels
+             nearby_stars = np.argsort(distances)[:6]
+             nearby_stars = nearby_stars[distances[nearby_stars] <= 5]
+             star_x = source.gaia[star_num][f'sector_{sector}_x'][0]
+             star_y = source.gaia[star_num][f'sector_{sector}_y'][0]
+             max_flux = np.nanmax(
+                 np.nanmedian(
+                     source.flux[:, round(star_y) - 2:round(star_y) + 3, round(star_x) - 2:round(star_x) + 3],
+                     axis=0))
+             fig = plt.figure(constrained_layout=False, figsize=(20, 12))
+             gs = fig.add_gridspec(21, 10)
+             gs.update(wspace=0.03, hspace=0.1)
+             ax0 = fig.add_subplot(gs[:10, :3])
+             ax0.imshow(np.median(source.flux, axis=0), cmap='RdBu', vmin=-max_flux, vmax=max_flux, origin='lower')
+             ax0.set_xlabel('x pixel')
+             ax0.set_ylabel('y pixel')
+             ax0.scatter(star_x, star_y, s=300, c='r', marker='*', label='target star')
+             ax0.scatter(source.gaia[f'sector_{sector}_x'][:500], source.gaia[f'sector_{sector}_y'][:500], s=30,
+                         c='r', label='background stars')
+             ax0.scatter(source.gaia[f'sector_{sector}_x'][nearby_stars[nearby_stars != star_num[0][0]]],
+                         source.gaia[f'sector_{sector}_y'][nearby_stars[nearby_stars != star_num[0][0]]],
+                         s=30, c='r', edgecolor='black', linewidth=1, label='background stars')
+
+             for l in range(len(nearby_stars)):
+                 index = np.where(
+                     source.tic['dr3_source_id'] == int(source.gaia['DESIGNATION'][nearby_stars[l]].split(' ')[-1]))
+                 gaia_targets = source.gaia
+                 median_time = np.median(source.time)
+                 interval = (median_time - 388.5) / 365.25 + pm_years
+                 ra = gaia_targets['ra'][nearby_stars[l]]
+                 dec = gaia_targets['dec'][nearby_stars[l]]
+                 if not np.isnan(gaia_targets['pmra'][nearby_stars[l]]):
+                     ra += gaia_targets['pmra'][nearby_stars[l]] * np.cos(np.deg2rad(dec)) * interval / 1000 / 3600
+                 if not np.isnan(gaia_targets['pmdec'][nearby_stars[l]]):
+                     dec += gaia_targets['pmdec'][nearby_stars[l]] * interval / 1000 / 3600
+                 pixel = source.wcs.all_world2pix(np.array([ra, dec]).reshape((1, 2)), 0)
+                 x_gaia = pixel[0][0]
+                 y_gaia = pixel[0][1]
+                 ax0.arrow(source.gaia[f'sector_{sector}_x'][nearby_stars[l]],
+                           source.gaia[f'sector_{sector}_y'][nearby_stars[l]],
+                           x_gaia - source.gaia[f'sector_{sector}_x'][nearby_stars[l]],
+                           y_gaia - source.gaia[f'sector_{sector}_y'][nearby_stars[l]],
+                           width=0.02, color='r', edgecolor=None, head_width=0.1)
+                 try:
+                     txt = ax0.text(source.gaia[f'sector_{sector}_x'][nearby_stars[l]] + 0.5,
+                                    source.gaia[f'sector_{sector}_y'][nearby_stars[l]] - 0.05,
+                                    f'TIC {int(source.tic["TIC"][index])}', size=7)
+
+                 except TypeError:
+                     designation = source.gaia['DESIGNATION'][nearby_stars[l]]
+                     formatted_text = '\n'.join([designation[i:i + 15] for i in range(0, len(designation), 15)])
+
+                     txt = ax0.text(source.gaia[f'sector_{sector}_x'][nearby_stars[l]] + 0.5,
+                                    source.gaia[f'sector_{sector}_y'][nearby_stars[l]] - 0.05,
+                                    formatted_text, size=7)
+             ax0.set_xlim(round(star_x) - 5.5, round(star_x) + 5.5)
+             ax0.set_ylim(round(star_y) - 5.5, round(star_y) + 5.5)
+             ax0.set_title(f'TIC_{hdul[0].header["TICID"]}_Sector_{hdul[0].header["SECTOR"]:04d}')
+             ax0.vlines(round(star_x) - 2.5, round(star_y) - 2.5, round(star_y) + 2.5, colors='k', lw=1.2)
+             ax0.vlines(round(star_x) + 2.5, round(star_y) - 2.5, round(star_y) + 2.5, colors='k', lw=1.2)
+             ax0.hlines(round(star_y) - 2.5, round(star_x) - 2.5, round(star_x) + 2.5, colors='k', lw=1.2)
+             ax0.hlines(round(star_y) + 2.5, round(star_x) - 2.5, round(star_x) + 2.5, colors='k', lw=1.2)
+             t_, y_, x_ = np.shape(hdul[0].data)
+             max_flux = np.max(
+                 np.median(source.flux[:, int(star_y) - 2:int(star_y) + 3, int(star_x) - 2:int(star_x) + 3], axis=0))
+             sns.set(rc={'font.family': 'serif', 'font.serif': 'DejaVu Serif', 'font.size': 12,
+                         'axes.edgecolor': '0.2', 'axes.labelcolor': '0.', 'xtick.color': '0.', 'ytick.color': '0.',
+                         'axes.facecolor': '0.95', 'grid.color': '0.9'})
+             arrays = []
+             for j in range(y_):
+                 for k in range(x_):
+                     ax_ = fig.add_subplot(gs[(19 - 2 * j):(21 - 2 * j), (2 * k):(2 + 2 * k)])
+                     ax_.patch.set_facecolor('#4682B4')
+                     ax_.patch.set_alpha(min(1, max(0, 5 * np.nanmedian(hdul[0].data[:, j, k]) / max_flux)))
+
+                     _, trend = flatten(hdul[1].data['time'][q],
+                                        hdul[0].data[:, j, k][q] - np.nanmin(hdul[0].data[:, j, k][q]) + 1000,
+                                        window_length=1, method='biweight', return_trend=True)
+                     cal_aper = (hdul[0].data[:, j, k][q] - np.nanmin(
+                         hdul[0].data[:, j, k][q]) + 1000 - trend) / np.nanmedian(
+                         hdul[0].data[:, j, k][q]) + 1
+                     if 1 <= j <= 3 and 1 <= k <= 3:
+                         arrays.append(cal_aper)
+                     ax_.plot(hdul[1].data['time'][q], cal_aper, '.k', ms=0.5)
+                     # ax_.plot(hdul[1].data['time'][q], hdul[0].data[:, j, k][q], '.k', ms=0.5)
+                     ax_.set_ylim(ymin, ymax)
+                     ax_.set_xlabel('TBJD')
+                     ax_.set_ylabel('')
+                     if j != 0:
+                         ax_.set_xticklabels([])
+                         ax_.set_xlabel('')
+                     if k != 0:
+                         ax_.set_yticklabels([])
+                     if j == 2 and k == 0:
+                         ax_.set_ylabel('Normalized and detrended flux of each pixel')
+
+             combinations = itertools.combinations(arrays, 2)
+             median_abs_diffs = []
+             for arr_a, arr_b in combinations:
+                 abs_diff = np.abs(arr_a - arr_b)
+                 median_diff = np.median(abs_diff)
+                 median_abs_diffs.append(median_diff)
+             median_abs_diffs = np.array(median_abs_diffs)
+             iqr = np.percentile(median_abs_diffs, 75) - np.percentile(median_abs_diffs, 25)
+             print(f"Interquartile Range (IQR): {iqr}")
+             std_dev = np.std(median_abs_diffs)
+             print(f"Standard Deviation: {std_dev}")
+             ax1 = fig.add_subplot(gs[:10, 4:7])
+             ax1.hist(median_abs_diffs, color='k', edgecolor='k', facecolor='none', rwidth=0.8, linewidth=2)
+             ax1.set_box_aspect(1)
+             ax1.set_title('Distribution of the MADs among combinations of the center 3*3 pixels')
+             ax1.set_xlabel('MAD between combinations of center 3*3 pixel fluxes')
+             ax1.set_ylabel('Counts')
+             text_ax = fig.add_axes([0.71, 0.9, 0.3, 0.3])  # [left, bottom, width, height] in figure coordinates
+             text_ax.axis('off')  # Turn off axis lines, ticks, etc.
+             text_ax.text(0., 0., f"Gaia DR3 {gaia_dr3} \n"
+                                  f" ←← TESS SPOC FFI and TIC/Gaia stars with proper motions. \n"
+                                  f" Arrows show Gaia proper motion after {pm_years} years. \n"
+                                  f" ← Histogram of the MADs between 3*3 pixel fluxes. \n"
+                                  f" ↓ Flux of each pixel after contamination is removed. \n"
+                                  f" The fluxes are normalized and detrended. The background \n"
+                                  f" color shows the pixel brightness after the decontamination. \n"
+                                  f"\n"
+                                  f"How to interpret these plots: \n"
+                                  f" If the signals you are interested in (e.g. transits, \n"
+                                  f" eclipses, variable stars) show similar amplitudes in \n"
+                                  f" all (especially the center 3*3) pixels, then the star \n"
+                                  f" is likely to be the source. The median absolute \n"
+                                  f" differences (MADs) taken between all combinations \n"
+                                  f" of the center pixel fluxes are shown in the histogram \n"
+                                  f" for a quantitative comparison to other possible sources. \n"
+                                  f" The star with the smaller distribution width (IQR or \n"
+                                  f" STD) is more likely to be the source of the signal. \n"
+                                  f"\n"
+                                  f"Interquartile Range (IQR): {iqr:.5f} \n"
+                                  f"Standard Deviation: {std_dev:.5f}", transform=text_ax.transAxes, ha='left',
+                          va='top')
+             plt.subplots_adjust(top=.98, bottom=0.05, left=0.05, right=0.95)
+             plt.savefig(
+                 f'{local_directory}plots/contamination_sector_{hdul[0].header["SECTOR"]:04d}_Gaia_DR3_{gaia_dr3}.pdf',
+                 dpi=300)
+             # plt.savefig(f'{local_directory}plots/contamination_sector_{hdul[0].header["SECTOR"]:04d}_Gaia_DR3_{gaia_dr3}.png',
+             #             dpi=600)
+             plt.close()
+
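+ # A hedged toy illustration of the MAD diagnostic used above: pixels that carry the same
+ # signal give small pairwise median absolute differences, so their MAD distribution is
+ # narrow. The arrays here are synthetic and only meant to show the computation.
+ #
+ #     a = np.array([1.00, 0.99, 1.01]); b = np.array([1.00, 1.00, 1.01])
+ #     mad_ab = np.median(np.abs(a - b))   # small value -> consistent pixels
+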
+
+ def plot_epsf(local_directory=None):
+     files = glob(f'{local_directory}epsf/*.npy')
+     os.makedirs(f'{local_directory}plots/', exist_ok=True)
+     for i in range(len(files)):
+         psf = np.load(files[i])
+         plt.imshow(psf[0, :23 ** 2].reshape(23, 23), cmap='bone', origin='lower')
+         plt.tick_params(axis='x', bottom=False)
+         plt.tick_params(axis='y', left=False)
+         plt.title(f'{files[i].split("/")[-1].split(".")[0]}')
+         plt.savefig(f'{local_directory}plots/{files[i].split("/")[-1]}.png', bbox_inches='tight', dpi=300)
+         plt.close()
+
+
+ def choose_prior(tics, local_directory=None, priors=np.logspace(-5, 0, 100)):
+     mad = np.zeros((2, len(priors)))
+     for i in trange(len(priors)):
+         # NOTE: this assumes get_tglc_lc returns the per-sector residuals for the target
+         resid = get_tglc_lc(tics=tics, method='query', server=1, directory=local_directory, prior=priors[i])
+         print(resid)
+         mad[:, i] = resid
+         # with fits.open(
+         #         '/home/tehan/data/cosmos/GEMS/TIC 16005254/lc/hlsp_tglc_tess_ffi_gaiaid-52359538285081728-s0043-cam3-ccd3_tess_v1_llc.fits',
+         #         mode='denywrite') as hdul:
+         #     mad[0, i] = np.nanmedian(abs(hdul[1].data['cal_psf_flux'] - np.nanmedian(hdul[1].data['cal_psf_flux'])))
+         # with fits.open(
+         #         '/home/tehan/data/cosmos/GEMS/TIC 16005254/lc/hlsp_tglc_tess_ffi_gaiaid-52359538285081728-s0044-cam1-ccd1_tess_v1_llc.fits',
+         #         mode='denywrite') as hdul:
+         #     mad[1, i] = np.nanmedian(abs(hdul[1].data['cal_psf_flux'] - np.nanmedian(hdul[1].data['cal_psf_flux'])))
+     np.save('/home/tehan/Documents/GEMS/TIC 16005254/mad.npy', mad)
+     # plt.plot(priors, mad)
+     # plt.xscale('log')
+     # plt.title(f'best prior = {priors[np.argmin(mad)]:04d}')
+     # plt.show()
+
+
+ def get_tglc_lc(tics=None, method='query', server=1, directory=None, prior=None):
+     if method == 'query':
+         for i in range(len(tics)):
+             target = f'TIC {tics[i]}'
+             local_directory = f'{directory}{target}/'
+             os.makedirs(local_directory, exist_ok=True)
+             tglc_lc(target=target, local_directory=local_directory, size=90, save_aper=True, limit_mag=16,
+                     get_all_lc=False, first_sector_only=False, last_sector_only=False, sector=None, prior=prior,
+                     transient=None)
+             plot_lc(local_directory=f'{directory}TIC {tics[i]}/', kind='cal_aper_flux')
+     if method == 'search':
+         star_spliter(server=server, tics=tics, local_directory=directory)
+
+
+ if __name__ == '__main__':
+     tics = [16005254]
+     directory = f'/home/tehan/data/'
+     os.makedirs(directory, exist_ok=True)
+     get_tglc_lc(tics=tics, method='query', server=1, directory=directory)
+     # plot_lc(local_directory=f'{directory}TIC {tics[0]}/', kind='cal_aper_flux')
+     # plot_lc(local_directory=f'/home/tehan/Documents/tglc/TIC 16005254/', kind='cal_aper_flux', ylow=0.9, yhigh=1.1)
+     # plot_contamination(local_directory=f'{directory}TIC {tics[0]}/', gaia_dr3=5751990597042725632)
+     # plot_epsf(local_directory=f'{directory}TIC {tics[0]}/')
+     # plot_pf_lc(local_directory=f'{directory}TIC {tics[0]}/lc/', period=0.71912603, mid_transit_tbjd=2790.58344,
+     #            kind='cal_psf_flux')
+     # plot_pf_lc(local_directory=f'{directory}TIC {tics[0]}/lc/', period=0.23818244, mid_transit_tbjd=1738.71248,
+     #            kind='cal_aper_flux')