asteroid_spinprops 0.2.32__tar.gz → 1.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34) hide show
  1. asteroid_spinprops-1.0.1/PKG-INFO +186 -0
  2. asteroid_spinprops-1.0.1/README.md +157 -0
  3. asteroid_spinprops-1.0.1/asteroid_spinprops/ssolib/dataprep.py +250 -0
  4. {asteroid_spinprops-0.2.32 → asteroid_spinprops-1.0.1}/asteroid_spinprops/ssolib/modelfit.py +143 -252
  5. {asteroid_spinprops-0.2.32 → asteroid_spinprops-1.0.1}/asteroid_spinprops/ssolib/periodest.py +126 -158
  6. {asteroid_spinprops-0.2.32 → asteroid_spinprops-1.0.1}/asteroid_spinprops/ssolib/utils.py +164 -211
  7. {asteroid_spinprops-0.2.32 → asteroid_spinprops-1.0.1}/pyproject.toml +13 -11
  8. asteroid_spinprops-0.2.32/PKG-INFO +0 -77
  9. asteroid_spinprops-0.2.32/README.md +0 -50
  10. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/.ruff_cache/.gitignore +0 -2
  11. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/.ruff_cache/0.13.2/1980339045096230685 +0 -0
  12. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/.ruff_cache/CACHEDIR.TAG +0 -1
  13. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/dataprep.py +0 -596
  14. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/pipetools.py +0 -167
  15. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/atlas_x_ztf_testing/test_pqfile_1.parquet +0 -0
  16. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/atlas_x_ztf_testing/test_pqfile_2.parquet +0 -0
  17. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/2000 WL152 +0 -1702
  18. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/2001 PC +0 -94
  19. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/2001 SG276 +0 -111
  20. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/2008 GX32 +0 -93
  21. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/2009 BE185 +0 -130
  22. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/2011 EY17 +0 -101
  23. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/2134 T-1 +0 -352
  24. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/Bellmore +0 -2657
  25. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/Dermott +0 -2971
  26. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/Duke +0 -2026
  27. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/Izenberg +0 -2440
  28. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/Lermontov +0 -2760
  29. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/Poullain +0 -1272
  30. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/ephemeris_testing/Sonneberga +0 -2756
  31. asteroid_spinprops-0.2.32/asteroid_spinprops/ssolib/testing/testing_ssoname_keys.pkl +0 -0
  32. {asteroid_spinprops-0.2.32 → asteroid_spinprops-1.0.1}/asteroid_spinprops/__init__.py +0 -0
  33. {asteroid_spinprops-0.2.32 → asteroid_spinprops-1.0.1}/asteroid_spinprops/ssolib/__init__.py +0 -0
  34. {asteroid_spinprops-0.2.32 → asteroid_spinprops-1.0.1}/asteroid_spinprops/ssolib/ssptools.py +0 -0
@@ -0,0 +1,186 @@
1
+ Metadata-Version: 2.4
2
+ Name: asteroid_spinprops
3
+ Version: 1.0.1
4
+ Summary: Collection of tools used for fitting SHG1G2 and SOCCA photometric models to sparse asteroid photometry
5
+ License: MIT
6
+ Author: Odysseas
7
+ Author-email: odysseas.xenos@proton.me
8
+ Requires-Python: >=3.9
9
+ Classifier: License :: OSI Approved :: MIT License
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: Programming Language :: Python :: 3.9
12
+ Classifier: Programming Language :: Python :: 3.10
13
+ Classifier: Programming Language :: Python :: 3.11
14
+ Classifier: Programming Language :: Python :: 3.12
15
+ Classifier: Programming Language :: Python :: 3.13
16
+ Classifier: Programming Language :: Python :: 3.14
17
+ Requires-Dist: astropy (>=6.0)
18
+ Requires-Dist: fink-utils (>=0.47.0)
19
+ Requires-Dist: nifty-ls (>=1.1.0)
20
+ Requires-Dist: pandas (>=2.3)
21
+ Requires-Dist: pyarrow (>=18.1)
22
+ Requires-Dist: sbpy (>=0.5.0)
23
+ Requires-Dist: scipy (>=1.10)
24
+ Requires-Dist: tqdm (>=4.67)
25
+ Project-URL: Homepage, https://gitlab.com/odysseas_xenos/asteroid-spinprops
26
+ Project-URL: Repository, https://gitlab.com/odysseas_xenos/asteroid-spinprops
27
+ Description-Content-Type: text/markdown
28
+
29
+ # asteroid-spinprops
30
+
31
+ ## Overview
32
+ **asteroid-spinprops** is a Python package providing tools to fit SHG1G2 and SOCCA photometric models to sparse asteroid photometry.
33
+ It supports multiband modeling, residual analysis and shape, period and pole orientation estimation for small solar system objects.
34
+
35
+ ---
36
+
37
+ ## Installation
38
+ Install the package via pip:
39
+
40
+ ```bash
41
+ pip install asteroid_spinprops
42
+ ```
43
+
44
+ ## Input column requirements and preprocessing
45
+
46
+ `asteroid_spinprops` expects photometric measurements to follow the **Fink alert schema**.
47
+ If your DataFrame uses different column names, they must be renamed to the standard format before analysis.
48
+
49
+ The package maps common input columns to Fink-style fields:
50
+
51
+ | Expected name | Description |
52
+ |---------------|-------------|
53
+ | `cjd` | Observation time (JD) |
54
+ | `cmagpsf` | PSF magnitude |
55
+ | `csigmapsf` | Magnitude uncertainty |
56
+ | `cfid` | Filter identifier |
57
+ | `ra` | Right ascension (deg) |
58
+ | `dec` | Declination (deg) |
59
+ | `Phase` | Solar phase angle (deg) |
60
+
61
+ ### Additional columns created during preprocessing
62
+
63
+ The preprocessing step also adds the following fields:
64
+
65
+ - **`cmred` — Reduced magnitude**
66
+
67
+ Computed from the heliocentric and observer-centric distances:
68
+
69
+
70
+ $$
71
+ \mathrm{cmred} = \mathrm{cmagpsf} - 5\log_{10}\!\left(\frac{r\,\Delta}{\mathrm{AU}^2}\right)
72
+ $$
73
+ where `Obj_Sun_LTC_km` = \(r\) and `Range_LTC_km` = \(\Delta\).
74
+
75
+ - **`jd_ltc` — Light-time–corrected Julian Date**
76
+
77
+ First converts MJD → JD (`+ 2400000.5`), then applies the correction
78
+
79
+ $$
80
+ JD_\mathrm{ltc} = JD - \frac{\Delta}{c},
81
+ $$
82
+
83
+ using the one-way light-travel time in days.
84
+
85
+ ```python
86
+ pdf.rename(
87
+ columns={
88
+ "Your_JD_column": "cjd",
89
+ "Your_magnitudes_column": "cmagpsf",
90
+ "Your_phase_angle_column": "Phase",
91
+ "Your_RA_column": "ra",
92
+ "Your_Dec_column": "dec",
93
+ "Your_magnitude_uncertainty_column": "csigmapsf",
94
+ "Your_filter_column": "cfid",
95
+ },
96
+ inplace=True,
97
+ )
98
+
99
+ # Add missing columns
100
+ pdf["cmred"] = pdf["cmagpsf"] - 5 * np.log10(
101
+ pdf["Observer_SSO_distance_column"] * pdf["Sun_SSO_distance_column"] / (au**2)
102
+ )
103
+
104
+ # LT correction
105
+ pdf["cjd"] = pdf["cjd"] + 2400000.5 # MJD to JD
106
+ pdf["jd_ltc"] = pdf["cjd"] - pdf["Observer_SSO_distance_column"] / c_kmday # light time correction
107
+
108
+ ```
109
+
110
+ ### Required inputs
111
+
112
+ Your input DataFrame must therefore include:
113
+
114
+ - time of observation
115
+ - PSF magnitude and uncertainty
116
+ - filter ID
117
+ - RA, Dec
118
+ - phase angle
119
+ - heliocentric distance (`Obj_Sun_LTC_km`)
120
+ - observer-centric distance (`Range_LTC_km`)
121
+
122
+ The preprocessing step renames these fields to the Fink schema, computes reduced magnitudes, and applies the light-time correction to the observation timestamps.
123
+
124
+
125
+ ## Quick Start
126
+ ```python
127
+ import numpy as np
128
+ import pandas as pd
129
+ from asteroid_spinprops.ssolib import dataprep, periodest, modelfit
130
+
131
+ # Suppose `pdf` is your initial asteroid DataFrame
132
+ # Ensure all columns are converted to the required single row format.
133
+ pdf_s = pd.DataFrame({col: [np.array(pdf[col])] for col in pdf.columns})
134
+
135
+ # Convert filter IDs to numeric
136
+ unique_vals, inv = np.unique(pdf_s["cfid"].values[0], return_inverse=True)
137
+ numeric_filter = inv + 1
138
+ pdf_s["cfid"].values[0] = numeric_filter
139
+
140
+ # --- Data cleaning and filtering ---
141
+ clean_data, errorbar_rejects = dataprep.errorbar_filtering(data=pdf_s, mlimit=0.7928)
142
+ clean_data, projection_rejects = dataprep.projection_filtering(data=clean_data)
143
+ clean_data, iterative_rejects = dataprep.iterative_filtering(data=clean_data)
144
+
145
+ # --- Fit SHG1G2 model ---
146
+ shg1g2_params = modelfit.get_fit_params(
147
+ data=clean_data,
148
+ flavor="SHG1G2",
149
+ )
150
+
151
+ # Compute residuals for period analysis
152
+ residuals_dataframe = modelfit.make_residuals_df(
153
+ clean_data, model_parameters=shg1g2_params
154
+ )
155
+
156
+ # --- Estimate rotation period ---
157
+ p_in, k_val, p_rms, signal_peak, window_peak = periodest.get_multiband_period_estimate(
158
+ residuals_dataframe,
159
+ k_free=True,
160
+ )
161
+
162
+ # Assess period robustness via bootstrap resampling
163
+ _, Nbs = periodest.perform_residual_resampling(
164
+ resid_df=residuals_dataframe,
165
+ p_min=0.03,
166
+ p_max=2,
167
+ k=int(k_val)
168
+ )
169
+
170
+ # --- Fit SSHG1G2 (spin + multiband) model ---
171
+ SOCCA_params = modelfit.get_fit_params(
172
+ data=clean_data,
173
+ flavor="SSHG1G2",
174
+ shg1g2_constrained=True,
175
+ blind_scan=True,
176
+ period_in=p_in,
177
+ )
178
+ ```
179
+
180
+ ## Models
181
+ Photometric models from Carry et al. (2024) {2024A&A...687A..38C}
182
+ and https://github.com/astrolabsoftware
183
+
184
+ ## Project status
185
+ Under development
186
+
@@ -0,0 +1,157 @@
1
+ # asteroid-spinprops
2
+
3
+ ## Overview
4
+ **asteroid-spinprops** is a Python package providing tools to fit SHG1G2 and SOCCA photometric models to sparse asteroid photometry.
5
+ It supports multiband modeling, residual analysis and shape, period and pole orientation estimation for small solar system objects.
6
+
7
+ ---
8
+
9
+ ## Installation
10
+ Install the package via pip:
11
+
12
+ ```bash
13
+ pip install asteroid_spinprops
14
+ ```
15
+
16
+ ## Input column requirements and preprocessing
17
+
18
+ `asteroid_spinprops` expects photometric measurements to follow the **Fink alert schema**.
19
+ If your DataFrame uses different column names, they must be renamed to the standard format before analysis.
20
+
21
+ The package maps common input columns to Fink-style fields:
22
+
23
+ | Expected name | Description |
24
+ |---------------|-------------|
25
+ | `cjd` | Observation time (JD) |
26
+ | `cmagpsf` | PSF magnitude |
27
+ | `csigmapsf` | Magnitude uncertainty |
28
+ | `cfid` | Filter identifier |
29
+ | `ra` | Right ascension (deg) |
30
+ | `dec` | Declination (deg) |
31
+ | `Phase` | Solar phase angle (deg) |
32
+
33
+ ### Additional columns created during preprocessing
34
+
35
+ The preprocessing step also adds the following fields:
36
+
37
+ - **`cmred` — Reduced magnitude**
38
+
39
+ Computed from the heliocentric and observer-centric distances:
40
+
41
+
42
+ $$
43
+ \mathrm{cmred} = \mathrm{cmagpsf} - 5\log_{10}\!\left(\frac{r\,\Delta}{\mathrm{AU}^2}\right)
44
+ $$
45
+ where `Obj_Sun_LTC_km` = \(r\) and `Range_LTC_km` = \(\Delta\).
46
+
47
+ - **`jd_ltc` — Light-time–corrected Julian Date**
48
+
49
+ First converts MJD → JD (`+ 2400000.5`), then applies the correction
50
+
51
+ $$
52
+ JD_\mathrm{ltc} = JD - \frac{\Delta}{c},
53
+ $$
54
+
55
+ using the one-way light-travel time in days.
56
+
57
+ ```python
58
+ pdf.rename(
59
+ columns={
60
+ "Your_JD_column": "cjd",
61
+ "Your_magnitudes_column": "cmagpsf",
62
+ "Your_phase_angle_column": "Phase",
63
+ "Your_RA_column": "ra",
64
+ "Your_Dec_column": "dec",
65
+ "Your_magnitude_uncertainty_column": "csigmapsf",
66
+ "Your_filter_column": "cfid",
67
+ },
68
+ inplace=True,
69
+ )
70
+
71
+ # Add missing columns
72
+ pdf["cmred"] = pdf["cmagpsf"] - 5 * np.log10(
73
+ pdf["Observer_SSO_distance_column"] * pdf["Sun_SSO_distance_column"] / (au**2)
74
+ )
75
+
76
+ # LT correction
77
+ pdf["cjd"] = pdf["cjd"] + 2400000.5 # MJD to JD
78
+ pdf["jd_ltc"] = pdf["cjd"] - pdf["Observer_SSO_distance_column"] / c_kmday # light time correction
79
+
80
+ ```
81
+
82
+ ### Required inputs
83
+
84
+ Your input DataFrame must therefore include:
85
+
86
+ - time of observation
87
+ - PSF magnitude and uncertainty
88
+ - filter ID
89
+ - RA, Dec
90
+ - phase angle
91
+ - heliocentric distance (`Obj_Sun_LTC_km`)
92
+ - observer-centric distance (`Range_LTC_km`)
93
+
94
+ The preprocessing step renames these fields to the Fink schema, computes reduced magnitudes, and applies the light-time correction to the observation timestamps.
95
+
96
+
97
+ ## Quick Start
98
+ ```python
99
+ import numpy as np
100
+ import pandas as pd
101
+ from asteroid_spinprops.ssolib import dataprep, periodest, modelfit
102
+
103
+ # Suppose `pdf` is your initial asteroid DataFrame
104
+ # Ensure all columns are converted to the required single row format.
105
+ pdf_s = pd.DataFrame({col: [np.array(pdf[col])] for col in pdf.columns})
106
+
107
+ # Convert filter IDs to numeric
108
+ unique_vals, inv = np.unique(pdf_s["cfid"].values[0], return_inverse=True)
109
+ numeric_filter = inv + 1
110
+ pdf_s["cfid"].values[0] = numeric_filter
111
+
112
+ # --- Data cleaning and filtering ---
113
+ clean_data, errorbar_rejects = dataprep.errorbar_filtering(data=pdf_s, mlimit=0.7928)
114
+ clean_data, projection_rejects = dataprep.projection_filtering(data=clean_data)
115
+ clean_data, iterative_rejects = dataprep.iterative_filtering(data=clean_data)
116
+
117
+ # --- Fit SHG1G2 model ---
118
+ shg1g2_params = modelfit.get_fit_params(
119
+ data=clean_data,
120
+ flavor="SHG1G2",
121
+ )
122
+
123
+ # Compute residuals for period analysis
124
+ residuals_dataframe = modelfit.make_residuals_df(
125
+ clean_data, model_parameters=shg1g2_params
126
+ )
127
+
128
+ # --- Estimate rotation period ---
129
+ p_in, k_val, p_rms, signal_peak, window_peak = periodest.get_multiband_period_estimate(
130
+ residuals_dataframe,
131
+ k_free=True,
132
+ )
133
+
134
+ # Assess period robustness via bootstrap resampling
135
+ _, Nbs = periodest.perform_residual_resampling(
136
+ resid_df=residuals_dataframe,
137
+ p_min=0.03,
138
+ p_max=2,
139
+ k=int(k_val)
140
+ )
141
+
142
+ # --- Fit SSHG1G2 (spin + multiband) model ---
143
+ SOCCA_params = modelfit.get_fit_params(
144
+ data=clean_data,
145
+ flavor="SSHG1G2",
146
+ shg1g2_constrained=True,
147
+ blind_scan=True,
148
+ period_in=p_in,
149
+ )
150
+ ```
151
+
152
+ ## Models
153
+ Photometric models from Carry et al. (2024) {2024A&A...687A..38C}
154
+ and https://github.com/astrolabsoftware
155
+
156
+ ## Project status
157
+ Under development
@@ -0,0 +1,250 @@
1
+ import numpy as np
2
+ import pandas as pd
3
+ from asteroid_spinprops.ssolib.modelfit import (
4
+ get_fit_params,
5
+ get_residuals,
6
+ )
7
+
8
+ import asteroid_spinprops.ssolib.utils as utils
9
+
10
+
11
+ def errorbar_filtering(data, mlimit):
12
+ """
13
+ Filter out data points with large photometric uncertainties.
14
+
15
+ Parameters
16
+ -----------
17
+ data : pd.DataFrame
18
+ A single-row DataFrame where each column contains an array of values
19
+ for a solar system object.
20
+ mlimit : float
21
+ Threshold value to filter out points with uncertainties greater than mlimit / 2.
22
+
23
+ Returns
24
+ -------
25
+ data : pd.DataFrame
26
+ Filtered DataFrame
27
+ rejects : pd.DataFrame
28
+ DataFrame containing the rejected measurements
29
+ """
30
+ errorbar_condition = data["csigmapsf"].values[0] <= mlimit / 2
31
+ rejects = data.copy()
32
+
33
+ for c in data.columns:
34
+ if c not in ["index", "kast", "name"]:
35
+ rejects.at[0, c] = data[c].values[0][~errorbar_condition]
36
+ data.at[0, c] = data[c].values[0][errorbar_condition]
37
+
38
+ return data, rejects
39
+
40
+
41
+ def projection_filtering(data):
42
+ """
43
+ Filters out photometric outliers in reduced magnitude space per filter using a 3 sigma criterion.
44
+
45
+ Parameters
46
+ -----------
47
+ data : pd.DataFrame
48
+ A single-row DataFrame where each column contains an array of values.
49
+ Returns
50
+ --------
51
+ data : pd.DataFrame
52
+ Filtered DataFrame
53
+ rejects : pd.DataFrame
54
+ DataFrame containing the rejected measurements
55
+ """
56
+ rejects = data.copy()
57
+ valid_indices = []
58
+
59
+ for f in np.unique(data["cfid"].values[0]):
60
+ filter_mask = np.array(data["cfid"].values[0]) == f
61
+
62
+ mean_val = np.mean(data["cmred"].values[0][filter_mask])
63
+ std_val = np.std(data["cmred"].values[0][filter_mask])
64
+
65
+ project_condition = (
66
+ filter_mask
67
+ & (data["cmred"].values[0] > mean_val - 3 * std_val)
68
+ & (data["cmred"].values[0] < mean_val + 3 * std_val)
69
+ )
70
+
71
+ valid_indices.append(np.where(project_condition)[0])
72
+
73
+ valid_indices = np.sort(
74
+ np.concatenate([valid_indices[n] for n in range(len(valid_indices))])
75
+ )
76
+
77
+ dummy = np.ones(data["cfid"].values[0].shape, dtype=bool)
78
+ dummy[valid_indices] = False
79
+
80
+ for c in data.columns:
81
+ if c not in ["index", "kast", "name"]:
82
+ rejects.at[0, c] = data[c].values[0][dummy]
83
+ data.at[0, c] = data[c].values[0][valid_indices]
84
+
85
+ return data, rejects
86
+
87
+
88
+ def iterative_filtering(data, max_iter=10):
89
+ """
90
+ Iteratively removes outliers based on residuals from fitting the SHG1G2 model until convergence.
91
+
92
+ Parameters
93
+ -----------
94
+ data : pd.DataFrame
95
+ A single-row DataFrame where each column contains an array of values.
96
+
97
+ max_iter : int
98
+ Maximum number of filtering iterations (default is 10).
99
+
100
+ Returns
101
+ --------
102
+ data : pd.DataFrame
103
+ Filtered DataFrame
104
+
105
+ rejects : pd.DataFrame
106
+ DataFrame containing the rejected measurements
107
+ """
108
+ rejects = data.copy()
109
+
110
+ mask = np.ones_like(data["cfid"].values[0], dtype=bool)
111
+ inloop_quants = {}
112
+ reject_quants = {}
113
+
114
+ for c in data.columns:
115
+ if c not in ["index", "kast", "name"]:
116
+ inloop_quants[c] = data[c].values[0]
117
+ reject_quants[c] = np.array([])
118
+
119
+ for niter in range(max_iter):
120
+ prev_len = len(inloop_quants["cfid"])
121
+
122
+ for k in inloop_quants.keys():
123
+ reject_quants[k] = np.append(reject_quants[k], inloop_quants[k][~mask])
124
+ inloop_quants[k] = inloop_quants[k][mask]
125
+
126
+ mparams = get_fit_params(pd.DataFrame([inloop_quants]), "SHG1G2")
127
+ try:
128
+ residuals = get_residuals(pd.DataFrame([inloop_quants]), mparams)
129
+ except KeyError:
130
+ break
131
+ mask = np.abs(residuals) < 3 * np.std(residuals)
132
+
133
+ if prev_len == len(inloop_quants["Phase"][mask]):
134
+ break
135
+
136
+ for c in data.columns:
137
+ if c not in ["index", "kast", "name"]:
138
+ data.at[0, c] = inloop_quants[c]
139
+ rejects.at[0, c] = reject_quants[c]
140
+ return data, rejects
141
+
142
+
143
+ def lightcurve_filtering(data, window=10, maglim=0.6):
144
+ """
145
+ Filters out lightcurve points that deviate from the median by more than a given magnitude limit within time bins.
146
+
147
+ Parameters
148
+ ----------
149
+ data : pd.DataFrame
150
+ Single-row DataFrame
151
+ window : float
152
+ Time bin size (default is 10 days).
153
+ maglim : float
154
+ Magnitude deviation threshold from the median (default is 0.6 mag).
155
+
156
+ Returns
157
+ -------
158
+ data : pd.DataFrame
159
+ Filtered data
160
+ rejects : pd.DataFrame
161
+ DataFrame containing the rejected measurements
162
+ """
163
+ dummym, dummyt, dummyf, dummyi = [], [], [], []
164
+
165
+ dates = data["cjd"].values[0]
166
+ magnitudes = data["cmred"].values[0]
167
+ filters = data["cfid"].values[0]
168
+ indices = np.array([ind for ind in range(len(data["cfid"].values[0]))])
169
+
170
+ ufilters = np.unique(filters)
171
+
172
+ mag_pfilt = {}
173
+
174
+ date0 = dates.min()
175
+ date0_plus_step = date0 + window
176
+ # TODO: Use np.digitize instead of this
177
+ while date0 < dates.max():
178
+ prev_ind = np.where(dates == utils.find_nearest(dates, date0))[0][0]
179
+ plus_ten_index = np.where(dates == utils.find_nearest(dates, date0_plus_step))[
180
+ 0
181
+ ][0]
182
+
183
+ dummym.append(magnitudes[prev_ind:plus_ten_index])
184
+ dummyt.append(dates[prev_ind:plus_ten_index])
185
+ dummyf.append(filters[prev_ind:plus_ten_index])
186
+ dummyi.append(indices[prev_ind:plus_ten_index])
187
+
188
+ date0 = dates[plus_ten_index]
189
+ date0_plus_step = date0_plus_step + window
190
+
191
+ dummym.append(magnitudes[plus_ten_index:])
192
+ dummyt.append(dates[plus_ten_index:])
193
+ dummyf.append(filters[plus_ten_index:])
194
+ dummyi.append(indices[plus_ten_index:])
195
+
196
+ mag_binned, _, filt_binned, ind_binned = (
197
+ np.asarray(dummym, dtype=object),
198
+ np.asarray(dummyt, dtype=object),
199
+ np.asarray(dummyf, dtype=object),
200
+ np.asarray(dummyi, dtype=object),
201
+ )
202
+
203
+ for f in ufilters:
204
+ dummymain, dummym, dummyt, dummydiff, dummyi = [], [], [], [], []
205
+ for n in range(len(mag_binned)):
206
+ fcond = filt_binned[n] == f
207
+ dummymain.append(mag_binned[n][fcond])
208
+ dummym.append(np.median(mag_binned[n][fcond]))
209
+ dummydiff.append(
210
+ np.max(mag_binned[n][fcond], initial=0)
211
+ - np.min(mag_binned[n][fcond], initial=1e3)
212
+ )
213
+ dummyi.append(ind_binned[n][fcond])
214
+
215
+ dummydiff = np.array(dummydiff)
216
+ dummydiff[dummydiff == np.float64(-1000.0)] = 0
217
+
218
+ mag_pfilt["medimag_{}".format(f)] = dummym
219
+ mag_pfilt["mxmnmag_{}".format(f)] = dummydiff
220
+ mag_pfilt["mag_{}".format(f)] = dummymain
221
+ mag_pfilt["ind_{}".format(f)] = dummyi
222
+
223
+ valid_indices = []
224
+ reject_indices = []
225
+
226
+ rejects = data.copy()
227
+
228
+ for f in ufilters:
229
+ for n in range(len(mag_binned)):
230
+ bin_cond = (
231
+ mag_pfilt["mag_{}".format(f)][n]
232
+ > mag_pfilt["medimag_{}".format(f)][n] + maglim
233
+ ) | (
234
+ mag_pfilt["mag_{}".format(f)][n]
235
+ < mag_pfilt["medimag_{}".format(f)][n] - maglim
236
+ )
237
+ valid_indices.append(mag_pfilt["ind_{}".format(f)][n][~bin_cond])
238
+ reject_indices.append(mag_pfilt["ind_{}".format(f)][n][bin_cond])
239
+
240
+ valid_indices = np.array(utils.flatten_list(valid_indices), dtype=int)
241
+ reject_indices = np.array(utils.flatten_list(reject_indices), dtype=int)
242
+
243
+ for c in data.columns:
244
+ if c not in ["index", "kast", "name"]:
245
+ rejects.at[0, c] = data[c].values[0][reject_indices]
246
+ data.at[0, c] = data[c].values[0][valid_indices]
247
+
248
+ data = utils.sort_by_cjd(data)
249
+
250
+ return data, rejects