asteroid_spinprops 0.2.31__tar.gz → 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- asteroid_spinprops-1.0.0/PKG-INFO +101 -0
- asteroid_spinprops-1.0.0/README.md +76 -0
- asteroid_spinprops-1.0.0/asteroid_spinprops/ssolib/dataprep.py +250 -0
- {asteroid_spinprops-0.2.31 → asteroid_spinprops-1.0.0}/asteroid_spinprops/ssolib/modelfit.py +143 -252
- {asteroid_spinprops-0.2.31 → asteroid_spinprops-1.0.0}/asteroid_spinprops/ssolib/periodest.py +146 -160
- {asteroid_spinprops-0.2.31 → asteroid_spinprops-1.0.0}/asteroid_spinprops/ssolib/utils.py +164 -211
- {asteroid_spinprops-0.2.31 → asteroid_spinprops-1.0.0}/pyproject.toml +3 -5
- asteroid_spinprops-0.2.31/PKG-INFO +0 -77
- asteroid_spinprops-0.2.31/README.md +0 -50
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/.ruff_cache/.gitignore +0 -2
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/.ruff_cache/0.13.2/1980339045096230685 +0 -0
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/.ruff_cache/CACHEDIR.TAG +0 -1
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/dataprep.py +0 -596
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/pipetools.py +0 -167
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/atlas_x_ztf_testing/test_pqfile_1.parquet +0 -0
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/atlas_x_ztf_testing/test_pqfile_2.parquet +0 -0
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/2000 WL152 +0 -1702
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/2001 PC +0 -94
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/2001 SG276 +0 -111
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/2008 GX32 +0 -93
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/2009 BE185 +0 -130
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/2011 EY17 +0 -101
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/2134 T-1 +0 -352
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/Bellmore +0 -2657
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/Dermott +0 -2971
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/Duke +0 -2026
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/Izenberg +0 -2440
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/Lermontov +0 -2760
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/Poullain +0 -1272
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/ephemeris_testing/Sonneberga +0 -2756
- asteroid_spinprops-0.2.31/asteroid_spinprops/ssolib/testing/testing_ssoname_keys.pkl +0 -0
- {asteroid_spinprops-0.2.31 → asteroid_spinprops-1.0.0}/asteroid_spinprops/__init__.py +0 -0
- {asteroid_spinprops-0.2.31 → asteroid_spinprops-1.0.0}/asteroid_spinprops/ssolib/__init__.py +0 -0
- {asteroid_spinprops-0.2.31 → asteroid_spinprops-1.0.0}/asteroid_spinprops/ssolib/ssptools.py +0 -0
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: asteroid_spinprops
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: Collection of tools used for fitting sHG1G2 and SOCCA photometric models to sparse asteroid photometry
|
|
5
|
+
License: MIT
|
|
6
|
+
Author: Odysseas
|
|
7
|
+
Author-email: odysseas.xenos@proton.me
|
|
8
|
+
Requires-Python: >=3.11
|
|
9
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
15
|
+
Requires-Dist: astropy (==7.0.0)
|
|
16
|
+
Requires-Dist: fink-utils (==0.43.0)
|
|
17
|
+
Requires-Dist: nifty-ls (==1.1.0)
|
|
18
|
+
Requires-Dist: pandas (==2.3.3)
|
|
19
|
+
Requires-Dist: scipy (==1.16.2)
|
|
20
|
+
Requires-Dist: tqdm (==4.67.1)
|
|
21
|
+
Project-URL: Homepage, https://gitlab.com/odysseas_xenos/asteroid-spinprops
|
|
22
|
+
Project-URL: Repository, https://gitlab.com/odysseas_xenos/asteroid-spinprops
|
|
23
|
+
Description-Content-Type: text/markdown
|
|
24
|
+
|
|
25
|
+
# asteroid-spinprops
|
|
26
|
+
|
|
27
|
+
## Overview
|
|
28
|
+
**asteroid-spinprops** is a Python package providing tools to fit SHG1G2 and SOCCA photometric models to sparse asteroid photometry.
|
|
29
|
+
It supports multiband modeling, residual analysis and shape, period and pole orientation estimation for small solar system objects.
|
|
30
|
+
|
|
31
|
+
---
|
|
32
|
+
|
|
33
|
+
## Installation
|
|
34
|
+
Install the package via pip:
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
pip install asteroid_spinprops
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
## Quick Start
|
|
41
|
+
```python
|
|
42
|
+
import numpy as np
|
|
43
|
+
import pandas as pd
|
|
44
|
+
from asteroid_spinprops.ssolib import dataprep, periodest, modelfit
|
|
45
|
+
|
|
46
|
+
# Suppose `pdf` is your initial asteroid DataFrame
|
|
47
|
+
# Ensure all columns are converted to the required single row format.
|
|
48
|
+
pdf_s = pd.DataFrame({col: [np.array(pdf[col])] for col in pdf.columns})
|
|
49
|
+
|
|
50
|
+
# Convert filter IDs to numeric
|
|
51
|
+
unique_vals, inv = np.unique(pdf_s["cfid"].values[0], return_inverse=True)
|
|
52
|
+
numeric_filter = inv + 1
|
|
53
|
+
pdf_s["cfid"].values[0] = numeric_filter
|
|
54
|
+
|
|
55
|
+
# --- Data cleaning and filtering ---
|
|
56
|
+
clean_data, errorbar_rejects = dataprep.errorbar_filtering(data=pdf_s, mlimit=0.7928)
|
|
57
|
+
clean_data, projection_rejects = dataprep.projection_filtering(data=clean_data)
|
|
58
|
+
clean_data, iterative_rejects = dataprep.iterative_filtering(data=clean_data)
|
|
59
|
+
|
|
60
|
+
# --- Fit SHG1G2 model ---
|
|
61
|
+
shg1g2_params = modelfit.get_fit_params(
|
|
62
|
+
data=clean_data,
|
|
63
|
+
flavor="SHG1G2",
|
|
64
|
+
)
|
|
65
|
+
|
|
66
|
+
# Compute residuals for period analysis
|
|
67
|
+
residuals_dataframe = modelfit.make_residuals_df(
|
|
68
|
+
clean_data, model_parameters=shg1g2_params
|
|
69
|
+
)
|
|
70
|
+
|
|
71
|
+
# --- Estimate rotation period ---
|
|
72
|
+
p_in, k_val, p_rms, signal_peak, window_peak = periodest.get_multiband_period_estimate(
|
|
73
|
+
residuals_dataframe,
|
|
74
|
+
k_free=True,
|
|
75
|
+
)
|
|
76
|
+
|
|
77
|
+
# Assess period robustness via bootstrap resampling
|
|
78
|
+
_, Nbs = periodest.perform_residual_resampling(
|
|
79
|
+
resid_df=residuals_dataframe,
|
|
80
|
+
p_min=0.03,
|
|
81
|
+
p_max=2,
|
|
82
|
+
k=int(k_val)
|
|
83
|
+
)
|
|
84
|
+
|
|
85
|
+
# --- Fit SSHG1G2 (spin + multiband) model ---
|
|
86
|
+
SOCCA_params = modelfit.get_fit_params(
|
|
87
|
+
data=clean_data,
|
|
88
|
+
flavor="SSHG1G2",
|
|
89
|
+
shg1g2_constrained=True,
|
|
90
|
+
blind_scan=True,
|
|
91
|
+
period_in=p_in,
|
|
92
|
+
)
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
## Models
|
|
96
|
+
Photometric models from Carry et al. (2024) {2024A&A...687A..38C}
|
|
97
|
+
and https://github.com/astrolabsoftware
|
|
98
|
+
|
|
99
|
+
## Project status
|
|
100
|
+
Under development
|
|
101
|
+
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
# asteroid-spinprops
|
|
2
|
+
|
|
3
|
+
## Overview
|
|
4
|
+
**asteroid-spinprops** is a Python package providing tools to fit SHG1G2 and SOCCA photometric models to sparse asteroid photometry.
|
|
5
|
+
It supports multiband modeling, residual analysis and shape, period and pole orientation estimation for small solar system objects.
|
|
6
|
+
|
|
7
|
+
---
|
|
8
|
+
|
|
9
|
+
## Installation
|
|
10
|
+
Install the package via pip:
|
|
11
|
+
|
|
12
|
+
```bash
|
|
13
|
+
pip install asteroid_spinprops
|
|
14
|
+
```
|
|
15
|
+
|
|
16
|
+
## Quick Start
|
|
17
|
+
```python
|
|
18
|
+
import numpy as np
|
|
19
|
+
import pandas as pd
|
|
20
|
+
from asteroid_spinprops.ssolib import dataprep, periodest, modelfit
|
|
21
|
+
|
|
22
|
+
# Suppose `pdf` is your initial asteroid DataFrame
|
|
23
|
+
# Ensure all columns are converted to the required single row format.
|
|
24
|
+
pdf_s = pd.DataFrame({col: [np.array(pdf[col])] for col in pdf.columns})
|
|
25
|
+
|
|
26
|
+
# Convert filter IDs to numeric
|
|
27
|
+
unique_vals, inv = np.unique(pdf_s["cfid"].values[0], return_inverse=True)
|
|
28
|
+
numeric_filter = inv + 1
|
|
29
|
+
pdf_s["cfid"].values[0] = numeric_filter
|
|
30
|
+
|
|
31
|
+
# --- Data cleaning and filtering ---
|
|
32
|
+
clean_data, errorbar_rejects = dataprep.errorbar_filtering(data=pdf_s, mlimit=0.7928)
|
|
33
|
+
clean_data, projection_rejects = dataprep.projection_filtering(data=clean_data)
|
|
34
|
+
clean_data, iterative_rejects = dataprep.iterative_filtering(data=clean_data)
|
|
35
|
+
|
|
36
|
+
# --- Fit SHG1G2 model ---
|
|
37
|
+
shg1g2_params = modelfit.get_fit_params(
|
|
38
|
+
data=clean_data,
|
|
39
|
+
flavor="SHG1G2",
|
|
40
|
+
)
|
|
41
|
+
|
|
42
|
+
# Compute residuals for period analysis
|
|
43
|
+
residuals_dataframe = modelfit.make_residuals_df(
|
|
44
|
+
clean_data, model_parameters=shg1g2_params
|
|
45
|
+
)
|
|
46
|
+
|
|
47
|
+
# --- Estimate rotation period ---
|
|
48
|
+
p_in, k_val, p_rms, signal_peak, window_peak = periodest.get_multiband_period_estimate(
|
|
49
|
+
residuals_dataframe,
|
|
50
|
+
k_free=True,
|
|
51
|
+
)
|
|
52
|
+
|
|
53
|
+
# Assess period robustness via bootstrap resampling
|
|
54
|
+
_, Nbs = periodest.perform_residual_resampling(
|
|
55
|
+
resid_df=residuals_dataframe,
|
|
56
|
+
p_min=0.03,
|
|
57
|
+
p_max=2,
|
|
58
|
+
k=int(k_val)
|
|
59
|
+
)
|
|
60
|
+
|
|
61
|
+
# --- Fit SSHG1G2 (spin + multiband) model ---
|
|
62
|
+
SOCCA_params = modelfit.get_fit_params(
|
|
63
|
+
data=clean_data,
|
|
64
|
+
flavor="SSHG1G2",
|
|
65
|
+
shg1g2_constrained=True,
|
|
66
|
+
blind_scan=True,
|
|
67
|
+
period_in=p_in,
|
|
68
|
+
)
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
## Models
|
|
72
|
+
Photometric models from Carry et al. (2024) {2024A&A...687A..38C}
|
|
73
|
+
and https://github.com/astrolabsoftware
|
|
74
|
+
|
|
75
|
+
## Project status
|
|
76
|
+
Under development
|
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
import pandas as pd
|
|
3
|
+
from asteroid_spinprops.ssolib.modelfit import (
|
|
4
|
+
get_fit_params,
|
|
5
|
+
get_residuals,
|
|
6
|
+
)
|
|
7
|
+
|
|
8
|
+
import asteroid_spinprops.ssolib.utils as utils
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def errorbar_filtering(data, mlimit):
    """
    Filter out data points with large photometric uncertainties.

    Measurements whose ``csigmapsf`` value exceeds ``mlimit / 2`` are moved
    into a separate rejects frame; all remaining points are kept.

    Parameters
    ----------
    data : pd.DataFrame
        A single-row DataFrame where each column contains an array of values
        for a solar system object. Modified in place and also returned.
    mlimit : float
        Threshold value; points with uncertainties greater than
        ``mlimit / 2`` are filtered out.

    Returns
    -------
    data : pd.DataFrame
        Filtered DataFrame (the same object that was passed in).
    rejects : pd.DataFrame
        DataFrame containing the rejected measurements.
    """
    keep = data["csigmapsf"].values[0] <= mlimit / 2
    rejects = data.copy()

    # Bookkeeping columns are left untouched in both frames.
    skip_cols = ("index", "kast", "name")
    for column in data.columns:
        if column in skip_cols:
            continue
        values = data[column].values[0]
        rejects.at[0, column] = values[~keep]
        data.at[0, column] = values[keep]

    return data, rejects
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def projection_filtering(data):
    """
    Filter out photometric outliers in reduced magnitude space, per filter,
    using a 3-sigma criterion.

    For each photometric band the mean and standard deviation of the reduced
    magnitudes are computed; points lying outside +/- 3 sigma of the band
    mean are moved into the rejects frame.

    Parameters
    ----------
    data : pd.DataFrame
        A single-row DataFrame where each column contains an array of values.
        Modified in place and also returned.

    Returns
    -------
    data : pd.DataFrame
        Filtered DataFrame (the same object that was passed in).
    rejects : pd.DataFrame
        DataFrame containing the rejected measurements.
    """
    rejects = data.copy()

    fid_arr = data["cfid"].values[0]
    mag_arr = data["cmred"].values[0]

    # Collect, band by band, the positions of points inside +/- 3 sigma.
    kept_per_band = []
    for band in np.unique(fid_arr):
        band_mask = np.array(fid_arr) == band

        mu = np.mean(mag_arr[band_mask])
        sigma = np.std(mag_arr[band_mask])

        inliers = (
            band_mask
            & (mag_arr > mu - 3 * sigma)
            & (mag_arr < mu + 3 * sigma)
        )
        kept_per_band.append(np.where(inliers)[0])

    valid_indices = np.sort(np.concatenate(kept_per_band))

    # Everything not kept is rejected.
    rejected_mask = np.ones(fid_arr.shape, dtype=bool)
    rejected_mask[valid_indices] = False

    skip_cols = ("index", "kast", "name")
    for column in data.columns:
        if column in skip_cols:
            continue
        values = data[column].values[0]
        rejects.at[0, column] = values[rejected_mask]
        data.at[0, column] = values[valid_indices]

    return data, rejects
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def iterative_filtering(data, max_iter=10):
    """
    Iteratively removes outliers based on residuals from fitting the SHG1G2 model until convergence.

    Each iteration refits the SHG1G2 model on the surviving points, computes
    residuals, and flags points with |residual| >= 3 * std(residuals). The
    loop stops when an iteration removes no points, when a KeyError is
    raised while computing residuals, or after ``max_iter`` iterations.

    Parameters
    -----------
    data : pd.DataFrame
        A single-row DataFrame where each column contains an array of values.
        Modified in place and also returned.

    max_iter : int
        Maximum number of filtering iterations (default is 10).

    Returns
    --------
    data : pd.DataFrame
        Filtered DataFrame

    rejects : pd.DataFrame
        DataFrame containing the rejected measurements
    """
    rejects = data.copy()

    # Start with every point accepted; the mask is refined each iteration.
    mask = np.ones_like(data["cfid"].values[0], dtype=bool)
    inloop_quants = {}
    reject_quants = {}

    # Working copies of the per-point arrays; bookkeeping columns are skipped.
    for c in data.columns:
        if c not in ["index", "kast", "name"]:
            inloop_quants[c] = data[c].values[0]
            reject_quants[c] = np.array([])

    for niter in range(max_iter):
        prev_len = len(inloop_quants["cfid"])

        # Apply the mask from the previous iteration: move the newly flagged
        # points into the reject pool before refitting.
        for k in inloop_quants.keys():
            reject_quants[k] = np.append(reject_quants[k], inloop_quants[k][~mask])
            inloop_quants[k] = inloop_quants[k][mask]

        mparams = get_fit_params(pd.DataFrame([inloop_quants]), "SHG1G2")
        try:
            residuals = get_residuals(pd.DataFrame([inloop_quants]), mparams)
        except KeyError:
            # NOTE(review): presumably raised when the fit fails to produce
            # the expected parameter keys — confirm against get_residuals.
            break
        # 3-sigma clipping on the model residuals.
        mask = np.abs(residuals) < 3 * np.std(residuals)

        # Converged: the new mask would keep as many points as we started
        # this iteration with, so nothing more would be removed.
        if prev_len == len(inloop_quants["Phase"][mask]):
            break

    # Write the surviving / rejected arrays back into the two frames.
    for c in data.columns:
        if c not in ["index", "kast", "name"]:
            data.at[0, c] = inloop_quants[c]
            rejects.at[0, c] = reject_quants[c]
    return data, rejects
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def lightcurve_filtering(data, window=10, maglim=0.6):
    """
    Filters out lightcurve points that deviate from the median by more than given mag limitation within time bins.

    The observations are first split into time bins of roughly ``window``
    days; within each bin and each photometric band, points farther than
    ``maglim`` from the bin median are rejected.

    Parameters
    ----------
    data : pd.DataFrame
        Single-row DataFrame. Modified in place and also returned.
    window : float
        Time bin size (default is 10 days).
    maglim : float
        Magnitude deviation threshold from the median (default is 0.6 mag).

    Returns
    -------
    data : pd.DataFrame
        Filtered data
    rejects : pd.DataFrame
        DataFrame containing the rejected measurements
    """
    # Per-bin accumulators: magnitudes, times, filter ids, original indices.
    dummym, dummyt, dummyf, dummyi = [], [], [], []

    dates = data["cjd"].values[0]
    magnitudes = data["cmred"].values[0]
    filters = data["cfid"].values[0]
    indices = np.array([ind for ind in range(len(data["cfid"].values[0]))])

    ufilters = np.unique(filters)

    mag_pfilt = {}

    date0 = dates.min()
    date0_plus_step = date0 + window
    # TODO: Use np.digitize instead of this
    # NOTE(review): if the while condition is False on entry (all dates
    # within one window), plus_ten_index is never assigned and the
    # appends below raise NameError — confirm callers guarantee a span
    # longer than `window`.
    while date0 < dates.max():
        # Bin edges are snapped to the nearest observed dates.
        prev_ind = np.where(dates == utils.find_nearest(dates, date0))[0][0]
        plus_ten_index = np.where(dates == utils.find_nearest(dates, date0_plus_step))[
            0
        ][0]

        dummym.append(magnitudes[prev_ind:plus_ten_index])
        dummyt.append(dates[prev_ind:plus_ten_index])
        dummyf.append(filters[prev_ind:plus_ten_index])
        dummyi.append(indices[prev_ind:plus_ten_index])

        date0 = dates[plus_ten_index]
        date0_plus_step = date0_plus_step + window

    # Trailing bin: everything from the last edge to the end of the arrays.
    dummym.append(magnitudes[plus_ten_index:])
    dummyt.append(dates[plus_ten_index:])
    dummyf.append(filters[plus_ten_index:])
    dummyi.append(indices[plus_ten_index:])

    # dtype=object keeps the ragged (unequal-length) bins as-is.
    mag_binned, _, filt_binned, ind_binned = (
        np.asarray(dummym, dtype=object),
        np.asarray(dummyt, dtype=object),
        np.asarray(dummyf, dtype=object),
        np.asarray(dummyi, dtype=object),
    )

    # Per band: bin medians, max-min spreads, magnitudes, and indices.
    for f in ufilters:
        dummymain, dummym, dummyt, dummydiff, dummyi = [], [], [], [], []
        for n in range(len(mag_binned)):
            fcond = filt_binned[n] == f
            dummymain.append(mag_binned[n][fcond])
            # NOTE(review): np.median of an empty bin yields NaN (with a
            # RuntimeWarning); such bins then reject nothing below.
            dummym.append(np.median(mag_binned[n][fcond]))
            # initial= makes max/min well-defined for empty bins, producing
            # the sentinel 0 - 1e3 = -1000.0 handled just after the loop.
            dummydiff.append(
                np.max(mag_binned[n][fcond], initial=0)
                - np.min(mag_binned[n][fcond], initial=1e3)
            )
            dummyi.append(ind_binned[n][fcond])

        dummydiff = np.array(dummydiff)
        # Empty bins carry the -1000.0 sentinel; zero out their spread.
        dummydiff[dummydiff == np.float64(-1000.0)] = 0

        mag_pfilt["medimag_{}".format(f)] = dummym
        mag_pfilt["mxmnmag_{}".format(f)] = dummydiff
        mag_pfilt["mag_{}".format(f)] = dummymain
        mag_pfilt["ind_{}".format(f)] = dummyi

    valid_indices = []
    reject_indices = []

    rejects = data.copy()

    # Reject points more than maglim away from their bin median (per band).
    for f in ufilters:
        for n in range(len(mag_binned)):
            bin_cond = (
                mag_pfilt["mag_{}".format(f)][n]
                > mag_pfilt["medimag_{}".format(f)][n] + maglim
            ) | (
                mag_pfilt["mag_{}".format(f)][n]
                < mag_pfilt["medimag_{}".format(f)][n] - maglim
            )
            valid_indices.append(mag_pfilt["ind_{}".format(f)][n][~bin_cond])
            reject_indices.append(mag_pfilt["ind_{}".format(f)][n][bin_cond])

    valid_indices = np.array(utils.flatten_list(valid_indices), dtype=int)
    reject_indices = np.array(utils.flatten_list(reject_indices), dtype=int)

    for c in data.columns:
        if c not in ["index", "kast", "name"]:
            rejects.at[0, c] = data[c].values[0][reject_indices]
            data.at[0, c] = data[c].values[0][valid_indices]

    # Filtering above regroups points by band, so restore time ordering.
    data = utils.sort_by_cjd(data)

    return data, rejects
|