sdss-almanac 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- almanac/__init__.py +4 -0
- almanac/apogee.py +300 -0
- almanac/cli.py +556 -0
- almanac/config.py +110 -0
- almanac/data_models/__init__.py +3 -0
- almanac/data_models/exposure.py +350 -0
- almanac/data_models/fps.py +109 -0
- almanac/data_models/plate.py +142 -0
- almanac/data_models/types.py +87 -0
- almanac/data_models/utils.py +185 -0
- almanac/database.py +22 -0
- almanac/display.py +422 -0
- almanac/etc/__init__.py +0 -0
- almanac/etc/bad_exposures.csv +432 -0
- almanac/io.py +320 -0
- almanac/logger.py +27 -0
- almanac/qa.py +24 -0
- almanac/stash/data_models.py +0 -0
- almanac/stash/plugmap_models.py +165 -0
- almanac/utils.py +141 -0
- sdss_almanac-0.2.1.dist-info/METADATA +201 -0
- sdss_almanac-0.2.1.dist-info/RECORD +26 -0
- sdss_almanac-0.2.1.dist-info/WHEEL +5 -0
- sdss_almanac-0.2.1.dist-info/entry_points.txt +2 -0
- sdss_almanac-0.2.1.dist-info/licenses/LICENSE.md +29 -0
- sdss_almanac-0.2.1.dist-info/top_level.txt +1 -0
almanac/io.py
ADDED
@@ -0,0 +1,320 @@
import h5py as h5
import numpy as np
from tqdm import tqdm
from typing import List, Literal, Dict, Any, Tuple, Union, get_origin, get_args
from pydantic import BaseModel
from pydantic.fields import (FieldInfo, ComputedFieldInfo)
from pydantic_core import PydanticUndefined
import datetime
from enum import Enum

from almanac.data_models import Exposure

def write_almanac(
    output: str,
    results: List[Tuple[str, int, List[Exposure], Dict[str, List[Any]]]],
    fibers: bool = False,
    verbose: bool = False,
    compression: bool = True
):
    """
    Write the results of an Almanac query to an HDF5 file.

    :param output:
        Path to the output HDF5 file.

    :param results:
        List of tuples containing (observatory, mjd, exposures, sequences).
        - observatory: str, e.g., "apo" or "lco"
        - mjd: int, Modified Julian Date
        - exposures: List[Exposure], list of Exposure models
        - sequences: Dict[str, List[Any]], dictionary of sequences by image type

    :param fibers:
        Whether to include fiber data in the output.

    :param verbose:
        Whether to print progress information.

    :param compression:
        Compression algorithm to use for datasets. If True, uses 'gzip'.
    """

    kwds = dict(fibers=fibers, verbose=verbose, compression=compression)
    with h5.File(output, "a") as fp:
        for args in sorted(results, key=lambda x: (x[0], x[1])):
            update(fp, *args, **kwds)

def update(
    fp,
    observatory,
    mjd,
    exposures,
    sequences,
    fibers: bool = False,
    verbose: bool = False,
    compression: Union[bool, str] = True
):
    _print = print if verbose else lambda *args, **kwargs: None

    group = get_or_create_group(fp, f"{observatory}/{mjd}")
    _print(f"\t{observatory}/{mjd}")

    delete_hdf5_entry(group, "exposures")
    write_models_to_hdf5_group(
        exposures,
        group.create_group("exposures", track_order=True)
    )

    _print(f"\t{observatory}/{mjd}/exposures")

    if len(sequences) > 0:
        delete_hdf5_entry(group, "sequences")
        sequences_group = group.create_group("sequences")
        for image_type, entries in sequences.items():
            sequences_group.create_dataset(image_type, data=np.array(entries))
            _print(f"\t{observatory}/{mjd}/sequences/{image_type}")

    if fibers:
        fibers_group = get_or_create_group(fp, f"{observatory}/{mjd}/fibers")
        done = set()
        for exposure in exposures:
            if not exposure.targets:
                continue

            reference_id_string = str(
                exposure.config_id if exposure.fps else exposure.plate_id
            )
            if reference_id_string in done:
                continue

            delete_hdf5_entry(fibers_group, reference_id_string)
            write_models_to_hdf5_group(
                exposure.targets,
                fibers_group.create_group(reference_id_string, track_order=True)
            )
            done.add(reference_id_string)
            _print(f"\t{observatory}/{mjd}/fibers/{reference_id_string}")


def get_or_create_group(fp, group_name):
    try:
        return fp[group_name]
    except KeyError:
        return fp.create_group(group_name)


def delete_hdf5_entry(fp, group_name):
    try:
        del fp[group_name]
    except KeyError:
        pass


def get_hdf5_dtype(pydantic_type, sample_value=None):
    """
    Map Pydantic field types to appropriate HDF5/NumPy dtypes.

    Args:
        pydantic_type: The Pydantic field type annotation
        sample_value: A sample value to help determine string lengths, etc.

    Returns:
        Appropriate NumPy dtype for HDF5
    """
    # Handle Union types (including Optional)
    if get_origin(pydantic_type) is Union:
        # For Optional[T] (Union[T, None]), use the non-None type
        args = get_args(pydantic_type)
        non_none_types = [arg for arg in args if arg is not type(None)]
        if non_none_types:
            pydantic_type = non_none_types[0]

    # Handle List types
    if get_origin(pydantic_type) is list:
        inner_type = get_args(pydantic_type)[0]
        return get_hdf5_dtype(inner_type, sample_value)

    # Basic type mappings
    type_mapping = {
        np.int64: np.int64,
        int: np.int64,
        float: np.float64,
        bool: np.bool_,
        str: 'S',  # Will be handled specially for variable length
        bytes: np.bytes_,
        datetime.datetime: 'S19',  # ISO format YYYY-MM-DDTHH:MM:SS
        datetime.date: 'S10',      # ISO format YYYY-MM-DD
        datetime.time: 'S8',       # Format HH:MM:SS
    }

    # Direct type mapping
    if pydantic_type in type_mapping:
        dtype = type_mapping[pydantic_type]

        # Handle string length determination
        if dtype == 'S' and sample_value is not None:
            if isinstance(sample_value, (list, tuple)):
                max_len = max(len(str(v)) for v in sample_value) if sample_value else 1
            else:
                max_len = len(str(sample_value)) if sample_value else 1
            return f'S{max_len}'
        elif dtype == 'S':
            return 'S100'  # Default string length

        return dtype

    # Handle Enum types
    if isinstance(pydantic_type, type) and issubclass(pydantic_type, Enum):
        # Store enum values as strings
        if sample_value is not None:
            if isinstance(sample_value, (list, tuple)):
                max_len = max(len(str(v.value)) for v in sample_value) if sample_value else 1
            else:
                max_len = len(str(sample_value.value)) if sample_value else 1
            return f'S{max_len}'
        return 'S50'

    # Handle Literal types
    if get_origin(pydantic_type) is Literal:
        args = get_args(pydantic_type)
        if all(isinstance(arg, str) for arg in args):
            max_len = max(len(arg) for arg in args) if args else 1
            return f'S{max_len}'
        elif all(isinstance(arg, int) for arg in args):
            return np.int64
        elif all(isinstance(arg, float) for arg in args):
            return np.float64
        elif all(isinstance(arg, bool) for arg in args):
            return np.bool_

    # Default fallback - try to convert to string
    return 'S100'
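
Aside: a few illustrative calls to get_hdf5_dtype (a sketch, not part of the shipped module; the expected results follow from the mappings above):

    from typing import List, Literal, Optional
    from almanac.io import get_hdf5_dtype

    get_hdf5_dtype(int)                    # numpy.int64
    get_hdf5_dtype(Optional[float])        # numpy.float64 (the None arm is stripped)
    get_hdf5_dtype(List[int])              # numpy.int64 (recurses into the element type)
    get_hdf5_dtype(Literal["apo", "lco"])  # 'S3' (length of the longest literal)
    get_hdf5_dtype(str, "hello world")     # 'S11' (length of the sample value)
    get_hdf5_dtype(str)                    # 'S100' (default string length)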
def extract_field_data(models: List[BaseModel], field_name: str) -> List[Any]:
    """Extract data for a specific field from all models."""
    return [getattr(model, field_name) for model in models]

def convert_value_for_hdf5(value, target_dtype):
    """Convert a Python value to be compatible with HDF5 storage."""
    if value is None:
        if target_dtype.char == 'S':
            return b''
        elif target_dtype == np.bool_:
            return False
        else:
            return 0  # or np.nan for float types

    if isinstance(value, Enum):
        return str(value.value).encode('utf-8') if target_dtype.char == 'S' else str(value.value)

    if isinstance(value, datetime.datetime):
        return value.isoformat().encode('utf-8')

    if isinstance(value, datetime.date):
        return value.isoformat().encode('utf-8')

    if isinstance(value, datetime.time):
        return value.isoformat().encode('utf-8')

    if isinstance(value, str) and target_dtype.char == 'S':
        return value.encode('utf-8')

    if isinstance(value, list):
        # Handle lists by converting each element
        return [convert_value_for_hdf5(v, target_dtype) for v in value]

    return value
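
Aside: the conversion rules above in brief (a sketch, not part of the shipped module): None becomes an empty/zero sentinel, dates become ISO bytes, and lists convert element-wise:

    import datetime
    import numpy as np
    from almanac.io import convert_value_for_hdf5

    convert_value_for_hdf5(None, np.dtype('S10'))                       # b''
    convert_value_for_hdf5(None, np.dtype(np.int64))                    # 0
    convert_value_for_hdf5("apo", np.dtype('S10'))                      # b'apo'
    convert_value_for_hdf5(datetime.date(2024, 1, 1), np.dtype('S10'))  # b'2024-01-01'
    convert_value_for_hdf5([1, None, 3], np.dtype(np.int64))            # [1, 0, 3]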
def write_models_to_hdf5_group(
    models: List[BaseModel],
    hdf5_group: h5.Group,
    chunk_size: int = 1000,
    compression: str = None
):
    """
    Write a list of Pydantic models to an HDF5 group as separate datasets per field.

    Args:
        models: List of Pydantic model instances (all same type)
        hdf5_group: HDF5 group to write datasets to
        chunk_size: Chunk size for HDF5 datasets (for performance)
        compression: Compression algorithm ('gzip', 'lzf', 'szip', None)
    """
    model_type = type(models[0])

    fields = { **model_type.model_fields, **model_type.model_computed_fields }

    data = {
        field_name: extract_field_data(models, field_name) for field_name in fields.keys()
    }
    return _write_models_to_hdf5_group(
        fields,
        data,
        hdf5_group,
        chunk_size=chunk_size,
        compression=compression
    )


def _write_models_to_hdf5_group(
    fields,
    data,
    hdf5_group,
    chunk_size: int = 1000,
    compression: str = None
):
    num_records = None

    for field_name, field_spec in fields.items():

        # Extract data for this field from all models
        field_data = data[field_name]
        if num_records is None:
            num_records = len(field_data)

        # Determine the appropriate HDF5 dtype
        if isinstance(field_spec, FieldInfo):
            field_type = field_spec.annotation
        else:
            field_type = field_spec.return_type

        hdf5_dtype = get_hdf5_dtype(field_type, field_data)

        # Convert values for HDF5 storage
        converted_data = [convert_value_for_hdf5(value, np.dtype(hdf5_dtype))
                          for value in field_data]

        # Handle variable-length data (like lists)
        if any(isinstance(value, list) for value in converted_data):
            # Create variable-length dataset
            dt = h5.special_dtype(vlen=np.dtype(hdf5_dtype))
            dataset = hdf5_group.create_dataset(
                field_name,
                (num_records,),
                dtype=dt,
                chunks=True if num_records > chunk_size else None,
                compression=compression if num_records > chunk_size else None
            )
            dataset[:] = converted_data
        else:
            # Create regular dataset
            np_array = np.array(converted_data, dtype=hdf5_dtype)

            chunks = (min(chunk_size, num_records),) if num_records > chunk_size else None
            compression_setting = compression if num_records > chunk_size else None

            dataset = hdf5_group.create_dataset(
                field_name,
                data=np_array,
                chunks=chunks,
                compression=compression_setting
            )

        # Add the description, even if it is an empty string.
        dataset.attrs["description"] = field_spec.description or ""
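
Aside: a minimal sketch (not part of the package) of walking the HDF5 layout that write_almanac/update produce, namely {observatory}/{mjd}/exposures, .../sequences, and optionally .../fibers; the file name is illustrative:

    import h5py as h5

    with h5.File("almanac.h5", "r") as fp:
        for observatory in fp:              # e.g. "apo", "lco"
            for mjd in fp[observatory]:
                night = fp[observatory][mjd]
                # one dataset per Exposure field, each with a "description" attribute
                for name, dataset in night["exposures"].items():
                    print(observatory, mjd, name, dataset.shape,
                          dataset.attrs.get("description", ""))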
almanac/logger.py
ADDED
@@ -0,0 +1,27 @@
import logging
import colorlog

from almanac import config

def get_formatter():
    return colorlog.ColoredFormatter(
        "%(log_color)s%(asctime)s [%(levelname)s] %(message)s",
        log_colors={
            "DEBUG": "cyan",
            "INFO": "white",
            "WARNING": "yellow",
            "ERROR": "red",
            "CRITICAL": "bold_red",
        },
        datefmt="%Y-%m-%d %H:%M:%S",
    )

def get_logger():
    handler = colorlog.StreamHandler()
    logger = logging.getLogger()
    logger.setLevel(int(config.logging_level))
    logger.addHandler(handler)
    handler.setFormatter(get_formatter())
    return logger

logger = get_logger()
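
Aside: a usage sketch (not part of the module); importing almanac.logger configures the root logger and exposes a ready-to-use logger:

    from almanac.logger import logger

    logger.info("starting almanac query")  # illustrative message
    logger.warning("no exposures found")   # illustrative message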
almanac/qa.py
ADDED
@@ -0,0 +1,24 @@
from astropy.table import Table
from importlib import resources

def get_bad_exposures():
    with resources.as_file(resources.files("almanac.etc") / "bad_exposures.csv") as fp:
        t = Table.read(fp)
    # We want a lookup table for (observatory, mjd, exposure)
    t["exposure"] = t["exposure"].filled(-1)
    d = {}
    for row in t:
        key = (str(row["observatory"]), int(row["mjd"]), int(row["exposure"]))
        try:
            plate = str(row["plate"])
        except Exception:
            plate = -999
        d[key] = {
            "image_type": str(row["image_type"]),
            "plate": plate,
            "notes": str(row["notes"])
        }
    return d


lookup_bad_exposures = get_bad_exposures()
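
Aside: a usage sketch (not part of the module; the key values below are made up). The table is keyed by (observatory, mjd, exposure), with exposure filled as -1 where the CSV leaves it blank:

    from almanac.qa import lookup_bad_exposures

    entry = lookup_bad_exposures.get(("apo", 59300, 38120001))  # hypothetical key
    if entry is not None:
        print(entry["image_type"], entry["plate"], entry["notes"])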
almanac/stash/data_models.py
File without changes
almanac/stash/plugmap_models.py
ADDED
@@ -0,0 +1,165 @@
from enum import Enum
from typing import List, Literal, Optional
from pydantic import BaseModel, Field

# NOTE (editorial): PluggedHoleTypes, ObjTypes, PlannedHoleTypes, and TargetTypes
# are referenced below but are neither defined nor imported in this stashed
# module; they presumably live elsewhere in the package (this file sits under
# almanac/stash/).


class PluggedHole(BaseModel):
    """
    Frozen data class representing a plug map object.

    Corresponds to the PLUGMAPOBJ struct from the C typedef.
    """

    class Config:
        frozen = True

    obj_id: List[int] = Field(alias="objId", min_items=5, max_items=5, description="Object IDs (5 elements)")
    hole_type: PluggedHoleTypes = Field(alias="holeType", description="Type of hole")
    ra: float = Field(..., description="Right ascension in degrees")
    dec: float = Field(..., description="Declination in degrees")
    mag: List[float] = Field(..., min_items=5, max_items=5, description="Magnitudes (5 elements)")
    star_l: float = Field(alias="starL", description="Star likelihood")
    exp_l: float = Field(alias="expL", description="Exponential likelihood")
    de_vauc_l: float = Field(alias="deVaucL", description="de Vaucouleurs likelihood")
    obj_type: ObjTypes = Field(alias="objType", description="Object type")
    x_focal: float = Field(alias="xFocal", description="X focal plane coordinate")
    y_focal: float = Field(alias="yFocal", description="Y focal plane coordinate")
    spectrograph_id: int = Field(alias="spectrographId", description="Spectrograph ID")
    fiber_id: int = Field(alias="fiberId", description="Fiber ID")
    throughput: int = Field(description="Throughput value")
    prim_target: int = Field(alias="primTarget", description="Primary target flag")
    sec_target: int = Field(alias="secTarget", description="Secondary target flag")


class PlateHole(BaseModel):
    """
    Frozen data class representing the STRUCT1 typedef.

    Contains comprehensive target and observation metadata.
    """

    class Config:
        frozen = True

    # Basic target information
    planned_hole_type: PlannedHoleTypes = Field(alias="holetype", max_length=16, description="Hole type string")
    target_type: TargetTypes = Field(alias="targettype", max_length=9, description="Target type string")
    source_type: str = Field(alias="sourcetype", max_length=9, description="Source type string")  # TODO
    target_ra: float = Field(..., description="Target right ascension")
    target_dec: float = Field(..., description="Target declination")
    target_ids: str = Field(alias="targetids")

    # Plate and fiber information
    iplateinput: int = Field(..., description="Plate input ID")
    pointing: int = Field(..., description="Pointing number")
    offset: int = Field(..., description="Offset value")
    planned_fiber_id: int = Field(alias="fiberid", description="Fiber ID")
    block: int = Field(..., description="Block number")
    iguide: int = Field(..., description="Guide flag")

    # Focal plane coordinates
    xf_default: float = Field(..., description="Default X focal coordinate")
    yf_default: float = Field(..., description="Default Y focal coordinate")
    xfocal: float = Field(..., description="X focal plane coordinate")
    yfocal: float = Field(..., description="Y focal plane coordinate")

    # Spectroscopic parameters
    lambda_eff: float = Field(..., description="Effective wavelength")
    zoffset: float = Field(..., description="Z offset")
    bluefiber: int = Field(..., description="Blue fiber flag")
    chunk: int = Field(..., description="Chunk number")
    ifinal: int = Field(..., description="Final flag")

    # File information
    origfile: str = Field(..., max_length=2, description="Original file identifier")
    fileindx: int = Field(..., description="File index")

    # Physical properties
    diameter: float = Field(..., description="Diameter")
    buffer: float = Field(..., description="Buffer size")
    priority: int = Field(..., description="Target priority")

    # Status flags
    assigned: int = Field(..., description="Assigned flag")
    conflicted: int = Field(..., description="Conflicted flag")
    ranout: int = Field(..., description="Ran out flag")
    outside: int = Field(..., description="Outside flag")

    # MaNGA specific
    mangaid: Optional[str] = Field(default="", max_length=12, description="MaNGA ID")
    ifudesign: Optional[int] = Field(default=-999, description="IFU design ID")
    ifudesignsize: Optional[int] = Field(default=-999, description="IFU design size")
    bundle_size: Optional[int] = Field(default=-999, description="Bundle size")
    fiber_size: Optional[float] = Field(default=-999.0, description="Fiber size")
    ifuid: Optional[int] = Field(default=-999, description="IFU ID")

    # Photometric data
    tmass_j: float = Field(..., description="2MASS J magnitude")
    tmass_h: float = Field(..., description="2MASS H magnitude")
    tmass_k: float = Field(..., description="2MASS K magnitude")
    gsc_vmag: float = Field(..., description="GSC V magnitude")
    tyc_bmag: float = Field(..., description="Tycho B magnitude")
    tyc_vmag: float = Field(..., description="Tycho V magnitude")

    # Array photometry
    mfd_mag: List[float] = Field(..., min_items=6, max_items=6, description="MFD magnitudes (6 elements)")
    usnob_mag: List[float] = Field(..., min_items=5, max_items=5, description="USNO-B magnitudes (5 elements)")

    # Spectral parameters
    sp_param_source: str = Field(..., max_length=3, description="Spectral parameter source")
    sp_params: List[float] = Field(..., min_items=4, max_items=4, description="Spectral parameters (4 elements)")
    sp_param_err: List[float] = Field(..., min_items=4, max_items=4, description="Spectral parameter errors (4 elements)")

    # Target selection flags
    marvels_target1: int = Field(..., description="MARVELS target flag 1")
    marvels_target2: int = Field(..., description="MARVELS target flag 2")
    boss_target1: int = Field(..., description="BOSS target flag 1")
    boss_target2: int = Field(..., description="BOSS target flag 2")
    ancillary_target1: int = Field(..., description="Ancillary target flag 1")
    ancillary_target2: int = Field(..., description="Ancillary target flag 2")
    segue2_target1: int = Field(..., description="SEGUE-2 target flag 1")
    segue2_target2: int = Field(..., description="SEGUE-2 target flag 2")
    segueb_target1: int = Field(..., description="SEGUE-B target flag 1")
    segueb_target2: int = Field(..., description="SEGUE-B target flag 2")
    apogee_target1: int = Field(..., description="APOGEE target flag 1")
    apogee_target2: int = Field(..., description="APOGEE target flag 2")
    #apogee2_target1: int = Field(..., description="APOGEE-2 target flag 1")
    #apogee2_target2: int = Field(..., description="APOGEE-2 target flag 2")
    #apogee2_target3: int = Field(..., description="APOGEE-2 target flag 3")
    manga_target1: int = Field(..., description="MaNGA target flag 1")
    manga_target2: int = Field(..., description="MaNGA target flag 2")
    #manga_target3: int = Field(..., description="MaNGA target flag 3")
    #eboss_target0: int = Field(..., description="eBOSS target flag 0")
    #eboss_target1: int = Field(..., description="eBOSS target flag 1")
    #eboss_target2: int = Field(..., description="eBOSS target flag 2")
    #eboss_target_id: int = Field(..., description="eBOSS target ID")

    # SDSS imaging data
    run: int = Field(..., description="SDSS run number")
    rerun: str = Field(..., max_length=6, description="SDSS rerun")
    camcol: int = Field(..., description="SDSS camera column")
    field: int = Field(..., description="SDSS field number")
    id: int = Field(..., description="Object ID")

    # Photometric measurements (all 5-element arrays for ugriz bands)
    psfflux: List[float] = Field(..., min_items=5, max_items=5, description="PSF flux (5 bands)")
    psfflux_ivar: List[float] = Field(..., min_items=5, max_items=5, description="PSF flux inverse variance (5 bands)")
    fiberflux: List[float] = Field(..., min_items=5, max_items=5, description="Fiber flux (5 bands)")
    fiberflux_ivar: List[float] = Field(..., min_items=5, max_items=5, description="Fiber flux inverse variance (5 bands)")
    fiber2flux: List[float] = Field(..., min_items=5, max_items=5, description="Fiber2 flux (5 bands)")
    fiber2flux_ivar: List[float] = Field(..., min_items=5, max_items=5, description="Fiber2 flux inverse variance (5 bands)")
    psfmag: List[float] = Field(..., min_items=5, max_items=5, description="PSF magnitude (5 bands)")
    fibermag: List[float] = Field(..., min_items=5, max_items=5, description="Fiber magnitude (5 bands)")
    fiber2mag: List[float] = Field(..., min_items=5, max_items=5, description="Fiber2 magnitude (5 bands)")
    planned_mag: List[float] = Field(..., min_items=5, max_items=5, description="Magnitude (5 bands)")

    # Astrometric data
    epoch: float = Field(..., description="Epoch of observation")
    pmra: float = Field(..., description="Proper motion in RA")
    pmdec: float = Field(..., description="Proper motion in Dec")


class PluggedTarget(PlateHole, PluggedHole):
    pass
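
Aside: the models above lean on pydantic field aliases so that snake_case attributes populate from the names used in the source plug map files; a standalone sketch of that pattern (not part of the package):

    from pydantic import BaseModel, Field

    class Hole(BaseModel):
        x_focal: float = Field(alias="xFocal")
        y_focal: float = Field(alias="yFocal")

    hole = Hole.model_validate({"xFocal": 12.3, "yFocal": -4.5})
    print(hole.x_focal, hole.y_focal)  # 12.3 -4.5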
almanac/utils.py
ADDED
@@ -0,0 +1,141 @@
import numpy as np
from time import time
from datetime import datetime
from itertools import cycle, groupby
from typing import Tuple, Union, Optional, List, Dict, Any
from astropy.table import Table
from astropy.time import Time
from astropy.io.registry import register_identifier, register_reader, register_writer
from pydl.pydlutils.yanny import is_yanny, read_table_yanny, write_table_yanny


register_identifier("yanny", Table, is_yanny)
register_reader("yanny", Table, read_table_yanny)
register_writer("yanny", Table, write_table_yanny)


def group_contiguous(v):
    groups = []
    for k, g in groupby(enumerate(sorted(v)), lambda x: x[1] - x[0]):
        group = list(map(lambda x: x[1], g))
        groups.append((group[0], group[-1]))
    return groups
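
Aside: an illustrative call (not part of the module); group_contiguous collapses sorted integers into (first, last) runs, e.g. for summarising exposure number ranges:

    group_contiguous([1, 2, 3, 7, 8, 10])  # [(1, 3), (7, 8), (10, 10)]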
def get_observatories(apo: bool, lco: bool) -> Tuple[str, ...]:
    """Get observatory names based on boolean flags.

    Args:
        apo: Whether to include APO observatory
        lco: Whether to include LCO observatory

    Returns:
        Tuple of observatory names ("apo", "lco", or both)
    """
    if apo and not lco:
        return ("apo",)
    elif lco and not apo:
        return ("lco",)
    else:
        return ("apo", "lco")


def timestamp_to_mjd(v: float) -> float:
    """Convert Unix timestamp to Modified Julian Date (MJD).

    Args:
        v: Unix timestamp in seconds

    Returns:
        Modified Julian Date as float
    """
    # The Unix epoch (1970-01-01T00:00 UTC) is MJD 40587.0; the extra 0.5 here
    # shifts the integer day rollover from UTC midnight to UTC noon.
    return (v / 86400.0) + 40587.5


def get_current_mjd() -> int:
    """Get current Modified Julian Date as integer.

    Returns:
        Current MJD as integer
    """
    return int(timestamp_to_mjd(time()))


def datetime_to_mjd(date: str) -> int:
    """Convert date string to Modified Julian Date.

    Args:
        date: Date string in format "YYYY-MM-DD"

    Returns:
        Modified Julian Date as integer
    """
    # Note: strptime yields a naive datetime, so .timestamp() interprets the
    # date in the local timezone.
    return int(timestamp_to_mjd(datetime.strptime(date, "%Y-%m-%d").timestamp()))

def mjd_to_datetime(mjd: float) -> datetime:
    """Convert Modified Julian Date to datetime object.

    Args:
        mjd: Modified Julian Date

    Returns:
        Datetime object
    """
    return Time(mjd, format='mjd').datetime
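
Aside: illustrative round trips (not part of the module); the exact integers depend on the local timezone and the noon rollover noted above:

    datetime_to_mjd("2024-01-01")  # ~60310
    mjd_to_datetime(60310)         # datetime.datetime(2024, 1, 1, 0, 0)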
def parse_mjds(mjd: Optional[int], mjd_start: Optional[int], mjd_end: Optional[int],
               date: Optional[str], date_start: Optional[str], date_end: Optional[str],
               earliest_mjd: int = 0) -> Tuple[Union[int, range, Tuple[int, ...]], int, int]:
    """Parse MJD and date parameters to determine observation date range.

    Args:
        mjd: Single MJD value (can be negative for relative to current)
        mjd_start: Start MJD for range (can be negative for relative to current)
        mjd_end: End MJD for range (can be negative for relative to current)
        date: Single date string in "YYYY-MM-DD" format
        date_start: Start date string in "YYYY-MM-DD" format
        date_end: End date string in "YYYY-MM-DD" format
        earliest_mjd: Earliest allowed MJD value (default: 0)

    Returns:
        Tuple containing:
        - MJD values (single int, range, or tuple)
        - Start MJD (int)
        - End MJD (int)

    Raises:
        ValueError: If more than one time specification method is provided
        RuntimeError: If no valid time specification is found
    """
    has_mjd_range = mjd_start is not None or mjd_end is not None
    has_date_range = date_start is not None or date_end is not None

    current_mjd = get_current_mjd()
    n_given = sum([has_mjd_range, has_date_range, mjd is not None, date is not None])
    if n_given > 1:
        raise ValueError(
            "Cannot specify more than one of --mjd, --mjd-start/--mjd-end, --date, --date-start/--date-end"
        )
    if n_given == 0:
        return ((current_mjd, ), current_mjd, current_mjd)
    if mjd is not None:
        if mjd < 0:
            mjd += current_mjd
        return ((mjd, ), mjd, mjd)
    if has_mjd_range:
        mjd_start = mjd_start or earliest_mjd
        if mjd_start < 0:
            mjd_start += current_mjd
        mjd_end = mjd_end or current_mjd
        if mjd_end < 0:
            mjd_end += current_mjd
        return (range(mjd_start, 1 + mjd_end), mjd_start, mjd_end)
    if date is not None:
        mjd = datetime_to_mjd(date)
        return ((mjd, ), mjd, mjd)
    if has_date_range:
        mjd_start = earliest_mjd if date_start is None else datetime_to_mjd(date_start)
        mjd_end = current_mjd if date_end is None else datetime_to_mjd(date_end)
        return (range(mjd_start, 1 + mjd_end), mjd_start, mjd_end)

    raise RuntimeError("Should not be able to get here")
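
Aside: illustrative calls to parse_mjds (not part of the module); exactly one time specification may be given, and negative MJDs are taken relative to the current MJD:

    mjds, start, end = parse_mjds(None, -7, None, None, None, None)
    # start == get_current_mjd() - 7, end == get_current_mjd(); mjds is the inclusive range

    mjds, start, end = parse_mjds(None, None, None, "2024-01-01", None, None)
    # a single day: mjds == (start,) and start == end

    parse_mjds(59300, None, None, "2024-01-01", None, None)
    # raises ValueError (more than one time specification)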