pycontrails-0.59.0-cp314-cp314-macosx_10_15_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycontrails/__init__.py +70 -0
- pycontrails/_version.py +34 -0
- pycontrails/core/__init__.py +30 -0
- pycontrails/core/aircraft_performance.py +679 -0
- pycontrails/core/airports.py +228 -0
- pycontrails/core/cache.py +889 -0
- pycontrails/core/coordinates.py +174 -0
- pycontrails/core/fleet.py +483 -0
- pycontrails/core/flight.py +2185 -0
- pycontrails/core/flightplan.py +228 -0
- pycontrails/core/fuel.py +140 -0
- pycontrails/core/interpolation.py +702 -0
- pycontrails/core/met.py +2936 -0
- pycontrails/core/met_var.py +387 -0
- pycontrails/core/models.py +1321 -0
- pycontrails/core/polygon.py +549 -0
- pycontrails/core/rgi_cython.cpython-314-darwin.so +0 -0
- pycontrails/core/vector.py +2249 -0
- pycontrails/datalib/__init__.py +12 -0
- pycontrails/datalib/_met_utils/metsource.py +746 -0
- pycontrails/datalib/ecmwf/__init__.py +73 -0
- pycontrails/datalib/ecmwf/arco_era5.py +345 -0
- pycontrails/datalib/ecmwf/common.py +114 -0
- pycontrails/datalib/ecmwf/era5.py +554 -0
- pycontrails/datalib/ecmwf/era5_model_level.py +490 -0
- pycontrails/datalib/ecmwf/hres.py +804 -0
- pycontrails/datalib/ecmwf/hres_model_level.py +466 -0
- pycontrails/datalib/ecmwf/ifs.py +287 -0
- pycontrails/datalib/ecmwf/model_levels.py +435 -0
- pycontrails/datalib/ecmwf/static/model_level_dataframe_v20240418.csv +139 -0
- pycontrails/datalib/ecmwf/variables.py +268 -0
- pycontrails/datalib/geo_utils.py +261 -0
- pycontrails/datalib/gfs/__init__.py +28 -0
- pycontrails/datalib/gfs/gfs.py +656 -0
- pycontrails/datalib/gfs/variables.py +104 -0
- pycontrails/datalib/goes.py +764 -0
- pycontrails/datalib/gruan.py +343 -0
- pycontrails/datalib/himawari/__init__.py +27 -0
- pycontrails/datalib/himawari/header_struct.py +266 -0
- pycontrails/datalib/himawari/himawari.py +671 -0
- pycontrails/datalib/landsat.py +589 -0
- pycontrails/datalib/leo_utils/__init__.py +5 -0
- pycontrails/datalib/leo_utils/correction.py +266 -0
- pycontrails/datalib/leo_utils/landsat_metadata.py +300 -0
- pycontrails/datalib/leo_utils/search.py +250 -0
- pycontrails/datalib/leo_utils/sentinel_metadata.py +748 -0
- pycontrails/datalib/leo_utils/static/bq_roi_query.sql +6 -0
- pycontrails/datalib/leo_utils/vis.py +59 -0
- pycontrails/datalib/sentinel.py +650 -0
- pycontrails/datalib/spire/__init__.py +5 -0
- pycontrails/datalib/spire/exceptions.py +62 -0
- pycontrails/datalib/spire/spire.py +604 -0
- pycontrails/ext/bada.py +42 -0
- pycontrails/ext/cirium.py +14 -0
- pycontrails/ext/empirical_grid.py +140 -0
- pycontrails/ext/synthetic_flight.py +431 -0
- pycontrails/models/__init__.py +1 -0
- pycontrails/models/accf.py +425 -0
- pycontrails/models/apcemm/__init__.py +8 -0
- pycontrails/models/apcemm/apcemm.py +983 -0
- pycontrails/models/apcemm/inputs.py +226 -0
- pycontrails/models/apcemm/static/apcemm_yaml_template.yaml +183 -0
- pycontrails/models/apcemm/utils.py +437 -0
- pycontrails/models/cocip/__init__.py +29 -0
- pycontrails/models/cocip/cocip.py +2742 -0
- pycontrails/models/cocip/cocip_params.py +305 -0
- pycontrails/models/cocip/cocip_uncertainty.py +291 -0
- pycontrails/models/cocip/contrail_properties.py +1530 -0
- pycontrails/models/cocip/output_formats.py +2270 -0
- pycontrails/models/cocip/radiative_forcing.py +1260 -0
- pycontrails/models/cocip/radiative_heating.py +520 -0
- pycontrails/models/cocip/unterstrasser_wake_vortex.py +508 -0
- pycontrails/models/cocip/wake_vortex.py +396 -0
- pycontrails/models/cocip/wind_shear.py +120 -0
- pycontrails/models/cocipgrid/__init__.py +9 -0
- pycontrails/models/cocipgrid/cocip_grid.py +2552 -0
- pycontrails/models/cocipgrid/cocip_grid_params.py +138 -0
- pycontrails/models/dry_advection.py +602 -0
- pycontrails/models/emissions/__init__.py +21 -0
- pycontrails/models/emissions/black_carbon.py +599 -0
- pycontrails/models/emissions/emissions.py +1353 -0
- pycontrails/models/emissions/ffm2.py +336 -0
- pycontrails/models/emissions/static/default-engine-uids.csv +239 -0
- pycontrails/models/emissions/static/edb-gaseous-v29b-engines.csv +596 -0
- pycontrails/models/emissions/static/edb-nvpm-v29b-engines.csv +215 -0
- pycontrails/models/extended_k15.py +1327 -0
- pycontrails/models/humidity_scaling/__init__.py +37 -0
- pycontrails/models/humidity_scaling/humidity_scaling.py +1075 -0
- pycontrails/models/humidity_scaling/quantiles/era5-model-level-quantiles.pq +0 -0
- pycontrails/models/humidity_scaling/quantiles/era5-pressure-level-quantiles.pq +0 -0
- pycontrails/models/issr.py +210 -0
- pycontrails/models/pcc.py +326 -0
- pycontrails/models/pcr.py +154 -0
- pycontrails/models/ps_model/__init__.py +18 -0
- pycontrails/models/ps_model/ps_aircraft_params.py +381 -0
- pycontrails/models/ps_model/ps_grid.py +701 -0
- pycontrails/models/ps_model/ps_model.py +1000 -0
- pycontrails/models/ps_model/ps_operational_limits.py +525 -0
- pycontrails/models/ps_model/static/ps-aircraft-params-20250328.csv +69 -0
- pycontrails/models/ps_model/static/ps-synonym-list-20250328.csv +104 -0
- pycontrails/models/sac.py +442 -0
- pycontrails/models/tau_cirrus.py +183 -0
- pycontrails/physics/__init__.py +1 -0
- pycontrails/physics/constants.py +117 -0
- pycontrails/physics/geo.py +1138 -0
- pycontrails/physics/jet.py +968 -0
- pycontrails/physics/static/iata-cargo-load-factors-20250221.csv +74 -0
- pycontrails/physics/static/iata-passenger-load-factors-20250221.csv +74 -0
- pycontrails/physics/thermo.py +551 -0
- pycontrails/physics/units.py +472 -0
- pycontrails/py.typed +0 -0
- pycontrails/utils/__init__.py +1 -0
- pycontrails/utils/dependencies.py +66 -0
- pycontrails/utils/iteration.py +13 -0
- pycontrails/utils/json.py +187 -0
- pycontrails/utils/temp.py +50 -0
- pycontrails/utils/types.py +163 -0
- pycontrails-0.59.0.dist-info/METADATA +179 -0
- pycontrails-0.59.0.dist-info/RECORD +123 -0
- pycontrails-0.59.0.dist-info/WHEEL +6 -0
- pycontrails-0.59.0.dist-info/licenses/LICENSE +178 -0
- pycontrails-0.59.0.dist-info/licenses/NOTICE +43 -0
- pycontrails-0.59.0.dist-info/top_level.txt +3 -0

pycontrails/datalib/leo_utils/correction.py
@@ -0,0 +1,266 @@
"""Support for overlaying flight and contrail data on Landsat & Sentinel images."""

from typing import Literal, overload

import numpy as np
import numpy.typing as npt
import pandas as pd
import pyproj
import shapely
import xarray as xr


def ephemeris_ecef_to_utm(ephemeris_df: pd.DataFrame, utm_crs: pyproj.CRS) -> pd.DataFrame:
    """Convert ephemeris data from ECEF to UTM coordinates.

    Parameters
    ----------
    ephemeris_df : pd.DataFrame
        DataFrame containing the ephemeris data with columns:
        - 'EPHEMERIS_ECEF_X': ECEF X coordinates (meters)
        - 'EPHEMERIS_ECEF_Y': ECEF Y coordinates (meters)
        - 'EPHEMERIS_ECEF_Z': ECEF Z coordinates (meters)
        - 'EPHEMERIS_TIME': Timestamps (as datetime64[ns])
    utm_crs : pyproj.CRS
        The UTM coordinate reference system to convert to.

    Returns
    -------
    pd.DataFrame
        A DataFrame with columns:
        - 'x': UTM easting (meters)
        - 'y': UTM northing (meters)
        - 'z': Altitude (meters)
        - 't': Timestamps (as datetime64[ns])
    """
    # Define the source CRS: ECEF (Earth-Centered, Earth-Fixed) with WGS84 datum
    source_crs = pyproj.CRS(proj="geocent", datum="WGS84")

    # Create a transformer object to convert from source CRS to target CRS
    # The default order for ECEF is (X, Y, Z) and for UTM is (Easting, Northing, Height)
    transformer = pyproj.Transformer.from_crs(source_crs, utm_crs)

    ecef_x = ephemeris_df["EPHEMERIS_ECEF_X"].to_numpy()
    ecef_y = ephemeris_df["EPHEMERIS_ECEF_Y"].to_numpy()
    ecef_z = ephemeris_df["EPHEMERIS_ECEF_Z"].to_numpy()
    ecef_t = ephemeris_df["EPHEMERIS_TIME"].to_numpy()

    x, y, h = transformer.transform(ecef_x, ecef_y, ecef_z)
    return pd.DataFrame({"x": x, "y": y, "z": h, "t": ecef_t})
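A minimal usage sketch of `ephemeris_ecef_to_utm`; the ECEF values are invented and the UTM zone is an arbitrary choice:

>>> import pandas as pd
>>> import pyproj
>>> ephemeris_df = pd.DataFrame(
...     {
...         "EPHEMERIS_ECEF_X": [3_900_000.0, 3_920_000.0],  # made-up LEO-scale values
...         "EPHEMERIS_ECEF_Y": [1_050_000.0, 1_000_000.0],
...         "EPHEMERIS_ECEF_Z": [5_500_000.0, 5_490_000.0],
...         "EPHEMERIS_TIME": pd.to_datetime(["2024-05-31T10:00:00", "2024-05-31T10:00:01"]),
...     }
... )
>>> utm_crs = pyproj.CRS.from_epsg(32633)  # UTM zone 33N, arbitrary for this sketch
>>> track = ephemeris_ecef_to_utm(ephemeris_df, utm_crs)
>>> list(track.columns)
['x', 'y', 'z', 't']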
@overload
def scan_angle_correction(
    ds: xr.Dataset,
    x: npt.NDArray[np.floating],
    y: npt.NDArray[np.floating],
    z: npt.NDArray[np.floating],
    *,
    maxiter: int = ...,
    tol: float = ...,
    full_output: Literal[False] = ...,
) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]: ...


@overload
def scan_angle_correction(
    ds: xr.Dataset,
    x: npt.NDArray[np.floating],
    y: npt.NDArray[np.floating],
    z: npt.NDArray[np.floating],
    *,
    maxiter: int = ...,
    tol: float = ...,
    full_output: Literal[True],
) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating], npt.NDArray[np.bool_]]: ...


def scan_angle_correction(
    ds: xr.Dataset,
    x: npt.NDArray[np.floating],
    y: npt.NDArray[np.floating],
    z: npt.NDArray[np.floating],
    *,
    maxiter: int = 5,
    tol: float = 10.0,
    full_output: bool = False,
) -> (
    tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]
    | tuple[npt.NDArray[np.floating], npt.NDArray[np.floating], npt.NDArray[np.bool_]]
):
    """Apply the scan angle correction to the given x, y, z coordinates.

    Parameters
    ----------
    ds : xr.Dataset
        The dataset containing the viewing azimuth angle (VAA)
        and viewing zenith angle (VZA) arrays. The units for both are degrees.
    x : npt.NDArray[np.floating]
        The x coordinates of the points to correct. Should be in the
        correct UTM coordinate system.
    y : npt.NDArray[np.floating]
        The y coordinates of the points to correct. Should be in the
        correct UTM coordinate system.
    z : npt.NDArray[np.floating]
        The z coordinates (altitude in meters) of the points to correct.
    maxiter : int, optional
        Maximum number of iterations to perform. Default is 5.
    tol : float, optional
        Tolerance for convergence in meters. Default is 10.0.
    full_output : bool, optional
        If True, return an additional boolean array indicating which points
        successfully converged. Default is False.

    Returns
    -------
    tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]
        The corrected x and y coordinates as numpy arrays in the UTM
        coordinate system. Points that are not contained in the non-nan
        region of the image will contain nan values in the output arrays.
        If ``full_output`` is True, a boolean convergence mask is also returned.
    """
    # Confirm that x is monotonically increasing and y is decreasing
    # (This is assumed in the filtering logic below)
    if not np.all(np.diff(ds["x"]) > 0.0):
        msg = "ds['x'] must be monotonically increasing"
        raise ValueError(msg)
    if not np.all(np.diff(ds["y"]) < 0.0):
        msg = "ds['y'] must be monotonically decreasing"
        raise ValueError(msg)

    try:
        ds = ds[["VZA", "VAA"]].load()  # nice to load these once here instead of repeatedly below
    except KeyError as e:
        raise KeyError("ds must contain the variables 'VZA' and 'VAA'") from e

    x = np.atleast_1d(x).astype(np.float64, copy=False)
    y = np.atleast_1d(y).astype(np.float64, copy=False)
    z = np.atleast_1d(z).astype(np.float64, copy=False)

    x_proj = xr.DataArray(x.copy(), dims="points")  # need to copy because we modify below
    y_proj = xr.DataArray(y.copy(), dims="points")  # need to copy because we modify below

    offset0 = np.zeros_like(x)

    for _ in range(maxiter):
        # Note that we often get nan values back after interpolation
        # It's arguably better to propagate nans than to keep the original values
        # because the original values may be in the nan region of the image
        # (or outside the image entirely)
        vza, vaa = _interpolate_angles(ds, x_proj, y_proj)

        # Convert to radians
        vza_rad = np.deg2rad(vza)
        vaa_rad = np.deg2rad(vaa)

        # Apply spherical projection offset
        offset = z * np.tan(vza_rad)
        dx_offset = offset * np.sin(vaa_rad)
        dy_offset = offset * np.cos(vaa_rad)

        # Update the newly predicted x and y locations
        x_proj[:] = x - dx_offset
        y_proj[:] = y - dy_offset

        error = np.abs(offset - offset0)
        converged = error < tol
        if np.all(converged | np.isnan(error)):
            break

        offset0 = offset

    if full_output:
        return x_proj.values, y_proj.values, converged
    return x_proj.values, y_proj.values
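A runnable sketch of `scan_angle_correction` on a synthetic scene, assuming uniform placeholder viewing angles (real angle grids vary across the swath):

>>> import numpy as np
>>> import xarray as xr
>>> xs = np.arange(500_000.0, 510_000.0, 30.0)  # increasing easting, 30 m pixels
>>> ys = np.arange(4_010_000.0, 4_000_000.0, -30.0)  # decreasing northing
>>> shape = (ys.size, xs.size)
>>> ds = xr.Dataset(
...     {"VZA": (("y", "x"), np.full(shape, 5.0)), "VAA": (("y", "x"), np.full(shape, 100.0))},
...     coords={"x": xs, "y": ys},
... )
>>> x = np.array([505_000.0])
>>> y = np.array([4_005_000.0])
>>> z = np.array([11_000.0])  # contrail altitude in meters
>>> x_corr, y_corr, converged = scan_angle_correction(ds, x, y, z, full_output=True)

With uniform angles the offset is identical on the second pass, so the iteration converges immediately after the first update.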
def _interpolate_angles(
    ds: xr.Dataset,
    xi: xr.DataArray,
    yi: xr.DataArray,
) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]:
    """
    Interpolate view zenith angle (VZA) and view azimuth angle (VAA).

    Parameters
    ----------
    ds : xr.Dataset
        Dataset containing at least the variables "VZA" and "VAA",
        with coordinates ``x`` and ``y`` that define the spatial grid.
    xi : xr.DataArray
        X-coordinates of the target points for interpolation.
        Must be the same length as ``yi``.
    yi : xr.DataArray
        Y-coordinates of the target points for interpolation.
        Must be the same length as ``xi``.

    Returns
    -------
    vza : npt.NDArray[np.floating]
        Interpolated view zenith angles at the given (xi, yi) points.
    vaa : npt.NDArray[np.floating]
        Interpolated view azimuth angles at the given (xi, yi) points.
    """
    interped = ds[["VZA", "VAA"]].interp(x=xi, y=yi)
    return interped["VZA"].values, interped["VAA"].values


def estimate_scan_time(
    ephemeris_df: pd.DataFrame,
    utm_crs: pyproj.CRS,
    x: npt.NDArray[np.floating],
    y: npt.NDArray[np.floating],
) -> npt.NDArray[np.datetime64]:
    """Estimate the scan time for the given x, y pixels.

    Project the x, y coordinates (in the UTM coordinate system) onto the
    ephemeris track and interpolate the time.

    Parameters
    ----------
    ephemeris_df : pd.DataFrame
        DataFrame containing the ephemeris data with columns:
        - 'EPHEMERIS_ECEF_X': ECEF X coordinates (meters)
        - 'EPHEMERIS_ECEF_Y': ECEF Y coordinates (meters)
        - 'EPHEMERIS_ECEF_Z': ECEF Z coordinates (meters)
        - 'EPHEMERIS_TIME': Timestamps (as datetime64[ns])
    utm_crs : pyproj.CRS
        The UTM coordinate reference system used for projection.
    x : npt.NDArray[np.floating]
        The x coordinates of the points to estimate the scan time for. Should be in the
        correct UTM coordinate system.
    y : npt.NDArray[np.floating]
        The y coordinates of the points to estimate the scan time for. Should be in the
        correct UTM coordinate system.

    Returns
    -------
    npt.NDArray[np.datetime64]
        The estimated scan times as a numpy datetime64[ns] array. Points for which
        ``x`` or ``y`` are nan will have ``NaT`` as the corresponding output value.
    """
    ephemeris_utm = ephemeris_ecef_to_utm(ephemeris_df, utm_crs)

    valid = np.isfinite(x) & np.isfinite(y)
    points = shapely.points(x[valid], y[valid])

    line = shapely.LineString(ephemeris_utm[["x", "y"]])

    distance = line.project(points)
    projected = line.interpolate(distance)
    projected_x = shapely.get_coordinates(projected)[:, 0]

    if ephemeris_utm["t"].dtype != "datetime64[ns]":
        # This could be relaxed if needed, but datetime64[ns] is what we expect
        raise ValueError("ephemeris_utm['t'] must have dtype 'datetime64[ns]'")
    if not ephemeris_utm["x"].diff().iloc[1:].lt(0).all():
        # This should always be the case for sun-synchronous satellites
        raise ValueError("ephemeris_utm['x'] must be strictly decreasing for np.interp")

    out = np.full(x.shape, np.datetime64("NaT", "ns"))
    out[valid] = np.interp(
        projected_x,
        ephemeris_utm["x"].iloc[::-1],
        ephemeris_utm["t"].iloc[::-1].astype(int),
    ).astype("datetime64[ns]")

    return out
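Continuing the sketches above, `estimate_scan_time` projects the corrected pixels onto the ephemeris ground track. The toy ephemeris happens to satisfy the strictly-decreasing easting check, as a real descending pass would:

>>> scan_time = estimate_scan_time(ephemeris_df, utm_crs, x_corr, y_corr)
>>> scan_time.dtype
dtype('<M8[ns]')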

pycontrails/datalib/leo_utils/landsat_metadata.py
@@ -0,0 +1,300 @@
"""Download and parse Landsat metadata from USGS.

This module requires `GeoPandas <https://geopandas.org/>`_.
"""

import re

import numpy as np
import numpy.typing as npt
import pandas as pd
import pyproj
import xarray as xr

from pycontrails.core import cache
from pycontrails.datalib.leo_utils import correction
from pycontrails.utils import dependencies

try:
    import geopandas as gpd
except ModuleNotFoundError as exc:
    dependencies.raise_module_not_found_error(
        name="landsat_metadata module",
        package_name="geopandas",
        module_not_found_error=exc,
        pycontrails_optional_package="sat",
    )

try:
    import shapely
except ModuleNotFoundError as exc:
    dependencies.raise_module_not_found_error(
        name="landsat_metadata module",
        package_name="shapely",
        module_not_found_error=exc,
        pycontrails_optional_package="sat",
    )


def _split_antimeridian(polygon: shapely.Polygon) -> shapely.MultiPolygon:
    """Split a polygon into two polygons at the antimeridian.

    This implementation assumes that the passed polygon is actually situated
    on the antimeridian and does not simultaneously cross the meridian.
    """
    # Shift the x-coordinates of the polygon to the right
    # The `valid_poly` will not be valid if the polygon spans the meridian
    valid_poly = shapely.ops.transform(lambda x, y: (x if x >= 0.0 else x + 360.0, y), polygon)
    if not valid_poly.is_valid:
        raise ValueError("Invalid polygon before splitting at the antimeridian.")

    eastern_hemi = shapely.geometry.box(0.0, -90.0, 180.0, 90.0)
    western_hemi = shapely.geometry.box(180.0, -90.0, 360.0, 90.0)

    western_poly = valid_poly.intersection(western_hemi)
    western_poly = shapely.ops.transform(lambda x, y: (x - 360.0, y), western_poly)  # shift back
    eastern_poly = valid_poly.intersection(eastern_hemi)

    if not western_poly.is_valid or not eastern_poly.is_valid:
        raise ValueError("Invalid polygon after splitting at the antimeridian.")

    return shapely.MultiPolygon([western_poly, eastern_poly])
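A quick sanity check of the geometry `_split_antimeridian` is designed for, using a made-up footprint straddling 180° longitude:

>>> import shapely
>>> footprint = shapely.Polygon([(179.0, 10.0), (-179.0, 10.0), (-179.0, 12.0), (179.0, 12.0)])
>>> split = _split_antimeridian(footprint)
>>> len(split.geoms)  # one piece per hemisphere
2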
def _download_landsat_metadata() -> pd.DataFrame:
    """Download and parse the Landsat metadata CSV file from USGS.

    See `the USGS documentation <https://www.usgs.gov/landsat-missions/landsat-collection-2-metadata>`_
    for more details.
    """
    p = "https://landsat.usgs.gov/landsat/metadata_service/bulk_metadata_files/LANDSAT_OT_C2_L1.csv.gz"

    usecols = [
        "Display ID",
        "Ordering ID",
        "Collection Category",
        "Start Time",
        "Stop Time",
        "Day/Night Indicator",
        "Satellite",
        "Corner Upper Left Latitude",
        "Corner Upper Left Longitude",
        "Corner Upper Right Latitude",
        "Corner Upper Right Longitude",
        "Corner Lower Left Latitude",
        "Corner Lower Left Longitude",
        "Corner Lower Right Latitude",
        "Corner Lower Right Longitude",
    ]

    df = pd.read_csv(p, compression="gzip", usecols=usecols)

    # Convert column dtypes
    df["Start Time"] = pd.to_datetime(df["Start Time"], format="ISO8601")
    df["Stop Time"] = pd.to_datetime(df["Stop Time"], format="ISO8601")
    df["Display ID"] = df["Display ID"].astype("string[pyarrow]")
    df["Ordering ID"] = df["Ordering ID"].astype("string[pyarrow]")
    df["Collection Category"] = df["Collection Category"].astype("string[pyarrow]")
    df["Day/Night Indicator"] = df["Day/Night Indicator"].astype("string[pyarrow]")

    return df


def _landsat_metadata_to_geodataframe(df: pd.DataFrame) -> gpd.GeoDataFrame:
    """Convert Landsat metadata DataFrame to GeoDataFrame with polygons."""
    polys = shapely.polygons(
        df[
            [
                "Corner Upper Left Longitude",
                "Corner Upper Left Latitude",
                "Corner Upper Right Longitude",
                "Corner Upper Right Latitude",
                "Corner Lower Right Longitude",
                "Corner Lower Right Latitude",
                "Corner Lower Left Longitude",
                "Corner Lower Left Latitude",
                "Corner Upper Left Longitude",
                "Corner Upper Left Latitude",
            ]
        ]
        .to_numpy()
        .reshape(-1, 5, 2)
    )

    out = gpd.GeoDataFrame(
        df,
        geometry=polys,
        crs="EPSG:4326",
        columns=[
            "Display ID",
            "Ordering ID",
            "Collection Category",
            "Start Time",
            "Stop Time",
            "Day/Night Indicator",
            "Satellite",
            "geometry",
        ],
    )

    # Split polygons that cross the antimeridian
    invalid = ~out.is_valid
    out.loc[invalid, "geometry"] = out.loc[invalid, "geometry"].apply(_split_antimeridian)
    return out


def open_landsat_metadata(
    cachestore: cache.CacheStore | None = None, update_cache: bool = False
) -> gpd.GeoDataFrame:
    """Download and parse the Landsat metadata CSV file from USGS.

    By default, the metadata is cached in a disk cache store.

    Parameters
    ----------
    cachestore : cache.CacheStore | None, optional
        Cache store for Landsat metadata.
        Defaults to :class:`cache.DiskCacheStore`.
    update_cache : bool, optional
        Force update to cached Landsat metadata. The remote file is updated
        daily, so this is useful to ensure you have the latest metadata.

    Returns
    -------
    gpd.GeoDataFrame
        Processed Landsat metadata. The ``geometry`` column contains polygons
        representing the footprints of the Landsat scenes.
    """
    if cachestore is None:
        cache_root = cache._get_user_cache_dir()
        cache_dir = f"{cache_root}/landsat_metadata"
        cachestore = cache.DiskCacheStore(cache_dir=cache_dir)

    cache_key = "LANDSAT_OT_C2_L1.pq"
    if cachestore.exists(cache_key) and not update_cache:
        return gpd.read_parquet(cachestore.path(cache_key))

    df = _download_landsat_metadata()
    gdf = _landsat_metadata_to_geodataframe(df)
    gdf.to_parquet(cachestore.path(cache_key), index=False)
    return gdf
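Typical use of `open_landsat_metadata` might look like the following; note the first call downloads the full USGS bulk CSV (a large file) before caching, and the region and day/night value below are assumptions for the sketch:

>>> gdf = open_landsat_metadata()
>>> day = gdf[gdf["Day/Night Indicator"] == "DAY"]  # "DAY" is an assumed USGS value
>>> roi = shapely.geometry.box(-10.0, 35.0, 5.0, 45.0)  # hypothetical region of interest
>>> scenes = day[day.intersects(roi)]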
def parse_ephemeris_landsat(ang_content: str) -> pd.DataFrame:
    """Find the EPHEMERIS group in an ANG text file and extract the data arrays.

    Parameters
    ----------
    ang_content : str
        The content of the ANG file as a string.

    Returns
    -------
    pd.DataFrame
        A :class:`pandas.DataFrame` containing the ephemeris track with columns:
        - EPHEMERIS_TIME: Timestamps of the ephemeris data.
        - EPHEMERIS_ECEF_X: ECEF X coordinates.
        - EPHEMERIS_ECEF_Y: ECEF Y coordinates.
        - EPHEMERIS_ECEF_Z: ECEF Z coordinates.
    """
    # Find GROUP = EPHEMERIS, capture everything non-greedily (.*?) until END_GROUP = EPHEMERIS
    pattern = r"GROUP\s*=\s*EPHEMERIS\s*(.*?)\s*END_GROUP\s*=\s*EPHEMERIS"
    match = re.search(pattern, ang_content, flags=re.DOTALL)
    if match is None:
        raise ValueError("No data found for EPHEMERIS group in the ANG content.")
    ephemeris_content = match.group(1)

    pattern = r"EPHEMERIS_EPOCH_YEAR\s*=\s*(\d+)"
    match = re.search(pattern, ephemeris_content)
    if match is None:
        raise ValueError("No data found for EPHEMERIS_EPOCH_YEAR in the ANG content.")
    year = int(match.group(1))

    pattern = r"EPHEMERIS_EPOCH_DAY\s*=\s*(\d+)"
    match = re.search(pattern, ephemeris_content)
    if match is None:
        raise ValueError("No data found for EPHEMERIS_EPOCH_DAY in the ANG content.")
    day = int(match.group(1))

    pattern = r"EPHEMERIS_EPOCH_SECONDS\s*=\s*(\d+\.\d+)"
    match = re.search(pattern, ephemeris_content)
    if match is None:
        raise ValueError("No data found for EPHEMERIS_EPOCH_SECONDS in the ANG content.")
    seconds = float(match.group(1))

    t0 = (
        pd.Timestamp(year=year, month=1, day=1)
        + pd.Timedelta(days=day - 1)
        + pd.Timedelta(seconds=seconds)
    )

    # Find all the EPHEMERIS_* arrays
    array_patterns = {
        "EPHEMERIS_TIME": r"EPHEMERIS_TIME\s*=\s*\((.*?)\)",
        "EPHEMERIS_ECEF_X": r"EPHEMERIS_ECEF_X\s*=\s*\((.*?)\)",
        "EPHEMERIS_ECEF_Y": r"EPHEMERIS_ECEF_Y\s*=\s*\((.*?)\)",
        "EPHEMERIS_ECEF_Z": r"EPHEMERIS_ECEF_Z\s*=\s*\((.*?)\)",
    }

    arrays = {}
    for key, pattern in array_patterns.items():
        match = re.search(pattern, ephemeris_content, flags=re.DOTALL)
        if match is None:
            raise ValueError(f"No data found for {key} in the ANG content.")
        data_str = match.group(1)

        data_list = [float(x.strip()) for x in data_str.split(",")]
        if key == "EPHEMERIS_TIME":
            data_list = [t0 + pd.Timedelta(seconds=t) for t in data_list]
        arrays[key] = data_list

    return pd.DataFrame(arrays)
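A sketch of `parse_ephemeris_landsat` on a synthetic ANG fragment that mimics the structure the regexes expect (real ANG files ship with each Landsat Level-1 product):

>>> ang_content = """
... GROUP = EPHEMERIS
...   EPHEMERIS_EPOCH_YEAR = 2024
...   EPHEMERIS_EPOCH_DAY = 152
...   EPHEMERIS_EPOCH_SECONDS = 36000.000
...   EPHEMERIS_TIME = (0.0, 1.0)
...   EPHEMERIS_ECEF_X = (3900000.0, 3920000.0)
...   EPHEMERIS_ECEF_Y = (1050000.0, 1000000.0)
...   EPHEMERIS_ECEF_Z = (5500000.0, 5490000.0)
... END_GROUP = EPHEMERIS
... """
>>> df = parse_ephemeris_landsat(ang_content)
>>> df["EPHEMERIS_TIME"].iloc[0]  # epoch day 152 of 2024 + 36000 s
Timestamp('2024-05-31 10:00:00')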
def get_time_delay_detector(
    ds: xr.Dataset,
    ephemeris: pd.DataFrame,
    utm_crs: pyproj.CRS,
    x: npt.NDArray[np.floating],
    y: npt.NDArray[np.floating],
) -> npt.NDArray[np.timedelta64]:
    """Return the detector time delay at the given (x, y) coordinates.

    Parameters
    ----------
    ds : xr.Dataset
        The Landsat dataset containing the VAA variable.
    ephemeris : pd.DataFrame
        The ephemeris DataFrame containing the EPHEMERIS_TIME and ECEF coordinates.
    utm_crs : pyproj.CRS
        The UTM coordinate reference system for the Landsat scene.
    x : npt.NDArray[np.floating]
        The x-coordinates of the pixels in the dataset's coordinate system.
    y : npt.NDArray[np.floating]
        The y-coordinates of the pixels in the dataset's coordinate system.

    Returns
    -------
    npt.NDArray[np.timedelta64]
        The time delay for each (x, y) coordinate as a timedelta64 array.
    """
    x, y = np.atleast_1d(x, y)

    ephemeris_utm = correction.ephemeris_ecef_to_utm(ephemeris, utm_crs)
    eph_angle_radians = -np.arctan2(ephemeris_utm["y"].diff(), ephemeris_utm["x"].diff())
    avg_eph_angle = (eph_angle_radians * 180.0 / np.pi).mean()

    vaa = ds["VAA"].interp(x=xr.DataArray(x, dims="points"), y=xr.DataArray(y, dims="points"))

    is_odd = np.isfinite(vaa) & ((vaa > avg_eph_angle) | (vaa < avg_eph_angle - 180.0))
    is_even = np.isfinite(vaa) & ~is_odd

    out = np.full(x.shape, fill_value=np.timedelta64("NaT", "ns"), dtype="timedelta64[ns]")
    # We use an offset of +/- 2 seconds as a very rough estimate of the time delay
    # This may only be accurate up to 1 second, but it's better than nothing
    out[is_even] = np.timedelta64(-2000000000, "ns")  # -2 seconds
    out[is_odd] = np.timedelta64(2000000000, "ns")  # 2 seconds

    return out
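The odd/even detector delay is presumably meant to be combined with the nominal scan time from `correction.estimate_scan_time`; a speculative end-to-end sketch, reusing names from the earlier sketches:

>>> delay = get_time_delay_detector(ds, ephemeris_df, utm_crs, x_corr, y_corr)
>>> pixel_time = correction.estimate_scan_time(ephemeris_df, utm_crs, x_corr, y_corr) + delay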