pycontrails 0.55.0__cp313-cp313-macosx_11_0_arm64.whl → 0.57.0__cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pycontrails might be problematic. Click here for more details.
- pycontrails/_version.py +3 -3
- pycontrails/core/airports.py +1 -1
- pycontrails/core/cache.py +3 -3
- pycontrails/core/fleet.py +1 -1
- pycontrails/core/flight.py +47 -43
- pycontrails/core/met_var.py +1 -1
- pycontrails/core/rgi_cython.cpython-313-darwin.so +0 -0
- pycontrails/core/vector.py +28 -30
- pycontrails/datalib/geo_utils.py +261 -0
- pycontrails/datalib/gfs/gfs.py +58 -64
- pycontrails/datalib/goes.py +193 -399
- pycontrails/datalib/himawari/__init__.py +27 -0
- pycontrails/datalib/himawari/header_struct.py +266 -0
- pycontrails/datalib/himawari/himawari.py +654 -0
- pycontrails/datalib/landsat.py +49 -26
- pycontrails/datalib/leo_utils/__init__.py +5 -0
- pycontrails/datalib/leo_utils/correction.py +266 -0
- pycontrails/datalib/leo_utils/landsat_metadata.py +300 -0
- pycontrails/datalib/{_leo_utils → leo_utils}/search.py +1 -1
- pycontrails/datalib/leo_utils/sentinel_metadata.py +748 -0
- pycontrails/datalib/sentinel.py +236 -93
- pycontrails/models/dry_advection.py +1 -1
- pycontrails/models/extended_k15.py +8 -8
- {pycontrails-0.55.0.dist-info → pycontrails-0.57.0.dist-info}/METADATA +4 -2
- {pycontrails-0.55.0.dist-info → pycontrails-0.57.0.dist-info}/RECORD +31 -23
- /pycontrails/datalib/{_leo_utils → leo_utils}/static/bq_roi_query.sql +0 -0
- /pycontrails/datalib/{_leo_utils → leo_utils}/vis.py +0 -0
- {pycontrails-0.55.0.dist-info → pycontrails-0.57.0.dist-info}/WHEEL +0 -0
- {pycontrails-0.55.0.dist-info → pycontrails-0.57.0.dist-info}/licenses/LICENSE +0 -0
- {pycontrails-0.55.0.dist-info → pycontrails-0.57.0.dist-info}/licenses/NOTICE +0 -0
- {pycontrails-0.55.0.dist-info → pycontrails-0.57.0.dist-info}/top_level.txt +0 -0
pycontrails/datalib/landsat.py
CHANGED
|
@@ -1,16 +1,17 @@
|
|
|
1
|
-
"""Support for
|
|
1
|
+
"""Support for Landsat 8 Collection 1 imagery retrieval through Google Cloud Platform."""
|
|
2
2
|
|
|
3
3
|
from __future__ import annotations
|
|
4
4
|
|
|
5
5
|
from collections.abc import Iterable
|
|
6
6
|
|
|
7
7
|
import numpy as np
|
|
8
|
+
import numpy.typing as npt
|
|
8
9
|
import pandas as pd
|
|
9
10
|
import xarray as xr
|
|
10
11
|
|
|
11
12
|
from pycontrails.core import Flight, cache
|
|
12
|
-
from pycontrails.datalib.
|
|
13
|
-
from pycontrails.datalib.
|
|
13
|
+
from pycontrails.datalib.leo_utils import search
|
|
14
|
+
from pycontrails.datalib.leo_utils.vis import equalize, normalize
|
|
14
15
|
from pycontrails.utils import dependencies
|
|
15
16
|
|
|
16
17
|
try:
|
|
@@ -60,24 +61,28 @@ def query(
|
|
|
60
61
|
extent: str | None = None,
|
|
61
62
|
columns: list[str] | None = None,
|
|
62
63
|
) -> pd.DataFrame:
|
|
63
|
-
"""Find Landsat 8
|
|
64
|
+
"""Find Landsat 8 Collection 1 imagery within spatiotemporal region of interest.
|
|
64
65
|
|
|
65
66
|
This function requires access to the
|
|
66
67
|
`Google BigQuery API <https://cloud.google.com/bigquery?hl=en>`__
|
|
67
68
|
and uses the `BigQuery python library <https://cloud.google.com/python/docs/reference/bigquery/latest/index.html>`__.
|
|
68
69
|
|
|
70
|
+
See :func:`pycontrails.datalib.leo_utils.landsat_metadata.open_landsat_metadata`
|
|
71
|
+
to download and parse the daily bulk Landsat metadata CSV file from USGS. This CSV holds
|
|
72
|
+
Collection 2 metadata, so includes the most recent scenes from Landsat 8 and 9.
|
|
73
|
+
|
|
69
74
|
Parameters
|
|
70
75
|
----------
|
|
71
76
|
start_time : np.datetime64
|
|
72
77
|
Start of time period for search
|
|
73
78
|
end_time : np.datetime64
|
|
74
79
|
End of time period for search
|
|
75
|
-
extent : str, optional
|
|
80
|
+
extent : str | None, optional
|
|
76
81
|
Spatial region of interest as a GeoJSON string. If not provided, defaults
|
|
77
82
|
to a global extent.
|
|
78
|
-
columns : list[str], optional
|
|
83
|
+
columns : list[str] | None, optional
|
|
79
84
|
Columns to return from Google
|
|
80
|
-
`BigQuery table <https://console.cloud.google.com/bigquery?p=bigquery-public-data&d=cloud_storage_geo_index&t=landsat_index&page=table
|
|
85
|
+
`BigQuery table <https://console.cloud.google.com/bigquery?p=bigquery-public-data&d=cloud_storage_geo_index&t=landsat_index&page=table>`__.
|
|
81
86
|
By default, returns imagery base URL and sensing time.
|
|
82
87
|
|
|
83
88
|
Returns
|
|
@@ -99,7 +104,7 @@ def intersect(
|
|
|
99
104
|
flight: Flight,
|
|
100
105
|
columns: list[str] | None = None,
|
|
101
106
|
) -> pd.DataFrame:
|
|
102
|
-
"""Find Landsat 8
|
|
107
|
+
"""Find Landsat 8 Collection 1 imagery intersecting with flight track.
|
|
103
108
|
|
|
104
109
|
This function will return all scenes with a bounding box that includes flight waypoints
|
|
105
110
|
both before and after the sensing time.
|
|
@@ -108,13 +113,17 @@ def intersect(
|
|
|
108
113
|
`Google BigQuery API <https://cloud.google.com/bigquery?hl=en>`__
|
|
109
114
|
and uses the `BigQuery python library <https://cloud.google.com/python/docs/reference/bigquery/latest/index.html>`__.
|
|
110
115
|
|
|
116
|
+
See :func:`pycontrails.datalib.leo_utils.landsat_metadata.open_landsat_metadata`
|
|
117
|
+
to download and parse the daily bulk Landsat metadata CSV file from USGS. This CSV holds
|
|
118
|
+
Collection 2 metadata, so includes the most recent scenes from Landsat 8 and 9.
|
|
119
|
+
|
|
111
120
|
Parameters
|
|
112
121
|
----------
|
|
113
122
|
flight : Flight
|
|
114
123
|
Flight for intersection
|
|
115
|
-
columns : list[str], optional
|
|
124
|
+
columns : list[str] | None, optional
|
|
116
125
|
Columns to return from Google
|
|
117
|
-
`BigQuery table <https://console.cloud.google.com/bigquery?p=bigquery-public-data&d=cloud_storage_geo_index&t=landsat_index&page=table
|
|
126
|
+
`BigQuery table <https://console.cloud.google.com/bigquery?p=bigquery-public-data&d=cloud_storage_geo_index&t=landsat_index&page=table>`__.
|
|
118
127
|
By default, returns imagery base URL and sensing time.
|
|
119
128
|
|
|
120
129
|
Returns
|
|
@@ -131,20 +140,31 @@ def intersect(
|
|
|
131
140
|
|
|
132
141
|
|
|
133
142
|
class Landsat:
|
|
134
|
-
"""Support for Landsat 8
|
|
143
|
+
"""Support for Landsat 8 Collection 1 data handling.
|
|
144
|
+
|
|
145
|
+
This interface does not support Landsat Collection 2, which includes all new
|
|
146
|
+
scenes from Landsat 8 and 9 and benefits from improved calibration and processing
|
|
147
|
+
algorithms. The USGS stopped updating Collection 1 in 2021, so this interface
|
|
148
|
+
works only with legacy data. In addition, Collection 1 does not include viewing angle
|
|
149
|
+
data, preventing scan angle or sensing time corrections.
|
|
150
|
+
|
|
151
|
+
To access Landsat Collection 2 data, use one of the following tools:
|
|
152
|
+
|
|
153
|
+
- `USGS M2M API <https://m2m.cr.usgs.gov/>`__ (requires registration)
|
|
154
|
+
- `USGS Earth Explorer <https://earthexplorer.usgs.gov/>`__ (requires registration;
|
|
155
|
+
includes a web interface)
|
|
156
|
+
- `Amazon Web Services (AWS) <https://registry.opendata.aws/usgs-landsat/>`__ (requester pays)
|
|
157
|
+
- `Google Earth Engine <https://developers.google.com/earth-engine/datasets/catalog/landsat>`__
|
|
158
|
+
(requires registration; stricter usage limits than the other options)
|
|
135
159
|
|
|
136
|
-
|
|
137
|
-
transformation software through the
|
|
138
|
-
`pyproj <https://pyproj4.github.io/pyproj/stable/index.html>`__ python interface.
|
|
139
|
-
pyproj is installed as part of the ``sat`` set of optional dependencies
|
|
140
|
-
(``pip install pycontrails[sat]``), but PROJ must be installed manually.
|
|
160
|
+
These services are not yet integrated with pycontrails.
|
|
141
161
|
|
|
142
162
|
Parameters
|
|
143
163
|
----------
|
|
144
164
|
base_url : str
|
|
145
165
|
Base URL of Landsat scene. To find URLs for Landsat scenes at
|
|
146
166
|
specific locations and times, see :func:`query` and :func:`intersect`.
|
|
147
|
-
bands : str |
|
|
167
|
+
bands : str | Iterable[str] | None
|
|
148
168
|
Set of bands to retrieve. The 11 possible bands are represented by
|
|
149
169
|
the string "B1" to "B11". For the Google Landsat contrails color scheme,
|
|
150
170
|
set ``bands=("B9", "B10", "B11")``. For the true color scheme, set
|
|
@@ -155,7 +175,7 @@ class Landsat:
|
|
|
155
175
|
- B8: 15 m
|
|
156
176
|
- B10, B11: 30 m (upsampled from true resolution of 100 m)
|
|
157
177
|
|
|
158
|
-
cachestore : cache.CacheStore, optional
|
|
178
|
+
cachestore : cache.CacheStore | None, optional
|
|
159
179
|
Cache store for Landsat data. If None, a :class:`DiskCacheStore` is used.
|
|
160
180
|
|
|
161
181
|
See Also
|
|
@@ -202,17 +222,19 @@ class Landsat:
|
|
|
202
222
|
|
|
203
223
|
Parameters
|
|
204
224
|
----------
|
|
205
|
-
reflective : str
|
|
225
|
+
reflective : str, optional
|
|
226
|
+
One of {"raw", "radiance", "reflectance"}.
|
|
206
227
|
Whether to return raw values or rescaled radiances or reflectances for reflective bands.
|
|
207
228
|
By default, return reflectances.
|
|
208
|
-
thermal : str
|
|
229
|
+
thermal : str, optional
|
|
230
|
+
One of {"raw", "radiance", "brightness_temperature"}.
|
|
209
231
|
Whether to return raw values or rescaled radiances or brightness temperatures
|
|
210
232
|
for thermal bands. By default, return brightness temperatures.
|
|
211
233
|
|
|
212
234
|
Returns
|
|
213
235
|
-------
|
|
214
|
-
xr.
|
|
215
|
-
|
|
236
|
+
xr.Dataset
|
|
237
|
+
Dataset of Landsat data.
|
|
216
238
|
"""
|
|
217
239
|
if reflective not in ["raw", "radiance", "reflectance"]:
|
|
218
240
|
msg = "reflective band processing must be one of ['raw', 'radiance', 'reflectance']"
|
|
@@ -423,14 +445,15 @@ def _read_image_coordinates(meta: str, band: str) -> tuple[np.ndarray, np.ndarra
|
|
|
423
445
|
|
|
424
446
|
def extract_landsat_visualization(
|
|
425
447
|
ds: xr.Dataset, color_scheme: str = "true"
|
|
426
|
-
) -> tuple[np.
|
|
448
|
+
) -> tuple[npt.NDArray[np.float32], pyproj.CRS, tuple[float, float, float, float]]:
|
|
427
449
|
"""Extract artifacts for visualizing Landsat data with the given color scheme.
|
|
428
450
|
|
|
429
451
|
Parameters
|
|
430
452
|
----------
|
|
431
453
|
ds : xr.Dataset
|
|
432
454
|
Dataset of Landsat data as returned by :meth:`Landsat.get`.
|
|
433
|
-
color_scheme : str
|
|
455
|
+
color_scheme : str, optional
|
|
456
|
+
One of {"true", "google_contrails"}.
|
|
434
457
|
Color scheme to use for visualization. The true color scheme
|
|
435
458
|
requires reflectances for bands B2, B3, and B4; and the
|
|
436
459
|
`Google contrails color scheme <https://research.google/pubs/a-human-labeled-landsat-contrails-dataset>`__
|
|
@@ -442,7 +465,7 @@ def extract_landsat_visualization(
|
|
|
442
465
|
3D RGB array of shape ``(height, width, 3)``.
|
|
443
466
|
src_crs : pyproj.CRS
|
|
444
467
|
Imagery projection
|
|
445
|
-
src_extent : tuple[float,float,float,float]
|
|
468
|
+
src_extent : tuple[float, float, float, float]
|
|
446
469
|
Imagery extent in projected coordinates
|
|
447
470
|
|
|
448
471
|
References
|
|
@@ -551,7 +574,7 @@ def to_google_contrails(ds: xr.Dataset) -> tuple[np.ndarray, pyproj.CRS]:
|
|
|
551
574
|
red = ((signal - lower) / (upper - lower)).clip(0.0, 1.0)
|
|
552
575
|
|
|
553
576
|
# green: cirrus band transmittance
|
|
554
|
-
signal = 1 - rc.values
|
|
577
|
+
signal = 1.0 - rc.values
|
|
555
578
|
lower = 0.8
|
|
556
579
|
upper = 1.0
|
|
557
580
|
green = adapt(((signal - lower) / (upper - lower)).clip(0.0, 1.0))
|
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
"""Support for overlaying flight and contrail data on Landsat & Sentinel images."""
|
|
2
|
+
|
|
3
|
+
from typing import Literal, overload
|
|
4
|
+
|
|
5
|
+
import numpy as np
|
|
6
|
+
import numpy.typing as npt
|
|
7
|
+
import pandas as pd
|
|
8
|
+
import pyproj
|
|
9
|
+
import shapely
|
|
10
|
+
import xarray as xr
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def ephemeris_ecef_to_utm(ephemeris_df: pd.DataFrame, utm_crs: pyproj.CRS) -> pd.DataFrame:
    """Convert ephemeris data from ECEF to UTM coordinates.

    Parameters
    ----------
    ephemeris_df : pd.DataFrame
        DataFrame containing the ephemeris data with columns:
        - 'EPHEMERIS_ECEF_X': ECEF X coordinates (meters)
        - 'EPHEMERIS_ECEF_Y': ECEF Y coordinates (meters)
        - 'EPHEMERIS_ECEF_Z': ECEF Z coordinates (meters)
        - 'EPHEMERIS_TIME': Timestamps (as datetime64[ns])
    utm_crs : pyproj.CRS
        The UTM coordinate reference system to convert to.

    Returns
    -------
    pd.DataFrame
        A DataFrame with columns:
        - 'x': UTM easting (meters)
        - 'y': UTM northing (meters)
        - 'z': Altitude (meters)
        - 't': Timestamps (as datetime64[ns])
    """
    # Source CRS: ECEF (Earth-Centered, Earth-Fixed) on the WGS84 datum
    ecef_crs = pyproj.CRS(proj="geocent", datum="WGS84")

    # Default axis order is (X, Y, Z) for ECEF and (easting, northing, height) for UTM
    to_utm = pyproj.Transformer.from_crs(ecef_crs, utm_crs)

    easting, northing, height = to_utm.transform(
        ephemeris_df["EPHEMERIS_ECEF_X"].to_numpy(),
        ephemeris_df["EPHEMERIS_ECEF_Y"].to_numpy(),
        ephemeris_df["EPHEMERIS_ECEF_Z"].to_numpy(),
    )

    return pd.DataFrame(
        {
            "x": easting,
            "y": northing,
            "z": height,
            "t": ephemeris_df["EPHEMERIS_TIME"].to_numpy(),
        }
    )
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
@overload
def scan_angle_correction(
    ds: xr.Dataset,
    x: npt.NDArray[np.floating],
    y: npt.NDArray[np.floating],
    z: npt.NDArray[np.floating],
    *,
    maxiter: int = ...,
    tol: float = ...,
    full_output: Literal[False] = ...,
) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]: ...


@overload
def scan_angle_correction(
    ds: xr.Dataset,
    x: npt.NDArray[np.floating],
    y: npt.NDArray[np.floating],
    z: npt.NDArray[np.floating],
    *,
    maxiter: int = ...,
    tol: float = ...,
    full_output: Literal[True],
) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating], npt.NDArray[np.bool_]]: ...


def scan_angle_correction(
    ds: xr.Dataset,
    x: npt.NDArray[np.floating],
    y: npt.NDArray[np.floating],
    z: npt.NDArray[np.floating],
    *,
    maxiter: int = 5,
    tol: float = 10.0,
    full_output: bool = False,
) -> (
    tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]
    | tuple[npt.NDArray[np.floating], npt.NDArray[np.floating], npt.NDArray[np.bool_]]
):
    """Apply the scan angle correction to the given x, y, z coordinates.

    Iteratively displaces each point along the satellite viewing direction
    (horizontal offset ``z * tan(VZA)``, decomposed via VAA) until the offset
    changes by less than ``tol`` meters between iterations, or ``maxiter``
    iterations have run.

    Parameters
    ----------
    ds : xr.Dataset
        The dataset containing the viewing azimuth angle (VAA)
        and viewing zenith angle (VZA) arrays. The units for both are degrees.
    x : npt.NDArray[np.floating]
        The x coordinates of the points to correct. Should be in the
        correct UTM coordinate system
    y : npt.NDArray[np.floating]
        The y coordinates of the points to correct. Should be in the
        correct UTM coordinate system.
    z : npt.NDArray[np.floating]
        The z coordinates (altitude in meters) of the points to correct.
    maxiter : int, optional
        Maximum number of iterations to perform. Default is 5.
    tol : float, optional
        Tolerance for convergence in meters. Default is 10.0.
    full_output : bool, optional
        If True, return an additional boolean array indicating which points
        successfully converged. Default is False.

    Returns
    -------
    tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]
        The corrected x and y coordinates as numpy arrays in the UTM
        coordinate system. Points that are not contained in the non-nan
        region of the image will contain nan values in the output arrays.
        If ``full_output`` is True, a third boolean array marks the points
        whose offset change converged to within ``tol``.

    Raises
    ------
    ValueError
        If ``ds["x"]`` is not monotonically increasing or ``ds["y"]`` is not
        monotonically decreasing.
    KeyError
        If ``ds`` does not contain the variables "VZA" and "VAA".
    """
    # Confirm that x is monotonically increasing and y is decreasing
    # (This is assumed in the filtering logic below)
    if not np.all(np.diff(ds["x"]) > 0.0):
        msg = "ds['x'] must be monotonically increasing"
        raise ValueError(msg)
    if not np.all(np.diff(ds["y"]) < 0.0):
        msg = "ds['y'] must be monotonically decreasing"
        raise ValueError(msg)

    try:
        ds = ds[["VZA", "VAA"]].load()  # nice to load these once here instead of repeatedly below
    except KeyError as e:
        raise KeyError("ds must contain the variables 'VZA' and 'VAA'") from e

    x = np.atleast_1d(x).astype(np.float64, copy=False)
    y = np.atleast_1d(y).astype(np.float64, copy=False)
    z = np.atleast_1d(z).astype(np.float64, copy=False)

    x_proj = xr.DataArray(x.copy(), dims="points")  # need to copy because we modify below
    y_proj = xr.DataArray(y.copy(), dims="points")  # need to copy because we modify below

    offset0 = np.zeros_like(x)

    # Initialize the convergence mask before the loop so it is always bound:
    # previously 'converged' was only assigned inside the loop body, raising
    # NameError on the full_output return path when maxiter < 1.
    converged = np.zeros_like(x, dtype=bool)

    for _ in range(maxiter):
        # Note that we often get nan values back after interpolation
        # It's arguably better to propagate nans than to keep the original values
        # because the original values may be in the nan region of the image
        # (or outside the image entirely)
        vza, vaa = _interpolate_angles(ds, x_proj, y_proj)

        # Convert to radians
        vza_rad = np.deg2rad(vza)
        vaa_rad = np.deg2rad(vaa)

        # Apply spherical projection offset
        offset = z * np.tan(vza_rad)
        dx_offset = offset * np.sin(vaa_rad)
        dy_offset = offset * np.cos(vaa_rad)

        # Update the newly predicted x and y locations
        x_proj[:] = x - dx_offset
        y_proj[:] = y - dy_offset

        error = np.abs(offset - offset0)
        converged = error < tol
        if np.all(converged | np.isnan(error)):
            break

        offset0 = offset

    if full_output:
        return x_proj.values, y_proj.values, converged
    return x_proj.values, y_proj.values
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
def _interpolate_angles(
    ds: xr.Dataset,
    xi: xr.DataArray,
    yi: xr.DataArray,
) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]:
    """
    Interpolate view zenith angle (VZA) and view azimuth angle (VAA).

    Parameters
    ----------
    ds : xr.Dataset
        Dataset containing at least the variables "VZA" and "VAA",
        with coordinates ``x`` and ``y`` that define the spatial grid.
    xi : xr.DataArray
        X-coordinates of the target points for interpolation.
        Must be the same length as ``yi``.
    yi : xr.DataArray
        Y-coordinates of the target points for interpolation.
        Must be the same length as ``xi``.

    Returns
    -------
    vza : npt.NDArray[np.floating]
        Interpolated view zenith angles at the given (xi, yi) points.
    vaa : npt.NDArray[np.floating]
        Interpolated view azimuth angles at the given (xi, yi) points.
    """
    # Interpolate both angle fields in one pass over the (xi, yi) points
    sampled = ds[["VZA", "VAA"]].interp(x=xi, y=yi)
    vza = sampled["VZA"].values
    vaa = sampled["VAA"].values
    return vza, vaa
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def estimate_scan_time(
    ephemeris_df: pd.DataFrame,
    utm_crs: pyproj.CRS,
    x: npt.NDArray[np.floating],
    y: npt.NDArray[np.floating],
) -> npt.NDArray[np.datetime64]:
    """Estimate the scan time for the given x, y pixels.

    Project the x, y coordinates (in UTM coordinate system) onto the
    ephemeris track and interpolate the time.

    Parameters
    ----------
    ephemeris_df : pd.DataFrame
        DataFrame containing the ephemeris data with columns:
        - 'EPHEMERIS_ECEF_X': ECEF X coordinates (meters)
        - 'EPHEMERIS_ECEF_Y': ECEF Y coordinates (meters)
        - 'EPHEMERIS_ECEF_Z': ECEF Z coordinates (meters)
        - 'EPHEMERIS_TIME': Timestamps (as datetime64[ns])
    utm_crs : pyproj.CRS
        The UTM coordinate reference system used for projection.
    x : npt.NDArray[np.floating]
        The x coordinates of the points to estimate the scan time for. Should be in the
        correct UTM coordinate system.
    y : npt.NDArray[np.floating]
        The y coordinates of the points to estimate the scan time for. Should be in the
        correct UTM coordinate system.

    Returns
    -------
    npt.NDArray[np.datetime64]
        The estimated scan times as numpy datetime64[ns] array. Points for which
        ``x`` or ``y`` are nan will have ``NaT`` as the corresponding output value.
    """
    track = ephemeris_ecef_to_utm(ephemeris_df, utm_crs)

    # Only finite pixels are projected; the rest stay NaT in the output
    finite = np.isfinite(x) & np.isfinite(y)
    pixel_points = shapely.points(x[finite], y[finite])

    track_line = shapely.LineString(track[["x", "y"]])

    along_track = track_line.project(pixel_points)
    on_track = track_line.interpolate(along_track)
    on_track_x = shapely.get_coordinates(on_track)[:, 0]

    if track["t"].dtype != "datetime64[ns]":
        # This could be relaxed if needed, but datetime64[ns] is what we expect
        raise ValueError("ephemeris_utm['t'] must have dtype 'datetime64[ns]'")
    if not track["x"].diff().iloc[1:].lt(0).all():
        # This should always be the case for sun-synchronous satellites
        raise ValueError("ephemeris_utm['x'] must be strictly decreasing for np.interp")

    # np.interp needs increasing sample points, so reverse the decreasing track;
    # times are interpolated as integer nanoseconds and cast back to datetime64
    scan_times = np.full(x.shape, np.datetime64("NaT", "ns"))
    scan_times[finite] = np.interp(
        on_track_x,
        track["x"].iloc[::-1],
        track["t"].iloc[::-1].astype(int),
    ).astype("datetime64[ns]")

    return scan_times
|