openeo-gfmap 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openeo_gfmap/__init__.py +23 -0
- openeo_gfmap/backend.py +122 -0
- openeo_gfmap/features/__init__.py +17 -0
- openeo_gfmap/features/feature_extractor.py +389 -0
- openeo_gfmap/fetching/__init__.py +21 -0
- openeo_gfmap/fetching/commons.py +213 -0
- openeo_gfmap/fetching/fetching.py +98 -0
- openeo_gfmap/fetching/generic.py +165 -0
- openeo_gfmap/fetching/meteo.py +126 -0
- openeo_gfmap/fetching/s1.py +195 -0
- openeo_gfmap/fetching/s2.py +236 -0
- openeo_gfmap/inference/__init__.py +3 -0
- openeo_gfmap/inference/model_inference.py +347 -0
- openeo_gfmap/manager/__init__.py +31 -0
- openeo_gfmap/manager/job_manager.py +469 -0
- openeo_gfmap/manager/job_splitters.py +144 -0
- openeo_gfmap/metadata.py +24 -0
- openeo_gfmap/preprocessing/__init__.py +22 -0
- openeo_gfmap/preprocessing/cloudmasking.py +268 -0
- openeo_gfmap/preprocessing/compositing.py +74 -0
- openeo_gfmap/preprocessing/interpolation.py +12 -0
- openeo_gfmap/preprocessing/sar.py +64 -0
- openeo_gfmap/preprocessing/scaling.py +65 -0
- openeo_gfmap/preprocessing/udf_cldmask.py +36 -0
- openeo_gfmap/preprocessing/udf_rank.py +37 -0
- openeo_gfmap/preprocessing/udf_score.py +103 -0
- openeo_gfmap/spatial.py +53 -0
- openeo_gfmap/stac/__init__.py +2 -0
- openeo_gfmap/stac/constants.py +51 -0
- openeo_gfmap/temporal.py +22 -0
- openeo_gfmap/utils/__init__.py +23 -0
- openeo_gfmap/utils/build_df.py +48 -0
- openeo_gfmap/utils/catalogue.py +248 -0
- openeo_gfmap/utils/intervals.py +64 -0
- openeo_gfmap/utils/netcdf.py +25 -0
- openeo_gfmap/utils/tile_processing.py +64 -0
- openeo_gfmap-0.1.0.dist-info/METADATA +57 -0
- openeo_gfmap-0.1.0.dist-info/RECORD +40 -0
- openeo_gfmap-0.1.0.dist-info/WHEEL +4 -0
- openeo_gfmap-0.1.0.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,213 @@
|
|
1
|
+
"""
|
2
|
+
Common internal operations within collection extraction logic, such as reprojection.
|
3
|
+
"""
|
4
|
+
|
5
|
+
from functools import partial
|
6
|
+
from typing import Dict, Optional, Sequence, Union
|
7
|
+
|
8
|
+
import openeo
|
9
|
+
from geojson import GeoJSON
|
10
|
+
from openeo.api.process import Parameter
|
11
|
+
from openeo.rest.connection import InputDate
|
12
|
+
from pyproj.crs import CRS
|
13
|
+
from pyproj.exceptions import CRSError
|
14
|
+
|
15
|
+
from openeo_gfmap.spatial import BoundingBoxExtent, SpatialContext
|
16
|
+
from openeo_gfmap.temporal import TemporalContext
|
17
|
+
|
18
|
+
from .fetching import FetchType
|
19
|
+
|
20
|
+
|
21
|
+
def convert_band_names(desired_bands: list, band_dict: dict) -> list:
    """Renames the desired bands to the band names of the collection specified
    in the backend.

    Parameters
    ----------
    desired_bands: list
        List of bands that are desired by the user, written in the OpenEO-GFMAP
        harmonized names convention.
    band_dict: dict
        Dictionary mapping, for a backend, the collection band names to the
        OpenEO-GFMAP harmonized names. This dictionary will be reversed to be
        used within this function.

    Returns
    -------
    backend_band_list: list
        List of band names within the backend collection names.

    Raises
    ------
    KeyError
        If one or more desired bands have no entry in the mapping; the message
        lists every unsupported band instead of only the first one.
    """
    # Reverse the dictionary: harmonized name -> collection name.
    inverse_dict = {v: k for k, v in band_dict.items()}

    # Collect all unknown bands up-front so the error is actionable.
    unknown = [band for band in desired_bands if band not in inverse_dict]
    if unknown:
        raise KeyError(
            f"The following bands are not supported by this collection: "
            f"{unknown}. Supported harmonized names: {sorted(inverse_dict)}"
        )

    return [inverse_dict[band] for band in desired_bands]
|
42
|
+
|
43
|
+
|
44
|
+
def resample_reproject(
    datacube: openeo.DataCube,
    resolution: float,
    epsg_code: Optional[Union[str, int]] = None,
    method: str = "near",
) -> openeo.DataCube:
    """Resample the datacube to the given resolution and, when an EPSG code is
    provided, reproject it to that CRS. The EPSG code is validated client-side
    before being sent to the backend.
    """
    # No target CRS requested: plain resampling only.
    if epsg_code is None:
        return datacube.resample_spatial(resolution=resolution, method=method)

    # Validate the EPSG code locally to fail fast on typos.
    try:
        CRS.from_epsg(int(epsg_code))
    except (CRSError, ValueError) as exc:
        raise ValueError(
            f"Specified target_crs: {epsg_code} is not a valid EPSG code."
        ) from exc

    return datacube.resample_spatial(
        resolution=resolution, projection=epsg_code, method=method
    )
|
66
|
+
|
67
|
+
|
68
|
+
def rename_bands(datacube: openeo.DataCube, mapping: dict) -> openeo.DataCube:
    """Rename the bands from the given mapping scheme.

    Mapping entries whose source band is not present in the datacube are
    silently dropped before renaming.
    """
    available = datacube.metadata.band_names

    # Keep only the mapping entries that apply to this cube's bands.
    applicable = {src: dst for src, dst in mapping.items() if src in available}

    return datacube.rename_labels(
        dimension="bands",
        source=list(applicable.keys()),
        target=list(applicable.values()),
    )
|
80
|
+
|
81
|
+
|
82
|
+
def _load_collection_hybrid(
    connection: openeo.Connection,
    is_stac: bool,
    collection_id_or_url: str,
    bands: list,
    spatial_extent: Union[Dict[str, float], Parameter, None] = None,
    temporal_extent: Union[Sequence[InputDate], Parameter, str, None] = None,
    properties: Optional[dict] = None,
):
    """Wrapper around the load_collection, or load_stac method of the openeo connection."""
    if is_stac:
        cube = connection.load_stac(
            url=collection_id_or_url,
            spatial_extent=spatial_extent,
            temporal_extent=temporal_extent,
            bands=bands,
            properties=properties,
        )
        # load_stac does not set band labels the same way load_collection
        # does; re-label explicitly with the requested band names.
        return cube.rename_labels(dimension="bands", target=bands)

    return connection.load_collection(
        collection_id=collection_id_or_url,
        spatial_extent=spatial_extent,
        temporal_extent=temporal_extent,
        bands=bands,
        properties=properties,
    )
|
109
|
+
|
110
|
+
|
111
|
+
def _load_collection(
    connection: openeo.Connection,
    bands: list,
    collection_name: str,
    spatial_extent: SpatialContext,
    temporal_extent: Optional[TemporalContext],
    fetch_type: FetchType,
    is_stac: bool = False,
    **params,
):
    """Loads a collection from the openeo backend, acting differently depending
    on the fetch type.

    Parameters
    ----------
    connection: openeo.Connection
        Connection to an openEO backend.
    bands: list
        Band names to load, in the collection's own naming convention.
    collection_name: str
        Collection id, or a STAC url when ``is_stac`` is True.
    spatial_extent: SpatialContext
        A BoundingBoxExtent (TILE), a GeoJSON FeatureCollection
        (POINT/POLYGON), or an http(s) URL to a GeoJSON/GeoParquet file
        (POLYGON only).
    temporal_extent: Optional[TemporalContext]
        Start/end dates, or None for atemporal collections such as DEM.
    fetch_type: FetchType
        Type of extraction: tile, point or polygon based.
    is_stac: bool
        Whether to load through load_stac instead of load_collection.
    params: dict
        Additional options: "load_collection" (property filters passed to
        the load call), "update_arguments" (experimental process-graph
        tweaks), "pre_mask" and "pre_merge" (openeo DataCubes).
    """
    load_collection_parameters = params.get("load_collection", {})
    # Bind the collection/STAC choice once; the spatial/temporal arguments
    # differ per fetch type below.
    load_collection_method = partial(
        _load_collection_hybrid, is_stac=is_stac, collection_id_or_url=collection_name
    )

    if (
        temporal_extent is not None
    ):  # Can be ignored for intemporal collections such as DEM
        temporal_extent = [temporal_extent.start_date, temporal_extent.end_date]

    if fetch_type == FetchType.TILE:
        assert isinstance(
            spatial_extent, BoundingBoxExtent
        ), "Please provide only a bounding box for tile based fetching."
        # BoundingBoxExtent is converted to the plain dict the client expects.
        spatial_extent = dict(spatial_extent)
        cube = load_collection_method(
            connection=connection,
            bands=bands,
            spatial_extent=spatial_extent,
            temporal_extent=temporal_extent,
            properties=load_collection_parameters,
        )
    elif fetch_type == FetchType.POINT:
        assert isinstance(
            spatial_extent, GeoJSON
        ), "Please provide only a GeoJSON FeatureCollection for point based fetching."
        assert (
            spatial_extent["type"] == "FeatureCollection"
        ), "Please provide a FeatureCollection type of GeoJSON"
        cube = load_collection_method(
            connection=connection,
            bands=bands,
            spatial_extent=spatial_extent,
            temporal_extent=temporal_extent,
            properties=load_collection_parameters,
        )
    elif fetch_type == FetchType.POLYGON:
        if isinstance(spatial_extent, GeoJSON):
            assert (
                spatial_extent["type"] == "FeatureCollection"
            ), "Please provide a FeatureCollection type of GeoJSON"
        elif isinstance(spatial_extent, str):
            assert spatial_extent.startswith("https://") or spatial_extent.startswith(
                "http://"
            ), "Please provide a valid URL or a path to a GeoJSON file."
        else:
            raise ValueError(
                "Please provide a valid URL to a GeoParquet or GeoJSON file."
            )
        # No spatial extent at load time: the cube is clipped further below
        # with filter_spatial once the geometry is resolved.
        cube = load_collection_method(
            connection=connection,
            bands=bands,
            temporal_extent=temporal_extent,
            properties=load_collection_parameters,
        )

    # Adding the process graph updates for experimental features
    if params.get("update_arguments") is not None:
        cube.result_node().update_arguments(**params["update_arguments"])

    # Performing pre-mask optimization
    pre_mask = params.get("pre_mask", None)
    if pre_mask is not None:
        assert isinstance(pre_mask, openeo.DataCube), (
            f"The 'pre_mask' parameter must be an openeo datacube, " f"got {pre_mask}."
        )
        cube = cube.mask(pre_mask)

    # Merges additional bands continuing the operations.
    pre_merge_cube = params.get("pre_merge", None)
    if pre_merge_cube is not None:
        assert isinstance(pre_merge_cube, openeo.DataCube), (
            f"The 'pre_merge' parameter value must be an openeo datacube, "
            f"got {pre_merge_cube}."
        )
        if pre_mask is not None:
            # The merged bands get the same mask so both cubes stay aligned.
            pre_merge_cube = pre_merge_cube.mask(pre_mask)
        cube = cube.merge_cubes(pre_merge_cube)

    if fetch_type == FetchType.POLYGON:
        if isinstance(spatial_extent, str):
            # Resolve the URL into a vector cube before spatial filtering.
            geometry = connection.load_url(
                spatial_extent,
                format="Parquet" if ".parquet" in spatial_extent else "GeoJSON",
            )
            cube = cube.filter_spatial(geometry)
        else:
            cube = cube.filter_spatial(spatial_extent)

    return cube
|
@@ -0,0 +1,98 @@
|
|
1
|
+
""" Main file for extractions and pre-processing of data through OpenEO
|
2
|
+
"""
|
3
|
+
|
4
|
+
from enum import Enum
|
5
|
+
from typing import Callable
|
6
|
+
|
7
|
+
import openeo
|
8
|
+
|
9
|
+
from openeo_gfmap import BackendContext
|
10
|
+
from openeo_gfmap.spatial import SpatialContext
|
11
|
+
from openeo_gfmap.temporal import TemporalContext
|
12
|
+
|
13
|
+
|
14
|
+
class FetchType(Enum):
    """Enumerates the different types of extraction. There are three types of
    enumerations.

    * TILE: Tile based extractions, getting the data for a dense part. The
    output of such a fetching process is a dense DataCube.
    * POINT: Point based extractions. From a dataset of polygons, gets sparse
    extractions and performs spatial aggregation on the selected polygons. The
    output of such a fetching process is a VectorCube, that can be used to get
    a pandas.DataFrame.
    * POLYGON: Patch based extractions, returning a VectorCube of sparse
    patches. This can be retrieved as multiple NetCDF files from one job.
    """

    TILE = "tile"
    POINT = "point"
    POLYGON = "polygon"
|
31
|
+
|
32
|
+
|
33
|
+
class CollectionFetcher:
    """Base class to fetch a particular collection.

    Parameters
    ----------
    backend_context: BackendContext
        Information about the backend in use, useful in certain cases.
    bands: list
        List of band names to load from that collection.
    collection_fetch: Callable
        Function defining how to fetch a collection for a specific backend,
        the function accepts the following parameters: connection,
        spatial extent, temporal extent, bands and additional parameters.
    collection_preprocessing: Callable
        Function defining how to harmonize the data of a collection in a
        backend. For example, this function could rename the bands as they
        can be different for every backend/collection (SENTINEL2_L2A or
        SENTINEL2_L2A_SENTINELHUB). Accepts the following parameters:
        datacube (of pre-fetched collection) and additional parameters.
    collection_params: dict
        Additional parameters encoded within a dictionary that will be
        passed in the fetch and preprocessing function.
    """

    def __init__(
        self,
        backend_context: BackendContext,
        bands: list,
        collection_fetch: Callable,
        collection_preprocessing: Callable,
        **collection_params,
    ):
        self.backend_context = backend_context
        # Backward-compatibility alias: earlier releases stored the context
        # under this misspelled attribute name; keep it so existing readers
        # of `.backend_contect` do not break.
        self.backend_contect = backend_context
        self.bands = bands
        self.fetcher = collection_fetch
        self.processing = collection_preprocessing
        self.params = collection_params

    def get_cube(
        self,
        connection: openeo.Connection,
        spatial_context: SpatialContext,
        temporal_context: TemporalContext,
    ) -> openeo.DataCube:
        """Retrieve a data cube from the given spatial and temporal context.

        Parameters
        ----------
        connection: openeo.Connection
            A connection to an OpenEO backend. The backend provided must be the
            same as the one this extractor class is configured for.
        spatial_context: SpatialContext
            Either a GeoJSON collection on which spatial filtering will be
            applied or a bounding box with an EPSG code. If a bounding box is
            provided, no filtering is applied and the entirety of the data is
            fetched for that region.
        temporal_context: TemporalContext
            The begin and end date of the extraction.
        """
        # Fetch the raw collection data, then harmonize it through the
        # backend-specific preprocessing function.
        collection_data = self.fetcher(
            connection, spatial_context, temporal_context, self.bands, **self.params
        )

        preprocessed_data = self.processing(collection_data, **self.params)

        return preprocessed_data
|
@@ -0,0 +1,165 @@
|
|
1
|
+
""" Generic extraction of features, supporting VITO backend.
|
2
|
+
"""
|
3
|
+
|
4
|
+
from functools import partial
from typing import Callable, Optional

import openeo
from geojson import GeoJSON

from openeo_gfmap.backend import Backend, BackendContext
from openeo_gfmap.fetching import CollectionFetcher, FetchType, _log
from openeo_gfmap.fetching.commons import (
    _load_collection,
    convert_band_names,
    rename_bands,
    resample_reproject,
)
from openeo_gfmap.spatial import SpatialContext
from openeo_gfmap.temporal import TemporalContext
|
20
|
+
|
21
|
+
# Mapping of the Copernicus DEM collection band name to the OpenEO-GFMAP
# harmonized band name.
BASE_DEM_MAPPING = {"DEM": "COP-DEM"}
# Mapping of the AGERA5 collection band names (as exposed by the backend)
# to the OpenEO-GFMAP harmonized band names.
BASE_WEATHER_MAPPING = {
    "dewpoint-temperature": "AGERA5-DEWTEMP",
    "precipitation-flux": "AGERA5-PRECIP",
    "solar-radiation-flux": "AGERA5-SOLRAD",
    "temperature-max": "AGERA5-TMAX",
    "temperature-mean": "AGERA5-TMEAN",
    "temperature-min": "AGERA5-TMIN",
    "vapour-pressure": "AGERA5-VAPOUR",
    "wind-speed": "AGERA5-WIND",
}
|
32
|
+
|
33
|
+
|
34
|
+
def _get_generic_fetcher(
    collection_name: str,
    fetch_type: FetchType,
    band_mapping: Optional[dict] = None,
) -> Callable:
    """Builds the fetch function for a generic collection.

    Parameters
    ----------
    collection_name: str
        Name of the collection on the backend ("COPERNICUS_30" or "AGERA5").
    fetch_type: FetchType
        Type of extraction: tile, point or polygon based.
    band_mapping: Optional[dict]
        Mapping of collection band names to the OpenEO-GFMAP harmonized names.
        Defaults to the built-in mapping of the requested collection. This
        allows callers (e.g. the meteo fetcher builders) to supply a custom
        mapping; without this parameter, partials that pass ``band_mapping``
        would fail with a TypeError.

    Returns
    -------
    Callable
        A fetcher accepting (connection, spatial_extent, temporal_extent,
        bands, **params) and returning an openeo.DataCube.
    """
    if band_mapping is not None:
        base_mapping = band_mapping
    elif collection_name == "COPERNICUS_30":
        base_mapping = BASE_DEM_MAPPING
    elif collection_name == "AGERA5":
        base_mapping = BASE_WEATHER_MAPPING
    else:
        raise Exception("Please choose a valid collection.")

    def generic_default_fetcher(
        connection: openeo.Connection,
        spatial_extent: SpatialContext,
        temporal_extent: TemporalContext,
        bands: list,
        **params,
    ) -> openeo.DataCube:
        """Fetch the configured collection for the given extents."""
        # Translate harmonized band names into the collection's own names.
        bands = convert_band_names(bands, base_mapping)

        if (collection_name == "COPERNICUS_30") and (temporal_extent is not None):
            _log.warning(
                "User set-up non None temporal extent for DEM collection. Ignoring it."
            )
            # DEM is atemporal: drop the temporal extent entirely.
            temporal_extent = None

        cube = _load_collection(
            connection,
            bands,
            collection_name,
            spatial_extent,
            temporal_extent,
            fetch_type,
            **params,
        )

        # Apply if the collection is a GeoJSON Feature collection
        if isinstance(spatial_extent, GeoJSON):
            cube = cube.filter_spatial(spatial_extent)

        return cube

    return generic_default_fetcher
|
74
|
+
|
75
|
+
|
76
|
+
def _get_generic_processor(
    collection_name: str,
    fetch_type: FetchType,
    band_mapping: Optional[dict] = None,
) -> Callable:
    """Builds the preprocessing function from the collection name as it stored
    in the target backend.

    Parameters
    ----------
    collection_name: str
        Name of the collection on the backend ("COPERNICUS_30" or "AGERA5").
    fetch_type: FetchType
        Type of extraction: tile, point or polygon based.
    band_mapping: Optional[dict]
        Mapping of collection band names to the OpenEO-GFMAP harmonized names.
        Defaults to the built-in mapping of the requested collection. Without
        this parameter, partials that pass ``band_mapping`` (e.g. from the
        meteo module) would fail with a TypeError.
    """
    if band_mapping is not None:
        base_mapping = band_mapping
    elif collection_name == "COPERNICUS_30":
        base_mapping = BASE_DEM_MAPPING
    elif collection_name == "AGERA5":
        base_mapping = BASE_WEATHER_MAPPING
    else:
        raise Exception("Please choose a valid collection.")

    def generic_default_processor(cube: openeo.DataCube, **params):
        """Default collection preprocessing method for generic datasets.
        This method renames bands and removes the time dimension in case the
        requested dataset is DEM
        """
        if params.get("target_resolution", None) is not None:
            cube = resample_reproject(
                cube,
                params.get("target_resolution", 10.0),
                params.get("target_crs", None),
                method=params.get("resampling_method", "near"),
            )

        if collection_name == "COPERNICUS_30":
            # DEM is atemporal: collapse the time dimension.
            cube = cube.min_time()

        cube = rename_bands(cube, base_mapping)

        return cube

    return generic_default_processor
|
108
|
+
|
109
|
+
|
110
|
+
# Maps each supported generic collection to, per backend, the builder
# functions producing its fetcher ("fetch") and preprocessor
# ("preprocessor"). Both builders are called with fetch_type to obtain
# the actual callables (see build_generic_extractor).
OTHER_BACKEND_MAP = {
    "AGERA5": {
        Backend.TERRASCOPE: {
            "fetch": partial(_get_generic_fetcher, collection_name="AGERA5"),
            "preprocessor": partial(_get_generic_processor, collection_name="AGERA5"),
        },
        Backend.CDSE: {
            "fetch": partial(_get_generic_fetcher, collection_name="AGERA5"),
            "preprocessor": partial(_get_generic_processor, collection_name="AGERA5"),
        },
        Backend.FED: {
            "fetch": partial(_get_generic_fetcher, collection_name="AGERA5"),
            "preprocessor": partial(_get_generic_processor, collection_name="AGERA5"),
        },
    },
    "COPERNICUS_30": {
        Backend.TERRASCOPE: {
            "fetch": partial(_get_generic_fetcher, collection_name="COPERNICUS_30"),
            "preprocessor": partial(
                _get_generic_processor, collection_name="COPERNICUS_30"
            ),
        },
        Backend.CDSE: {
            "fetch": partial(_get_generic_fetcher, collection_name="COPERNICUS_30"),
            "preprocessor": partial(
                _get_generic_processor, collection_name="COPERNICUS_30"
            ),
        },
        Backend.FED: {
            "fetch": partial(_get_generic_fetcher, collection_name="COPERNICUS_30"),
            "preprocessor": partial(
                _get_generic_processor, collection_name="COPERNICUS_30"
            ),
        },
    },
}
|
146
|
+
|
147
|
+
|
148
|
+
def build_generic_extractor(
    backend_context: BackendContext,
    bands: list,
    fetch_type: FetchType,
    collection_name: str,
    **params,
) -> CollectionFetcher:
    """Creates a generic extractor adapted to the given backend. Currently only tested with VITO backend

    Parameters
    ----------
    backend_context: BackendContext
        Context describing the backend to fetch from.
    bands: list
        Harmonized band names to load.
    fetch_type: FetchType
        Type of extraction: tile, point or polygon based.
    collection_name: str
        Collection key in OTHER_BACKEND_MAP ("AGERA5" or "COPERNICUS_30").

    Raises
    ------
    ValueError
        If the collection or the backend is not supported, instead of the
        opaque AttributeError a failed dict lookup would otherwise produce.
    """
    collection_map = OTHER_BACKEND_MAP.get(collection_name)
    if collection_map is None:
        raise ValueError(
            f"Unsupported collection: {collection_name}. "
            f"Supported collections: {list(OTHER_BACKEND_MAP)}"
        )

    backend_functions = collection_map.get(backend_context.backend)
    if backend_functions is None:
        raise ValueError(
            f"Unsupported backend for collection {collection_name}: "
            f"{backend_context.backend}. Supported backends: {list(collection_map)}"
        )

    # Both entries are builders: call them with the fetch type to obtain the
    # actual fetcher/preprocessor callables.
    fetcher = backend_functions["fetch"](fetch_type=fetch_type)
    preprocessor = backend_functions["preprocessor"](fetch_type=fetch_type)

    return CollectionFetcher(backend_context, bands, fetcher, preprocessor, **params)
|
@@ -0,0 +1,126 @@
|
|
1
|
+
"""Meteo data fetchers."""
|
2
|
+
|
3
|
+
from functools import partial
|
4
|
+
|
5
|
+
import openeo
|
6
|
+
from geojson import GeoJSON
|
7
|
+
|
8
|
+
from openeo_gfmap import (
|
9
|
+
Backend,
|
10
|
+
BackendContext,
|
11
|
+
FetchType,
|
12
|
+
SpatialContext,
|
13
|
+
TemporalContext,
|
14
|
+
)
|
15
|
+
from openeo_gfmap.fetching import CollectionFetcher
|
16
|
+
from openeo_gfmap.fetching.commons import convert_band_names
|
17
|
+
from openeo_gfmap.fetching.generic import (
|
18
|
+
_get_generic_fetcher,
|
19
|
+
_get_generic_processor,
|
20
|
+
_load_collection,
|
21
|
+
)
|
22
|
+
|
23
|
+
# Mapping of the Terrascope AGERA5 collection band names to the
# OpenEO-GFMAP harmonized band names.
WEATHER_MAPPING_TERRASCOPE = {
    "dewpoint-temperature": "AGERA5-DEWTEMP",
    "precipitation-flux": "AGERA5-PRECIP",
    "solar-radiation-flux": "AGERA5-SOLRAD",
    "temperature-max": "AGERA5-TMAX",
    "temperature-mean": "AGERA5-TMEAN",
    "temperature-min": "AGERA5-TMIN",
    "vapour-pressure": "AGERA5-VAPOUR",
    "wind-speed": "AGERA5-WIND",
}
|
33
|
+
|
34
|
+
# Mapping of the VITO STAC agera5_daily asset/band names to the
# OpenEO-GFMAP harmonized band names.
WEATHER_MAPPING_STAC = {
    "dewpoint_temperature_mean": "AGERA5-DEWTEMP",
    "total_precipitation": "AGERA5-PRECIP",
    # NOTE(review): "radiataion" looks like a typo, but it may mirror the
    # actual asset name in the STAC catalogue — verify against
    # https://stac.openeo.vito.be/collections/agera5_daily before changing.
    "solar_radiataion_flux": "AGERA5-SOLRAD",
    "2m_temperature_max": "AGERA5-TMAX",
    "2m_temperature_mean": "AGERA5-TMEAN",
    "2m_temperature_min": "AGERA5-TMIN",
    "vapour_pressure": "AGERA5-VAPOUR",
    "wind_speed": "AGERA5-WIND",
}
|
44
|
+
|
45
|
+
|
46
|
+
def stac_fetcher(
    connection: openeo.Connection,
    spatial_extent: SpatialContext,
    temporal_extent: TemporalContext,
    bands: list,
    fetch_type: FetchType,
    **params,
) -> openeo.DataCube:
    """Fetch AGERA5 meteo data from the VITO STAC catalogue for the given
    spatial and temporal extents."""
    # Translate harmonized band names into the STAC asset names.
    bands = convert_band_names(bands, WEATHER_MAPPING_STAC)

    cube = _load_collection(
        connection,
        bands,
        "https://stac.openeo.vito.be/collections/agera5_daily",
        spatial_extent,
        temporal_extent,
        fetch_type,
        is_stac=True,
        **params,
    )

    # Polygon-based fetching with a GeoJSON extent: clip to the geometries.
    if isinstance(spatial_extent, GeoJSON) and fetch_type == FetchType.POLYGON:
        cube = cube.filter_spatial(spatial_extent)

    return cube
|
71
|
+
|
72
|
+
|
73
|
+
# Per-backend fetch/preprocess configuration for meteo (AGERA5) data.
# NOTE(review): the TERRASCOPE "fetch" entry is a fetcher *builder*
# (partial of _get_generic_fetcher) while the other backends use the
# direct fetcher function stac_fetcher; build_meteo_extractor wraps all
# "fetch" entries the same way — confirm the TERRASCOPE path is correct.
METEO_BACKEND_MAP = {
    Backend.TERRASCOPE: {
        "fetch": partial(
            _get_generic_fetcher,
            collection_name="AGERA5",
            band_mapping=WEATHER_MAPPING_TERRASCOPE,
        ),
        "preprocessor": partial(
            _get_generic_processor,
            collection_name="AGERA5",
            band_mapping=WEATHER_MAPPING_TERRASCOPE,
        ),
    },
    Backend.CDSE: {
        "fetch": stac_fetcher,
        "preprocessor": partial(
            _get_generic_processor,
            collection_name="AGERA5",
            band_mapping=WEATHER_MAPPING_STAC,
        ),
    },
    Backend.CDSE_STAGING: {
        "fetch": stac_fetcher,
        "preprocessor": partial(
            _get_generic_processor,
            collection_name="AGERA5",
            band_mapping=WEATHER_MAPPING_STAC,
        ),
    },
    Backend.FED: {
        "fetch": stac_fetcher,
        "preprocessor": partial(
            _get_generic_processor,
            collection_name="AGERA5",
            band_mapping=WEATHER_MAPPING_STAC,
        ),
    },
}
|
111
|
+
|
112
|
+
|
113
|
+
def build_meteo_extractor(
    backend_context: BackendContext,
    bands: list,
    fetch_type: FetchType,
    **params,
) -> CollectionFetcher:
    """Creates a meteo (AGERA5) collection fetcher adapted to the given
    backend.

    Parameters
    ----------
    backend_context: BackendContext
        Context describing the backend to fetch from.
    bands: list
        Harmonized band names (AGERA5-*) to load.
    fetch_type: FetchType
        Type of extraction: tile, point or polygon based.

    Raises
    ------
    ValueError
        If the backend is not supported, instead of the opaque TypeError a
        failed dict lookup would otherwise produce.
    """
    backend_functions = METEO_BACKEND_MAP.get(backend_context.backend)
    if backend_functions is None:
        raise ValueError(
            f"Unsupported backend for meteo extraction: "
            f"{backend_context.backend}. Supported backends: "
            f"{list(METEO_BACKEND_MAP)}"
        )

    # "fetch" is treated as a direct fetcher: bind the fetch type now and call
    # it at extraction time. "preprocessor" is a builder: call it immediately
    # to obtain the preprocessing callable.
    fetcher = partial(backend_functions["fetch"], fetch_type=fetch_type)
    preprocessor = backend_functions["preprocessor"](fetch_type=fetch_type)

    return CollectionFetcher(backend_context, bands, fetcher, preprocessor, **params)
|