mapchete-eo 2025.10.1-py2.py3-none-any.whl → 2026.1.0-py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapchete_eo/__init__.py +1 -1
- mapchete_eo/base.py +94 -54
- mapchete_eo/cli/options_arguments.py +11 -27
- mapchete_eo/cli/s2_brdf.py +1 -1
- mapchete_eo/cli/s2_cat_results.py +4 -20
- mapchete_eo/cli/s2_find_broken_products.py +4 -20
- mapchete_eo/cli/s2_jp2_static_catalog.py +2 -2
- mapchete_eo/cli/static_catalog.py +4 -45
- mapchete_eo/eostac.py +1 -1
- mapchete_eo/io/assets.py +20 -16
- mapchete_eo/io/items.py +36 -23
- mapchete_eo/io/path.py +19 -8
- mapchete_eo/io/products.py +22 -24
- mapchete_eo/platforms/sentinel2/__init__.py +1 -1
- mapchete_eo/platforms/sentinel2/_mapper_registry.py +89 -0
- mapchete_eo/platforms/sentinel2/brdf/correction.py +1 -1
- mapchete_eo/platforms/sentinel2/brdf/hls.py +1 -1
- mapchete_eo/platforms/sentinel2/brdf/models.py +1 -1
- mapchete_eo/platforms/sentinel2/brdf/protocols.py +1 -1
- mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +1 -1
- mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +1 -1
- mapchete_eo/platforms/sentinel2/config.py +73 -13
- mapchete_eo/platforms/sentinel2/driver.py +0 -39
- mapchete_eo/platforms/sentinel2/metadata_parser/__init__.py +6 -0
- mapchete_eo/platforms/sentinel2/{path_mappers → metadata_parser}/base.py +1 -1
- mapchete_eo/platforms/sentinel2/{path_mappers/metadata_xml.py → metadata_parser/default_path_mapper.py} +2 -2
- mapchete_eo/platforms/sentinel2/metadata_parser/models.py +78 -0
- mapchete_eo/platforms/sentinel2/{metadata_parser.py → metadata_parser/s2metadata.py} +51 -144
- mapchete_eo/platforms/sentinel2/preconfigured_sources/__init__.py +57 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/guessers.py +108 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/item_mappers.py +171 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/metadata_xml_mappers.py +217 -0
- mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +22 -1
- mapchete_eo/platforms/sentinel2/processing_baseline.py +3 -0
- mapchete_eo/platforms/sentinel2/product.py +83 -18
- mapchete_eo/platforms/sentinel2/source.py +114 -0
- mapchete_eo/platforms/sentinel2/types.py +5 -0
- mapchete_eo/product.py +14 -8
- mapchete_eo/protocols.py +5 -0
- mapchete_eo/search/__init__.py +3 -3
- mapchete_eo/search/base.py +127 -99
- mapchete_eo/search/config.py +75 -4
- mapchete_eo/search/s2_mgrs.py +8 -9
- mapchete_eo/search/stac_search.py +99 -97
- mapchete_eo/search/stac_static.py +46 -102
- mapchete_eo/search/utm_search.py +54 -62
- mapchete_eo/settings.py +1 -0
- mapchete_eo/sort.py +4 -6
- mapchete_eo/source.py +107 -0
- {mapchete_eo-2025.10.1.dist-info → mapchete_eo-2026.1.0.dist-info}/METADATA +4 -3
- mapchete_eo-2026.1.0.dist-info/RECORD +89 -0
- {mapchete_eo-2025.10.1.dist-info → mapchete_eo-2026.1.0.dist-info}/WHEEL +1 -1
- {mapchete_eo-2025.10.1.dist-info → mapchete_eo-2026.1.0.dist-info}/entry_points.txt +1 -1
- {mapchete_eo-2025.10.1.dist-info → mapchete_eo-2026.1.0.dist-info}/licenses/LICENSE +1 -1
- mapchete_eo/archives/__init__.py +0 -0
- mapchete_eo/archives/base.py +0 -65
- mapchete_eo/geometry.py +0 -271
- mapchete_eo/known_catalogs.py +0 -42
- mapchete_eo/platforms/sentinel2/archives.py +0 -190
- mapchete_eo/platforms/sentinel2/path_mappers/__init__.py +0 -29
- mapchete_eo/platforms/sentinel2/path_mappers/earthsearch.py +0 -34
- mapchete_eo/platforms/sentinel2/path_mappers/sinergise.py +0 -105
- mapchete_eo-2025.10.1.dist-info/RECORD +0 -88
mapchete_eo/io/items.py
CHANGED
@@ -1,15 +1,15 @@
 import logging
-from typing import Any, List, Optional
+from typing import Any, List, Optional, Tuple, Union

 import numpy.ma as ma
 import pystac
+from mapchete.geometry import repair_antimeridian_geometry
 from mapchete.protocols import GridProtocol
 from mapchete.types import Bounds, NodataVals
 from rasterio.enums import Resampling
 from shapely.geometry import mapping, shape

 from mapchete_eo.exceptions import EmptyProductException
-from mapchete_eo.geometry import repair_antimeridian_geometry
 from mapchete_eo.io.assets import asset_to_np_array
 from mapchete_eo.types import BandLocation

@@ -69,7 +69,8 @@ def expand_params(param: Any, length: int) -> List[Any]:

 def get_item_property(
     item: pystac.Item,
-    property: str,
+    property: Union[str, Tuple[str, ...]],
+    default: Any = None,
 ) -> Any:
     """
     Return item property.
@@ -104,28 +105,40 @@ def get_item_property(
     | ``collection`` | The collection ID of an Item's collection. |
     +--------------------+--------------------------------------------------------+
     """
-    [11 removed lines not rendered in the source diff]
+
+    def _get_item_property(item: pystac.Item, property: str) -> Any:
+        if property == "id":
+            return item.id
+        elif property in ["year", "month", "day", "date", "datetime"]:
+            if item.datetime is None:  # pragma: no cover
+                raise ValueError(
+                    f"STAC item has no datetime attached, thus cannot get property {property}"
+                )
+            elif property == "date":
+                return item.datetime.date().isoformat()
+            elif property == "datetime":
+                return item.datetime
+            else:
+                return item.datetime.__getattribute__(property)
+        elif property == "collection":
+            return item.collection_id
+        elif property in item.properties:
+            return item.properties[property]
+        elif property in item.extra_fields:
+            return item.extra_fields[property]
+        elif property == "stac_extensions":
+            return item.stac_extensions
         else:
-    [7 removed lines not rendered in the source diff]
-    elif property == "stac_extensions":
-        return item.stac_extensions
+            raise KeyError
+
+    for prop in property if isinstance(property, tuple) else (property,):
+        try:
+            return _get_item_property(item, prop)
+        except KeyError:
+            pass
     else:
+        if default is not None:
+            return default
         raise KeyError(
             f"item {item.id} does not have property {property} in its datetime, properties "
             f"({', '.join(item.properties.keys())}) or extra_fields "
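
The updated `get_item_property` accepts either a single property name or a tuple of candidate names that are tried in order, plus a `default` returned instead of raising `KeyError`. A minimal usage sketch; the STAC item below is illustrative test data, not taken from this diff:

```python
# Usage sketch of the new fallback behaviour; the item is illustrative.
from datetime import datetime, timezone

import pystac

from mapchete_eo.io.items import get_item_property

item = pystac.Item(
    id="S2A_example_item",
    geometry={"type": "Point", "coordinates": [16.37, 48.21]},
    bbox=[16.37, 48.21, 16.37, 48.21],
    datetime=datetime(2026, 1, 1, tzinfo=timezone.utc),
    properties={"s2:processing_baseline": "05.11"},
)

# candidate keys in a tuple are tried left to right until one resolves
baseline = get_item_property(item, ("processing_baseline", "s2:processing_baseline"))

# a non-None default is returned instead of raising KeyError
cloud_cover = get_item_property(item, "eo:cloud_cover", default=0.0)
print(baseline, cloud_cover)
```
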
mapchete_eo/io/path.py
CHANGED
@@ -3,7 +3,7 @@ import logging
 from contextlib import contextmanager
 from enum import Enum
 from tempfile import TemporaryDirectory
-from typing import Generator
+from typing import Generator, Tuple, Union
 from xml.etree.ElementTree import Element, fromstring

 import fsspec
@@ -126,19 +126,30 @@ def cached_path(path: MPath, active: bool = True) -> Generator[MPath, None, None

 def asset_mpath(
     item: pystac.Item,
-    asset: str,
+    asset: Union[str, Tuple[str, ...]],
     fs: fsspec.AbstractFileSystem = None,
     absolute_path: bool = True,
 ) -> MPath:
     """Return MPath instance with asset href."""

-    [1 removed line not rendered in the source diff]
+    def _asset_mpath(
+        item: pystac.Item,
+        asset: str,
+        fs: fsspec.AbstractFileSystem = None,
+        absolute_path: bool = True,
+    ) -> MPath:
         asset_path = MPath(item.assets[asset].href, fs=fs)
-    [1 removed line not rendered in the source diff]
+        if absolute_path and not asset_path.is_absolute():
+            return MPath(item.get_self_href(), fs=fs).parent / asset_path
+        else:
+            return asset_path
+
+    for single_asset in asset if isinstance(asset, tuple) else (asset,):
+        try:
+            return _asset_mpath(item, single_asset, fs=fs, absolute_path=absolute_path)
+        except KeyError:
+            pass
+    else:
         raise AssetKeyError(
             f"{item.id} no asset named '{asset}' found in assets: {', '.join(item.assets.keys())}"
         )
-    if absolute_path and not asset_path.is_absolute():
-        return MPath(item.get_self_href(), fs=fs).parent / asset_path
-    else:
-        return asset_path
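
`asset_mpath` now also accepts a tuple of candidate asset keys and falls through them, which helps when different catalogs name the same band differently. A small sketch with illustrative asset keys and hrefs:

```python
# Usage sketch; asset keys and hrefs are illustrative.
from datetime import datetime, timezone

import pystac

from mapchete_eo.io.path import asset_mpath

item = pystac.Item(
    id="example-item",
    geometry={"type": "Point", "coordinates": [0.0, 0.0]},
    bbox=[0.0, 0.0, 0.0, 0.0],
    datetime=datetime(2026, 1, 1, tzinfo=timezone.utc),
    properties={},
)
item.add_asset("B04", pystac.Asset(href="https://example.com/B04.tif"))

# older catalogs may call the band "red", newer ones "B04"; the first key
# that exists wins, AssetKeyError is only raised if none matches
red_path = asset_mpath(item, ("red", "B04"))
print(red_path)
```
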
mapchete_eo/io/products.py
CHANGED
@@ -29,7 +29,6 @@ from mapchete_eo.exceptions import (
     EmptyStackException,
     NoSourceProducts,
 )
-from mapchete_eo.io.items import get_item_property
 from mapchete_eo.protocols import EOProductProtocol
 from mapchete_eo.sort import SortMethodConfig
 from mapchete_eo.types import MergeMethod
@@ -118,7 +117,8 @@ def products_to_xarray(
     coords = {
         slice_axis_name: list(
             np.array(
-                [product.
+                [product.get_property("datetime") for product in products],
+                dtype=np.datetime64,
             )
         )
     }
@@ -132,7 +132,6 @@ class Slice:
     """Combine multiple products into one slice."""

     name: Any
-    properties: dict
     products: Sequence[EOProductProtocol]
     datetime: datetime

@@ -151,21 +150,13 @@ class Slice:

         # calculate mean datetime
         timestamps = [
-            product.
+            product.get_property("datetime").timestamp()
             for product in self.products
-            if product.
+            if product.get_property("datetime")
         ]
         mean_timestamp = sum(timestamps) / len(timestamps)
         self.datetime = datetime.fromtimestamp(mean_timestamp)

-        # generate combined properties
-        self.properties = {}
-        for key in self.products[0].item.properties.keys():
-            try:
-                self.properties[key] = self.get_property(key)
-            except ValueError:
-                self.properties[key] = None
-
     def __repr__(self) -> str:
         return f"<Slice {self.name} ({len(self.products)} products)>"

@@ -178,6 +169,17 @@ class Slice:

         raise EmptySliceException

+    @property
+    def properties(self) -> Dict[str, Any]:
+        # generate combined properties
+        properties: Dict[str, Any] = {}
+        for key in self.products[0].item.properties.keys():
+            try:
+                properties[key] = self.get_property(key)
+            except ValueError:
+                properties[key] = None
+        return properties
+
     @contextmanager
     def cached(self) -> Generator[Slice, None, None]:
         """Clear caches and run garbage collector when context manager is closed."""
@@ -200,12 +202,9 @@ class Slice:
         ValueError is raised.
         """
         # if set of value hashes has a length of 1, all values are the same
-        values = [
-            get_hash(get_item_property(product.item, property=property))
-            for product in self.products
-        ]
+        values = [get_hash(product.get_property(property)) for product in self.products]
         if len(set(values)) == 1:
-            return
+            return self.products[0].get_property(property)

         raise ValueError(
             f"cannot get unique property {property} from products {self.products}"
@@ -238,7 +237,7 @@ def products_to_slices(
             grouped[product.get_property(group_by_property)].append(product)
         slices = [Slice(key, products) for key, products in grouped.items()]
     else:
-        slices = [Slice(product.
+        slices = [Slice(product.id, [product]) for product in products]

     # also check if slices is even a list, otherwise it will raise an error
     if sort and slices:
@@ -268,7 +267,7 @@ def merge_products(
         try:
             yield product.read_np_array(**product_read_kwargs)
         except (AssetKeyError, CorruptedProduct) as exc:
-            logger.
+            logger.warning("skip product %s because of %s", product.id, exc)
         except StopIteration:
             return

@@ -286,7 +285,7 @@ def merge_products(
                 out = product.read_np_array(**product_read_kwargs)
                 break
             except (AssetKeyError, CorruptedProduct) as exc:
-                logger.
+                logger.warning("skip product %s because of %s", product.id, exc)
         else:
             # we cannot do anything here, as all products are broken
             raise CorruptedSlice("all products are broken here")
@@ -378,7 +377,6 @@ def generate_slice_dataarrays(
     slices = products_to_slices(
         products, group_by_property=merge_products_by, sort=sort
     )
-
     logger.debug(
         "reading %s products in %s groups...",
         len(products),
@@ -418,8 +416,8 @@ def generate_slice_dataarrays(
             )
             # if at least one slice can be yielded, the stack is not empty
             stack_empty = False
-        except (EmptySliceException, CorruptedSlice):
-            [1 removed line not rendered in the source diff]
+        except (EmptySliceException, CorruptedSlice) as exception:
+            logger.warning(exception)

         if stack_empty:
             raise EmptyStackException("all slices are empty")
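
`Slice` no longer materializes a combined `properties` dict at construction time; it is now computed on demand from the products via `get_property`, and `Slice.get_property` returns the shared value instead of `None` when it is identical across all products. A hedged sketch of that contract, using a made-up stand-in for `EOProductProtocol` that only covers what the diff relies on:

```python
# Hedged sketch of the lazy Slice.properties contract; DummyProduct is NOT
# part of mapchete_eo, it only mimics the attributes the diff uses.
from datetime import datetime, timezone

import pystac

from mapchete_eo.io.products import Slice


class DummyProduct:
    def __init__(self, item: pystac.Item):
        self.item = item
        self.id = item.id

    def get_property(self, property, default=None):
        if property == "datetime":
            return self.item.datetime
        return self.item.properties.get(property, default)


item = pystac.Item(
    id="product-1",
    geometry={"type": "Point", "coordinates": [0.0, 0.0]},
    bbox=[0.0, 0.0, 0.0, 0.0],
    datetime=datetime(2026, 1, 1, tzinfo=timezone.utc),
    properties={"eo:cloud_cover": 12.3},
)

sl = Slice("product-1", [DummyProduct(item)])

# combined properties are now assembled lazily on attribute access
print(sl.properties["eo:cloud_cover"])
# a value shared by all products is returned instead of None
print(sl.get_property("eo:cloud_cover"))
```
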

mapchete_eo/platforms/sentinel2/__init__.py
CHANGED
@@ -4,7 +4,7 @@ from mapchete_eo.platforms.sentinel2.driver import (
     Sentinel2Cube,
     Sentinel2CubeGroup,
 )
-from mapchete_eo.platforms.sentinel2.metadata_parser import S2Metadata
+from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
 from mapchete_eo.platforms.sentinel2.product import S2Product

 __all__ = [

mapchete_eo/platforms/sentinel2/_mapper_registry.py
ADDED
@@ -0,0 +1,89 @@
+from typing import List, Callable, Dict, Any, Optional
+
+from pystac import Item
+
+from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
+from mapchete_eo.platforms.sentinel2.types import DataArchive, MetadataArchive
+
+
+# decorators for mapper functions using the registry pattern #
+##############################################################
+ID_MAPPER_REGISTRY: Dict[Any, Callable[[Item], Item]] = {}
+STAC_METADATA_MAPPER_REGISTRY: Dict[Any, Callable[[Item], Item]] = {}
+S2METADATA_MAPPER_REGISTRY: Dict[Any, Callable[[Item], S2Metadata]] = {}
+
+MAPPER_REGISTRIES: Dict[str, Any] = {
+    "ID": ID_MAPPER_REGISTRY,
+    "STAC metadata": STAC_METADATA_MAPPER_REGISTRY,
+    "S2Metadata": S2METADATA_MAPPER_REGISTRY,
+}
+
+
+def _register_func(registry: Dict[str, Callable], key: Any, func: Callable):
+    if key in registry:
+        raise ValueError(f"{key} already registered in {registry}")
+    registry[key] = func
+
+
+def maps_item_id(from_collections: List[str]):
+    """
+    Decorator registering mapper to common ID.
+    """
+
+    def decorator(func):
+        # Use a tuple of the metadata as the key
+        # key = (path_type, version)
+        for collection in from_collections:
+            _register_func(registry=ID_MAPPER_REGISTRY, key=collection, func=func)
+        return func
+
+    return decorator
+
+
+def maps_stac_metadata(
+    from_collections: List[str], to_data_archives: Optional[List[DataArchive]] = None
+):
+    """
+    Decorator registering STAC metadata mapper.
+    """
+
+    def decorator(func):
+        # Use a tuple of the metadata as the key
+        for collection in from_collections:
+            if to_data_archives:
+                for data_archive in to_data_archives:
+                    _register_func(
+                        registry=STAC_METADATA_MAPPER_REGISTRY,
+                        key=(collection, data_archive),
+                        func=func,
+                    )
+            else:
+                _register_func(
+                    registry=STAC_METADATA_MAPPER_REGISTRY,
+                    key=collection,
+                    func=func,
+                )
+        return func
+
+    return decorator
+
+
+def creates_s2metadata(
+    from_collections: List[str], to_metadata_archives: List[MetadataArchive]
+):
+    """
+    Decorator registering S2Metadata creator.
+    """
+
+    def decorator(func):
+        # Use a tuple of the metadata as the key
+        for collection in from_collections:
+            for metadata_archive in to_metadata_archives:
+                _register_func(
+                    registry=S2METADATA_MAPPER_REGISTRY,
+                    key=(collection, metadata_archive),
+                    func=func,
+                )
+        return func
+
+    return decorator
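
The new `_mapper_registry` module keeps per-collection registries of mapper functions and exposes decorators that fill them at import time; registering the same key twice raises `ValueError`. A hypothetical registration sketch (the collection name is made up and the mapper body is a no-op stand-in):

```python
# Hypothetical registration sketch; not taken from the package itself.
import pystac

from mapchete_eo.platforms.sentinel2._mapper_registry import (
    ID_MAPPER_REGISTRY,
    maps_item_id,
)


@maps_item_id(from_collections=["my-custom-s2-collection"])
def my_item_id_mapper(item: pystac.Item) -> pystac.Item:
    # a real mapper would rewrite the item ID to the common naming scheme
    return item


# the decorator stores the function keyed by collection name
assert ID_MAPPER_REGISTRY["my-custom-s2-collection"] is my_item_id_mapper
```
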

mapchete_eo/platforms/sentinel2/brdf/correction.py
CHANGED
@@ -13,7 +13,7 @@ from rasterio.fill import fillnodata

 from mapchete_eo.exceptions import BRDFError
 from mapchete_eo.platforms.sentinel2.brdf.models import BRDFModels, get_model
-from mapchete_eo.platforms.sentinel2.metadata_parser import S2Metadata
+from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
 from mapchete_eo.platforms.sentinel2.types import (
     L2ABand,
     Resolution,

mapchete_eo/platforms/sentinel2/brdf/hls.py
CHANGED
@@ -16,7 +16,7 @@ from mapchete_eo.platforms.sentinel2.brdf.protocols import (
 )
 from mapchete_eo.platforms.sentinel2.brdf.config import L2ABandFParams, ModelParameters
 from mapchete_eo.platforms.sentinel2.brdf.sun_angle_arrays import get_sun_zenith_angles
-from mapchete_eo.platforms.sentinel2.metadata_parser import S2Metadata
+from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
 from mapchete_eo.platforms.sentinel2.types import L2ABand


mapchete_eo/platforms/sentinel2/brdf/models.py
CHANGED
@@ -12,7 +12,7 @@ from mapchete_eo.platforms.sentinel2.brdf.hls import HLS
 from mapchete_eo.platforms.sentinel2.brdf.ross_thick import RossThick

 # from mapchete_eo.platforms.sentinel2.brdf.hls2 import HLS2
-from mapchete_eo.platforms.sentinel2.metadata_parser import S2Metadata
+from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
 from mapchete_eo.platforms.sentinel2.types import L2ABand

 logger = logging.getLogger(__name__)

mapchete_eo/platforms/sentinel2/brdf/protocols.py
CHANGED
@@ -6,7 +6,7 @@ from mapchete.io.raster import ReferencedRaster
 import numpy as np
 from numpy.typing import DTypeLike

-from mapchete_eo.platforms.sentinel2.metadata_parser import S2Metadata
+from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
 from mapchete_eo.platforms.sentinel2.types import L2ABand


mapchete_eo/platforms/sentinel2/brdf/ross_thick.py
CHANGED
@@ -14,7 +14,7 @@ from mapchete_eo.platforms.sentinel2.brdf.protocols import (
 )
 from mapchete_eo.platforms.sentinel2.brdf.config import L2ABandFParams, ModelParameters
 from mapchete_eo.platforms.sentinel2.brdf.hls import _get_viewing_angles
-from mapchete_eo.platforms.sentinel2.metadata_parser import S2Metadata
+from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
 from mapchete_eo.platforms.sentinel2.types import L2ABand


mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py
CHANGED
@@ -3,7 +3,7 @@ from typing import Tuple
 from fiona.transform import transform
 import numpy as np

-from mapchete_eo.platforms.sentinel2.metadata_parser import S2Metadata
+from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata


 def get_sun_zenith_angles(s2_metadata: S2Metadata) -> np.ndarray:

mapchete_eo/platforms/sentinel2/config.py
CHANGED
@@ -1,18 +1,19 @@
 from __future__ import annotations

-from typing import List, Optional, Union
+from typing import List, Optional, Union, Dict, Any
+import warnings

 from mapchete.path import MPathLike
-from pydantic import
-    BaseModel,
-    ValidationError,
-    field_validator,
-)
+from pydantic import BaseModel, ValidationError, field_validator, model_validator

 from mapchete_eo.base import BaseDriverConfig
 from mapchete_eo.io.path import ProductPathGenerationMethod
-from mapchete_eo.platforms.sentinel2.archives import ArchiveClsFromString, AWSL2ACOGv1
 from mapchete_eo.platforms.sentinel2.brdf.config import BRDFModels
+from mapchete_eo.platforms.sentinel2.preconfigured_sources import (
+    KNOWN_SOURCES,
+    DEPRECATED_ARCHIVES,
+)
+from mapchete_eo.platforms.sentinel2.source import Sentinel2Source
 from mapchete_eo.platforms.sentinel2.types import (
     CloudType,
     ProductQIMaskResolution,
@@ -23,6 +24,9 @@ from mapchete_eo.search.config import StacSearchConfig
 from mapchete_eo.types import TimeRange


+default_source = Sentinel2Source.model_validate(KNOWN_SOURCES["EarthSearch"])
+
+
 class BRDFModelConfig(BaseModel):
     model: BRDFModels = BRDFModels.HLS
     bands: List[str] = ["blue", "green", "red", "nir"]
@@ -47,7 +51,7 @@ class BRDFSCLClassConfig(BRDFModelConfig):
                 out.append(value)
             elif isinstance(value, str):
                 out.append(SceneClassification[value])
-            else:
+            else:  # pragma: no cover
                 raise ValidationError("value must be mappable to SceneClassification")
         return out

@@ -107,10 +111,18 @@ class CacheConfig(BaseModel):
 class Sentinel2DriverConfig(BaseDriverConfig):
     format: str = "Sentinel-2"
     time: Union[TimeRange, List[TimeRange]]
-    [2 removed lines not rendered in the source diff]
+
+    # new
+    source: List[Sentinel2Source] = [default_source]
+
+    # deprecated
+    # for backwards compatibility, archive should be converted to
+    # catalog & data_archive
+    # archive: ArchiveClsFromString = AWSL2ACOGv1
+    # cat_baseurl: Optional[MPathLike] = None
     search_index: Optional[MPathLike] = None
-    [1 removed line not rendered in the source diff]
+
+    # custom params
     stac_config: StacSearchConfig = StacSearchConfig()
     first_granule_only: bool = False
     utm_zone: Optional[int] = None
@@ -118,6 +130,54 @@ class Sentinel2DriverConfig(BaseDriverConfig):
     brdf: Optional[BRDFConfig] = None
     cache: Optional[CacheConfig] = None

+    @model_validator(mode="before")
+    def deprecated_values(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+        archive = values.pop("archive", None)
+        if archive:
+            warnings.warn(
+                "'archive' will be deprecated soon. Please use 'source'.",
+                category=DeprecationWarning,
+                stacklevel=2,
+            )
+            if values.get("source") is None:
+                values["source"] = DEPRECATED_ARCHIVES[archive]
+
+        cat_baseurl = values.pop("cat_baseurl", None)
+        if cat_baseurl:  # pragma: no cover
+            warnings.warn(
+                "'cat_baseurl' will be deprecated soon. Please use 'catalog_type=static' in the source.",
+                category=DeprecationWarning,
+                stacklevel=2,
+            )
+            if values.get("source", []):
+                raise ValueError(
+                    "deprecated cat_baseurl field found alongside sources."
+                )
+            values["source"] = [dict(collection=cat_baseurl, catalog_type="static")]
+
+        # add default source if necessary
+        sources = values.get("source", [])
+        if not sources:
+            values["source"] = [default_source.model_dump(exclude_none=True)]
+
+        max_cloud_cover = values.pop("max_cloud_cover", None)
+        if max_cloud_cover:  # pragma: no cover
+            warnings.warn(
+                "'max_cloud_cover' will be deprecated soon. Please use 'eo:cloud_cover<=...' in the source 'query' field.",
+                category=DeprecationWarning,
+                stacklevel=2,
+            )
+            updated_sources = []
+            for source in values.get("source", []):
+                if source.get("query") is not None:
+                    raise ValueError(
+                        f"deprecated max_cloud_cover is set but also a query field is given in {source}"
+                    )
+                source["query"] = f"eo:cloud_cover<={max_cloud_cover}"
+                updated_sources.append(source)
+            values["source"] = updated_sources
+        return values
+

 class MaskConfig(BaseModel):
     # mask by footprint geometry
@@ -160,7 +220,7 @@ class MaskConfig(BaseModel):
                 out.append(value)
             elif isinstance(value, str):
                 out.append(SceneClassification[value])
-            else:
+            else:  # pragma: no cover
                 raise ValidationError("value must be mappable to SceneClassification")
         return out

@@ -175,7 +235,7 @@ class MaskConfig(BaseModel):
         elif isinstance(config, dict):
             return MaskConfig(**config)

-        else:
+        else:  # pragma: no cover
             raise TypeError(
                 f"mask configuration should either be a dictionary or a MaskConfig object, not {config}"
             )
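
The before-mode `model_validator` migrates the deprecated `archive`, `cat_baseurl` and `max_cloud_cover` fields into the new `source` list and emits `DeprecationWarning`s. The standalone sketch below is not part of mapchete_eo; it only mirrors the migration pattern for the `max_cloud_cover` case with a toy pydantic model:

```python
# Standalone illustration of the migration pattern used by
# Sentinel2DriverConfig.deprecated_values; ToyDriverConfig is a made-up model.
import warnings
from typing import Any, Dict, List

from pydantic import BaseModel, model_validator


class ToyDriverConfig(BaseModel):
    source: List[Dict[str, Any]] = []

    @model_validator(mode="before")
    @classmethod
    def deprecated_values(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        # pop the deprecated field, warn, and rewrite it as a source query
        max_cloud_cover = values.pop("max_cloud_cover", None)
        if max_cloud_cover is not None:
            warnings.warn(
                "'max_cloud_cover' is deprecated, use a source query",
                category=DeprecationWarning,
                stacklevel=2,
            )
            sources = values.get("source") or [{}]
            for source in sources:
                source["query"] = f"eo:cloud_cover<={max_cloud_cover}"
            values["source"] = sources
        return values


config = ToyDriverConfig.model_validate({"max_cloud_cover": 20})
print(config.source)  # [{'query': 'eo:cloud_cover<=20'}]
```
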

mapchete_eo/platforms/sentinel2/driver.py
CHANGED
@@ -1,16 +1,11 @@
 from typing import Optional, List, Tuple

-from mapchete.geometry import reproject_geometry
-from mapchete.path import MPath
 from mapchete.types import NodataVal
 from rasterio.enums import Resampling

 from mapchete_eo import base
-from mapchete_eo.archives.base import Archive
 from mapchete_eo.platforms.sentinel2.config import Sentinel2DriverConfig
 from mapchete_eo.platforms.sentinel2.preprocessing_tasks import parse_s2_product
-from mapchete_eo.search.stac_static import STACStaticCatalog
-from mapchete_eo.settings import mapchete_eo_settings
 from mapchete_eo.types import MergeMethod

 METADATA: dict = {
@@ -42,37 +37,3 @@ class InputData(base.InputData):
     driver_config_model = Sentinel2DriverConfig
     params: Sentinel2DriverConfig
     input_tile_cls = Sentinel2Cube
-
-    def set_archive(self, base_dir: MPath):
-        if self.params.cat_baseurl:
-            self.archive = Archive(
-                catalog=STACStaticCatalog(
-                    baseurl=MPath(self.params.cat_baseurl).absolute_path(
-                        base_dir=base_dir
-                    ),
-                ),
-                area=self.bbox(mapchete_eo_settings.default_catalog_crs),
-                time=self.time,
-                search_kwargs=dict(max_cloud_cover=self.params.max_cloud_cover),
-            )
-        elif self.params.archive:
-            catalog_area = reproject_geometry(
-                self.area,
-                src_crs=self.crs,
-                dst_crs=mapchete_eo_settings.default_catalog_crs,
-            )
-            self.archive = self.params.archive(
-                time=self.time,
-                bounds=catalog_area.bounds,
-                area=catalog_area,
-                search_kwargs=dict(
-                    search_index=(
-                        MPath(self.params.search_index).absolute_path(base_dir=base_dir)
-                        if self.params.search_index
-                        else None
-                    ),
-                    max_cloud_cover=self.params.max_cloud_cover,
-                ),
-            )
-        else:
-            raise ValueError("either 'archive' or 'cat_baseurl' or both is required.")

mapchete_eo/platforms/sentinel2/{path_mappers/metadata_xml.py → metadata_parser/default_path_mapper.py}
CHANGED
@@ -11,7 +11,7 @@ from typing import Optional
 from mapchete.path import MPath

 from mapchete_eo.io import open_xml
-from mapchete_eo.platforms.sentinel2.
+from mapchete_eo.platforms.sentinel2.metadata_parser.base import S2MetadataPathMapper
 from mapchete_eo.platforms.sentinel2.processing_baseline import ProcessingBaseline
 from mapchete_eo.platforms.sentinel2.types import (
     BandQI,
@@ -23,7 +23,7 @@ from mapchete_eo.platforms.sentinel2.types import (
 logger = logging.getLogger(__name__)


-class XMLMapper(
+class XMLMapper(S2MetadataPathMapper):
     def __init__(
         self, metadata_xml: MPath, xml_root: Optional[Element] = None, **kwargs
     ):