mapchete-eo 2025.10.1-py2.py3-none-any.whl → 2025.11.0-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. mapchete_eo/__init__.py +1 -1
  2. mapchete_eo/base.py +94 -54
  3. mapchete_eo/cli/options_arguments.py +11 -27
  4. mapchete_eo/cli/s2_brdf.py +1 -1
  5. mapchete_eo/cli/s2_cat_results.py +4 -20
  6. mapchete_eo/cli/s2_find_broken_products.py +4 -20
  7. mapchete_eo/cli/s2_jp2_static_catalog.py +2 -2
  8. mapchete_eo/cli/static_catalog.py +4 -45
  9. mapchete_eo/eostac.py +1 -1
  10. mapchete_eo/io/assets.py +7 -7
  11. mapchete_eo/io/items.py +36 -23
  12. mapchete_eo/io/path.py +19 -8
  13. mapchete_eo/io/products.py +22 -24
  14. mapchete_eo/platforms/sentinel2/__init__.py +1 -1
  15. mapchete_eo/platforms/sentinel2/_mapper_registry.py +89 -0
  16. mapchete_eo/platforms/sentinel2/brdf/correction.py +1 -1
  17. mapchete_eo/platforms/sentinel2/brdf/hls.py +1 -1
  18. mapchete_eo/platforms/sentinel2/brdf/models.py +1 -1
  19. mapchete_eo/platforms/sentinel2/brdf/protocols.py +1 -1
  20. mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +1 -1
  21. mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +1 -1
  22. mapchete_eo/platforms/sentinel2/config.py +73 -13
  23. mapchete_eo/platforms/sentinel2/driver.py +0 -39
  24. mapchete_eo/platforms/sentinel2/metadata_parser/__init__.py +6 -0
  25. mapchete_eo/platforms/sentinel2/{path_mappers → metadata_parser}/base.py +1 -1
  26. mapchete_eo/platforms/sentinel2/{path_mappers/metadata_xml.py → metadata_parser/default_path_mapper.py} +2 -2
  27. mapchete_eo/platforms/sentinel2/metadata_parser/models.py +78 -0
  28. mapchete_eo/platforms/sentinel2/{metadata_parser.py → metadata_parser/s2metadata.py} +51 -144
  29. mapchete_eo/platforms/sentinel2/preconfigured_sources/__init__.py +57 -0
  30. mapchete_eo/platforms/sentinel2/preconfigured_sources/guessers.py +108 -0
  31. mapchete_eo/platforms/sentinel2/preconfigured_sources/item_mappers.py +171 -0
  32. mapchete_eo/platforms/sentinel2/preconfigured_sources/metadata_xml_mappers.py +217 -0
  33. mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +22 -1
  34. mapchete_eo/platforms/sentinel2/processing_baseline.py +3 -0
  35. mapchete_eo/platforms/sentinel2/product.py +83 -18
  36. mapchete_eo/platforms/sentinel2/source.py +114 -0
  37. mapchete_eo/platforms/sentinel2/types.py +5 -0
  38. mapchete_eo/product.py +14 -8
  39. mapchete_eo/protocols.py +5 -0
  40. mapchete_eo/search/__init__.py +3 -3
  41. mapchete_eo/search/base.py +105 -92
  42. mapchete_eo/search/config.py +25 -4
  43. mapchete_eo/search/s2_mgrs.py +8 -9
  44. mapchete_eo/search/stac_search.py +96 -77
  45. mapchete_eo/search/stac_static.py +47 -91
  46. mapchete_eo/search/utm_search.py +36 -49
  47. mapchete_eo/settings.py +1 -0
  48. mapchete_eo/sort.py +4 -6
  49. mapchete_eo/source.py +107 -0
  50. {mapchete_eo-2025.10.1.dist-info → mapchete_eo-2025.11.0.dist-info}/METADATA +2 -1
  51. mapchete_eo-2025.11.0.dist-info/RECORD +89 -0
  52. {mapchete_eo-2025.10.1.dist-info → mapchete_eo-2025.11.0.dist-info}/entry_points.txt +1 -1
  53. mapchete_eo/archives/__init__.py +0 -0
  54. mapchete_eo/archives/base.py +0 -65
  55. mapchete_eo/geometry.py +0 -271
  56. mapchete_eo/known_catalogs.py +0 -42
  57. mapchete_eo/platforms/sentinel2/archives.py +0 -190
  58. mapchete_eo/platforms/sentinel2/path_mappers/__init__.py +0 -29
  59. mapchete_eo/platforms/sentinel2/path_mappers/earthsearch.py +0 -34
  60. mapchete_eo/platforms/sentinel2/path_mappers/sinergise.py +0 -105
  61. mapchete_eo-2025.10.1.dist-info/RECORD +0 -88
  62. {mapchete_eo-2025.10.1.dist-info → mapchete_eo-2025.11.0.dist-info}/WHEEL +0 -0
  63. {mapchete_eo-2025.10.1.dist-info → mapchete_eo-2025.11.0.dist-info}/licenses/LICENSE +0 -0
mapchete_eo/geometry.py DELETED
@@ -1,271 +0,0 @@
- import logging
- import math
- from functools import partial
- from typing import Callable, Iterable, Tuple
-
- from fiona.crs import CRS
- from fiona.transform import transform as fiona_transform
- from mapchete.geometry import reproject_geometry
- from mapchete.types import Bounds, CRSLike
- from shapely.geometry import (
-     GeometryCollection,
-     LinearRing,
-     LineString,
-     MultiLineString,
-     MultiPoint,
-     MultiPolygon,
-     Point,
-     Polygon,
-     box,
-     shape,
- )
- from shapely.geometry.base import BaseGeometry
- from shapely.ops import unary_union
-
- CoordArrays = Tuple[Iterable[float], Iterable[float]]
-
-
- logger = logging.getLogger(__name__)
-
-
- def transform_to_latlon(
-     geometry: BaseGeometry, src_crs: CRSLike, width_threshold: float = 180.0
- ) -> BaseGeometry:
-     """Transform a geometry to lat/lon coordinates.
-
-     If the resulting geometry crosses the Antimeridian, it is fixed by moving coordinates
-     from the Western Hemisphere to outside of the lat/lon bounds on the East, making sure
-     the correct geometry shape is preserved.
-
-     As a next step, repair_antimeridian_geometry() can be applied, which then splits up
-     this geometry into a multipart geometry where all of its subgeometries are within the
-     lat/lon bounds again.
-     """
-     latlon_crs = CRS.from_epsg(4326)
-
-     def transform_shift_coords(coords: CoordArrays) -> CoordArrays:
-         out_x_coords, out_y_coords = fiona_transform(src_crs, latlon_crs, *coords)
-         if max(out_x_coords) - min(out_x_coords) > width_threshold:
-             # we probably have an antimeridian crossing here!
-             out_x_coords, out_y_coords = coords_longitudinal_shift(
-                 coords_transform(coords, src_crs, latlon_crs), only_negative_coords=True
-             )
-         return (out_x_coords, out_y_coords)
-
-     return custom_transform(geometry, transform_shift_coords)
-
-
- def repair_antimeridian_geometry(
-     geometry: BaseGeometry, width_threshold: float = 180.0
- ) -> BaseGeometry:
-     """
-     Repair geometry and apply a fix if it crosses the Antimeridian.
-
-     A geometry crosses the Antimeridian if it is at least partly outside of the
-     lat/lon bounding box or if its width exceeds a certain threshold. This can happen
-     after reprojection if the geometry coordinates are transformed separately and land
-     left and right of the Antimeridian, thus resulting in a polygon spanning almost the
-     whole lat/lon bounding box width.
-     """
-     # repair geometry if it is broken
-     geometry = geometry.buffer(0)
-     latlon_bbox = box(-180, -90, 180, 90)
-
-     # only attempt to fix if geometry is too wide or reaches over the lat/lon bounds
-     if (
-         Bounds.from_inp(geometry).width >= width_threshold
-         or not geometry.difference(latlon_bbox).is_empty
-     ):
-         # (1) shift only coordinates on the western hemisphere by 360°, thus "fixing"
-         # the footprint, but letting it cross the antimeridian
-         shifted_geometry = longitudinal_shift(geometry, only_negative_coords=True)
-
-         # (2) split up geometry into one part outside of latlon bounds and one inside
-         inside = shifted_geometry.intersection(latlon_bbox)
-         outside = shifted_geometry.difference(latlon_bbox)
-
-         # (3) shift back only the polygon outside of latlon bounds by -360, thus moving
-         # it back to the western hemisphere
-         outside_shifted = longitudinal_shift(
-             outside, offset=-360, only_negative_coords=False
-         )
-
-         # (4) create a MultiPolygon out of these two polygons
-         geometry = unary_union([inside, outside_shifted])
-
-     return geometry
-
-
- def buffer_antimeridian_safe(
-     footprint: BaseGeometry, buffer_m: float = 0
- ) -> BaseGeometry:
-     """Buffer geometry by meters and make it Antimeridian-safe.
-
-     Safe means that if it crosses the Antimeridian and is a MultiPolygon,
-     the buffer will only be applied to the edges facing away from the Antimeridian,
-     thus leaving the polygon intact if shifted back.
-     """
-     if footprint.is_empty:
-         return footprint
-
-     # repair geometry if it is broken
-     footprint = footprint.buffer(0)
-
-     if not buffer_m:
-         return footprint
-
-     if isinstance(footprint, MultiPolygon):
-         # we have a shifted footprint here!
-         # (1) unshift one part
-         subpolygons = []
-         for polygon in footprint.geoms:
-             lon = polygon.centroid.x
-             if lon < 0:
-                 polygon = longitudinal_shift(polygon)
-             subpolygons.append(polygon)
-         # (2) merge to single polygon
-         merged = unary_union(subpolygons)
-
-         # (3) apply buffer
-         if isinstance(merged, MultiPolygon):
-             buffered = unary_union(
-                 [
-                     buffer_antimeridian_safe(polygon, buffer_m=buffer_m)
-                     for polygon in merged.geoms
-                 ]
-             )
-         else:
-             buffered = buffer_antimeridian_safe(merged, buffer_m=buffer_m)
-
-         # (4) fix again
-         return repair_antimeridian_geometry(buffered)
-
-     # UTM zone CRS
-     utm_crs = latlon_to_utm_crs(footprint.centroid.y, footprint.centroid.x)
-     latlon_crs = CRS.from_string("EPSG:4326")
-
-     return transform_to_latlon(
-         reproject_geometry(
-             footprint, src_crs=latlon_crs, dst_crs=utm_crs, clip_to_crs_bounds=False
-         ).buffer(buffer_m),
-         src_crs=utm_crs,
-     )
-
-
- def longitudinal_shift(
-     geometry: BaseGeometry, offset: float = 360.0, only_negative_coords: bool = False
- ) -> BaseGeometry:
-     """Return geometry with either all or only Western Hemisphere coordinates shifted by some offset."""
-     return custom_transform(
-         geometry,
-         partial(
-             coords_longitudinal_shift,
-             by=offset,
-             only_negative_coords=only_negative_coords,
-         ),
-     )
-
-
- def latlon_to_utm_crs(lat: float, lon: float) -> CRS:
-     min_zone = 1
-     max_zone = 60
-     utm_zone = (
-         f"{max([min([(math.floor((lon + 180) / 6) + 1), max_zone]), min_zone]):02}"
-     )
-     hemisphere_code = "7" if lat <= 0 else "6"
-     return CRS.from_string(f"EPSG:32{hemisphere_code}{utm_zone}")
-
-
- def bounds_to_geom(bounds: Bounds) -> BaseGeometry:
-     # TODO: move into core package
-     if bounds.left < -180:
-         part1 = Bounds(-180, bounds.bottom, bounds.right, bounds.top)
-         part2 = Bounds(bounds.left + 360, bounds.bottom, 180, bounds.top)
-         return unary_union([shape(part1), shape(part2)])
-     elif bounds.right > 180:
-         part1 = Bounds(-180, bounds.bottom, bounds.right - 360, bounds.top)
-         part2 = Bounds(bounds.left, bounds.bottom, 180, bounds.top)
-         return unary_union([shape(part1), shape(part2)])
-     else:
-         return shape(bounds)
-
-
- def custom_transform(geometry: BaseGeometry, func: Callable) -> BaseGeometry:
-     # TODO: shapely.transform.transform could probably simplify this code
-     # https://shapely.readthedocs.io/en/stable/reference/shapely.transform.html#shapely.transform
-     def _point(point: Point) -> Point:
-         return Point(zip(*func(point.xy)))
-
-     def _multipoint(multipoint: MultiPoint) -> MultiPoint:
-         return MultiPoint([_point(point) for point in multipoint])
-
-     def _linestring(linestring: LineString) -> LineString:
-         return LineString(zip(*func(linestring.xy)))
-
-     def _multilinestring(multilinestring: MultiLineString) -> MultiLineString:
-         return MultiLineString(
-             [_linestring(linestring) for linestring in multilinestring.geoms]
-         )
-
-     def _linearring(linearring: LinearRing) -> LinearRing:
-         return LinearRing(((x, y) for x, y in zip(*func(linearring.xy))))
-
-     def _polygon(polygon: Polygon) -> Polygon:
-         return Polygon(
-             _linearring(polygon.exterior),
-             holes=list(map(_linearring, polygon.interiors)),
-         )
-
-     def _multipolygon(multipolygon: MultiPolygon) -> MultiPolygon:
-         return MultiPolygon([_polygon(polygon) for polygon in multipolygon.geoms])
-
-     def _geometrycollection(
-         geometrycollection: GeometryCollection,
-     ) -> GeometryCollection:
-         return GeometryCollection(
-             [_any_geometry(subgeometry) for subgeometry in geometrycollection.geoms]
-         )
-
-     def _any_geometry(geometry: BaseGeometry) -> BaseGeometry:
-         transform_funcs = {
-             Point: _point,
-             MultiPoint: _multipoint,
-             LineString: _linestring,
-             MultiLineString: _multilinestring,
-             Polygon: _polygon,
-             MultiPolygon: _multipolygon,
-             GeometryCollection: _geometrycollection,
-         }
-         try:
-             return transform_funcs[type(geometry)](geometry)
-         except KeyError:
-             raise TypeError(f"unknown geometry {geometry} of type {type(geometry)}")
-
-     if geometry.is_empty:
-         return geometry
-
-     # make valid by buffering
-     return _any_geometry(geometry).buffer(0)
-
-
- def coords_transform(
-     coords: CoordArrays, src_crs: CRSLike, dst_crs: CRSLike
- ) -> CoordArrays:
-     return fiona_transform(src_crs, dst_crs, *coords)
-
-
- def coords_longitudinal_shift(
-     coords: CoordArrays,
-     by: float = 360,
-     only_negative_coords: bool = False,
- ) -> CoordArrays:
-     x_coords, y_coords = coords
-     x_coords = (
-         (
-             x_coord + by
-             if (only_negative_coords and x_coord < 0) or not only_negative_coords
-             else x_coord
-         )
-         for x_coord in x_coords
-     )
-     return x_coords, y_coords
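Note: a minimal usage sketch of the removed antimeridian helpers (hypothetical; it assumes mapchete-eo 2025.10.1, where mapchete_eo.geometry still exists, and the example coordinates are made up):

from shapely.geometry import Polygon

from mapchete_eo.geometry import repair_antimeridian_geometry

# a footprint whose corners landed on both sides of the Antimeridian after
# reprojection, so it spans nearly the whole lat/lon width
footprint = Polygon([(179.5, 0.0), (-179.5, 0.0), (-179.5, 1.0), (179.5, 1.0)])

# western-hemisphere coordinates are shifted by 360°, the result is split at the
# lat/lon bounds and the outside part is shifted back again
repaired = repair_antimeridian_geometry(footprint)
print(repaired.geom_type)  # expected: MultiPolygon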
mapchete_eo/known_catalogs.py DELETED
@@ -1,42 +0,0 @@
- """
- Catalogs define access to a search interface which provides products
- as pystac Items.
- """
-
- from typing import List
-
- from mapchete_eo.search import STACSearchCatalog, UTMSearchCatalog
-
-
- class EarthSearchV1S2L2A(STACSearchCatalog):
-     """Earth-Search catalog for Sentinel-2 Level 2A COGs."""
-
-     endpoint: str = "https://earth-search.aws.element84.com/v1/"
-
-
- class CDSESearch(STACSearchCatalog):
-     """Copernicus Data Space Ecosystem (CDSE) STAC API."""
-
-     endpoint: str = "https://stac.dataspace.copernicus.eu/v1"
-
-
- class PlanetaryComputerSearch(STACSearchCatalog):
-     """Planetary Computer Search."""
-
-     endpoint: str = "https://planetarycomputer.microsoft.com/api/stac/v1/"
-
-
- class AWSSearchCatalogS2L2A(UTMSearchCatalog):
-     """
-     Not a search endpoint, just a static STAC collection with items stored separately.
-     Needs a custom parser/browser to find scenes based on date and UTM MGRS granule.
-
-     https://sentinel-s2-l2a-stac.s3.amazonaws.com/sentinel-s2-l2a.json
-     """
-
-     id: str = "sentinel-s2-l2a"
-     endpoint: str = "s3://sentinel-s2-l2a-stac/"
-     day_subdir_schema: str = "{year}/{month:02d}/{day:02d}"
-     stac_json_endswith: str = "T{tile_id}.json"
-     description: str = "Sentinel-2 L2A JPEG2000 archive on AWS."
-     stac_extensions: List[str] = []
mapchete_eo/platforms/sentinel2/archives.py DELETED
@@ -1,190 +0,0 @@
- from __future__ import annotations
-
- from enum import Enum
- from typing import Any, Type
-
- from mapchete.path import MPath
- from pydantic import ValidationError
- from pydantic.functional_validators import BeforeValidator
- from pystac import Item
- from typing_extensions import Annotated
-
- from mapchete_eo.archives.base import Archive
- from mapchete_eo.io.items import item_fix_footprint
- from mapchete_eo.known_catalogs import (
-     AWSSearchCatalogS2L2A,
-     CDSESearch,
-     EarthSearchV1S2L2A,
- )
- from mapchete_eo.platforms.sentinel2.types import ProcessingLevel
- from mapchete_eo.search.s2_mgrs import S2Tile
-
-
- def known_archive(v: Any, **args) -> Type[Archive]:
-     if isinstance(v, str):
-         return KnownArchives[v].value
-     elif isinstance(v, type(Archive)):
-         return v
-     else:
-         raise ValidationError(f"cannot validate {v} to archive")
-
-
- ArchiveClsFromString = Annotated[Type[Archive], BeforeValidator(known_archive)]
-
-
- def add_datastrip_id(item: Item) -> Item:
-     """Make sure item metadata follows the standard."""
-     # change 'sentinel2' prefix to 's2'
-     properties = {k.replace("sentinel2:", "s2:"): v for k, v in item.properties.items()}
-
-     # add datastrip id as 's2:datastrip_id'
-     if not properties.get("s2:datastrip_id"):
-         from mapchete_eo.platforms.sentinel2 import S2Metadata
-
-         s2_metadata = S2Metadata.from_stac_item(item)
-         properties["s2:datastrip_id"] = s2_metadata.datastrip_id
-
-     item.properties = properties
-     return item
-
-
- def map_cdse_paths_to_jp2_archive(item: Item) -> Item:
-     """
-     CDSE has the following assets:
-     AOT_10m, AOT_20m, AOT_60m, B01_20m, B01_60m, B02_10m, B02_20m, B02_60m, B03_10m, B03_20m,
-     B03_60m, B04_10m, B04_20m, B04_60m, B05_20m, B05_60m, B06_20m, B06_60m, B07_20m, B07_60m,
-     B08_10m, B09_60m, B11_20m, B11_60m, B12_20m, B12_60m, B8A_20m, B8A_60m, Product, SCL_20m,
-     SCL_60m, TCI_10m, TCI_20m, TCI_60m, WVP_10m, WVP_20m, WVP_60m, thumbnail, safe_manifest,
-     granule_metadata, inspire_metadata, product_metadata, datastrip_metadata
-
-     sample path for AWS JP2:
-     s3://sentinel-s2-l2a/tiles/51/K/XR/2020/7/31/0/R10m/
-     """
-     band_name_mapping = {
-         "AOT_10m": "aot",
-         "B01_20m": "coastal",
-         "B02_10m": "blue",
-         "B03_10m": "green",
-         "B04_10m": "red",
-         "B05_20m": "rededge1",
-         "B06_20m": "rededge2",
-         "B07_20m": "rededge3",
-         "B08_10m": "nir",
-         "B09_60m": "nir09",
-         "B11_20m": "swir16",
-         "B12_20m": "swir22",
-         "B8A_20m": "nir08",
-         "SCL_20m": "scl",
-         "TCI_10m": "visual",
-         "WVP_10m": "wvp",
-     }
-     path_base_scheme = "s3://sentinel-s2-l2a/tiles/{utm_zone}/{latitude_band}/{grid_square}/{year}/{month}/{day}/{count}"
-     s2tile = S2Tile.from_grid_code(item.properties["grid:code"])
-     if item.datetime is None:
-         raise ValueError(f"product {item.get_self_href()} does not have a timestamp")
-     product_basepath = MPath(
-         path_base_scheme.format(
-             utm_zone=s2tile.utm_zone,
-             latitude_band=s2tile.latitude_band,
-             grid_square=s2tile.grid_square,
-             year=item.datetime.year,
-             month=item.datetime.month,
-             day=item.datetime.day,
-             count=0,  # TODO: get count dynamically from metadata
-         )
-     )
-     new_assets = {}
-     for asset_name, asset in item.assets.items():
-         # ignore these assets
-         if asset_name in [
-             "Product",
-             "safe_manifest",
-             "product_metadata",
-             "inspire_metadata",
-             "datastrip_metadata",
-         ]:
-             continue
-         # set thumbnail
-         elif asset_name == "thumbnail":
-             asset.href = str(product_basepath / "R60m" / "TCI.jp2")
-         # point to proper metadata
-         elif asset_name == "granule_metadata":
-             asset.href = str(product_basepath / "metadata.xml")
-         # change band asset names and point to their new locations
-         elif asset_name in band_name_mapping:
-             name, resolution = asset_name.split("_")
-             asset.href = product_basepath / f"R{resolution}" / f"{name}.jp2"
-             asset_name = band_name_mapping[asset_name]
-         else:
-             continue
-         new_assets[asset_name] = asset
-
-     item.assets = new_assets
-
-     item.properties["s2:datastrip_id"] = item.properties.get("eopf:datastrip_id")
-     return item
-
-
- class AWSL2ACOGv1(Archive):
-     """COG archive on AWS using the E84 STAC search endpoint."""
-
-     catalog = EarthSearchV1S2L2A(
-         collections=["sentinel-2-l2a"],
-     )
-     item_modifier_funcs = [
-         item_fix_footprint,
-     ]
-     processing_level = ProcessingLevel.level2a
-
-
- class AWSL2AJP2(Archive):
-     """
-     JPEG2000 archive on AWS using a dumb S3 path guesser.
-     """
-
-     catalog = AWSSearchCatalogS2L2A(
-         collections=["sentinel-s2-l2a"],
-     )
-     item_modifier_funcs = [
-         item_fix_footprint,
-         add_datastrip_id,
-     ]
-     processing_level = ProcessingLevel.level2a
-
-
- class AWSL2AJP2CSDE(Archive):
-     """
-     JPEG2000 archive on AWS using the CDSE STAC search endpoint.
-     """
-
-     catalog = CDSESearch(
-         collections=["sentinel-2-l2a"],
-     )
-     item_modifier_funcs = [
-         item_fix_footprint,
-         map_cdse_paths_to_jp2_archive,
-         add_datastrip_id,
-     ]
-     processing_level = ProcessingLevel.level2a
-
-
- class CDSEL2AJP2CSDE(Archive):
-     """
-     JPEG2000 archive on CDSE (EODATA S3) using the CDSE STAC search endpoint.
-     """
-
-     catalog = CDSESearch(
-         collections=["sentinel-2-l2a"],
-     )
-     item_modifier_funcs = [
-         item_fix_footprint,
-         add_datastrip_id,
-     ]
-     processing_level = ProcessingLevel.level2a
-
-
- class KnownArchives(Enum):
-     S2AWS_COG = AWSL2ACOGv1
-     S2AWS_JP2 = AWSL2AJP2
-     S2CDSE_AWSJP2 = AWSL2AJP2CSDE
-     S2CDSE_JP2 = CDSEL2AJP2CSDE
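Note: a sketch of how the removed archive registry resolved names (hypothetical; assumes mapchete-eo 2025.10.1 and that the catalog objects built at import time can be constructed offline). In 2025.11.0 this role is taken over by the new _mapper_registry.py and preconfigured_sources modules listed above.

from mapchete_eo.platforms.sentinel2.archives import KnownArchives, known_archive

# string input is resolved through the KnownArchives enum, exactly as the
# known_archive() BeforeValidator does when used inside a pydantic model
archive_cls = known_archive("S2AWS_COG")
assert archive_cls is KnownArchives["S2AWS_COG"].value
print(archive_cls.processing_level)  # ProcessingLevel.level2a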
mapchete_eo/platforms/sentinel2/path_mappers/__init__.py DELETED
@@ -1,29 +0,0 @@
- from mapchete_eo.platforms.sentinel2.path_mappers.base import S2PathMapper
- from mapchete_eo.platforms.sentinel2.path_mappers.earthsearch import (
-     EarthSearchPathMapper,
- )
- from mapchete_eo.platforms.sentinel2.path_mappers.metadata_xml import XMLMapper
- from mapchete_eo.platforms.sentinel2.path_mappers.sinergise import SinergisePathMapper
-
-
- def default_path_mapper_guesser(
-     url: str,
-     **kwargs,
- ) -> S2PathMapper:
-     """Guess the S2PathMapper based on a URL.
-
-     If a new path mapper is added in this module, it should also be added to this function
-     in order to be detected.
-     """
-     if url.startswith(
-         ("https://roda.sentinel-hub.com/sentinel-s2-l2a/", "s3://sentinel-s2-l2a/")
-     ) or url.startswith(
-         ("https://roda.sentinel-hub.com/sentinel-s2-l1c/", "s3://sentinel-s2-l1c/")
-     ):
-         return SinergisePathMapper(url, **kwargs)
-     elif url.startswith(
-         "https://sentinel-cogs.s3.us-west-2.amazonaws.com/sentinel-s2-l2a-cogs/"
-     ):
-         return EarthSearchPathMapper(url, **kwargs)
-     else:
-         return XMLMapper(url, **kwargs)
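Note: a sketch of the removed guesser's dispatch logic (hypothetical; assumes mapchete-eo 2025.10.1, and the granule path below is only illustrative):

from mapchete_eo.platforms.sentinel2.path_mappers import default_path_mapper_guesser

# URLs under the Sinergise buckets resolve to SinergisePathMapper; unknown URLs
# fall back to the XMLMapper, which reads the granule metadata XML directly
mapper = default_path_mapper_guesser(
    "s3://sentinel-s2-l2a/tiles/51/K/XR/2020/7/31/0/metadata.xml"
)
print(type(mapper).__name__)  # SinergisePathMapper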
mapchete_eo/platforms/sentinel2/path_mappers/earthsearch.py DELETED
@@ -1,34 +0,0 @@
- from mapchete.path import MPath
-
- from mapchete_eo.platforms.sentinel2.path_mappers.sinergise import SinergisePathMapper
- from mapchete_eo.platforms.sentinel2.processing_baseline import ProcessingBaseline
-
-
- class EarthSearchPathMapper(SinergisePathMapper):
-     """
-     The COG archive maintained by E84 and covered by EarthSearch does not hold additional data
-     such as the GML files. This class maps the metadata masks to the current EarthSearch product.
-
-     e.g.:
-     B01 detector footprints: s3://sentinel-s2-l2a/tiles/51/K/XR/2020/7/31/0/qi/MSK_DETFOO_B01.gml
-     Cloud masks: s3://sentinel-s2-l2a/tiles/51/K/XR/2020/7/31/0/qi/MSK_CLOUDS_B00.gml
-
-     Newer products, however:
-     B01 detector footprints: s3://sentinel-s2-l2a/tiles/51/K/XR/2022/6/6/0/qi/DETFOO_B01.jp2
-     no vector cloud masks available anymore
-     """
-
-     def __init__(
-         self,
-         metadata_xml: MPath,
-         alternative_metadata_baseurl: str = "sentinel-s2-l2a",
-         protocol: str = "s3",
-         baseline_version: str = "04.00",
-         **kwargs,
-     ):
-         basedir = metadata_xml.parent
-         self._path = (basedir / "tileinfo_metadata.json").read_json()["path"]
-         self._utm_zone, self._latitude_band, self._grid_square = basedir.elements[-6:-3]
-         self._baseurl = alternative_metadata_baseurl
-         self._protocol = protocol
-         self.processing_baseline = ProcessingBaseline.from_version(baseline_version)
mapchete_eo/platforms/sentinel2/path_mappers/sinergise.py DELETED
@@ -1,105 +0,0 @@
- from mapchete.path import MPath, MPathLike
-
- from mapchete_eo.platforms.sentinel2.path_mappers.base import S2PathMapper
- from mapchete_eo.platforms.sentinel2.processing_baseline import ProcessingBaseline
- from mapchete_eo.platforms.sentinel2.types import (
-     BandQI,
-     L2ABand,
-     ProductQI,
-     ProductQIMaskResolution,
- )
-
-
- class SinergisePathMapper(S2PathMapper):
-     """
-     Return the true paths of product quality assets from the Sinergise S2 bucket.
-
-     e.g.:
-     B01 detector footprints: s3://sentinel-s2-l2a/tiles/51/K/XR/2020/7/31/0/qi/MSK_DETFOO_B01.gml
-     Cloud masks: s3://sentinel-s2-l2a/tiles/51/K/XR/2020/7/31/0/qi/MSK_CLOUDS_B00.gml
-
-     Newer products, however:
-     B01 detector footprints: s3://sentinel-s2-l2a/tiles/51/K/XR/2022/6/6/0/qi/DETFOO_B01.jp2
-     no vector cloud masks available anymore
-     """
-
-     _PRE_0400_MASK_PATHS = {
-         ProductQI.classification: "MSK_CLOUDS_B00.gml",
-         ProductQI.cloud_probability: "CLD_{resolution}.jp2",  # are they really there?
-         ProductQI.snow_probability: "SNW_{resolution}.jp2",  # are they really there?
-         BandQI.detector_footprints: "MSK_DETFOO_{band_identifier}.gml",
-         BandQI.technical_quality: "MSK_TECQUA_{band_identifier}.gml",
-     }
-     _POST_0400_MASK_PATHS = {
-         ProductQI.classification: "CLASSI_B00.jp2",
-         ProductQI.cloud_probability: "CLD_{resolution}.jp2",
-         ProductQI.snow_probability: "SNW_{resolution}.jp2",
-         BandQI.detector_footprints: "DETFOO_{band_identifier}.jp2",
-         BandQI.technical_quality: "QUALIT_{band_identifier}.jp2",
-     }
-
-     def __init__(
-         self,
-         url: MPathLike,
-         bucket: str = "sentinel-s2-l2a",
-         protocol: str = "s3",
-         baseline_version: str = "04.00",
-         **kwargs,
-     ):
-         url = MPath.from_inp(url)
-         tileinfo_path = url.parent / "tileInfo.json"
-         self._path = MPath(
-             "/".join(tileinfo_path.elements[-9:-1]), **tileinfo_path._kwargs
-         )
-         self._utm_zone, self._latitude_band, self._grid_square = self._path.split("/")[
-             1:-4
-         ]
-         self._baseurl = bucket
-         self._protocol = protocol
-         self.processing_baseline = ProcessingBaseline.from_version(baseline_version)
-
-     def product_qi_mask(
-         self,
-         qi_mask: ProductQI,
-         resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"],
-     ) -> MPath:
-         """Determine the product QI mask path according to the Sinergise bucket schema."""
-         if self.processing_baseline.version < "04.00":
-             mask_path = self._PRE_0400_MASK_PATHS[qi_mask]
-         else:
-             mask_path = self._POST_0400_MASK_PATHS[qi_mask]
-         key = f"{self._path}/qi/{mask_path.format(resolution=resolution.name)}"
-         return MPath.from_inp(f"{self._protocol}://{self._baseurl}/{key}")
-
-     def classification_mask(self) -> MPath:
-         return self.product_qi_mask(ProductQI.classification)
-
-     def cloud_probability_mask(
-         self, resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"]
-     ) -> MPath:
-         return self.product_qi_mask(ProductQI.cloud_probability, resolution=resolution)
-
-     def snow_probability_mask(
-         self, resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"]
-     ) -> MPath:
-         return self.product_qi_mask(ProductQI.snow_probability, resolution=resolution)
-
-     def band_qi_mask(self, qi_mask: BandQI, band: L2ABand) -> MPath:
-         """Determine the band QI mask path according to the Sinergise bucket schema."""
-         try:
-             if self.processing_baseline.version < "04.00":
-                 mask_path = self._PRE_0400_MASK_PATHS[qi_mask]
-             else:
-                 mask_path = self._POST_0400_MASK_PATHS[qi_mask]
-         except KeyError:
-             raise DeprecationWarning(
-                 f"'{qi_mask.name}' quality mask not found in this product"
-             )
-         key = f"{self._path}/qi/{mask_path.format(band_identifier=band.name)}"
-         return MPath.from_inp(f"{self._protocol}://{self._baseurl}/{key}")
-
-     def technical_quality_mask(self, band: L2ABand) -> MPath:
-         return self.band_qi_mask(BandQI.technical_quality, band)
-
-     def detector_footprints(self, band: L2ABand) -> MPath:
-         return self.band_qi_mask(BandQI.detector_footprints, band)
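Note: a sketch of the removed Sinergise path mapping (hypothetical; assumes mapchete-eo 2025.10.1, that L2ABand.B01 names band 1, and an illustrative granule path — building the paths requires no S3 access):

from mapchete_eo.platforms.sentinel2.path_mappers.sinergise import SinergisePathMapper
from mapchete_eo.platforms.sentinel2.types import L2ABand

mapper = SinergisePathMapper(
    "s3://sentinel-s2-l2a/tiles/51/K/XR/2020/7/31/0/metadata.xml",
    baseline_version="04.00",
)
# baselines >= 04.00 use the JP2 mask naming instead of the older GML files
print(mapper.detector_footprints(L2ABand.B01))
# s3://sentinel-s2-l2a/tiles/51/K/XR/2020/7/31/0/qi/DETFOO_B01.jp2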