mapchete-eo 2026.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapchete_eo/__init__.py +1 -0
- mapchete_eo/array/__init__.py +0 -0
- mapchete_eo/array/buffer.py +16 -0
- mapchete_eo/array/color.py +29 -0
- mapchete_eo/array/convert.py +163 -0
- mapchete_eo/base.py +653 -0
- mapchete_eo/blacklist.txt +175 -0
- mapchete_eo/cli/__init__.py +30 -0
- mapchete_eo/cli/bounds.py +22 -0
- mapchete_eo/cli/options_arguments.py +227 -0
- mapchete_eo/cli/s2_brdf.py +77 -0
- mapchete_eo/cli/s2_cat_results.py +130 -0
- mapchete_eo/cli/s2_find_broken_products.py +77 -0
- mapchete_eo/cli/s2_jp2_static_catalog.py +166 -0
- mapchete_eo/cli/s2_mask.py +71 -0
- mapchete_eo/cli/s2_mgrs.py +45 -0
- mapchete_eo/cli/s2_rgb.py +114 -0
- mapchete_eo/cli/s2_verify.py +129 -0
- mapchete_eo/cli/static_catalog.py +82 -0
- mapchete_eo/eostac.py +30 -0
- mapchete_eo/exceptions.py +87 -0
- mapchete_eo/image_operations/__init__.py +12 -0
- mapchete_eo/image_operations/blend_functions.py +579 -0
- mapchete_eo/image_operations/color_correction.py +136 -0
- mapchete_eo/image_operations/compositing.py +266 -0
- mapchete_eo/image_operations/dtype_scale.py +43 -0
- mapchete_eo/image_operations/fillnodata.py +130 -0
- mapchete_eo/image_operations/filters.py +319 -0
- mapchete_eo/image_operations/linear_normalization.py +81 -0
- mapchete_eo/image_operations/sigmoidal.py +114 -0
- mapchete_eo/io/__init__.py +37 -0
- mapchete_eo/io/assets.py +496 -0
- mapchete_eo/io/items.py +162 -0
- mapchete_eo/io/levelled_cubes.py +259 -0
- mapchete_eo/io/path.py +155 -0
- mapchete_eo/io/products.py +423 -0
- mapchete_eo/io/profiles.py +45 -0
- mapchete_eo/platforms/sentinel2/__init__.py +17 -0
- mapchete_eo/platforms/sentinel2/_mapper_registry.py +89 -0
- mapchete_eo/platforms/sentinel2/bandpass_adjustment.py +104 -0
- mapchete_eo/platforms/sentinel2/brdf/__init__.py +8 -0
- mapchete_eo/platforms/sentinel2/brdf/config.py +32 -0
- mapchete_eo/platforms/sentinel2/brdf/correction.py +260 -0
- mapchete_eo/platforms/sentinel2/brdf/hls.py +251 -0
- mapchete_eo/platforms/sentinel2/brdf/models.py +44 -0
- mapchete_eo/platforms/sentinel2/brdf/protocols.py +27 -0
- mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +136 -0
- mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +76 -0
- mapchete_eo/platforms/sentinel2/config.py +241 -0
- mapchete_eo/platforms/sentinel2/driver.py +43 -0
- mapchete_eo/platforms/sentinel2/masks.py +329 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/__init__.py +6 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/base.py +56 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/default_path_mapper.py +135 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/models.py +78 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/s2metadata.py +639 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/__init__.py +57 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/guessers.py +108 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/item_mappers.py +171 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/metadata_xml_mappers.py +217 -0
- mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +50 -0
- mapchete_eo/platforms/sentinel2/processing_baseline.py +163 -0
- mapchete_eo/platforms/sentinel2/product.py +747 -0
- mapchete_eo/platforms/sentinel2/source.py +114 -0
- mapchete_eo/platforms/sentinel2/types.py +114 -0
- mapchete_eo/processes/__init__.py +0 -0
- mapchete_eo/processes/config.py +51 -0
- mapchete_eo/processes/dtype_scale.py +112 -0
- mapchete_eo/processes/eo_to_xarray.py +19 -0
- mapchete_eo/processes/merge_rasters.py +239 -0
- mapchete_eo/product.py +323 -0
- mapchete_eo/protocols.py +61 -0
- mapchete_eo/search/__init__.py +14 -0
- mapchete_eo/search/base.py +285 -0
- mapchete_eo/search/config.py +113 -0
- mapchete_eo/search/s2_mgrs.py +313 -0
- mapchete_eo/search/stac_search.py +278 -0
- mapchete_eo/search/stac_static.py +197 -0
- mapchete_eo/search/utm_search.py +251 -0
- mapchete_eo/settings.py +25 -0
- mapchete_eo/sort.py +60 -0
- mapchete_eo/source.py +109 -0
- mapchete_eo/time.py +62 -0
- mapchete_eo/types.py +76 -0
- mapchete_eo-2026.2.0.dist-info/METADATA +91 -0
- mapchete_eo-2026.2.0.dist-info/RECORD +89 -0
- mapchete_eo-2026.2.0.dist-info/WHEEL +4 -0
- mapchete_eo-2026.2.0.dist-info/entry_points.txt +11 -0
- mapchete_eo-2026.2.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from typing import Any, Literal, Optional
|
|
3
|
+
|
|
4
|
+
import click
|
|
5
|
+
import click_spinner
|
|
6
|
+
|
|
7
|
+
from shapely.geometry import mapping, MultiPolygon, Polygon, shape
|
|
8
|
+
|
|
9
|
+
from mapchete.cli.options import opt_bounds, opt_debug
|
|
10
|
+
from mapchete.io import fiona_open
|
|
11
|
+
from mapchete.path import MPath
|
|
12
|
+
from mapchete.types import Bounds
|
|
13
|
+
|
|
14
|
+
from mapchete_eo.cli import options_arguments
|
|
15
|
+
from mapchete_eo.io.products import Slice, products_to_slices
|
|
16
|
+
from mapchete_eo.platforms.sentinel2.product import S2Product
|
|
17
|
+
from mapchete_eo.platforms.sentinel2.source import Sentinel2Source
|
|
18
|
+
from mapchete_eo.sort import TargetDateSort
|
|
19
|
+
from mapchete_eo.types import TimeRange
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@click.command()
@options_arguments.arg_dst_path
@options_arguments.opt_start_time
@options_arguments.opt_end_time
@opt_bounds
@options_arguments.opt_mgrs_tile
@options_arguments.opt_source
@click.option(
    "--format",
    type=click.Choice(["FlatGeobuf", "GeoJSON"]),
    help="Format of output file.",
)
@click.option("--by-slices", is_flag=True, help="Merge product to slices.")
@click.option(
    "--add-index", is_flag=True, help="Add unique indexes to products/slices."
)
@opt_debug
def s2_cat_results(
    dst_path: MPath,
    start_time: datetime,
    end_time: datetime,
    bounds: Optional[Bounds] = None,
    mgrs_tile: Optional[str] = None,
    source: Sentinel2Source = Sentinel2Source(collection="EarthSearch"),
    format: Literal["FlatGeobuf", "GeoJSON"] = "FlatGeobuf",
    by_slices: bool = False,
    add_index: bool = False,
    debug: bool = False,
):
    """Write a search result."""
    # start/end time have no option-level requirement, so enforce them here.
    if any([start_time is None, end_time is None]):  # pragma: no cover
        raise click.ClickException("--start-time and --end-time are mandatory")
    # A spatial filter is mandatory: either explicit bounds or an MGRS tile id.
    if all([bounds is None, mgrs_tile is None]):  # pragma: no cover
        raise click.ClickException("--bounds or --mgrs-tile are required")
    # Property used both for grouping products into slices and as the
    # "slice_id" value written into the output schema (see get_value()).
    slice_property_key = "s2:datastrip_id"
    # Spinner only covers the (potentially slow) catalog search, not the write.
    with click_spinner.Spinner(disable=debug):
        catalog = source.get_catalog()
        slices = products_to_slices(
            [
                S2Product.from_stac_item(item)
                for item in catalog.search(
                    time=TimeRange(start=start_time, end=end_time),
                    bounds=bounds,
                    search_kwargs=dict(mgrs_tile=mgrs_tile),
                )
            ],
            # without grouping, every product becomes its own slice
            group_by_property=slice_property_key if by_slices else None,
            sort=TargetDateSort(target_date=start_time),
        )
    if slices:
        # schema columns depend on --by-slices / --add-index (see get_schema())
        schema = get_schema(by_slices=by_slices, add_index=add_index)
        with fiona_open(
            dst_path, mode="w", schema=schema, crs="EPSG:4326", format=format
        ) as dst:
            # 1-based index is written out when --add-index is set
            for index, _slice in enumerate(slices, start=1):
                # 2025-4 agreed to make outputs multipolygons
                # Convert the _slice.__geom_interface__ to Multipolygon if not the case

                # Ensure the result is always a MultiPolygon even if only single Polygon is returned
                # Else split features should come here as MultiPolygons
                # NOTE(review): attribute is spelled __geom_interface__, not the
                # conventional __geo_interface__ — presumably a Slice-specific
                # property returning a GeoJSON-like dict; confirm against Slice.
                slice_shape = shape(_slice.__geom_interface__)
                if isinstance(slice_shape, Polygon):
                    slice_multipolygon = mapping(MultiPolygon([slice_shape]))
                else:
                    slice_multipolygon = _slice.__geom_interface__

                out_feature = {
                    "geometry": slice_multipolygon,
                    "properties": {
                        # resolve each schema column from the slice
                        key: get_value(_slice, key, index, slice_property_key)
                        for key in schema["properties"].keys()
                    },
                }
                dst.write(out_feature)
    else:
        click.echo("No results found.")
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def get_schema(
    by_slices: bool, add_index: bool, geometry_type: str = "MultiPolygon"
) -> dict:
    """Build the fiona schema for catalog result output.

    Slice-level output only carries timestamp and slice id; product-level
    output additionally carries cloud cover and the product id. An integer
    "index" column is appended when requested.
    """
    slice_columns = {
        "timestamp": "str",
        "slice_id": "str",
    }
    product_columns = {
        "eo:cloud_cover": "float",
        "timestamp": "str",
        "slice_id": "str",
        "product_id": "str",
    }
    properties = dict(slice_columns if by_slices else product_columns)
    if add_index:
        properties["index"] = "int"
    return {"geometry": geometry_type, "properties": properties}
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def get_value(_slice: Slice, key: str, index: int, slice_property_key: str) -> Any:
    """Resolve one output schema column for a slice.

    "index" maps to the running feature index, "slice_id" to the property
    named by slice_property_key, "product_id" to the first product's STAC
    item id and "timestamp" to the slice datetime; any other key is looked
    up directly as a slice property.
    """
    if key == "index":
        return index
    if key == "timestamp":
        return _slice.datetime
    if key == "product_id":
        return _slice.products[0].item.id
    if key == "slice_id":
        return _slice.get_property(slice_property_key)
    return _slice.get_property(key)
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from typing import List, Optional
|
|
3
|
+
|
|
4
|
+
import click
|
|
5
|
+
from mapchete.cli.options import opt_bounds, opt_debug
|
|
6
|
+
from mapchete.path import MPath
|
|
7
|
+
from mapchete.types import Bounds
|
|
8
|
+
from tqdm import tqdm
|
|
9
|
+
|
|
10
|
+
from mapchete_eo.cli import options_arguments
|
|
11
|
+
from mapchete_eo.cli.s2_verify import verify_item
|
|
12
|
+
from mapchete_eo.platforms.sentinel2.source import Sentinel2Source
|
|
13
|
+
from mapchete_eo.product import add_to_blacklist, blacklist_products
|
|
14
|
+
from mapchete_eo.types import TimeRange
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@click.command()
@opt_bounds
@options_arguments.opt_start_time
@options_arguments.opt_end_time
@options_arguments.opt_source
@options_arguments.opt_assets
@options_arguments.opt_blacklist
@options_arguments.opt_thumbnail_dir
@opt_debug
def s2_find_broken_products(
    start_time: datetime,
    end_time: datetime,
    bounds: Optional[Bounds] = None,
    # NOTE(review): there is no --mgrs-tile option decorator on this command,
    # so mgrs_tile stays None when invoked via click — confirm this is intended.
    mgrs_tile: Optional[str] = None,
    source: Sentinel2Source = Sentinel2Source(collection="EarthSearch"),
    # mutable default is safe here: the list is only read, never mutated
    assets: List[str] = [],
    asset_exists_check: bool = True,
    blacklist: MPath = MPath("s3://eox-mhub-cache/blacklist.txt"),
    thumbnail_dir: Optional[MPath] = None,
    **__,
):
    """Find broken Sentinel-2 products.

    Searches the catalog in the given time range and area, verifies each
    item (asset presence and thumbnail color artefacts) and interactively
    offers to add broken products to the blacklist.
    """
    if any([start_time is None, end_time is None]):  # pragma: no cover
        raise click.ClickException("--start-time and --end-time are mandatory")
    if all([bounds is None, mgrs_tile is None]):  # pragma: no cover
        raise click.ClickException("--bounds or --mgrs-tile are required")
    catalog = source.get_catalog()
    # read blacklist once up front so we can skip already-known products
    blacklisted_products = blacklist_products(blacklist)
    for item in tqdm(
        catalog.search(
            time=TimeRange(start=start_time, end=end_time),
            bounds=bounds,
            search_kwargs=dict(mgrs_tile=mgrs_tile),
        )
    ):
        report = verify_item(
            item,
            assets=assets,
            asset_exists_check=asset_exists_check,
            thumbnail_dir=thumbnail_dir,
        )
        for asset in report.missing_asset_entries:
            # fixed: message previously lacked the closing quote after the name
            tqdm.write(f"[ERROR] {report.item.id} has no asset named '{asset}'")
        for path in report.missing_assets:
            # fixed: previously referenced the stale loop variable `asset` from
            # the loop above, which raised NameError when that loop was empty
            tqdm.write(
                f"[ERROR] {report.item.id} asset with path {str(path)} does not exist"
            )
        if report.color_artefacts:
            tqdm.write(
                f"[ERROR] {report.item.id} thumbnail ({report.item.assets['thumbnail'].href}) indicates that there are some color artefacts"
            )
        if report.product_broken():
            if report.item.get_self_href() in blacklisted_products:
                tqdm.write(f"product {report.item.id} already in blacklist")
            elif click.confirm(
                f"should product {report.item.id} be added to the blacklist ({str(blacklist)})"
            ):
                add_to_blacklist(
                    report.item.get_self_href(),
                    blacklist=blacklist,
                )
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from collections import defaultdict
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from types import TracebackType
|
|
8
|
+
from typing import Optional, Type
|
|
9
|
+
|
|
10
|
+
import click
|
|
11
|
+
import tqdm
|
|
12
|
+
from fiona.crs import CRS
|
|
13
|
+
from mapchete.cli.options import opt_bounds, opt_debug
|
|
14
|
+
from mapchete.io.vector import fiona_open
|
|
15
|
+
from mapchete.path import MPath
|
|
16
|
+
from mapchete.types import Bounds
|
|
17
|
+
from pystac import Item
|
|
18
|
+
from shapely import prepare
|
|
19
|
+
|
|
20
|
+
from mapchete_eo.cli import options_arguments
|
|
21
|
+
from mapchete_eo.io.items import item_fix_footprint
|
|
22
|
+
from mapchete_eo.search.s2_mgrs import InvalidMGRSSquare, S2Tile
|
|
23
|
+
from mapchete_eo.time import day_range
|
|
24
|
+
|
|
25
|
+
logger = logging.getLogger(__name__)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
# Fallback fiona schema used by VectorDataSource when no explicit schema is
# passed: mixed (Multi)Polygon geometries with an id and a relative path.
DEFAULT_SCHEMA = {
    "geometry": ("Polygon", "MultiPolygon"),
    "properties": {"id": "str", "path": "str"},
}
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@dataclass
class VectorDataSource:
    """Context-managed vector file writer with first-write-wins deduplication.

    On enter, any features already present at ``path`` are loaded; on a clean
    exit the full feature set is rewritten to ``path`` in one go.
    """

    path: MPath
    schema: Optional[dict] = None
    driver: Optional[str] = "FlatGeobuf"
    crs: Optional[CRS] = CRS.from_epsg(4326)
    features: dict = field(default_factory=dict)

    def write(self, feature_id: str, feature: dict):
        """Register a feature under feature_id; later duplicates are ignored."""
        if feature_id in self.features:
            return
        self.features[feature_id] = {
            "id": feature_id,
            "geometry": feature["geometry"],
            # make sure the id also lands in the feature properties
            "properties": dict(feature["properties"], id=feature_id),
        }

    def __enter__(self) -> VectorDataSource:
        """Preload existing features from path so reruns extend prior output."""
        if self.path.exists():
            logger.debug("read existing data from %s", str(self.path))
            with fiona_open(self.path) as src:
                loaded = {}
                for feature in src:
                    loaded[feature.properties["id"]] = dict(
                        geometry=dict(feature.geometry),
                        properties=dict(feature.properties),
                    )
                self.features = loaded

        return self

    def __exit__(
        self,
        exc_type: Optional[Type[Exception]] = None,
        exc: Optional[Exception] = None,
        exc_tb: Optional[TracebackType] = None,
    ):
        """Flush all collected features to path, but only on a clean exit."""
        if exc_type:
            return
        logger.debug(f"remove {self.path} if exists ...")
        self.path.rm(ignore_errors=True)
        logger.debug(f"writing to {self.path} ...")
        with fiona_open(
            self.path,
            mode="w",
            driver=self.driver,
            schema=self.schema or DEFAULT_SCHEMA,
            crs=self.crs,
        ) as dst:
            dst.writerecords(list(self.features.values()))
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
@click.command()
@options_arguments.arg_dst_path
@options_arguments.opt_start_time
@options_arguments.opt_end_time
@opt_bounds
@click.option(
    "--basepath", type=click.Path(path_type=MPath), default="s3://sentinel-s2-l2a-stac/"
)
@opt_debug
def s2_jp2_static_catalog(
    dst_path: MPath,
    start_time: datetime,
    end_time: datetime,
    bounds: Optional[Bounds] = None,
    basepath: MPath = MPath("s3://sentinel-s2-l2a-stac/"),
    **_,
):
    """
    Create a queriable set of static files for AWS_JP2 STAC items.

    - one master file linking spatially to all S2Tile subfiles: index.fgb
    - each entry links to a specific S2Tile file, e.g. tiles/53NQJ.fgb
    - each S2Tile file contains for each STAC item one entry with geometry and href
    """
    # no bounds means whole world
    bounds = bounds or Bounds(-180, -90, 180, 90)
    aoi = bounds.latlon_geometry()
    # prepared geometry speeds up the many intersects() checks below
    prepare(aoi)
    # map MGRS tile id -> all STAC item JSON paths found for that tile
    items_per_tile = defaultdict(list)
    for day in day_range(start_date=start_time, end_date=end_time):
        # catalog is laid out as <basepath>/YYYY/MM/DD/<item>.json
        day_path = basepath / day.strftime("%Y/%m/%d")
        click.echo(f"looking into {day_path} ...")
        try:
            paths = day_path.ls()
        except FileNotFoundError:
            # day directory absent: nothing acquired that day
            continue
        click.echo(f"found {len(paths)} items")
        for json_path in paths:
            # tile id is the last underscore-separated token of the file stem
            tile_id = json_path.without_suffix().name.split("_")[-1]
            items_per_tile[tile_id].append(json_path)

    index_path = dst_path / "index.fgb"
    s2tile_directory = MPath("s2tiles")
    with VectorDataSource(path=index_path) as index:
        for tile_id, json_paths in tqdm.tqdm(items_per_tile.items()):
            try:
                s2tile = S2Tile.from_tile_id(tile_id)
            except InvalidMGRSSquare as exc:
                # skip tile ids that do not form a valid MGRS square
                logger.debug("omitting S2Tile because of %s", str(exc))
                continue
            if aoi.intersects(s2tile.latlon_geometry):
                tqdm.tqdm.write(f"adding {s2tile.tile_id} ...")
                # per-tile subfile path, stored relative in the master index
                relative_tile_index_path = s2tile_directory / f"{s2tile.tile_id}.fgb"
                with VectorDataSource(
                    path=dst_path / relative_tile_index_path,
                    schema={
                        "geometry": ("Polygon", "MultiPolygon"),
                        "properties": {"id": "str", "path": "str", "datetime": "str"},
                    },
                ) as tile_index:
                    for json_path in json_paths:
                        item = item_fix_footprint(Item.from_file(json_path))
                        # items without a footprint geometry cannot be indexed
                        if item.geometry:
                            tile_index.write(
                                item.id,
                                dict(
                                    geometry=dict(item.geometry),
                                    properties=dict(
                                        path=str(json_path), datetime=str(item.datetime)
                                    ),
                                ),
                            )

                # NOTE(review): geometry here is a shapely object while the
                # tile_index entries above pass GeoJSON dicts — presumably
                # fiona/VectorDataSource accepts both; confirm on write.
                index.write(
                    feature_id=s2tile.tile_id,
                    feature=dict(
                        geometry=s2tile.latlon_geometry,
                        properties=dict(
                            path=str(relative_tile_index_path),
                        ),
                    ),
                )
            else:
                tqdm.tqdm.write(f"{s2tile.tile_id} not within bounds")
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import click
|
|
2
|
+
import numpy as np
|
|
3
|
+
import pystac
|
|
4
|
+
from mapchete.cli.options import opt_debug
|
|
5
|
+
from mapchete.io import rasterio_open
|
|
6
|
+
|
|
7
|
+
from mapchete_eo.cli import options_arguments
|
|
8
|
+
from mapchete_eo.platforms.sentinel2.config import MaskConfig, SceneClassification
|
|
9
|
+
from mapchete_eo.platforms.sentinel2.product import S2Product
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@click.command()
@options_arguments.arg_stac_item
@options_arguments.arg_dst_path
@options_arguments.opt_resolution
@options_arguments.opt_rio_profile
@options_arguments.opt_mask_footprint
@options_arguments.opt_mask_clouds
@options_arguments.opt_mask_snow_ice
@options_arguments.opt_mask_cloud_probability_threshold
@options_arguments.opt_mask_snow_probability_threshold
@options_arguments.opt_mask_scl_classes
@opt_debug
def s2_mask(
    stac_item,
    dst_path,
    resolution=None,
    rio_profile=None,
    mask_footprint=False,
    mask_clouds=False,
    mask_snow_ice=False,
    mask_cloud_probability_threshold=100,
    mask_snow_probability_threshold=100,
    mask_scl_classes=None,
    **_,
):
    """Generate mask for Sentinel-2 product from metadata."""
    item = pystac.Item.from_file(stac_item)
    product = S2Product.from_stac_item(item)
    # target grid (CRS, transform, shape) at the requested resolution
    grid = product.metadata.grid(resolution)
    click.echo(product)
    mask_config = MaskConfig(
        footprint=mask_footprint,
        l1c_clouds=mask_clouds,
        snow_ice=mask_snow_ice,
        # a threshold of 100 means "disabled", so only activate the
        # probability masks when a lower threshold was given
        cloud_probability=mask_cloud_probability_threshold != 100,
        cloud_probability_threshold=mask_cloud_probability_threshold,
        snow_probability=mask_snow_probability_threshold != 100,
        snow_probability_threshold=mask_snow_probability_threshold,
        scl=bool(mask_scl_classes),
        scl_classes=[
            SceneClassification[scene_class] for scene_class in mask_scl_classes
        ]
        if bool(mask_scl_classes)
        else None,
    )
    mask = product.get_mask(mask_config=mask_config).data
    # visualize the mask as RGB: masked pixels become red-ish
    # (mask presumably holds 0/1 or boolean values — TODO confirm dtype)
    rgb = np.stack([mask * 255, mask, mask])
    # NOTE(review): rio_profile defaults to None, which would make
    # **rio_profile raise a TypeError — presumably opt_rio_profile always
    # supplies a dict; verify against options_arguments.
    with rasterio_open(
        dst_path,
        mode="w",
        crs=grid.crs,
        transform=grid.transform,
        width=grid.width,
        height=grid.height,
        dtype=np.uint8,
        count=3,
        nodata=0,
        **rio_profile,
    ) as dst:
        dst.write(rgb)
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import click
|
|
2
|
+
from mapchete.cli.options import opt_bounds, opt_debug
|
|
3
|
+
from mapchete.io import fiona_open
|
|
4
|
+
from mapchete.path import MPath
|
|
5
|
+
from mapchete.types import Bounds
|
|
6
|
+
from shapely.geometry import mapping
|
|
7
|
+
|
|
8
|
+
from mapchete_eo.cli import options_arguments
|
|
9
|
+
from mapchete_eo.search.s2_mgrs import s2_tiles_from_bounds
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@click.command()
@options_arguments.arg_dst_path
@opt_bounds
@opt_debug
def s2_mgrs(
    dst_path: MPath,
    bounds: Bounds,
    **_,
):
    """Save Sentinel-2 tile grid as FlatGeobuf."""
    # each tile becomes one polygon feature carrying its MGRS components
    schema = {
        "geometry": "Polygon",
        "properties": {
            "utm_zone": "str",
            "latitude_band": "str",
            "grid_square": "str",
            "tile_id": "str",
        },
    }
    with fiona_open(
        dst_path, "w", crs="EPSG:4326", driver="FlatGeobuf", schema=schema
    ) as dst:
        for tile in s2_tiles_from_bounds(*bounds):
            feature = {
                "geometry": mapping(tile.latlon_geometry),
                "properties": {
                    "utm_zone": tile.utm_zone,
                    "latitude_band": tile.latitude_band,
                    "grid_square": tile.grid_square,
                    "tile_id": tile.tile_id,
                },
            }
            dst.write(feature)
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
from typing import List
|
|
2
|
+
|
|
3
|
+
import click
|
|
4
|
+
import numpy as np
|
|
5
|
+
from numpy.typing import DTypeLike
|
|
6
|
+
import pystac
|
|
7
|
+
from mapchete.cli.options import opt_debug
|
|
8
|
+
from mapchete.io import rasterio_open
|
|
9
|
+
from mapchete.path import MPath
|
|
10
|
+
from mapchete import Timer
|
|
11
|
+
|
|
12
|
+
from mapchete_eo.cli import options_arguments
|
|
13
|
+
from mapchete_eo.image_operations import linear_normalization
|
|
14
|
+
from mapchete_eo.platforms.sentinel2.config import (
|
|
15
|
+
BRDFConfig,
|
|
16
|
+
MaskConfig,
|
|
17
|
+
SceneClassification,
|
|
18
|
+
)
|
|
19
|
+
from mapchete_eo.platforms.sentinel2.product import S2Product
|
|
20
|
+
from mapchete_eo.platforms.sentinel2.types import Resolution
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@click.command()
@options_arguments.arg_stac_item
@options_arguments.opt_dst_path
@options_arguments.opt_assets_rgb
@options_arguments.opt_resolution
@options_arguments.opt_rio_profile
@options_arguments.opt_mask_footprint
@options_arguments.opt_mask_snow_ice
@options_arguments.opt_mask_cloud_probability_threshold
@options_arguments.opt_mask_snow_probability_threshold
@options_arguments.opt_mask_scl_classes
@options_arguments.opt_brdf_model
@options_arguments.opt_brdf_weight
@options_arguments.opt_brdf_log10
@options_arguments.opt_brdf_detector_iter
@options_arguments.opt_out_dtype
@opt_debug
def s2_rgb(
    stac_item: MPath,
    dst_path: MPath,
    assets: List[str] = ["red", "green", "blue"],
    resolution: Resolution = Resolution["120m"],
    rio_profile=None,
    mask_footprint=False,
    mask_snow_ice=False,
    mask_cloud_probability_threshold=100,
    mask_snow_probability_threshold=100,
    mask_scl_classes=None,
    brdf_model=None,
    brdf_weight: float = 1.0,
    brdf_log10: bool = False,
    brdf_detector_iter: bool = False,
    out_dtype: DTypeLike = "uint8",
    **_,
):
    """Generate 8bit RGB image from Sentinel-2 product."""
    out_dtype = np.dtype(out_dtype)

    # dst_path without a suffix is treated as a directory: derive the file
    # name from the STAC item. NOTE(review): `dst_path / name + ".tif"`
    # relies on MPath.__add__ appending to the joined path — confirm.
    if not dst_path.suffix:
        dst_path = dst_path / stac_item.without_suffix().name + ".tif"
    # "original" resolution maps to the finest band resolution (10m)
    if resolution == Resolution.original:
        resolution = Resolution["10m"]
    product = S2Product.from_stac_item(pystac.Item.from_file(stac_item))
    grid = product.metadata.grid(resolution)
    click.echo(
        f"writing {stac_item} assets {', '.join(assets)} to {dst_path} in {resolution.name} resolution"
    )
    with Timer() as t:
        mask_config = MaskConfig(
            footprint=mask_footprint,
            snow_ice=mask_snow_ice,
            cloud_probability_threshold=mask_cloud_probability_threshold,
            snow_probability_threshold=mask_snow_probability_threshold,
            scl_classes=(
                [SceneClassification[scene_class] for scene_class in mask_scl_classes]
                if bool(mask_scl_classes)
                else None
            ),
        )
        rgb = product.read_np_array(
            assets=assets,
            grid=grid,
            mask_config=mask_config,
            # BRDF correction is only applied when a model was requested
            brdf_config=BRDFConfig(
                bands=assets,
                model=brdf_model,
                correction_weight=brdf_weight,
                log10_bands_scale=brdf_log10,
                per_detector_correction=brdf_detector_iter,
            )
            if brdf_model
            else None,
        )
        # NOTE(review): rio_profile defaults to None, which would make
        # **rio_profile raise a TypeError — presumably opt_rio_profile
        # always supplies a dict; verify against options_arguments.
        with rasterio_open(
            dst_path,
            mode="w",
            crs=grid.crs,
            transform=grid.transform,
            width=grid.width,
            height=grid.height,
            dtype=out_dtype,
            count=len(assets),
            nodata=0,
            **rio_profile,
        ) as dst:
            # 8 bit output gets stretched to 1..255 (0 is reserved for nodata)
            if out_dtype == np.uint8:
                dst.write(linear_normalization(rgb, out_min=1))
            else:
                dst.write(rgb)
    click.echo(
        f"{stac_item} assets {', '.join(assets)} to {dst_path} in {resolution.name} written in {t}"
    )
|