mapchete-eo 2025.10.0-py2.py3-none-any.whl → 2025.11.0-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. mapchete_eo/__init__.py +1 -1
  2. mapchete_eo/array/convert.py +7 -1
  3. mapchete_eo/base.py +123 -55
  4. mapchete_eo/cli/options_arguments.py +11 -27
  5. mapchete_eo/cli/s2_brdf.py +1 -1
  6. mapchete_eo/cli/s2_cat_results.py +4 -20
  7. mapchete_eo/cli/s2_find_broken_products.py +4 -20
  8. mapchete_eo/cli/s2_jp2_static_catalog.py +2 -2
  9. mapchete_eo/cli/static_catalog.py +4 -45
  10. mapchete_eo/eostac.py +1 -1
  11. mapchete_eo/io/assets.py +7 -7
  12. mapchete_eo/io/items.py +37 -22
  13. mapchete_eo/io/levelled_cubes.py +66 -35
  14. mapchete_eo/io/path.py +19 -8
  15. mapchete_eo/io/products.py +37 -27
  16. mapchete_eo/platforms/sentinel2/__init__.py +1 -1
  17. mapchete_eo/platforms/sentinel2/_mapper_registry.py +89 -0
  18. mapchete_eo/platforms/sentinel2/brdf/correction.py +1 -1
  19. mapchete_eo/platforms/sentinel2/brdf/hls.py +1 -1
  20. mapchete_eo/platforms/sentinel2/brdf/models.py +1 -1
  21. mapchete_eo/platforms/sentinel2/brdf/protocols.py +1 -1
  22. mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +1 -1
  23. mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +1 -1
  24. mapchete_eo/platforms/sentinel2/config.py +73 -13
  25. mapchete_eo/platforms/sentinel2/driver.py +0 -39
  26. mapchete_eo/platforms/sentinel2/metadata_parser/__init__.py +6 -0
  27. mapchete_eo/platforms/sentinel2/{path_mappers → metadata_parser}/base.py +1 -1
  28. mapchete_eo/platforms/sentinel2/{path_mappers/metadata_xml.py → metadata_parser/default_path_mapper.py} +2 -2
  29. mapchete_eo/platforms/sentinel2/metadata_parser/models.py +78 -0
  30. mapchete_eo/platforms/sentinel2/{metadata_parser.py → metadata_parser/s2metadata.py} +51 -146
  31. mapchete_eo/platforms/sentinel2/preconfigured_sources/__init__.py +57 -0
  32. mapchete_eo/platforms/sentinel2/preconfigured_sources/guessers.py +108 -0
  33. mapchete_eo/platforms/sentinel2/preconfigured_sources/item_mappers.py +171 -0
  34. mapchete_eo/platforms/sentinel2/preconfigured_sources/metadata_xml_mappers.py +217 -0
  35. mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +22 -1
  36. mapchete_eo/platforms/sentinel2/processing_baseline.py +3 -0
  37. mapchete_eo/platforms/sentinel2/product.py +88 -23
  38. mapchete_eo/platforms/sentinel2/source.py +114 -0
  39. mapchete_eo/platforms/sentinel2/types.py +5 -0
  40. mapchete_eo/processes/merge_rasters.py +7 -3
  41. mapchete_eo/product.py +14 -9
  42. mapchete_eo/protocols.py +5 -0
  43. mapchete_eo/search/__init__.py +3 -3
  44. mapchete_eo/search/base.py +126 -100
  45. mapchete_eo/search/config.py +25 -4
  46. mapchete_eo/search/s2_mgrs.py +8 -9
  47. mapchete_eo/search/stac_search.py +111 -75
  48. mapchete_eo/search/stac_static.py +63 -94
  49. mapchete_eo/search/utm_search.py +39 -48
  50. mapchete_eo/settings.py +1 -0
  51. mapchete_eo/sort.py +16 -2
  52. mapchete_eo/source.py +107 -0
  53. {mapchete_eo-2025.10.0.dist-info → mapchete_eo-2025.11.0.dist-info}/METADATA +2 -1
  54. mapchete_eo-2025.11.0.dist-info/RECORD +89 -0
  55. {mapchete_eo-2025.10.0.dist-info → mapchete_eo-2025.11.0.dist-info}/entry_points.txt +1 -1
  56. mapchete_eo/archives/__init__.py +0 -0
  57. mapchete_eo/archives/base.py +0 -65
  58. mapchete_eo/geometry.py +0 -271
  59. mapchete_eo/known_catalogs.py +0 -42
  60. mapchete_eo/platforms/sentinel2/archives.py +0 -190
  61. mapchete_eo/platforms/sentinel2/path_mappers/__init__.py +0 -29
  62. mapchete_eo/platforms/sentinel2/path_mappers/earthsearch.py +0 -34
  63. mapchete_eo/platforms/sentinel2/path_mappers/sinergise.py +0 -105
  64. mapchete_eo-2025.10.0.dist-info/RECORD +0 -88
  65. {mapchete_eo-2025.10.0.dist-info → mapchete_eo-2025.11.0.dist-info}/WHEEL +0 -0
  66. {mapchete_eo-2025.10.0.dist-info → mapchete_eo-2025.11.0.dist-info}/licenses/LICENSE +0 -0
mapchete_eo/io/assets.py CHANGED
@@ -82,7 +82,7 @@ def asset_to_np_array(
     path = asset_mpath(item, asset)
 
     # find out asset details if raster:bands is available
-    stac_raster_bands = STACRasterBandProperties.from_asset(
+    band_properties = STACRasterBandProperties.from_asset(
         item.assets[asset], nodataval=nodataval
     )
 
@@ -92,23 +92,23 @@ def asset_to_np_array(
         indexes=indexes,
         grid=grid,
         resampling=resampling.name,
-        dst_nodata=stac_raster_bands.nodata,
+        dst_nodata=band_properties.nodata,
     ).data
 
-    if apply_offset and stac_raster_bands.offset:
-        data_type = stac_raster_bands.data_type or data.dtype
+    if apply_offset and band_properties.offset:
+        data_type = band_properties.data_type or data.dtype
 
         # determine value range for the target data_type
         clip_min, clip_max = dtype_ranges[str(data_type)]
 
         # increase minimum clip value to avoid collision with nodata value
-        if clip_min == stac_raster_bands.nodata:
+        if clip_min == band_properties.nodata:
             clip_min += 1
 
         data[:] = (
             (
-                ((data * stac_raster_bands.scale) + stac_raster_bands.offset)
-                / stac_raster_bands.scale
+                ((data * band_properties.scale) + band_properties.offset)
+                / band_properties.scale
             )
             .round()
             .clip(clip_min, clip_max)
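The renamed band_properties carry the STAC raster:bands scale, offset and nodata values that drive this correction: the offset is folded into the raw digital numbers so that a later multiplication by scale yields the offset-corrected value. A small numeric sketch of that arithmetic, with hypothetical values resembling Sentinel-2 L2A processing baseline 04.00 (not taken from this diff):

    # hypothetical raster:bands metadata
    scale = 0.0001
    offset = -0.1

    dn = 2000  # raw digital number read from the asset
    # fold the offset into the digital numbers, as the block above does
    corrected = round((dn * scale + offset) / scale)
    print(corrected)  # 1000, so applying the scale later yields 0.1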
mapchete_eo/io/items.py CHANGED
@@ -1,15 +1,15 @@
 import logging
-from typing import Any, List, Optional
+from typing import Any, List, Optional, Tuple, Union
 
 import numpy.ma as ma
 import pystac
+from mapchete.geometry import repair_antimeridian_geometry
 from mapchete.protocols import GridProtocol
 from mapchete.types import Bounds, NodataVals
 from rasterio.enums import Resampling
 from shapely.geometry import mapping, shape
 
 from mapchete_eo.exceptions import EmptyProductException
-from mapchete_eo.geometry import repair_antimeridian_geometry
 from mapchete_eo.io.assets import asset_to_np_array
 from mapchete_eo.types import BandLocation
 
@@ -56,7 +56,7 @@ def item_to_np_array(
     return out
 
 
-def expand_params(param, length):
+def expand_params(param: Any, length: int) -> List[Any]:
     """
     Expand parameters if they are not a list.
     """
@@ -69,7 +69,8 @@ def expand_params(param, length):
 
 def get_item_property(
     item: pystac.Item,
-    property: str,
+    property: Union[str, Tuple[str, ...]],
+    default: Any = None,
 ) -> Any:
     """
     Return item property.
@@ -104,26 +105,40 @@
     | ``collection`` | The collection ID of an Item's collection. |
     +--------------------+--------------------------------------------------------+
     """
-    if property in ["year", "month", "day", "date", "datetime"]:
-        if item.datetime is None:
-            raise ValueError(
-                f"STAC item has no datetime attached, thus cannot get property {property}"
-            )
-        elif property == "date":
-            return item.datetime.date().isoformat()
-        elif property == "datetime":
-            return item.datetime
+
+    def _get_item_property(item: pystac.Item, property: str) -> Any:
+        if property == "id":
+            return item.id
+        elif property in ["year", "month", "day", "date", "datetime"]:
+            if item.datetime is None:  # pragma: no cover
+                raise ValueError(
+                    f"STAC item has no datetime attached, thus cannot get property {property}"
+                )
+            elif property == "date":
+                return item.datetime.date().isoformat()
+            elif property == "datetime":
+                return item.datetime
+            else:
+                return item.datetime.__getattribute__(property)
+        elif property == "collection":
+            return item.collection_id
+        elif property in item.properties:
+            return item.properties[property]
+        elif property in item.extra_fields:
+            return item.extra_fields[property]
+        elif property == "stac_extensions":
+            return item.stac_extensions
         else:
-            return item.datetime.__getattribute__(property)
-    elif property == "collection":
-        return item.collection_id
-    elif property in item.properties:
-        return item.properties[property]
-    elif property in item.extra_fields:
-        return item.extra_fields[property]
-    elif property == "stac_extensions":
-        return item.stac_extensions
+            raise KeyError
+
+    for prop in property if isinstance(property, tuple) else (property,):
+        try:
+            return _get_item_property(item, prop)
+        except KeyError:
+            pass
     else:
+        if default is not None:
+            return default
         raise KeyError(
             f"item {item.id} does not have property {property} in its datetime, properties "
             f"({', '.join(item.properties.keys())}) or extra_fields "
mapchete_eo/io/levelled_cubes.py CHANGED
@@ -40,27 +40,50 @@ def read_levelled_cube_to_np_array(
     raise_empty: bool = True,
     out_dtype: DTypeLike = np.uint16,
     out_fill_value: NodataVal = 0,
+    read_mask: Optional[np.ndarray] = None,
 ) -> ma.MaskedArray:
     """
     Read products as slices into a cube by filling up nodata gaps with next slice.
+
+    If a read_mask is provided, only the pixels marked True are considered to be read.
     """
-    if len(products) == 0:
+    if len(products) == 0:  # pragma: no cover
         raise NoSourceProducts("no products to read")
-
     bands = assets or eo_bands
-    if bands is None:
+    if bands is None:  # pragma: no cover
         raise ValueError("either assets or eo_bands have to be set")
-
     out_shape = (target_height, len(bands), *grid.shape)
+
+    # 2D read_mask shape
+    if read_mask is None:
+        read_mask = np.ones(grid.shape, dtype=bool)
+    elif read_mask.ndim != 2:  # pragma: no cover
+        raise ValueError(
+            "read_mask must be 2-dimensional, not %s-dimensional",
+            read_mask.ndim,
+        )
     out: ma.MaskedArray = ma.masked_array(
-        data=np.zeros(out_shape, dtype=out_dtype),
-        mask=np.ones(out_shape, dtype=out_dtype),
+        data=np.full(out_shape, out_fill_value, dtype=out_dtype),
+        mask=np.ones(out_shape, dtype=bool),
         fill_value=out_fill_value,
     )
+
+    if not read_mask.any():
+        logger.debug("nothing to read")
+        return out
+
+    # extrude mask to match each layer
+    layer_read_mask = np.stack([read_mask for _ in bands])
+
+    def _cube_read_mask() -> np.ndarray:
+        # only needed for debug output, thus no need to always materialize it
+        return np.stack([layer_read_mask for _ in range(target_height)])
+
     logger.debug(
-        "empty cube with shape %s has %s",
+        "empty cube with shape %s has %s and %s pixels to be filled",
         out.shape,
         pretty_bytes(out.size * out.itemsize),
+        _cube_read_mask().sum(),
     )
 
     logger.debug("sort products into slices ...")
@@ -76,25 +99,25 @@ def read_levelled_cube_to_np_array(
     slices_read_count, slices_skip_count = 0, 0
 
     # pick slices one by one
-    for slice_count, slice in enumerate(slices, 1):
+    for slice_count, slice_ in enumerate(slices, 1):
         # all filled up? let's get outta here!
         if not out.mask.any():
-            logger.debug("cube is full, quitting!")
+            logger.debug("cube has no pixels to be filled, quitting!")
             break
 
         # generate 2D mask of holes to be filled in output cube
-        cube_nodata_mask = out.mask.any(axis=0).any(axis=0)
+        cube_nodata_mask = np.logical_and(out.mask.any(axis=0).any(axis=0), read_mask)
 
         # read slice
         try:
            logger.debug(
                "see if slice %s %s has some of the %s unmasked pixels for cube",
                slice_count,
-                slice,
+                slice_,
                cube_nodata_mask.sum(),
            )
-            with slice.cached():
-                slice_array = slice.read(
+            with slice_.cached():
+                slice_array = slice_.read(
                    merge_method=merge_method,
                    product_read_kwargs=dict(
                        product_read_kwargs,
@@ -104,17 +127,18 @@ def read_levelled_cube_to_np_array(
                        resampling=resampling,
                        nodatavals=nodatavals,
                        raise_empty=raise_empty,
-                        target_mask=~cube_nodata_mask.copy(),
+                        read_mask=cube_nodata_mask.copy(),
+                        out_dtype=out_dtype,
                    ),
                )
            slices_read_count += 1
        except (EmptySliceException, CorruptedSlice) as exc:
-            logger.debug("skipped slice %s: %s", slice, str(exc))
+            logger.debug("skipped slice %s: %s", slice_, str(exc))
            slices_skip_count += 1
            continue
 
        # if slice was not empty, fill pixels into cube
-        logger.debug("add slice %s array to cube", slice)
+        logger.debug("add slice %s array to cube", slice_)
 
        # iterate through layers of cube
        for layer_index in range(target_height):
@@ -124,34 +148,35 @@ def read_levelled_cube_to_np_array(
                continue
 
            # determine empty patches of current layer
-            empty_patches = out[layer_index].mask.copy()
-            pixels_for_layer = (~slice_array[empty_patches].mask).sum()
+            empty_patches = np.logical_and(out[layer_index].mask, layer_read_mask)
+            remaining_pixels_for_layer = (~slice_array[empty_patches].mask).sum()
 
            # when slice has nothing to offer for this layer, skip
-            if pixels_for_layer == 0:
+            if remaining_pixels_for_layer == 0:
                logger.debug(
                    "layer %s: slice has no pixels for this layer, jump to next",
                    layer_index,
                )
                continue
 
+            # insert slice data into empty patches of layer
            logger.debug(
                "layer %s: fill with %s pixels ...",
                layer_index,
-                pixels_for_layer,
+                remaining_pixels_for_layer,
            )
-            # insert slice data into empty patches of layer
            out[layer_index][empty_patches] = slice_array[empty_patches]
-            masked_pixels = out[layer_index].mask.sum()
-            total_pixels = out[layer_index].size
-            percent_full = round(
-                100 * ((total_pixels - masked_pixels) / total_pixels), 2
-            )
+
+            # report on layer fill status
            logger.debug(
-                "layer %s: %s%% filled (%s empty pixels remaining)",
+                "layer %s: %s",
                layer_index,
-                percent_full,
-                out[layer_index].mask.sum(),
+                _percent_full(
+                    remaining=np.logical_and(
+                        out[layer_index].mask, layer_read_mask
+                    ).sum(),
+                    total=layer_read_mask.sum(),
+                ),
            )
 
            # remove slice values which were just inserted for next layer
@@ -161,13 +186,13 @@ def read_levelled_cube_to_np_array(
                logger.debug("slice fully inserted into cube, skipping")
                break
 
-        masked_pixels = out.mask.sum()
-        total_pixels = out.size
-        percent_full = round(100 * ((total_pixels - masked_pixels) / total_pixels), 2)
+        # report on cube fill status
        logger.debug(
-            "cube is %s%% filled (%s empty pixels remaining)",
-            percent_full,
-            masked_pixels,
+            "cube is %s",
+            _percent_full(
+                remaining=np.logical_and(out.mask, _cube_read_mask()).sum(),
+                total=_cube_read_mask().sum(),
+            ),
        )
 
    logger.debug(
@@ -197,6 +222,7 @@ def read_levelled_cube_to_xarray(
     band_axis_name: str = "bands",
     x_axis_name: str = "x",
     y_axis_name: str = "y",
+    read_mask: Optional[np.ndarray] = None,
 ) -> xr.Dataset:
     """
     Read products as slices into a cube by filling up nodata gaps with next slice.
@@ -218,6 +244,7 @@
             sort=sort,
             product_read_kwargs=product_read_kwargs,
             raise_empty=raise_empty,
+            read_mask=read_mask,
         ),
         slice_names=[f"layer-{ii}" for ii in range(target_height)],
         band_names=variables,
@@ -226,3 +253,7 @@
         x_axis_name=x_axis_name,
         y_axis_name=y_axis_name,
     )
+
+
+def _percent_full(remaining: int, total: int, ndigits: int = 2) -> str:
+    return f"{round(100 * (total - remaining) / total, ndigits=ndigits)}% full ({remaining} remaining empty pixels)"
mapchete_eo/io/path.py CHANGED
@@ -3,7 +3,7 @@ import logging
 from contextlib import contextmanager
 from enum import Enum
 from tempfile import TemporaryDirectory
-from typing import Generator
+from typing import Generator, Tuple, Union
 from xml.etree.ElementTree import Element, fromstring
 
 import fsspec
@@ -126,19 +126,30 @@ def cached_path(path: MPath, active: bool = True) -> Generator[MPath, None, None]:
 
 def asset_mpath(
     item: pystac.Item,
-    asset: str,
+    asset: Union[str, Tuple[str, ...]],
     fs: fsspec.AbstractFileSystem = None,
     absolute_path: bool = True,
 ) -> MPath:
     """Return MPath instance with asset href."""
 
-    try:
+    def _asset_mpath(
+        item: pystac.Item,
+        asset: str,
+        fs: fsspec.AbstractFileSystem = None,
+        absolute_path: bool = True,
+    ) -> MPath:
         asset_path = MPath(item.assets[asset].href, fs=fs)
-    except KeyError:
+        if absolute_path and not asset_path.is_absolute():
+            return MPath(item.get_self_href(), fs=fs).parent / asset_path
+        else:
+            return asset_path
+
+    for single_asset in asset if isinstance(asset, tuple) else (asset,):
+        try:
+            return _asset_mpath(item, single_asset, fs=fs, absolute_path=absolute_path)
+        except KeyError:
+            pass
+    else:
         raise AssetKeyError(
             f"{item.id} no asset named '{asset}' found in assets: {', '.join(item.assets.keys())}"
         )
-    if absolute_path and not asset_path.is_absolute():
-        return MPath(item.get_self_href(), fs=fs).parent / asset_path
-    else:
-        return asset_path
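asset_mpath follows the same fallback pattern: a tuple of asset keys resolves to the first key present, and AssetKeyError is raised only if none match. A short sketch with hypothetical asset names, assuming an existing pystac.Item:

    from mapchete_eo.io.path import asset_mpath

    # the first matching asset key wins
    path = asset_mpath(item, ("B04", "red"))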
mapchete_eo/io/products.py CHANGED
@@ -10,6 +10,7 @@ from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence
 from mapchete import Timer
 import numpy as np
 import numpy.ma as ma
+from numpy.typing import DTypeLike
 import xarray as xr
 from mapchete.config import get_hash
 from mapchete.geometry import to_shape
@@ -28,7 +29,6 @@ from mapchete_eo.exceptions import (
     EmptyStackException,
     NoSourceProducts,
 )
-from mapchete_eo.io.items import get_item_property
 from mapchete_eo.protocols import EOProductProtocol
 from mapchete_eo.sort import SortMethodConfig
 from mapchete_eo.types import MergeMethod
@@ -49,11 +49,13 @@ def products_to_np_array(
     sort: Optional[SortMethodConfig] = None,
     product_read_kwargs: dict = {},
     raise_empty: bool = True,
+    out_dtype: Optional[DTypeLike] = None,
+    read_mask: Optional[np.ndarray] = None,
 ) -> ma.MaskedArray:
     """Read grid window of EOProducts and merge into a 4D xarray."""
     return ma.stack(
         [
-            to_masked_array(s)
+            to_masked_array(s, out_dtype=out_dtype)
             for s in generate_slice_dataarrays(
                 products=products,
                 assets=assets,
@@ -66,6 +68,7 @@
                 sort=sort,
                 product_read_kwargs=product_read_kwargs,
                 raise_empty=raise_empty,
+                read_mask=read_mask,
             )
         ]
     )
@@ -87,6 +90,7 @@ def products_to_xarray(
     sort: Optional[SortMethodConfig] = None,
     raise_empty: bool = True,
     product_read_kwargs: dict = {},
+    read_mask: Optional[np.ndarray] = None,
 ) -> xr.Dataset:
     """Read grid window of EOProducts and merge into a 4D xarray."""
     data_vars = [
@@ -103,6 +107,7 @@
             sort=sort,
             product_read_kwargs=product_read_kwargs,
             raise_empty=raise_empty,
+            read_mask=read_mask,
         )
     ]
     if merge_products_by and merge_products_by not in ["date", "datetime"]:
@@ -112,7 +117,8 @@
     coords = {
         slice_axis_name: list(
             np.array(
-                [product.item.datetime for product in products], dtype=np.datetime64
+                [product.get_property("datetime") for product in products],
+                dtype=np.datetime64,
             )
         )
     }
@@ -126,7 +132,6 @@ class Slice:
     """Combine multiple products into one slice."""
 
     name: Any
-    properties: dict
     products: Sequence[EOProductProtocol]
     datetime: datetime
 
@@ -145,21 +150,13 @@
 
         # calculate mean datetime
         timestamps = [
-            product.item.datetime.timestamp()
+            product.get_property("datetime").timestamp()
             for product in self.products
-            if product.item.datetime
+            if product.get_property("datetime")
         ]
         mean_timestamp = sum(timestamps) / len(timestamps)
         self.datetime = datetime.fromtimestamp(mean_timestamp)
 
-        # generate combined properties
-        self.properties = {}
-        for key in self.products[0].item.properties.keys():
-            try:
-                self.properties[key] = self.get_property(key)
-            except ValueError:
-                self.properties[key] = None
-
     def __repr__(self) -> str:
         return f"<Slice {self.name} ({len(self.products)} products)>"
 
@@ -172,6 +169,17 @@
 
         raise EmptySliceException
 
+    @property
+    def properties(self) -> Dict[str, Any]:
+        # generate combined properties
+        properties: Dict[str, Any] = {}
+        for key in self.products[0].item.properties.keys():
+            try:
+                properties[key] = self.get_property(key)
+            except ValueError:
+                properties[key] = None
+        return properties
+
     @contextmanager
     def cached(self) -> Generator[Slice, None, None]:
         """Clear caches and run garbage collector when context manager is closed."""
@@ -194,12 +202,9 @@
         ValueError is raised.
         """
         # if set of value hashes has a length of 1, all values are the same
-        values = [
-            get_hash(get_item_property(product.item, property=property))
-            for product in self.products
-        ]
+        values = [get_hash(product.get_property(property)) for product in self.products]
         if len(set(values)) == 1:
-            return get_item_property(self.products[0].item, property=property)
+            return self.products[0].get_property(property)
 
         raise ValueError(
             f"cannot get unique property {property} from products {self.products}"
@@ -232,7 +237,7 @@ def products_to_slices(
             grouped[product.get_property(group_by_property)].append(product)
         slices = [Slice(key, products) for key, products in grouped.items()]
     else:
-        slices = [Slice(product.item.id, [product]) for product in products]
+        slices = [Slice(product.id, [product]) for product in products]
 
     # also check if slices is even a list, otherwise it will raise an error
     if sort and slices:
@@ -262,7 +267,7 @@ def merge_products(
         try:
             yield product.read_np_array(**product_read_kwargs)
         except (AssetKeyError, CorruptedProduct) as exc:
-            logger.debug("skip product %s because of %s", product.item.id, exc)
+            logger.warning("skip product %s because of %s", product.id, exc)
         except StopIteration:
             return
 
@@ -280,7 +285,7 @@ def merge_products(
             out = product.read_np_array(**product_read_kwargs)
             break
         except (AssetKeyError, CorruptedProduct) as exc:
-            logger.debug("skip product %s because of %s", product.item.id, exc)
+            logger.warning("skip product %s because of %s", product.id, exc)
     else:
         # we cannot do anything here, as all products are broken
         raise CorruptedSlice("all products are broken here")
@@ -322,8 +327,11 @@ def merge_products(
     valid_arrays = [a for a in arrays if not ma.getmaskarray(a).all()]
 
     if valid_arrays:
-        stacked = ma.stack(valid_arrays, dtype=out.dtype)
-        out = stacked.mean(axis=0, dtype=out.dtype)
+        out_dtype = out.dtype
+        out_fill_value = out.fill_value
+        stacked = ma.stack(valid_arrays, dtype=out_dtype)
+        out = stacked.mean(axis=0, dtype=out_dtype).astype(out_dtype, copy=False)
+        out.set_fill_value(out_fill_value)
     else:
         # All arrays were fully masked — return fully masked output
         out = ma.masked_all(out.shape, dtype=out.dtype)
@@ -351,10 +359,12 @@ def generate_slice_dataarrays(
     sort: Optional[SortMethodConfig] = None,
     product_read_kwargs: dict = {},
     raise_empty: bool = True,
+    read_mask: Optional[np.ndarray] = None,
 ) -> Iterator[xr.DataArray]:
     """
     Yield products or merged products into slices as DataArrays.
     """
+
     if len(products) == 0:
         raise NoSourceProducts("no products to read")
 
@@ -367,7 +377,6 @@ def generate_slice_dataarrays(
     slices = products_to_slices(
         products, group_by_property=merge_products_by, sort=sort
     )
-
     logger.debug(
         "reading %s products in %s groups...",
         len(products),
@@ -396,6 +405,7 @@
                     resampling=resampling,
                     nodatavals=nodatavals,
                     raise_empty=raise_empty,
+                    read_mask=read_mask,
                 ),
                 raise_empty=raise_empty,
             ),
@@ -406,8 +416,8 @@
             )
             # if at least one slice can be yielded, the stack is not empty
             stack_empty = False
-        except (EmptySliceException, CorruptedSlice):
-            pass
+        except (EmptySliceException, CorruptedSlice) as exception:
+            logger.warning(exception)
 
     if stack_empty:
         raise EmptyStackException("all slices are empty")
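In products.py, Slice.properties is now computed lazily on access rather than at construction time, and the reader functions forward out_dtype and read_mask down to the slice level. A hypothetical call sketch; products, grid and read_mask are placeholders for existing objects:

    import numpy as np

    from mapchete_eo.io.products import products_to_np_array

    cube = products_to_np_array(
        products=products,  # sequence of EOProductProtocol objects
        assets=["red", "nir"],  # hypothetical asset names
        grid=grid,
        out_dtype=np.uint16,  # cast each slice array while stacking
        read_mask=read_mask,  # optional 2D boolean mask limiting the read
    )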
mapchete_eo/platforms/sentinel2/__init__.py CHANGED
@@ -4,7 +4,7 @@ from mapchete_eo.platforms.sentinel2.driver import (
     Sentinel2Cube,
     Sentinel2CubeGroup,
 )
-from mapchete_eo.platforms.sentinel2.metadata_parser import S2Metadata
+from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
 from mapchete_eo.platforms.sentinel2.product import S2Product
 
 __all__ = [
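Since the package root still imports S2Metadata, downstream imports through mapchete_eo.platforms.sentinel2 keep working and only the internal module path changed. A quick sanity check under that assumption:

    from mapchete_eo.platforms.sentinel2 import S2Metadata
    from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import (
        S2Metadata as S2M,
    )

    assert S2Metadata is S2M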