mapchete_eo-2025.7.0-py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapchete_eo/__init__.py +1 -0
- mapchete_eo/archives/__init__.py +0 -0
- mapchete_eo/archives/base.py +65 -0
- mapchete_eo/array/__init__.py +0 -0
- mapchete_eo/array/buffer.py +16 -0
- mapchete_eo/array/color.py +29 -0
- mapchete_eo/array/convert.py +157 -0
- mapchete_eo/base.py +528 -0
- mapchete_eo/blacklist.txt +175 -0
- mapchete_eo/cli/__init__.py +30 -0
- mapchete_eo/cli/bounds.py +22 -0
- mapchete_eo/cli/options_arguments.py +243 -0
- mapchete_eo/cli/s2_brdf.py +77 -0
- mapchete_eo/cli/s2_cat_results.py +146 -0
- mapchete_eo/cli/s2_find_broken_products.py +93 -0
- mapchete_eo/cli/s2_jp2_static_catalog.py +166 -0
- mapchete_eo/cli/s2_mask.py +71 -0
- mapchete_eo/cli/s2_mgrs.py +45 -0
- mapchete_eo/cli/s2_rgb.py +114 -0
- mapchete_eo/cli/s2_verify.py +129 -0
- mapchete_eo/cli/static_catalog.py +123 -0
- mapchete_eo/eostac.py +30 -0
- mapchete_eo/exceptions.py +87 -0
- mapchete_eo/geometry.py +271 -0
- mapchete_eo/image_operations/__init__.py +12 -0
- mapchete_eo/image_operations/color_correction.py +136 -0
- mapchete_eo/image_operations/compositing.py +247 -0
- mapchete_eo/image_operations/dtype_scale.py +43 -0
- mapchete_eo/image_operations/fillnodata.py +130 -0
- mapchete_eo/image_operations/filters.py +319 -0
- mapchete_eo/image_operations/linear_normalization.py +81 -0
- mapchete_eo/image_operations/sigmoidal.py +114 -0
- mapchete_eo/io/__init__.py +37 -0
- mapchete_eo/io/assets.py +492 -0
- mapchete_eo/io/items.py +147 -0
- mapchete_eo/io/levelled_cubes.py +228 -0
- mapchete_eo/io/path.py +144 -0
- mapchete_eo/io/products.py +413 -0
- mapchete_eo/io/profiles.py +45 -0
- mapchete_eo/known_catalogs.py +42 -0
- mapchete_eo/platforms/sentinel2/__init__.py +17 -0
- mapchete_eo/platforms/sentinel2/archives.py +190 -0
- mapchete_eo/platforms/sentinel2/bandpass_adjustment.py +104 -0
- mapchete_eo/platforms/sentinel2/brdf/__init__.py +8 -0
- mapchete_eo/platforms/sentinel2/brdf/config.py +32 -0
- mapchete_eo/platforms/sentinel2/brdf/correction.py +260 -0
- mapchete_eo/platforms/sentinel2/brdf/hls.py +251 -0
- mapchete_eo/platforms/sentinel2/brdf/models.py +44 -0
- mapchete_eo/platforms/sentinel2/brdf/protocols.py +27 -0
- mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +136 -0
- mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +76 -0
- mapchete_eo/platforms/sentinel2/config.py +181 -0
- mapchete_eo/platforms/sentinel2/driver.py +78 -0
- mapchete_eo/platforms/sentinel2/masks.py +325 -0
- mapchete_eo/platforms/sentinel2/metadata_parser.py +734 -0
- mapchete_eo/platforms/sentinel2/path_mappers/__init__.py +29 -0
- mapchete_eo/platforms/sentinel2/path_mappers/base.py +56 -0
- mapchete_eo/platforms/sentinel2/path_mappers/earthsearch.py +34 -0
- mapchete_eo/platforms/sentinel2/path_mappers/metadata_xml.py +135 -0
- mapchete_eo/platforms/sentinel2/path_mappers/sinergise.py +105 -0
- mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +26 -0
- mapchete_eo/platforms/sentinel2/processing_baseline.py +160 -0
- mapchete_eo/platforms/sentinel2/product.py +669 -0
- mapchete_eo/platforms/sentinel2/types.py +109 -0
- mapchete_eo/processes/__init__.py +0 -0
- mapchete_eo/processes/config.py +51 -0
- mapchete_eo/processes/dtype_scale.py +112 -0
- mapchete_eo/processes/eo_to_xarray.py +19 -0
- mapchete_eo/processes/merge_rasters.py +235 -0
- mapchete_eo/product.py +278 -0
- mapchete_eo/protocols.py +56 -0
- mapchete_eo/search/__init__.py +14 -0
- mapchete_eo/search/base.py +222 -0
- mapchete_eo/search/config.py +42 -0
- mapchete_eo/search/s2_mgrs.py +314 -0
- mapchete_eo/search/stac_search.py +251 -0
- mapchete_eo/search/stac_static.py +236 -0
- mapchete_eo/search/utm_search.py +251 -0
- mapchete_eo/settings.py +24 -0
- mapchete_eo/sort.py +48 -0
- mapchete_eo/time.py +53 -0
- mapchete_eo/types.py +73 -0
- mapchete_eo-2025.7.0.dist-info/METADATA +38 -0
- mapchete_eo-2025.7.0.dist-info/RECORD +87 -0
- mapchete_eo-2025.7.0.dist-info/WHEEL +5 -0
- mapchete_eo-2025.7.0.dist-info/entry_points.txt +11 -0
- mapchete_eo-2025.7.0.dist-info/licenses/LICENSE +21 -0
mapchete_eo/io/levelled_cubes.py
ADDED

@@ -0,0 +1,228 @@

import logging
from typing import List, Optional

import numpy as np
import numpy.ma as ma
from numpy.typing import DTypeLike
import xarray as xr
from mapchete.pretty import pretty_bytes
from mapchete.protocols import GridProtocol
from mapchete.types import NodataVals, NodataVal
from rasterio.enums import Resampling

from mapchete_eo.array.convert import to_dataset
from mapchete_eo.exceptions import (
    CorruptedSlice,
    EmptySliceException,
    EmptyStackException,
    NoSourceProducts,
)
from mapchete_eo.io.products import products_to_slices
from mapchete_eo.protocols import EOProductProtocol
from mapchete_eo.sort import SortMethodConfig, TargetDateSort
from mapchete_eo.types import MergeMethod

logger = logging.getLogger(__name__)


def read_levelled_cube_to_np_array(
    products: List[EOProductProtocol],
    target_height: int,
    grid: GridProtocol,
    assets: Optional[List[str]] = None,
    eo_bands: Optional[List[str]] = None,
    resampling: Resampling = Resampling.nearest,
    nodatavals: NodataVals = None,
    merge_products_by: Optional[str] = None,
    merge_method: MergeMethod = MergeMethod.first,
    sort: SortMethodConfig = TargetDateSort(),
    product_read_kwargs: dict = {},
    raise_empty: bool = True,
    out_dtype: DTypeLike = np.uint16,
    out_fill_value: NodataVal = 0,
) -> ma.MaskedArray:
    """
    Read products as slices into a cube by filling up nodata gaps with next slice.
    """
    if len(products) == 0:
        raise NoSourceProducts("no products to read")

    bands = assets or eo_bands
    if bands is None:
        raise ValueError("either assets or eo_bands have to be set")

    out_shape = (target_height, len(bands), *grid.shape)
    out: ma.MaskedArray = ma.masked_array(
        data=np.zeros(out_shape, dtype=out_dtype),
        mask=np.ones(out_shape, dtype=out_dtype),
        fill_value=out_fill_value,
    )
    logger.debug(
        "empty cube with shape %s has %s",
        out.shape,
        pretty_bytes(out.size * out.itemsize),
    )

    logger.debug("sort products into slices ...")
    slices = products_to_slices(
        products=products, group_by_property=merge_products_by, sort=sort
    )
    logger.debug(
        "generating levelled cube with height %s from %s slices",
        target_height,
        len(slices),
    )

    slices_read_count, slices_skip_count = 0, 0

    # pick slices one by one
    for slice_count, slice in enumerate(slices, 1):
        # all filled up? let's get outta here!
        if not out.mask.any():
            logger.debug("cube is full, quitting!")
            break

        # generate 2D mask of holes to be filled in output cube
        cube_nodata_mask = out.mask.any(axis=0).any(axis=0)

        # read slice
        try:
            logger.debug(
                "see if slice %s %s has some of the %s unmasked pixels for cube",
                slice_count,
                slice,
                cube_nodata_mask.sum(),
            )
            with slice.cached():
                slice_array = slice.read(
                    merge_method=merge_method,
                    product_read_kwargs=dict(
                        product_read_kwargs,
                        assets=assets,
                        eo_bands=eo_bands,
                        grid=grid,
                        resampling=resampling,
                        nodatavals=nodatavals,
                        raise_empty=raise_empty,
                        target_mask=~cube_nodata_mask.copy(),
                    ),
                )
            slices_read_count += 1
        except (EmptySliceException, CorruptedSlice) as exc:
            logger.debug("skipped slice %s: %s", slice, str(exc))
            slices_skip_count += 1
            continue

        # if slice was not empty, fill pixels into cube
        logger.debug("add slice %s array to cube", slice)

        # iterate through layers of cube
        for layer_index in range(target_height):
            # go to next layer if layer is full
            if not out[layer_index].mask.any():
                logger.debug("layer %s: full, jump to next", layer_index)
                continue

            # determine empty patches of current layer
            empty_patches = out[layer_index].mask.copy()
            pixels_for_layer = (~slice_array[empty_patches].mask).sum()

            # when slice has nothing to offer for this layer, skip
            if pixels_for_layer == 0:
                logger.debug(
                    "layer %s: slice has no pixels for this layer, jump to next",
                    layer_index,
                )
                continue

            logger.debug(
                "layer %s: fill with %s pixels ...",
                layer_index,
                pixels_for_layer,
            )
            # insert slice data into empty patches of layer
            out[layer_index][empty_patches] = slice_array[empty_patches]
            masked_pixels = out[layer_index].mask.sum()
            total_pixels = out[layer_index].size
            percent_full = round(
                100 * ((total_pixels - masked_pixels) / total_pixels), 2
            )
            logger.debug(
                "layer %s: %s%% filled (%s empty pixels remaining)",
                layer_index,
                percent_full,
                out[layer_index].mask.sum(),
            )

            # remove slice values which were just inserted for next layer
            slice_array[empty_patches] = ma.masked

            if slice_array.mask.all():
                logger.debug("slice fully inserted into cube, skipping")
                break

    masked_pixels = out.mask.sum()
    total_pixels = out.size
    percent_full = round(100 * ((total_pixels - masked_pixels) / total_pixels), 2)
    logger.debug(
        "cube is %s%% filled (%s empty pixels remaining)",
        percent_full,
        masked_pixels,
    )

    logger.debug(
        "%s slices read, %s slices skipped", slices_read_count, slices_skip_count
    )

    if raise_empty and out.mask.all():
        raise EmptyStackException("all slices in stack are empty or corrupt")

    return out


def read_levelled_cube_to_xarray(
    products: List[EOProductProtocol],
    target_height: int,
    assets: Optional[List[str]] = None,
    eo_bands: Optional[List[str]] = None,
    grid: Optional[GridProtocol] = None,
    resampling: Resampling = Resampling.nearest,
    nodatavals: NodataVals = None,
    merge_products_by: Optional[str] = None,
    merge_method: MergeMethod = MergeMethod.first,
    sort: SortMethodConfig = TargetDateSort(),
    product_read_kwargs: dict = {},
    raise_empty: bool = True,
    slice_axis_name: str = "layers",
    band_axis_name: str = "bands",
    x_axis_name: str = "x",
    y_axis_name: str = "y",
) -> xr.Dataset:
    """
    Read products as slices into a cube by filling up nodata gaps with next slice.
    """
    assets = assets or []
    eo_bands = eo_bands or []
    variables = assets or eo_bands
    return to_dataset(
        read_levelled_cube_to_np_array(
            products=products,
            target_height=target_height,
            assets=assets,
            eo_bands=eo_bands,
            grid=grid,
            resampling=resampling,
            nodatavals=nodatavals,
            merge_products_by=merge_products_by,
            merge_method=merge_method,
            sort=sort,
            product_read_kwargs=product_read_kwargs,
            raise_empty=raise_empty,
        ),
        slice_names=[f"layer-{ii}" for ii in range(target_height)],
        band_names=variables,
        slice_axis_name=slice_axis_name,
        band_axis_name=band_axis_name,
        x_axis_name=x_axis_name,
        y_axis_name=y_axis_name,
    )
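
The core of levelled_cubes.py is the per-layer filling step: each slice only contributes pixels where a layer is still masked, and contributed pixels are immediately masked out of the slice so that deeper layers only receive the leftovers. A minimal, self-contained sketch of that levelling step using plain numpy.ma (the array names and shapes are illustrative, not from the package):

import numpy as np
import numpy.ma as ma

# two partially covered "slices" (True in mask means nodata)
slice_a = ma.masked_array(np.full((2, 2), 1), mask=[[False, True], [False, True]])
slice_b = ma.masked_array(np.full((2, 2), 2), mask=[[True, False], [False, False]])

# a fully masked 2-layer cube, as read_levelled_cube_to_np_array allocates it
cube = ma.masked_array(np.zeros((2, 2, 2), dtype=np.uint16), mask=True)

for slice_array in (slice_a, slice_b):
    for layer_index in range(cube.shape[0]):
        # holes still left in this layer
        empty_patches = cube[layer_index].mask.copy()
        if (~slice_array[empty_patches].mask).sum() == 0:
            continue  # slice has nothing left for this layer
        # fill the holes with whatever the slice still offers
        cube[layer_index][empty_patches] = slice_array[empty_patches]
        # pixels used here must not be reused in deeper layers
        slice_array[empty_patches] = ma.masked
        if slice_array.mask.all():
            break

# layer 0 is now fully filled (1s topped up with 2s);
# layer 1 only holds the leftover 2 that layer 0 did not need
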
mapchete_eo/io/path.py
ADDED

@@ -0,0 +1,144 @@

import hashlib
import logging
from contextlib import contextmanager
from enum import Enum
from tempfile import TemporaryDirectory
from typing import Generator
from xml.etree.ElementTree import Element, fromstring

import fsspec
import pystac
from mapchete.io import copy
from mapchete.path import MPath
from mapchete.settings import IORetrySettings
from retry import retry

from mapchete_eo.exceptions import AssetKeyError

logger = logging.getLogger(__name__)


COMMON_RASTER_EXTENSIONS = [".tif", ".jp2"]


@retry(logger=logger, **dict(IORetrySettings()))
def open_xml(path: MPath) -> Element:
    """Parse an XML file path into an etree root element."""
    logger.debug("open %s", path)
    return fromstring(path.read_text())


class ProductPathGenerationMethod(str, Enum):
    """Option to generate product cache path."""

    # <cache_basepath>/<product-id>
    product_id = "product_id"

    # <cache_basepath>/<product-hash>
    hash = "hash"

    # <cache_basepath>/<product-day>/<product-month>/<product-year>/<product-id>
    date_day_first = "date_day_first"

    # <cache_basepath>/<product-year>/<product-month>/<product-day>/<product-id>
    date_year_first = "date_year_first"


def get_product_cache_path(
    item: pystac.Item,
    basepath: MPath,
    path_generation_method: ProductPathGenerationMethod = ProductPathGenerationMethod.product_id,
) -> MPath:
    """
    Create product path with high cardinality prefixes optimized for S3.

    product_path_generation option:

    "product_id":
        <cache_basepath>/<product-id>

    "product_hash":
        <cache_basepath>/<product-hash>

    "date_day_first":
        <cache_basepath>/<product-day>/<product-month>/<product-year>/<product-id>

    "date_year_first":
        <cache_basepath>/<product-year>/<product-month>/<product-day>/<product-id>
    """
    path_generation_method = ProductPathGenerationMethod[path_generation_method]
    if path_generation_method == ProductPathGenerationMethod.product_id:
        return basepath / item.id

    elif path_generation_method == ProductPathGenerationMethod.hash:
        return basepath / hashlib.md5(f"{item.id}".encode()).hexdigest()

    else:
        if item.datetime is None:  # pragma: no cover
            raise AttributeError(f"stac item must have a valid datetime object: {item}")
        elif path_generation_method == ProductPathGenerationMethod.date_day_first:
            return (
                basepath
                / item.datetime.day
                / item.datetime.month
                / item.datetime.year
                / item.id
            )

        elif path_generation_method == ProductPathGenerationMethod.date_year_first:
            return (
                basepath
                / item.datetime.year
                / item.datetime.month
                / item.datetime.day
                / item.id
            )


def path_in_paths(path, existing_paths) -> bool:
    """Check if path is contained in list of existing paths independent of path prefix."""
    if path.startswith("s3://"):
        return path.lstrip("s3://") in existing_paths
    else:
        for existing_path in existing_paths:
            if existing_path.endswith(path):
                return True
        else:
            return False


@contextmanager
@retry(logger=logger, **dict(IORetrySettings()))
def cached_path(path: MPath, active: bool = True) -> Generator[MPath, None, None]:
    """If path is remote, download to temporary directory and return path."""
    if active and path.is_remote():
        with TemporaryDirectory() as tempdir:
            tempfile = MPath(tempdir) / path.name
            logger.debug("%s is remote, download to %s", path, tempfile)
            copy(
                path,
                tempfile,
            )
            yield tempfile
    else:
        yield path


def asset_mpath(
    item: pystac.Item,
    asset: str,
    fs: fsspec.AbstractFileSystem = None,
    absolute_path: bool = True,
) -> MPath:
    """Return MPath instance with asset href."""

    try:
        asset_path = MPath(item.assets[asset].href, fs=fs)
    except KeyError:
        raise AssetKeyError(
            f"{item.id} no asset named '{asset}' found in assets: {', '.join(item.assets.keys())}"
        )
    if absolute_path and not asset_path.is_absolute():
        return MPath(item.get_self_href(), fs=fs).parent / asset_path
    else:
        return asset_path
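
The helpers in path.py compose naturally: asset_mpath resolves a STAC asset href into an absolute MPath, cached_path optionally pulls a remote path into a temporary local copy, and get_product_cache_path derives a deterministic cache prefix. A hypothetical usage sketch (the catalog URL and the "red" asset key are made up for illustration):

import pystac
from mapchete.path import MPath

from mapchete_eo.io.path import (
    ProductPathGenerationMethod,
    asset_mpath,
    cached_path,
    get_product_cache_path,
)

# hypothetical STAC item with an absolute self href
item = pystac.Item.from_file("https://example.com/catalog/S2A_example/item.json")

# resolve a (possibly relative) asset href into an absolute path
red_band = asset_mpath(item, "red")

# remote paths are copied into a TemporaryDirectory for the duration of the block
with cached_path(red_band) as local_path:
    print(local_path)  # local copy while inside the block, cleaned up afterwards

# derive a cache prefix, here <basepath>/<year>/<month>/<day>/<product-id>
cache_path = get_product_cache_path(
    item,
    MPath("s3://my-cache-bucket/"),
    path_generation_method=ProductPathGenerationMethod.date_year_first,
)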