openeo-gfmap 0.2.0__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openeo_gfmap/manager/job_manager.py +16 -15
- openeo_gfmap/manager/job_splitters.py +135 -11
- openeo_gfmap/preprocessing/sar.py +12 -33
- openeo_gfmap/utils/catalogue.py +9 -3
- {openeo_gfmap-0.2.0.dist-info → openeo_gfmap-0.4.0.dist-info}/METADATA +5 -4
- {openeo_gfmap-0.2.0.dist-info → openeo_gfmap-0.4.0.dist-info}/RECORD +8 -8
- {openeo_gfmap-0.2.0.dist-info → openeo_gfmap-0.4.0.dist-info}/WHEEL +1 -1
- {openeo_gfmap-0.2.0.dist-info → openeo_gfmap-0.4.0.dist-info}/licenses/LICENSE +0 -0
openeo_gfmap/manager/job_manager.py
CHANGED
@@ -17,9 +17,6 @@ from pystac import CatalogType
 from openeo_gfmap.manager import _log
 from openeo_gfmap.stac import constants
 
-# Lock to use when writing to the STAC collection
-_stac_lock = Lock()
-
 
 def retry_on_exception(max_retries: int, delay_s: int = 180):
     """Decorator to retry a function if an exception occurs.
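Note: a minimal usage sketch of the `retry_on_exception` decorator visible in this hunk's context lines. The decorated function is hypothetical, and the import assumes the decorator is reachable from its defining module:

    from openeo_gfmap.manager.job_manager import retry_on_exception

    @retry_on_exception(max_retries=2, delay_s=30)
    def flaky_step():
        # Any exception raised here triggers up to 2 retries, 30 seconds apart.
        ...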
@@ -132,6 +129,7 @@ class GFMAPJobManager(MultiBackendJobManager):
         self._catalogue_cache = output_dir / "catalogue_cache.bin"
 
         self.stac = stac
+        self.lock = Lock()
         self.stac_enabled = stac_enabled
         self.collection_id = collection_id
         self.collection_description = collection_description
@@ -258,7 +256,7 @@ class GFMAPJobManager(MultiBackendJobManager):
                 "Resuming postprocessing of job %s, queueing on_job_finished...",
                 row.id,
             )
-            future = self._executor.submit(self.on_job_done, job, row, _stac_lock)
+            future = self._executor.submit(self.on_job_done, job, row)
             future.add_done_callback(
                 partial(
                     done_callback,
@@ -327,7 +325,7 @@ class GFMAPJobManager(MultiBackendJobManager):
                 "Job %s finished successfully, queueing on_job_done...", job.job_id
             )
             job_status = "postprocessing"
-            future = self._executor.submit(self.on_job_done, job, row, _stac_lock)
+            future = self._executor.submit(self.on_job_done, job, row)
             # Future will setup the status to finished when the job is done
             future.add_done_callback(
                 partial(
@@ -416,37 +414,40 @@ class GFMAPJobManager(MultiBackendJobManager):
         )
 
     @retry_on_exception(max_retries=2, delay_s=30)
-    def on_job_done(
-        self, job: BatchJob, row: pd.Series, lock: Lock
-    ):  # pylint: disable=arguments-differ
+    def on_job_done(self, job: BatchJob, row: pd.Series):
         """Method called when a job finishes successfully. It will first download the results of
         the job and then call the `post_job_action` method.
         """
 
         job_products = {}
-        for idx, asset in enumerate(job.get_results().get_assets()):
+        job_results = job.get_results()
+        asset_ids = [a.name for a in job_results.get_assets()]
+        for idx, asset_id in enumerate(asset_ids):
             try:
+                asset = job_results.get_asset(asset_id)
                 _log.debug(
                     "Generating output path for asset %s from job %s...",
-                    asset.name,
+                    asset_id,
                     job.job_id,
                 )
-                output_path = self._output_path_gen(self._output_dir, idx, row)
+                output_path = self._output_path_gen(
+                    self._output_dir, idx, row, asset_id
+                )
                 # Make the output path
                 output_path.parent.mkdir(parents=True, exist_ok=True)
                 asset.download(output_path)
                 # Add to the list of downloaded products
-                job_products[f"{job.job_id}_{asset.name}"] = [output_path]
+                job_products[f"{job.job_id}_{asset_id}"] = [output_path]
                 _log.debug(
                     "Downloaded %s from job %s -> %s",
-                    asset.name,
+                    asset_id,
                     job.job_id,
                     output_path,
                 )
             except Exception as e:
                 _log.exception(
                     "Error downloading asset %s from job %s:\n%s",
-                    asset.name,
+                    asset_id,
                     job.job_id,
                     e,
                 )
@@ -491,7 +492,7 @@ class GFMAPJobManager(MultiBackendJobManager):
         _log.info("Adding %s items to the STAC collection...", len(job_items))
 
         if self.stac_enabled:
-            with lock:
+            with self.lock:
                 self._update_stac(job.job_id, job_items)
 
         _log.info("Job %s and post job action finished successfully.", job.job_id)
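Note: taken together, these job_manager.py hunks move the STAC write lock from module scope into the manager instance, so `on_job_done` no longer threads a lock argument through the executor. A minimal sketch of the resulting pattern (the `Manager` class here is illustrative, not the real `GFMAPJobManager`):

    from threading import Lock

    class Manager:
        def __init__(self):
            # Instance-level lock replaces the removed module-level _stac_lock.
            self.lock = Lock()

        def on_job_done(self, job_id: str):
            # No `lock` parameter anymore; the instance lock serializes
            # concurrent STAC collection writes from executor callbacks.
            with self.lock:
                print(f"updating STAC collection for job {job_id}")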
openeo_gfmap/manager/job_splitters.py
CHANGED
@@ -8,6 +8,7 @@ from typing import List
 import geopandas as gpd
 import h3
 import requests
+import s2sphere
 
 from openeo_gfmap.manager import _log
 
@@ -16,11 +17,11 @@ def load_s2_grid(web_mercator: bool = False) -> gpd.GeoDataFrame:
     """Returns a geo data frame from the S2 grid."""
     # Builds the path where the geodataframe should be
     if not web_mercator:
-        gdf_path = Path.home() / ".openeo-gfmap" / "
-        url = "https://artifactory.vgt.vito.be/artifactory/auxdata-public/gfmap/
+        gdf_path = Path.home() / ".openeo-gfmap" / "s2grid_voronoi_4326.parquet"
+        url = "https://artifactory.vgt.vito.be/artifactory/auxdata-public/gfmap/s2grid_voronoi_4326.parquet"
     else:
-        gdf_path = Path.home() / ".openeo-gfmap" / "
-        url = "https://artifactory.vgt.vito.be/artifactory/auxdata-public/gfmap/
+        gdf_path = Path.home() / ".openeo-gfmap" / "s2grid_voronoi_3857.parquet"
+        url = "https://artifactory.vgt.vito.be/artifactory/auxdata-public/gfmap/s2grid_voronoi_3857.parquet"
 
     if not gdf_path.exists():
         _log.info("S2 grid not found, downloading it from artifactory.")
@@ -40,6 +41,38 @@ def load_s2_grid(web_mercator: bool = False) -> gpd.GeoDataFrame:
     return gpd.read_parquet(gdf_path)
 
 
+def load_s2_grid_centroids(web_mercator: bool = False) -> gpd.GeoDataFrame:
+    """Returns a geo data frame from the S2 grid centroids."""
+    # Builds the path where the geodataframe should be
+    if not web_mercator:
+        gdf_path = (
+            Path.home() / ".openeo-gfmap" / "s2grid_bounds_4326_centroids.geoparquet"
+        )
+        url = "https://artifactory.vgt.vito.be/artifactory/auxdata-public/gfmap/s2grid_bounds_4326_centroids.geoparquet"
+    else:
+        gdf_path = (
+            Path.home() / ".openeo-gfmap" / "s2grid_bounds_3857_centroids.geoparquet"
+        )
+        url = "https://artifactory.vgt.vito.be/artifactory/auxdata-public/gfmap/s2grid_bounds_3857_centroids.geoparquet"
+
+    if not gdf_path.exists():
+        _log.info("S2 grid centroids not found, downloading it from artifactory.")
+        # Downloads the file from the artifactory URL
+        gdf_path.parent.mkdir(exist_ok=True)
+        response = requests.get(
+            url,
+            timeout=180,  # 3mins
+        )
+        if response.status_code != 200:
+            raise ValueError(
+                "Failed to download the S2 grid centroids from the artifactory. "
+                f"Status code: {response.status_code}"
+            )
+        with open(gdf_path, "wb") as f:
+            f.write(response.content)
+    return gpd.read_parquet(gdf_path)
+
+
 def _resplit_group(
     polygons: gpd.GeoDataFrame, max_points: int
 ) -> List[gpd.GeoDataFrame]:
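Note: a quick usage sketch of the new loader (assumes network access; the first call downloads the grid into ~/.openeo-gfmap/ as shown above):

    from openeo_gfmap.manager.job_splitters import load_s2_grid_centroids

    centroids = load_s2_grid_centroids(web_mercator=True)  # EPSG:3857 variant
    print(centroids.crs, len(centroids))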
@@ -79,14 +112,12 @@ def split_job_s2grid(
 
     polygons["centroid"] = polygons.geometry.centroid
 
-    # Dataset containing all the S2 tiles, find the nearest S2 tile for each point
     s2_grid = load_s2_grid(web_mercator)
-    s2_grid["geometry"] = s2_grid.geometry.centroid
-
-    s2_grid = s2_grid[s2_grid.cdse_valid]
 
-    polygons = gpd.sjoin_nearest(
-        polygons.set_geometry("centroid"), s2_grid[["tile", "geometry"]]
+    polygons = gpd.sjoin(
+        polygons.set_geometry("centroid"),
+        s2_grid[["tile", "geometry"]],
+        predicate="intersects",
     ).drop(columns=["index_right", "centroid"])
 
     polygons = polygons.set_geometry("geometry").to_crs(original_crs)
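Note: the switch from a nearest-neighbour join to a plain `sjoin` works because the Voronoi-based grid partitions space, so every centroid intersects exactly one cell. A toy illustration with hypothetical tiles:

    import geopandas as gpd
    from shapely.geometry import Point, box

    grid = gpd.GeoDataFrame(
        {"tile": ["31UFS", "31UGS"]},
        geometry=[box(0, 0, 1, 1), box(1, 0, 2, 1)],  # two adjacent cells
        crs="EPSG:3857",
    )
    pts = gpd.GeoDataFrame(
        geometry=[Point(0.5, 0.5), Point(1.5, 0.5)], crs="EPSG:3857"
    )
    # Each point falls inside exactly one cell; no nearest-neighbour search needed.
    print(gpd.sjoin(pts, grid, predicate="intersects")["tile"].tolist())
    # ['31UFS', '31UGS']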
@@ -113,7 +144,7 @@ def append_h3_index(
     geom_col = geom_col.to_crs(epsg=4326)
 
     polygons["h3index"] = geom_col.apply(
-        lambda pt: h3.geo_to_h3(pt.y, pt.x, grid_resolution)
+        lambda pt: h3.latlng_to_cell(pt.y, pt.x, grid_resolution)
     )
     return polygons
 
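Note: this one-line change tracks the h3 4.x API rename (hence the h3==4.1.0 pin in the METADATA diff below); both spellings take latitude, longitude, resolution:

    import h3

    cell = h3.latlng_to_cell(50.85, 4.35, 5)  # h3 >= 4.0
    # h3 3.x spelling of the same call: h3.geo_to_h3(50.85, 4.35, 5)
    print(cell)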
@@ -165,3 +196,96 @@ def split_job_hex(
         split_datasets.append(sub_gdf.reset_index(drop=True))
 
     return split_datasets
+
+
+def split_job_s2sphere(
+    gdf: gpd.GeoDataFrame, max_points=500, start_level=8
+) -> List[gpd.GeoDataFrame]:
+    """
+    EXPERIMENTAL
+    Split a GeoDataFrame into multiple groups based on the S2geometry cell ID of each geometry.
+
+    S2geometry is a library that provides a way to index and query spatial data. This function splits
+    the GeoDataFrame into groups based on the S2 cell ID of each geometry, based on its centroid.
+
+    If a cell contains more points than max_points, it will be recursively split into
+    smaller cells until each cell contains at most max_points points.
+
+    More information on S2geometry can be found at https://s2geometry.io/
+    An overview of the S2 cell hierarchy can be found at https://s2geometry.io/resources/s2cell_statistics.html
+
+    :param gdf: GeoDataFrame containing points to split
+    :param max_points: Maximum number of points per group
+    :param start_level: Starting S2 cell level
+    :return: List of GeoDataFrames containing the split groups
+    """
+
+    if "geometry" not in gdf.columns:
+        raise ValueError("The GeoDataFrame must contain a 'geometry' column.")
+
+    if gdf.crs is None:
+        raise ValueError("The GeoDataFrame must contain a CRS")
+
+    # Store the original CRS of the GeoDataFrame and reproject to EPSG:3857
+    original_crs = gdf.crs
+    gdf = gdf.to_crs(epsg=3857)
+
+    # Add a centroid column to the GeoDataFrame and convert it to EPSG:4326
+    gdf["centroid"] = gdf.geometry.centroid
+
+    # Reproject the GeoDataFrame to its original CRS
+    gdf = gdf.to_crs(original_crs)
+
+    # Set the GeoDataFrame's geometry to the centroid column and reproject to EPSG:4326
+    gdf = gdf.set_geometry("centroid")
+    gdf = gdf.to_crs(epsg=4326)
+
+    # Create a dictionary to store points by their S2 cell ID
+    cell_dict = {}
+
+    # Iterate over each point in the GeoDataFrame
+    for idx, row in gdf.iterrows():
+        # Get the S2 cell ID for the point at a given level
+        cell_id = _get_s2cell_id(row.centroid, start_level)
+
+        if cell_id not in cell_dict:
+            cell_dict[cell_id] = []
+
+        cell_dict[cell_id].append(row)
+
+    result_groups = []
+
+    # Function to recursively split cells if they contain more points than max_points
+    def _split_s2cell(cell_id, points, current_level=start_level):
+        if len(points) <= max_points:
+            if len(points) > 0:
+                points = gpd.GeoDataFrame(
+                    points, crs=original_crs, geometry="geometry"
+                ).drop(columns=["centroid"])
+                points["s2sphere_cell_id"] = cell_id
+                points["s2sphere_cell_level"] = current_level
+                result_groups.append(gpd.GeoDataFrame(points))
+        else:
+            children = s2sphere.CellId(cell_id).children()
+            child_cells = {child.id(): [] for child in children}
+
+            for point in points:
+                child_cell_id = _get_s2cell_id(point.centroid, current_level + 1)
+                child_cells[child_cell_id].append(point)
+
+            for child_cell_id, child_points in child_cells.items():
+                _split_s2cell(child_cell_id, child_points, current_level + 1)
+
+    # Split cells that contain more points than max_points
+    for cell_id, points in cell_dict.items():
+        _split_s2cell(cell_id, points)
+
+    return result_groups
+
+
+def _get_s2cell_id(point, level):
+    lat, lon = point.y, point.x
+    cell_id = s2sphere.CellId.from_lat_lng(
+        s2sphere.LatLng.from_degrees(lat, lon)
+    ).parent(level)
+    return cell_id.id()
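Note: a usage sketch of the experimental splitter with toy points (how the points group depends on the S2 cell layout at the chosen level):

    import geopandas as gpd
    from shapely.geometry import Point

    from openeo_gfmap.manager.job_splitters import split_job_s2sphere

    gdf = gpd.GeoDataFrame(
        geometry=[Point(4.35, 50.85), Point(4.40, 50.90), Point(13.40, 52.52)],
        crs="EPSG:4326",
    )
    for group in split_job_s2sphere(gdf, max_points=2, start_level=8):
        print(group["s2sphere_cell_id"].iat[0], len(group))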
openeo_gfmap/preprocessing/sar.py
CHANGED
@@ -1,9 +1,9 @@
 """Routines to pre-process sar signals."""
 
 import openeo
-from openeo.processes import array_create, if_, is_nodata, power
+from openeo.processes import array_create, power
 
-from openeo_gfmap import Backend, BackendContext
+from openeo_gfmap import BackendContext
 
 
 def compress_backscatter_uint16(
@@ -27,38 +27,17 @@ def compress_backscatter_uint16(
     openeo.DataCube
         The datacube with the backscatter values compressed to uint16.
     """
-    backend = backend_context.backend
 
-    # Additional check related to problematic values present in creodias collections
-    # https://github.com/Open-EO/openeo-geopyspark-driver/issues/293
-    if backend in [Backend.CDSE, Backend.CDSE_STAGING, Backend.FED]:
-        cube = cube.apply_dimension(
-            dimension="bands",
-            process=lambda x: array_create(
-                [
-                    if_(
-                        is_nodata(x[0]),
-                        1,
-                        power(base=10, p=(10.0 * x[0].log(base=10) + 83.0) / 20.0),
-                    ),
-                    if_(
-                        is_nodata(x[1]),
-                        1,
-                        power(base=10, p=(10.0 * x[1].log(base=10) + 83.0) / 20.0),
-                    ),
-                ]
-            ),
-        )
-    else:
-        cube = cube.apply_dimension(
-            dimension="bands",
-            process=lambda x: array_create(
-                [
-                    power(base=10, p=(10.0 * x[0].log(base=10) + 83.0) / 20.0),
-                    power(base=10, p=(10.0 * x[1].log(base=10) + 83.0) / 20.0),
-                ]
-            ),
-        )
+    # Apply rescaling of power values in a logarithmic way
+    cube = cube.apply_dimension(
+        dimension="bands",
+        process=lambda x: array_create(
+            [
+                power(base=10, p=(10.0 * x[0].log(base=10) + 83.0) / 20.0),
+                power(base=10, p=(10.0 * x[1].log(base=10) + 83.0) / 20.0),
+            ]
+        ),
+    )
 
     # Change the data type to uint16 for optimization purposes
     return cube.linear_scale_range(1, 65534, 1, 65534)
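Note: the retained expression maps linear backscatter power onto the uint16 range. Since 10^((10 * log10(x) + 83) / 20) equals sqrt(x) * 10^4.15, a NumPy sketch of the same rescaling on illustrative values:

    import numpy as np

    x = np.array([0.001, 0.01, 0.1, 1.0])  # linear backscatter (power)
    compressed = 10 ** ((10 * np.log10(x) + 83.0) / 20.0)
    print(np.round(compressed, 1))  # approx. [446.7, 1412.5, 4466.8, 14125.4]

The final linear_scale_range(1, 65534, 1, 65534) then clamps the result into 1..65534 before the cast to uint16.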
openeo_gfmap/utils/catalogue.py
CHANGED
@@ -8,7 +8,7 @@ import requests
 from pyproj.crs import CRS
 from rasterio.warp import transform_bounds
 from requests import adapters
-from shapely.geometry import box, shape
+from shapely.geometry import Point, box, shape
 from shapely.ops import unary_union
 
 from openeo_gfmap import (
@@ -204,8 +204,14 @@ def s1_area_per_orbitstate_vvvh(
         shapely_geometries = [
             shape(feature["geometry"]) for feature in spatial_extent["features"]
         ]
-        geometry = unary_union(shapely_geometries)
-        bounds = geometry.bounds
+        if len(shapely_geometries) == 1 and isinstance(shapely_geometries[0], Point):
+            point = shapely_geometries[0]
+            buffer_size = 0.0001
+            buffered_geometry = point.buffer(buffer_size)
+            bounds = buffered_geometry.bounds
+        else:
+            geometry = unary_union(shapely_geometries)
+            bounds = geometry.bounds
         epsg = 4326
     elif isinstance(spatial_extent, BoundingBoxExtent):
         bounds = [
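Note: the new branch handles a degenerate single-Point feature collection, whose bounding box has zero area; buffering by 0.0001 degrees (roughly 11 m) yields a usable query bbox. Sketch:

    from shapely.geometry import Point

    pt = Point(4.35, 50.85)
    print(pt.bounds)                 # (4.35, 50.85, 4.35, 50.85): zero-area bbox
    print(pt.buffer(0.0001).bounds)  # approx. (4.3499, 50.8499, 4.3501, 50.8501)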
{openeo_gfmap-0.2.0.dist-info → openeo_gfmap-0.4.0.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: openeo_gfmap
-Version: 0.2.0
+Version: 0.4.0
 Summary: OpenEO General Framework for Mapping
 Project-URL: Homepage, https://github.com/Open-EO/openeo-gfmap
 Project-URL: Bug Tracker, https://github.com/Open-EO/openeo-gfmap/issues
@@ -13,14 +13,15 @@ Requires-Dist: cftime
 Requires-Dist: fastparquet
 Requires-Dist: geojson>=3.0.0
 Requires-Dist: geopandas
-Requires-Dist: h3
+Requires-Dist: h3==4.1.0
 Requires-Dist: h5netcdf>=1.2.0
 Requires-Dist: netcdf4
 Requires-Dist: numpy<2.0.0
 Requires-Dist: onnxruntime
-Requires-Dist: openeo
+Requires-Dist: openeo<=0.35
 Requires-Dist: pyarrow
 Requires-Dist: rasterio
+Requires-Dist: s2sphere==0.2.*
 Requires-Dist: scipy
 Provides-Extra: dev
 Requires-Dist: matplotlib>=3.3.0; extra == 'dev'
{openeo_gfmap-0.2.0.dist-info → openeo_gfmap-0.4.0.dist-info}/RECORD
RENAMED
@@ -14,13 +14,13 @@ openeo_gfmap/fetching/s2.py,sha256=ytjrZiZIwXxrdiky2V0bAKLBU9Dpaa5b2XsHvI6jl1M,7
 openeo_gfmap/inference/__init__.py,sha256=M6NnKGYCpHNYmRL9OkHi5GmfCtWoJ0wCNR6VXRuDgjE,165
 openeo_gfmap/inference/model_inference.py,sha256=0qPUgrjI1hy5ZnyGwuuvvw5oxnMGdgvvu9Go6-e9LZQ,12550
 openeo_gfmap/manager/__init__.py,sha256=2bckkPiDQBgoBWD9spk1BKXy2UGkWKe50A3HmIwmqrA,795
-openeo_gfmap/manager/job_manager.py,sha256
-openeo_gfmap/manager/job_splitters.py,sha256=
+openeo_gfmap/manager/job_manager.py,sha256=-MZJBfF_wV94FejoYbFPNviEQx3jLmJXb6XLeHg7egE,27221
+openeo_gfmap/manager/job_splitters.py,sha256=kkGxgiudY3LrA40Ro_9q2eFW_Pjdm5a5VaNOXk2w5qo,10694
 openeo_gfmap/preprocessing/__init__.py,sha256=-kJAy_WY4o8oqziRozcUuXtuGIM0IOvTCF6agTUgRWA,619
 openeo_gfmap/preprocessing/cloudmasking.py,sha256=d280H5fByjNbCVZHjPn_dUatNI-ejphu4A75sUVoRqo,10029
 openeo_gfmap/preprocessing/compositing.py,sha256=Jp9Ku5JpU7TJ4DYGc6YuqMeP1Ip7zns7NguC17BtFyA,2526
 openeo_gfmap/preprocessing/interpolation.py,sha256=VVD483NoC0KYUSh28XaBNZzNaybQjdyCN8rXyXy2W9E,327
-openeo_gfmap/preprocessing/sar.py,sha256=
+openeo_gfmap/preprocessing/sar.py,sha256=XtOIlBrX3o2DlrGHBksHYWzOdzhkZMQT2rp5LxDbdMQ,1391
 openeo_gfmap/preprocessing/scaling.py,sha256=oUNhykVC41Je3E_men_-PikAKNwYhYbwN9J1_Ru8Zi4,2121
 openeo_gfmap/preprocessing/udf_cldmask.py,sha256=WqqFLBK5rIQPkb_dlgUWWSzicsPtVSthaIef40FHKJA,1162
 openeo_gfmap/preprocessing/udf_rank.py,sha256=n2gSIY2ZHVVr9wJx1Bs2HtmvScAkz2NqhjxUM-iIKM0,1438
@@ -29,12 +29,12 @@ openeo_gfmap/stac/__init__.py,sha256=kVMJ9hrN4MjcRCOgRDCj5TfAWRXe0GHu2gJQjG-dS4Y
 openeo_gfmap/stac/constants.py,sha256=O1bcijRBj6YRqR_aAcYO5JzJg7mdzhzUSm4vKnxMbtQ,1485
 openeo_gfmap/utils/__init__.py,sha256=UDwkWUwsnV6ZLXeaJKOCos-MDG2ZaIFyg8s0IiRVtng,997
 openeo_gfmap/utils/build_df.py,sha256=OPmD_Onkl9ybYIiLxmU_GmanP8xD71F1ZybJc7xQmns,1515
-openeo_gfmap/utils/catalogue.py,sha256
+openeo_gfmap/utils/catalogue.py,sha256=sgiXbRfywY77HSdQnnJ9eKoadc4TKQVa-uuaRadgAOw,13642
 openeo_gfmap/utils/intervals.py,sha256=V6l3ofww50fN_pvWC4NuGQ2ZsyGdhAlTZTiRcC0foVE,2395
 openeo_gfmap/utils/netcdf.py,sha256=KkAAxnq-ZCMjDMd82638noYwxqNpMsnpiU04Q-qX26A,698
 openeo_gfmap/utils/split_stac.py,sha256=asjT0jx6ic8GJFqqAisaWxOvQ_suSRv4sxyFOyHFvpI,3895
 openeo_gfmap/utils/tile_processing.py,sha256=QZ9bi5tPmyTVyyNvFZgd26s5dSnMl1grTKq2veK1C90,2068
-openeo_gfmap-0.2.0.dist-info/METADATA,sha256=
-openeo_gfmap-0.2.0.dist-info/WHEEL,sha256=
-openeo_gfmap-0.2.0.dist-info/licenses/LICENSE,sha256=
-openeo_gfmap-0.2.0.dist-info/RECORD,,
+openeo_gfmap-0.4.0.dist-info/METADATA,sha256=65GVaP6sG-Gnemf3HlqyRtREBlujPgBHScIAGSk8dpI,4322
+openeo_gfmap-0.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+openeo_gfmap-0.4.0.dist-info/licenses/LICENSE,sha256=aUuGpjieWiscTNtyLcSaeVsJ4pb6J9c4wUq1bR0e4t4,11349
+openeo_gfmap-0.4.0.dist-info/RECORD,,

{openeo_gfmap-0.2.0.dist-info → openeo_gfmap-0.4.0.dist-info}/licenses/LICENSE
RENAMED
File without changes