eodash_catalog 0.3.0__tar.gz → 0.3.17__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/.bumpversion.cfg +1 -1
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/Dockerfile +1 -1
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/PKG-INFO +1 -1
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/__about__.py +1 -1
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/endpoints.py +397 -42
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/generate_indicators.py +19 -7
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/sh_endpoint.py +3 -1
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/stac_handling.py +72 -13
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/thumbnails.py +4 -1
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/utils.py +69 -26
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/test_geoparquet.py +5 -5
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_sh_wms.json +2 -2
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/.dockerignore +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/.github/workflows/ci.yml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/.github/workflows/python-publish.yml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/.gitignore +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/.vscode/extensions.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/.vscode/settings.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/LICENSE.txt +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/README.md +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/pyproject.toml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/requirements.txt +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/ruff.toml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/__init__.py +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/duration.py +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/__init__.py +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/test-data/regional_forecast.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/test_generate.py +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-catalogs/testing-json.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-catalogs/testing.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_CROPOMAT1.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_cmems.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_cog.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_geodb.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_geodb_locations.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_geojson.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_locations_processing.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_process.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_projection.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_see_solar_energy.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_sh_wms_locations.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_tif_demo_1.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_tif_demo_1_json.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_tif_demo_2.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_veda.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_veda_tiles.json +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_wms_no_time.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-indicators/test_indicator.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-layers/baselayers.yaml +0 -0
- {eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-layers/overlays.yaml +0 -0
{eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: eodash_catalog
-Version: 0.3.0
+Version: 0.3.17
 Summary: This package is intended to help create a compatible STAC catalog for the eodash dashboard client. It supports configuration of multiple endpoint types for information extraction.
 Project-URL: Documentation, https://github.com/eodash/eodash_catalog#readme
 Project-URL: Issues, https://github.com/eodash/eodash_catalog/issues
{eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/endpoints.py

@@ -1,4 +1,6 @@
+import copy
 import importlib
+import io
 import json
 import os
 import sys
@@ -7,7 +9,9 @@ from collections.abc import Callable
 from datetime import datetime, timedelta
 from itertools import groupby
 from operator import itemgetter
+from urllib.parse import urlparse

+import pyarrow.parquet as pq
 import requests
 from pystac import Asset, Catalog, Collection, Item, Link, SpatialExtent, Summaries, TemporalExtent
 from pystac_client import Client
@@ -17,6 +21,8 @@ from structlog import get_logger

 from eodash_catalog.sh_endpoint import get_SH_token
 from eodash_catalog.stac_handling import (
+    add_authentication,
+    add_base_overlay_info,
     add_collection_information,
     add_example_info,
     add_process_info_child_collection,
@@ -28,6 +34,7 @@ from eodash_catalog.thumbnails import generate_thumbnail
 from eodash_catalog.utils import (
     Options,
     create_geometry_from_bbox,
+    extract_extent_from_geoparquet,
     filter_time_entries,
     format_datetime_to_isostring_zulu,
     generate_veda_cog_link,
@@ -169,7 +176,7 @@ def handle_STAC_based_endpoint(
         catalog, collection_config["Name"], collection_config, catalog_config, endpoint_config
     )
     for location in collection_config["Locations"]:
-        identifier = location.get("Identifier", uuid.uuid4())
+        identifier = location.get("Identifier", str(uuid.uuid4()))
         collection = process_STACAPI_Endpoint(
             catalog_config=catalog_config,
             endpoint_config=endpoint_config,
@@ -209,11 +216,13 @@ def handle_STAC_based_endpoint(
                     location["OverwriteBBox"],
                 ]
             )
+        add_collection_information(catalog_config, collection, collection_config)
+        add_base_overlay_info(collection, catalog_config, collection_config)
         update_extents_from_collection_children(root_collection)
     else:
         bbox = None
-        if endpoint_config.get("…
-            bbox = ",".join(map(str, endpoint_config["…
+        if endpoint_config.get("OverwriteBBox"):
+            bbox = ",".join(map(str, endpoint_config["OverwriteBBox"]))
         root_collection = process_STACAPI_Endpoint(
             catalog_config=catalog_config,
             endpoint_config=endpoint_config,
@@ -388,6 +397,7 @@ def handle_collection_only(
                 properties={},
                 geometry=None,
                 datetime=dt,
+                assets={"dummy_asset": Asset(href="")},
             )
             link = collection.add_item(item)
             link.extra_fields["datetime"] = format_datetime_to_isostring_zulu(dt)
@@ -460,12 +470,14 @@ def handle_SH_WMS_endpoint(
             LOGGER.warn(f"NO datetimes configured for collection: {collection_config['Name']}!")
         add_visualization_info(collection, collection_config, endpoint_config)
         add_process_info_child_collection(collection, catalog_config, collection_config)
+        add_collection_information(catalog_config, collection, collection_config)
+        add_base_overlay_info(collection, catalog_config, collection_config)
         update_extents_from_collection_children(root_collection)
     else:
         # if locations are not provided, treat the collection as a
         # general proxy to the sentinel hub layer
         datetimes = get_collection_datetimes_from_config(endpoint_config)
-        bbox = endpoint_config.get("…
+        bbox = endpoint_config.get("OverwriteBBox", [-180, -85, 180, 85])
         items = []
         for dt in datetimes:
             item = Item(
@@ -536,6 +548,117 @@ def handle_rasdaman_endpoint(
     return collection


+def handle_GeoDB_Features_endpoint(
+    catalog_config: dict,
+    endpoint_config: dict,
+    collection_config: dict,
+    coll_path_rel_to_root_catalog: str,
+    catalog: Catalog,
+    options: Options,
+) -> Collection:
+    # ID of collection is data["Name"] instead of CollectionId to be able to
+    # create more STAC collections from one geoDB table
+    collection = get_or_create_collection(
+        catalog, collection_config["Name"], collection_config, catalog_config, endpoint_config
+    )
+    coll_path_rel_to_root_catalog = f'{coll_path_rel_to_root_catalog}/{collection_config["Name"]}'
+    select = f'?select={endpoint_config["TimeParameter"]}'
+    url = (
+        endpoint_config["EndPoint"]
+        + endpoint_config["Database"]
+        + "_{}".format(endpoint_config["CollectionId"])
+        + select
+    )
+    response = json.loads(requests.get(url).text)
+    # Use aggregation value to group datetime results
+    aggregation = endpoint_config.get("Aggregation", "day")
+    unique_datetimes = set()
+    for value in response:
+        time_object = datetime.fromisoformat(value[endpoint_config["TimeParameter"]])
+        match aggregation:
+            case "hour":
+                unique_datetimes.add(
+                    datetime(
+                        time_object.year,
+                        time_object.month,
+                        time_object.day,
+                        time_object.hour,
+                    )
+                )
+            case "day":
+                unique_datetimes.add(
+                    datetime(time_object.year, time_object.month, time_object.day).date()
+                )
+            case "month":
+                unique_datetimes.add(datetime(time_object.year, time_object.month, 1).date())
+            case "year":
+                unique_datetimes.add(datetime(time_object.year, 1, 1).date())
+            case _:
+                # default to day
+                unique_datetimes.add(
+                    datetime(time_object.year, time_object.month, time_object.day).date()
+                )
+    # go over unique datetimes and create items
+    items = []
+    for dt in sorted(unique_datetimes):
+        item_datetime = dt if isinstance(dt, datetime) else datetime(dt.year, dt.month, dt.day)
+        matching_string = ""
+        match aggregation:
+            case "hour":
+                matching_string = item_datetime.strftime("%Y-%m-%dT%H:00:00Z")
+            case "day":
+                matching_string = item_datetime.strftime("%Y-%m-%d")
+            case "month":
+                matching_string = item_datetime.strftime("%Y-%m")
+            case "year":
+                matching_string = item_datetime.strftime("%Y")
+        updated_query = endpoint_config["Query"].replace("{{date_time}}", matching_string)
+        assets = {
+            "geodbfeatures": Asset(
+                href=f"{endpoint_config['EndPoint']}{endpoint_config['Database']}_{endpoint_config['CollectionId']}?{updated_query}",
+                media_type="application/geodb+json",
+                roles=["data"],
+            )
+        }
+        item = Item(
+            id=format_datetime_to_isostring_zulu(item_datetime),
+            bbox=endpoint_config.get("OverwriteBBox", [-180, -90, 180, 90]),
+            properties={},
+            geometry=create_geometry_from_bbox(
+                endpoint_config.get("OverwriteBBox", [-180, -90, 180, 90])
+            ),
+            datetime=item_datetime,
+            stac_extensions=[],
+            assets=assets,
+        )
+        # add eodash style visualization info if Style has been provided
+        if endpoint_config.get("Style"):
+            ep_st = endpoint_config.get("Style")
+            style_link = Link(
+                rel="style",
+                target=ep_st
+                if ep_st.startswith("http")
+                else f"{catalog_config['assets_endpoint']}/{ep_st}",
+                media_type="text/vector-styles",
+                extra_fields={
+                    "asset:keys": list(assets),
+                },
+            )
+            item.add_link(style_link)
+        add_projection_info(endpoint_config, item)
+        items.append(item)
+    save_items(
+        collection,
+        items,
+        options.outputpath,
+        catalog_config["id"],
+        coll_path_rel_to_root_catalog,
+        options.gp,
+    )
+    add_collection_information(catalog_config, collection, collection_config)
+    return collection
+
+
 def handle_GeoDB_endpoint(
     catalog_config: dict,
     endpoint_config: dict,
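For orientation: the new handler is dispatched on the endpoint name "GeoDB Features" (see the generate_indicators.py hunk further down). A minimal sketch of a config that would reach it — the key names come from the code above, all values are hypothetical:

    # Hypothetical endpoint config for the new GeoDB Features handler.
    endpoint_config = {
        "Name": "GeoDB Features",            # dispatch key, see generate_indicators.py below
        "EndPoint": "https://geodb.example.com/",
        "Database": "my_database",
        "CollectionId": "my_table",          # queried as "{Database}_{CollectionId}"
        "TimeParameter": "time",             # column the item datetimes are read from
        "Aggregation": "month",              # "hour" | "day" | "month" | "year", default "day"
        "Query": "time=eq.{{date_time}}",    # "{{date_time}}" is substituted per item
        "OverwriteBBox": [-180, -90, 180, 90],
    }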
@@ -602,33 +725,96 @@ def handle_GeoDB_endpoint(
         locations_collection = get_or_create_collection(
             collection, key, sc_config, catalog_config, endpoint_config
         )
-        if …
+        # check if input data is none
+        if input_data is None:
+            input_data = []
+        if len(input_data) > 0 or endpoint_config.get("FeatureCollection"):
             items = []
-            …
+            content_for_individual_datetimes = values
+            if endpoint_config.get("MapTimesCollection"):
+                # extract datetimes from another table if configured so and match it based on aoi_id
+                # special for E13d
+                select = f"?select=time&aoi_id=eq.{key}"
+                url = (
+                    endpoint_config["EndPoint"]
+                    + endpoint_config["Database"]
+                    + "_{}".format(endpoint_config["MapTimesCollection"])
+                    + select
+                )
+                response = json.loads(requests.get(url).text)
+                content_for_individual_datetimes = []
+                for response_obj in response:
+                    time_object = datetime.fromisoformat(response_obj["time"])
+                    for searched_row in values:
+                        search_datetime = datetime.fromisoformat(searched_row["time"])
+                        if (
+                            search_datetime.month == time_object.month
+                            and search_datetime.year == time_object.year
+                        ):
+                            break
+                    insert_row = copy.deepcopy(searched_row)
+                    # overwrite time with one from another collection and save
+                    insert_row["time"] = response_obj["time"]
+                    content_for_individual_datetimes.append(insert_row)
+            for v in content_for_individual_datetimes:
                 # add items based on inputData fields for each time step available in values
-                first_match = next(
+                first_match: dict = next(
                     (item for item in input_data if item.get("Identifier") == v["input_data"]), None
                 )
                 time_object = datetime.fromisoformat(v["time"])
+                if endpoint_config.get("MapReplaceDates"):
+                    # get mapping of AOI_ID to list of dates
+                    available_dates_for_aoi_id = endpoint_config.get("MapReplaceDates").get(
+                        v["aoi_id"]
+                    )
+                    if available_dates_for_aoi_id:
+                        formatted_datetime = time_object.strftime("%Y-%m-%d")
+                        if formatted_datetime not in available_dates_for_aoi_id:
+                            # discard this date because not in available map dates
+                            continue
                 # extract wkt geometry from sub_aoi
                 if "sub_aoi" in v and v["sub_aoi"] != "/":
                     # create geometry from wkt
-                    …
+                    shapely_geometry = wkt.loads(v["sub_aoi"])
+                    geometry = mapping(shapely_geometry)
                     # converting multipolygon to polygon to avoid shapely throwing an exception
                     # in collection extent from geoparquet table generation
                     # while trying to create a multipolygon extent of all multipolygons
                     if geometry["type"] == "MultiPolygon":
                         geometry = {"type": "Polygon", "coordinates": geometry["coordinates"][0]}
+                    bbox = shapely_geometry.bounds
                 else:
                     geometry = create_geometry_from_bbox(bbox)
+
+                assets = {"dummy_asset": Asset(href="")}
+                if endpoint_config.get("FeatureCollection"):
+                    assets["geodbfeatures"] = Asset(
+                        href=f"{endpoint_config['EndPoint']}{endpoint_config['Database']}_{endpoint_config['FeatureCollection']}?aoi_id=eq.{v['aoi_id']}&time=eq.{v['time']}",
+                        media_type="application/geodb+json",
+                        roles=["data"],
+                    )
                 item = Item(
                     id=v["time"],
                     bbox=bbox,
                     properties={},
                     geometry=geometry,
                     datetime=time_object,
-                    assets=…
+                    assets=assets,
                 )
+                # make sure to also add Style link if FeatureCollection and Style has been provided
+                if endpoint_config.get("FeatureCollection") and endpoint_config.get("Style"):
+                    ep_st = endpoint_config.get("Style")
+                    style_link = Link(
+                        rel="style",
+                        target=ep_st
+                        if ep_st.startswith("http")
+                        else f"{catalog_config['assets_endpoint']}/{ep_st}",
+                        media_type="text/vector-styles",
+                        extra_fields={
+                            "asset:keys": list(assets),
+                        },
+                    )
+                    item.add_link(style_link)
                 if first_match:
                     match first_match["Type"]:
                         case "WMS":
@@ -637,7 +823,7 @@ def handle_GeoDB_endpoint(
                                 "wms:layers": [first_match["Layers"]],
                                 "role": ["data"],
                             }
-                            if …
+                            if "sentinel-hub.com" in url:
                                 instanceId = os.getenv("SH_INSTANCE_ID")
                                 if "InstanceId" in endpoint_config:
                                     instanceId = endpoint_config["InstanceId"]
@@ -652,7 +838,7 @@ def handle_GeoDB_endpoint(
                                     {"wms:dimensions": {"TIME": f"{start_date}/{end_date}"}}
                                 )
                                 # we add the instance id to the url
-                                url = f"…
+                                url = f"{url}{instanceId}"
                             else:
                                 extra_fields.update({"wms:dimensions": {"TIME": v["time"]}})
                             link = Link(
@@ -664,6 +850,50 @@ def handle_GeoDB_endpoint(
                             )
                             item.add_link(link)
                             items.append(item)
+                        case "XYZ":
+                            # handler for NASA apis
+                            url = first_match["Url"]
+                            extra_fields = {}
+                            # replace time to a formatted version
+                            date_formatted = time_object.strftime(
+                                first_match.get("DateFormat", "%Y_%m_%d")
+                            )
+                            target_url = url.replace("{time}", date_formatted)
+                            if SiteMapping := first_match.get("SiteMapping"):
+                                # match with aoi_id
+                                site = SiteMapping.get(v["aoi_id"])
+                                # replace in URL
+                                if site:
+                                    target_url = target_url.replace("{site}", site)
+                                else:
+                                    LOGGER.info(
+                                        f"Warning: no match for SiteMapping in config for {site}"
+                                    )
+                            link = Link(
+                                rel="xyz",
+                                target=target_url,
+                                media_type="image/png",
+                                title=collection_config["Name"],
+                                extra_fields=extra_fields,
+                            )
+                            item.add_link(link)
+                            items.append(item)
+                elif endpoint_config.get("FeatureCollection"):
+                    # no input data match found, just add the item with asset only
+                    assets["geodbfeatures"] = Asset(
+                        href=f"{endpoint_config['EndPoint']}{endpoint_config['Database']}_{endpoint_config['FeatureCollection']}?aoi_id=eq.{v['aoi_id']}&time=eq.{v['time']}",
+                        media_type="application/geodb+json",
+                        roles=["data"],
+                    )
+                    item = Item(
+                        id=v["time"],
+                        bbox=bbox,
+                        properties={},
+                        geometry=geometry,
+                        datetime=time_object,
+                        assets=assets,
+                    )
+                    items.append(item)
             save_items(
                 locations_collection,
                 items,
@@ -687,6 +917,8 @@ def handle_GeoDB_endpoint(
         link.extra_fields["latlng"] = latlon
         link.extra_fields["country"] = country
         link.extra_fields["name"] = city
+    add_collection_information(catalog_config, locations_collection, collection_config)
+    add_base_overlay_info(locations_collection, catalog_config, collection_config)

     if "yAxis" not in collection_config:
         # fetch yAxis and store it to data, preventing need to save it per dataset in yml
@@ -725,7 +957,8 @@ def handle_SH_endpoint(
 ) -> Collection:
     token = get_SH_token(endpoint_config)
     headers = {"Authorization": f"Bearer {token}"}
-    endpoint_config["EndPoint"]…
+    endpoint_url_parts = urlparse(endpoint_config["EndPoint"])
+    endpoint_config["EndPoint"] = f"https://{endpoint_url_parts.netloc}/api/v1/catalog/1.0.0/"
     # Overwrite collection id with type, such as ZARR or BYOC
     if endpoint_config.get("Type"):
         endpoint_config["CollectionId"] = (
@@ -762,12 +995,14 @@ def handle_WMS_endpoint(
     # some endpoints allow "narrowed-down" capabilities per-layer, which we utilize to not
     # have to process full service capabilities XML
     capabilities_url = endpoint_config["EndPoint"]
-    spatial_extent, …
+    spatial_extent, datetimes_retrieved = retrieveExtentFromWMSWMTS(
         capabilities_url,
         endpoint_config["LayerId"],
         version=endpoint_config.get("Version", "1.1.1"),
         wmts=wmts,
     )
+    if datetimes_retrieved:
+        datetimes = datetimes_retrieved
     # optionally filter time results
     if query := endpoint_config.get("Query"):
         datetimes = filter_time_entries(datetimes, query)
@@ -778,19 +1013,24 @@ def handle_WMS_endpoint(
     # Create an item per time to allow visualization in stac clients
     if len(datetimes) > 0:
         for dt in datetimes:
+            # case of wms interval coming from config
+            dt_item = dt[0] if isinstance(dt, list) else dt
             item = Item(
-                id=format_datetime_to_isostring_zulu(dt),
+                id=format_datetime_to_isostring_zulu(dt_item),
                 bbox=spatial_extent,
                 properties={},
                 geometry=create_geometry_from_bbox(spatial_extent),
-                datetime=dt,
+                datetime=dt_item,
                 stac_extensions=[
                     "https://stac-extensions.github.io/web-map-links/v1.1.0/schema.json",
                 ],
                 assets={"dummy_asset": Asset(href="")},
             )
             add_projection_info(endpoint_config, item)
-            …
+            dt_visualization = dt if isinstance(dt, list) else [dt]
+            add_visualization_info(
+                item, collection_config, endpoint_config, datetimes=dt_visualization
+            )
             items.append(item)
     else:
         LOGGER.warn(f"NO datetimes returned for collection: {collection_config['Name']}!")
@@ -826,11 +1066,19 @@ def generate_veda_tiles_link(endpoint_config: dict, item: str | None) -> str:
     color_formula = ""
     if endpoint_config.get("ColorFormula"):
         color_formula = "&color_formula={}".format(endpoint_config["ColorFormula"])
+    rescale = ""
+    if endpoint_config.get("Rescale"):
+        for rescale in endpoint_config["Rescale"]:
+            rescale += f"&rescale={rescale}"
     no_data = ""
     if endpoint_config.get("NoData"):
         no_data = "&no_data={}".format(endpoint_config["NoData"])
     item = item if item else "{item}"
-    …
+    target_url_base = endpoint_config["EndPoint"].replace("/stac/", "")
+    target_url = (
+        f"{target_url_base}/raster/collections/{collection}/items/{item}"
+        f"/tiles/WebMercatorQuad/{{z}}/{{x}}/{{y}}?{assets}{color_formula}{no_data}{rescale}"
+    )
     return target_url


@@ -871,7 +1119,6 @@ def add_visualization_info(
             start_isostring = format_datetime_to_isostring_zulu(dt)
             # SH WMS for public collections needs time interval, we use full day here
             end = dt + timedelta(days=1) - timedelta(milliseconds=1)
-            # we have start_datetime and end_datetime
             if len(datetimes) == 2:
                 end = datetimes[1]
             end_isostring = format_datetime_to_isostring_zulu(end)
@@ -880,9 +1127,10 @@ def add_visualization_info(

         if dimensions != {}:
             extra_fields["wms:dimensions"] = dimensions
+        endpoint_url_parts = urlparse(endpoint_config["EndPoint"])
         link = Link(
             rel="wms",
-            target=f"https://…
+            target=f"https://{endpoint_url_parts.netloc}/ogc/wms/{instanceId}",
             media_type=(endpoint_config.get("MimeType", "image/png")),
             title=collection_config["Name"],
             extra_fields=extra_fields,
@@ -899,6 +1147,14 @@ def add_visualization_info(
                 "role": ["data"],
             }
         )
+        if collection_config.get("EodashIdentifier") == "FNF":
+            extra_fields.update(
+                {
+                    "wms:layers": endpoint_config.get("LayerId", "").replace(
+                        "{time}", (datetimes is not None and str(datetimes[0].year)) or "2020"
+                    ),
+                }
+            )
         dimensions = {}
         if dimensions_config := endpoint_config.get("Dimensions", {}):
             for key, value in dimensions_config.items():
@@ -909,7 +1165,13 @@ def add_visualization_info(
                 )
             dimensions[key] = value
         if datetimes is not None:
-            …
+            if len(datetimes) > 1:
+                start = format_datetime_to_isostring_zulu(datetimes[0])
+                end = format_datetime_to_isostring_zulu(datetimes[1])
+                interval = f"{start}/{end}"
+                dimensions["TIME"] = interval
+            else:
+                dimensions["TIME"] = format_datetime_to_isostring_zulu(datetimes[0])
         if dimensions != {}:
             extra_fields["wms:dimensions"] = dimensions
         if endpoint_config.get("Styles"):
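The interval handling above can be sketched in isolation; fmt below is a simplified stand-in for the package's format_datetime_to_isostring_zulu:

    # Stand-alone sketch of the TIME dimension logic above: two datetimes
    # become a "start/end" interval, a single one a plain timestamp.
    from datetime import datetime, timezone

    def fmt(dt):  # simplified stand-in for format_datetime_to_isostring_zulu
        return dt.strftime("%Y-%m-%dT%H:%M:%SZ")

    datetimes = [
        datetime(2024, 1, 1, tzinfo=timezone.utc),
        datetime(2024, 1, 31, 23, 59, 59, tzinfo=timezone.utc),
    ]
    dimensions = {}
    if len(datetimes) > 1:
        dimensions["TIME"] = f"{fmt(datetimes[0])}/{fmt(datetimes[1])}"
    else:
        dimensions["TIME"] = fmt(datetimes[0])
    print(dimensions["TIME"])  # 2024-01-01T00:00:00Z/2024-01-31T23:59:59Z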
@@ -966,28 +1228,6 @@ def add_visualization_info(
             link,
         )
         stac_object.add_link(link)
-    elif endpoint_config["Name"] == "JAXA_WMTS_PALSAR":
-        target_url = "{}".format(endpoint_config.get("EndPoint"))
-        # custom time just for this special case as a default for collection wmts
-        time = None
-        if datetimes is not None:
-            time = datetimes[0]
-        extra_fields.update(
-            {
-                "wmts:layer": endpoint_config.get("LayerId", "").replace(
-                    "{time}", (time and str(time.year)) or "2017"
-                )
-            }
-        )
-        stac_object.add_link(
-            Link(
-                rel="wmts",
-                target=target_url,
-                media_type="image/png",
-                title="wmts capabilities",
-                extra_fields=extra_fields,
-            )
-        )
     elif endpoint_config["Name"] == "xcube":
         if endpoint_config["Type"] == "zarr":
             # either preset ColormapName of left as a template
@@ -1214,6 +1454,121 @@ def handle_raw_source(
         # eodash v4 compatibility, adding last referenced style to collection
         if style_link:
             collection.add_link(style_link)
+    elif endpoint_config.get("ParquetSource"):
+        # if parquet source is provided, download it and create items from it
+        parquet_source = endpoint_config["ParquetSource"]
+        if parquet_source.startswith("http"):
+            # download parquet file
+            parquet_file = requests.get(parquet_source)
+            if parquet_file.status_code != 200:
+                LOGGER.error(f"Failed to download parquet file from {parquet_source}")
+                return collection
+            try:
+                table = pq.read_table(io.BytesIO(parquet_file.content))
+            except Exception as e:
+                LOGGER.error(f"Failed to read parquet file: {e}")
+                return collection
+            extents = extract_extent_from_geoparquet(table)
+            collection.extent.temporal = extents[0]
+            collection.extent.spatial = extents[1]
+            collection.add_asset(
+                "geoparquet",
+                Asset(
+                    href=parquet_source,
+                    media_type="application/vnd.apache.parquet",
+                    title="GeoParquet Items",
+                    roles=["collection-mirror"],
+                ),
+            )
+
+    else:
+        LOGGER.warn(f"NO datetimes configured for collection: {collection_config['Name']}!")
+
+    add_collection_information(catalog_config, collection, collection_config)
+    return collection
+
+
+def handle_vector_tile_source(
+    catalog_config: dict,
+    endpoint_config: dict,
+    collection_config: dict,
+    coll_path_rel_to_root_catalog: str,
+    catalog: Catalog,
+    options: Options,
+) -> Collection:
+    collection = get_or_create_collection(
+        catalog, collection_config["Name"], collection_config, catalog_config, endpoint_config
+    )
+    coll_path_rel_to_root_catalog = f'{coll_path_rel_to_root_catalog}/{collection_config["Name"]}'
+    if len(endpoint_config.get("TimeEntries", [])) > 0:
+        items = []
+        style_link = None
+        for time_entry in endpoint_config["TimeEntries"]:
+            # create Item for each time entry
+            media_type = "application/vnd.mapbox-vector-tile"
+            style_type = "text/vector-styles"
+            bbox = endpoint_config.get("Bbox", [-180, -85, 180, 85])
+            dt = parse_datestring_to_tz_aware_datetime(time_entry["Time"])
+
+            item = Item(
+                id=format_datetime_to_isostring_zulu(dt),
+                bbox=bbox,
+                properties={},
+                geometry=create_geometry_from_bbox(bbox),
+                datetime=dt,
+                extra_fields={},
+                assets={"dummy_asset": Asset(href="")},
+            )
+            extra_fields_link = {}
+            add_authentication(item, time_entry["Url"], extra_fields_link)
+            # add mapbox vector tile link
+            identifier = str(uuid.uuid4())
+            extra_fields_link["key"] = identifier
+            if vector_tile_id_property := endpoint_config.get("idProperty"):
+                extra_fields_link["idProperty"] = vector_tile_id_property
+            link = Link(
+                rel="vector-tile",
+                target=time_entry["Url"],
+                media_type=media_type,
+                title=collection_config["Name"],
+                extra_fields=extra_fields_link,
+            )
+            add_projection_info(
+                endpoint_config,
+                link,
+            )
+            item.add_link(link)
+            add_projection_info(
+                endpoint_config,
+                item,
+            )
+            if endpoint_config.get("Attribution"):
+                item.stac_extensions.append(
+                    "https://stac-extensions.github.io/attribution/v0.1.0/schema.json"
+                )
+                item.extra_fields["attribution"] = endpoint_config["Attribution"]
+            # add style
+            if ep_st := endpoint_config.get("Style"):
+                style_link = Link(
+                    rel="style",
+                    target=ep_st
+                    if ep_st.startswith("http")
+                    else f"{catalog_config['assets_endpoint']}/{ep_st}",
+                    media_type=style_type,
+                    extra_fields={"links:keys": [identifier]},
+                )
+                item.add_link(style_link)
+            items.append(item)
+
+        save_items(
+            collection,
+            items,
+            options.outputpath,
+            catalog_config["id"],
+            coll_path_rel_to_root_catalog,
+            options.gp,
+        )
+
     else:
         LOGGER.warn(f"NO datetimes configured for collection: {collection_config['Name']}!")

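A hypothetical collection config exercising the new vector tile handler; the key names (TimeEntries, Bbox, idProperty, Style, Attribution) are read in the code above, the values are invented:

    endpoint_config = {
        "Name": "VectorTile source",  # dispatch key, see generate_indicators.py below
        "TimeEntries": [
            {"Time": "2024-01-01", "Url": "https://tiles.example.com/2024-01/{z}/{x}/{y}.pbf"},
            {"Time": "2024-02-01", "Url": "https://tiles.example.com/2024-02/{z}/{x}/{y}.pbf"},
        ],
        "Bbox": [-180, -85, 180, 85],
        "idProperty": "fid",             # optional, forwarded on the vector-tile link
        "Style": "styles/example.json",  # resolved against assets_endpoint unless absolute
        "Attribution": "© Example provider",
    }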
{eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/generate_indicators.py

@@ -19,10 +19,12 @@ from eodash_catalog.endpoints import (
     handle_collection_only,
     handle_custom_endpoint,
     handle_GeoDB_endpoint,
+    handle_GeoDB_Features_endpoint,
     handle_rasdaman_endpoint,
     handle_raw_source,
     handle_SH_endpoint,
     handle_SH_WMS_endpoint,
+    handle_vector_tile_source,
     handle_VEDA_endpoint,
     handle_WMS_endpoint,
     handle_xcube_endpoint,
@@ -241,6 +243,15 @@ def process_collection_file(
                     catalog,
                     options,
                 )
+            elif endpoint_config["Name"] == "GeoDB Features":
+                collection = handle_GeoDB_Features_endpoint(
+                    catalog_config,
+                    endpoint_config,
+                    collection_config,
+                    coll_path_rel_to_root_catalog,
+                    catalog,
+                    options,
+                )
             elif endpoint_config["Name"] == "VEDA":
                 collection = handle_VEDA_endpoint(
                     catalog_config,
@@ -282,16 +293,14 @@ def process_collection_file(
                     catalog,
                     options,
                 )
-            elif endpoint_config["Name"] == "…
-                …
-                collection = handle_WMS_endpoint(
+            elif endpoint_config["Name"] == "VectorTile source":
+                collection = handle_vector_tile_source(
                     catalog_config,
                     endpoint_config,
                     collection_config,
                     coll_path_rel_to_root_catalog,
                     catalog,
                     options,
-                    wmts=True,
                 )
             elif endpoint_config["Name"] == "Collection-only":
                 collection = handle_collection_only(
@@ -350,9 +359,11 @@ def process_collection_file(
                 countries.extend(sub_coll_def["Country"])
             else:
                 countries.append(sub_coll_def["Country"])
-            coll_path_rel_to_root_catalog = (
-                f"{coll_path_rel_to_root_catalog}/{sub_coll_def['Collection']}"
-            )
+            # commented out intentionally, because otherwise paths further down did
+            # not match, parquet file was one level deeper
+            # coll_path_rel_to_root_catalog = (
+            #     f"{coll_path_rel_to_root_catalog}/{sub_coll_def['Collection']}"
+            # )
             process_collection_file(
                 catalog_config,
                 "{}/{}".format(options.collectionspath, sub_coll_def["Collection"]),
@@ -411,6 +422,7 @@ def process_collection_file(
         add_collection_information(catalog_config, parent_collection, collection_config, True)
         add_process_info(parent_collection, catalog_config, collection_config)
         update_extents_from_collection_children(parent_collection)
+        add_base_overlay_info(parent_collection, catalog_config, collection_config)
         # Fill summaries for locations
         parent_collection.summaries = Summaries(
             {
{eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/sh_endpoint.py

@@ -1,10 +1,10 @@
 import os
 import time
+from urllib.parse import urlparse

 from oauthlib.oauth2 import BackendApplicationClient
 from requests_oauthlib import OAuth2Session

-SH_TOKEN_URL = "https://services.sentinel-hub.com/oauth/token"
 _token_cache: dict[str, dict] = {}


@@ -24,6 +24,8 @@ def get_SH_token(endpoint_config: dict) -> str:
     client = BackendApplicationClient(client_id=client_id)
     oauth = OAuth2Session(client=client)
     # Get token for the session
+    endpoint_url_parts = urlparse(endpoint_config["EndPoint"])
+    SH_TOKEN_URL = f"https://{endpoint_url_parts.netloc}/oauth/token"
     token = oauth.fetch_token(
         token_url=SH_TOKEN_URL,
         client_secret=client_secret,
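The token URL is now derived from the configured endpoint's host rather than hard-coded, so alternative Sentinel Hub deployments get a matching OAuth endpoint. A quick sketch with an assumed example endpoint:

    from urllib.parse import urlparse

    # Assumed example endpoint; only the netloc is reused for the token URL.
    endpoint = "https://sh.dataspace.copernicus.eu/api/v1/catalog/1.0.0/"
    token_url = f"https://{urlparse(endpoint).netloc}/oauth/token"
    print(token_url)  # https://sh.dataspace.copernicus.eu/oauth/token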
{eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/stac_handling.py

@@ -1,3 +1,4 @@
+import uuid
 from datetime import datetime, timezone

 import requests
@@ -18,6 +19,7 @@ from structlog import get_logger
 from eodash_catalog.utils import (
     generateDatetimesFromInterval,
     get_full_url,
+    make_intervals,
     parse_datestring_to_tz_aware_datetime,
     read_config_file,
 )
@@ -121,11 +123,14 @@ def create_service_link(endpoint_config: dict, catalog_config: dict) -> Link:
     return sl


-def create_web_map_link(layer_config: dict, role: str) -> Link:
+def create_web_map_link(
+    collection: Collection, catalog_config: dict, layer_config: dict, role: str
+) -> Link:
     extra_fields = {
         "roles": [role],
         "id": layer_config["id"],
     }
+    media_type = (layer_config.get("media_type", "image/png"),)
     if layer_config.get("default"):
         extra_fields["roles"].append("default")
     if layer_config.get("visible"):
@@ -145,12 +150,32 @@ def create_web_map_link(layer_config: dict, role: str) -> Link:
             extra_fields["wmts:layer"] = layer_config["layer"]
             if layer_config.get("dimensions"):
                 extra_fields["wmts:dimensions"] = layer_config["dimensions"]
+        case "vector-tile":
+            identifier = str(uuid.uuid4())
+            extra_fields["key"] = identifier
+            media_type = "application/vnd.mapbox-vector-tile"
+            if vector_tile_id_property := layer_config.get("idProperty"):
+                extra_fields["idProperty"] = vector_tile_id_property
+            if ep_st := layer_config.get("Style"):
+                style_link = Link(
+                    rel="style",
+                    target=ep_st
+                    if ep_st.startswith("http")
+                    else f"{catalog_config['assets_endpoint']}/{ep_st}",
+                    media_type="text/vector-styles",
+                    extra_fields={"links:keys": [identifier]},
+                )
+                collection.add_link(style_link)
+            add_authentication(collection, layer_config["url"], extra_fields)

     if layer_config.get("Attribution"):
         extra_fields["attribution"] = layer_config["Attribution"]
+    if layer_config.get("Colorlegend"):
+        extra_fields["eox:colorlegend"] = layer_config["Colorlegend"]
     wml = Link(
         rel=layer_config["protocol"],
         target=layer_config["url"],
-        media_type=…
+        media_type=media_type,
         title=layer_config["name"],
         extra_fields=extra_fields,
     )
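A hypothetical baselayer entry, as it might appear in a layers config file, that the new "vector-tile" branch above would handle; only the key names are taken from the code, the values are invented:

    layer_config = {
        "id": "example_vt_baselayer",
        "name": "Example vector tiles",
        "protocol": "vector-tile",
        "url": "https://api.mapbox.com/v4/example/{z}/{x}/{y}.vector.pbf",  # triggers add_authentication
        "idProperty": "fid",
        "Style": "styles/base.json",           # emitted as a linked style via "links:keys"
        "Attribution": "© Example",
        "Colorlegend": {"title": "Example"},   # surfaced as "eox:colorlegend"
    }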
@@ -306,18 +331,21 @@ def add_collection_information(
         ),
     )
     if collection_config.get("Image"):
+        # Check if absolute URL or relative path
+        if collection_config["Image"].startswith("http"):
+            image_url = collection_config["Image"]
+        else:
+            image_url = f'{catalog_config["assets_endpoint"]}/{collection_config["Image"]}'
         collection.add_asset(
             "thumbnail",
             Asset(
-                href=…
+                href=image_url,
                 media_type="image/png",
                 roles=["thumbnail"],
             ),
         )
         # Bubble up thumbnail to extra fields
-        collection.extra_fields["thumbnail"] = (
-            f'{catalog_config["assets_endpoint"]}/' f'{collection_config["Image"]}'
-        )
+        collection.extra_fields["thumbnail"] = image_url
     # Add extra fields to collection if available
     add_extra_fields(collection, collection_config, is_root_collection)
@@ -472,23 +500,31 @@ def add_base_overlay_info(
     collection: Collection, catalog_config: dict, collection_config: dict
 ) -> None:
     # add custom baselayers specially for this indicator
-    if …
+    if "BaseLayers" in collection_config:
         for layer in collection_config["BaseLayers"]:
-            collection.add_link(create_web_map_link(layer, role="baselayer"))
+            collection.add_link(
+                create_web_map_link(collection, catalog_config, layer, role="baselayer")
+            )
     # alternatively use default base layers defined
     elif catalog_config.get("default_base_layers"):
         base_layers = read_config_file(catalog_config["default_base_layers"])
         for layer in base_layers:
-            collection.add_link(create_web_map_link(layer, role="baselayer"))
+            collection.add_link(
+                create_web_map_link(collection, catalog_config, layer, role="baselayer")
+            )
     # add custom overlays just for this indicator
-    if …
+    if "OverlayLayers" in collection_config:
         for layer in collection_config["OverlayLayers"]:
-            collection.add_link(create_web_map_link(layer, role="overlay"))
+            collection.add_link(
+                create_web_map_link(collection, catalog_config, layer, role="overlay")
+            )
     # check if default overlay layers defined
     elif catalog_config.get("default_overlay_layers"):
         overlay_layers = read_config_file(catalog_config["default_overlay_layers"])
         for layer in overlay_layers:
-            collection.add_link(create_web_map_link(layer, role="overlay"))
+            collection.add_link(
+                create_web_map_link(collection, catalog_config, layer, role="overlay")
+            )


 def add_extra_fields(
@@ -535,16 +571,22 @@ def add_extra_fields(
 def get_collection_datetimes_from_config(endpoint_config: dict) -> list[datetime]:
     times_datetimes: list[datetime] = []
     if endpoint_config:
+        interval_between_dates = endpoint_config.get("WMSIntervalsBetweenDates")
         if endpoint_config.get("Times"):
             times = list(endpoint_config.get("Times", []))
             times_datetimes = sorted(
                 [parse_datestring_to_tz_aware_datetime(time) for time in times]
             )
+            if interval_between_dates:
+                # convert to list of datetime_start and datetime_end
+                times_datetimes = make_intervals(times_datetimes)
         elif endpoint_config.get("DateTimeInterval"):
             start = endpoint_config["DateTimeInterval"].get("Start", "2020-09-01T00:00:00Z")
             end = endpoint_config["DateTimeInterval"].get("End", "2020-10-01T00:00:00Z")
             timedelta_config = endpoint_config["DateTimeInterval"].get("Timedelta", {"days": 1})
-            times_datetimes = generateDatetimesFromInterval(start, end, timedelta_config)
+            times_datetimes = generateDatetimesFromInterval(
+                start, end, timedelta_config, interval_between_dates
+            )
     return times_datetimes
@@ -572,3 +614,20 @@ def add_projection_info(
         stac_object.extra_fields["eodash:proj4_def"] = proj
     else:
         raise Exception(f"Incorrect type of proj definition {proj}")
+
+
+def add_authentication(stac_object: Item | Collection | Catalog, url: str, extra_fields_link: dict):
+    if "api.mapbox" in url:
+        # add authentication info
+        auth_extension = "https://stac-extensions.github.io/authentication/v1.1.0/schema.json"
+        if auth_extension not in stac_object.stac_extensions:
+            stac_object.stac_extensions.append(auth_extension)
+        stac_object.extra_fields["auth:schemes"] = {
+            "mapboxauth": {
+                "type": "apiKey",
+                "name": "access_token",
+                "in": "query",
+            }
+        }
+        extra_fields_link["auth:refs"] = ["mapboxauth"]
+    pass
{eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/thumbnails.py

@@ -1,6 +1,7 @@
 import os
 import re
 from pathlib import Path
+from urllib.parse import urlparse

 import requests
 from pystac import (
@@ -46,7 +47,9 @@ def generate_thumbnail(
     # if it is start and end datetime have to exist
     if item_datetime:
         time = format_datetime_to_isostring_zulu(item_datetime)
-    …
+    endpoint_url_parts = urlparse(endpoint_config["EndPoint"])
+    url = "https://{}/ogc/wms/{}?{}&layers={}&time={}&{}".format(
+        endpoint_url_parts,
         instanceId,
         wms_config,
         endpoint_config["LayerId"],
{eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/src/eodash_catalog/utils.py

@@ -18,7 +18,7 @@ from dateutil import parser
 from owslib.wcs import WebCoverageService
 from owslib.wms import WebMapService
 from owslib.wmts import WebMapTileService
-from pystac import Asset, Catalog, Collection, Item, …
+from pystac import Asset, Catalog, Collection, Item, RelType, SpatialExtent, TemporalExtent
 from pytz import timezone as pytztimezone
 from shapely import geometry as sgeom
 from shapely import wkb
@@ -214,7 +214,7 @@ def parse_duration(datestring):


 def generateDatetimesFromInterval(
-    start: str, end: str, timedelta_config: dict | None = None
+    start: str, end: str, timedelta_config: dict | None = None, interval_between_dates: bool = False
 ) -> list[datetime]:
     if timedelta_config is None:
         timedelta_config = {}
@@ -226,7 +226,10 @@ def generateDatetimesFromInterval(
     delta = timedelta(**timedelta_config)
     dates = []
     while start_dt <= end_dt:
-        dates.append(start_dt)
+        if interval_between_dates:
+            dates.append([start_dt, start_dt + delta - timedelta(seconds=1)])
+        else:
+            dates.append(start_dt)
         start_dt += delta
     return dates

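With the new flag set, each step of the interval becomes a [start, end] pair covering one delta minus a second. A worked sketch against the 0.3.17 signature above, assuming the start/end strings are parsed as in the package's defaults:

    from eodash_catalog.utils import generateDatetimesFromInterval

    dates = generateDatetimesFromInterval(
        "2024-01-01T00:00:00Z", "2024-01-03T00:00:00Z", {"days": 1}, True
    )
    # [[2024-01-01 00:00:00+00:00, 2024-01-01 23:59:59+00:00],
    #  [2024-01-02 00:00:00+00:00, 2024-01-02 23:59:59+00:00],
    #  [2024-01-03 00:00:00+00:00, 2024-01-03 23:59:59+00:00]]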
@@ -290,8 +293,12 @@ def generate_veda_cog_link(endpoint_config: dict, file_url: str | None) -> str:
     )

     file_url = f"url={file_url}&" if file_url else ""
-    …
-    target_url = …
+    target_url_base = endpoint_config["EndPoint"].replace("/stac/", "")
+    target_url = (
+        f"{target_url_base}/raster/cog/tiles/WebMercatorQuad/{{z}}/{{x}}/{{y}}?"
+        f"{file_url}resampling_method=nearest"
+        f"{bidx}{colormap}{colormap_name}{rescale}{Nodata}"
+    )
     return target_url


@@ -321,6 +328,7 @@ def add_single_item_if_collection_empty(endpoint_config: dict, collection: Colle
|
|
|
321
328
|
datetime=datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytztimezone("UTC")),
|
|
322
329
|
start_datetime=datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytztimezone("UTC")),
|
|
323
330
|
end_datetime=datetime.now(tz=pytztimezone("UTC")),
|
|
331
|
+
assets={"dummy_asset": Asset(href="")},
|
|
324
332
|
)
|
|
325
333
|
collection.add_item(item)
|
|
326
334
|
if not endpoint_config.get("OverwriteBBox"):
|
|
@@ -440,8 +448,37 @@ def update_extents_from_collection_children(collection: Collection):
     ):
         individual_datetimes.extend(c_child.extent.temporal.intervals[0])  # type: ignore
     individual_datetimes = list(filter(lambda x: x is not None, individual_datetimes))
-    time_extent = [min(individual_datetimes), max(individual_datetimes)]
-    collection.extent.temporal = TemporalExtent([time_extent])
+    if individual_datetimes:
+        time_extent = [min(individual_datetimes), max(individual_datetimes)]
+        collection.extent.temporal = TemporalExtent([time_extent])
+
+
+def extract_extent_from_geoparquet(table) -> tuple[TemporalExtent, SpatialExtent]:
+    """
+    Extract spatial and temporal extents from a GeoParquet file.
+    Args:
+        table (pyarrow.Table): The table containing the GeoParquet data.
+    Returns:
+        tuple: A tuple containing spatial and temporal extents.
+    """
+    # add extent information to the collection
+    min_datetime = pc.min(table["datetime"]).as_py()
+    max_datetime = pc.max(table["datetime"]).as_py()
+    if not min_datetime:
+        # cases when datetime was null
+        # fallback to start_datetime
+        min_datetime = pc.min(table["start_datetime"]).as_py()
+        max_datetime = pc.max(table["start_datetime"]).as_py()
+    # Making sure time extent is timezone aware
+    if min_datetime and min_datetime.tzinfo is None:
+        min_datetime = min_datetime.replace(tzinfo=timezone.utc)
+    if max_datetime and max_datetime.tzinfo is None:
+        max_datetime = max_datetime.replace(tzinfo=timezone.utc)
+    temporal = TemporalExtent([min_datetime, max_datetime])
+    geoms = [wkb.loads(g.as_py()) for g in table["geometry"] if g is not None]
+    bbox = sgeom.MultiPolygon(geoms).bounds
+    spatial = SpatialExtent([bbox])
+    return [temporal, spatial]


 def save_items(
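A hedged usage sketch for the new helper, assuming eodash_catalog 0.3.17 is installed and a tiny pyarrow table shaped like a stac-geoparquet export (datetime column plus WKB geometry column):

    from datetime import datetime, timezone

    import pyarrow as pa
    from shapely import geometry as sgeom

    from eodash_catalog.utils import extract_extent_from_geoparquet

    table = pa.table(
        {
            "datetime": [
                datetime(2024, 1, 1, tzinfo=timezone.utc),
                datetime(2024, 6, 1, tzinfo=timezone.utc),
            ],
            "start_datetime": [None, None],  # only consulted when "datetime" is null
            "geometry": [sgeom.box(0, 0, 1, 1).wkb, sgeom.box(2, 2, 3, 3).wkb],
        }
    )
    temporal, spatial = extract_extent_from_geoparquet(table)
    print(spatial.bboxes)  # [(0.0, 0.0, 3.0, 3.0)]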
@@ -493,25 +530,9 @@ def save_items(
         output_path = f"{buildcatpath}/{colpath}"
         os.makedirs(output_path, exist_ok=True)
         stacgp.arrow.to_parquet(table, f"{output_path}/items.parquet")
-        …
-        …
-        …
-            media_type="application/vnd.apache.parquet",
-            title="GeoParquet Items",
-        )
-        collection.add_link(gp_link)
-        # add extent information to the collection
-        min_datetime = pc.min(table["datetime"]).as_py()
-        max_datetime = pc.max(table["datetime"]).as_py()
-        if not min_datetime:
-            # cases when datetime was null
-            # fallback to start_datetime
-            min_datetime = pc.min(table["start_datetime"]).as_py()
-            max_datetime = pc.max(table["start_datetime"]).as_py()
-        collection.extent.temporal = TemporalExtent([min_datetime, max_datetime])
-        geoms = [wkb.loads(g.as_py()) for g in table["geometry"] if g is not None]
-        bbox = sgeom.MultiPolygon(geoms).bounds
-        collection.extent.spatial = SpatialExtent([bbox])
+        extents = extract_extent_from_geoparquet(table)
+        collection.extent.temporal = extents[0]
+        collection.extent.spatial = extents[1]
         # Make sure to also reference the geoparquet as asset
         collection.add_asset(
             "geoparquet",
@@ -604,3 +625,25 @@ def merge_bboxes(bboxes: list[list[float]]) -> list[float]:
     max_lat = max(b[3] for b in bboxes)

     return [min_lon, min_lat, max_lon, max_lat]
+
+
+def make_intervals(datetimes: list[datetime]) -> list[list[datetime]]:
+    """
+    Converts a list of datetimes into list of lists of datetimes in format of [start,end]
+    where end is next element in original list minus 1 second
+    """
+    intervals = []
+    n = len(datetimes)
+    for i in range(n):
+        start = datetimes[i]
+        if i < n - 1:
+            # end is next datetime minus one second
+            end = datetimes[i + 1] - timedelta(seconds=1)
+        else:
+            prev_interval = timedelta(seconds=0)
+            # last item: use previous interval length added to last start
+            if n > 1:
+                prev_interval = datetimes[-1] - datetimes[-2]
+            end = start + prev_interval
+        intervals.append([start, end])
+    return intervals
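A quick worked check of make_intervals: earlier entries end one second before the next start, and the last interval reuses the preceding interval's length.

    from datetime import datetime, timezone
    from eodash_catalog.utils import make_intervals

    dts = [
        datetime(2024, 1, 1, tzinfo=timezone.utc),
        datetime(2024, 2, 1, tzinfo=timezone.utc),
        datetime(2024, 3, 1, tzinfo=timezone.utc),
    ]
    for start, end in make_intervals(dts):
        print(start.date(), "→", end)
    # 2024-01-01 → 2024-01-31 23:59:59+00:00
    # 2024-02-01 → 2024-02-29 23:59:59+00:00
    # 2024-03-01 → 2024-03-30 00:00:00+00:00  (last start + previous 29-day span)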
{eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/test_geoparquet.py

@@ -45,11 +45,11 @@ def test_geoparquet_geojson_items(catalog_output_folder):

     with open(os.path.join(child_collection_path, "collection.json")) as fp:
         collection_json = json.load(fp)
-
-    assert …
-
-    assert …
-    items_path = os.path.join(child_collection_path, …
+    # check if parquet source is present in assets
+    assert "geoparquet" in collection_json["assets"]
+    parquet_asset = collection_json["assets"]["geoparquet"]
+    assert parquet_asset["type"] == "application/vnd.apache.parquet"
+    items_path = os.path.join(child_collection_path, parquet_asset["href"].split("/")[-1])
     assert os.path.exists(items_path)

     with open(items_path, "rb") as fp:
{eodash_catalog-0.3.0 → eodash_catalog-0.3.17}/tests/testing-collections/test_sh_wms.json

@@ -33,7 +33,7 @@
     "Name": "Sentinel Hub WMS",
     "CollectionId": "sentinel-1-grd",
     "LayerId": "SENTINEL_1_IW_VV",
-    "…
+    "OverwriteBBox": [
         101.938,
         11.945,
         106.37,
@@ -65,4 +65,4 @@
     "Url": "https://www.copernicus.eu/en/access-data/conventional-data-access-hubs"
   }
 ]
-}
\ No newline at end of file
+}