eodash_catalog 0.3.2__tar.gz → 0.3.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of eodash_catalog has been flagged as potentially problematic in its registry.
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/.bumpversion.cfg +1 -1
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/Dockerfile +1 -1
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/PKG-INFO +1 -1
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/__about__.py +1 -1
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/endpoints.py +217 -11
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/generate_indicators.py +10 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/stac_handling.py +8 -1
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/utils.py +30 -4
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/.dockerignore +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/.github/workflows/ci.yml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/.github/workflows/python-publish.yml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/.gitignore +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/.vscode/extensions.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/.vscode/settings.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/LICENSE.txt +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/README.md +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/pyproject.toml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/requirements.txt +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/ruff.toml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/__init__.py +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/duration.py +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/sh_endpoint.py +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/thumbnails.py +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/__init__.py +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/test-data/regional_forecast.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/test_generate.py +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/test_geoparquet.py +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-catalogs/testing-json.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-catalogs/testing.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_CROPOMAT1.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_cmems.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_cog.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_geodb.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_geodb_locations.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_geojson.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_locations_processing.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_process.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_projection.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_see_solar_energy.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_sh_wms.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_sh_wms_locations.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_tif_demo_1.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_tif_demo_1_json.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_tif_demo_2.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_veda.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_veda_tiles.json +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-collections/test_wms_no_time.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-indicators/test_indicator.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-layers/baselayers.yaml +0 -0
- {eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/tests/testing-layers/overlays.yaml +0 -0
{eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: eodash_catalog
-Version: 0.3.2
+Version: 0.3.4
 Summary: This package is intended to help create a compatible STAC catalog for the eodash dashboard client. It supports configuration of multiple endpoint types for information extraction.
 Project-URL: Documentation, https://github.com/eodash/eodash_catalog#readme
 Project-URL: Issues, https://github.com/eodash/eodash_catalog/issues
{eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/endpoints.py

@@ -538,6 +538,120 @@ def handle_rasdaman_endpoint(
     # add_example_info(collection, collection_config, endpoint_config, catalog_config)
     return collection
 
+def handle_GeoDB_Features_endpoint(
+    catalog_config: dict,
+    endpoint_config: dict,
+    collection_config: dict,
+    coll_path_rel_to_root_catalog: str,
+    catalog: Catalog,
+    options: Options,
+) -> Collection:
+
+    # ID of collection is data["Name"] instead of CollectionId to be able to
+    # create more STAC collections from one geoDB table
+    collection = get_or_create_collection(
+        catalog, collection_config["Name"], collection_config, catalog_config, endpoint_config
+    )
+    coll_path_rel_to_root_catalog = f'{coll_path_rel_to_root_catalog}/{collection_config["Name"]}'
+    select = f'?select={endpoint_config["TimeParameter"]}'
+    url = (
+        endpoint_config["EndPoint"]
+        + endpoint_config["Database"]
+        + "_{}".format(endpoint_config["CollectionId"])
+        + select
+    )
+    response = json.loads(requests.get(url).text)
+    # Use aggregation value to group datetime results
+    aggregation = endpoint_config.get("Aggregation", "day")
+    unique_datetimes = set()
+    for value in response:
+        time_object = datetime.fromisoformat(value[endpoint_config["TimeParameter"]])
+        match aggregation:
+            case "hour":
+                unique_datetimes.add(
+                    datetime(
+                        time_object.year,
+                        time_object.month,
+                        time_object.day,
+                        time_object.hour,
+                    )
+                )
+            case "day":
+                unique_datetimes.add(
+                    datetime(time_object.year, time_object.month, time_object.day).date()
+                )
+            case "month":
+                unique_datetimes.add(
+                    datetime(time_object.year, time_object.month, 1).date()
+                )
+            case "year":
+                unique_datetimes.add(
+                    datetime(time_object.year, 1, 1).date()
+                )
+            case _:
+                # default to day
+                unique_datetimes.add(
+                    datetime(time_object.year, time_object.month, time_object.day).date()
+                )
+    # go over unique datetimes and create items
+    items = []
+    for dt in sorted(unique_datetimes):
+        item_datetime = dt if isinstance(dt, datetime) else datetime(dt.year, dt.month, dt.day)
+        matching_string = ""
+        match aggregation:
+            case "hour":
+                matching_string = item_datetime.strftime("%Y-%m-%dT%H:00:00Z")
+            case "day":
+                matching_string = item_datetime.strftime("%Y-%m-%d")
+            case "month":
+                matching_string = item_datetime.strftime("%Y-%m")
+            case "year":
+                matching_string = item_datetime.strftime("%Y")
+        updated_query = endpoint_config["Query"].replace("{{date_time}}", matching_string)
+        assets = {
+            "geodbfeatures": Asset(
+                href=f"{endpoint_config['EndPoint']}{endpoint_config['Database']}_{endpoint_config['CollectionId']}?{updated_query}",
+                media_type="application/geodb+json",
+                roles=["data"],
+            )}
+        item = Item(
+            id=format_datetime_to_isostring_zulu(item_datetime),
+            bbox=endpoint_config.get("OverwriteBBox", [-180, -90, 180, 90]),
+            properties={},
+            geometry=create_geometry_from_bbox(
+                endpoint_config.get("OverwriteBBox", [-180, -90, 180, 90])
+            ),
+            datetime=item_datetime,
+            stac_extensions=[],
+            assets=assets,
+        )
+        # add eodash style visualization info if Style has been provided
+        if endpoint_config.get("Style"):
+            ep_st = endpoint_config.get("Style")
+            style_link = Link(
+                rel="style",
+                target=ep_st
+                if ep_st.startswith("http")
+                else f"{catalog_config['assets_endpoint']}/{ep_st}",
+                media_type="text/vector-styles",
+                extra_fields={
+                    "asset:keys": list(assets),
+                },
+            )
+            item.add_link(style_link)
+        add_projection_info(endpoint_config, item)
+        items.append(item)
+
+    save_items(
+        collection,
+        items,
+        options.outputpath,
+        catalog_config["id"],
+        f"{coll_path_rel_to_root_catalog}/{collection.id}",
+        options.gp,
+    )
+    return collection
+
 
 def handle_GeoDB_endpoint(
     catalog_config: dict,
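For orientation, a minimal sketch of an endpoint configuration that would exercise the new handler. The keys mirror the endpoint_config lookups in handle_GeoDB_Features_endpoint above, but all concrete values are invented placeholders, not taken from the release.

# Hypothetical "GeoDB Features" endpoint configuration (all values are placeholders)
endpoint_config = {
    "Name": "GeoDB Features",
    "EndPoint": "https://geodb.example.com/",    # placeholder geoDB base URL
    "Database": "my_database",                   # placeholder database prefix
    "CollectionId": "my_table",                  # placeholder geoDB table suffix
    "TimeParameter": "time",                     # column fetched via ?select=
    "Aggregation": "day",                        # "hour" | "day" | "month" | "year"
    "Query": "time=like.{{date_time}}*",         # {{date_time}} is replaced per item
    "OverwriteBBox": [10.0, 45.0, 20.0, 50.0],   # optional; defaults to [-180, -90, 180, 90]
    "Style": "styles/my_style.json",             # optional; resolved against assets_endpoint
}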
@@ -605,33 +719,70 @@ def handle_GeoDB_endpoint(
         locations_collection = get_or_create_collection(
             collection, key, sc_config, catalog_config, endpoint_config
         )
-        if len(input_data) > 0:
+        # check if input data is none
+        if input_data is None:
+            input_data = []
+        if len(input_data) > 0 or endpoint_config.get("FeatureCollection"):
             items = []
             for v in values:
                 # add items based on inputData fields for each time step available in values
-                first_match = next(
+                first_match: dict = next(
                     (item for item in input_data if item.get("Identifier") == v["input_data"]), None
                 )
                 time_object = datetime.fromisoformat(v["time"])
+                if endpoint_config.get("MapReplaceDates"):
+                    # get mapping of AOI_ID to list of dates
+                    available_dates_for_aoi_id = endpoint_config.get("MapReplaceDates").get(
+                        v["aoi_id"]
+                    )
+                    if available_dates_for_aoi_id:
+                        formatted_datetime = time_object.strftime("%Y-%m-%d")
+                        if formatted_datetime not in available_dates_for_aoi_id:
+                            # discard this date because not in available map dates
+                            continue
                 # extract wkt geometry from sub_aoi
                 if "sub_aoi" in v and v["sub_aoi"] != "/":
                     # create geometry from wkt
-                    geometry = mapping(wkt.loads(v["sub_aoi"]))
+                    shapely_geometry = wkt.loads(v["sub_aoi"])
+                    geometry = mapping(shapely_geometry)
                     # converting multipolygon to polygon to avoid shapely throwing an exception
                     # in collection extent from geoparquet table generation
                     # while trying to create a multipolygon extent of all multipolygons
                     if geometry["type"] == "MultiPolygon":
                         geometry = {"type": "Polygon", "coordinates": geometry["coordinates"][0]}
+                    bbox = shapely_geometry.bounds
                 else:
                     geometry = create_geometry_from_bbox(bbox)
+
+                assets = {"dummy_asset": Asset(href="")}
+                if endpoint_config.get("FeatureCollection"):
+                    assets["geodbfeatures"] = Asset(
+                        href=f"{endpoint_config['EndPoint']}{endpoint_config['Database']}_{endpoint_config['FeatureCollection']}?aoi_id=eq.{v['aoi_id']}&time=eq.{v['time']}",
+                        media_type="application/geodb+json",
+                        roles=["data"],
+                    )
                 item = Item(
                     id=v["time"],
                     bbox=bbox,
                     properties={},
                     geometry=geometry,
                     datetime=time_object,
-                    assets={"dummy_asset": Asset(href="")},
+                    assets=assets,
                 )
+                # make sure to also add Style link if FeatureCollection and Style has been provided
+                if endpoint_config.get("FeatureCollection") and endpoint_config.get("Style"):
+                    ep_st = endpoint_config.get("Style")
+                    style_link = Link(
+                        rel="style",
+                        target=ep_st
+                        if ep_st.startswith("http")
+                        else f"{catalog_config['assets_endpoint']}/{ep_st}",
+                        media_type="text/vector-styles",
+                        extra_fields={
+                            "asset:keys": list(assets),
+                        },
+                    )
+                    item.add_link(style_link)
                 if first_match:
                     match first_match["Type"]:
                         case "WMS":
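Two of the options introduced above deserve a note: MapReplaceDates filters the generated items down to dates that actually have map data per AOI, and FeatureCollection attaches a geodbfeatures asset per item. A hedged sketch of the expected shapes, with invented IDs, dates, and table name:

# Hypothetical endpoint_config fragment; key names follow the lookups above,
# all values are invented examples.
endpoint_config_fragment = {
    # aoi_id -> list of "%Y-%m-%d" dates; time steps not in the list are skipped
    "MapReplaceDates": {"AT3": ["2022-01-01", "2022-02-01"]},
    # when set, each item gets a "geodbfeatures" asset pointing at
    # <EndPoint><Database>_<FeatureCollection>?aoi_id=eq.<aoi_id>&time=eq.<time>
    "FeatureCollection": "my_features_table",
}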
@@ -640,7 +791,7 @@ def handle_GeoDB_endpoint(
                                 "wms:layers": [first_match["Layers"]],
                                 "role": ["data"],
                             }
-                            if
+                            if "sentinel-hub.com" in url:
                                 instanceId = os.getenv("SH_INSTANCE_ID")
                                 if "InstanceId" in endpoint_config:
                                     instanceId = endpoint_config["InstanceId"]
@@ -655,7 +806,7 @@ def handle_GeoDB_endpoint(
                                     {"wms:dimensions": {"TIME": f"{start_date}/{end_date}"}}
                                 )
                                 # we add the instance id to the url
-                                url = f"
+                                url = f"{url}{instanceId}"
                             else:
                                 extra_fields.update({"wms:dimensions": {"TIME": v["time"]}})
                             link = Link(
@@ -667,6 +818,50 @@ def handle_GeoDB_endpoint(
                             )
                             item.add_link(link)
                             items.append(item)
+                        case "XYZ":
+                            # handler for NASA apis
+                            url = first_match["Url"]
+                            extra_fields = {}
+                            # replace time to a formatted version
+                            date_formatted = time_object.strftime(
+                                first_match.get("DateFormat", "%Y_%m_%d")
+                            )
+                            target_url = url.replace("{time}", date_formatted)
+                            if SiteMapping := first_match.get("SiteMapping"):
+                                # match with aoi_id
+                                site = SiteMapping.get(v["aoi_id"])
+                                # replace in URL
+                                if site:
+                                    target_url = target_url.replace("{site}", site)
+                                else:
+                                    LOGGER.info(
+                                        f"Warning: no match for SiteMapping in config for {site}"
+                                    )
+                            link = Link(
+                                rel="xyz",
+                                target=target_url,
+                                media_type="image/png",
+                                title=collection_config["Name"],
+                                extra_fields=extra_fields,
+                            )
+                            item.add_link(link)
+                            items.append(item)
+                elif endpoint_config.get("FeatureCollection"):
+                    # no input data match found, just add the item with asset only
+                    assets["geodbfeatures"] = Asset(
+                        href=f"{endpoint_config['EndPoint']}{endpoint_config['Database']}_{endpoint_config['FeatureCollection']}?aoi_id=eq.{v['aoi_id']}&time=eq.{v['time']}",
+                        media_type="application/geodb+json",
+                        roles=["data"],
+                    )
+                    item = Item(
+                        id=v["time"],
+                        bbox=bbox,
+                        properties={},
+                        geometry=geometry,
+                        datetime=time_object,
+                        assets=assets,
+                    )
+                    items.append(item)
             save_items(
                 locations_collection,
                 items,
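The new "XYZ" case reads its tile-URL template from the matched input-data record. A sketch of such a record, assuming the field shapes used above; the URL and site mapping are invented:

# Hypothetical geoDB input-data record that would be routed to case "XYZ"
first_match_example = {
    "Identifier": "my_indicator",  # matched against v["input_data"]
    "Type": "XYZ",
    "Url": "https://tiles.example.com/{site}/{time}/{z}/{x}/{y}.png",
    "DateFormat": "%Y_%m_%d",            # strftime pattern for the {time} placeholder
    "SiteMapping": {"AT3": "vienna"},    # aoi_id -> value for the {site} placeholder
}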
@@ -690,6 +885,7 @@ def handle_GeoDB_endpoint(
                 link.extra_fields["latlng"] = latlon
                 link.extra_fields["country"] = country
                 link.extra_fields["name"] = city
+        add_collection_information(catalog_config, locations_collection, collection_config)
 
     if "yAxis" not in collection_config:
         # fetch yAxis and store it to data, preventing need to save it per dataset in yml
@@ -781,19 +977,24 @@ def handle_WMS_endpoint(
     # Create an item per time to allow visualization in stac clients
     if len(datetimes) > 0:
         for dt in datetimes:
+            # case of wms interval coming from config
+            dt_item = dt[0] if isinstance(dt, list) else dt
             item = Item(
-                id=format_datetime_to_isostring_zulu(dt),
+                id=format_datetime_to_isostring_zulu(dt_item),
                 bbox=spatial_extent,
                 properties={},
                 geometry=create_geometry_from_bbox(spatial_extent),
-                datetime=dt,
+                datetime=dt_item,
                 stac_extensions=[
                     "https://stac-extensions.github.io/web-map-links/v1.1.0/schema.json",
                 ],
                 assets={"dummy_asset": Asset(href="")},
             )
             add_projection_info(endpoint_config, item)
-            add_visualization_info(item, collection_config, endpoint_config, datetimes=[dt])
+            dt_visualization = dt if isinstance(dt, list) else [dt]
+            add_visualization_info(
+                item, collection_config, endpoint_config, datetimes=dt_visualization
+            )
             items.append(item)
     else:
         LOGGER.warn(f"NO datetimes returned for collection: {collection_config['Name']}!")
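With this change each entry of datetimes may be either a single datetime or a [start, end] pair (produced when WMS intervals are configured; see the stac_handling and utils hunks below). A minimal sketch of the branching, with invented times:

from datetime import datetime

dt = [datetime(2022, 1, 1), datetime(2022, 1, 31, 23, 59, 59)]  # interval case
dt_item = dt[0] if isinstance(dt, list) else dt            # item id/datetime use the start
dt_visualization = dt if isinstance(dt, list) else [dt]    # visualization gets the full pair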
@@ -878,7 +1079,6 @@ def add_visualization_info(
         start_isostring = format_datetime_to_isostring_zulu(dt)
         # SH WMS for public collections needs time interval, we use full day here
         end = dt + timedelta(days=1) - timedelta(milliseconds=1)
-        # we have start_datetime and end_datetime
         if len(datetimes) == 2:
             end = datetimes[1]
         end_isostring = format_datetime_to_isostring_zulu(end)
@@ -916,7 +1116,13 @@ def add_visualization_info(
             )
             dimensions[key] = value
         if datetimes is not None:
-            dimensions["TIME"] = format_datetime_to_isostring_zulu(datetimes[0])
+            if len(datetimes) > 1:
+                start = format_datetime_to_isostring_zulu(datetimes[0])
+                end = format_datetime_to_isostring_zulu(datetimes[1])
+                interval = f"{start}/{end}"
+                dimensions["TIME"] = interval
+            else:
+                dimensions["TIME"] = format_datetime_to_isostring_zulu(datetimes[0])
         if dimensions != {}:
             extra_fields["wms:dimensions"] = dimensions
         if endpoint_config.get("Styles"):
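With two datetimes the WMS TIME dimension now becomes a start/end interval string instead of a single instant. A runnable sketch with invented times and a rough stand-in for the package's format_datetime_to_isostring_zulu helper:

from datetime import datetime, timezone

def to_zulu(dt: datetime) -> str:
    # stand-in for format_datetime_to_isostring_zulu
    return dt.astimezone(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

datetimes = [
    datetime(2022, 1, 1, tzinfo=timezone.utc),
    datetime(2022, 1, 31, tzinfo=timezone.utc),
]
time_value = (
    f"{to_zulu(datetimes[0])}/{to_zulu(datetimes[1])}"
    if len(datetimes) > 1
    else to_zulu(datetimes[0])
)
print({"TIME": time_value})  # {'TIME': '2022-01-01T00:00:00Z/2022-01-31T00:00:00Z'}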
{eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/generate_indicators.py

@@ -19,6 +19,7 @@ from eodash_catalog.endpoints import (
     handle_collection_only,
     handle_custom_endpoint,
     handle_GeoDB_endpoint,
+    handle_GeoDB_Features_endpoint,
     handle_rasdaman_endpoint,
     handle_raw_source,
     handle_SH_endpoint,
@@ -241,6 +242,15 @@ def process_collection_file(
                     catalog,
                     options,
                 )
+            elif endpoint_config["Name"] == "GeoDB Features":
+                collection = handle_GeoDB_Features_endpoint(
+                    catalog_config,
+                    endpoint_config,
+                    collection_config,
+                    coll_path_rel_to_root_catalog,
+                    catalog,
+                    options,
+                )
             elif endpoint_config["Name"] == "VEDA":
                 collection = handle_VEDA_endpoint(
                     catalog_config,
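For reference, the new branch is selected purely by the endpoint's "Name". A hedged sketch of a collection configuration that would reach it; the surrounding "Resources" layout is assumed from the project's collection configs, and all values are placeholders:

# Hypothetical collection config routed to handle_GeoDB_Features_endpoint
collection_config = {
    "Name": "my_collection",
    "Resources": [
        {
            "Name": "GeoDB Features",  # the literal checked by the dispatch above
            # ... endpoint keys as sketched after the endpoints.py hunk ...
        }
    ],
}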
{eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/stac_handling.py

@@ -18,6 +18,7 @@ from structlog import get_logger
 from eodash_catalog.utils import (
     generateDatetimesFromInterval,
     get_full_url,
+    make_intervals,
     parse_datestring_to_tz_aware_datetime,
     read_config_file,
 )
@@ -535,16 +536,22 @@ def add_extra_fields(
 def get_collection_datetimes_from_config(endpoint_config: dict) -> list[datetime]:
     times_datetimes: list[datetime] = []
     if endpoint_config:
+        interval_between_dates = endpoint_config.get("WMSIntervalsBetweenDates")
         if endpoint_config.get("Times"):
             times = list(endpoint_config.get("Times", []))
             times_datetimes = sorted(
                 [parse_datestring_to_tz_aware_datetime(time) for time in times]
             )
+            if interval_between_dates:
+                # convert to list of datetime_start and datetime_end
+                times_datetimes = make_intervals(times_datetimes)
         elif endpoint_config.get("DateTimeInterval"):
             start = endpoint_config["DateTimeInterval"].get("Start", "2020-09-01T00:00:00Z")
             end = endpoint_config["DateTimeInterval"].get("End", "2020-10-01T00:00:00Z")
             timedelta_config = endpoint_config["DateTimeInterval"].get("Timedelta", {"days": 1})
-            times_datetimes = generateDatetimesFromInterval(start, end, timedelta_config)
+            times_datetimes = generateDatetimesFromInterval(
+                start, end, timedelta_config, interval_between_dates
+            )
     return times_datetimes
 
 
{eodash_catalog-0.3.2 → eodash_catalog-0.3.4}/src/eodash_catalog/utils.py

@@ -214,7 +214,7 @@ def parse_duration(datestring):
 
 
 def generateDatetimesFromInterval(
-    start: str, end: str, timedelta_config: dict | None = None
+    start: str, end: str, timedelta_config: dict | None = None, interval_between_dates: bool = False
 ) -> list[datetime]:
     if timedelta_config is None:
         timedelta_config = {}
@@ -226,7 +226,10 @@ def generateDatetimesFromInterval(
     delta = timedelta(**timedelta_config)
     dates = []
     while start_dt <= end_dt:
-        dates.append(start_dt)
+        if interval_between_dates:
+            dates.append([start_dt, start_dt + delta - timedelta(seconds=1)])
+        else:
+            dates.append(start_dt)
         start_dt += delta
     return dates
 
@@ -444,8 +447,9 @@ def update_extents_from_collection_children(collection: Collection):
     ):
         individual_datetimes.extend(c_child.extent.temporal.intervals[0])  # type: ignore
     individual_datetimes = list(filter(lambda x: x is not None, individual_datetimes))
-    time_extent = [min(individual_datetimes), max(individual_datetimes)]
-    collection.extent.temporal = TemporalExtent([time_extent])
+    if individual_datetimes:
+        time_extent = [min(individual_datetimes), max(individual_datetimes)]
+        collection.extent.temporal = TemporalExtent([time_extent])
 
 
 def extract_extent_from_geoparquet(table) -> tuple[TemporalExtent, SpatialExtent]:
@@ -615,3 +619,25 @@ def merge_bboxes(bboxes: list[list[float]]) -> list[float]:
     max_lat = max(b[3] for b in bboxes)
 
     return [min_lon, min_lat, max_lon, max_lat]
+
+
+def make_intervals(datetimes: list[datetime]) -> list[list[datetime]]:
+    """
+    Converts a list of datetimes into list of lists of datetimes in format of [start,end]
+    where end is next element in original list minus 1 second
+    """
+    intervals = []
+    n = len(datetimes)
+    for i in range(n):
+        start = datetimes[i]
+        if i < n - 1:
+            # end is next datetime minus one second
+            end = datetimes[i + 1] - timedelta(seconds=1)
+        else:
+            prev_interval = timedelta(seconds=0)
+            # last item: use previous interval length added to last start
+            if n > 1:
+                prev_interval = datetimes[-1] - datetimes[-2]
+            end = start + prev_interval
+        intervals.append([start, end])
+    return intervals
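A worked example of the new helper, with invented dates. Consecutive entries become [start, next - 1 second]; the last interval reuses the length of the previous gap:

from datetime import datetime
from eodash_catalog.utils import make_intervals  # available as of 0.3.4

dts = [datetime(2022, 1, 1), datetime(2022, 2, 1), datetime(2022, 3, 1)]
for start, end in make_intervals(dts):
    print(start, "->", end)
# 2022-01-01 00:00:00 -> 2022-01-31 23:59:59
# 2022-02-01 00:00:00 -> 2022-02-28 23:59:59
# 2022-03-01 00:00:00 -> 2022-03-29 00:00:00  (start + previous gap of 28 days)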