eodash_catalog 0.3.2__py3-none-any.whl → 0.3.3__py3-none-any.whl
This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- eodash_catalog/__about__.py +1 -1
- eodash_catalog/endpoints.py +60 -9
- eodash_catalog/stac_handling.py +8 -1
- eodash_catalog/utils.py +31 -4
- {eodash_catalog-0.3.2.dist-info → eodash_catalog-0.3.3.dist-info}/METADATA +1 -1
- eodash_catalog-0.3.3.dist-info/RECORD +14 -0
- eodash_catalog-0.3.2.dist-info/RECORD +0 -14
- {eodash_catalog-0.3.2.dist-info → eodash_catalog-0.3.3.dist-info}/WHEEL +0 -0
- {eodash_catalog-0.3.2.dist-info → eodash_catalog-0.3.3.dist-info}/entry_points.txt +0 -0
- {eodash_catalog-0.3.2.dist-info → eodash_catalog-0.3.3.dist-info}/licenses/LICENSE.txt +0 -0
eodash_catalog/__about__.py
CHANGED
eodash_catalog/endpoints.py
CHANGED
@@ -609,19 +609,31 @@ def handle_GeoDB_endpoint(
         items = []
         for v in values:
             # add items based on inputData fields for each time step available in values
-            first_match = next(
+            first_match: dict = next(
                 (item for item in input_data if item.get("Identifier") == v["input_data"]), None
             )
             time_object = datetime.fromisoformat(v["time"])
+            if endpoint_config.get("MapReplaceDates"):
+                # get mapping of AOI_ID to list of dates
+                available_dates_for_aoi_id = endpoint_config.get("MapReplaceDates").get(
+                    v["aoi_id"]
+                )
+                if available_dates_for_aoi_id:
+                    formatted_datetime = time_object.strftime("%Y-%m-%d")
+                    if formatted_datetime not in available_dates_for_aoi_id:
+                        # discard this date because not in available map dates
+                        continue
             # extract wkt geometry from sub_aoi
             if "sub_aoi" in v and v["sub_aoi"] != "/":
                 # create geometry from wkt
-
+                shapely_geometry = wkt.loads(v["sub_aoi"])
+                geometry = mapping(shapely_geometry)
                 # converting multipolygon to polygon to avoid shapely throwing an exception
                 # in collection extent from geoparquet table generation
                 # while trying to create a multipolygon extent of all multipolygons
                 if geometry["type"] == "MultiPolygon":
                     geometry = {"type": "Polygon", "coordinates": geometry["coordinates"][0]}
+                bbox = shapely_geometry.bounds
             else:
                 geometry = create_geometry_from_bbox(bbox)
             item = Item(
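The new MapReplaceDates option filters GeoDB time steps against a per-AOI list of available map dates. A minimal sketch of that lookup, using a hypothetical endpoint_config excerpt (in practice the mapping comes from the collection configuration):

from datetime import datetime

# hypothetical config: AOI id -> dates for which map layers actually exist
endpoint_config = {
    "MapReplaceDates": {
        "AT3": ["2022-03-01", "2022-04-01"],
    }
}

def keep_time_step(aoi_id: str, time: str) -> bool:
    # mirrors the filter added above: drop time steps without a matching map date
    available = endpoint_config.get("MapReplaceDates", {}).get(aoi_id)
    if not available:
        # no mapping configured for this AOI -> keep the time step
        return True
    return datetime.fromisoformat(time).strftime("%Y-%m-%d") in available

print(keep_time_step("AT3", "2022-03-01T00:00:00"))  # True, kept
print(keep_time_step("AT3", "2022-05-01T00:00:00"))  # False, discarded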
@@ -640,7 +652,7 @@ def handle_GeoDB_endpoint(
                         "wms:layers": [first_match["Layers"]],
                         "role": ["data"],
                     }
-                    if
+                    if "sentinel-hub.com" in url:
                         instanceId = os.getenv("SH_INSTANCE_ID")
                         if "InstanceId" in endpoint_config:
                             instanceId = endpoint_config["InstanceId"]
@@ -655,7 +667,7 @@ def handle_GeoDB_endpoint(
                             {"wms:dimensions": {"TIME": f"{start_date}/{end_date}"}}
                         )
                         # we add the instance id to the url
-                        url = f"
+                        url = f"{url}{instanceId}"
                     else:
                         extra_fields.update({"wms:dimensions": {"TIME": v["time"]}})
                     link = Link(
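For Sentinel Hub WMS endpoints the instance id (from InstanceId in the endpoint config, falling back to the SH_INSTANCE_ID environment variable) is appended directly to the base URL, and TIME becomes a start/end interval. A rough sketch with placeholder values only:

import os

# hypothetical values; the real ones come from the endpoint config and environment
url = "https://services.sentinel-hub.com/ogc/wms/"
instanceId = os.getenv("SH_INSTANCE_ID", "<instance-id>")
extra_fields = {
    "wms:layers": ["MY_LAYER"],
    "role": ["data"],
    "wms:dimensions": {"TIME": "2022-03-01T00:00:00Z/2022-03-01T23:59:59Z"},
}
# we add the instance id to the url, as in the hunk above
url = f"{url}{instanceId}"  # -> https://services.sentinel-hub.com/ogc/wms/<instance-id>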
@@ -667,6 +679,34 @@ def handle_GeoDB_endpoint(
                     )
                     item.add_link(link)
                     items.append(item)
+                case "XYZ":
+                    # handler for NASA apis
+                    url = first_match["Url"]
+                    extra_fields = {}
+                    # replace time to a formatted version
+                    date_formatted = time_object.strftime(
+                        first_match.get("DateFormat", "%Y_%m_%d")
+                    )
+                    target_url = url.replace("{time}", date_formatted)
+                    if SiteMapping := first_match.get("SiteMapping"):
+                        # match with aoi_id
+                        site = SiteMapping.get(v["aoi_id"])
+                        # replace in URL
+                        if site:
+                            target_url = target_url.replace("{site}", site)
+                        else:
+                            LOGGER.info(
+                                f"Warning: no match for SiteMapping in config for {site}"
+                            )
+                    link = Link(
+                        rel="xyz",
+                        target=target_url,
+                        media_type="image/png",
+                        title=collection_config["Name"],
+                        extra_fields=extra_fields,
+                    )
+                    item.add_link(link)
+                    items.append(item)
         save_items(
             locations_collection,
             items,
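The new "XYZ" case builds a tiled-layer link from a URL template, substituting {time} (formatted with the configured DateFormat) and optionally {site} via SiteMapping keyed by AOI id. A small sketch of that substitution with hypothetical config values:

from datetime import datetime

# hypothetical inputData entry shaped the way the handler above expects it
first_match = {
    "Type": "XYZ",
    "Url": "https://example.org/tiles/{site}/{time}/{z}/{x}/{y}.png",
    "DateFormat": "%Y_%m_%d",
    "SiteMapping": {"AT3": "vienna"},
}
time_object = datetime.fromisoformat("2022-03-01T00:00:00")
aoi_id = "AT3"

date_formatted = time_object.strftime(first_match.get("DateFormat", "%Y_%m_%d"))
target_url = first_match["Url"].replace("{time}", date_formatted)
if site := first_match.get("SiteMapping", {}).get(aoi_id):
    target_url = target_url.replace("{site}", site)
print(target_url)
# https://example.org/tiles/vienna/2022_03_01/{z}/{x}/{y}.png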
@@ -690,6 +730,7 @@ def handle_GeoDB_endpoint(
             link.extra_fields["latlng"] = latlon
             link.extra_fields["country"] = country
             link.extra_fields["name"] = city
+    add_collection_information(catalog_config, locations_collection, collection_config)

     if "yAxis" not in collection_config:
         # fetch yAxis and store it to data, preventing need to save it per dataset in yml
@@ -781,19 +822,24 @@ def handle_WMS_endpoint(
     # Create an item per time to allow visualization in stac clients
     if len(datetimes) > 0:
         for dt in datetimes:
+            # case of wms interval coming from config
+            dt_item = dt[0] if isinstance(dt, list) else dt
             item = Item(
-                id=format_datetime_to_isostring_zulu(
+                id=format_datetime_to_isostring_zulu(dt_item),
                 bbox=spatial_extent,
                 properties={},
                 geometry=create_geometry_from_bbox(spatial_extent),
-                datetime=
+                datetime=dt_item,
                 stac_extensions=[
                     "https://stac-extensions.github.io/web-map-links/v1.1.0/schema.json",
                 ],
                 assets={"dummy_asset": Asset(href="")},
             )
             add_projection_info(endpoint_config, item)
-
+            dt_visualization = dt if isinstance(dt, list) else [dt]
+            add_visualization_info(
+                item, collection_config, endpoint_config, datetimes=dt_visualization
+            )
             items.append(item)
     else:
         LOGGER.warn(f"NO datetimes returned for collection: {collection_config['Name']}!")
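With interval support, each entry in datetimes may now be either a single datetime or a [start, end] pair: the item id and datetime use the start, while visualization receives the full pair. A brief sketch of that normalization, assuming both shapes appear in the same list:

from datetime import datetime

datetimes = [
    datetime(2022, 3, 1),                                        # plain time step
    [datetime(2022, 4, 1), datetime(2022, 4, 30, 23, 59, 59)],   # interval from config
]

for dt in datetimes:
    dt_item = dt[0] if isinstance(dt, list) else dt            # used for item id and datetime
    dt_visualization = dt if isinstance(dt, list) else [dt]    # passed to add_visualization_info
    print(dt_item.isoformat(), len(dt_visualization))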
@@ -878,7 +924,6 @@ def add_visualization_info(
             start_isostring = format_datetime_to_isostring_zulu(dt)
             # SH WMS for public collections needs time interval, we use full day here
             end = dt + timedelta(days=1) - timedelta(milliseconds=1)
-            # we have start_datetime and end_datetime
             if len(datetimes) == 2:
                 end = datetimes[1]
             end_isostring = format_datetime_to_isostring_zulu(end)
@@ -916,7 +961,13 @@ def add_visualization_info(
             )
             dimensions[key] = value
         if datetimes is not None:
-
+            if len(datetimes) > 1:
+                start = format_datetime_to_isostring_zulu(datetimes[0])
+                end = format_datetime_to_isostring_zulu(datetimes[1])
+                interval = f"{start}/{end}"
+                dimensions["TIME"] = interval
+            else:
+                dimensions["TIME"] = format_datetime_to_isostring_zulu(datetimes[0])
         if dimensions != {}:
             extra_fields["wms:dimensions"] = dimensions
         if endpoint_config.get("Styles"):
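The wms:dimensions TIME value therefore depends on how many datetimes are passed: one value yields a single instant, two values yield a start/end interval. Sketched below with a simple stand-in for format_datetime_to_isostring_zulu (the real helper lives in the package):

from datetime import datetime, timezone

def to_zulu(dt: datetime) -> str:
    # stand-in for format_datetime_to_isostring_zulu
    return dt.astimezone(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

def time_dimension(datetimes: list) -> str:
    if len(datetimes) > 1:
        return f"{to_zulu(datetimes[0])}/{to_zulu(datetimes[1])}"
    return to_zulu(datetimes[0])

print(time_dimension([datetime(2022, 3, 1, tzinfo=timezone.utc)]))
# 2022-03-01T00:00:00Z
print(time_dimension([datetime(2022, 3, 1, tzinfo=timezone.utc),
                      datetime(2022, 3, 31, 23, 59, 59, tzinfo=timezone.utc)]))
# 2022-03-01T00:00:00Z/2022-03-31T23:59:59Z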
eodash_catalog/stac_handling.py
CHANGED
@@ -18,6 +18,7 @@ from structlog import get_logger
 from eodash_catalog.utils import (
     generateDatetimesFromInterval,
     get_full_url,
+    make_intervals,
     parse_datestring_to_tz_aware_datetime,
     read_config_file,
 )
@@ -535,16 +536,22 @@ def add_extra_fields(
 def get_collection_datetimes_from_config(endpoint_config: dict) -> list[datetime]:
     times_datetimes: list[datetime] = []
     if endpoint_config:
+        interval_between_dates = endpoint_config.get("WMSIntervalsBetweenDates")
         if endpoint_config.get("Times"):
             times = list(endpoint_config.get("Times", []))
             times_datetimes = sorted(
                 [parse_datestring_to_tz_aware_datetime(time) for time in times]
             )
+            if interval_between_dates:
+                # convert to list of datetime_start and datetime_end
+                times_datetimes = make_intervals(times_datetimes)
         elif endpoint_config.get("DateTimeInterval"):
             start = endpoint_config["DateTimeInterval"].get("Start", "2020-09-01T00:00:00Z")
             end = endpoint_config["DateTimeInterval"].get("End", "2020-10-01T00:00:00Z")
             timedelta_config = endpoint_config["DateTimeInterval"].get("Timedelta", {"days": 1})
-            times_datetimes = generateDatetimesFromInterval(
+            times_datetimes = generateDatetimesFromInterval(
+                start, end, timedelta_config, interval_between_dates
+            )
     return times_datetimes
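The new WMSIntervalsBetweenDates flag switches both branches from single datetimes to [start, end] pairs: explicit Times are paired up via make_intervals, and DateTimeInterval passes the flag through to generateDatetimesFromInterval. A hypothetical endpoint configuration exercising the Times path (shown here as the dict the code reads; in practice it comes from the collection YAML):

endpoint_config = {
    "WMSIntervalsBetweenDates": True,
    "Times": [
        "2022-01-01T00:00:00Z",
        "2022-02-01T00:00:00Z",
        "2022-03-01T00:00:00Z",
    ],
}
# get_collection_datetimes_from_config would then return interval pairs roughly like
# [[2022-01-01T00:00:00Z, 2022-01-31T23:59:59Z],
#  [2022-02-01T00:00:00Z, 2022-02-28T23:59:59Z],
#  [2022-03-01T00:00:00Z, 2022-03-29T00:00:00Z]]  # last entry reuses the previous 28-day gap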
eodash_catalog/utils.py
CHANGED
@@ -214,7 +214,7 @@ def parse_duration(datestring):


 def generateDatetimesFromInterval(
-    start: str, end: str, timedelta_config: dict | None = None
+    start: str, end: str, timedelta_config: dict | None = None, interval_between_dates: bool = False
 ) -> list[datetime]:
     if timedelta_config is None:
         timedelta_config = {}
@@ -226,7 +226,10 @@ def generateDatetimesFromInterval(
     delta = timedelta(**timedelta_config)
     dates = []
     while start_dt <= end_dt:
-
+        if interval_between_dates:
+            dates.append([start_dt, start_dt + delta - timedelta(seconds=1)])
+        else:
+            dates.append(start_dt)
         start_dt += delta
     return dates
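With interval_between_dates=True the generator emits [start, start + delta - 1 second] pairs instead of single datetimes. A short usage sketch (output shown in comments is illustrative, assuming a 1-day Timedelta and the inclusive loop above):

from eodash_catalog.utils import generateDatetimesFromInterval

# plain time steps (previous behaviour)
steps = generateDatetimesFromInterval(
    "2022-03-01T00:00:00Z", "2022-03-03T00:00:00Z", {"days": 1}
)
# -> roughly [2022-03-01, 2022-03-02, 2022-03-03]

# interval pairs (new behaviour)
pairs = generateDatetimesFromInterval(
    "2022-03-01T00:00:00Z", "2022-03-03T00:00:00Z", {"days": 1}, interval_between_dates=True
)
# -> roughly [[2022-03-01 00:00:00, 2022-03-01 23:59:59],
#             [2022-03-02 00:00:00, 2022-03-02 23:59:59],
#             [2022-03-03 00:00:00, 2022-03-03 23:59:59]]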
@@ -444,8 +447,9 @@ def update_extents_from_collection_children(collection: Collection):
     ):
         individual_datetimes.extend(c_child.extent.temporal.intervals[0])  # type: ignore
     individual_datetimes = list(filter(lambda x: x is not None, individual_datetimes))
-
-
+    if individual_datetimes:
+        time_extent = [min(individual_datetimes), max(individual_datetimes)]
+        collection.extent.temporal = TemporalExtent([time_extent])


 def extract_extent_from_geoparquet(table) -> tuple[TemporalExtent, SpatialExtent]:
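The added guard only rebuilds the collection's temporal extent when at least one child actually contributed a datetime, avoiding min()/max() on an empty list. Conceptually (with made-up child datetimes):

from datetime import datetime, timezone

individual_datetimes = [
    datetime(2022, 1, 1, tzinfo=timezone.utc),
    datetime(2022, 6, 1, tzinfo=timezone.utc),
]
individual_datetimes = [d for d in individual_datetimes if d is not None]
if individual_datetimes:  # without this check, min()/max() would raise on an empty list
    time_extent = [min(individual_datetimes), max(individual_datetimes)]
    # collection.extent.temporal = TemporalExtent([time_extent])
    print(time_extent)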
@@ -615,3 +619,26 @@ def merge_bboxes(bboxes: list[list[float]]) -> list[float]:
     max_lat = max(b[3] for b in bboxes)

     return [min_lon, min_lat, max_lon, max_lat]
+
+
+def make_intervals(datetimes: list[datetime]) -> list[list[datetime]]:
+    """
+    Converts a list of datetimes into list of lists of datetimes in format of [start,end]
+    where end is next element in original list minus 1 second
+    """
+    intervals = []
+    n = len(datetimes)
+    for i in range(n):
+        start = datetimes[i]
+        if i < n - 1:
+            # end is next datetime minus one second
+            end = datetimes[i + 1] - timedelta(seconds=1)
+        else:
+            prev_interval = timedelta(seconds=0)
+            # last item: use previous interval length added to last start
+            if n > 1:
+                prev_interval = datetimes[-1] - datetimes[-2]
+            end = start + prev_interval
+        intervals.append([start, end])
+    LOGGER.info(intervals)
+    return intervals
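make_intervals pairs each datetime with the next one minus one second; the final entry reuses the length of the previous gap. A usage sketch of the new helper for three monthly dates (output in comments is illustrative):

from datetime import datetime
from eodash_catalog.utils import make_intervals

dates = [datetime(2022, 1, 1), datetime(2022, 2, 1), datetime(2022, 3, 1)]
for start, end in make_intervals(dates):
    print(start, "->", end)
# 2022-01-01 00:00:00 -> 2022-01-31 23:59:59
# 2022-02-01 00:00:00 -> 2022-02-28 23:59:59
# 2022-03-01 00:00:00 -> 2022-03-29 00:00:00  (last start + previous 28-day gap)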
{eodash_catalog-0.3.2.dist-info → eodash_catalog-0.3.3.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: eodash_catalog
-Version: 0.3.
+Version: 0.3.3
 Summary: This package is intended to help create a compatible STAC catalog for the eodash dashboard client. It supports configuration of multiple endpoint types for information extraction.
 Project-URL: Documentation, https://github.com/eodash/eodash_catalog#readme
 Project-URL: Issues, https://github.com/eodash/eodash_catalog/issues
eodash_catalog-0.3.3.dist-info/RECORD
ADDED
@@ -0,0 +1,14 @@
+eodash_catalog/__about__.py,sha256=jakR3FNEOvU2GrvtecX6moFoNPby5Ok50p6_6JcZoEQ,137
+eodash_catalog/__init__.py,sha256=_W_9emPYf6FUqc0P8L2SmADx6hGSd7PlQV3yRmCk5uM,115
+eodash_catalog/duration.py,sha256=TBG7v1lCpbYowADK5uJ2M8kPxsvQneFAFi1NIE26dy4,10754
+eodash_catalog/endpoints.py,sha256=uI7aapRHQ1eDYQYKlrMdLHi_6aitBS3jutrou2HDWJY,53183
+eodash_catalog/generate_indicators.py,sha256=FPeiZm9TE4PpbTyH6UMegQ7HwaARzO91IrLtzFjFSF0,21900
+eodash_catalog/sh_endpoint.py,sha256=XjZsZJ5jfJZLQenSTqUhiUZ5YAu9M9nv2KL1Qv3Be-I,1219
+eodash_catalog/stac_handling.py,sha256=xMhUK_gvijs6tL-1ecg28DtWBo4msd5NTYZpYtt7FHo,25877
+eodash_catalog/thumbnails.py,sha256=oNbWdRC8KTLUC4PbSMlSaiOeLXfkIpa0j-sOZdn1RGU,2262
+eodash_catalog/utils.py,sha256=C-HQK6IYMNAd3Vfgq9KOqsBlJu-jtnZTcsHGp4kj7Y0,23773
+eodash_catalog-0.3.3.dist-info/METADATA,sha256=H1Npnw-SZ3hAerwBs6ml0fscgSP9MkMDbVTryhPq7kM,3019
+eodash_catalog-0.3.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+eodash_catalog-0.3.3.dist-info/entry_points.txt,sha256=kuUQrDG1PtYd8kPjf5XM6H_NtQd9Ozwl0jjiGtAvZSM,87
+eodash_catalog-0.3.3.dist-info/licenses/LICENSE.txt,sha256=oJCW5zQxnFD-J0hGz6Zh5Lkpdk1oAndmWhseTmV224E,1107
+eodash_catalog-0.3.3.dist-info/RECORD,,
eodash_catalog-0.3.2.dist-info/RECORD
REMOVED
@@ -1,14 +0,0 @@
-eodash_catalog/__about__.py,sha256=mydI-qYrMicO3OMTwk88yDLDzfB11OJosFgmcQuLT6U,137
-eodash_catalog/__init__.py,sha256=_W_9emPYf6FUqc0P8L2SmADx6hGSd7PlQV3yRmCk5uM,115
-eodash_catalog/duration.py,sha256=TBG7v1lCpbYowADK5uJ2M8kPxsvQneFAFi1NIE26dy4,10754
-eodash_catalog/endpoints.py,sha256=NTZ0LlYZ7fJZLvgWZP1aqrNXpBS6GSjfhrfIqU5lDaA,50442
-eodash_catalog/generate_indicators.py,sha256=FPeiZm9TE4PpbTyH6UMegQ7HwaARzO91IrLtzFjFSF0,21900
-eodash_catalog/sh_endpoint.py,sha256=XjZsZJ5jfJZLQenSTqUhiUZ5YAu9M9nv2KL1Qv3Be-I,1219
-eodash_catalog/stac_handling.py,sha256=waw8qRjwjdbDBRtialc4bG3WSjXAATklc-W5kLKywqE,25548
-eodash_catalog/thumbnails.py,sha256=oNbWdRC8KTLUC4PbSMlSaiOeLXfkIpa0j-sOZdn1RGU,2262
-eodash_catalog/utils.py,sha256=1PafWiW_5SRa4_BsxODNzB43uYyvcsKJvCFIRExH0gA,22752
-eodash_catalog-0.3.2.dist-info/METADATA,sha256=VgwGcib6LSQPiw2D7c6mdP5Vp8R-L9NgSPYyx7Oc35Q,3019
-eodash_catalog-0.3.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-eodash_catalog-0.3.2.dist-info/entry_points.txt,sha256=kuUQrDG1PtYd8kPjf5XM6H_NtQd9Ozwl0jjiGtAvZSM,87
-eodash_catalog-0.3.2.dist-info/licenses/LICENSE.txt,sha256=oJCW5zQxnFD-J0hGz6Zh5Lkpdk1oAndmWhseTmV224E,1107
-eodash_catalog-0.3.2.dist-info/RECORD,,
{eodash_catalog-0.3.2.dist-info → eodash_catalog-0.3.3.dist-info}/WHEEL
File without changes

{eodash_catalog-0.3.2.dist-info → eodash_catalog-0.3.3.dist-info}/entry_points.txt
File without changes

{eodash_catalog-0.3.2.dist-info → eodash_catalog-0.3.3.dist-info}/licenses/LICENSE.txt
File without changes