eodash_catalog 0.0.7__py3-none-any.whl → 0.0.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of eodash_catalog has been flagged as potentially problematic; see the registry's advisory page for more details.
- eodash_catalog/__about__.py +1 -1
- eodash_catalog/duration.py +7 -18
- eodash_catalog/endpoints.py +669 -0
- eodash_catalog/generate_indicators.py +108 -1267
- eodash_catalog/sh_endpoint.py +1 -0
- eodash_catalog/stac_handling.py +359 -0
- eodash_catalog/thumbnails.py +49 -0
- eodash_catalog/utils.py +71 -45
- {eodash_catalog-0.0.7.dist-info → eodash_catalog-0.0.9.dist-info}/METADATA +45 -2
- eodash_catalog-0.0.9.dist-info/RECORD +14 -0
- {eodash_catalog-0.0.7.dist-info → eodash_catalog-0.0.9.dist-info}/WHEEL +1 -1
- eodash_catalog-0.0.7.dist-info/RECORD +0 -11
- {eodash_catalog-0.0.7.dist-info → eodash_catalog-0.0.9.dist-info}/entry_points.txt +0 -0
- {eodash_catalog-0.0.7.dist-info → eodash_catalog-0.0.9.dist-info}/licenses/LICENSE.txt +0 -0
|
@@ -0,0 +1,669 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
from datetime import datetime, timedelta
|
|
4
|
+
from itertools import groupby
|
|
5
|
+
from operator import itemgetter
|
|
6
|
+
|
|
7
|
+
import requests
|
|
8
|
+
from dateutil import parser
|
|
9
|
+
from pystac import (
|
|
10
|
+
Item,
|
|
11
|
+
Link,
|
|
12
|
+
SpatialExtent,
|
|
13
|
+
Summaries,
|
|
14
|
+
)
|
|
15
|
+
from pystac_client import Client
|
|
16
|
+
|
|
17
|
+
from eodash_catalog.sh_endpoint import get_SH_token
|
|
18
|
+
from eodash_catalog.stac_handling import (
|
|
19
|
+
add_collection_information,
|
|
20
|
+
add_example_info,
|
|
21
|
+
get_or_create_collection_and_times,
|
|
22
|
+
)
|
|
23
|
+
from eodash_catalog.thumbnails import generate_thumbnail
|
|
24
|
+
from eodash_catalog.utils import (
|
|
25
|
+
create_geojson_point,
|
|
26
|
+
generate_veda_cog_link,
|
|
27
|
+
retrieveExtentFromWMSWMTS,
|
|
28
|
+
)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def process_STAC_Datacube_Endpoint(config, endpoint, data, catalog):
    """Build a collection from a STAC datacube item, slicing its temporal axis.

    Opens the endpoint's STAC API, fetches the configured datacube item and
    creates one child item per value of the cube's temporal dimension.  The
    variable's unit (when present) is stored into ``data["yAxis"]`` unless
    already configured.

    :param config: catalog-level configuration
    :param endpoint: endpoint configuration (EndPoint, CollectionId, DatacubeId, Variable, ...)
    :param data: collection configuration (Name, ...)
    :param catalog: parent pystac catalog
    :returns: the populated collection
    :raises Exception: if the configured Variable is not present in the datacube
    """
    collection, _ = get_or_create_collection_and_times(
        catalog, data["Name"], data, config, endpoint
    )
    add_visualization_info(collection, data, endpoint)

    stac_endpoint_url = endpoint["EndPoint"]
    if endpoint.get("Name") == "xcube":
        stac_endpoint_url = stac_endpoint_url + endpoint.get("StacEndpoint", "")
    # assuming /search not implemented
    api = Client.open(stac_endpoint_url)
    coll = api.get_collection(endpoint.get("CollectionId", "datacubes"))
    # keep the source datacube item under its own name; previously the loop
    # below shadowed it with the per-time Item, which obscured where bbox and
    # geometry came from
    cube_item = coll.get_item(endpoint.get("DatacubeId"))
    # slice a datacube along temporal axis to individual items, selectively adding properties
    dimensions = cube_item.properties.get("cube:dimensions", {})
    variables = cube_item.properties.get("cube:variables")
    if endpoint.get("Variable") not in variables:
        raise Exception(f'Variable {endpoint.get("Variable")} not found in datacube {variables}')
    # default to a dimension literally named "time" if no temporal type is declared
    time_dimension = "time"
    for k, v in dimensions.items():
        if v.get("type") == "temporal":
            time_dimension = k
            break
    time_entries = dimensions.get(time_dimension).get("values")
    for t in time_entries:
        item = Item(
            id=t,
            bbox=cube_item.bbox,
            properties={},
            geometry=cube_item.geometry,
            datetime=parser.isoparse(t),
        )
        link = collection.add_item(item)
        link.extra_fields["datetime"] = t
        # bubble up information we want to the link
        item_datetime = item.get_datetime()
        # it is possible for datetime to be null, if it is start and end datetime have to exist
        if item_datetime:
            # NOTE(review): [:-6] assumes an aware datetime with a "+00:00"-style
            # offset suffix — confirm time entries are always timezone-aware
            link.extra_fields["datetime"] = item_datetime.isoformat()[:-6] + "Z"
        else:
            link.extra_fields["start_datetime"] = item.properties["start_datetime"]
            link.extra_fields["end_datetime"] = item.properties["end_datetime"]
    unit = variables.get(endpoint.get("Variable")).get("unit")
    if unit and "yAxis" not in data:
        data["yAxis"] = unit
    collection.update_extent_from_items()

    add_collection_information(config, collection, data)

    return collection
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def handle_STAC_based_endpoint(config, endpoint, data, catalog, options, headers=None):
    """Create collection(s) from a STAC API based endpoint.

    When ``data`` contains ``Locations``, a root collection is created with
    one sub-collection per location (optionally restricted via the location's
    ``FilterDates``), and location metadata (id, lat/lng, name) is bubbled up
    onto each child link.  Otherwise a single collection is built, optionally
    limited to ``endpoint["Bbox"]``.

    :param config: catalog-level configuration
    :param endpoint: endpoint configuration
    :param data: collection configuration
    :param catalog: parent pystac catalog
    :param options: CLI/run options passed through to item processing
    :param headers: optional HTTP headers (e.g. authorization) for the STAC API
    :returns: the root collection
    """
    if "Locations" in data:
        root_collection, _ = get_or_create_collection_and_times(
            catalog, data["Name"], data, config, endpoint
        )
        for location in data["Locations"]:
            if "FilterDates" in location:
                collection = process_STACAPI_Endpoint(
                    config=config,
                    endpoint=endpoint,
                    data=data,
                    catalog=catalog,
                    options=options,
                    headers=headers,
                    bbox=",".join(map(str, location["Bbox"])),
                    filter_dates=location["FilterDates"],
                    root_collection=root_collection,
                )
            else:
                collection = process_STACAPI_Endpoint(
                    config=config,
                    endpoint=endpoint,
                    data=data,
                    catalog=catalog,
                    options=options,
                    headers=headers,
                    bbox=",".join(map(str, location["Bbox"])),
                    root_collection=root_collection,
                )
            # Update identifier to use location as well as title
            # TODO: should we use the name as id? it provides much more
            # information in the clients
            collection.id = location["Identifier"]
            # BUGFIX: was `collection.title = (location["Name"],)`, which set
            # the title to a one-element tuple instead of a plain string
            collection.title = location["Name"]
            # See if description should be overwritten
            if "Description" in location:
                collection.description = location["Description"]
            else:
                collection.description = location["Name"]
            # TODO: should we remove all assets from sub collections?
            link = root_collection.add_child(collection)
            # single-quoted keys keep this f-string valid on Python < 3.12
            latlng = f'{location["Point"][1]},{location["Point"][0]}'
            # Add extra properties we need
            link.extra_fields["id"] = location["Identifier"]
            link.extra_fields["latlng"] = latlng
            link.extra_fields["name"] = location["Name"]
            add_example_info(collection, data, endpoint, config)
            if "OverwriteBBox" in location:
                collection.extent.spatial = SpatialExtent(
                    [
                        location["OverwriteBBox"],
                    ]
                )
        root_collection.update_extent_from_items()
        # Add bbox extents from children
        for c_child in root_collection.get_children():
            root_collection.extent.spatial.bboxes.append(c_child.extent.spatial.bboxes[0])
    else:
        if "Bbox" in endpoint:
            root_collection = process_STACAPI_Endpoint(
                config=config,
                endpoint=endpoint,
                data=data,
                catalog=catalog,
                options=options,
                headers=headers,
                bbox=",".join(map(str, endpoint["Bbox"])),
            )
        else:
            root_collection = process_STACAPI_Endpoint(
                config=config,
                endpoint=endpoint,
                data=data,
                catalog=catalog,
                options=options,
                headers=headers,
            )

    add_example_info(root_collection, data, endpoint, config)
    return root_collection
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def process_STACAPI_Endpoint(
    config,
    endpoint,
    data,
    catalog,
    options,
    headers=None,
    bbox=None,
    root_collection=None,
    filter_dates=None,
):
    """Fetch items from a STAC API and populate a collection with them.

    Searches ``endpoint["CollectionId"]`` within ``bbox`` (whole world when
    not given) over an effectively unbounded time range, keeping at most one
    item per calendar day and optionally restricting to ``filter_dates``.
    Thumbnails and visualization links are attached per item, and datetime
    information is bubbled up onto each item link.

    :param config: catalog-level configuration
    :param endpoint: endpoint configuration (EndPoint, CollectionId, Name, ...)
    :param data: collection configuration (Name used as final collection id)
    :param catalog: parent pystac catalog
    :param options: run options; ``options.tn`` toggles thumbnail generation
    :param headers: optional HTTP headers for the STAC API client
    :param bbox: "minx,miny,maxx,maxy" string; defaults to the whole world
    :param root_collection: optional collection the items point back to
    :param filter_dates: optional list of ISO dates to keep; others skipped
    :returns: the populated collection
    """
    if headers is None:
        headers = {}
    collection, _ = get_or_create_collection_and_times(
        catalog, endpoint["CollectionId"], data, config, endpoint
    )
    # add_visualization_info(collection, data, endpoint)

    api = Client.open(endpoint["EndPoint"], headers=headers)
    if bbox is None:
        # default to the whole world when no bbox was configured
        bbox = "-180,-90,180,90"
    results = api.search(
        collections=[endpoint["CollectionId"]],
        bbox=bbox,
        datetime=["1900-01-01T00:00:00Z", "3000-01-01T00:00:00Z"],
    )
    # We keep track of potential duplicate times in this list
    added_times = {}
    for item in results.items():
        item_datetime = item.get_datetime()
        if item_datetime is not None:
            # truncate to the ISO date (YYYY-MM-DD) for filtering/dedup
            iso_date = item_datetime.isoformat()[:10]
            # if filterdates has been specified skip dates not listed in config
            if filter_dates and iso_date not in filter_dates:
                continue
            if iso_date in added_times:
                continue
            added_times[iso_date] = True
        link = collection.add_item(item)
        # optionally render a thumbnail, preferring the default COG asset
        if options.tn:
            if "cog_default" in item.assets:
                generate_thumbnail(item, data, endpoint, item.assets["cog_default"].href)
            else:
                generate_thumbnail(item, data, endpoint)
        # Check if we can create visualization link
        if "Assets" in endpoint:
            add_visualization_info(item, data, endpoint, item.id)
            link.extra_fields["item"] = item.id
        elif "cog_default" in item.assets:
            add_visualization_info(item, data, endpoint, item.assets["cog_default"].href)
            link.extra_fields["cog_href"] = item.assets["cog_default"].href
        elif item_datetime:
            # NOTE(review): [:-6] assumes a "+00:00"-style UTC offset suffix
            # on an aware datetime — confirm items are always timezone-aware
            time_string = item_datetime.isoformat()[:-6] + "Z"
            add_visualization_info(item, data, endpoint, time=time_string)
        elif "start_datetime" in item.properties and "end_datetime" in item.properties:
            add_visualization_info(
                item,
                data,
                endpoint,
                time="{}/{}".format(
                    item.properties["start_datetime"], item.properties["end_datetime"]
                ),
            )
        # If a root collection exists we point back to it from the item
        if root_collection is not None:
            item.set_collection(root_collection)

        # bubble up information we want to the link
        # it is possible for datetime to be null, if it is start and end datetime have to exist
        if item_datetime:
            iso_time = item_datetime.isoformat()[:-6] + "Z"
            if endpoint["Name"] == "Sentinel Hub":
                # for SH WMS we only save the date (no time)
                link.extra_fields["datetime"] = iso_date
            else:
                link.extra_fields["datetime"] = iso_time
        else:
            link.extra_fields["start_datetime"] = item.properties["start_datetime"]
            link.extra_fields["end_datetime"] = item.properties["end_datetime"]

    collection.update_extent_from_items()

    # replace SH identifier with catalog identifier
    collection.id = data["Name"]
    add_collection_information(config, collection, data)

    # Check if we need to overwrite the bbox after update from items
    if "OverwriteBBox" in endpoint:
        collection.extent.spatial = SpatialExtent(
            [
                endpoint["OverwriteBBox"],
            ]
        )

    return collection
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
def handle_VEDA_endpoint(config, endpoint, data, catalog, options):
    """Handle a VEDA endpoint by delegating to the generic STAC-based handler."""
    return handle_STAC_based_endpoint(config, endpoint, data, catalog, options)
|
|
265
|
+
|
|
266
|
+
|
|
267
|
+
def handle_collection_only(config, endpoint, data, catalog):
    """Build a collection whose items are synthesized purely from configured times.

    One item per configured time is created (unless ``Disable_Items`` is set),
    each using the endpoint's ``OverwriteBBox`` as its bbox and carrying its
    time string on the item link's ``datetime`` extra field.
    """
    collection, times = get_or_create_collection_and_times(
        catalog, data["Name"], data, config, endpoint
    )
    items_enabled = not endpoint.get("Disable_Items")
    if items_enabled and len(times) > 0:
        item_bbox = endpoint.get("OverwriteBBox")
        for time_value in times:
            time_item = Item(
                id=time_value,
                bbox=item_bbox,
                properties={},
                geometry=None,
                datetime=parser.isoparse(time_value),
            )
            item_link = collection.add_item(time_item)
            item_link.extra_fields["datetime"] = time_value
    add_collection_information(config, collection, data)
    return collection
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
def handle_SH_WMS_endpoint(config, endpoint, data, catalog):
    """Build a Sentinel Hub WMS collection tree from configured locations.

    For every entry in ``data["Locations"]`` a sub-collection is created with
    one item per configured time step; location metadata (id, lat/lng,
    country, city) is bubbled up onto the child link of the root collection.

    NOTE(review): if ``data`` has no ``Locations`` key, ``root_collection``
    is never assigned and the final return raises ``NameError`` — confirm
    callers always supply Locations for this endpoint type.

    :returns: the root collection spanning all locations
    """
    # create collection and subcollections (based on locations)
    if "Locations" in data:
        root_collection, _ = get_or_create_collection_and_times(
            catalog, data["Name"], data, config, endpoint
        )
        for location in data["Locations"]:
            # create and populate location collections based on times
            # TODO: Should we add some new description per location?
            location_config = {
                "Title": location["Name"],
                "Description": "",
            }
            collection, _ = get_or_create_collection_and_times(
                catalog, location["Identifier"], location_config, config, endpoint
            )
            collection.extra_fields["endpointtype"] = endpoint["Name"]
            for time in location["Times"]:
                item = Item(
                    id=time,
                    bbox=location["Bbox"],
                    properties={},
                    geometry=None,
                    datetime=parser.isoparse(time),
                    stac_extensions=[
                        "https://stac-extensions.github.io/web-map-links/v1.1.0/schema.json",
                    ],
                )
                add_visualization_info(item, data, endpoint, time=time)
                item_link = collection.add_item(item)
                item_link.extra_fields["datetime"] = time

            link = root_collection.add_child(collection)
            # bubble up information we want to the link
            latlng = "{},{}".format(location["Point"][1], location["Point"][0])
            link.extra_fields["id"] = location["Identifier"]
            link.extra_fields["latlng"] = latlng
            link.extra_fields["country"] = location["Country"]
            link.extra_fields["city"] = location["Name"]
            collection.update_extent_from_items()
            add_visualization_info(collection, data, endpoint)

        root_collection.update_extent_from_items()
        # Add bbox extents from children
        for c_child in root_collection.get_children():
            root_collection.extent.spatial.bboxes.append(c_child.extent.spatial.bboxes[0])
    return root_collection
|
|
333
|
+
|
|
334
|
+
|
|
335
|
+
def handle_xcube_endpoint(config, endpoint, data: dict, catalog):
    """Handle an xcube endpoint: build the datacube-backed collection and attach example info."""
    collection = process_STAC_Datacube_Endpoint(
        config=config,
        endpoint=endpoint,
        data=data,
        catalog=catalog,
    )
    add_example_info(collection, data, endpoint, config)
    return collection
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
def handle_GeoDB_endpoint(config, endpoint, data: dict, catalog):
    """Build a collection of point items from a GeoDB table.

    Queries the GeoDB REST endpoint for (aoi, aoi_id, country, city, time)
    rows, groups them per ``aoi_id`` and creates one point item per unique
    location spanning the location's min/max time.  City and country lists
    are stored as collection summaries, and the y-axis label is fetched from
    the table when not already configured.

    NOTE(review): the ``requests.get`` calls have no timeout — a stalled
    GeoDB endpoint will hang catalog generation; confirm whether a timeout
    should be added.

    :returns: the populated collection
    """
    collection, _ = get_or_create_collection_and_times(
        catalog, endpoint["CollectionId"], data, config, endpoint
    )
    select = "?select=aoi,aoi_id,country,city,time"
    url = (
        endpoint["EndPoint"]
        + endpoint["Database"]
        + "_{}".format(endpoint["CollectionId"])
        + select
    )
    if additional_query_parameters := endpoint.get("AdditionalQueryString"):
        url += f"&{additional_query_parameters}"
    response = json.loads(requests.get(url).text)

    # Sort locations by key
    # (groupby requires input sorted by the same key to group correctly)
    sorted_locations = sorted(response, key=itemgetter("aoi_id"))
    cities = []
    countries = []
    for key, value in groupby(sorted_locations, key=itemgetter("aoi_id")):
        # Finding min and max values for date
        values = list(value)
        times = [datetime.fromisoformat(t["time"]) for t in values]
        # pick one representative row per aoi_id for its static attributes
        unique_values = next(iter({v["aoi_id"]: v for v in values}.values()))
        country = unique_values["country"]
        city = unique_values["city"]
        # configurable key used as the item identifier (defaults to city)
        IdKey = endpoint.get("IdKey", "city")
        IdValue = unique_values[IdKey]
        if country not in countries:
            countries.append(country)
        # sanitize unique key identifier to be sure it is saveable as a filename
        if IdValue is not None:
            IdValue = "".join(
                [c for c in IdValue if c.isalpha() or c.isdigit() or c == " "]
            ).rstrip()
        # Additional check to see if unique key name is empty afterwards
        if IdValue == "" or IdValue is None:
            # use aoi_id as a fallback unique id instead of configured key
            IdValue = key
        if city not in cities:
            cities.append(city)
        min_date = min(times)
        max_date = max(times)
        # "aoi" holds "lat,lon" as a comma-separated string
        latlon = unique_values["aoi"]
        [lat, lon] = [float(x) for x in latlon.split(",")]
        # create item for unique locations
        # small buffer around the point so the item has a non-degenerate bbox
        buff = 0.01
        bbox = [lon - buff, lat - buff, lon + buff, lat + buff]
        item = Item(
            id=IdValue,
            bbox=bbox,
            properties={},
            geometry=create_geojson_point(lon, lat),
            datetime=None,
            start_datetime=min_date,
            end_datetime=max_date,
        )
        link = collection.add_item(item)
        # bubble up information we want to the link
        link.extra_fields["id"] = key
        link.extra_fields["latlng"] = latlon
        link.extra_fields["country"] = country
        link.extra_fields["city"] = city

    if "yAxis" not in data:
        # fetch yAxis and store it to data, preventing need to save it per dataset in yml
        select = "?select=y_axis&limit=1"
        url = (
            endpoint["EndPoint"]
            + endpoint["Database"]
            + "_{}".format(endpoint["CollectionId"])
            + select
        )
        response = json.loads(requests.get(url).text)
        yAxis = response[0]["y_axis"]
        data["yAxis"] = yAxis
    add_collection_information(config, collection, data)
    add_example_info(collection, data, endpoint, config)

    collection.update_extent_from_items()
    collection.summaries = Summaries(
        {
            "cities": cities,
            "countries": countries,
        }
    )
    return collection
|
|
434
|
+
|
|
435
|
+
|
|
436
|
+
def handle_SH_endpoint(config, endpoint, data, catalog, options):
    """Handle a Sentinel Hub endpoint: authenticate, normalize the config, delegate.

    Fetches an SH access token for the request headers, pins the endpoint URL
    to the SH STAC catalog API, and prefixes the collection id with the
    configured type (e.g. ZARR or BYOC) before delegating to the generic
    STAC-based handler.
    """
    token = get_SH_token()
    auth_headers = {"Authorization": f"Bearer {token}"}
    endpoint["EndPoint"] = "https://services.sentinel-hub.com/api/v1/catalog/1.0.0/"
    # Overwrite collection id with type, such as ZARR or BYOC
    if "Type" in endpoint:
        endpoint["CollectionId"] = "{}-{}".format(endpoint["Type"], endpoint["CollectionId"])
    return handle_STAC_based_endpoint(config, endpoint, data, catalog, options, auth_headers)
|
|
445
|
+
|
|
446
|
+
|
|
447
|
+
def handle_WMS_endpoint(config, endpoint, data, catalog, wmts=False):
    """Build a collection for a WMS (or WMTS, when ``wmts=True``) endpoint.

    Times and spatial extent come from the service capabilities unless the
    endpoint is configured as ``Type: OverwriteTimes`` with an
    ``OverwriteBBox``; one item per time step is created unless
    ``Disable_Items`` is set.

    :returns: the populated collection
    """
    collection, times = get_or_create_collection_and_times(
        catalog, data["Name"], data, config, endpoint
    )
    # default extent: the collection's own first bbox, world as fallback
    spatial_extent = collection.extent.spatial.to_dict().get("bbox", [-180, -90, 180, 90])[0]
    if endpoint.get("Type") != "OverwriteTimes" or not endpoint.get("OverwriteBBox"):
        # some endpoints allow "narrowed-down" capabilities per-layer, which we utilize to not
        # have to process full service capabilities XML
        capabilities_url = endpoint["EndPoint"]
        spatial_extent, times = retrieveExtentFromWMSWMTS(
            capabilities_url,
            endpoint["LayerId"],
            version=endpoint.get("Version", "1.1.1"),
            wmts=wmts,
        )
    # Create an item per time to allow visualization in stac clients
    if len(times) > 0 and not endpoint.get("Disable_Items"):
        for t in times:
            item = Item(
                id=t,
                bbox=spatial_extent,
                properties={},
                geometry=None,
                datetime=parser.isoparse(t),
                stac_extensions=[
                    "https://stac-extensions.github.io/web-map-links/v1.1.0/schema.json",
                ],
            )
            add_visualization_info(item, data, endpoint, time=t)
            link = collection.add_item(item)
            link.extra_fields["datetime"] = t
        collection.update_extent_from_items()

    # Check if we should overwrite bbox
    if "OverwriteBBox" in endpoint:
        collection.extent.spatial = SpatialExtent(
            [
                endpoint["OverwriteBBox"],
            ]
        )
    add_collection_information(config, collection, data)
    return collection
|
|
489
|
+
|
|
490
|
+
|
|
491
|
+
def handle_GeoDB_Tiles_endpoint(config, endpoint, data, catalog):
    """Placeholder for GeoDB tiles support; intentionally not implemented yet."""
    raise NotImplementedError
|
|
493
|
+
|
|
494
|
+
|
|
495
|
+
def generate_veda_tiles_link(endpoint, item):
    """Return the VEDA raster-tile XYZ URL template for this endpoint config.

    Query parameters are appended in the order: collection, item (when
    given), one ``assets`` entry per configured asset, then the optional
    color formula and no-data value.
    """
    query = "collection={}".format(endpoint["CollectionId"])
    if item:
        query += f"&item={item}"
    for asset in endpoint["Assets"]:
        query += f"&assets={asset}"
    if "ColorFormula" in endpoint:
        query += "&color_formula={}".format(endpoint["ColorFormula"])
    if "NoData" in endpoint:
        query += "&no_data={}".format(endpoint["NoData"])
    base = "https://staging-raster.delta-backend.com/stac/tiles/WebMercatorQuad/{z}/{x}/{y}"
    return base + "?" + query
|
|
509
|
+
|
|
510
|
+
|
|
511
|
+
def add_visualization_info(stac_object, data, endpoint, file_url=None, time=None):
    """Attach a visualization link (wms/wmts/xyz) to ``stac_object``.

    Dispatches on ``endpoint["Name"]`` (and sometimes ``endpoint["Type"]``)
    to build the appropriate service URL and link metadata.  Unsupported
    endpoint names are reported on stdout and no link is added.

    :param stac_object: pystac object (Item or Collection) receiving the link
    :param data: collection-level configuration (provides the link title)
    :param endpoint: endpoint configuration describing the service
    :param file_url: optional asset/item reference used by VEDA cog/tiles links
    :param time: optional time value encoded into the link's dimensions
    """
    # add extension reference
    if endpoint["Name"] == "Sentinel Hub" or endpoint["Name"] == "Sentinel Hub WMS":
        # instance id comes from the environment unless overridden per endpoint
        instanceId = os.getenv("SH_INSTANCE_ID")
        if "InstanceId" in endpoint:
            instanceId = endpoint["InstanceId"]
        extra_fields = {
            "wms:layers": [endpoint["LayerId"]],
            "role": ["data"],
        }
        if time is not None:
            if endpoint["Name"] == "Sentinel Hub WMS":
                # SH WMS for public collections needs time interval, we use full day here
                datetime_object = datetime.strptime(time, "%Y-%m-%d")
                start = datetime_object.isoformat()
                end = (datetime_object + timedelta(days=1) - timedelta(milliseconds=1)).isoformat()
                time_interval = f"{start}/{end}"
                extra_fields["wms:dimensions"] = {"TIME": time_interval}
            if endpoint["Name"] == "Sentinel Hub":
                extra_fields["wms:dimensions"] = {"TIME": time}
        stac_object.add_link(
            Link(
                rel="wms",
                target=f"https://services.sentinel-hub.com/ogc/wms/{instanceId}",
                media_type=(endpoint.get("MimeType", "image/png")),
                title=data["Name"],
                extra_fields=extra_fields,
            )
        )
    elif endpoint["Name"] == "WMS":
        extra_fields = {
            "wms:layers": [endpoint["LayerId"]],
            "role": ["data"],
        }
        if time is not None:
            extra_fields["wms:dimensions"] = {
                "TIME": time,
            }
        if "Styles" in endpoint:
            extra_fields["wms:styles"] = endpoint["Styles"]
        media_type = "image/jpeg"
        if "MediaType" in endpoint:
            media_type = endpoint["MediaType"]
        stac_object.add_link(
            Link(
                rel="wms",
                target=endpoint["EndPoint"],
                media_type=media_type,
                title=data["Name"],
                extra_fields=extra_fields,
            )
        )
    elif endpoint["Name"] == "JAXA_WMTS_PALSAR":
        target_url = "{}".format(endpoint.get("EndPoint"))
        # custom time just for this special case as a default for collection wmts
        extra_fields = {"wmts:layer": endpoint.get("LayerId").replace("{time}", time or "2017")}
        stac_object.add_link(
            Link(
                rel="wmts",
                target=target_url,
                media_type="image/png",
                title="wmts capabilities",
                extra_fields=extra_fields,
            )
        )
    elif endpoint["Name"] == "xcube":
        if endpoint["Type"] == "zarr":
            # either preset ColormapName or left as a template
            cbar = endpoint.get("ColormapName", "{cbar}")
            # either preset Rescale or left as a template
            vmin = "{vmin}"
            vmax = "{vmax}"
            if "Rescale" in endpoint:
                vmin = endpoint["Rescale"][0]
                vmax = endpoint["Rescale"][1]
            crs = endpoint.get("Crs", "EPSG:3857")
            target_url = (
                "{}/tiles/{}/{}/{{z}}/{{y}}/{{x}}" "?crs={}&time={{time}}&vmin={}&vmax={}&cbar={}"
            ).format(
                endpoint["EndPoint"],
                endpoint["DatacubeId"],
                endpoint["Variable"],
                crs,
                vmin,
                vmax,
                cbar,
            )
            stac_object.add_link(
                Link(
                    rel="xyz",
                    target=target_url,
                    media_type="image/png",
                    title="xcube tiles",
                )
            )
        elif endpoint["Type"] == "WMTSCapabilities":
            target_url = "{}".format(endpoint.get("EndPoint"))
            extra_fields = {
                "wmts:layer": endpoint.get("LayerId"),
                "role": ["data"],
            }
            dimensions = {}
            if time is not None:
                dimensions["time"] = time
            if dimensions_config := endpoint.get("Dimensions", {}):
                for key, value in dimensions_config.items():
                    dimensions[key] = value
            if dimensions != {}:
                extra_fields["wmts:dimensions"] = dimensions
            stac_object.add_link(
                Link(
                    rel="wmts",
                    target=target_url,
                    media_type="image/png",
                    title="wmts capabilities",
                    extra_fields=extra_fields,
                )
            )
    elif endpoint["Name"] == "VEDA":
        # BUGFIX: initialize target_url so an unexpected Type no longer
        # raises NameError at the `if target_url` check below
        target_url = None
        if endpoint["Type"] == "cog":
            target_url = generate_veda_cog_link(endpoint, file_url)
        elif endpoint["Type"] == "tiles":
            target_url = generate_veda_tiles_link(endpoint, file_url)
        if target_url:
            stac_object.add_link(
                Link(
                    rel="xyz",
                    target=target_url,
                    media_type="image/png",
                    title=data["Name"],
                )
            )
    elif endpoint["Name"] == "GeoDB Vector Tiles":
        # `${geoserverUrl}${config.layerName}@EPSG%3A${projString}@pbf/{z}/{x}/{-y}.pbf`,
        # 'geodb_debd884d-92f9-4979-87b6-eadef1139394:GTIF_AT_Gemeinden_3857'
        target_url = "{}{}:{}_{}@EPSG:3857@pbf/{{z}}/{{x}}/{{-y}}.pbf".format(
            endpoint["EndPoint"],
            endpoint["Instance"],
            endpoint["Database"],
            endpoint["CollectionId"],
        )
        stac_object.add_link(
            Link(
                rel="xyz",
                target=target_url,
                media_type="application/pbf",
                title=data["Name"],
                extra_fields={
                    "description": data["Title"],
                    "parameters": endpoint["Parameters"],
                    "matchKey": endpoint["MatchKey"],
                    "timeKey": endpoint["TimeKey"],
                    "source": endpoint["Source"],
                    "role": ["data"],
                },
            )
        )
    else:
        print("Visualization endpoint not supported")