eodash_catalog 0.0.7__py3-none-any.whl → 0.0.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eodash_catalog/__about__.py +1 -1
- eodash_catalog/duration.py +7 -18
- eodash_catalog/endpoints.py +669 -0
- eodash_catalog/generate_indicators.py +108 -1267
- eodash_catalog/sh_endpoint.py +1 -0
- eodash_catalog/stac_handling.py +359 -0
- eodash_catalog/thumbnails.py +49 -0
- eodash_catalog/utils.py +71 -45
- {eodash_catalog-0.0.7.dist-info → eodash_catalog-0.0.9.dist-info}/METADATA +45 -2
- eodash_catalog-0.0.9.dist-info/RECORD +14 -0
- {eodash_catalog-0.0.7.dist-info → eodash_catalog-0.0.9.dist-info}/WHEEL +1 -1
- eodash_catalog-0.0.7.dist-info/RECORD +0 -11
- {eodash_catalog-0.0.7.dist-info → eodash_catalog-0.0.9.dist-info}/entry_points.txt +0 -0
- {eodash_catalog-0.0.7.dist-info → eodash_catalog-0.0.9.dist-info}/licenses/LICENSE.txt +0 -0
eodash_catalog/stac_handling.py
ADDED
@@ -0,0 +1,359 @@
+from datetime import datetime
+
+import requests
+import spdx_lookup as lookup
+import yaml
+from dateutil import parser
+from pystac import (
+    Asset,
+    Collection,
+    Extent,
+    Link,
+    Provider,
+    SpatialExtent,
+    TemporalExtent,
+)
+from yaml.loader import SafeLoader
+
+from eodash_catalog.utils import generateDateIsostringsFromInterval
+
+
+def get_or_create_collection_and_times(catalog, collection_id, data, config, endpoint=None):
+    # Check if collection already in catalog
+    for collection in catalog.get_collections():
+        if collection.id == collection_id:
+            return collection, []
+    # If none found create a new one
+    spatial_extent = endpoint.get("OverwriteBBox", [-180.0, -90.0, 180.0, 90.0])
+
+    spatial_extent = SpatialExtent(
+        [
+            spatial_extent,
+        ]
+    )
+    times = []
+    temporal_extent = TemporalExtent([[datetime.now(), None]])
+    if endpoint and endpoint.get("Type") == "OverwriteTimes":
+        if endpoint.get("Times"):
+            times = endpoint.get("Times")
+            times_datetimes = sorted([parser.isoparse(time) for time in times])
+            temporal_extent = TemporalExtent([[times_datetimes[0], times_datetimes[-1]]])
+        elif endpoint.get("DateTimeInterval"):
+            start = endpoint["DateTimeInterval"].get("Start", "2020-09-01T00:00:00")
+            end = endpoint["DateTimeInterval"].get("End", "2020-10-01T00:00:00")
+            timedelta_config = endpoint["DateTimeInterval"].get("Timedelta", {"days": 1})
+            times = generateDateIsostringsFromInterval(start, end, timedelta_config)
+            times_datetimes = sorted([parser.isoparse(time) for time in times])
+            temporal_extent = TemporalExtent([[times_datetimes[0], times_datetimes[-1]]])
+    extent = Extent(spatial=spatial_extent, temporal=temporal_extent)
+
+    # Check if description is link to markdown file
+    if "Description" in data:
+        description = data["Description"]
+        if description.endswith((".md", ".MD")):
+            if description.startswith("http"):
+                # if full absolute path is defined
+                response = requests.get(description)
+                if response.status_code == 200:
+                    description = response.text
+                elif "Subtitle" in data:
+                    print("WARNING: Markdown file could not be fetched")
+                    description = data["Subtitle"]
+            else:
+                # relative path to assets was given
+                response = requests.get(f"{config["assets_endpoint"]}/{description}")
+                if response.status_code == 200:
+                    description = response.text
+                elif "Subtitle" in data:
+                    print("WARNING: Markdown file could not be fetched")
+                    description = data["Subtitle"]
+    elif "Subtitle" in data:
+        # Try to use at least subtitle to fill some information
+        description = data["Subtitle"]
+
+    collection = Collection(
+        id=collection_id,
+        title=data["Title"],
+        description=description,
+        extent=extent,
+    )
+    return (collection, times)
+
+
+def create_web_map_link(layer, role):
+    extra_fields = {
+        "roles": [role],
+        "id": layer["id"],
+    }
+    if layer.get("default"):
+        extra_fields["roles"].append("default")
+    if layer.get("visible"):
+        extra_fields["roles"].append("visible")
+    if "visible" in layer and not layer["visible"]:
+        extra_fields["roles"].append("invisible")
+
+    match layer["protocol"]:
+        case "wms":
+            # handle wms special config options
+            extra_fields["wms:layers"] = layer["layers"]
+            if "styles" in layer:
+                extra_fields["wms:styles"] = layer["styles"]
+            # TODO: handle wms dimensions extra_fields["wms:dimensions"]
+        case "wmts":
+            extra_fields["wmts:layer"] = layer["layer"]
+            # TODO: handle wmts dimensions extra_fields["wmts:dimensions"]
+
+    wml = Link(
+        rel=layer["protocol"],
+        target=layer["url"],
+        media_type=layer.get("media_type", "image/png"),
+        title=layer["name"],
+        extra_fields=extra_fields,
+    )
+    return wml
+
+
+def add_example_info(stac_object, data, endpoint, config):
+    if "Services" in data:
+        for service in data["Services"]:
+            if service["Name"] == "Statistical API":
+                service_type = service.get("Type", "byoc")
+                stac_object.add_link(
+                    Link(
+                        rel="example",
+                        target="{}/{}".format(config["assets_endpoint"], service["Script"]),
+                        title="evalscript",
+                        media_type="application/javascript",
+                        extra_fields={
+                            "example:language": "JavaScript",
+                            "dataId": "{}-{}".format(service_type, service["CollectionId"]),
+                        },
+                    )
+                )
+            if service["Name"] == "VEDA Statistics":
+                stac_object.add_link(
+                    Link(
+                        rel="example",
+                        target=service["Endpoint"],
+                        title=service["Name"],
+                        media_type="application/json",
+                        extra_fields={
+                            "example:language": "JSON",
+                        },
+                    )
+                )
+            if service["Name"] == "EOxHub Notebook":
+                # TODO: we need to consider if we can improve information added
+                stac_object.add_link(
+                    Link(
+                        rel="example",
+                        target=service["Url"],
+                        title=(service["Title"] if "Title" in service else service["Name"]),
+                        media_type="application/x-ipynb+json",
+                        extra_fields={
+                            "example:language": "Jupyter Notebook",
+                            "example:container": True,
+                        },
+                    )
+                )
+    elif "Resources" in data:
+        for service in data["Resources"]:
+            if service.get("Name") == "xcube":
+                target_url = "{}/timeseries/{}/{}?aggMethods=median".format(
+                    endpoint["EndPoint"],
+                    endpoint["DatacubeId"],
+                    endpoint["Variable"],
+                )
+                stac_object.add_link(
+                    Link(
+                        rel="example",
+                        target=target_url,
+                        title=service["Name"] + " analytics",
+                        media_type="application/json",
+                        extra_fields={
+                            "example:language": "JSON",
+                            "example:method": "POST",
+                        },
+                    )
+                )
+
+
+def add_collection_information(config, collection, data):
+    # Add metadata information
+    # Check license identifier
+    if "License" in data:
+        # Check if list was provided
+        if isinstance(data["License"], list):
+            if len(data["License"]) == 1:
+                collection.license = "proprietary"
+                link = Link(
+                    rel="license",
+                    target=data["License"][0]["Url"],
+                    media_type=(data["License"][0].get("Type", "text/html")),
+                )
+                if "Title" in data["License"][0]:
+                    link.title = data["License"][0]["Title"]
+                collection.links.append(link)
+            elif len(data["License"]) > 1:
+                collection.license = "various"
+                for license_entry in data["License"]:
+                    link = Link(
+                        rel="license",
+                        target=license_entry["Url"],
+                        media_type="text/html"
+                        if "Type" in license_entry
+                        else license_entry["Type"],
+                    )
+                    if "Title" in license_entry:
+                        link.title = license_entry["Title"]
+                    collection.links.append(link)
+        else:
+            license_data = lookup.by_id(data["License"])
+            if license_data is not None:
+                collection.license = license_data.id
+                if license_data.sources:
+                    # add links to licenses
+                    for source in license_data.sources:
+                        collection.links.append(
+                            Link(
+                                rel="license",
+                                target=source,
+                                media_type="text/html",
+                            )
+                        )
+            else:
+                # fallback to proprietary
+                print("WARNING: License could not be parsed, falling back to proprietary")
+                collection.license = "proprietary"
+    else:
+        # print("WARNING: No license was provided, falling back to proprietary")
+        pass
+
+    if "Provider" in data:
+        try:
+            collection.providers = [
+                Provider(
+                    # convert information to lower case
+                    **{k.lower(): v for k, v in provider.items()}
+                )
+                for provider in data["Provider"]
+            ]
+        except Exception:
+            print(f"WARNING: Issue creating provider information for collection: {collection.id}")
+
+    if "Citation" in data:
+        if "DOI" in data["Citation"]:
+            collection.extra_fields["sci:doi"] = data["Citation"]["DOI"]
+        if "Citation" in data["Citation"]:
+            collection.extra_fields["sci:citation"] = data["Citation"]["Citation"]
+        if "Publication" in data["Citation"]:
+            collection.extra_fields["sci:publications"] = [
+                # convert keys to lower case
+                {k.lower(): v for k, v in publication.items()}
+                for publication in data["Citation"]["Publication"]
+            ]
+
+    if "Subtitle" in data:
+        collection.extra_fields["subtitle"] = data["Subtitle"]
+    if "Legend" in data:
+        collection.add_asset(
+            "legend",
+            Asset(
+                href=f"{config["assets_endpoint"]}/{data["Legend"]}",
+                media_type="image/png",
+                roles=["metadata"],
+            ),
+        )
+    if "Story" in data:
+        collection.add_asset(
+            "story",
+            Asset(
+                href=f"{config["assets_endpoint"]}/{data["Story"]}",
+                media_type="text/markdown",
+                roles=["metadata"],
+            ),
+        )
+    if "Image" in data:
+        collection.add_asset(
+            "thumbnail",
+            Asset(
+                href=f"{config["assets_endpoint"]}/{data["Image"]}",
+                media_type="image/png",
+                roles=["thumbnail"],
+            ),
+        )
+    # Add extra fields to collection if available
+    add_extra_fields(collection, data)
+
+    if "References" in data:
+        generic_counter = 1
+        for ref in data["References"]:
+            if "Key" in ref:
+                key = ref["Key"]
+            else:
+                key = f"reference_{generic_counter}"
+                generic_counter = generic_counter + 1
+            collection.add_asset(
+                key,
+                Asset(
+                    href=ref["Url"],
+                    title=ref["Name"],
+                    media_type=ref.get("MediaType", "text/html"),
+                    roles=["metadata"],
+                ),
+            )
+
+
+def add_base_overlay_info(collection, config, data):
+    # check if default base layers defined
+    if "default_base_layers" in config:
+        with open(f"{config["default_base_layers"]}.yaml") as f:
+            base_layers = yaml.load(f, Loader=SafeLoader)
+            for layer in base_layers:
+                collection.add_link(create_web_map_link(layer, role="baselayer"))
+    # check if default overlay layers defined
+    if "default_overlay_layers" in config:
+        with open("{}.yaml".format(config["default_overlay_layers"])) as f:
+            overlay_layers = yaml.load(f, Loader=SafeLoader)
+            for layer in overlay_layers:
+                collection.add_link(create_web_map_link(layer, role="overlay"))
+    if "BaseLayers" in data:
+        for layer in data["BaseLayers"]:
+            collection.add_link(create_web_map_link(layer, role="baselayer"))
+    if "OverlayLayers" in data:
+        for layer in data["OverlayLayers"]:
+            collection.add_link(create_web_map_link(layer, role="overlay"))
+    # TODO: possibility to overwrite default base and overlay layers
+
+
+def add_extra_fields(stac_object, data):
+    if "yAxis" in data:
+        stac_object.extra_fields["yAxis"] = data["yAxis"]
+    if "Themes" in data:
+        stac_object.extra_fields["themes"] = data["Themes"]
+    if "Locations" in data or "Subcollections" in data:
+        stac_object.extra_fields["locations"] = True
+    if "Tags" in data:
+        stac_object.extra_fields["tags"] = data["Tags"]
+    if "Satellite" in data:
+        stac_object.extra_fields["satellite"] = data["Satellite"]
+    if "Sensor" in data:
+        stac_object.extra_fields["sensor"] = data["Sensor"]
+    if "Agency" in data:
+        stac_object.extra_fields["agency"] = data["Agency"]
+    if "yAxis" in data:
+        stac_object.extra_fields["yAxis"] = data["yAxis"]
+    if "EodashIdentifier" in data:
+        stac_object.extra_fields["subcode"] = data["EodashIdentifier"]
+    if "DataSource" in data:
+        if "Spaceborne" in data["DataSource"]:
+            if "Sensor" in data["DataSource"]["Spaceborne"]:
+                stac_object.extra_fields["sensor"] = data["DataSource"]["Spaceborne"]["Sensor"]
+            if "Satellite" in data["DataSource"]["Spaceborne"]:
+                stac_object.extra_fields["satellite"] = data["DataSource"]["Spaceborne"][
+                    "Satellite"
+                ]
+        if "InSitu" in data["DataSource"]:
+            stac_object.extra_fields["insituSources"] = data["DataSource"]["InSitu"]
+        if "Other" in data["DataSource"]:
+            stac_object.extra_fields["otherSources"] = data["DataSource"]["Other"]
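For orientation, here is a minimal usage sketch of the new `create_web_map_link` helper from the hunk above. The dict keys mirror exactly what the function reads; the concrete values and URL are made-up placeholders, not taken from the package:

```python
# Hypothetical input; only the keys are taken from the function above.
from eodash_catalog.stac_handling import create_web_map_link

layer = {
    "id": "OSM",                       # stored in extra_fields["id"]
    "name": "OpenStreetMap",           # becomes the Link title
    "protocol": "wms",                 # selects the "wms" match branch
    "url": "https://example.com/wms",  # placeholder target URL
    "layers": "osm",                   # mapped to "wms:layers"
    "visible": True,                   # appends the "visible" role
}

link = create_web_map_link(layer, role="baselayer")
# -> pystac.Link with rel="wms", roles ["baselayer", "visible"],
#    and the default media_type "image/png" since none was given
print(link.to_dict())
```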
eodash_catalog/thumbnails.py
ADDED
@@ -0,0 +1,49 @@
+import os
+import re
+from pathlib import Path
+
+import requests
+
+from eodash_catalog.utils import generate_veda_cog_link
+
+
+def fetch_and_save_thumbnail(data, url):
+    collection_path = "../thumbnails/{}_{}/".format(data["EodashIdentifier"], data["Name"])
+    Path(collection_path).mkdir(parents=True, exist_ok=True)
+    image_path = f"{collection_path}/thumbnail.png"
+    if not os.path.exists(image_path):
+        data = requests.get(url).content
+        with open(image_path, "wb") as f:
+            f.write(data)
+
+
+def generate_thumbnail(stac_object, data, endpoint, file_url=None, time=None, styles=None):
+    if endpoint["Name"] == "Sentinel Hub" or endpoint["Name"] == "WMS":
+        instanceId = os.getenv("SH_INSTANCE_ID")
+        if "InstanceId" in endpoint:
+            instanceId = endpoint["InstanceId"]
+        # Build example url
+        wms_config = (
+            "REQUEST=GetMap&SERVICE=WMS&VERSION=1.3.0&FORMAT=image/png&STYLES=&TRANSPARENT=true"
+        )
+        bbox_s = stac_object.bbox
+        bbox = f"{bbox_s[1]},{bbox_s[0]},{bbox_s[3]},{bbox_s[2]}"
+        output_format = f"format=image/png&WIDTH=256&HEIGHT=128&CRS=EPSG:4326&BBOX={bbox}"
+        item_datetime = stac_object.get_datetime()
+        # it is possible for datetime to be null,
+        # if it is start and end datetime have to exist
+        if item_datetime:
+            time = item_datetime.isoformat()[:-6] + "Z"
+        url = "https://services.sentinel-hub.com/ogc/wms/{}?{}&layers={}&time={}&{}".format(
+            instanceId,
+            wms_config,
+            endpoint["LayerId"],
+            time,
+            output_format,
+        )
+        fetch_and_save_thumbnail(data, url)
+    elif endpoint["Name"] == "VEDA":
+        target_url = generate_veda_cog_link(endpoint, file_url)
+        # set to get 0/0/0 tile
+        url = re.sub(r"\{.\}", "0", target_url)
+        fetch_and_save_thumbnail(data, url)
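An aside on the VEDA branch above: `generate_veda_cog_link` (added to utils.py below) returns a tile URL that still contains `{z}/{x}/{y}` placeholders, and the `re.sub(r"\{.\}", "0", ...)` call pins all three to the 0/0/0 overview tile. A standalone sketch, with an assumed template URL and invented query values:

```python
import re

# Assumed output shape of generate_veda_cog_link; values are illustrative only.
template = (
    "https://staging-raster.delta-backend.com/cog/tiles/WebMercatorQuad/"
    "{z}/{x}/{y}?url=s3://bucket/file.tif&resampling_method=nearest&rescale=0,100"
)
# \{.\} matches any single character wrapped in braces: {z}, {x}, {y}
print(re.sub(r"\{.\}", "0", template))
# -> .../WebMercatorQuad/0/0/0?url=s3://bucket/file.tif&...
```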
eodash_catalog/utils.py
CHANGED
@@ -1,15 +1,18 @@
-import json
 import re
-
-from …
-from decimal import Decimal
+import threading
+from collections.abc import Iterator
 from datetime import datetime, timedelta
-from …
-from …
+from decimal import Decimal
+from functools import reduce
+from typing import Any
+
+from dateutil import parser
 from owslib.wms import WebMapService
 from owslib.wmts import WebMapTileService
-from …
-import …
+from pystac import Catalog
+from six import string_types
+
+from eodash_catalog.duration import Duration

 ISO8601_PERIOD_REGEX = re.compile(
     r"^(?P<sign>[+-])?"
@@ -25,26 +28,28 @@ ISO8601_PERIOD_REGEX = re.compile(
 # regular expression to parse ISO duartion strings.


-def create_geojson_point(lon, lat):
+def create_geojson_point(lon: int | float, lat: int | float) -> dict[str, Any]:
     point = {"type": "Point", "coordinates": [lon, lat]}
     return {"type": "Feature", "geometry": point, "properties": {}}


-def retrieveExtentFromWMSWMTS(capabilties_url, layer, version='1.1.1', wmts=False):
+def retrieveExtentFromWMSWMTS(
+    capabilities_url: str, layer: str, version: str = "1.1.1", wmts: bool = False
+):
     times = []
-    service = None
     try:
         if not wmts:
-            service = WebMapService(…
+            service = WebMapService(capabilities_url, version=version)
         else:
-            service = WebMapTileService(…
+            service = WebMapTileService(capabilities_url)
         if layer in list(service.contents):
             tps = []
-            if not wmts and service[layer].timepositions…
+            if not wmts and service[layer].timepositions is not None:
                 tps = service[layer].timepositions
             elif wmts:
+                time_dimension = service[layer].dimensions.get("time")
                 # specifically taking 'time' dimension
-                if time_dimension…
+                if time_dimension:
                     tps = time_dimension["values"]
             for tp in tps:
                 tp_def = tp.split("/")
@@ -59,14 +64,14 @@ def retrieveExtentFromWMSWMTS(capabilties_url, layer, version='1.1.1', wmts=False)
                 times.append(tp)
         times = [time.replace("\n", "").strip() for time in times]
         # get unique times
-        times = reduce(lambda re, x: re…
+        times = reduce(lambda re, x: [*re, x] if x not in re else re, times, [])
     except Exception as e:
         print("Issue extracting information from service capabilities")
         template = "An exception of type {0} occurred. Arguments:\n{1!r}"
         message = template.format(type(e).__name__, e.args)
         print(message)

-    bbox = [-180, -90, 180, 90]
+    bbox = [-180.0, -90.0, 180.0, 90.0]
     if service and service[layer].boundingBoxWGS84:
         bbox = [float(x) for x in service[layer].boundingBoxWGS84]
     return bbox, times
@@ -84,34 +89,8 @@ def parse_duration(datestring):
     Parses an ISO 8601 durations into datetime.timedelta
     """
     if not isinstance(datestring, string_types):
-        raise TypeError("Expecting a string…
+        raise TypeError(f"Expecting a string {datestring}")
     match = ISO8601_PERIOD_REGEX.match(datestring)
-    if not match:
-        # try alternative format:
-        if datestring.startswith("P"):
-            durdt = parse_datetime(datestring[1:])
-            if durdt.year != 0 or durdt.month != 0:
-                # create Duration
-                ret = Duration(
-                    days=durdt.day,
-                    seconds=durdt.second,
-                    microseconds=durdt.microsecond,
-                    minutes=durdt.minute,
-                    hours=durdt.hour,
-                    months=durdt.month,
-                    years=durdt.year,
-                )
-            else:  # FIXME: currently not possible in alternative format
-                # create timedelta
-                ret = timedelta(
-                    days=durdt.day,
-                    seconds=durdt.second,
-                    microseconds=durdt.microsecond,
-                    minutes=durdt.minute,
-                    hours=durdt.hour,
-                )
-            return ret
-        raise ISO8601Error("Unable to parse duration string %r" % datestring)
     groups = match.groupdict()
     for key, val in groups.items():
         if key not in ("separator", "sign"):
@@ -149,7 +128,9 @@ def parse_duration(datestring):
     return ret


-def generateDateIsostringsFromInterval(start, end, timedelta_config=…
+def generateDateIsostringsFromInterval(start: str, end: str, timedelta_config: dict | None = None):
+    if timedelta_config is None:
+        timedelta_config = {}
     start_dt = datetime.fromisoformat(start)
     if end == "today":
         end = datetime.now().isoformat()
@@ -174,3 +155,48 @@ class RaisingThread(threading.Thread):
         super().join(timeout=timeout)
         if self._exc:
             raise self._exc
+
+
+def recursive_save(stac_object: Catalog, no_items: bool = False) -> None:
+    stac_object.save_object()
+    for child in stac_object.get_children():
+        recursive_save(child, no_items)
+    if not no_items:
+        # try to save items if available
+        for item in stac_object.get_items():
+            item.save_object()
+
+
+def iter_len_at_least(i, n: int) -> int:
+    return sum(1 for _ in zip(range(n), i, strict=False)) == n
+
+
+def generate_veda_cog_link(endpoint, file_url):
+    bidx = ""
+    if "Bidx" in endpoint:
+        # Check if an array was provided
+        if hasattr(endpoint["Bidx"], "__len__"):
+            for band in endpoint["Bidx"]:
+                bidx = bidx + f"&bidx={band}"
+        else:
+            bidx = "&bidx={}".format(endpoint["Bidx"])
+
+    colormap = ""
+    if "Colormap" in endpoint:
+        colormap = "&colormap={}".format(endpoint["Colormap"])
+        # TODO: For now we assume a already urlparsed colormap definition
+        # it could be nice to allow a json and better convert it on the fly
+        # colormap = "&colormap=%s"%(urllib.parse.quote(str(endpoint["Colormap"])))
+
+    colormap_name = ""
+    if "ColormapName" in endpoint:
+        colormap_name = "&colormap_name={}".format(endpoint["ColormapName"])
+
+    rescale = ""
+    if "Rescale" in endpoint:
+        rescale = "&rescale={},{}".format(endpoint["Rescale"][0], endpoint["Rescale"][1])
+
+    file_url = f"url={file_url}&" if file_url else ""
+
+    target_url = f"https://staging-raster.delta-backend.com/cog/tiles/WebMercatorQuad/{{z}}/{{x}}/{{y}}?{file_url}resampling_method=nearest{bidx}{colormap}{colormap_name}{rescale}"
+    return target_url
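One behavioral detail in the utils.py changes above: `generateDateIsostringsFromInterval` moves from a default `timedelta_config` value (truncated in this diff) to the `None`-sentinel idiom, avoiding a shared mutable default. A sketch of a call matching the `DateTimeInterval` defaults used in stac_handling.py; the output shape is inferred from the callers, which parse each entry with `dateutil.parser.isoparse`:

```python
from eodash_catalog.utils import generateDateIsostringsFromInterval

# Same arguments the DateTimeInterval defaults in stac_handling.py would pass.
times = generateDateIsostringsFromInterval(
    "2020-09-01T00:00:00",
    "2020-10-01T00:00:00",
    timedelta_config={"days": 1},
)
print(times[0], times[-1])  # presumably ISO strings spanning the interval
```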
{eodash_catalog-0.0.7.dist-info → eodash_catalog-0.0.9.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.…
+Metadata-Version: 2.3
 Name: eodash_catalog
-Version: 0.0.7
+Version: 0.0.9
 Summary: This package is intended to help create a compatible STAC catalog for the eodash dashboard client. It supports configuration of multiple endpoint types for information extraction.
 Project-URL: Documentation, https://github.com/eodash/eodash_catalog#readme
 Project-URL: Issues, https://github.com/eodash/eodash_catalog/issues
@@ -57,6 +57,49 @@ Description-Content-Type: text/markdown
 pip install eodash_catalog
 ```

+## Testing
+
+Project uses pytest and runs it as part of CI:
+
+```bash
+python -m pytest
+```
+
+## Testing
+
+Project uses ruff to perform checks on code style and formatting
+
+```bash
+ruff check .
+```
+
+## Versioning and branches
+
+eodash_catalog adheres to [Semantic Versioning](https://semver.org/) and follows these rules:
+
+Given a version number `MAJOR.MINOR.PATCH`, we increment the:
+
+- `MAJOR` version when we make incompatible API changes
+- `MINOR` version when we add functionality in a backward compatible manner
+- `PATCH` version when we make backward compatible bug fixes
+
+Active development is followed by the `main` branch.
+`
+New features or maintenance commits should be done against this branch in the form of a Merge Request of a Feature branch.
+
+## Tagging
+
+This repository uses bump2version for managing tags. To bump a version use
+
+```bash
+bump2version <major|minor|patch> # or bump2version --new-version <new_version>
+git push && git push --tags
+```
+
+Pushing a tag in the repository automatically creates:
+
+- versioned package on pypi
+
 ## License

 `eodash_catalog` is distributed under the terms of the [MIT](https://spdx.org/licenses/MIT.html) license.
eodash_catalog-0.0.9.dist-info/RECORD
ADDED
@@ -0,0 +1,14 @@
+eodash_catalog/__about__.py,sha256=3LUM-OiYdbFCeovLVNBJ4p0kHdKQnUAE8vPJdtdpHhA,137
+eodash_catalog/__init__.py,sha256=_W_9emPYf6FUqc0P8L2SmADx6hGSd7PlQV3yRmCk5uM,115
+eodash_catalog/duration.py,sha256=B6XOZfvNU7SuqpxuVtT1kNKODoOQJXDI6mocvA_U1ik,10816
+eodash_catalog/endpoints.py,sha256=HrUjXNf6Eikd7uGmCnaZS31cHuZP_nYIgbGtAvSN4Q8,25942
+eodash_catalog/generate_indicators.py,sha256=1k__jFWChnFb7We3E5_wE3_gQN56eslAm1EcFB4MBNw,17227
+eodash_catalog/sh_endpoint.py,sha256=vHCqUnjXH4xB9T7L8UKd36TtUyqsyJLE84QBSXaONaA,582
+eodash_catalog/stac_handling.py,sha256=6ollozeJZbB7_FKKewG1P6qRyBCtt6bPrTSQhOedc04,14366
+eodash_catalog/thumbnails.py,sha256=yqjxJoZbTl2l2eyrRAnR13cU9fDHC0NSVT6ZnITSruw,1901
+eodash_catalog/utils.py,sha256=-uOspvzGNUeGpr1tjeySyBVV13kQYEMsC6Njw6JvhDc,6969
+eodash_catalog-0.0.9.dist-info/METADATA,sha256=ZcHAcKfXa6YY8BcMB2U56HCk8ti9c0LF55v9OyqM6Pk,3202
+eodash_catalog-0.0.9.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+eodash_catalog-0.0.9.dist-info/entry_points.txt,sha256=kuUQrDG1PtYd8kPjf5XM6H_NtQd9Ozwl0jjiGtAvZSM,87
+eodash_catalog-0.0.9.dist-info/licenses/LICENSE.txt,sha256=oJCW5zQxnFD-J0hGz6Zh5Lkpdk1oAndmWhseTmV224E,1107
+eodash_catalog-0.0.9.dist-info/RECORD,,