eodash_catalog 0.0.9__tar.gz → 0.0.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of eodash_catalog might be problematic.

Files changed (23)
  1. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/.bumpversion.cfg +1 -1
  2. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/.github/workflows/python-publish.yml +2 -0
  3. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/PKG-INFO +1 -1
  4. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/__about__.py +1 -1
  5. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/endpoints.py +66 -46
  6. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/generate_indicators.py +15 -30
  7. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/sh_endpoint.py +1 -1
  8. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/stac_handling.py +13 -8
  9. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/thumbnails.py +16 -6
  10. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/utils.py +40 -5
  11. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/.github/workflows/ci.yml +0 -0
  12. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/.gitignore +0 -0
  13. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/.vscode/extensions.json +0 -0
  14. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/.vscode/settings.json +0 -0
  15. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/LICENSE.txt +0 -0
  16. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/README.md +0 -0
  17. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/pyproject.toml +0 -0
  18. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/requirements.txt +0 -0
  19. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/ruff.toml +0 -0
  20. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/__init__.py +0 -0
  21. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/duration.py +0 -0
  22. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/tests/__init__.py +0 -0
  23. {eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/tests/test_generate.py +0 -0

{eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.0.9
+current_version = 0.0.11
 commit = True
 tag = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-(?P<release>[a-z]+)\.(?P<build>\d+))?

{eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/.github/workflows/python-publish.yml
@@ -9,7 +9,9 @@ on:
     - '*'
 jobs:
   deploy:
+
     runs-on: ubuntu-latest
+
     steps:
     - uses: actions/checkout@v4
     - name: Set up Python

{eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: eodash_catalog
-Version: 0.0.9
+Version: 0.0.11
 Summary: This package is intended to help create a compatible STAC catalog for the eodash dashboard client. It supports configuration of multiple endpoint types for information extraction.
 Project-URL: Documentation, https://github.com/eodash/eodash_catalog#readme
 Project-URL: Issues, https://github.com/eodash/eodash_catalog/issues

{eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/__about__.py
@@ -1,4 +1,4 @@
 # SPDX-FileCopyrightText: 2024-present Daniel Santillan <daniel.santillan@eox.at>
 #
 # SPDX-License-Identifier: MIT
-__version__ = "0.0.9"
+__version__ = "0.0.11"

{eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/endpoints.py
@@ -1,17 +1,13 @@
 import json
 import os
+import uuid
 from datetime import datetime, timedelta
 from itertools import groupby
 from operator import itemgetter
 
 import requests
 from dateutil import parser
-from pystac import (
-    Item,
-    Link,
-    SpatialExtent,
-    Summaries,
-)
+from pystac import Catalog, Collection, Item, Link, SpatialExtent, Summaries
 from pystac_client import Client
 
 from eodash_catalog.sh_endpoint import get_SH_token
@@ -22,13 +18,16 @@ from eodash_catalog.stac_handling import (
 )
 from eodash_catalog.thumbnails import generate_thumbnail
 from eodash_catalog.utils import (
+    Options,
     create_geojson_point,
     generate_veda_cog_link,
     retrieveExtentFromWMSWMTS,
 )
 
 
-def process_STAC_Datacube_Endpoint(config, endpoint, data, catalog):
+def process_STAC_Datacube_Endpoint(
+    config: dict, endpoint: dict, data: dict, catalog: Catalog
+) -> Collection:
     collection, _ = get_or_create_collection_and_times(
         catalog, data["Name"], data, config, endpoint
     )
@@ -39,11 +38,17 @@ def process_STAC_Datacube_Endpoint(config, endpoint, data, catalog):
     stac_endpoint_url = stac_endpoint_url + endpoint.get("StacEndpoint", "")
     # assuming /search not implemented
     api = Client.open(stac_endpoint_url)
-    coll = api.get_collection(endpoint.get("CollectionId", "datacubes"))
-    item = coll.get_item(endpoint.get("DatacubeId"))
+    collection_id = endpoint.get("CollectionId", "datacubes")
+    coll = api.get_collection(collection_id)
+    if not coll:
+        raise ValueError(f"Collection {collection_id} not found in endpoint {endpoint}")
+    item_id = endpoint.get("DatacubeId", "")
+    item = coll.get_item(item_id)
+    if not item:
+        raise ValueError(f"Item {item_id} not found in collection {coll}")
     # slice a datacube along temporal axis to individual items, selectively adding properties
     dimensions = item.properties.get("cube:dimensions", {})
-    variables = item.properties.get("cube:variables")
+    variables = item.properties.get("cube:variables", {})
     if endpoint.get("Variable") not in variables:
         raise Exception(f'Variable {endpoint.get("Variable")} not found in datacube {variables}')
     time_dimension = "time"
@@ -80,7 +85,9 @@ def process_STAC_Datacube_Endpoint(config, endpoint, data, catalog):
     return collection
 
 
-def handle_STAC_based_endpoint(config, endpoint, data, catalog, options, headers=None):
+def handle_STAC_based_endpoint(
+    config: dict, endpoint: dict, data: dict, catalog: Catalog, options: Options, headers=None
+) -> Collection:
     if "Locations" in data:
         root_collection, _ = get_or_create_collection_and_times(
             catalog, data["Name"], data, config, endpoint
@@ -112,8 +119,8 @@ handle_STAC_based_endpoint(config, endpoint, data, catalog, options, headers
             # Update identifier to use location as well as title
             # TODO: should we use the name as id? it provides much more
             # information in the clients
-            collection.id = location["Identifier"]
-            collection.title = (location["Name"],)
+            collection.id = location.get("Identifier", uuid.uuid4())
+            collection.title = location.get("Name")
             # See if description should be overwritten
             if "Description" in location:
                 collection.description = location["Description"]
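
The location identifier now falls back to a random UUID when a location entry has no "Identifier", and the title is no longer wrapped in a one-element tuple. A hedged sketch of the same fallback pattern (the helper below is hypothetical and not part of the package; pystac stores ids as plain strings, hence the explicit str() conversion):

    import uuid


    def location_identifier(location: dict) -> str:
        # Prefer the configured Identifier; otherwise generate a random unique id.
        return str(location.get("Identifier") or uuid.uuid4())


    print(location_identifier({"Identifier": "WSF_Evolution-Tokyo"}))  # WSF_Evolution-Tokyo
    print(location_identifier({"Name": "Somewhere"}))                  # e.g. 5ab4e0f0-...
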
@@ -136,7 +143,8 @@ handle_STAC_based_endpoint(config, endpoint, data, catalog, options, headers
         root_collection.update_extent_from_items()
         # Add bbox extents from children
         for c_child in root_collection.get_children():
-            root_collection.extent.spatial.bboxes.append(c_child.extent.spatial.bboxes[0])
+            if isinstance(c_child, Collection):
+                root_collection.extent.spatial.bboxes.append(c_child.extent.spatial.bboxes[0])
     else:
         if "Bbox" in endpoint:
             root_collection = process_STACAPI_Endpoint(
@@ -163,26 +171,25 @@ handle_STAC_based_endpoint(config, endpoint, data, catalog, options, headers
 
 
 def process_STACAPI_Endpoint(
-    config,
-    endpoint,
-    data,
-    catalog,
-    options,
-    headers=None,
+    config: dict,
+    endpoint: dict,
+    data: dict,
+    catalog: Catalog,
+    options: Options,
+    headers: dict[str, str] | None = None,
     bbox=None,
-    root_collection=None,
-    filter_dates=None,
-):
+    root_collection: Collection | None = None,
+    filter_dates: list[str] | None = None,
+) -> Collection:
     if headers is None:
         headers = {}
     collection, _ = get_or_create_collection_and_times(
         catalog, endpoint["CollectionId"], data, config, endpoint
     )
-    # add_visualization_info(collection, data, endpoint)
 
     api = Client.open(endpoint["EndPoint"], headers=headers)
     if bbox is None:
-        bbox = "-180,-90,180,90"
+        bbox = [-180, -90, 180, 90]
     results = api.search(
         collections=[endpoint["CollectionId"]],
         bbox=bbox,
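
The default search bbox also changes from a comma-separated string to a list of floats, which is the form pystac-client accepts directly. A minimal sketch of such a search against a public STAC API (the endpoint URL, collection id, and date range are illustrative, not taken from this package):

    from pystac_client import Client

    # Open a STAC API and search one collection over the whole globe for one month.
    api = Client.open("https://earth-search.aws.element84.com/v1")
    results = api.search(
        collections=["sentinel-2-l2a"],
        bbox=[-180, -90, 180, 90],
        datetime="2024-01-01/2024-01-31",
        max_items=10,
    )
    for item in results.items():
        print(item.id, item.datetime)
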
@@ -226,7 +233,7 @@ def process_STACAPI_Endpoint(
             ),
         )
         # If a root collection exists we point back to it from the item
-        if root_collection is not None:
+        if root_collection:
             item.set_collection(root_collection)
 
         # bubble up information we want to the link
@@ -259,12 +266,16 @@ def process_STACAPI_Endpoint(
     return collection
 
 
-def handle_VEDA_endpoint(config, endpoint, data, catalog, options):
+def handle_VEDA_endpoint(
+    config: dict, endpoint: dict, data: dict, catalog: Catalog, options: Options
+) -> Collection:
     collection = handle_STAC_based_endpoint(config, endpoint, data, catalog, options)
     return collection
 
 
-def handle_collection_only(config, endpoint, data, catalog):
+def handle_collection_only(
+    config: dict, endpoint: dict, data: dict, catalog: Catalog
+) -> Collection:
     collection, times = get_or_create_collection_and_times(
         catalog, data["Name"], data, config, endpoint
     )
@@ -283,7 +294,9 @@ def handle_collection_only(config, endpoint, data, catalog):
     return collection
 
 
-def handle_SH_WMS_endpoint(config, endpoint, data, catalog):
+def handle_SH_WMS_endpoint(
+    config: dict, endpoint: dict, data: dict, catalog: Catalog
+) -> Collection:
     # create collection and subcollections (based on locations)
     if "Locations" in data:
         root_collection, _ = get_or_create_collection_and_times(
@@ -328,23 +341,24 @@ def handle_SH_WMS_endpoint(config, endpoint, data, catalog):
         root_collection.update_extent_from_items()
         # Add bbox extents from children
         for c_child in root_collection.get_children():
-            root_collection.extent.spatial.bboxes.append(c_child.extent.spatial.bboxes[0])
+            if isinstance(c_child, Collection):
+                root_collection.extent.spatial.bboxes.append(c_child.extent.spatial.bboxes[0])
     return root_collection
 
 
-def handle_xcube_endpoint(config, endpoint, data: dict, catalog):
-    root_collection = process_STAC_Datacube_Endpoint(
+def handle_xcube_endpoint(config: dict, endpoint: dict, data: dict, catalog: Catalog) -> Collection:
+    collection = process_STAC_Datacube_Endpoint(
         config=config,
         endpoint=endpoint,
         data=data,
         catalog=catalog,
     )
 
-    add_example_info(root_collection, data, endpoint, config)
-    return root_collection
+    add_example_info(collection, data, endpoint, config)
+    return collection
 
 
-def handle_GeoDB_endpoint(config, endpoint, data: dict, catalog):
+def handle_GeoDB_endpoint(config: dict, endpoint: dict, data: dict, catalog: Catalog) -> Collection:
     collection, _ = get_or_create_collection_and_times(
         catalog, endpoint["CollectionId"], data, config, endpoint
     )
@@ -433,7 +447,9 @@ def handle_GeoDB_endpoint(config, endpoint, data: dict, catalog):
     return collection
 
 
-def handle_SH_endpoint(config, endpoint, data, catalog, options):
+def handle_SH_endpoint(
+    config: dict, endpoint: dict, data: dict, catalog: Catalog, options: Options
+) -> Collection:
     token = get_SH_token()
     headers = {"Authorization": f"Bearer {token}"}
     endpoint["EndPoint"] = "https://services.sentinel-hub.com/api/v1/catalog/1.0.0/"
@@ -444,7 +460,9 @@ def handle_SH_endpoint(config, endpoint, data, catalog, options):
     return collection
 
 
-def handle_WMS_endpoint(config, endpoint, data, catalog, wmts=False):
+def handle_WMS_endpoint(
+    config: dict, endpoint: dict, data: dict, catalog: Catalog, wmts: bool = False
+) -> Collection:
     collection, times = get_or_create_collection_and_times(
         catalog, data["Name"], data, config, endpoint
     )
@@ -488,11 +506,7 @@ def handle_WMS_endpoint(config, endpoint, data, catalog, wmts=False):
     return collection
 
 
-def handle_GeoDB_Tiles_endpoint(config, endpoint, data, catalog):
-    raise NotImplementedError
-
-
-def generate_veda_tiles_link(endpoint, item):
+def generate_veda_tiles_link(endpoint: dict, item: str | None) -> str:
     collection = "collection={}".format(endpoint["CollectionId"])
     assets = ""
     for asset in endpoint["Assets"]:
@@ -508,13 +522,19 @@ def generate_veda_tiles_link(endpoint, item):
     return target_url
 
 
-def add_visualization_info(stac_object, data, endpoint, file_url=None, time=None):
+def add_visualization_info(
+    stac_object: Collection | Item,
+    data: dict,
+    endpoint: dict,
+    file_url: str | None = None,
+    time: str | None = None,
+) -> None:
     # add extension reference
     if endpoint["Name"] == "Sentinel Hub" or endpoint["Name"] == "Sentinel Hub WMS":
         instanceId = os.getenv("SH_INSTANCE_ID")
         if "InstanceId" in endpoint:
             instanceId = endpoint["InstanceId"]
-        extra_fields = {
+        extra_fields: dict[str, list[str] | dict[str, str]] = {
             "wms:layers": [endpoint["LayerId"]],
             "role": ["data"],
         }
@@ -563,7 +583,7 @@ def add_visualization_info(stac_object, data, endpoint, file_url=None, time=None
     elif endpoint["Name"] == "JAXA_WMTS_PALSAR":
         target_url = "{}".format(endpoint.get("EndPoint"))
         # custom time just for this special case as a default for collection wmts
-        extra_fields = {"wmts:layer": endpoint.get("LayerId").replace("{time}", time or "2017")}
+        extra_fields = {"wmts:layer": endpoint.get("LayerId", "").replace("{time}", time or "2017")}
         stac_object.add_link(
             Link(
                 rel="wmts",
@@ -606,7 +626,7 @@ def add_visualization_info(stac_object, data, endpoint, file_url=None, time=None
     elif endpoint["Type"] == "WMTSCapabilities":
         target_url = "{}".format(endpoint.get("EndPoint"))
         extra_fields = {
-            "wmts:layer": endpoint.get("LayerId"),
+            "wmts:layer": endpoint.get("LayerId", ""),
             "role": ["data"],
         }
         dimensions = {}
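
Throughout add_visualization_info, the visualization endpoint is attached to the STAC object as a pystac Link whose extra_fields carry the "wms:"/"wmts:" hints read by the client. A minimal sketch of that pattern (the rel value, URL, and layer name are placeholders for illustration, not values from this diff):

    from pystac import Link

    link = Link(
        rel="wms",
        target="https://services.sentinel-hub.com/ogc/wms/<instance-id>",
        media_type="text/xml",
        title="Visualization",
        extra_fields={
            "wms:layers": ["EXAMPLE_LAYER"],
            "role": ["data"],
        },
    )
    # stac_object.add_link(link) would attach it to a Collection or Item.
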

{eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/generate_indicators.py
@@ -6,17 +6,12 @@ Indicator generator to harvest information from endpoints and generate catalog
 
 import os
 import time
-from dataclasses import dataclass
+from typing import Any
 
 import click
 import yaml
 from dotenv import load_dotenv
-from pystac import (
-    Catalog,
-    CatalogType,
-    Collection,
-    Summaries,
-)
+from pystac import Catalog, CatalogType, Collection, Link, Summaries
 from pystac.layout import TemplateLayoutStrategy
 from pystac.validation import validate_all
 from yaml.loader import SafeLoader
@@ -24,7 +19,6 @@ from yaml.loader import SafeLoader
 from eodash_catalog.endpoints import (
     handle_collection_only,
     handle_GeoDB_endpoint,
-    handle_GeoDB_Tiles_endpoint,
     handle_SH_endpoint,
     handle_SH_WMS_endpoint,
     handle_VEDA_endpoint,
@@ -38,7 +32,9 @@ from eodash_catalog.stac_handling import (
     get_or_create_collection_and_times,
 )
 from eodash_catalog.utils import (
+    Options,
     RaisingThread,
+    add_single_item_if_collection_empty,
     iter_len_at_least,
     recursive_save,
 )
@@ -47,18 +43,6 @@ from eodash_catalog.utils import (
 
 load_dotenv()
 
-@dataclass
-class Options:
-    catalogspath: str
-    collectionspath: str
-    indicatorspath: str
-    outputpath: str
-    vd: bool
-    ni: bool
-    tn: bool
-    collections: list[str]
-
-
 def process_catalog_file(file_path: str, options: Options):
     print("Processing catalog:", file_path)
     with open(file_path) as f:
@@ -129,7 +113,7 @@ def process_catalog_file(file_path: str, options: Options):
             print(f"Issue validation collection: {e}")
 
 
-def extract_indicator_info(parent_collection):
+def extract_indicator_info(parent_collection: Collection):
     to_extract = [
         "subcode",
         "themes",
@@ -139,7 +123,7 @@ def extract_indicator_info(parent_collection):
         "cities",
         "countries",
     ]
-    summaries = {}
+    summaries: dict[str, Any] = {}
     for key in to_extract:
         summaries[key] = set()
 
@@ -189,12 +173,13 @@ def process_indicator_file(config: dict, file_path: str, catalog: Catalog, optio
     parent_indicator.update_extent_from_items()
     # Add bbox extents from children
     for c_child in parent_indicator.get_children():
-        parent_indicator.extent.spatial.bboxes.append(c_child.extent.spatial.bboxes[0])
+        if isinstance(c_child, Collection):  # typing reason
+            parent_indicator.extent.spatial.bboxes.append(c_child.extent.spatial.bboxes[0])
     # extract collection information and add it to summary indicator level
     extract_indicator_info(parent_indicator)
     # add baselayer and overview information to indicator collection
     add_base_overlay_info(parent_indicator, config, data)
-    add_to_catalog(parent_indicator, catalog, None, data)
+    add_to_catalog(parent_indicator, catalog, {}, data)
 
 
 def process_collection_file(
@@ -221,8 +206,6 @@ def process_collection_file(
             collection = handle_xcube_endpoint(config, resource, data, catalog)
         elif resource["Name"] == "WMS":
             collection = handle_WMS_endpoint(config, resource, data, catalog)
-        elif resource["Name"] == "GeoDB Vector Tiles":
-            collection = handle_GeoDB_Tiles_endpoint(config, resource, data, catalog)
         elif resource["Name"] == "JAXA_WMTS_PALSAR":
             # somewhat one off creation of individual WMTS layers as individual items
             collection = handle_WMS_endpoint(config, resource, data, catalog, wmts=True)
@@ -231,6 +214,7 @@ def process_collection_file(
         else:
             raise ValueError("Type of Resource is not supported")
         if collection is not None:
+            add_single_item_if_collection_empty(collection)
             add_to_catalog(collection, catalog, resource, data)
         else:
             raise Exception("No collection generated")
@@ -303,7 +287,8 @@ def process_collection_file(
         parent_collection.update_extent_from_items()
         # Add bbox extents from children
         for c_child in parent_collection.get_children():
-            parent_collection.extent.spatial.bboxes.append(c_child.extent.spatial.bboxes[0])
+            if isinstance(c_child, Collection):
+                parent_collection.extent.spatial.bboxes.append(c_child.extent.spatial.bboxes[0])
         # Fill summaries for locations
         parent_collection.summaries = Summaries(
             {
@@ -311,17 +296,17 @@ def process_collection_file(
                 "countries": list(set(countries)),
             }
         )
-        add_to_catalog(parent_collection, catalog, None, data)
+        add_to_catalog(parent_collection, catalog, {}, data)
 
 
-def add_to_catalog(collection, catalog, endpoint, data):
+def add_to_catalog(collection: Collection, catalog: Catalog, endpoint: dict, data: dict):
     # check if already in catalog, if it is do not re-add it
     # TODO: probably we should add to the catalog only when creating
     for cat_coll in catalog.get_collections():
         if cat_coll.id == collection.id:
             return
 
-    link = catalog.add_child(collection)
+    link: Link = catalog.add_child(collection)
     # bubble fields we want to have up to collection link and add them to collection
     if endpoint and "Type" in endpoint:
         collection.extra_fields["endpointtype"] = "{}_{}".format(

{eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/sh_endpoint.py
@@ -4,7 +4,7 @@ from oauthlib.oauth2 import BackendApplicationClient
 from requests_oauthlib import OAuth2Session
 
 
-def get_SH_token():
+def get_SH_token() -> str:
     # Your client credentials
     client_id = os.getenv("SH_CLIENT_ID")
     client_secret = os.getenv("SH_CLIENT_SECRET")

{eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/stac_handling.py
@@ -6,6 +6,7 @@ import yaml
 from dateutil import parser
 from pystac import (
     Asset,
+    Catalog,
     Collection,
     Extent,
     Link,
@@ -18,7 +19,9 @@ from yaml.loader import SafeLoader
 from eodash_catalog.utils import generateDateIsostringsFromInterval
 
 
-def get_or_create_collection_and_times(catalog, collection_id, data, config, endpoint=None):
+def get_or_create_collection_and_times(
+    catalog: Catalog, collection_id: str, data: dict, config: dict, endpoint: dict
+) -> tuple[Collection, list[str]]:
     # Check if collection already in catalog
     for collection in catalog.get_collections():
         if collection.id == collection_id:
@@ -31,11 +34,11 @@ def get_or_create_collection_and_times(catalog, collection_id, data, config, end
             spatial_extent,
         ]
     )
-    times = []
+    times: list[str] = []
     temporal_extent = TemporalExtent([[datetime.now(), None]])
     if endpoint and endpoint.get("Type") == "OverwriteTimes":
         if endpoint.get("Times"):
-            times = endpoint.get("Times")
+            times = list(endpoint.get("Times", []))
             times_datetimes = sorted([parser.isoparse(time) for time in times])
             temporal_extent = TemporalExtent([[times_datetimes[0], times_datetimes[-1]]])
         elif endpoint.get("DateTimeInterval"):
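
For the OverwriteTimes branch above, the temporal extent is derived by parsing and sorting the configured ISO strings and taking the first and last values. A small self-contained sketch of that pattern (the timestamps are made up):

    from dateutil import parser
    from pystac import TemporalExtent

    times = ["2021-03-01T00:00:00Z", "2020-01-01T00:00:00Z", "2022-07-15T00:00:00Z"]
    times_datetimes = sorted(parser.isoparse(t) for t in times)
    temporal_extent = TemporalExtent([[times_datetimes[0], times_datetimes[-1]]])
    print(temporal_extent.intervals)  # [[2020-01-01 ..., 2022-07-15 ...]]
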
@@ -80,7 +83,7 @@ def get_or_create_collection_and_times(catalog, collection_id, data, config, end
     return (collection, times)
 
 
-def create_web_map_link(layer, role):
+def create_web_map_link(layer: dict, role: str) -> Link:
     extra_fields = {
         "roles": [role],
         "id": layer["id"],
@@ -113,7 +116,9 @@ def create_web_map_link(layer, role):
     return wml
 
 
-def add_example_info(stac_object, data, endpoint, config):
+def add_example_info(
+    stac_object: Collection | Catalog, data: dict, endpoint: dict, config: dict
+) -> None:
     if "Services" in data:
         for service in data["Services"]:
             if service["Name"] == "Statistical API":
@@ -178,7 +183,7 @@ def add_example_info(stac_object, data, endpoint, config):
         )
 
 
-def add_collection_information(config, collection, data):
+def add_collection_information(config: dict, collection: Collection, data: dict) -> None:
     # Add metadata information
     # Check license identifier
     if "License" in data:
@@ -304,7 +309,7 @@ def add_collection_information(config, collection, data):
         )
 
 
-def add_base_overlay_info(collection, config, data):
+def add_base_overlay_info(collection: Collection, config: dict, data: dict) -> None:
     # check if default base layers defined
     if "default_base_layers" in config:
         with open(f"{config["default_base_layers"]}.yaml") as f:
@@ -326,7 +331,7 @@ def add_base_overlay_info(collection, config, data):
     # TODO: possibility to overwrite default base and overlay layers
 
 
-def add_extra_fields(stac_object, data):
+def add_extra_fields(stac_object: Collection | Catalog | Link, data: dict) -> None:
     if "yAxis" in data:
         stac_object.extra_fields["yAxis"] = data["yAxis"]
     if "Themes" in data:

{eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/thumbnails.py
@@ -3,21 +3,30 @@ import re
 from pathlib import Path
 
 import requests
+from pystac import (
+    Item,
+)
 
 from eodash_catalog.utils import generate_veda_cog_link
 
 
-def fetch_and_save_thumbnail(data, url):
+def fetch_and_save_thumbnail(data: dict, url: str) -> None:
     collection_path = "../thumbnails/{}_{}/".format(data["EodashIdentifier"], data["Name"])
     Path(collection_path).mkdir(parents=True, exist_ok=True)
     image_path = f"{collection_path}/thumbnail.png"
     if not os.path.exists(image_path):
-        data = requests.get(url).content
+        dd = requests.get(url).content
         with open(image_path, "wb") as f:
-            f.write(data)
+            f.write(dd)
 
 
-def generate_thumbnail(stac_object, data, endpoint, file_url=None, time=None, styles=None):
+def generate_thumbnail(
+    stac_object: Item,
+    data: dict,
+    endpoint: dict,
+    file_url: str = "",
+    time: str | None = None,
+) -> None:
     if endpoint["Name"] == "Sentinel Hub" or endpoint["Name"] == "WMS":
         instanceId = os.getenv("SH_INSTANCE_ID")
         if "InstanceId" in endpoint:
@@ -26,8 +35,9 @@ def generate_thumbnail(stac_object, data, endpoint, file_url=None, time=None, st
         wms_config = (
            "REQUEST=GetMap&SERVICE=WMS&VERSION=1.3.0&FORMAT=image/png&STYLES=&TRANSPARENT=true"
         )
-        bbox_s = stac_object.bbox
-        bbox = f"{bbox_s[1]},{bbox_s[0]},{bbox_s[3]},{bbox_s[2]}"
+        bbox = [-180, -85, 180, 85]
+        if bbox_s := stac_object.bbox:
+            bbox = f"{bbox_s[1]},{bbox_s[0]},{bbox_s[3]},{bbox_s[2]}"  # type: ignore
         output_format = f"format=image/png&WIDTH=256&HEIGHT=128&CRS=EPSG:4326&BBOX={bbox}"
         item_datetime = stac_object.get_datetime()
         # it is possible for datetime to be null,
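
The thumbnail code now keeps a whole-world default and only swaps coordinate order when the item actually has a bbox. The swap exists because WMS 1.3.0 with CRS=EPSG:4326 expects BBOX in latitude/longitude order, while STAC bboxes are [min_lon, min_lat, max_lon, max_lat]. A hypothetical helper showing just that reordering:

    def stac_bbox_to_wms130(bbox: list[float]) -> str:
        # STAC order: lon/lat; WMS 1.3.0 with EPSG:4326 order: lat/lon.
        min_lon, min_lat, max_lon, max_lat = bbox
        return f"{min_lat},{min_lon},{max_lat},{max_lon}"


    print(stac_bbox_to_wms130([-180.0, -85.0, 180.0, 85.0]))  # -85.0,-180.0,85.0,180.0
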

{eodash_catalog-0.0.9 → eodash_catalog-0.0.11}/src/eodash_catalog/utils.py
@@ -1,6 +1,8 @@
 import re
 import threading
+import uuid
 from collections.abc import Iterator
+from dataclasses import dataclass
 from datetime import datetime, timedelta
 from decimal import Decimal
 from functools import reduce
@@ -9,7 +11,7 @@ from typing import Any
 from dateutil import parser
 from owslib.wms import WebMapService
 from owslib.wmts import WebMapTileService
-from pystac import Catalog
+from pystac import Catalog, Collection, Item, RelType
 from six import string_types
 
 from eodash_catalog.duration import Duration
@@ -35,7 +37,7 @@ def create_geojson_point(lon: int | float, lat: int | float) -> dict[str, Any]:
 
 def retrieveExtentFromWMSWMTS(
     capabilities_url: str, layer: str, version: str = "1.1.1", wmts: bool = False
-):
+) -> tuple[list[float], list[str]]:
     times = []
     try:
         if not wmts:
@@ -91,7 +93,9 @@ def parse_duration(datestring):
     if not isinstance(datestring, string_types):
         raise TypeError(f"Expecting a string {datestring}")
     match = ISO8601_PERIOD_REGEX.match(datestring)
-    groups = match.groupdict()
+    groups = {}
+    if match:
+        groups = match.groupdict()
     for key, val in groups.items():
         if key not in ("separator", "sign"):
             if val is None:
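
The parse_duration change guards against re.match returning None for strings that are not ISO 8601 durations, which previously raised AttributeError on .groupdict(). A self-contained illustration of the guard, using a deliberately simplified pattern rather than the package's own regex:

    import re

    SIMPLE_PERIOD = re.compile(r"^P(?:(?P<days>\d+)D)?$")  # simplified, days only


    def day_count(datestring: str) -> int:
        match = SIMPLE_PERIOD.match(datestring)
        groups = {}
        if match:
            groups = match.groupdict()
        return int(groups.get("days") or 0)


    print(day_count("P5D"))   # 5
    print(day_count("nope"))  # 0 instead of an AttributeError
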
@@ -128,7 +132,9 @@ def parse_duration(datestring):
     return ret
 
 
-def generateDateIsostringsFromInterval(start: str, end: str, timedelta_config: dict | None = None):
+def generateDateIsostringsFromInterval(
+    start: str, end: str, timedelta_config: dict | None = None
+) -> list[str]:
     if timedelta_config is None:
         timedelta_config = {}
     start_dt = datetime.fromisoformat(start)
@@ -171,7 +177,7 @@ def iter_len_at_least(i, n: int) -> int:
     return sum(1 for _ in zip(range(n), i, strict=False)) == n
 
 
-def generate_veda_cog_link(endpoint, file_url):
+def generate_veda_cog_link(endpoint: dict, file_url: str | None) -> str:
     bidx = ""
     if "Bidx" in endpoint:
         # Check if an array was provided
@@ -200,3 +206,32 @@ def generate_veda_cog_link(endpoint, file_url):
 
     target_url = f"https://staging-raster.delta-backend.com/cog/tiles/WebMercatorQuad/{{z}}/{{x}}/{{y}}?{file_url}resampling_method=nearest{bidx}{colormap}{colormap_name}{rescale}"
     return target_url
+
+
+@dataclass
+class Options:
+    catalogspath: str
+    collectionspath: str
+    indicatorspath: str
+    outputpath: str
+    vd: bool
+    ni: bool
+    tn: bool
+    collections: list[str]
+
+
+def add_single_item_if_collection_empty(collection: Collection) -> None:
+    for link in collection.links:
+        if link.rel == RelType.ITEM:
+            break
+    else:
+        item = Item(
+            id=str(uuid.uuid4()),
+            bbox=[-180, -85, 180, 85],
+            properties={},
+            geometry=None,
+            datetime=datetime(1970, 1, 1, 0, 0, 0),
+            start_datetime=datetime(1970, 1, 1, 0, 0, 0),
+            end_datetime=datetime.now(),
+        )
+        collection.add_item(item)
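
The Options dataclass (previously defined in generate_indicators.py) and the new add_single_item_if_collection_empty helper now live in utils.py. The helper uses Python's for/else: the else block runs only when the loop finds no link with rel == "item", so a placeholder item spanning 1970 to now is added exactly once to an empty collection. A hedged usage sketch, assuming eodash_catalog 0.0.11 is installed; the ids, extents, and flag values below are illustrative:

    from datetime import datetime

    from pystac import Collection, Extent, SpatialExtent, TemporalExtent

    from eodash_catalog.utils import Options, add_single_item_if_collection_empty

    # CLI-style options normally assembled by generate_indicators; the boolean
    # flags are passed through as-is and their exact semantics are not shown in this diff.
    options = Options(
        catalogspath="catalogs",
        collectionspath="collections",
        indicatorspath="indicators",
        outputpath="build",
        vd=False,
        ni=False,
        tn=False,
        collections=[],
    )

    # An empty collection has no item links yet, so the helper adds one placeholder item.
    empty = Collection(
        id="example-collection",
        description="placeholder",
        extent=Extent(
            SpatialExtent([[-180.0, -90.0, 180.0, 90.0]]),
            TemporalExtent([[datetime(2020, 1, 1), None]]),
        ),
    )
    add_single_item_if_collection_empty(empty)
    print(len(list(empty.get_items())))  # expected: 1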