eodash_catalog 0.0.11 (py3-none-any.whl) → 0.0.13 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of eodash_catalog might be problematic. See the package's advisory page on the registry for more details.

@@ -18,6 +18,7 @@ from yaml.loader import SafeLoader
18
18
 
19
19
  from eodash_catalog.endpoints import (
20
20
  handle_collection_only,
21
+ handle_custom_endpoint,
21
22
  handle_GeoDB_endpoint,
22
23
  handle_SH_endpoint,
23
24
  handle_SH_WMS_endpoint,
@@ -29,7 +30,7 @@ from eodash_catalog.stac_handling import (
29
30
  add_base_overlay_info,
30
31
  add_collection_information,
31
32
  add_extra_fields,
32
- get_or_create_collection_and_times,
33
+ get_or_create_collection,
33
34
  )
34
35
  from eodash_catalog.utils import (
35
36
  Options,
@@ -46,23 +47,25 @@ load_dotenv()
46
47
  def process_catalog_file(file_path: str, options: Options):
47
48
  print("Processing catalog:", file_path)
48
49
  with open(file_path) as f:
49
- config: dict = yaml.load(f, Loader=SafeLoader)
50
+ catalog_config: dict = yaml.load(f, Loader=SafeLoader)
50
51
 
51
52
  if len(options.collections) > 0:
52
53
  # create only catalogs containing the passed collections
53
- process_collections = [c for c in config["collections"] if c in options.collections]
54
+ process_collections = [
55
+ c for c in catalog_config["collections"] if c in options.collections
56
+ ]
54
57
  elif (len(options.collections) == 1 and options.collections == "all") or len(
55
58
  options.collections
56
59
  ) == 0:
57
60
  # create full catalog
58
- process_collections = config["collections"]
61
+ process_collections = catalog_config["collections"]
59
62
  if len(process_collections) == 0:
60
63
  print("No applicable collections found for catalog, skipping creation")
61
64
  return
62
65
  catalog = Catalog(
63
- id=config["id"],
64
- description=config["description"],
65
- title=config["title"],
66
+ id=catalog_config["id"],
67
+ description=catalog_config["description"],
68
+ title=catalog_config["title"],
66
69
  catalog_type=CatalogType.RELATIVE_PUBLISHED,
67
70
  )
68
71
  for collection in process_collections:
@@ -70,13 +73,13 @@ def process_catalog_file(file_path: str, options: Options):
70
73
  if os.path.isfile(file_path):
71
74
  # if collection file exists process it as indicator
72
75
  # collection will be added as single collection to indicator
73
- process_indicator_file(config, file_path, catalog, options)
76
+ process_indicator_file(catalog_config, file_path, catalog, options)
74
77
  else:
75
78
  # if not try to see if indicator definition available
76
79
  file_path = f"{options.indicatorspath}/{collection}.yaml"
77
80
  if os.path.isfile(file_path):
78
81
  process_indicator_file(
79
- config,
82
+ catalog_config,
80
83
  f"{options.indicatorspath}/{collection}.yaml",
81
84
  catalog,
82
85
  options,
@@ -91,24 +94,24 @@ def process_catalog_file(file_path: str, options: Options):
91
94
  print("Started creation of collection files")
92
95
  start = time.time()
93
96
  if options.ni:
94
- catalog_self_href = f'{options.outputpath}/{config["id"]}'
97
+ catalog_self_href = f'{options.outputpath}/{catalog_config["id"]}'
95
98
  catalog.normalize_hrefs(catalog_self_href, strategy=strategy)
96
99
  recursive_save(catalog, options.ni)
97
100
  else:
98
101
  # For full catalog save with items this still seems to be faster
99
- catalog_self_href = config.get(
100
- "endpoint", "{}/{}".format(options.outputpath, config["id"])
102
+ catalog_self_href = catalog_config.get(
103
+ "endpoint", "{}/{}".format(options.outputpath, catalog_config["id"])
101
104
  )
102
105
  catalog.normalize_hrefs(catalog_self_href, strategy=strategy)
103
- catalog.save(dest_href="{}/{}".format(options.outputpath, config["id"]))
106
+ catalog.save(dest_href="{}/{}".format(options.outputpath, catalog_config["id"]))
104
107
  end = time.time()
105
- print(f"Catalog {config['id']}: Time consumed in saving: {end - start}")
108
+ print(f"Catalog {catalog_config['id']}: Time consumed in saving: {end - start}")
106
109
 
107
110
  if options.vd:
108
111
  # try to validate catalog if flag was set
109
112
  print(f"Running validation of catalog {file_path}")
110
113
  try:
111
- validate_all(catalog.to_dict(), href=config["endpoint"])
114
+ validate_all(catalog.to_dict(), href=catalog_config["endpoint"])
112
115
  except Exception as e:
113
116
  print(f"Issue validation collection: {e}")
114
117
 
@@ -150,25 +153,27 @@ def extract_indicator_info(parent_collection: Collection):
150
153
  parent_collection.summaries = Summaries(summaries)
151
154
 
152
155
 
153
- def process_indicator_file(config: dict, file_path: str, catalog: Catalog, options: Options):
156
+ def process_indicator_file(
157
+ catalog_config: dict, file_path: str, catalog: Catalog, options: Options
158
+ ):
154
159
  with open(file_path) as f:
155
160
  print("Processing indicator:", file_path)
156
- data: dict = yaml.load(f, Loader=SafeLoader)
157
- parent_indicator, _ = get_or_create_collection_and_times(
158
- catalog, data["Name"], data, config, {}
161
+ indicator_config: dict = yaml.load(f, Loader=SafeLoader)
162
+ parent_indicator = get_or_create_collection(
163
+ catalog, indicator_config["Name"], indicator_config, catalog_config, {}
159
164
  )
160
- if "Collections" in data:
161
- for collection in data["Collections"]:
165
+ if "Collections" in indicator_config:
166
+ for collection in indicator_config["Collections"]:
162
167
  process_collection_file(
163
- config,
168
+ catalog_config,
164
169
  f"{options.collectionspath}/{collection}.yaml",
165
170
  parent_indicator,
166
171
  options,
167
172
  )
168
173
  else:
169
174
  # we assume that collection files can also be loaded directly
170
- process_collection_file(config, file_path, parent_indicator, options)
171
- add_collection_information(config, parent_indicator, data)
175
+ process_collection_file(catalog_config, file_path, parent_indicator, options)
176
+ add_collection_information(catalog_config, parent_indicator, indicator_config)
172
177
  if iter_len_at_least(parent_indicator.get_items(recursive=True), 1):
173
178
  parent_indicator.update_extent_from_items()
174
179
  # Add bbox extents from children
@@ -178,55 +183,79 @@ def process_indicator_file(config: dict, file_path: str, catalog: Catalog, optio
178
183
  # extract collection information and add it to summary indicator level
179
184
  extract_indicator_info(parent_indicator)
180
185
  # add baselayer and overview information to indicator collection
181
- add_base_overlay_info(parent_indicator, config, data)
182
- add_to_catalog(parent_indicator, catalog, {}, data)
186
+ add_base_overlay_info(parent_indicator, catalog_config, indicator_config)
187
+ add_to_catalog(parent_indicator, catalog, {}, indicator_config)
183
188
 
184
189
 
185
190
  def process_collection_file(
186
- config: dict, file_path: str, catalog: Catalog | Collection, options: Options
191
+ catalog_config: dict, file_path: str, catalog: Catalog | Collection, options: Options
187
192
  ):
188
193
  print("Processing collection:", file_path)
189
194
  with open(file_path) as f:
190
- data: dict = yaml.load(f, Loader=SafeLoader)
191
- if "Resources" in data:
192
- for resource in data["Resources"]:
193
- if "EndPoint" in resource:
194
- collection = None
195
- if resource["Name"] == "Sentinel Hub":
196
- collection = handle_SH_endpoint(config, resource, data, catalog, options)
197
- elif resource["Name"] == "Sentinel Hub WMS":
198
- collection = handle_SH_WMS_endpoint(config, resource, data, catalog)
199
- elif resource["Name"] == "GeoDB":
200
- collection = handle_GeoDB_endpoint(config, resource, data, catalog)
201
- elif resource["Name"] == "VEDA":
202
- collection = handle_VEDA_endpoint(config, resource, data, catalog, options)
203
- elif resource["Name"] == "marinedatastore":
204
- collection = handle_WMS_endpoint(config, resource, data, catalog, wmts=True)
205
- elif resource["Name"] == "xcube":
206
- collection = handle_xcube_endpoint(config, resource, data, catalog)
207
- elif resource["Name"] == "WMS":
208
- collection = handle_WMS_endpoint(config, resource, data, catalog)
209
- elif resource["Name"] == "JAXA_WMTS_PALSAR":
210
- # somewhat one off creation of individual WMTS layers as individual items
211
- collection = handle_WMS_endpoint(config, resource, data, catalog, wmts=True)
212
- elif resource["Name"] == "Collection-only":
213
- collection = handle_collection_only(config, resource, data, catalog)
214
- else:
215
- raise ValueError("Type of Resource is not supported")
216
- if collection is not None:
217
- add_single_item_if_collection_empty(collection)
218
- add_to_catalog(collection, catalog, resource, data)
219
- else:
220
- raise Exception("No collection generated")
221
- elif "Subcollections" in data:
195
+ collection_config: dict = yaml.load(f, Loader=SafeLoader)
196
+ if "Resources" in collection_config:
197
+ for resource in collection_config["Resources"]:
198
+ collection = None
199
+ if resource["Name"] == "Sentinel Hub":
200
+ collection = handle_SH_endpoint(
201
+ catalog_config, resource, collection_config, catalog, options
202
+ )
203
+ elif resource["Name"] == "Sentinel Hub WMS":
204
+ collection = handle_SH_WMS_endpoint(
205
+ catalog_config, resource, collection_config, catalog
206
+ )
207
+ elif resource["Name"] == "GeoDB":
208
+ collection = handle_GeoDB_endpoint(
209
+ catalog_config, resource, collection_config, catalog
210
+ )
211
+ elif resource["Name"] == "VEDA":
212
+ collection = handle_VEDA_endpoint(
213
+ catalog_config, resource, collection_config, catalog, options
214
+ )
215
+ elif resource["Name"] == "marinedatastore":
216
+ collection = handle_WMS_endpoint(
217
+ catalog_config, resource, collection_config, catalog, wmts=True
218
+ )
219
+ elif resource["Name"] == "xcube":
220
+ collection = handle_xcube_endpoint(
221
+ catalog_config, resource, collection_config, catalog
222
+ )
223
+ elif resource["Name"] == "WMS":
224
+ collection = handle_WMS_endpoint(
225
+ catalog_config, resource, collection_config, catalog
226
+ )
227
+ elif resource["Name"] == "JAXA_WMTS_PALSAR":
228
+ # somewhat one off creation of individual WMTS layers as individual items
229
+ collection = handle_WMS_endpoint(
230
+ catalog_config, resource, collection_config, catalog, wmts=True
231
+ )
232
+ elif resource["Name"] == "Collection-only":
233
+ collection = handle_collection_only(
234
+ catalog_config, resource, collection_config, catalog
235
+ )
236
+ elif resource["Name"] == "Custom-Endpoint":
237
+ collection = handle_custom_endpoint(
238
+ catalog_config,
239
+ resource,
240
+ collection_config,
241
+ catalog,
242
+ )
243
+ else:
244
+ raise ValueError("Type of Resource is not supported")
245
+ if collection:
246
+ add_single_item_if_collection_empty(collection)
247
+ add_to_catalog(collection, catalog, resource, collection_config)
248
+ else:
249
+ raise Exception(f"No collection was generated for resource {resource}")
250
+ elif "Subcollections" in collection_config:
222
251
  # if no endpoint is specified we check for definition of subcollections
223
- parent_collection, _ = get_or_create_collection_and_times(
224
- catalog, data["Name"], data, config, {}
252
+ parent_collection = get_or_create_collection(
253
+ catalog, collection_config["Name"], collection_config, catalog_config, {}
225
254
  )
226
255
 
227
256
  locations = []
228
257
  countries = []
229
- for sub_coll_def in data["Subcollections"]:
258
+ for sub_coll_def in collection_config["Subcollections"]:
230
259
  # Subcollection has only data on one location which
231
260
  # is defined for the entire collection
232
261
  if "Name" in sub_coll_def and "Point" in sub_coll_def:
@@ -236,7 +265,7 @@ def process_collection_file(
236
265
  else:
237
266
  countries.append(sub_coll_def["Country"])
238
267
  process_collection_file(
239
- config,
268
+ catalog_config,
240
269
  "{}/{}.yaml".format(options.collectionspath, sub_coll_def["Collection"]),
241
270
  parent_collection,
242
271
  options,
@@ -265,7 +294,7 @@ def process_collection_file(
265
294
  # create temp catalog to save collection
266
295
  tmp_catalog = Catalog(id="tmp_catalog", description="temp catalog placeholder")
267
296
  process_collection_file(
268
- config,
297
+ catalog_config,
269
298
  "{}/{}.yaml".format(options.collectionspath, sub_coll_def["Collection"]),
270
299
  tmp_catalog,
271
300
  options,
@@ -283,7 +312,7 @@ def process_collection_file(
283
312
 
284
313
  parent_collection.add_links(links)
285
314
 
286
- add_collection_information(config, parent_collection, data)
315
+ add_collection_information(catalog_config, parent_collection, collection_config)
287
316
  parent_collection.update_extent_from_items()
288
317
  # Add bbox extents from children
289
318
  for c_child in parent_collection.get_children():
@@ -296,10 +325,12 @@ def process_collection_file(
296
325
  "countries": list(set(countries)),
297
326
  }
298
327
  )
299
- add_to_catalog(parent_collection, catalog, {}, data)
328
+ add_to_catalog(parent_collection, catalog, {}, collection_config)
300
329
 
301
330
 
302
- def add_to_catalog(collection: Collection, catalog: Catalog, endpoint: dict, data: dict):
331
+ def add_to_catalog(
332
+ collection: Collection, catalog: Catalog, endpoint: dict, collection_config: dict
333
+ ):
303
334
  # check if already in catalog, if it is do not re-add it
304
335
  # TODO: probably we should add to the catalog only when creating
305
336
  for cat_coll in catalog.get_collections():
@@ -320,19 +351,19 @@ def add_to_catalog(collection: Collection, catalog: Catalog, endpoint: dict, dat
320
351
  elif endpoint:
321
352
  collection.extra_fields["endpointtype"] = endpoint["Name"]
322
353
  link.extra_fields["endpointtype"] = endpoint["Name"]
323
- if "Subtitle" in data:
324
- link.extra_fields["subtitle"] = data["Subtitle"]
354
+ if "Subtitle" in collection_config:
355
+ link.extra_fields["subtitle"] = collection_config["Subtitle"]
325
356
  link.extra_fields["title"] = collection.title
326
- link.extra_fields["code"] = data["EodashIdentifier"]
327
- link.extra_fields["id"] = data["Name"]
328
- if "Themes" in data:
329
- link.extra_fields["themes"] = data["Themes"]
357
+ link.extra_fields["code"] = collection_config["EodashIdentifier"]
358
+ link.extra_fields["id"] = collection_config["Name"]
359
+ if "Themes" in collection_config:
360
+ link.extra_fields["themes"] = collection_config["Themes"]
330
361
  # Check for summaries and bubble up info
331
362
  if collection.summaries.lists:
332
363
  for summary in collection.summaries.lists:
333
364
  link.extra_fields[summary] = collection.summaries.lists[summary]
334
365
 
335
- add_extra_fields(link, data)
366
+ add_extra_fields(link, collection_config)
336
367
  return link
337
368
 
338
369
 
@@ -3,6 +3,8 @@ import os
3
3
  from oauthlib.oauth2 import BackendApplicationClient
4
4
  from requests_oauthlib import OAuth2Session
5
5
 
6
+ SH_TOKEN_URL = "https://services.sentinel-hub.com/oauth/token"
7
+
6
8
 
7
9
  def get_SH_token() -> str:
8
10
  # Your client credentials
@@ -13,7 +15,7 @@ def get_SH_token() -> str:
13
15
  oauth = OAuth2Session(client=client)
14
16
  # Get token for the session
15
17
  token = oauth.fetch_token(
16
- token_url="https://services.sentinel-hub.com/oauth/token",
18
+ token_url=SH_TOKEN_URL,
17
19
  client_secret=client_secret,
18
20
  )
19
21