eotdl 2025.2.10__py3-none-any.whl → 2025.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. eotdl/__init__.py +1 -1
  2. eotdl/access/__init__.py +13 -3
  3. eotdl/access/download.py +47 -14
  4. eotdl/access/search.py +33 -5
  5. eotdl/access/sentinelhub/__init__.py +6 -2
  6. eotdl/access/sentinelhub/client.py +7 -6
  7. eotdl/access/sentinelhub/evalscripts.py +266 -0
  8. eotdl/access/sentinelhub/parameters.py +101 -23
  9. eotdl/access/sentinelhub/utils.py +54 -15
  10. eotdl/cli.py +2 -2
  11. eotdl/commands/datasets.py +28 -31
  12. eotdl/commands/models.py +27 -30
  13. eotdl/commands/stac.py +57 -0
  14. eotdl/curation/__init__.py +0 -8
  15. eotdl/curation/stac/__init__.py +1 -8
  16. eotdl/curation/stac/api.py +58 -0
  17. eotdl/curation/stac/stac.py +31 -341
  18. eotdl/datasets/__init__.py +2 -2
  19. eotdl/datasets/ingest.py +36 -161
  20. eotdl/datasets/retrieve.py +0 -9
  21. eotdl/datasets/stage.py +64 -0
  22. eotdl/files/__init__.py +0 -2
  23. eotdl/files/ingest.bck +178 -0
  24. eotdl/files/ingest.py +237 -166
  25. eotdl/{datasets → files}/metadata.py +16 -17
  26. eotdl/models/__init__.py +1 -1
  27. eotdl/models/ingest.py +35 -158
  28. eotdl/models/stage.py +63 -0
  29. eotdl/repos/APIRepo.py +1 -1
  30. eotdl/repos/DatasetsAPIRepo.py +56 -43
  31. eotdl/repos/FilesAPIRepo.py +260 -167
  32. eotdl/repos/ModelsAPIRepo.py +50 -42
  33. eotdl/repos/STACAPIRepo.py +40 -0
  34. eotdl/repos/__init__.py +1 -0
  35. eotdl/tools/time_utils.py +3 -3
  36. {eotdl-2025.2.10.dist-info → eotdl-2025.4.2.dist-info}/METADATA +1 -1
  37. eotdl-2025.4.2.dist-info/RECORD +66 -0
  38. eotdl/curation/stac/assets.py +0 -110
  39. eotdl/curation/stac/dataframe.py +0 -172
  40. eotdl/curation/stac/dataframe_bck.py +0 -253
  41. eotdl/curation/stac/dataframe_labeling.py +0 -63
  42. eotdl/curation/stac/extensions/__init__.py +0 -23
  43. eotdl/curation/stac/extensions/base.py +0 -30
  44. eotdl/curation/stac/extensions/dem.py +0 -18
  45. eotdl/curation/stac/extensions/eo.py +0 -117
  46. eotdl/curation/stac/extensions/label/__init__.py +0 -7
  47. eotdl/curation/stac/extensions/label/base.py +0 -136
  48. eotdl/curation/stac/extensions/label/image_name_labeler.py +0 -203
  49. eotdl/curation/stac/extensions/label/scaneo.py +0 -219
  50. eotdl/curation/stac/extensions/ml_dataset.py +0 -648
  51. eotdl/curation/stac/extensions/projection.py +0 -44
  52. eotdl/curation/stac/extensions/raster.py +0 -53
  53. eotdl/curation/stac/extensions/sar.py +0 -55
  54. eotdl/curation/stac/extent.py +0 -158
  55. eotdl/curation/stac/parsers.py +0 -61
  56. eotdl/datasets/download.py +0 -104
  57. eotdl/files/list_files.py +0 -13
  58. eotdl/models/metadata.py +0 -43
  59. eotdl-2025.2.10.dist-info/RECORD +0 -81
  60. {eotdl-2025.2.10.dist-info → eotdl-2025.4.2.dist-info}/WHEEL +0 -0
  61. {eotdl-2025.2.10.dist-info → eotdl-2025.4.2.dist-info}/entry_points.txt +0 -0
@@ -2,15 +2,27 @@
2
2
  Utils
3
3
  """
4
4
 
5
- from sentinelhub import DataCollection, MosaickingOrder
6
-
5
+ from sentinelhub import DataCollection, MosaickingOrder, MimeType
7
6
  from .evalscripts import EvalScripts
8
7
 
9
8
 
9
+ class OUTPUT_FORMAT:
10
+ TIFF = MimeType.TIFF
11
+ JPG = MimeType.JPG
12
+ PNG = MimeType.PNG
13
+
14
+
10
15
  class SHParameters:
11
16
  """
12
17
  Sentinel Hub Parameters base class
13
18
  """
19
+
20
+ MAX_CLOUD_COVERAGE: float = None
21
+ FIELDS = None
22
+ MOSAICKING_ORDER = MosaickingOrder.MOST_RECENT
23
+ EVALSCRIPT = None
24
+ OUTPUT_FORMAT = MimeType.TIFF
25
+
14
26
  def __init__(self):
15
27
  pass
16
28
 
@@ -19,8 +31,8 @@ class SHS2L2AParameters(SHParameters):
19
31
  """
20
32
  Sentinel-2-L2A parameters
21
33
  """
34
+
22
35
  DATA_COLLECTION = DataCollection.SENTINEL2_L2A
23
- RESOLUTION = 10
24
36
  MOSAICKING_ORDER = MosaickingOrder.LEAST_CC
25
37
  EVALSCRIPT = EvalScripts.SENTINEL_2_L2A
26
38
  FIELDS = {
@@ -28,14 +40,17 @@ class SHS2L2AParameters(SHParameters):
28
40
  "exclude": [],
29
41
  }
30
42
  FILTER = None
43
+ RESOLUTION = 10
44
+ BASE_URL = "https://services.sentinel-hub.com"
45
+ CLOUD_COVERAGE = True
31
46
 
32
47
 
33
48
  class SHS2L1CParameters(SHParameters):
34
49
  """
35
50
  Sentinel-2-L1C parameters
36
51
  """
52
+
37
53
  DATA_COLLECTION = DataCollection.SENTINEL2_L1C
38
- RESOLUTION = 10
39
54
  MOSAICKING_ORDER = MosaickingOrder.LEAST_CC
40
55
  EVALSCRIPT = EvalScripts.SENTINEL_2_L1C
41
56
  FIELDS = {
@@ -43,47 +58,110 @@ class SHS2L1CParameters(SHParameters):
43
58
  "exclude": [],
44
59
  }
45
60
  FILTER = None
61
+ RESOLUTION = 10
62
+ BASE_URL = "https://services.sentinel-hub.com"
63
+ CLOUD_COVERAGE = True
64
+
46
65
 
47
66
  class SHS1Parameters(SHParameters):
48
67
  """
49
68
  Sentinel-1 parameters
50
69
  """
70
+
51
71
  DATA_COLLECTION = DataCollection.SENTINEL1
52
- RESOLUTION = 3
53
72
  EVALSCRIPT = EvalScripts.SENTINEL_1
54
- MOSAICKING_ORDER = None
73
+ MOSAICKING_ORDER = MosaickingOrder.MOST_RECENT
55
74
  FIELDS = {
56
- "include": [
57
- "id",
58
- "properties.datetime",
59
- "sar:instrument_mode",
60
- "s1:polarization",
61
- "sat:orbit_state",
62
- "s1:resolution",
63
- "s1:timeliness",
64
- ],
65
- "exclude": [],
66
- }
75
+ "include": [
76
+ "id",
77
+ "properties.datetime",
78
+ "sar:instrument_mode",
79
+ "s1:polarization",
80
+ "sat:orbit_state",
81
+ "s1:resolution",
82
+ "s1:timeliness",
83
+ ],
84
+ "exclude": [],
85
+ }
67
86
  FILTER = None
87
+ RESOLUTION = 3
88
+ BASE_URL = "https://services.sentinel-hub.com"
89
+ CLOUD_COVERAGE = False
68
90
 
69
91
 
70
92
  class SHDEMParameters(SHParameters):
71
93
  """
72
94
  Copernicus DEM parameters
73
95
  """
96
+
74
97
  DATA_COLLECTION = DataCollection.DEM_COPERNICUS_30
75
- RESOLUTION = 3
76
98
  MOSAICKING_ORDER = None
77
99
  EVALSCRIPT = EvalScripts.DEM
100
+ FIELDS = None
101
+ FILTER = None
102
+ RESOLUTION = 3
103
+ BASE_URL = "https://services.sentinel-hub.com"
104
+ CLOUD_COVERAGE = False
105
+
106
+
107
+ class SHHarmonizedLandsatSentinel(SHParameters):
108
+ """
109
+ Harmonized Landsat Sentinel parameters
110
+ """
111
+
112
+ DATA_COLLECTION = DataCollection.HARMONIZED_LANDSAT_SENTINEL
113
+ MOSAICKING_ORDER = MosaickingOrder.LEAST_CC
114
+ EVALSCRIPT = EvalScripts.HLS_TRUE_COLOR
115
+ FIELDS = None
78
116
  FILTER = None
117
+ RESOLUTION = 10
118
+ BASE_URL = "https://services-uswest2.sentinel-hub.com"
119
+ CLOUD_COVERAGE = True
120
+
121
+
122
+ class SHLandsatOTL2(SHParameters):
123
+ """
124
+ Landsat 8-9 Collection 2 imagery processed to level 2
125
+ """
126
+
127
+ DATA_COLLECTION = DataCollection.LANDSAT_OT_L2
128
+ MOSAICKING_ORDER = MosaickingOrder.LEAST_CC
129
+ EVALSCRIPT = EvalScripts.LANDSAT_OT_L2_TRUE_COLOR
79
130
  FIELDS = None
131
+ FILTER = None
132
+ RESOLUTION = 10
133
+ BASE_URL = "https://services-uswest2.sentinel-hub.com"
134
+ CLOUD_COVERAGE = True
135
+
80
136
 
137
+ class DATA_COLLECTION_ID:
138
+ SENTINEL_1_GRD = DataCollection.SENTINEL1.api_id
139
+ SENTINEL_2_L1C = DataCollection.SENTINEL2_L1C.api_id
140
+ SENTINEL_2_L2A = DataCollection.SENTINEL2_L2A.api_id
141
+ DEM = DataCollection.DEM_COPERNICUS_30.api_id
142
+ HLS = DataCollection.HARMONIZED_LANDSAT_SENTINEL.api_id
143
+ LANDSAT_OT_L2 = DataCollection.LANDSAT_OT_L2.api_id
81
144
 
82
- SUPPORTED_SENSORS = ("sentinel-1-grd", "sentinel-2-l1c", "sentinel-2-l2a", "dem")
145
+
146
+ SUPPORTED_COLLECTION_IDS = [
147
+ value
148
+ for name, value in DATA_COLLECTION_ID.__dict__.items()
149
+ if not name.startswith("__")
150
+ ]
83
151
 
84
152
  SH_PARAMETERS_DICT = {
85
- "sentinel-1-grd": SHS1Parameters,
86
- "sentinel-2-l1c": SHS2L1CParameters,
87
- "sentinel-2-l2a": SHS2L2AParameters,
88
- "dem": SHDEMParameters,
153
+ DATA_COLLECTION_ID.SENTINEL_1_GRD: SHS1Parameters,
154
+ DATA_COLLECTION_ID.SENTINEL_2_L1C: SHS2L1CParameters,
155
+ DATA_COLLECTION_ID.SENTINEL_2_L2A: SHS2L2AParameters,
156
+ DATA_COLLECTION_ID.DEM: SHDEMParameters,
157
+ DATA_COLLECTION_ID.HLS: SHHarmonizedLandsatSentinel,
158
+ DATA_COLLECTION_ID.LANDSAT_OT_L2: SHLandsatOTL2,
89
159
  }
160
+
161
+
162
+ def get_default_parameters(collection_id: str) -> SHParameters:
163
+ return SH_PARAMETERS_DICT[collection_id]()
164
+
165
+
166
+ def supports_cloud_coverage(collection_id: str):
167
+ return SH_PARAMETERS_DICT[collection_id]().CLOUD_COVERAGE
@@ -5,22 +5,23 @@ Utils for Sentinel Hub access
5
5
  import json
6
6
 
7
7
  from os import makedirs
8
- from datetime import datetime
9
- from typing import Union, Optional
8
+ from datetime import datetime, timedelta
9
+ from typing import Union, Optional, Iterable, List
10
10
  from glob import glob
11
11
  from shutil import copyfile, rmtree
12
12
 
13
- from .parameters import SUPPORTED_SENSORS
13
+ from .parameters import SUPPORTED_COLLECTION_IDS, SHParameters, OUTPUT_FORMAT
14
14
  from ...tools.geo_utils import is_bounding_box, get_image_bbox
15
15
  from ...tools.time_utils import is_time_interval, get_day_between
16
16
 
17
17
 
18
18
  def evaluate_sentinel_parameters(
19
- sensor: str,
20
19
  time_interval: Union[str, datetime],
21
20
  bounding_box: list,
21
+ collection_id: Optional[str] = None,
22
22
  output: Optional[str] = None,
23
23
  output_needed: Optional[bool] = True,
24
+ parameters: Optional[SHParameters] = None,
24
25
  ) -> None:
25
26
  """
26
27
  Evaluate parameters for Sentinel Hub access
@@ -28,10 +29,20 @@ def evaluate_sentinel_parameters(
28
29
  if output_needed:
29
30
  if not output:
30
31
  raise ValueError("Output path must be specified.")
31
- if sensor not in SUPPORTED_SENSORS:
32
- raise ValueError(
33
- f"Sensor {sensor} is not supported. Supported sensors are: {SUPPORTED_SENSORS}"
34
- )
32
+ if parameters and not parameters.OUTPUT_FORMAT:
33
+ raise ValueError("Output format must be specified.")
34
+ if collection_id:
35
+ if collection_id not in SUPPORTED_COLLECTION_IDS:
36
+ raise ValueError(
37
+ f"Collection id {collection_id} is not supported. Supported collections ids are: {SUPPORTED_COLLECTION_IDS}"
38
+ )
39
+ else:
40
+ if not (
41
+ parameters
42
+ and hasattr(parameters, "DATA_COLLECTION")
43
+ and hasattr(parameters.DATA_COLLECTION, "api_id")
44
+ ):
45
+ raise ValueError(f"Data collection is not defined properly.")
35
46
  if not time_interval:
36
47
  raise ValueError("Time interval must be specified.")
37
48
  else:
@@ -46,24 +57,34 @@ def evaluate_sentinel_parameters(
46
57
  raise ValueError(
47
58
  "Bounding box must be a list or tuple with four elements in format (lon_min, lat_min, lon_max, lat_max)."
48
59
  )
60
+ if parameters and parameters.MAX_CLOUD_COVERAGE:
61
+ if not isinstance(parameters.MAX_CLOUD_COVERAGE, (int, float)) or (
62
+ parameters.MAX_CLOUD_COVERAGE < 0 or parameters.MAX_CLOUD_COVERAGE > 100
63
+ ):
64
+ raise ValueError("Max cloud coverage must be a number between 0 and 100.")
49
65
 
50
66
 
51
67
  def imagery_from_tmp_to_dir(
52
68
  output_dir: str,
69
+ bounding_box: List[Union[int, float]],
53
70
  tmp_dir: Optional[str],
54
71
  name: Optional[str] = None,
55
72
  bulk: Optional[bool] = False,
73
+ output_format: Optional[str] = OUTPUT_FORMAT.TIFF,
56
74
  ) -> None:
57
75
  """
58
76
  Copy imagery from tmp to output dir
59
77
  """
60
- downloaded_files = glob(f"{tmp_dir}/**/response.tiff")
78
+ format = output_format
79
+ downloaded_files = glob(f"{tmp_dir}/**/response." + format)
80
+
61
81
  if len(downloaded_files) == 0:
62
82
  return
63
83
  makedirs(output_dir, exist_ok=True)
64
84
  for downloaded_file in downloaded_files:
65
- request_json = downloaded_file.replace("response.tiff", "request.json")
66
- metadata = generate_raster_metadata(downloaded_file, request_json)
85
+ request_json = downloaded_file.replace("response." + format, "request.json")
86
+ metadata = generate_raster_metadata(request_json, bounding_box)
87
+
67
88
  if name and not bulk:
68
89
  output_filename = name
69
90
  elif name and bulk:
@@ -73,17 +94,16 @@ def imagery_from_tmp_to_dir(
73
94
  output_filename = f"{metadata['type']}_{metadata['acquisition-date']}"
74
95
  else:
75
96
  output_filename = metadata["type"]
76
- copyfile(downloaded_file, f"{output_dir}/{output_filename}.tif")
97
+ copyfile(downloaded_file, f"{output_dir}/{output_filename}." + format)
77
98
  with open(f"{output_dir}/{output_filename}.json", "w", encoding="utf-8") as f:
78
99
  json.dump(metadata, f)
79
100
  rmtree(tmp_dir)
80
101
 
81
102
 
82
- def generate_raster_metadata(raster: str, request_json: str) -> None:
103
+ def generate_raster_metadata(request_json: str, bounding_box) -> None:
83
104
  """
84
105
  Generate metadata for raster
85
106
  """
86
- bbox = get_image_bbox(raster)
87
107
  with open(request_json, "r", encoding="utf-8") as f:
88
108
  json_content = json.load(f)
89
109
 
@@ -98,8 +118,27 @@ def generate_raster_metadata(raster: str, request_json: str) -> None:
98
118
 
99
119
  metadata = {
100
120
  "acquisition-date": acquisition_date,
101
- "bounding-box": bbox,
121
+ "bounding-box": bounding_box,
102
122
  "type": sensor_type,
103
123
  }
104
124
 
105
125
  return metadata
126
+
127
+
128
+ def filter_times(
129
+ timestamps: Iterable[datetime], time_difference: timedelta
130
+ ) -> list[datetime]:
131
+ """
132
+ Filters out timestamps within time_difference, preserving only the oldest timestamp.
133
+ """
134
+ timestamps = sorted(set(timestamps))
135
+
136
+ filtered_timestamps: list[datetime] = []
137
+ for current_timestamp in timestamps:
138
+ if (
139
+ not filtered_timestamps
140
+ or current_timestamp - filtered_timestamps[-1] > time_difference
141
+ ):
142
+ filtered_timestamps.append(current_timestamp)
143
+
144
+ return filtered_timestamps
eotdl/cli.py CHANGED
@@ -1,7 +1,7 @@
1
1
  import typer
2
2
  import os
3
3
 
4
- from .commands import auth, datasets, models
4
+ from .commands import auth, datasets, models, stac
5
5
  from .repos import APIRepo
6
6
  from . import __version__
7
7
 
@@ -10,7 +10,7 @@ app = typer.Typer(help="Welcome to EOTDL. Learn more at https://www.eotdl.com/")
10
10
  app.add_typer(auth.app, name="auth")
11
11
  app.add_typer(datasets.app, name="datasets")
12
12
  app.add_typer(models.app, name="models")
13
-
13
+ app.add_typer(stac.app, name="stac")
14
14
 
15
15
  @app.command()
16
16
  def version():
@@ -4,39 +4,11 @@ from pathlib import Path
4
4
  from ..datasets import (
5
5
  retrieve_datasets,
6
6
  ingest_dataset,
7
- download_dataset,
7
+ stage_dataset,
8
8
  )
9
9
 
10
10
  app = typer.Typer(help="Explore, ingest and download training datasets.")
11
11
 
12
-
13
- @app.command()
14
- def list(
15
- name: str = typer.Option(
16
- None, "--name", "-n", help="Filter the returned datasets by name"
17
- ),
18
- limit: int = typer.Option(
19
- None, "--limit", "-l", help="Limit the number of returned results"
20
- ),
21
- ):
22
- """
23
- Retrieve a list with all the datasets in the EOTDL.
24
-
25
- If using --name, it will filter the results by name. If no name is provided, it will return all the datasets.\n
26
- If using --limit, it will limit the number of results. If no limit is provided, it will return all the datasets.
27
- \n\n
28
- Examples\n
29
- --------\n
30
- $ eotdl datasets list\n
31
- $ eotdl datasets list --name YourModel --limit 5
32
- """
33
- try:
34
- datasets = retrieve_datasets(name, limit)
35
- typer.echo(datasets)
36
- except Exception as e:
37
- typer.echo(e)
38
-
39
-
40
12
  @app.command()
41
13
  def ingest(
42
14
  path: Path = typer.Option(
@@ -62,7 +34,7 @@ def ingest(
62
34
  ),
63
35
  ):
64
36
  """
65
- Ingest a dataset to the EOTDL.
37
+ Ingest a dataset to the EOTDL.asdf
66
38
 
67
39
  This command ingests the dataset to the EOTDL. The dataset must be a folder with the dataset files,
68
40
  and at least a README.md file (and a catalog.json file for Q1+). If these files are missing, the ingestion
@@ -90,7 +62,32 @@ def ingest(
90
62
  ingest_dataset(path, verbose, typer.echo, foce_metadata_update, sync_metadata)
91
63
  except Exception as e:
92
64
  typer.echo(e)
65
+
66
+ @app.command()
67
+ def list(
68
+ name: str = typer.Option(
69
+ None, "--name", "-n", help="Filter the returned datasets by name"
70
+ ),
71
+ limit: int = typer.Option(
72
+ None, "--limit", "-l", help="Limit the number of returned results"
73
+ ),
74
+ ):
75
+ """
76
+ Retrieve a list with all the datasets in the EOTDL.
93
77
 
78
+ If using --name, it will filter the results by name. If no name is provided, it will return all the datasets.\n
79
+ If using --limit, it will limit the number of results. If no limit is provided, it will return all the datasets.
80
+ \n\n
81
+ Examples\n
82
+ --------\n
83
+ $ eotdl datasets list\n
84
+ $ eotdl datasets list --name YourModel --limit 5
85
+ """
86
+ try:
87
+ datasets = retrieve_datasets(name, limit)
88
+ typer.echo(datasets)
89
+ except Exception as e:
90
+ typer.echo(e)
94
91
 
95
92
  @app.command()
96
93
  def get(
@@ -130,7 +127,7 @@ def get(
130
127
  $ eotdl dataset get YourDataset --path /path/to/download --file dataset.zip --version 1 --assets True --force True --verbose True
131
128
  """
132
129
  try:
133
- dst_path = download_dataset(
130
+ dst_path = stage_dataset(
134
131
  dataset,
135
132
  version,
136
133
  path,
eotdl/commands/models.py CHANGED
@@ -4,39 +4,11 @@ from pathlib import Path
4
4
  from ..models import (
5
5
  retrieve_models,
6
6
  ingest_model,
7
- download_model,
7
+ stage_model,
8
8
  )
9
9
 
10
10
  app = typer.Typer(help="Explore, ingest and download ML models.")
11
11
 
12
-
13
- @app.command()
14
- def list(
15
- name: str = typer.Option(
16
- None, "--name", "-n", help="Filter the returned models by name"
17
- ),
18
- limit: int = typer.Option(
19
- None, "--limit", "-l", help="Limit the number of returned results"
20
- ),
21
- ):
22
- """
23
- Retrieve a list with all the models in the EOTDL.
24
-
25
- If using --name, it will filter the results by name. If no name is provided, it will return all the models.\n
26
- If using --limit, it will limit the number of results. If no limit is provided, it will return all the models.
27
- \n\n
28
- Examples\n
29
- --------\n
30
- $ eotdl models list\n
31
- $ eotdl models list --name YourModel --limit 5
32
- """
33
- try:
34
- models = retrieve_models(name, limit)
35
- typer.echo(models)
36
- except Exception as e:
37
- typer.echo(e)
38
-
39
-
40
12
  @app.command()
41
13
  def ingest(
42
14
  path: Path = typer.Option(..., "--path", "-p", help="Path to the model to ingest"),
@@ -88,6 +60,31 @@ def ingest(
88
60
  except Exception as e:
89
61
  typer.echo(e)
90
62
 
63
+ @app.command()
64
+ def list(
65
+ name: str = typer.Option(
66
+ None, "--name", "-n", help="Filter the returned models by name"
67
+ ),
68
+ limit: int = typer.Option(
69
+ None, "--limit", "-l", help="Limit the number of returned results"
70
+ ),
71
+ ):
72
+ """
73
+ Retrieve a list with all the models in the EOTDL.
74
+
75
+ If using --name, it will filter the results by name. If no name is provided, it will return all the models.\n
76
+ If using --limit, it will limit the number of results. If no limit is provided, it will return all the models.
77
+ \n\n
78
+ Examples\n
79
+ --------\n
80
+ $ eotdl models list\n
81
+ $ eotdl models list --name YourModel --limit 5
82
+ """
83
+ try:
84
+ models = retrieve_models(name, limit)
85
+ typer.echo(models)
86
+ except Exception as e:
87
+ typer.echo(e)
91
88
 
92
89
  @app.command()
93
90
  def get(
@@ -127,7 +124,7 @@ def get(
127
124
  $ eotdl models get YourModel --path /path/to/download --file model.zip --version 1 --assets True --force True --verbose True
128
125
  """
129
126
  try:
130
- dst_path = download_model(
127
+ dst_path = stage_model(
131
128
  model, version, path, typer.echo, assets, force, verbose
132
129
  )
133
130
  typer.echo(f"Data available at {dst_path}")
eotdl/commands/stac.py ADDED
@@ -0,0 +1,57 @@
1
+ import typer
2
+ from typing import Optional
3
+
4
+ from ..curation.stac.api import api_status, search_stac_columns, retrieve_stac_collections, retrieve_stac_collection, retrieve_stac_items, retrieve_stac_item, search_stac_items
5
+
6
+ app = typer.Typer(help="EOTDL STAC API")
7
+
8
+ @app.command()
9
+ def status():
10
+ try:
11
+ data = api_status()
12
+ typer.echo(data)
13
+ except Exception as e:
14
+ typer.echo(e)
15
+ raise typer.Abort()
16
+
17
+ @app.command()
18
+ def collections():
19
+ try:
20
+ data = retrieve_stac_collections()
21
+ typer.echo(data)
22
+ except Exception as e:
23
+ typer.echo(e)
24
+ raise typer.Abort()
25
+
26
+ @app.command()
27
+ def collection(collection_id: str):
28
+ try:
29
+ data = retrieve_stac_collection(collection_id)
30
+ typer.echo(data)
31
+ except Exception as e:
32
+ typer.echo(e)
33
+ raise typer.Abort()
34
+
35
+ @app.command()
36
+ def items(collection_id: str):
37
+ try:
38
+ data = retrieve_stac_items(collection_id)
39
+ typer.echo(data)
40
+ except Exception as e:
41
+ typer.echo(e)
42
+
43
+ @app.command()
44
+ def item(collection_id: str, item_id: str):
45
+ try:
46
+ data = retrieve_stac_item(collection_id, item_id)
47
+ typer.echo(data)
48
+ except Exception as e:
49
+ typer.echo(e)
50
+
51
+ @app.command()
52
+ def search(collection_id: str, query: Optional[str] = None):
53
+ try:
54
+ data = search_stac_items(collection_id, query)
55
+ typer.echo(data)
56
+ except Exception as e:
57
+ typer.echo(e)
@@ -1,8 +0,0 @@
1
- """
2
- Curation module
3
- """
4
-
5
- from .stac.dataframe import STACDataFrame # , read_stac
6
- from .stac.stac import STACGenerator
7
- from .stac.parsers import STACIdParser, StructuredParser, UnestructuredParser
8
- from .stac.dataframe_labeling import UnlabeledStrategy, LabeledStrategy
@@ -1,8 +1 @@
1
- """
2
- STAC module
3
- """
4
-
5
- # from .stac import STACGenerator
6
- # from .utils import format_time_acquired
7
- # from .parsers import STACIdParser, StructuredParser, UnestructuredParser
8
- from .dataframe import STACDataFrame, read_stac
1
+ from .stac import create_stac_catalog
@@ -0,0 +1,58 @@
1
+ import json
2
+
3
+ from ...repos import STACAPIRepo
4
+
5
+ def api_status():
6
+ repo = STACAPIRepo()
7
+ data, error = repo.status()
8
+ if error:
9
+ raise Exception(error)
10
+ return data
11
+
12
+ def retrieve_stac_collections():
13
+ repo = STACAPIRepo()
14
+ data, error = repo.collections()
15
+ if error:
16
+ raise Exception(error)
17
+ return data
18
+
19
+ def retrieve_stac_collection(collection_id):
20
+ repo = STACAPIRepo()
21
+ data, error = repo.collection(collection_id)
22
+ if error:
23
+ raise Exception(error)
24
+ return data
25
+
26
+ def retrieve_stac_items(collection_id):
27
+ repo = STACAPIRepo()
28
+ data, error = repo.items(collection_id)
29
+ if error:
30
+ raise Exception(error)
31
+ return data
32
+
33
+ def retrieve_stac_item(collection_id, item_id):
34
+ repo = STACAPIRepo()
35
+ data, error = repo.item(collection_id, item_id)
36
+ if error:
37
+ raise Exception(error)
38
+ return data
39
+
40
+ def search_stac_items(collection_id, query = None):
41
+ repo = STACAPIRepo()
42
+ if query is None:
43
+ data, error = repo.search_columns(collection_id)
44
+ if error:
45
+ raise Exception(error)
46
+ return data
47
+ data, error = repo.search(collection_id, str(query))
48
+ if error:
49
+ raise Exception(error)
50
+ return json.loads(data)
51
+
52
+
53
+ def search_stac_columns(collection_id):
54
+ repo = STACAPIRepo()
55
+ data, error = repo.search_columns(collection_id)
56
+ if error:
57
+ raise Exception(error)
58
+ return data