eotdl 2024.10.7__py3-none-any.whl → 2025.3.25__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. eotdl/__init__.py +1 -1
  2. eotdl/access/search.py +0 -2
  3. eotdl/access/sentinelhub/parameters.py +1 -1
  4. eotdl/cli.py +2 -2
  5. eotdl/commands/datasets.py +28 -31
  6. eotdl/commands/models.py +27 -30
  7. eotdl/commands/stac.py +57 -0
  8. eotdl/curation/__init__.py +0 -8
  9. eotdl/curation/stac/__init__.py +1 -8
  10. eotdl/curation/stac/api.py +58 -0
  11. eotdl/curation/stac/stac.py +31 -341
  12. eotdl/datasets/__init__.py +1 -1
  13. eotdl/datasets/ingest.py +28 -159
  14. eotdl/datasets/retrieve.py +0 -9
  15. eotdl/datasets/stage.py +64 -0
  16. eotdl/files/__init__.py +0 -2
  17. eotdl/files/ingest.bck +178 -0
  18. eotdl/files/ingest.py +229 -164
  19. eotdl/{datasets → files}/metadata.py +16 -17
  20. eotdl/models/__init__.py +1 -1
  21. eotdl/models/ingest.py +28 -159
  22. eotdl/models/stage.py +60 -0
  23. eotdl/repos/APIRepo.py +1 -1
  24. eotdl/repos/DatasetsAPIRepo.py +56 -43
  25. eotdl/repos/FilesAPIRepo.py +260 -167
  26. eotdl/repos/STACAPIRepo.py +40 -0
  27. eotdl/repos/__init__.py +1 -0
  28. eotdl/tools/geo_utils.py +7 -2
  29. {eotdl-2024.10.7.dist-info → eotdl-2025.3.25.dist-info}/METADATA +5 -4
  30. eotdl-2025.3.25.dist-info/RECORD +65 -0
  31. {eotdl-2024.10.7.dist-info → eotdl-2025.3.25.dist-info}/WHEEL +1 -1
  32. eotdl/curation/stac/assets.py +0 -110
  33. eotdl/curation/stac/dataframe.py +0 -172
  34. eotdl/curation/stac/dataframe_bck.py +0 -253
  35. eotdl/curation/stac/dataframe_labeling.py +0 -63
  36. eotdl/curation/stac/extensions/__init__.py +0 -23
  37. eotdl/curation/stac/extensions/base.py +0 -30
  38. eotdl/curation/stac/extensions/dem.py +0 -18
  39. eotdl/curation/stac/extensions/eo.py +0 -117
  40. eotdl/curation/stac/extensions/label/__init__.py +0 -7
  41. eotdl/curation/stac/extensions/label/base.py +0 -136
  42. eotdl/curation/stac/extensions/label/image_name_labeler.py +0 -203
  43. eotdl/curation/stac/extensions/label/scaneo.py +0 -219
  44. eotdl/curation/stac/extensions/ml_dataset.py +0 -648
  45. eotdl/curation/stac/extensions/projection.py +0 -44
  46. eotdl/curation/stac/extensions/raster.py +0 -53
  47. eotdl/curation/stac/extensions/sar.py +0 -55
  48. eotdl/curation/stac/extent.py +0 -158
  49. eotdl/curation/stac/parsers.py +0 -61
  50. eotdl/datasets/download.py +0 -104
  51. eotdl/files/list_files.py +0 -13
  52. eotdl/models/download.py +0 -101
  53. eotdl/models/metadata.py +0 -43
  54. eotdl/wrappers/utils.py +0 -35
  55. eotdl-2024.10.7.dist-info/RECORD +0 -82
  56. {eotdl-2024.10.7.dist-info → eotdl-2025.3.25.dist-info}/entry_points.txt +0 -0
@@ -1,110 +0,0 @@
1
- """
2
- Module for STAC Asset Generators
3
- """
4
-
5
- from os import remove, listdir
6
- from os.path import dirname, join, basename, abspath, basename
7
-
8
- import pandas as pd
9
- import rasterio
10
- import pystac
11
-
12
- from ...tools.metadata import remove_raster_metadata
13
-
14
-
15
- MEDIA_TYPES_DICT = {
16
- "tif": pystac.MediaType.GEOTIFF,
17
- "tiff": pystac.MediaType.GEOTIFF,
18
- "png": pystac.MediaType.PNG,
19
- "jpg": pystac.MediaType.JPEG,
20
- "jpeg": pystac.MediaType.JPEG,
21
- }
22
-
23
-
24
- class STACAssetGenerator:
25
- """
26
- Standard STAC Asset Generator
27
- """
28
-
29
- type = "None"
30
-
31
- def __init__(self):
32
- pass
33
-
34
- @classmethod
35
- def extract_assets(cls, obj_info: pd.DataFrame):
36
- """
37
- Generate a single asset from the raster file
38
-
39
- :param raster_path: path to the raster file
40
- """
41
- # If there is no bands, create a single band asset from the file, assuming thats a singleband raster
42
- raster_path = obj_info["image"].values[0]
43
- title = basename(raster_path).split(".")[0]
44
- # Get the file extension
45
- raster_format = raster_path.split(".")[-1]
46
- asset = pystac.Asset(
47
- href=abspath(raster_path),
48
- title=title,
49
- media_type=MEDIA_TYPES_DICT[raster_format],
50
- roles=["data"],
51
- )
52
-
53
- return [asset]
54
-
55
-
56
- class BandsAssetGenerator(STACAssetGenerator):
57
- """
58
- Bands STAC Asset Generator
59
- """
60
-
61
- type = "Bands"
62
-
63
- def __init__(self) -> None:
64
- super().__init__()
65
-
66
- def extract_assets(self, obj_info: pd.DataFrame):
67
- """
68
- Extract the assets from the raster file from the bands column
69
-
70
- :param raster_path: path to the raster file
71
- """
72
- asset_list = []
73
- # File path
74
- raster_path = obj_info["image"].values[0]
75
- # Bands
76
- bands = obj_info["bands"].values
77
- bands = bands[0] if bands else None
78
-
79
- if bands:
80
- with rasterio.open(raster_path, "r") as raster:
81
- raster_name = basename(raster_path).split(".")[0]
82
- if isinstance(bands, str):
83
- bands = [bands]
84
- for band in bands:
85
- i = bands.index(band)
86
- raster_format = raster_path.split(".")[
87
- -1
88
- ] # Will be used later to save the bands files
89
- try:
90
- single_band = raster.read(i + 1)
91
- except IndexError:
92
- single_band = raster.read(1)
93
- band_name = f"{raster_name}_{band}.{raster_format}"
94
- output_band = join(dirname(raster_path), band_name)
95
- # Copy the metadata
96
- metadata = raster.meta.copy()
97
- metadata.update({"count": 1})
98
- # Write the band to the output folder
99
- with rasterio.open(output_band, "w", **metadata) as dest:
100
- dest.write(single_band, 1)
101
- # Instantiate pystac asset and append it to the list
102
- asset_list.append(
103
- pystac.Asset(
104
- href=output_band,
105
- title=band,
106
- media_type=MEDIA_TYPES_DICT[raster_format],
107
- )
108
- )
109
-
110
- return asset_list
@@ -1,172 +0,0 @@
1
- """
2
- Module for the STAC dataframe
3
- """
4
-
5
- import json
6
-
7
- from os.path import join
8
- from os import makedirs
9
- from typing import Union, Optional
10
- from math import isnan
11
- from pathlib import Path
12
-
13
- import pandas as pd
14
- import geopandas as gpd
15
- import pystac
16
- from geomet import wkt
17
-
18
- from ...tools import convert_df_geom_to_shape, get_all_children
19
-
20
-
21
- class STACDataFrame(gpd.GeoDataFrame):
22
- """
23
- STACDataFrame class
24
- """
25
-
26
- def __init__(self, *args, **kwargs):
27
- super().__init__(*args, **kwargs)
28
-
29
- @classmethod
30
- def from_stac_file(cls, stac_file: pystac.STACObject):
31
- """
32
- Create a STACDataFrame from a STAC file
33
-
34
- :param stac_file: STAC file
35
- """
36
- return read_stac(stac_file)
37
-
38
- def to_stac(self, path):
39
- """
40
- Create a STAC catalog and children from a STACDataFrame
41
- """
42
- df = self.copy()
43
-
44
- if "id" in df.columns and "stac_id" in df.columns:
45
- id_column = "stac_id"
46
- stac_id_exists = True
47
- else:
48
- id_column = "id"
49
- stac_id_exists = False
50
-
51
- # First, create the catalog and its folder, if exists
52
- catalog_df = df[df["type"] == "Catalog"]
53
-
54
- if catalog_df.empty:
55
- makedirs(path, exist_ok=True)
56
- else:
57
- for _, row in catalog_df.iterrows():
58
- root_output_folder = path + "/" + row[id_column]
59
- makedirs(root_output_folder, exist_ok=True)
60
- row_json = row.to_dict()
61
-
62
- # Curate the json row
63
- row_json = self.curate_json_row(row_json, stac_id_exists)
64
-
65
- with open(
66
- join(root_output_folder, "catalog.json"), "w", encoding="utf-8"
67
- ) as f:
68
- json.dump(row_json, f)
69
-
70
- # Second, create the collections and their folders, if exist
71
- collections = {}
72
- collections_df = df[df["type"] == "Collection"]
73
- for _, row in collections_df.iterrows():
74
- stac_output_folder = join(root_output_folder, row[id_column])
75
- collections[row[id_column]] = stac_output_folder
76
- makedirs(stac_output_folder, exist_ok=True)
77
- row_json = row.to_dict()
78
-
79
- # Curate the json row
80
- row_json = self.curate_json_row(row_json, stac_id_exists)
81
-
82
- with open(
83
- join(stac_output_folder, "collection.json"), "w", encoding="utf-8"
84
- ) as f:
85
- json.dump(row_json, f)
86
-
87
- # Then, create the items and their folders, if exist
88
- features_df = df[df["type"] == "Feature"]
89
- for _, row in features_df.iterrows():
90
- collection = row["collection"]
91
- stac_output_folder = join(collections[collection], row[id_column])
92
-
93
- # Convert the geometry from WKT back to geojson
94
- row["geometry"] = row["geometry"].wkt
95
- row["geometry"] = wkt.loads(row["geometry"])
96
- makedirs(stac_output_folder, exist_ok=True)
97
- row_json = row.to_dict()
98
-
99
- # Curate the json row
100
- row_json = self.curate_json_row(row_json, stac_id_exists)
101
-
102
- with open(
103
- join(stac_output_folder, f'{row_json["id"]}.json'),
104
- "w",
105
- encoding="utf-8",
106
- ) as f:
107
- json.dump(row_json, f)
108
-
109
- def curate_json_row(self, row: dict, stac_id_exists: bool) -> dict:
110
- """
111
- Curate the json row of a STACDataFrame, in order to generate a valid STAC file
112
-
113
- :param row: row of a STACDataFrame
114
- :param stac_id_exists: if the stac_id column exists
115
- """
116
- keys_to_remove = []
117
-
118
- # Remove the created_at and modified_at columns, if the STACDataFrame comes from GeoDB
119
- for i in "created_at", "modified_at":
120
- if i in row.keys():
121
- keys_to_remove.append(i)
122
-
123
- # Rename the stac_id column to id, to avoid conflicts with the id column
124
- if stac_id_exists:
125
- row["id"] = row["stac_id"]
126
- del row["stac_id"]
127
-
128
- # Remove the NaN values and empty strings
129
- for k, v in row.items():
130
- if (isinstance(v, float) and isnan(v)) or v == "" or not v:
131
- keys_to_remove.append(k)
132
-
133
- for key in keys_to_remove:
134
- if key in row.keys():
135
- del row[key]
136
-
137
- # Convert the value to dict if it is a string and is possible
138
- for k, v in row.items():
139
- if isinstance(v, str):
140
- try:
141
- row[k] = json.loads(v)
142
- except json.decoder.JSONDecodeError:
143
- pass
144
-
145
- return row
146
-
147
-
148
- def read_stac(
149
- stac_file: Union[pystac.Catalog, pystac.Collection, str],
150
- geometry_column: Optional[str] = "geometry",
151
- ) -> STACDataFrame:
152
- """
153
- Read a STAC file and return a STACDataFrame
154
-
155
- :param stac_file: STAC file to read
156
- :param geometry_column: name of the geometry column
157
- """
158
- if isinstance(stac_file, (str, Path)):
159
- stac_file = pystac.read_file(stac_file) # we assume this is always a catalog
160
- stac_file.make_all_asset_hrefs_absolute()
161
- children = get_all_children(stac_file)
162
-
163
- # Convert Dataframe to STACDataFrame
164
- dataframe = pd.DataFrame(children)
165
- dataframe[geometry_column] = dataframe.apply(convert_df_geom_to_shape, axis=1)
166
- stac_dataframe = STACDataFrame(
167
- dataframe,
168
- crs="EPSG:4326",
169
- geometry=gpd.GeoSeries.from_wkt(dataframe[geometry_column]),
170
- )
171
-
172
- return stac_dataframe
@@ -1,253 +0,0 @@
1
- """
2
- Module for the STAC dataframe
3
- """
4
-
5
- import pandas as pd
6
- import geopandas as gpd
7
- import pystac
8
- import json
9
- import os
10
- from xcube_geodb.core.geodb import GeoDBClient
11
- from geomet import wkt
12
- from os.path import join
13
- from os import makedirs
14
-
15
- from math import isnan
16
- from .utils import convert_df_geom_to_shape, get_all_children
17
-
18
-
19
- class STACDataFrame(gpd.GeoDataFrame):
20
- def __init__(self, *args, **kwargs):
21
- super().__init__(*args, **kwargs)
22
-
23
- @classmethod
24
- def from_stac_file(self, stac_file):
25
- """
26
- Create a STACDataFrame from a STAC file
27
- """
28
- return read_stac(stac_file)
29
-
30
- @classmethod
31
- def from_geodb(
32
- self,
33
- server_url: str,
34
- server_port: int | str,
35
- client_id: str,
36
- client_secret: str,
37
- auth_aud: str,
38
- collection: str,
39
- database: str = None,
40
- ):
41
- """
42
- Create a STACDataFrame from a GeoDB collection
43
-
44
- :param server_url: GeoDB server url
45
- :param server_port: GeoDB server port
46
- :param client_id: GeoDB client id
47
- :param client_secret: GeoDB client secret
48
- :param auth_aud: GeoDB auth aud
49
- :param collection: GeoDB collection
50
- :param database: GeoDB database
51
- """
52
- geodb_client = GeoDBClient(
53
- server_url=server_url,
54
- server_port=server_port,
55
- client_id=client_id,
56
- client_secret=client_secret,
57
- auth_aud=auth_aud,
58
- )
59
-
60
- data = geodb_client.get_collection(collection, database=database)
61
-
62
- return STACDataFrame(data, crs="EPSG:4326")
63
-
64
- def ingest(
65
- self,
66
- collection: str,
67
- server_url: str = os.environ["SERVER_URL"],
68
- server_port: int = os.environ["SERVER_PORT"],
69
- client_id: str = os.environ["CLIENT_ID"],
70
- client_secret: str = os.environ["CLIENT_SECRET"],
71
- auth_aud: str = os.environ["AUTH_DOMAIN"],
72
- database: str = None,
73
- ):
74
- """
75
- Create a GeoDB collection from a STACDataFrame
76
-
77
- :param collection: dataset name (GeoDB collection)
78
- :param server_url: GeoDB server url
79
- :param server_port: GeoDB server port
80
- :param client_id: GeoDB client id
81
- :param client_secret: GeoDB client secret
82
- :param auth_aud: GeoDB auth aud
83
- :param database: GeoDB database
84
- """
85
-
86
- geodb_client = GeoDBClient(
87
- server_url=server_url,
88
- server_port=server_port,
89
- client_id=client_id,
90
- client_secret=client_secret,
91
- auth_aud=auth_aud,
92
- )
93
-
94
- # TODO: check name is unique (use eotdl-cli)
95
-
96
- # TODO: ingest assets (only if local)
97
- # TODO: rename assets in the dataframe with URLs (only if local)
98
-
99
- # ingest to geodb
100
-
101
- # Check if the collection already exists
102
- if geodb_client.collection_exists(collection, database=database):
103
- # geodb_client.drop_collection(collection, database=database)
104
- raise Exception(f"Collection {collection} already exists")
105
-
106
- # Rename the column id to stac_id, to avoid conflicts with the id column
107
- self.rename(columns={"id": "stac_id"}, inplace=True)
108
- # Fill the NaN with '' to avoid errors, except in the geometry column
109
- copy = self.copy()
110
- columns_to_fill = copy.columns.drop("geometry")
111
- self[columns_to_fill] = self[columns_to_fill].fillna("")
112
-
113
- # Create the collection if it does not exist
114
- # and insert the data
115
- collections = {collection: self._create_collection_structure(self.columns)}
116
- geodb_client.create_collections(collections, database=database)
117
-
118
- geodb_client.insert_into_collection(collection, database=database, values=self)
119
-
120
- # TODO: save data in eotdl
121
-
122
- def _create_collection_structure(self, columns: list) -> dict:
123
- """
124
- Create the schema structure of a GeoDB collection from a STACDataFrame
125
-
126
- :param columns: columns of the STACDataFrame
127
- """
128
- stac_collection = {"crs": 4326, "properties": {}}
129
-
130
- for column in columns:
131
- if column not in ("geometry", "id"):
132
- stac_collection["properties"][column] = "json"
133
-
134
- return stac_collection
135
-
136
- def to_stac(self):
137
- """
138
- Create a STAC catalog and children from a STACDataFrame
139
- """
140
- df = self.copy()
141
-
142
- if "id" in df.columns and "stac_id" in df.columns:
143
- id_column = "stac_id"
144
- stac_id_exists = True
145
- else:
146
- id_column = "id"
147
- stac_id_exists = False
148
-
149
- # First, create the catalog and its folder, if exists
150
- catalog_df = df[df["type"] == "Catalog"]
151
-
152
- if catalog_df.empty:
153
- root_output_folder = "output"
154
- makedirs(root_output_folder, exist_ok=True)
155
- else:
156
- for index, row in catalog_df.iterrows():
157
- root_output_folder = row[id_column]
158
- makedirs(root_output_folder, exist_ok=True)
159
- row_json = row.to_dict()
160
-
161
- # Curate the json row
162
- row_json = self.curate_json_row(row_json, stac_id_exists)
163
-
164
- with open(join(root_output_folder, "catalog.json"), "w") as f:
165
- json.dump(row_json, f)
166
-
167
- # Second, create the collections and their folders, if exist
168
- collections = dict()
169
- collections_df = df[df["type"] == "Collection"]
170
- for index, row in collections_df.iterrows():
171
- stac_output_folder = join(root_output_folder, row[id_column])
172
- collections[row[id_column]] = stac_output_folder
173
- makedirs(stac_output_folder, exist_ok=True)
174
- row_json = row.to_dict()
175
-
176
- # Curate the json row
177
- row_json = self.curate_json_row(row_json, stac_id_exists)
178
-
179
- with open(join(stac_output_folder, "collection.json"), "w") as f:
180
- json.dump(row_json, f)
181
-
182
- # Then, create the items and their folders, if exist
183
- features_df = df[df["type"] == "Feature"]
184
- for index, row in features_df.iterrows():
185
- collection = row["collection"]
186
- stac_output_folder = join(collections[collection], row[id_column])
187
-
188
- # Convert the geometry from WKT back to geojson
189
- row["geometry"] = row["geometry"].wkt
190
- row["geometry"] = wkt.loads(row["geometry"])
191
- makedirs(stac_output_folder, exist_ok=True)
192
- row_json = row.to_dict()
193
-
194
- # Curate the json row
195
- row_json = self.curate_json_row(row_json, stac_id_exists)
196
-
197
- with open(join(stac_output_folder, f'{row_json["id"]}.json'), "w") as f:
198
- json.dump(row_json, f)
199
-
200
- def curate_json_row(self, row: dict, stac_id_exists: bool) -> dict:
201
- """
202
- Curate the json row of a STACDataFrame, in order to generate a valid STAC file
203
-
204
- :param row: row of a STACDataFrame
205
- :param stac_id_exists: if the stac_id column exists
206
- """
207
- keys_to_remove = list()
208
-
209
- # Remove the created_at and modified_at columns, if the STACDataFrame comes from GeoDB
210
- for i in "created_at", "modified_at":
211
- if i in row.keys():
212
- keys_to_remove.append(i)
213
-
214
- # Rename the stac_id column to id, to avoid conflicts with the id column
215
- if stac_id_exists:
216
- row["id"] = row["stac_id"]
217
- del row["stac_id"]
218
-
219
- # Remove the NaN values and empty strings
220
- for k, v in row.items():
221
- if (isinstance(v, float) and isnan(v)) or v == "":
222
- keys_to_remove.append(k)
223
- for key in keys_to_remove:
224
- del row[key]
225
- del row["geometry"]
226
-
227
- return row
228
-
229
-
230
- def read_stac(
231
- stac_file: pystac.Catalog | pystac.Collection | str,
232
- geometry_column: str = "geometry",
233
- ) -> STACDataFrame:
234
- """
235
- Read a STAC file and return a STACDataFrame
236
-
237
- :param stac_file: STAC file to read
238
- :param geometry_column: name of the geometry column
239
- """
240
- if isinstance(stac_file, str):
241
- stac_file = pystac.read_file(stac_file)
242
- children = get_all_children(stac_file)
243
-
244
- # Convert Dataframe to STACDataFrame
245
- dataframe = pd.DataFrame(children)
246
- dataframe[geometry_column] = dataframe.apply(convert_df_geom_to_shape, axis=1)
247
- stac_dataframe = STACDataFrame(
248
- dataframe,
249
- crs="EPSG:4326",
250
- geometry=gpd.GeoSeries.from_wkt(dataframe[geometry_column]),
251
- )
252
-
253
- return stac_dataframe
@@ -1,63 +0,0 @@
1
- """
2
- Module for the labeling strategy when creating a STAC catalog from a dataframe
3
- """
4
-
5
- from os.path import basename
6
-
7
-
8
- class LabelingStrategy:
9
- """
10
- Labeling strategy interface to be implemented by concrete labeling strategies
11
- """
12
-
13
- def get_images_labels(self, images):
14
- """
15
- Get the labels of the images
16
- """
17
- return
18
-
19
-
20
- class UnlabeledStrategy(LabelingStrategy):
21
- """
22
- Assumes the images are not labeled, and returns the entire filename as label
23
- """
24
-
25
- def __init__(self):
26
- super().__init__()
27
-
28
- def get_images_labels(self, images):
29
- """
30
- Get the labels of the images
31
- """
32
- labels = []
33
- for image in images:
34
- labels.append(basename(image).split(".")[0])
35
- ixs = [labels.index(x) for x in labels]
36
-
37
- return labels, ixs
38
-
39
-
40
- class LabeledStrategy(LabelingStrategy):
41
- """
42
- Assumes the images are already labeled, and returns the labels.
43
- The images filenames must follow the pattern: <label>_<id>.<ext>
44
- """
45
-
46
- def __init__(self):
47
- super().__init__()
48
-
49
- def get_images_labels(self, images):
50
- """
51
- Get the labels of the images
52
- """
53
- labels = []
54
- for image in images:
55
- image_basename = basename(image).split(".")[
56
- 0
57
- ] # Get filename without extension
58
- label = image_basename.split("_")[0]
59
- labels.append(label)
60
-
61
- ixs = [labels.index(x) for x in labels]
62
-
63
- return labels, ixs
@@ -1,23 +0,0 @@
1
- """
2
- STAC extensions module
3
- """
4
-
5
- from .sar import SarExtensionObject
6
- from .raster import RasterExtensionObject
7
- from .projection import ProjExtensionObject
8
- from .dem import DEMExtensionObject
9
- from .eo import EOS2ExtensionObject
10
- from .label import LabelExtensionObject, ImageNameLabeler, ScaneoLabeler
11
- from .ml_dataset import add_ml_extension, MLDatasetQualityMetrics
12
-
13
-
14
- SUPPORTED_EXTENSIONS = ("eo", "sar", "proj", "raster")
15
-
16
-
17
- type_stac_extensions_dict = {
18
- "sar": SarExtensionObject(),
19
- "eo": EOS2ExtensionObject(),
20
- "dem": DEMExtensionObject(),
21
- "raster": RasterExtensionObject(),
22
- "proj": ProjExtensionObject(),
23
- }
@@ -1,30 +0,0 @@
1
- """
2
- Module for STAC extensions objects
3
- """
4
-
5
- from typing import Optional, Union
6
- import pystac
7
-
8
- import pandas as pd
9
-
10
-
11
- class STACExtensionObject:
12
- """
13
- Base model for STAC extensions objects
14
- """
15
- def __init__(self) -> None:
16
- super().__init__()
17
- self.properties = {}
18
-
19
- def add_extension_to_object(
20
- self,
21
- obj: Union[pystac.Item, pystac.Asset],
22
- obj_info: Optional[pd.DataFrame] = None,
23
- ) -> Union[pystac.Item, pystac.Asset]:
24
- """
25
- Add the extension to the given object
26
-
27
- :param obj: object to add the extension
28
- :param obj_info: object info from the STACDataFrame
29
- """
30
- return
@@ -1,18 +0,0 @@
1
- """
2
- Module for DEM STAC extensions object
3
- """
4
-
5
- from .base import STACExtensionObject
6
-
7
-
8
- class DEMExtensionObject(STACExtensionObject):
9
- """
10
- DEM STAC extension object
11
- """
12
- DEM_DATE_ACQUIRED = {
13
- "start_datetime": "2011-01-01T00:00:00Z",
14
- "end_datetime": "2015-01-07T00:00:00Z",
15
- }
16
-
17
- def __init__(self) -> None:
18
- super().__init__()