eotdl-2024.10.1-py3-none-any.whl → eotdl-2025.2.10-py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
eotdl/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "2024.10.01"
+ __version__ = "2025.02.10"
eotdl/access/download.py CHANGED
@@ -42,8 +42,9 @@ def download_sentinel_imagery(
  bulk = False
  else:
  bulk = True
- client.download_data(requests_list)
- imagery_from_tmp_to_dir(output, name=name, bulk=bulk)
+
+ data = client.download_data(requests_list)
+ imagery_from_tmp_to_dir(output, client.tmp_dir, name=name, bulk=bulk)


  def search_and_download_sentinel_imagery(
eotdl/access/search.py CHANGED
@@ -19,8 +19,6 @@ def search_sentinel_imagery(
  evaluate_sentinel_parameters(
  sensor, time_interval, bounding_box, output_needed=False
  )
-
  client = SHClient()
  parameters = SH_PARAMETERS_DICT[sensor]()
-
  return client.search_data(bounding_box, time_interval, parameters)
eotdl/access/sentinelhub/client.py CHANGED
@@ -15,6 +15,7 @@ from sentinelhub import (
  SentinelHubDownloadClient,
  MimeType,
  )
+ import uuid

  from ...repos.AuthRepo import AuthRepo
  from .parameters import SHParameters
@@ -57,7 +58,7 @@ class SHClient:
  self.config.sh_client_id = creds["SH_CLIENT_ID"]
  self.config.sh_client_secret = creds["SH_CLIENT_SECRET"]
  self.catalog = SentinelHubCatalog(config=self.config)
- self.tmp_dir = "/tmp/sentinelhub"
+ self.tmp_dir = "/tmp/sentinelhub/" + str(uuid.uuid4())

  def search_data(
  self, bounding_box: list, time_interval: list, parameters: SHParameters
@@ -109,5 +110,4 @@ class SHClient:
  requests = [requests]
  download_requests = [request.download_list[0] for request in requests]
  data = download_client.download(download_requests)
-
  return data
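Taken together with the download.py hunk above, each SHClient instance now writes Sentinel Hub responses into its own scratch directory instead of a shared /tmp/sentinelhub, and download_sentinel_imagery passes that directory on to imagery_from_tmp_to_dir. A minimal sketch of the scheme (illustrative only, not part of the package):

```python
import uuid
from pathlib import Path

# Per-client scratch directory, as built in SHClient.__init__ above.
tmp_dir = Path("/tmp/sentinelhub") / str(uuid.uuid4())
tmp_dir.mkdir(parents=True, exist_ok=True)

# SentinelHubDownloadClient writes <request-hash>/response.tiff (plus
# request.json) under tmp_dir; imagery_from_tmp_to_dir(output, tmp_dir, ...)
# then copies the rasters and their metadata into the output folder and
# removes tmp_dir with rmtree, so concurrent downloads no longer collide.
```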
eotdl/access/sentinelhub/parameters.py CHANGED
@@ -42,7 +42,7 @@ class SHS2L1CParameters(SHParameters):
  "include": ["id", "properties.datetime", "properties.eo:cloud_cover"],
  "exclude": [],
  }
-
+ FILTER = None

  class SHS1Parameters(SHParameters):
  """
eotdl/access/sentinelhub/utils.py CHANGED
@@ -50,7 +50,7 @@ def evaluate_sentinel_parameters(

  def imagery_from_tmp_to_dir(
  output_dir: str,
- tmp_dir: Optional[str] = "/tmp/sentinelhub",
+ tmp_dir: Optional[str],
  name: Optional[str] = None,
  bulk: Optional[bool] = False,
  ) -> None:
@@ -60,9 +60,7 @@ def imagery_from_tmp_to_dir(
  downloaded_files = glob(f"{tmp_dir}/**/response.tiff")
  if len(downloaded_files) == 0:
  return
-
  makedirs(output_dir, exist_ok=True)
-
  for downloaded_file in downloaded_files:
  request_json = downloaded_file.replace("response.tiff", "request.json")
  metadata = generate_raster_metadata(downloaded_file, request_json)
@@ -75,11 +73,9 @@ def imagery_from_tmp_to_dir(
  output_filename = f"{metadata['type']}_{metadata['acquisition-date']}"
  else:
  output_filename = metadata["type"]
-
  copyfile(downloaded_file, f"{output_dir}/{output_filename}.tif")
  with open(f"{output_dir}/{output_filename}.json", "w", encoding="utf-8") as f:
  json.dump(metadata, f)
-
  rmtree(tmp_dir)


eotdl/curation/stac/extensions/ml_dataset.py CHANGED
@@ -377,7 +377,7 @@ class MLDatasetQualityMetrics:
  destination
  ) # Remove the old catalog and replace it with the new one
  catalog.set_root(catalog)
- catalog.normalize_and_save(root_href=destination)
+ catalog.normalize_and_save(root_href=destination, catalog_type=pystac.CatalogType.SELF_CONTAINED)
  print("Success!")
  except STACValidationError:
  # Return full callback
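For context, a self-contained catalog is saved with relative hrefs and no absolute self links, so the exported folder can be moved or zipped without breaking references. A minimal pystac sketch (toy catalog, not taken from the diff):

```python
import pystac

catalog = pystac.Catalog(id="example", description="Toy catalog")
# SELF_CONTAINED writes relative hrefs and omits absolute self links,
# so the saved folder stays valid when moved or archived.
catalog.normalize_and_save(
    root_href="output/catalog",
    catalog_type=pystac.CatalogType.SELF_CONTAINED,
)
```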
eotdl/datasets/ingest.py CHANGED
@@ -3,6 +3,7 @@ import yaml
  from tqdm import tqdm
  import json
  import frontmatter
+ import pystac

  from ..auth import with_auth
  from .metadata import Metadata
@@ -129,7 +130,7 @@ def ingest_stac(stac_catalog, logger=None, user=None):
  repo, files_repo = DatasetsAPIRepo(), FilesAPIRepo()
  # load catalog
  logger("Loading STAC catalog...")
- df = STACDataFrame.from_stac_file(stac_catalog)
+ df = STACDataFrame.from_stac_file(stac_catalog) # assets are absolute for file ingestion
  catalog = df[df["type"] == "Catalog"]
  assert len(catalog) == 1, "STAC catalog must have exactly one root catalog"
  dataset_name = catalog.id.iloc[0]
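The new inline comment documents the assumption that asset hrefs in the catalog are absolute when the files are ingested. A hedged sketch of preparing a catalog that way with pystac (paths are placeholders; this is not the eotdl ingestion code itself):

```python
import pystac

catalog = pystac.Catalog.from_file("data/my-dataset/catalog.json")  # placeholder path
# Make asset hrefs absolute so the ingestion step can locate the files on disk.
catalog.make_all_asset_hrefs_absolute()
catalog.normalize_and_save(
    root_href="data/my-dataset",
    catalog_type=pystac.CatalogType.ABSOLUTE_PUBLISHED,
)
```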
eotdl/models/download.py CHANGED
@@ -74,12 +74,12 @@ def download_model(
  )
  if error:
  raise Exception(error)
- print(gdf)
+ # print(gdf)
  df = STACDataFrame(gdf)
  # df.geometry = df.geometry.apply(lambda x: Polygon() if x is None else x)
  df.to_stac(download_path)
- print("----")
- print(df)
+ # print("----")
+ # print(df)
  # download assets
  if assets:
  if verbose:
eotdl/repos/APIRepo.py CHANGED
@@ -5,7 +5,7 @@ import requests
  class APIRepo:
  def __init__(self, url=None):
  default_url = "https://api.eotdl.com/"
- # default_url = "http://localhost:8010/"
+ # default_url = "http://localhost:8001/"
  self.url = url if url else os.getenv("EOTDL_API_URL", default_url)

  def format_response(self, response):
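The default API URL is unchanged; only the commented-out local URL moves from port 8010 to 8001. As before, the base URL can be overridden through the EOTDL_API_URL environment variable, for example (port value is illustrative):

```python
import os

os.environ["EOTDL_API_URL"] = "http://localhost:8001/"  # e.g. a local dev API

from eotdl.repos.APIRepo import APIRepo

repo = APIRepo()
print(repo.url)  # -> http://localhost:8001/
```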
eotdl/tools/geo_utils.py CHANGED
@@ -127,6 +127,11 @@ def bbox_from_centroid(
  width_m = width * pixel_size
  heigth_m = height * pixel_size

+ # Initialise the transformers
+ utm_crs = CRS.get_utm_from_wgs84(y, x).ogc_string()
+ from_4326_transformer = Transformer.from_crs("EPSG:4326", utm_crs)
+ to_4326_transformer = Transformer.from_crs(utm_crs, "EPSG:4326")
+
  # Transform the centroid coordinates to meters
  centroid_m = from_4326_transformer.transform(x, y)

@@ -137,8 +142,8 @@ def bbox_from_centroid(
  max_y = centroid_m[1] + heigth_m / 2

  # Convert the bounding box coordinates back to degrees
- min_x, min_y = from_3857_transformer.transform(min_x, min_y)
- max_x, max_y = from_3857_transformer.transform(max_x, max_y)
+ min_x, min_y = to_4326_transformer.transform(min_x, min_y)
+ max_x, max_y = to_4326_transformer.transform(max_x, max_y)

  return [min_y, min_x, max_y, max_x]
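The fix builds the transformers for the centroid's UTM zone and converts the box back to degrees with to_4326_transformer instead of the previous from_3857_transformer. A self-contained sketch of the same round trip using pyproj directly (the UTM zone, point, and box size are illustrative, and axis order is made explicit with always_xy):

```python
from pyproj import Transformer

utm_crs = "EPSG:32630"  # example UTM zone; eotdl derives it from the centroid
to_utm = Transformer.from_crs("EPSG:4326", utm_crs, always_xy=True)
to_wgs84 = Transformer.from_crs(utm_crs, "EPSG:4326", always_xy=True)

lon, lat = -3.7, 40.4              # example centroid (zone 30)
x, y = to_utm.transform(lon, lat)  # centroid in metres
half = 640 * 10 / 2                # e.g. 640 px at 10 m per pixel, halved
min_lon, min_lat = to_wgs84.transform(x - half, y - half)
max_lon, max_lat = to_wgs84.transform(x + half, y + half)
print([min_lat, min_lon, max_lat, max_lon])
```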
eotdl/wrappers/__init__.py ADDED
@@ -0,0 +1 @@
+ from .models import ModelWrapper
eotdl/wrappers/models.py ADDED
@@ -0,0 +1,158 @@
+ # Q1+ model wrapper
+ # only works with some models, extend as we include more models in EOTDL and improve MLM extension
+
+ import os
+ from pathlib import Path
+ from tqdm import tqdm
+ import numpy as np
+
+ from ..models.retrieve import retrieve_model
+ from ..curation.stac import STACDataFrame
+ from ..repos import FilesAPIRepo, ModelsAPIRepo
+ from ..auth import with_auth
+
+ class ModelWrapper:
+ def __init__(self, model_name, version=None, path=None, force=False, assets=True, verbose=True):
+ self.model_name = model_name
+ self.version = version
+ self.path = path
+ self.force = force
+ self.assets = assets
+ self.verbose = verbose
+ self.ready = False
+ self.setup()
+
+ def setup(self):
+ download_path, gdf = self.download()
+ self.download_path = download_path
+ self.gdf = gdf
+ # get model name from stac metadata
+ item = gdf[gdf['type'] == "Feature"]
+ assert item.shape[0] == 1, "Only one item is supported in stac metadata, found " + str(item.shape[0])
+ self.props = item.iloc[0].properties
+ assert self.props["mlm:framework"] == "ONNX", "Only ONNX models are supported, found " + self.props["mlm:framework"]
+ model_name = self.props["mlm:name"]
+ self.model_path = download_path + '/assets/' + model_name
+ self.ready = True
+
+ def predict(self, x):
+ if not self.ready:
+ self.setup()
+ ort_session = self.get_onnx_session(self.model_path)
+ # preprocess input
+ x = self.process_inputs(x)
+ # execute model
+ input_name = ort_session.get_inputs()[0].name
+ ort_inputs = {input_name: x}
+ ort_outs = ort_session.run(None, ort_inputs)
+ output_nodes = ort_session.get_outputs()
+ output_names = [node.name for node in output_nodes]
+ # format and return outputs
+ return self.return_outputs(ort_outs, output_names)
+
+ @with_auth
+ def download(self, user=None):
+ # download the model
+ model = retrieve_model(self.model_name)
+ if model["quality"] == 0:
+ raise Exception("Only Q1+ models are supported")
+ if self.version is None:
+ self.version = sorted(model["versions"], key=lambda v: v["version_id"])[-1][
+ "version_id"
+ ]
+ else:
+ assert self.version in [
+ v["version_id"] for v in model["versions"]
+ ], f"Version {self.version} not found"
+ download_base_path = os.getenv(
+ "EOTDL_DOWNLOAD_PATH", str(Path.home()) + "/.cache/eotdl/models"
+ )
+ if self.path is None:
+ download_path = download_base_path + "/" + self.model_name + "/v" + str(self.version)
+ else:
+ download_path = self.path + "/" + self.model_name + "/v" + str(self.version)
+ # check if model already exists
+ if os.path.exists(download_path) and not self.force:
+ os.makedirs(download_path, exist_ok=True)
+ gdf = STACDataFrame.from_stac_file(download_path + f"/{self.model_name}/catalog.json")
+ return download_path, gdf
+ if self.verbose:
+ print("Downloading STAC metadata...")
+ repo = ModelsAPIRepo()
+ gdf, error = repo.download_stac(
+ model["id"],
+ user,
+ )
+ if error:
+ raise Exception(error)
+ df = STACDataFrame(gdf)
+ # df.geometry = df.geometry.apply(lambda x: Polygon() if x is None else x)
+ df.to_stac(download_path)
+ # download assets
+ if self.assets:
+ if self.verbose:
+ print("Downloading assets...")
+ repo = FilesAPIRepo()
+ df = df.dropna(subset=["assets"])
+ for row in tqdm(df.iterrows(), total=len(df)):
+ for k, v in row[1]["assets"].items():
+ href = v["href"]
+ _, filename = href.split("/download/")
+ # will overwrite assets with same name :(
+ repo.download_file_url(
+ href, filename, f"{download_path}/assets", user
+ )
+ else:
+ print("To download assets, set assets=True.")
+ if self.verbose:
+ print("Done")
+ return download_path, gdf
+
+ def process_inputs(self, x):
+ # pre-process and validate input
+ input = self.props["mlm:input"]
+ # input data type
+ dtype = input["input"]["data_type"]
+ x = x.astype(dtype)
+ # input shape
+ input_shape = input["input"]["shape"]
+ ndims = len(input_shape)
+ if ndims != x.ndim:
+ if ndims == 4:
+ x = np.expand_dims(x, axis=0).astype(np.float32)
+ else:
+ raise Exception("Input shape not valid", input_shape, x.ndim)
+ for i, dim in enumerate(input_shape):
+ if dim != -1:
+ assert dim == x.shape[i], f"Input dimension not valid: The model expects {input_shape} but input has {x.shape} (-1 means any dimension)."
+ # TODO: should apply normalization if defined in metadata
+ return x
+
+ def return_outputs(self, ort_outputs, output_names):
+ if self.props["mlm:output"]["tasks"] == ["classification"]:
+ return {
+ "model": self.model_name,
+ **{
+ output: ort_outputs[i].tolist() for i, output in enumerate(output_names)
+ },
+ }
+ elif self.props["mlm:output"]["tasks"] == ["segmentation"]:
+ outputs = {output: ort_outputs[i] for i, output in enumerate(output_names)}
+ batch = outputs[output_names[0]]
+ image = batch[0]
+ return image
+ else:
+ raise Exception("Output task not supported:", self.props["mlm:output"]["tasks"])
+
+ def get_onnx_session(self, model):
+ try:
+ import onnxruntime as ort
+ # gpu requires `pip install onnxruntime-gpu` but no extra imports
+ except ImportError:
+ raise ImportError("onnxruntime is not installed. Please install it with `pip install onnxruntime`")
+ providers = ["CUDAExecutionProvider", "CPUExecutionProvider"]
+ try:
+ session = ort.InferenceSession(model, providers=providers)
+ except Exception as e:
+ raise RuntimeError(f"Error loading ONNX model: {str(e)}")
+ return session
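The new ModelWrapper is re-exported from eotdl.wrappers (see the one-line __init__.py above). A hypothetical usage sketch; the model name and input shape are placeholders, and an authenticated EOTDL session plus onnxruntime are assumed:

```python
import numpy as np
from eotdl.wrappers import ModelWrapper

# Stages the Q1+ model and its STAC/MLM metadata on first use (ONNX only).
wrapper = ModelWrapper("some-q1-onnx-model")  # placeholder model name

# Input dtype and shape are validated against the mlm:input metadata;
# a missing batch dimension is added automatically for 4D model inputs.
x = np.random.rand(3, 64, 64).astype(np.float32)  # placeholder input
outputs = wrapper.predict(x)
print(outputs)
```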
eotdl-2024.10.1.dist-info/METADATA → eotdl-2025.2.10.dist-info/METADATA RENAMED
@@ -1,35 +1,23 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.4
  Name: eotdl
- Version: 2024.10.1
+ Version: 2025.2.10
  Summary: Earth Observation Training Data Lab
- License: MIT
- Author: EarthPulse
- Author-email: it@earthpulse.es
- Requires-Python: >=3.8,<4.0
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.8
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: black (>=23.10.1,<24.0.0)
- Requires-Dist: geomet (>=1.0.0,<2.0.0)
- Requires-Dist: geopandas (>=0.13.2,<0.14.0)
- Requires-Dist: markdown (>=3.5.2,<4.0.0)
- Requires-Dist: markdownify (>=0.11.6,<0.12.0)
- Requires-Dist: mypy (>=1.6.1,<2.0.0)
- Requires-Dist: openeo (>=0.31.0,<0.32.0)
- Requires-Dist: pydantic (>=1.10.6,<2.0.0)
- Requires-Dist: pyjwt (>=2.6.0,<3.0.0)
- Requires-Dist: pystac[validation] (==1.8.2)
- Requires-Dist: python-frontmatter (>=1.1.0,<2.0.0)
- Requires-Dist: pyyaml (>=6.0.1,<7.0.0)
- Requires-Dist: rasterio (>=1.3.9,<2.0.0)
- Requires-Dist: requests (>=2.28.2,<3.0.0)
- Requires-Dist: sentinelhub (>=3.9.1,<4.0.0)
- Requires-Dist: tqdm (>=4.65.0,<5.0.0)
- Requires-Dist: typer[all] (>=0.9.0,<0.10.0)
+ Author-email: earthpulse <it@earthpulse.es>
+ License-Expression: MIT
+ Requires-Python: >=3.8
+ Requires-Dist: geomet>=1.1.0
+ Requires-Dist: geopandas>=0.13.2
+ Requires-Dist: markdown>=3.7
+ Requires-Dist: pydantic>=2.10.6
+ Requires-Dist: pyjwt>=2.9.0
+ Requires-Dist: pystac>=1.8.4
+ Requires-Dist: python-frontmatter>=1.1.0
+ Requires-Dist: pyyaml>=6.0.2
+ Requires-Dist: rasterio>=1.3.11
+ Requires-Dist: requests>=2.32.3
+ Requires-Dist: sentinelhub>=3.11.1
+ Requires-Dist: tqdm>=4.67.1
+ Requires-Dist: typer>=0.15.1
  Description-Content-Type: text/markdown

  <p align="center">
@@ -49,10 +37,10 @@ Description-Content-Type: text/markdown

  This is the main library and CLI for the **Earth Observation Training Data Lab** (EOTDL), a complete environment that allows you, among other things, to:

- - Explore and download Training Datasets (TDS) for Earth Observation (EO) applications.
+ - Explore and stage Training Datasets (TDS) for Earth Observation (EO) applications.
  - Create and upload your own TDS by combining and annotating EO data from different sources.
  - Train Machine Learning (ML) models using the hosted TDS in the cloud with multi-GPU machines.
- - Explore and download pre-trianed ML models for EO applications.
+ - Explore and stage pre-trianed ML models for EO applications.

  In our blog you will find tutorials to learn how leverage the EOTDL to create and use TDS and ML models for your own EO applications.

@@ -62,4 +50,4 @@ One of the most limiting factors of AI for EO applications is the scarcity of su

  Generating TDS is time consuming and expensive. Data access is usually limited and costly, especially for Very High Resolution (VHR) images that allow objects like trees to be clearly identified. In some cases, domain experts or even in-person (in-situ) trips are required to manually confirm the objects in a satellite image are correctly annotated with a high degree of quality. This results in the field of AI for EO applications lagging when compared to other fields, impeding the development of new applications and limiting the full potential of AI in EO.

- The European Space Agency (ESA) Earth Observation Training Data Lab (EOTDL) will address key limitations and capability gaps for working with Machine Learning (ML) training data in EO by providing a set of open-source tools to create, share, and improve datasets as well as training ML algorithms in the cloud. EOTDL will also offer an online repository where datasets and models can be explored and accessed.
+ The European Space Agency (ESA) Earth Observation Training Data Lab (EOTDL) will address key limitations and capability gaps for working with Machine Learning (ML) training data in EO by providing a set of open-source tools to create, share, and improve datasets as well as training ML algorithms in the cloud. EOTDL will also offer an online repository where datasets and models can be explored and accessed.
eotdl-2024.10.1.dist-info/RECORD → eotdl-2025.2.10.dist-info/RECORD RENAMED
@@ -1,22 +1,22 @@
- eotdl/__init__.py,sha256=GYhbp_qDI4r7Y9hYO76t0DwK_JCtUw2UK-DbHzV4XKk,27
+ eotdl/__init__.py,sha256=FAoVIMt0RmGmgzADLwWcGchcDVZIL8E-tw-vbRw_mfY,27
+ eotdl/cli.py,sha256=qGtdOV2gcy-YYdc-tUwzxV6X7SLWmwb9L8pBQGD6OzY,660
  eotdl/access/__init__.py,sha256=jbyjD7BRGJURlTNmtcbBBhw3Xk4EiZvkqmEykM-bJ1k,231
+ eotdl/access/download.py,sha256=5LHTxuV9BQzwYVGSaMoUqr7EEdYOhqY_QGDaChru6Pw,1869
+ eotdl/access/search.py,sha256=JW4MnM3xbXxvsaNCFkRKxPhxhNKJgZAutE2wna6qUpo,631
  eotdl/access/airbus/__init__.py,sha256=G_kkRS9eFjXbQ-aehmTLXeAxh7zpAxz_rgB7J_w0NRg,107
  eotdl/access/airbus/client.py,sha256=zjfgB_NTsCCIszoQesYkyLJgheKg-eTh28vbleXYxfw,12018
  eotdl/access/airbus/parameters.py,sha256=Z8XIrxG5wAOuOoH-fkdKfdNMEMLFp6PaxJN7v4MefMI,1009
  eotdl/access/airbus/utils.py,sha256=oh_N1Rn4fhcvUgNPpH2QzVvpe4bA0gqRgNguzRVqUps,652
- eotdl/access/download.py,sha256=DgemJKafNOlCUVW8OxpSP4br9ij5F1iSrSD-x0B5qFU,1845
- eotdl/access/search.py,sha256=sO2hml6JLK30DncNUqTWq16zy9LvRiWC6wtt5bNRzrI,633
  eotdl/access/sentinelhub/__init__.py,sha256=YpvaUBTRXM26WrXipo51ZUBCDv9WjRIdT8l1Pklpt_M,238
- eotdl/access/sentinelhub/client.py,sha256=g40avqlUpIa-WLjD7tK8CL8_SohBA2v3m8NZ0KbIFxc,4098
+ eotdl/access/sentinelhub/client.py,sha256=DNR27NbAH2ZZZeArsl95qOnaaXJvgnXTjpxFKX4OYDc,4130
  eotdl/access/sentinelhub/evalscripts.py,sha256=m6cnZ6ryXHgdH2B7RDVSlDHXWfvKi7HMGkTHXEcJsTw,4142
- eotdl/access/sentinelhub/parameters.py,sha256=Ni3Lqx1bLVcMzgmnuayDS00BLDm7SuM1FExVdGafopI,2061
- eotdl/access/sentinelhub/utils.py,sha256=X9Q1YvErBdMsRKszXyaOaG6ZMvPdM2Nl_0SH-dWSFo0,3560
+ eotdl/access/sentinelhub/parameters.py,sha256=SEal7mCPkADc7lhQL-63t2h5-XCssYpGMvK5Eo3etFU,2078
+ eotdl/access/sentinelhub/utils.py,sha256=h3V-f-qmoq62cRbE0PWn7x3O1nOTGrCudm_bXm91XSU,3535
  eotdl/auth/__init__.py,sha256=OuGNfJQ-8Kymn4zIywlHQfImEO8DJMJIwOwTQm-u_dc,99
  eotdl/auth/auth.py,sha256=EjbVFREA2H0sjFJhVqjFZrwjKPzxRJ2x83MTjizpRBs,2029
  eotdl/auth/errors.py,sha256=E1lv3Igk--J-SOgNH18i8Xx9bXrrMyBSHKt_CAUmGPo,308
  eotdl/auth/is_logged.py,sha256=QREuhkoDnarZoUZwCxVCNoESGb_Yukh0lJo1pXvrV9Q,115
  eotdl/auth/logout.py,sha256=P_Sp6WmVvnG3R9V1L9541KNyHFko9DtQPqAKD2vaguw,161
- eotdl/cli.py,sha256=qGtdOV2gcy-YYdc-tUwzxV6X7SLWmwb9L8pBQGD6OzY,660
  eotdl/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  eotdl/commands/auth.py,sha256=WzA0aFGRoscy7fPKQTxiphBc0LJztJxBBl7rjDBRVfI,1544
  eotdl/commands/datasets.py,sha256=wiH4OimpOgvONtnHkNi8PWb8I7kR7kNm61dcz9vgZtE,5205
@@ -27,24 +27,24 @@ eotdl/curation/stac/assets.py,sha256=ay3JO6iEANMqTAe40sF7QYeEY574LbrhyanqSlVUITc
  eotdl/curation/stac/dataframe.py,sha256=w0CXK4tAegbXQgnXbak2QbLfWgC6SQXpR2g-snoimAM,5503
  eotdl/curation/stac/dataframe_bck.py,sha256=PwAwol7kll0xYtlkhLeQ_Sc5TBQ85cVd6eyzdfTMJnE,8493
  eotdl/curation/stac/dataframe_labeling.py,sha256=F22-4gpF9zFuCLqRva2XAyArOmGdrgGxXbgo9d54BFE,1520
+ eotdl/curation/stac/extent.py,sha256=Jb3K4v59eu_h5t429r0762o0zG_LA50iEE-abWNL0e0,5108
+ eotdl/curation/stac/parsers.py,sha256=H5IukLA61mpLojeuhWNQdiZk2eiYHAfpJBFxmjdGDso,1529
+ eotdl/curation/stac/stac.py,sha256=9GdiB1mV8iyREvXZoJmNJKQGawWyK3h9Eu0hJX5u6O4,13180
  eotdl/curation/stac/extensions/__init__.py,sha256=NSzKe14Iyr8Pm2AVg8RHxddtBD2so45--BRJmJd8bTs,629
  eotdl/curation/stac/extensions/base.py,sha256=HDisHg43aC3tJjqKdJVyQMfXc0jLHZEVYrTve9lmZak,671
  eotdl/curation/stac/extensions/dem.py,sha256=ecCLfg_izIjfWBvoKNl_WLNVuNqNiQWLaWxshNucTyY,370
  eotdl/curation/stac/extensions/eo.py,sha256=cT4RrbyoimIuuOxNRLkamhZgHpDbj4z_Ziat2G2gTuA,4004
+ eotdl/curation/stac/extensions/ml_dataset.py,sha256=8G1-1yljfHhp5tKk3rGysB9M_0-Upa5WN9y1s6O5nno,21477
+ eotdl/curation/stac/extensions/projection.py,sha256=ussVIwr_wOOhn07OmpAWY4qqbeAmYUxKjbE8onrAy7o,1236
+ eotdl/curation/stac/extensions/raster.py,sha256=o5U_1ow8BsgwZXpSQYwQIvMJldhyn7xoGoJmUANTJTE,1540
+ eotdl/curation/stac/extensions/sar.py,sha256=Akw3_5brY_x2yU094nSLbv0E6M9jnAeUC0Vo5fJDRME,1633
  eotdl/curation/stac/extensions/label/__init__.py,sha256=R6xLkgJaZHoMh5BhpmueupWdM9NWKvmaRurum-ryU_s,159
  eotdl/curation/stac/extensions/label/base.py,sha256=5xbniQWjzXkrQxxXp8v9QQxFQdRYnaFPBd5_in1QXUw,4069
  eotdl/curation/stac/extensions/label/image_name_labeler.py,sha256=bivZN-qEtIXE6ehqwPKRsJO9RVckZ1lK2BG8ifUbaA8,8074
  eotdl/curation/stac/extensions/label/scaneo.py,sha256=uUzEqEwdnKYZx-qx9o6d8HcXp1VPy8eYWc9kM0sjiyA,8787
- eotdl/curation/stac/extensions/ml_dataset.py,sha256=SjOX_EvVRhwIUc4iQLw6YX_bG3VTqlRd9SpmpNCnRak,21429
- eotdl/curation/stac/extensions/projection.py,sha256=ussVIwr_wOOhn07OmpAWY4qqbeAmYUxKjbE8onrAy7o,1236
- eotdl/curation/stac/extensions/raster.py,sha256=o5U_1ow8BsgwZXpSQYwQIvMJldhyn7xoGoJmUANTJTE,1540
- eotdl/curation/stac/extensions/sar.py,sha256=Akw3_5brY_x2yU094nSLbv0E6M9jnAeUC0Vo5fJDRME,1633
- eotdl/curation/stac/extent.py,sha256=Jb3K4v59eu_h5t429r0762o0zG_LA50iEE-abWNL0e0,5108
- eotdl/curation/stac/parsers.py,sha256=H5IukLA61mpLojeuhWNQdiZk2eiYHAfpJBFxmjdGDso,1529
- eotdl/curation/stac/stac.py,sha256=9GdiB1mV8iyREvXZoJmNJKQGawWyK3h9Eu0hJX5u6O4,13180
  eotdl/datasets/__init__.py,sha256=xNbkUjqI1mf0TGjy52tpZtr5DnWNX5EVgyM04P4sU3Y,175
  eotdl/datasets/download.py,sha256=JSOi7OKiENYQ3TRVj2WsC1CPYD22VlC9W5kYHPfx8Vs,3667
- eotdl/datasets/ingest.py,sha256=F7mwQQok3rqZkGiQC-71Jc4dYDN5OSB_SXEACdCZKkk,5742
+ eotdl/datasets/ingest.py,sha256=p8Clp_QAWaixeudA-rKcARxxLIvj_voNYOJmZ6LhIWE,5797
  eotdl/datasets/metadata.py,sha256=jRoW9n32sUQgX3439WUCL8BJZojXKKXpmCM344XMDMc,1432
  eotdl/datasets/retrieve.py,sha256=DJz5K1bCLizg9YNwBnhHMFzcxMXar2socYkFONdSL4c,1041
  eotdl/datasets/update.py,sha256=x-rpfxnavn9X-7QYkFMGtbn1b3bKmAZydOeS7Tjr5AQ,386
@@ -52,12 +52,12 @@ eotdl/files/__init__.py,sha256=2zfAxgLHmU_jWq_3emnfPXsX-R20gSt-yZX0bPa9h0g,87
  eotdl/files/ingest.py,sha256=dgjZfd-ACCKradDo2B02CPahwEhFtWvnKvTm372K5eo,6185
  eotdl/files/list_files.py,sha256=k4OgdbQ7u6tUEE9nJZGXw5s5HtvG0ZApOVTy0KbfTqs,519
  eotdl/models/__init__.py,sha256=-PvGWG0iSRNBqeFWpxol12dYw-QodXjR81n-JX3x6zI,146
- eotdl/models/download.py,sha256=ppCeKsK2YzdimYRKXiF5dfGI6VyXw_T9lOnKO54Ojjg,3469
+ eotdl/models/download.py,sha256=rRT3fG-qS3-SXfzFdqy0cuiDnOIV9Du74JCnsbbA9Ps,3475
  eotdl/models/ingest.py,sha256=ipWNSyL8WwW9PPUAFcS9mLYow_1PTCpNQ_DsXKQ3Hvw,5671
  eotdl/models/metadata.py,sha256=Lu_Vv6YJJ9aLPvJa44tFfbBZiyxweOI2BHfq00aNRjY,1378
  eotdl/models/retrieve.py,sha256=-Ij7dT4J1p7MW4n13OlPB9OW4tBaBXPwk9dW8IuCZPc,664
  eotdl/models/update.py,sha256=4FWeD95cXvRpefRjw3Foqb30e30otxqWUZ6nQM9cbmM,374
- eotdl/repos/APIRepo.py,sha256=dJNdX3atBpug0FZNako7uyom25iccNPQrnoIRTIZEXE,791
+ eotdl/repos/APIRepo.py,sha256=as06DHfRtENMEYTAeAbYf0XGHU7E8LNxGHGpIH8lzoE,791
  eotdl/repos/AuthAPIRepo.py,sha256=vYCqFawe3xUm2cx4SqVXCvzl8J_sr9rs_MkipYC0bXE,957
  eotdl/repos/AuthRepo.py,sha256=jpzzhINCcDZHRCyrPDsp49h17IlXp2HvX3BB3f5cnb4,1154
  eotdl/repos/DatasetsAPIRepo.py,sha256=rKqSe-UjJOlD0Kbypu4Gs5kx2mzUD7TY05gv2vgUTv4,2660
@@ -67,13 +67,15 @@ eotdl/repos/__init__.py,sha256=WvX5TP49k7yYb5dWWNjv5kzbdluO3dJ4LqjQxRIOUVc,222
  eotdl/shared/__init__.py,sha256=mF7doJC8Z5eTPmB01UQvPivThZac32DRY33T6qshXfg,41
  eotdl/shared/checksum.py,sha256=4IB6N9jRO0chMDNJzpdnFDhC9wcFF9bO5oHq2HodcHw,479
  eotdl/tools/__init__.py,sha256=_p3n2dw3ulwyr1OlVw5d_jMV64cNYfajQMUbzFfvIpU,178
- eotdl/tools/geo_utils.py,sha256=yZA100UH0pbH8T6wb9Kfv_VRDKYYtQDPTud3Ddkdsok,7320
+ eotdl/tools/geo_utils.py,sha256=JKHUAnqkwiIrvh5voDclWAW-i57qVqH2FUjeOt1TQf4,7547
  eotdl/tools/metadata.py,sha256=RvNmoMdfEKoo-DzhEAqL-f9ZCjIe_bsdHQwACMk6w1E,1664
  eotdl/tools/paths.py,sha256=yWhOtVxX4NxrDrrBX2fuye5N1mAqrxXFy_eA7dffd84,1152
  eotdl/tools/stac.py,sha256=ovXdrPm4Sn9AAJmrP88WnxDmq2Ut-xPoscjphxz3Iyo,5763
  eotdl/tools/time_utils.py,sha256=qJ3-rk1I7ne722SLfAP6-59kahQ0vLQqIf9VpOi0Kpg,4691
  eotdl/tools/tools.py,sha256=Tl4_v2ejkQo_zyZek8oofJwoYcdVosdOwW1C0lvWaNM,6354
- eotdl-2024.10.1.dist-info/METADATA,sha256=FSAul-2xBkcUyN-T0tk8YX0Kp_ZzlQ4AAk0e9IxYmKk,4144
- eotdl-2024.10.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- eotdl-2024.10.1.dist-info/entry_points.txt,sha256=s6sfxUfRrSX2IP2UbrzTFTvRCtLgw3_OKcHlOKf_5F8,39
- eotdl-2024.10.1.dist-info/RECORD,,
+ eotdl/wrappers/__init__.py,sha256=IY3DK_5LMbc5bIQFleQA9kzFbPhWuTLesJ8dwfvpkdA,32
+ eotdl/wrappers/models.py,sha256=kNO4pYw9KKKmElE7bZWWHGs7FIThNUXj8XciKh_3rNw,6432
+ eotdl-2025.2.10.dist-info/METADATA,sha256=3gr3B3iSZA_mKlFpSqvSvF78cm9KioYXE63MOB2oyEc,3479
+ eotdl-2025.2.10.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ eotdl-2025.2.10.dist-info/entry_points.txt,sha256=FV4dFIZ5zdWj1q1nUEEip29n3sAgbviVOizEz00gEF0,40
+ eotdl-2025.2.10.dist-info/RECORD,,
eotdl-2024.10.1.dist-info/WHEEL → eotdl-2025.2.10.dist-info/WHEEL RENAMED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 1.9.0
+ Generator: hatchling 1.27.0
  Root-Is-Purelib: true
  Tag: py3-none-any
eotdl-2025.2.10.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
+ [console_scripts]
+ eotdl = eotdl.cli:app
eotdl-2024.10.1.dist-info/entry_points.txt DELETED
@@ -1,3 +0,0 @@
- [console_scripts]
- eotdl=eotdl.cli:app
-