eotdl 2024.4.25-py3-none-any.whl → 2024.6.13-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eotdl/__init__.py +1 -1
- eotdl/access/download.py +22 -16
- eotdl/access/sentinelhub/utils.py +12 -4
- eotdl/datasets/__init__.py +1 -1
- eotdl/files/__init__.py +1 -0
- eotdl/files/list_files.py +13 -0
- eotdl/models/__init__.py +1 -1
- eotdl/models/download.py +33 -49
- eotdl/models/ingest.py +68 -4
- eotdl/repos/FilesAPIRepo.py +9 -0
- eotdl/repos/ModelsAPIRepo.py +25 -0
- {eotdl-2024.4.25.dist-info → eotdl-2024.6.13.dist-info}/METADATA +1 -1
- {eotdl-2024.4.25.dist-info → eotdl-2024.6.13.dist-info}/RECORD +15 -14
- {eotdl-2024.4.25.dist-info → eotdl-2024.6.13.dist-info}/WHEEL +0 -0
- {eotdl-2024.4.25.dist-info → eotdl-2024.6.13.dist-info}/entry_points.txt +0 -0
eotdl/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "2024.
+__version__ = "2024.06.13"
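A trivial check of the installed release (a minimal sketch; assumes the new wheel is installed in the active environment):

```python
# Sketch: prints the version string bumped in this release.
import eotdl

print(eotdl.__version__)  # "2024.06.13" for this release
```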
eotdl/access/download.py
CHANGED
@@ -3,7 +3,7 @@ Download imagery
 """
 
 from datetime import datetime
-from typing import Union, List
+from typing import Union, List, Optional
 
 from .sentinelhub import (
     SHClient,
@@ -19,6 +19,7 @@ def download_sentinel_imagery(
     time_interval: Union[str, datetime, List[Union[str, datetime]]],
     bounding_box: List[Union[int, float]],
     sensor: str,
+    name: Optional[str] = None,
 ) -> None:
     """
     Download Sentinel imagery
@@ -28,9 +29,21 @@ def download_sentinel_imagery(
     client = SHClient()
     parameters = SH_PARAMETERS_DICT[sensor]()
 
-
-
-
+    results = search_sentinel_imagery(time_interval, bounding_box, sensor)
+    timestamps = [date.strftime("%Y-%m-%d") for date in results.get_timestamps()]
+
+    requests_list = []
+    for date in timestamps:
+        requests_list.append(client.request_data(date, bounding_box, parameters))
+    if len(requests_list) == 0:
+        print(f"No images found for {sensor} in the specified time: {time_interval}")
+        return
+    elif len(requests_list) <= 2:
+        bulk = False
+    else:
+        bulk = True
+    client.download_data(requests_list)
+    imagery_from_tmp_to_dir(output, name=name, bulk=bulk)
 
 
 def search_and_download_sentinel_imagery(
@@ -42,16 +55,9 @@ def search_and_download_sentinel_imagery(
     """
     Search and download Sentinel imagery
     """
-
-
-    client = SHClient()
-    parameters = SH_PARAMETERS_DICT[sensor]()
-
-    results = search_sentinel_imagery(time_interval, bounding_box, sensor)
-    timestamps = [date.strftime("%Y-%m-%d") for date in results.get_timestamps()]
+    from warnings import warn
 
-
-
-
-
-    imagery_from_tmp_to_dir(output)
+    warn(
+        "The function `search_and_download_sentinel_imagery` has been deprecated and will be removed in future updates. Please use download_satellite_imagery instead."
+    )
+    download_sentinel_imagery(output, time_interval, bounding_box, sensor)
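A hedged usage sketch of the updated `download_sentinel_imagery`, which now accepts an optional `name` used when naming output files; the sensor key, bounding box values, and output folder below are illustrative assumptions, not taken from the diff:

```python
# Sketch only: the exact sensor keys accepted by SH_PARAMETERS_DICT are not shown
# in this diff, so "sentinel-2-l2a" and the bbox ordering are assumptions.
from eotdl.access.download import download_sentinel_imagery

download_sentinel_imagery(
    "data/imagery",                             # destination directory (first positional argument)
    time_interval=["2024-01-01", "2024-01-31"], # dates as strings or datetimes
    bounding_box=[3.70, 42.30, 3.85, 42.40],    # assumed [min_lon, min_lat, max_lon, max_lat]
    sensor="sentinel-2-l2a",                    # assumed sensor key
    name="my_site",                             # new in 2024.6.13: custom filename prefix
)
```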
eotdl/access/sentinelhub/utils.py
CHANGED
@@ -49,7 +49,10 @@ def evaluate_sentinel_parameters(
 
 
 def imagery_from_tmp_to_dir(
-    output_dir: str,
+    output_dir: str,
+    tmp_dir: Optional[str] = "/tmp/sentinelhub",
+    name: Optional[str] = None,
+    bulk: Optional[bool] = False,
 ) -> None:
     """
     Copy imagery from tmp to output dir
@@ -63,10 +66,15 @@ def imagery_from_tmp_to_dir(
     for downloaded_file in downloaded_files:
         request_json = downloaded_file.replace("response.tiff", "request.json")
         metadata = generate_raster_metadata(downloaded_file, request_json)
-        if
-            output_filename =
+        if name and not bulk:
+            output_filename = name
+        elif name and bulk:
+            output_filename = f"{name}_{metadata['acquisition-date']}"
         else:
-
+            if metadata["acquisition-date"]:
+                output_filename = f"{metadata['type']}_{metadata['acquisition-date']}"
+            else:
+                output_filename = metadata["type"]
 
         copyfile(downloaded_file, f"{output_dir}/{output_filename}.tif")
         with open(f"{output_dir}/{output_filename}.json", "w", encoding="utf-8") as f:
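The filename rules introduced here can be summarized as a small standalone sketch; the metadata dictionary shape mirrors the keys used in the diff (`type` and `acquisition-date`), nothing more:

```python
from typing import Optional

def resolve_output_filename(metadata: dict, name: Optional[str] = None, bulk: bool = False) -> str:
    """Mirror of the naming rules added to imagery_from_tmp_to_dir (sketch, not the library code)."""
    if name and not bulk:
        return name                                      # single download, custom name
    if name and bulk:
        return f"{name}_{metadata['acquisition-date']}"  # bulk download, keep dates unique
    if metadata["acquisition-date"]:
        return f"{metadata['type']}_{metadata['acquisition-date']}"
    return metadata["type"]

print(resolve_output_filename({"type": "sentinel-2-l2a", "acquisition-date": "2024-01-15"},
                              name="site", bulk=True))
# -> site_2024-01-15
```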
eotdl/datasets/__init__.py
CHANGED
eotdl/files/__init__.py
CHANGED
eotdl/files/list_files.py
ADDED
@@ -0,0 +1,13 @@
+from ..datasets import retrieve_dataset, retrieve_dataset_files
+from ..models import retrieve_model, retrieve_model_files
+
+def list_files(dataset_or_model_name, version=1):
+    try:
+        dataset = retrieve_dataset(dataset_or_model_name)
+        return retrieve_dataset_files(dataset['id'], version)
+    except Exception as e:
+        try:
+            model = retrieve_model(dataset_or_model_name)
+            return retrieve_model_files(model['id'], version)
+        except Exception as e:
+            raise Exception(f"Dataset or model {dataset_or_model_name} not found.")
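Typical use of the new helper; the name below is a placeholder, and the re-export from `eotdl.files` is assumed from the one-line change to its `__init__.py`:

```python
# Sketch: "EuroSAT-RGB" is a placeholder dataset/model name.
from eotdl.files import list_files  # re-export assumed; eotdl.files.list_files is the defining module

files = list_files("EuroSAT-RGB", version=1)  # tries datasets first, then falls back to models
print(files)
```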
eotdl/models/__init__.py
CHANGED
eotdl/models/download.py
CHANGED
@@ -5,8 +5,9 @@ from tqdm import tqdm
 from ..auth import with_auth
 from .retrieve import retrieve_model, retrieve_model_files
 from ..shared import calculate_checksum
-from ..repos import FilesAPIRepo
+from ..repos import FilesAPIRepo, ModelsAPIRepo
 from .metadata import generate_metadata
+from ..curation.stac import STACDataFrame
 
 
 @with_auth
@@ -46,20 +47,6 @@ def download_model(
     if model["quality"] == 0:
         if file:
             raise NotImplementedError("Downloading a specific file is not implemented")
-            # files = [f for f in model["files"] if f["name"] == file]
-            # if not files:
-            # raise Exception(f"File {file} not found")
-            # if len(files) > 1:
-            # raise Exception(f"Multiple files with name {file} found")
-            # dst_path = download(
-            # model,
-            # model["id"],
-            # file,
-            # files[0]["checksum"],
-            # download_path,
-            # user,
-            # )
-            # return Outputs(dst_path=dst_path)
         model_files = retrieve_model_files(model["id"], version)
         repo = FilesAPIRepo()
         for file in tqdm(model_files, disable=verbose, unit="file"):
@@ -74,41 +61,38 @@
                 file_version,
                 endpoint="models",
             )
-
-
+        if verbose:
+            logger("Generating README.md ...")
+        generate_metadata(download_path, model)
     else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            # return Outputs(dst_path=path)
-    if verbose:
-        logger("Generating README.md ...")
-    generate_metadata(download_path, model)
+        if verbose:
+            logger("Downloading STAC metadata...")
+        repo = ModelsAPIRepo()
+        gdf, error = repo.download_stac(
+            model["id"],
+            user,
+        )
+        if error:
+            raise Exception(error)
+        df = STACDataFrame(gdf)
+        # df.geometry = df.geometry.apply(lambda x: Polygon() if x is None else x)
+        df.to_stac(download_path)
+        # download assets
+        if assets:
+            if verbose:
+                logger("Downloading assets...")
+            repo = FilesAPIRepo()
+            df = df.dropna(subset=["assets"])
+            for row in tqdm(df.iterrows(), total=len(df)):
+                for k, v in row[1]["assets"].items():
+                    href = v["href"]
+                    _, filename = href.split("/download/")
+                    # will overwrite assets with same name :(
+                    repo.download_file_url(
+                        href, filename, f"{download_path}/assets", user
+                    )
+        else:
+            logger("To download assets, set assets=True or -a in the CLI.")
     if verbose:
         logger("Done")
     return download_path
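For context, a hedged sketch of how the reworked download path might be invoked; only the `version`, `assets`, and `file` parameter names are visible in this diff, so the positional model-name argument and everything else about the signature are assumptions:

```python
# Sketch only: "my-stac-model" is a placeholder, and the exact signature of
# download_model beyond `version`, `assets` and `file` is assumed here.
from eotdl.models.download import download_model

path = download_model("my-stac-model", version=1, assets=True)
print("Model downloaded to", path)  # STAC metadata in `path`, assets under `path`/assets
```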
eotdl/models/ingest.py
CHANGED
@@ -2,13 +2,16 @@ from pathlib import Path
 import yaml
 import frontmatter
 import markdown
+from tqdm import tqdm
+import json
 
 from ..auth import with_auth
 from .metadata import Metadata, generate_metadata
-from ..repos import ModelsAPIRepo
+from ..repos import ModelsAPIRepo, FilesAPIRepo
 from ..shared import calculate_checksum
-from ..files import ingest_files
+from ..files import ingest_files, create_new_version
 from .update import update_model
+from ..curation.stac import STACDataFrame
 
 
 def ingest_model(
@@ -17,8 +20,8 @@ def ingest_model(
     path = Path(path)
     if not path.is_dir():
         raise Exception("Path must be a folder")
-
-
+    if "catalog.json" in [f.name for f in path.iterdir()]:
+        return ingest_stac(path / "catalog.json", logger)
     return ingest_folder(path, verbose, logger, force_metadata_update, sync_metadata)
 
 
@@ -101,3 +104,64 @@ def check_metadata(
         generate_metadata(str(folder), dataset)
         return False
     return False
+
+
+def retrieve_stac_model(model_name, user):
+    repo = ModelsAPIRepo()
+    data, error = repo.retrieve_model(model_name)
+    # print(data, error)
+    if data and data["uid"] != user["uid"]:
+        raise Exception("Model already exists.")
+    if error and error == "Model doesn't exist":
+        # create model
+        data, error = repo.create_stac_model(model_name, user)
+        # print(data, error)
+        if error:
+            raise Exception(error)
+        data["id"] = data["model_id"]
+    return data["id"]
+
+
+@with_auth
+def ingest_stac(stac_catalog, logger=None, user=None):
+    repo, files_repo = ModelsAPIRepo(), FilesAPIRepo()
+    # load catalog
+    logger("Loading STAC catalog...")
+    df = STACDataFrame.from_stac_file(stac_catalog)
+    catalog = df[df["type"] == "Catalog"]
+    assert len(catalog) == 1, "STAC catalog must have exactly one root catalog"
+    dataset_name = catalog.id.iloc[0]
+    # retrieve dataset (create if doesn't exist)
+    model_id = retrieve_stac_model(dataset_name, user)
+    # create new version
+    version = create_new_version(repo, model_id, user)
+    logger("New version created, version: " + str(version))
+    df2 = df.dropna(subset=["assets"])
+    for row in tqdm(df2.iterrows(), total=len(df2)):
+        try:
+            for k, v in row[1]["assets"].items():
+                data, error = files_repo.ingest_file(
+                    v["href"],
+                    model_id,
+                    user,
+                    calculate_checksum(v["href"]),  # is always absolute?
+                    "models",
+                    version,
+                )
+                if error:
+                    raise Exception(error)
+                file_url = (
+                    f"{repo.url}models/{data['model_id']}/download/{data['filename']}"
+                )
+                df.loc[row[0], "assets"][k]["href"] = file_url
+        except Exception as e:
+            logger(f"Error uploading asset {row[0]}: {e}")
+            break
+    # ingest the STAC catalog into geodb
+    logger("Ingesting STAC catalog...")
+    data, error = repo.ingest_stac(json.loads(df.to_json()), model_id, user)
+    if error:
+        # TODO: delete all assets that were uploaded
+        raise Exception(error)
+    logger("Done")
+    return
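A minimal sketch of the new ingestion entry point; the folder path is a placeholder, and the remaining keyword arguments of `ingest_model` (not shown in this diff) are assumed to have workable defaults:

```python
# Sketch: "my_model/" is a placeholder folder. If it contains a catalog.json,
# ingest_model now routes to ingest_stac; otherwise the regular folder ingestion runs.
from eotdl.models.ingest import ingest_model

ingest_model("my_model/")
```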
eotdl/repos/FilesAPIRepo.py
CHANGED
@@ -2,6 +2,7 @@ import requests
 import os
 from tqdm import tqdm
 import hashlib
+from io import BytesIO
 
 from ..repos import APIRepo
 
@@ -189,3 +190,11 @@ class FilesAPIRepo(APIRepo):
             headers=self.generate_headers(user),
         )
         return self.format_response(r)
+
+    def get_file_stream(self, dataset_id, filename, user, version=None):
+        url = self.url + f"datasets/{dataset_id}/download/{filename}"
+        if version is not None:
+            url += "?version=" + str(version)
+        headers = self.generate_headers(user)
+        response = requests.get(url, headers=headers, stream=True)
+        return BytesIO(response.content)
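A hedged sketch of how the new `get_file_stream` helper could be wrapped; the dataset id, filename, and `user` dict are placeholders (the user object is whatever eotdl's auth layer normally supplies to these repos):

```python
from io import BytesIO
from eotdl.repos import FilesAPIRepo

def read_dataset_file(dataset_id: str, filename: str, user: dict, version: int = 1) -> bytes:
    """Fetch one dataset file into memory via the new helper (sketch; placeholder arguments)."""
    repo = FilesAPIRepo()
    stream: BytesIO = repo.get_file_stream(dataset_id, filename, user, version=version)
    return stream.read()
```

Note that, despite the `stream=True` request, the helper buffers the whole response into a `BytesIO` before returning it.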
eotdl/repos/ModelsAPIRepo.py
CHANGED
@@ -1,4 +1,5 @@
 import requests
+import geopandas as gpd
 
 from ..repos import APIRepo
 
@@ -53,3 +54,27 @@ class ModelsAPIRepo(APIRepo):
             headers=self.generate_headers(user),
         )
         return self.format_response(response)
+
+    def create_stac_model(self, name, user):
+        response = requests.post(
+            self.url + "models/stac",
+            json={"name": name},
+            headers=self.generate_headers(user),
+        )
+        return self.format_response(response)
+
+    def ingest_stac(self, stac_json, model_id, user):
+        response = requests.put(
+            self.url + f"models/stac/{model_id}",
+            json={"stac": stac_json},
+            headers=self.generate_headers(user),
+        )
+        return self.format_response(response)
+
+    def download_stac(self, model_id, user):
+        url = self.url + "models/" + model_id + "/download"
+        headers = self.generate_headers(user)
+        response = requests.get(url, headers=headers)
+        if response.status_code != 200:
+            return None, response.json()["detail"]
+        return gpd.GeoDataFrame.from_features(response.json()["features"]), None
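And a similar sketch around `download_stac`, which returns a `(GeoDataFrame, error)` pair; the model id and `user` dict are placeholders:

```python
import geopandas as gpd
from eotdl.repos import ModelsAPIRepo

def fetch_model_stac(model_id: str, user: dict) -> gpd.GeoDataFrame:
    """Return a model's STAC records as a GeoDataFrame, raising if the API reports an error (sketch)."""
    repo = ModelsAPIRepo()
    gdf, error = repo.download_stac(model_id, user)
    if error:
        raise Exception(error)
    return gdf
```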
{eotdl-2024.4.25.dist-info → eotdl-2024.6.13.dist-info}/RECORD
CHANGED
@@ -1,16 +1,16 @@
-eotdl/__init__.py,sha256=
+eotdl/__init__.py,sha256=ThfWXF_wVGcoWrP25NDPXeCmuGEi8P8FKQkVKe_JZZA,27
 eotdl/access/__init__.py,sha256=jbyjD7BRGJURlTNmtcbBBhw3Xk4EiZvkqmEykM-bJ1k,231
 eotdl/access/airbus/__init__.py,sha256=G_kkRS9eFjXbQ-aehmTLXeAxh7zpAxz_rgB7J_w0NRg,107
 eotdl/access/airbus/client.py,sha256=zjfgB_NTsCCIszoQesYkyLJgheKg-eTh28vbleXYxfw,12018
 eotdl/access/airbus/parameters.py,sha256=Z8XIrxG5wAOuOoH-fkdKfdNMEMLFp6PaxJN7v4MefMI,1009
 eotdl/access/airbus/utils.py,sha256=oh_N1Rn4fhcvUgNPpH2QzVvpe4bA0gqRgNguzRVqUps,652
-eotdl/access/download.py,sha256=
+eotdl/access/download.py,sha256=DgemJKafNOlCUVW8OxpSP4br9ij5F1iSrSD-x0B5qFU,1845
 eotdl/access/search.py,sha256=sO2hml6JLK30DncNUqTWq16zy9LvRiWC6wtt5bNRzrI,633
 eotdl/access/sentinelhub/__init__.py,sha256=YpvaUBTRXM26WrXipo51ZUBCDv9WjRIdT8l1Pklpt_M,238
 eotdl/access/sentinelhub/client.py,sha256=g40avqlUpIa-WLjD7tK8CL8_SohBA2v3m8NZ0KbIFxc,4098
 eotdl/access/sentinelhub/evalscripts.py,sha256=m6cnZ6ryXHgdH2B7RDVSlDHXWfvKi7HMGkTHXEcJsTw,4142
 eotdl/access/sentinelhub/parameters.py,sha256=Ni3Lqx1bLVcMzgmnuayDS00BLDm7SuM1FExVdGafopI,2061
-eotdl/access/sentinelhub/utils.py,sha256=
+eotdl/access/sentinelhub/utils.py,sha256=X9Q1YvErBdMsRKszXyaOaG6ZMvPdM2Nl_0SH-dWSFo0,3560
 eotdl/auth/__init__.py,sha256=OuGNfJQ-8Kymn4zIywlHQfImEO8DJMJIwOwTQm-u_dc,99
 eotdl/auth/auth.py,sha256=EjbVFREA2H0sjFJhVqjFZrwjKPzxRJ2x83MTjizpRBs,2029
 eotdl/auth/errors.py,sha256=E1lv3Igk--J-SOgNH18i8Xx9bXrrMyBSHKt_CAUmGPo,308
@@ -42,17 +42,18 @@ eotdl/curation/stac/extensions/sar.py,sha256=Akw3_5brY_x2yU094nSLbv0E6M9jnAeUC0V
 eotdl/curation/stac/extent.py,sha256=Jb3K4v59eu_h5t429r0762o0zG_LA50iEE-abWNL0e0,5108
 eotdl/curation/stac/parsers.py,sha256=H5IukLA61mpLojeuhWNQdiZk2eiYHAfpJBFxmjdGDso,1529
 eotdl/curation/stac/stac.py,sha256=9GdiB1mV8iyREvXZoJmNJKQGawWyK3h9Eu0hJX5u6O4,13180
-eotdl/datasets/__init__.py,sha256=
+eotdl/datasets/__init__.py,sha256=xNbkUjqI1mf0TGjy52tpZtr5DnWNX5EVgyM04P4sU3Y,175
 eotdl/datasets/download.py,sha256=JSOi7OKiENYQ3TRVj2WsC1CPYD22VlC9W5kYHPfx8Vs,3667
 eotdl/datasets/ingest.py,sha256=Ukiah6TapD1oBE0AxUTaEFf-k3hAMRrKr4cXVlVW-_Y,5806
 eotdl/datasets/metadata.py,sha256=qonblTDGr4IZvJaiIc2rh7qwP76whEA72DorSVqZxd4,1457
 eotdl/datasets/retrieve.py,sha256=DJz5K1bCLizg9YNwBnhHMFzcxMXar2socYkFONdSL4c,1041
 eotdl/datasets/update.py,sha256=x-rpfxnavn9X-7QYkFMGtbn1b3bKmAZydOeS7Tjr5AQ,386
-eotdl/files/__init__.py,sha256=
+eotdl/files/__init__.py,sha256=2zfAxgLHmU_jWq_3emnfPXsX-R20gSt-yZX0bPa9h0g,87
 eotdl/files/ingest.py,sha256=dgjZfd-ACCKradDo2B02CPahwEhFtWvnKvTm372K5eo,6185
-eotdl/
-eotdl/models/
-eotdl/models/
+eotdl/files/list_files.py,sha256=k4OgdbQ7u6tUEE9nJZGXw5s5HtvG0ZApOVTy0KbfTqs,519
+eotdl/models/__init__.py,sha256=-PvGWG0iSRNBqeFWpxol12dYw-QodXjR81n-JX3x6zI,146
+eotdl/models/download.py,sha256=4dgxE9ytT8QqiCyx1r19vL5UASfttUN_mCPKhFwMTfs,3410
+eotdl/models/ingest.py,sha256=8xhGlsADi5dZSNbph4WWMk0cs2J_StxNCYRkSjMhUtg,5747
 eotdl/models/metadata.py,sha256=S5bpIB4e2pivDnReszJKC3bYBZcaHu-KMYOc3AwHbQ4,1443
 eotdl/models/retrieve.py,sha256=-Ij7dT4J1p7MW4n13OlPB9OW4tBaBXPwk9dW8IuCZPc,664
 eotdl/models/update.py,sha256=4FWeD95cXvRpefRjw3Foqb30e30otxqWUZ6nQM9cbmM,374
@@ -60,8 +61,8 @@ eotdl/repos/APIRepo.py,sha256=dJNdX3atBpug0FZNako7uyom25iccNPQrnoIRTIZEXE,791
 eotdl/repos/AuthAPIRepo.py,sha256=vYCqFawe3xUm2cx4SqVXCvzl8J_sr9rs_MkipYC0bXE,957
 eotdl/repos/AuthRepo.py,sha256=jpzzhINCcDZHRCyrPDsp49h17IlXp2HvX3BB3f5cnb4,1154
 eotdl/repos/DatasetsAPIRepo.py,sha256=rKqSe-UjJOlD0Kbypu4Gs5kx2mzUD7TY05gv2vgUTv4,2660
-eotdl/repos/FilesAPIRepo.py,sha256=
-eotdl/repos/ModelsAPIRepo.py,sha256=
+eotdl/repos/FilesAPIRepo.py,sha256=cS6CFTkSYIXcefHEeLt7I69_EyyO-jhoAwQ7UWYlLkc,7171
+eotdl/repos/ModelsAPIRepo.py,sha256=79euf5WsfUxG5KSIGhKT8T7kSl-NtISwxvqHnck-bq0,2616
 eotdl/repos/__init__.py,sha256=WvX5TP49k7yYb5dWWNjv5kzbdluO3dJ4LqjQxRIOUVc,222
 eotdl/shared/__init__.py,sha256=mF7doJC8Z5eTPmB01UQvPivThZac32DRY33T6qshXfg,41
 eotdl/shared/checksum.py,sha256=4IB6N9jRO0chMDNJzpdnFDhC9wcFF9bO5oHq2HodcHw,479
@@ -72,7 +73,7 @@ eotdl/tools/paths.py,sha256=yWhOtVxX4NxrDrrBX2fuye5N1mAqrxXFy_eA7dffd84,1152
 eotdl/tools/stac.py,sha256=ovXdrPm4Sn9AAJmrP88WnxDmq2Ut-xPoscjphxz3Iyo,5763
 eotdl/tools/time_utils.py,sha256=qJ3-rk1I7ne722SLfAP6-59kahQ0vLQqIf9VpOi0Kpg,4691
 eotdl/tools/tools.py,sha256=Tl4_v2ejkQo_zyZek8oofJwoYcdVosdOwW1C0lvWaNM,6354
-eotdl-2024.
-eotdl-2024.
-eotdl-2024.
-eotdl-2024.
+eotdl-2024.6.13.dist-info/METADATA,sha256=cZRWhvu3uUpHBRQ3D-6hacu4TDy2iU2K48SL7ORBQOw,4052
+eotdl-2024.6.13.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+eotdl-2024.6.13.dist-info/entry_points.txt,sha256=s6sfxUfRrSX2IP2UbrzTFTvRCtLgw3_OKcHlOKf_5F8,39
+eotdl-2024.6.13.dist-info/RECORD,,
{eotdl-2024.4.25.dist-info → eotdl-2024.6.13.dist-info}/WHEEL
File without changes
{eotdl-2024.4.25.dist-info → eotdl-2024.6.13.dist-info}/entry_points.txt
File without changes