eotdl 2023.6.14.post7__tar.gz → 2023.6.14.post9__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/PKG-INFO +1 -1
  2. eotdl-2023.6.14.post9/eotdl/auth/__init__.py +1 -0
  3. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/auth/main.py +11 -4
  4. eotdl-2023.6.14.post9/eotdl/cli.py +11 -0
  5. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/commands/auth.py +2 -2
  6. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/commands/datasets.py +4 -7
  7. eotdl-2023.6.14.post9/eotdl/datasets/__init__.py +4 -0
  8. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/datasets/download.py +5 -3
  9. eotdl-2023.6.14.post9/eotdl/datasets/ingest.py +32 -0
  10. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/datasets/retrieve.py +6 -3
  11. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/datasets/update.py +5 -3
  12. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/repos/APIRepo.py +3 -1
  13. eotdl-2023.6.14.post9/eotdl/src/repos/AuthRepo.py +36 -0
  14. eotdl-2023.6.14.post9/eotdl/src/usecases/auth/__init__.py +3 -0
  15. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/datasets/DownloadDataset.py +2 -2
  16. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/datasets/IngestDataset.py +1 -1
  17. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/datasets/IngestLargeDataset.py +2 -2
  18. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/datasets/UpdateDataset.py +2 -2
  19. eotdl-2023.6.14.post9/eotdl/src/usecases/datasets/__init__.py +6 -0
  20. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/pyproject.toml +2 -2
  21. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/setup.py +2 -2
  22. eotdl-2023.6.14.post7/eotdl/auth/__init__.py +0 -1
  23. eotdl-2023.6.14.post7/eotdl/datasets/__init__.py +0 -5
  24. eotdl-2023.6.14.post7/eotdl/datasets/ingest.py +0 -36
  25. eotdl-2023.6.14.post7/eotdl/main.py +0 -16
  26. eotdl-2023.6.14.post7/eotdl/src/repos/AuthRepo.py +0 -31
  27. eotdl-2023.6.14.post7/eotdl/src/usecases/auth/__init__.py +0 -0
  28. eotdl-2023.6.14.post7/eotdl/src/usecases/datasets/__init__.py +0 -0
  29. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/README.md +0 -0
  30. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/__init__.py +0 -0
  31. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/access/__init__.py +0 -0
  32. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/access/parameters.py +0 -0
  33. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/access/sentinelhub/__init__.py +0 -0
  34. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/access/sentinelhub/client.py +0 -0
  35. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/access/sentinelhub/utils.py +0 -0
  36. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/commands/__init__.py +0 -0
  37. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/curation/__init__.py +0 -0
  38. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/curation/formatters.py +0 -0
  39. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/curation/metadata.py +0 -0
  40. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/curation/stac/__init__.py +0 -0
  41. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/curation/stac/dataframe.py +0 -0
  42. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/curation/stac/extensions.py +0 -0
  43. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/curation/stac/parsers.py +0 -0
  44. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/curation/stac/stac.py +0 -0
  45. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/curation/stac/utils.py +0 -0
  46. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/hello.py +0 -0
  47. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/__init__.py +0 -0
  48. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/errors/__init__.py +0 -0
  49. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/errors/auth.py +0 -0
  50. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/repos/__init__.py +0 -0
  51. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/__init__.py +0 -0
  52. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/auth/Auth.py +0 -0
  53. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/auth/IsLogged.py +0 -0
  54. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/auth/Logout.py +0 -0
  55. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/datasets/IngestLargeDatasetParallel.py +0 -0
  56. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/datasets/RetrieveDataset.py +0 -0
  57. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/usecases/datasets/RetrieveDatasets.py +0 -0
  58. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/src/utils.py +0 -0
  59. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/tools/__init__.py +0 -0
  60. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/tools/sen12floods/__init__.py +0 -0
  61. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/tools/sen12floods/tools.py +0 -0
  62. {eotdl-2023.6.14.post7 → eotdl-2023.6.14.post9}/eotdl/tools/stac.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: eotdl
3
- Version: 2023.6.14.post7
3
+ Version: 2023.6.14.post9
4
4
  Summary: Earth Observation Training Data Lab
5
5
  License: MIT
6
6
  Author: EarthPulse
@@ -0,0 +1 @@
1
+ from .main import is_logged, auth, generate_logout_url, with_auth
@@ -1,7 +1,5 @@
1
- from src.repos import AuthRepo, APIRepo
2
- from src.usecases.auth.IsLogged import IsLogged
3
- from src.usecases.auth.Auth import Auth
4
- from src.usecases.auth.Logout import Logout
1
+ from ..src.repos import AuthRepo, APIRepo
2
+ from ..src.usecases.auth import IsLogged, Auth, Logout
5
3
 
6
4
 
7
5
  def is_logged():
@@ -31,3 +29,12 @@ def generate_logout_url():
31
29
  inputs = _logout.Inputs()
32
30
  outputs = _logout(inputs)
33
31
  return outputs.logout_url
32
+
33
+
34
+ # auth decorator
35
+ def with_auth(func):
36
+ def wrapper(*args, **kwargs):
37
+ user = auth()
38
+ return func(*args, **kwargs, user=user)
39
+
40
+ return wrapper
@@ -0,0 +1,11 @@
1
+ import typer
2
+ from .commands import auth, datasets
3
+
4
+ app = typer.Typer()
5
+
6
+ app.add_typer(auth.app, name="auth")
7
+ app.add_typer(datasets.app, name="datasets")
8
+
9
+
10
+ if __name__ == "__main__":
11
+ app()
@@ -1,6 +1,6 @@
1
1
  import typer
2
- from auth import is_logged, auth, generate_logout_url
3
- from src.errors.auth import LoginError
2
+ from ..auth import is_logged, auth, generate_logout_url
3
+ from ..src.errors.auth import LoginError
4
4
 
5
5
  app = typer.Typer()
6
6
 
@@ -1,12 +1,12 @@
1
1
  import typer
2
- from datasets import (
2
+ from ..datasets import (
3
3
  retrieve_datasets,
4
4
  download_dataset,
5
5
  update_dataset,
6
6
  ingest_large_dataset,
7
7
  # ingest_large_dataset_parallel,
8
8
  )
9
- from auth import auth
9
+ from .auth import auth
10
10
 
11
11
  app = typer.Typer()
12
12
 
@@ -29,7 +29,6 @@ def get(name: str, path: str = None):
29
29
  path: Path to download the dataset to
30
30
  """
31
31
  try:
32
- user = auth()
33
32
  dst_path = download_dataset(name, path, user, typer.echo)
34
33
  typer.echo(f"Dataset {name} downloaded to {dst_path}")
35
34
  except Exception as e:
@@ -49,10 +48,9 @@ def ingest(
49
48
  n: Name of the dataset
50
49
  """
51
50
  try:
52
- user = auth()
53
51
  # if p:
54
52
  # ingest_large_dataset_parallel(name, path, user, p, typer.echo)
55
- ingest_large_dataset(name, path, user, typer.echo)
53
+ ingest_large_dataset(name, path, typer.echo)
56
54
  typer.echo(f"Dataset {name} ingested")
57
55
  except Exception as e:
58
56
  typer.echo(e)
@@ -70,8 +68,7 @@ def update(
70
68
  path: Path to dataset to ingest
71
69
  """
72
70
  try:
73
- user = auth()
74
- update_dataset(name, path, user, typer.echo)
71
+ update_dataset(name, path, typer.echo)
75
72
  typer.echo(f"Dataset {name} updated")
76
73
  except Exception as e:
77
74
  typer.echo(e)
@@ -0,0 +1,4 @@
1
+ from .ingest import ingest_dataset, ingest_large_dataset, ingest_q0, ingest_q1
2
+ from .download import download_dataset
3
+ from .retrieve import retrieve_datasets, retrieve_dataset, list_datasets
4
+ from .update import update_dataset
@@ -1,9 +1,11 @@
1
- from src.repos import APIRepo
2
- from src.usecases.datasets.DownloadDataset import DownloadDataset
1
+ from ..src.repos import APIRepo
2
+ from ..src.usecases.datasets import DownloadDataset
3
3
  from .retrieve import retrieve_dataset
4
+ from ..auth import with_auth
4
5
 
5
6
 
6
- def download_dataset(name, path, user, logger):
7
+ @with_auth
8
+ def download_dataset(name, path=None, logger=None, user=None):
7
9
  dataset = retrieve_dataset(name)
8
10
  dataset_id = dataset["id"]
9
11
  checksum = dataset["checksum"]
@@ -0,0 +1,32 @@
1
+ from ..src.repos import APIRepo
2
+ from ..src.usecases.datasets import IngestDataset, IngestLargeDataset
3
+ from ..auth import with_auth
4
+
5
+
6
+ @with_auth
7
+ def ingest_dataset(name, description, path, logger=None, user=None):
8
+ api_repo = APIRepo()
9
+ ingest = IngestDataset(
10
+ api_repo,
11
+ )
12
+ inputs = ingest.Inputs(name=name, description=description, path=path, user=user)
13
+ outputs = ingest(inputs)
14
+ return outputs.dataset
15
+
16
+
17
+ @with_auth
18
+ def ingest_large_dataset(name, path, logger=None, user=None):
19
+ api_repo = APIRepo()
20
+ ingest = IngestLargeDataset(api_repo, logger)
21
+ inputs = ingest.Inputs(name=name, path=path, user=user)
22
+ outputs = ingest(inputs)
23
+ return outputs.dataset
24
+
25
+
26
+ def ingest_q0(dataset, path):
27
+ return ingest_large_dataset(dataset, path)
28
+
29
+
30
+ def ingest_q1(dataset, stac_catalog):
31
+ print("holas")
32
+ return
@@ -1,6 +1,9 @@
1
- from src.repos import APIRepo
2
- from src.usecases.datasets.RetrieveDatasets import RetrieveDatasets
3
- from src.usecases.datasets.RetrieveDataset import RetrieveDataset
1
+ from ..src.repos import APIRepo
2
+ from ..src.usecases.datasets import RetrieveDatasets, RetrieveDataset
3
+
4
+
5
+ def list_datasets():
6
+ return retrieve_datasets()
4
7
 
5
8
 
6
9
  def retrieve_datasets():
@@ -1,8 +1,10 @@
1
- from src.repos import APIRepo
2
- from src.usecases.datasets.UpdateDataset import UpdateDataset
1
+ from ..src.repos import APIRepo
2
+ from ..src.usecases.datasets import UpdateDataset
3
+ from ..auth import with_auth
3
4
 
4
5
 
5
- def update_dataset(name, path, user, logger):
6
+ @with_auth
7
+ def update_dataset(name, path, logger=None, user=None):
6
8
  api_repo = APIRepo()
7
9
  ingest = UpdateDataset(api_repo, logger)
8
10
  inputs = ingest.Inputs(name=name, path=path, user=user)
@@ -35,7 +35,7 @@ class APIRepo:
35
35
  url = self.url + "datasets/" + dataset_id + "/download"
36
36
  headers = {"Authorization": "Bearer " + id_token}
37
37
  if path is None:
38
- path = str(Path.home()) + "/.etodl/datasets"
38
+ path = str(Path.home()) + "/.eotdl/datasets"
39
39
  os.makedirs(path, exist_ok=True)
40
40
  with requests.get(url, headers=headers, stream=True) as r:
41
41
  r.raise_for_status()
@@ -46,6 +46,8 @@ class APIRepo:
46
46
  )
47
47
  filename = r.headers.get("content-disposition").split("filename=")[1][1:-1]
48
48
+ path = f"{path}/{filename}"
49
+ if os.path.exists(path):
50
+ raise Exception("File already exists")
49
51
  with open(path, "wb") as f:
50
52
  for chunk in r.iter_content(block_size):
51
53
  progress_bar.update(len(chunk))
@@ -0,0 +1,36 @@
1
+ from pathlib import Path
2
+ import os
3
+ import json
4
+ import jwt
5
+
6
+
7
+ class AuthRepo:
8
+ def __init__(self):
9
+ self.algorithms = ["RS256"]
10
+ self.home = str(Path.home())
11
+ self.creds_path = self.home + "/.eotdl/creds.json"
12
+
13
+ def save_creds(self, data):
14
+ os.makedirs(self.home + "/.eotdl", exist_ok=True)
15
+ with open(self.creds_path, "w") as f:
16
+ json.dump(data, f)
17
+ return self.creds_path
18
+
19
+ def load_creds(self):
20
+ if os.path.exists(self.creds_path):
21
+ with open(self.creds_path, "r") as f:
22
+ creds = json.load(f)
23
+ user = self.decode_token(creds)
24
+ user["id_token"] = creds["id_token"]
25
+ return user
26
+ return None
27
+
28
+ def decode_token(self, token_data):
29
+ return jwt.decode(
30
+ token_data["id_token"],
31
+ algorithms=self.algorithms,
32
+ options={"verify_signature": False},
33
+ )
34
+
35
+ def logout(self):
36
+ os.remove(self.creds_path)
@@ -0,0 +1,3 @@
1
+ from .Auth import Auth
2
+ from .IsLogged import IsLogged
3
+ from .Logout import Logout
@@ -1,11 +1,11 @@
1
1
  from pydantic import BaseModel
2
- from src.utils import calculate_checksum
2
+ from ....src.utils import calculate_checksum
3
3
 
4
4
 
5
5
  class DownloadDataset:
6
6
  def __init__(self, repo, logger):
7
7
  self.repo = repo
8
- self.logger = logger
8
+ self.logger = logger if logger else print
9
9
 
10
10
  class Inputs(BaseModel):
11
11
  dataset: str
@@ -4,7 +4,7 @@ from pydantic import BaseModel
4
4
  class IngestDataset:
5
5
  def __init__(self, repo, logger):
6
6
  self.repo = repo
7
- self.logger = logger
7
+ self.logger = logger if logger else print
8
8
 
9
9
  class Inputs(BaseModel):
10
10
  name: str
@@ -1,11 +1,11 @@
1
1
  from pydantic import BaseModel
2
- from src.utils import calculate_checksum
2
+ from ....src.utils import calculate_checksum
3
3
 
4
4
 
5
5
  class IngestLargeDataset:
6
6
  def __init__(self, repo, logger):
7
7
  self.repo = repo
8
- self.logger = logger
8
+ self.logger = logger if logger else print
9
9
 
10
10
  class Inputs(BaseModel):
11
11
  name: str
@@ -1,11 +1,11 @@
1
1
  from pydantic import BaseModel
2
- from src.utils import calculate_checksum
2
+ from ....src.utils import calculate_checksum
3
3
 
4
4
 
5
5
  class UpdateDataset:
6
6
  def __init__(self, repo, logger):
7
7
  self.repo = repo
8
- self.logger = logger
8
+ self.logger = logger if logger else print
9
9
 
10
10
  class Inputs(BaseModel):
11
11
  name: str
@@ -0,0 +1,6 @@
1
+ from .DownloadDataset import DownloadDataset
2
+ from .IngestDataset import IngestDataset
3
+ from .IngestLargeDataset import IngestLargeDataset
4
+ from .RetrieveDataset import RetrieveDataset
5
+ from .RetrieveDatasets import RetrieveDatasets
6
+ from .UpdateDataset import UpdateDataset
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "eotdl"
3
- version = "2023.06.14-7"
3
+ version = "2023.06.14-9"
4
4
  description = "Earth Observation Training Data Lab"
5
5
  authors = ["EarthPulse <it@earthpulse.es>"]
6
6
  license = "MIT"
@@ -8,7 +8,7 @@ readme = "README.md"
8
8
  packages = [{include = "eotdl"}]
9
9
 
10
10
  [tool.poetry.scripts]
11
- eotdl = "eotdl.main:app"
11
+ eotdl = "eotdl.cli:app"
12
12
 
13
13
  [tool.poetry.dependencies]
14
14
  python = "^3.8"
@@ -30,11 +30,11 @@ install_requires = \
30
30
  'typer[all]>=0.7.0,<0.8.0']
31
31
 
32
32
  entry_points = \
33
- {'console_scripts': ['eotdl = eotdl.main:app']}
33
+ {'console_scripts': ['eotdl = eotdl.cli:app']}
34
34
 
35
35
  setup_kwargs = {
36
36
  'name': 'eotdl',
37
- 'version': '2023.6.14.post7',
37
+ 'version': '2023.6.14.post9',
38
38
  'description': 'Earth Observation Training Data Lab',
39
39
  'long_description': '# eotdl \n\nThis is the main library and CLI for EOTDL.\n\n',
40
40
  'author': 'EarthPulse',
@@ -1 +0,0 @@
1
- from .main import is_logged, auth, generate_logout_url
@@ -1,5 +0,0 @@
1
- from .ingest import ingest_dataset, ingest_large_dataset, ingest_q0, ingest_q1
2
-
3
- # from .download import download_dataset
4
- # from .retrieve import retrieve_datasets, retrieve_dataset
5
- # from .update import update_dataset
@@ -1,36 +0,0 @@
1
- # para que la lib funcione, necesito que sea relativo
2
- # pero para la cli, necesito que sea absoluto :(
3
-
4
- from ..src.repos import APIRepo
5
- from ..src.usecases.datasets.IngestDataset import IngestDataset
6
-
7
- # from ..src.usecases.datasets.IngestLargeDataset import IngestLargeDataset
8
-
9
-
10
- def ingest_q0(dataset, file):
11
- print("hola")
12
- return
13
-
14
-
15
- def ingest_q1(dataset, stac_catalog):
16
- print("hola")
17
- return
18
-
19
-
20
- def ingest_dataset(name, description, path, user, logger):
21
- api_repo = APIRepo()
22
- ingest = IngestDataset(
23
- api_repo,
24
- )
25
- inputs = ingest.Inputs(name=name, description=description, path=path, user=user)
26
- outputs = ingest(inputs)
27
- return outputs.dataset
28
-
29
-
30
- def ingest_large_dataset(name, path, user, logger):
31
- # api_repo = APIRepo()
32
- # ingest = IngestLargeDataset(api_repo, logger)
33
- # inputs = ingest.Inputs(name=name, path=path, user=user)
34
- # outputs = ingest(inputs)
35
- # return outputs.dataset
36
- return
@@ -1,16 +0,0 @@
1
- import typer
2
- import os
3
- import sys
4
-
5
- # Add the eotdl_cli directory to the Python path
6
- eotdl_dir = os.path.dirname(os.path.realpath(__file__))
7
- sys.path.append(os.path.join(eotdl_dir))
8
-
9
- from commands import auth, datasets
10
-
11
- app = typer.Typer()
12
- app.add_typer(auth.app, name="auth")
13
- app.add_typer(datasets.app, name="datasets")
14
-
15
- if __name__ == "__main__":
16
- app()
@@ -1,31 +0,0 @@
1
- from pathlib import Path
2
- import os
3
- import json
4
- import jwt
5
-
6
- class AuthRepo():
7
- def __init__(self):
8
- self.algorithms = ['RS256']
9
- self.home = str(Path.home())
10
- self.creds_path = self.home + '/.etodl/creds.json'
11
-
12
- def save_creds(self, data):
13
- os.makedirs(self.home + '/.etodl', exist_ok=True)
14
- with open(self.creds_path, 'w') as f:
15
- json.dump(data, f)
16
- return self.creds_path
17
-
18
- def load_creds(self):
19
- if os.path.exists(self.creds_path):
20
- with open(self.creds_path, 'r') as f:
21
- creds = json.load(f)
22
- user = self.decode_token(creds)
23
- user['id_token'] = creds['id_token']
24
- return user
25
- return None
26
-
27
- def decode_token(self, token_data):
28
- return jwt.decode(token_data['id_token'], algorithms=self.algorithms, options={"verify_signature": False})
29
-
30
- def logout(self):
31
- os.remove(self.creds_path)
File without changes