satcube 0.1.13__tar.gz → 0.1.15__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of satcube might be problematic.

--- PKG-INFO
+++ PKG-INFO
@@ -1,29 +1,25 @@
  Metadata-Version: 2.1
  Name: satcube
- Version: 0.1.13
+ Version: 0.1.15
  Summary: A Python package to create cloud-free monthly composites by fusing Landsat and Sentinel-2 data.
  Home-page: https://github.com/IPL-UV/satcube
  Author: Cesar Aybar
  Author-email: fcesar.aybar@uv.es
- Requires-Python: >=3.10,<4.0
+ Requires-Python: >=3.9
  Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Provides-Extra: full
- Requires-Dist: cubexpress (>=0.1.7)
- Requires-Dist: earthengine-api (>=1.5.12)
+ Requires-Dist: cubexpress (>=0.1.10)
  Requires-Dist: mlstac (>=0.4.0)
- Requires-Dist: numpy (>=1.25.0)
- Requires-Dist: pandas (>=2.0.0)
  Requires-Dist: phicloudmask (>=0.0.2)
- Requires-Dist: pydantic (>=2.8.0)
- Requires-Dist: rasterio (>=1.3.9)
  Requires-Dist: requests (>=2.26.0)
- Requires-Dist: satalign (>=0.1.11)
+ Requires-Dist: satalign (>=0.1.9)
  Requires-Dist: scikit-learn (>=1.2.0)
  Requires-Dist: segmentation-models-pytorch (>=0.3.0)
- Requires-Dist: utm (>=0.7.0)
+ Requires-Dist: tqdm (>=4.67.1)
  Requires-Dist: xarray (>=2023.7.0)
  Project-URL: Documentation, https://ipl-uv.github.io/satcube/
  Project-URL: Repository, https://github.com/IPL-UV/satcube
--- pyproject.toml
+++ pyproject.toml
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "satcube"
- version = "0.1.13"
+ version = "0.1.15"
  description = "A Python package to create cloud-free monthly composites by fusing Landsat and Sentinel-2 data."
  authors = ["Cesar Aybar <fcesar.aybar@uv.es>"]
  repository = "https://github.com/IPL-UV/satcube"
@@ -9,21 +9,16 @@ readme = "README.md"
  packages = [{ include = "satcube" }]

  [tool.poetry.dependencies]
- python = ">=3.10,<4.0"
- cubexpress = ">=0.1.7"
+ python = ">=3.9"
+ cubexpress = ">=0.1.10"
  mlstac = ">=0.4.0"
- satalign = ">=0.1.11"
+ satalign = ">=0.1.9"
  segmentation-models-pytorch = ">=0.3.0"
+ phicloudmask = ">=0.0.2"
  scikit-learn = ">=1.2.0"
- pandas = ">=2.0.0"
- pydantic = ">=2.8.0"
- rasterio = ">=1.3.9"
- earthengine-api = ">=1.5.12"
- numpy = ">=1.25.0"
  requests = ">=2.26.0"
- xarray = ">=2023.7.0"
- utm = ">=0.7.0"
- phicloudmask = ">=0.0.2"
+ xarray = ">=2023.7.0"
+ tqdm = ">=4.67.1"

  [tool.poetry.extras]
  full = ["torch"]
--- /dev/null
+++ satcube/__init__.py
@@ -0,0 +1,9 @@
+ from satcube.cloud_detection import cloud_masking
+ from satcube.download import download
+ from satcube.align import align
+ import importlib.metadata
+ from satcube.objects import SatCubeMetadata
+
+ __all__ = ["cloud_masking", "download", "align", "SatCubeMetadata"]
+ # __version__ = importlib.metadata.version("satcube")
+
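
For orientation, the new public API exported here replaces download_data with download and adds align and SatCubeMetadata. A minimal end-to-end sketch, assuming cubexpress/Earth Engine access is already configured; the coordinates and dates below are illustrative, not taken from the package:

import satcube

# Fetch a Sentinel-2 time series around a point; writes GeoTIFFs plus
# metadata.csv into "raw" and returns a SatCubeMetadata wrapper.
md = satcube.download(
    lon=-76.5, lat=-9.5,
    edge_size=512,
    start="2020-01-01", end="2020-12-31",
    outfolder="raw",
    nworks=4,
)

# Co-register every scene against the clearest one, then mask clouds.
satcube.align(input_dir="raw", output_dir="aligned", nworks=4)
satcube.cloud_masking(input="aligned", output="masked", device="cpu")
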
--- /dev/null
+++ satcube/align.py
@@ -0,0 +1,98 @@
+ from __future__ import annotations
+
+ import pathlib
+ import shutil
+ from concurrent.futures import ThreadPoolExecutor, as_completed
+
+ import numpy as np
+ import pandas as pd
+ import rasterio as rio
+ import satalign
+ from tqdm import tqdm
+
+
+ def process_row(row: pd.Series, reference: np.ndarray, input_dir: pathlib.Path, output_dir: pathlib.Path) -> None:
+     """Co-register a single scene against the reference image."""
+     row_path = input_dir / (row["id"] + ".tif")
+     output_path = output_dir / (row["id"] + ".tif")
+     with rio.open(row_path) as src:
+         row_image = src.read()
+         profile_image = src.profile
+
+     row_image_float = row_image.astype(np.float32) / 10000
+     row_image_float = row_image_float[np.newaxis, ...]
+
+     pcc_model = satalign.LGM(
+         datacube=row_image_float,
+         reference=reference,
+     )
+     image, _ = pcc_model.run_multicore()
+     image = (image * 10000).astype(np.uint16).squeeze()
+
+     with rio.open(output_path, "w", **profile_image) as dst:
+         dst.write(image)
+
+
+ def align(
+     input_dir: str | pathlib.Path = "raw",
+     output_dir: str | pathlib.Path = "aligned",
+     nworks: int = 4,
+     cache: bool = False,
+ ) -> None:
+     """Align every scene listed in metadata.csv against the clearest image."""
+     input_dir = pathlib.Path(input_dir).expanduser().resolve()
+     output_dir = pathlib.Path(output_dir).expanduser().resolve()
+     output_dir.mkdir(parents=True, exist_ok=True)
+
+     metadata_path = input_dir / "metadata.csv"
+     if not metadata_path.exists():
+         raise FileNotFoundError(
+             f"Metadata file not found: {metadata_path}. "
+             "Please run the download step first."
+         )
+     metadata = pd.read_csv(metadata_path)
+
+     if cache:
+         exist_files = [file.stem for file in output_dir.glob("*.tif")]
+         metadata = metadata[~metadata["id"].isin(exist_files)]
+
+     if metadata.empty:
+         return
+
+     # Use the scene with the highest cloud score (cs_cdf) as the reference.
+     id_reference = metadata.sort_values(
+         by=["cs_cdf", "date"],
+         ascending=False,
+     ).iloc[0]["id"]
+
+     reference_path = input_dir / (id_reference + ".tif")
+     with rio.open(reference_path) as ref_src:
+         reference = ref_src.read()
+     reference_float = reference.astype(np.float32) / 10000
+
+     with ThreadPoolExecutor(max_workers=nworks) as executor:
+         futures = {
+             executor.submit(process_row, row, reference_float, input_dir, output_dir)
+             for _, row in metadata.iterrows()
+         }
+         for future in tqdm(
+             as_completed(futures),
+             total=len(futures),
+             desc="Aligning images",
+             unit="image",
+             leave=True,
+         ):
+             try:
+                 future.result()
+             except Exception as e:
+                 print(f"Error processing image: {e}")
+
+     # Carry the metadata file over to the aligned directory.
+     if metadata_path.exists():
+         shutil.copy(metadata_path, output_dir / "metadata.csv")
--- /dev/null
+++ satcube/cloud_detection.py
@@ -0,0 +1,238 @@
+ """Predict cloud masks for Sentinel-2 GeoTIFFs with the SEN2CloudEnsemble model.
+
+ The callable :pyfunc:`cloud_masking` accepts **either** a single ``.tif`` file
+ or a directory tree; in both cases it writes a masked copy of every image (and,
+ optionally, the binary mask) to *output*.
+
+ Example
+ -------
+ >>> from satcube.cloud_detection import cloud_masking
+ >>> cloud_masking("~/s2/input", "~/s2/output", device="cuda")
+ """
+
+ from __future__ import annotations
+
+ import pathlib
+ import shutil
+ import warnings
+ from concurrent.futures import ThreadPoolExecutor, as_completed
+
+ import mlstac
+ import numpy as np
+ import rasterio as rio
+ import torch
+ from rasterio.windows import Window
+ from tqdm import tqdm
+
+ from satcube.utils import define_iteration, DeviceManager
+
+ warnings.filterwarnings(
+     "ignore",
+     message="The secret HF_TOKEN does not exist in your Colab secrets.",
+     category=UserWarning,
+     module=r"huggingface_hub\.utils\._.*",
+ )
+
+
+ def infer_cloudmask(
+     input_path: str | pathlib.Path,
+     output_path: str | pathlib.Path,
+     cloud_model: torch.nn.Module,
+     *,
+     chunk_size: int = 512,
+     overlap: int = 32,
+     device: str = "cpu",
+     save_mask: bool = False,
+     prefix: str = ""
+ ) -> pathlib.Path:
+     """Predict ``input_path`` in overlapping ``chunk_size`` x ``chunk_size``
+     patches, writing only the valid (inner) region of each patch to avoid
+     seam artifacts.
+
+     Partial-overlap logic:
+     - Interior tiles skip ``overlap // 2`` pixels on each side.
+     - Boundary tiles skip only the interior side, so no data is lost at the edges.
+
+     Parameters
+     ----------
+     input_path : Path to the input image.
+     output_path : Path to the output masked image.
+     cloud_model : PyTorch model (already loaded with weights).
+     chunk_size : Size of each tile read from the source image (default 512).
+     overlap : Overlap in pixels between adjacent tiles (default 32).
+     device : "cpu" or "cuda:0".
+     save_mask : If True, also write the binary mask next to the output image.
+     prefix : Progress-reporting prefix (currently unused).
+
+     Returns
+     -------
+     pathlib.Path : The path to the created output image.
+     """
+     input_path = pathlib.Path(input_path)
+     output_path = pathlib.Path(output_path)
+
+     with rio.open(input_path) as src:
+         meta = src.profile
+         if not meta.get("tiled", False):
+             raise ValueError("The input image is not marked as tiled in its metadata.")
+         # The internal blocksize must evenly divide chunk_size.
+         if chunk_size % meta["blockxsize"] != 0 and meta["blockxsize"] <= chunk_size:
+             raise ValueError(
+                 f"Image block size must evenly divide {chunk_size}x{chunk_size}, "
+                 f"got {meta['blockxsize']}x{meta['blockysize']}"
+             )
+         height, width = meta["height"], meta["width"]
+
+     full_mask = np.zeros((height, width), dtype=np.float32)
+     coords = define_iteration((height, width), chunk_size, overlap)
+
+     with rio.open(input_path) as src:
+         for row_off, col_off in coords:
+             window = Window(col_off, row_off, chunk_size, chunk_size)
+             patch = src.read(window=window) / 1e4
+             patch_tensor = torch.from_numpy(patch).float().unsqueeze(0).to(device)
+             # squeeze() drops batch/channel dims so the mask indexes as (H, W).
+             result = cloud_model(patch_tensor).cpu().numpy().astype(np.uint8).squeeze()
+
+             # Compute the valid (inner) region of this patch in the full mask.
+             offset_x = 0 if col_off == 0 else col_off + overlap // 2
+             offset_y = 0 if row_off == 0 else row_off + overlap // 2
+
+             if (offset_x + chunk_size) == width:
+                 length_x = chunk_size
+                 sub_x_start = 0
+             else:
+                 length_x = chunk_size - (overlap // 2)
+                 sub_x_start = overlap // 2 if col_off != 0 else 0
+
+             if (offset_y + chunk_size) == height:
+                 length_y = chunk_size
+                 sub_y_start = 0
+             else:
+                 length_y = chunk_size - (overlap // 2)
+                 sub_y_start = overlap // 2 if row_off != 0 else 0
+
+             full_mask[
+                 offset_y : offset_y + length_y,
+                 offset_x : offset_x + length_x
+             ] = result[
+                 sub_y_start : sub_y_start + length_y,
+                 sub_x_start : sub_x_start + length_x
+             ]
+
+         data = src.read()
+         img_prof = src.profile.copy()
+
+     if save_mask:
+         out_meta = meta.copy()
+         out_meta.update(count=1, dtype="uint8", nodata=255)
+         output_mask = output_path.parent / (output_path.stem + "_mask.tif")
+         with rio.open(output_mask, "w", **out_meta) as dst:
+             dst.write(full_mask.astype(np.uint8), 1)
+
+     masked = data.copy()
+     masked[:, full_mask != 0] = 65535
+     img_prof.update(dtype="uint16", nodata=65535)
+
+     with rio.open(output_path, "w", **img_prof) as dst:
+         dst.write(masked)
+
+     return output_path
+
+
+ def cloud_masking(
+     input: str | pathlib.Path = "raw",
+     output: str | pathlib.Path = "masked",
+     model_path: str | pathlib.Path = "SEN2CloudEnsemble",
+     device: str = "cpu",
+     save_mask: bool = False,
+     nworks: int = 4,
+ ) -> list[pathlib.Path]:
+     """Write cloud-masked Sentinel-2 images.
+
+     Parameters
+     ----------
+     input
+         Path to a single ``.tif`` file **or** a directory containing them.
+     output
+         Destination directory (created if missing).
+     model_path
+         Local directory holding the SEN2CloudEnsemble weights; downloaded if absent.
+     device
+         Torch device for inference, e.g. ``"cpu"`` or ``"cuda:0"``.
+     save_mask
+         If *True*, store the binary mask alongside the masked image.
+     nworks
+         Number of worker threads.
+
+     Returns
+     -------
+     list[pathlib.Path]
+         Paths to the generated masked images.
+     """
+     src = pathlib.Path(input).expanduser().resolve()
+     dst_dir = pathlib.Path(output).expanduser().resolve()
+     dst_dir.mkdir(parents=True, exist_ok=True)
+
+     # Collect files to process -------------------------------------------------
+     tif_paths = []
+     if src.is_dir():
+         tif_paths = [p for p in src.rglob("*.tif")]
+     elif src.is_file() and src.suffix.lower() == ".tif":
+         tif_paths = [src]
+         src = src.parent  # for relative-path bookkeeping below
+     else:
+         raise ValueError(f"Input must be a .tif or directory, got: {src}")
+
+     if not tif_paths:
+         print(f"[cloud_masking] No .tif files found in {src}")
+         return []
+
+     if not pathlib.Path(model_path).exists():
+         mlstac.download(
+             file="https://huggingface.co/tacofoundation/CloudSEN12-models/resolve/main/SEN2CloudEnsemble/mlm.json",
+             output_dir=model_path,
+         )
+
+     model = mlstac.load(model_path)
+     cloud_model = DeviceManager(model, init_device=device).model
+     cloud_model.eval()
+
+     with ThreadPoolExecutor(max_workers=nworks) as executor:
+         futures = {
+             executor.submit(
+                 infer_cloudmask,
+                 input_path=p,
+                 output_path=dst_dir / p.name,
+                 cloud_model=cloud_model,
+                 device=device,
+                 save_mask=save_mask,
+                 prefix=f"[{i+1}/{len(tif_paths)}] ",
+             ): p
+             for i, p in enumerate(tif_paths)
+         }
+
+         for future in tqdm(
+             as_completed(futures),
+             total=len(futures),
+             desc="Cloud Masking",
+             position=0,
+             leave=True,
+         ):
+             p = futures[future]
+             try:
+                 result = future.result()
+                 print(f"{result} processed successfully.")
+             except Exception as e:
+                 print(f"Error processing {p}: {e}")
+
+     # Carry the metadata file over to the output directory.
+     metadata = src / "metadata.csv"
+     if metadata.exists():
+         shutil.copy(metadata, dst_dir / "metadata.csv")
+
+     # Return the generated paths, as documented above.
+     return [dst_dir / p.name for p in tif_paths]
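
To make the seam-avoidance bookkeeping in infer_cloudmask concrete, here is a hand-derived trace (not part of the package) for an interior tile under the defaults chunk_size=512 and overlap=32, so tiles step by 480 pixels and overlap // 2 == 16:

# Interior tile at col_off = 480:
#   offset_x    = 480 + 16 = 496   # where writing starts in full_mask
#   sub_x_start = 16               # skip the patch's leading overlap band
#   length_x    = 512 - 16 = 496   # columns copied from the patch
# The copied region is patch columns [16, 512); its trailing 16 columns are
# later overwritten by the next tile, so every prediction that survives sits
# at least 16 px inside a patch border (image edges excepted).
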
--- /dev/null
+++ satcube/download.py
@@ -0,0 +1,68 @@
+ import pathlib
+
+ import cubexpress as ce
+
+ from satcube.objects import SatCubeMetadata
+
+
+ def download(
+     lon: float,
+     lat: float,
+     edge_size: int,
+     start: str,
+     end: str,
+     *,
+     max_cscore: float = 1.0,
+     min_cscore: float = 0.0,
+     outfolder: str = "raw",
+     nworks: int = 4,
+ ) -> "SatCubeMetadata":
+     """Download a Sentinel-2 cube via cubexpress and write metadata.csv."""
+     outfolder = pathlib.Path(outfolder).resolve()
+
+     table = ce.s2_table(
+         lon=lon,
+         lat=lat,
+         edge_size=edge_size,
+         start=start,
+         end=end,
+         max_cscore=max_cscore,
+         min_cscore=min_cscore,
+     )
+
+     requests = ce.table_to_requestset(
+         table=table,
+         mosaic=True,
+     )
+
+     ce.get_cube(
+         requests=requests,
+         outfolder=outfolder,
+         nworks=nworks,
+     )
+
+     table_req = (
+         requests._dataframe.copy()
+         .drop(columns=['geotransform', 'manifest', 'outname', 'width', 'height', 'scale_x', 'scale_y'])
+     )
+     table_req['date'] = table_req['id'].str.split('_').str[0]
+
+     # Collapse per-scene rows into one row per date, joining the GEE ids.
+     result_table = (
+         table.groupby('date')
+         .agg(
+             id=('id', lambda x: '-'.join(x)),
+             cs_cdf=('cs_cdf', 'first'),
+         )
+         .reset_index()
+     )
+
+     table_final = table_req.merge(
+         result_table,
+         on='date',
+         how='left',
+     ).rename(columns={'id_x': 'id', 'id_y': 'gee_ids'})
+
+     table_final.to_csv(outfolder / "metadata.csv", index=False)
+
+     return SatCubeMetadata(df=table_final, raw_dir=outfolder)
--- /dev/null
+++ satcube/objects.py
@@ -0,0 +1,71 @@
+ # satcube/objects.py
+ from __future__ import annotations
+
+ import pathlib
+ from dataclasses import dataclass, field
+
+ import pandas as pd
+
+ from satcube.align import align as _align_fn
+ from satcube.cloud_detection import cloud_masking as _cloud_fn
+
+
+ @dataclass
+ class SatCubeMetadata:
+     df: pd.DataFrame = field(repr=False)
+     raw_dir: pathlib.Path = field(repr=False)
+
+     def __repr__(self) -> str:
+         return self.df.__repr__()
+
+     __str__ = __repr__
+
+     def _repr_html_(self) -> str:
+         html = getattr(self.df, "_repr_html_", None)
+         return html() if callable(html) else self.df.__repr__()
+
+     def align(
+         self,
+         input_dir: str | pathlib.Path | None = None,
+         output_dir: str | pathlib.Path = "aligned",
+         nworks: int = 4,
+         cache: bool = False,
+     ) -> "SatCubeMetadata":
+         if input_dir is None:
+             input_dir = self.raw_dir
+         else:
+             input_dir = pathlib.Path(input_dir).expanduser().resolve()
+
+         _align_fn(
+             input_dir=input_dir,
+             output_dir=output_dir,
+             nworks=nworks,
+             cache=cache,
+         )
+         self.aligned_dir = pathlib.Path(output_dir).resolve()
+         return self
+
+     def cloud_masking(
+         self,
+         output_dir: str | pathlib.Path = "masked",
+         model_path: str | pathlib.Path = "SEN2CloudEnsemble",
+         device: str = "cpu",
+     ) -> "SatCubeMetadata":
+         if not hasattr(self, "aligned_dir"):
+             raise RuntimeError("You must run .align() first")
+         _cloud_fn(
+             input=self.aligned_dir,
+             output=output_dir,
+             model_path=model_path,
+             device=device,
+         )
+         self.masked_dir = pathlib.Path(output_dir).resolve()
+         return self
+
+     # Delegate unknown attributes and indexing to the wrapped DataFrame.
+     def __getattr__(self, item):
+         return getattr(self.df, item)
+
+     def __getitem__(self, key):
+         return self.df.__getitem__(key)
+
+     def __len__(self):
+         return len(self.df)
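
Because download() returns a SatCubeMetadata and both methods return self, the same pipeline also reads as a chain; a minimal sketch (argument values illustrative):

from satcube import download

md = download(lon=-76.5, lat=-9.5, edge_size=512,
              start="2020-01-01", end="2020-12-31")
md.align(output_dir="aligned").cloud_masking(output_dir="masked", device="cpu")
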
--- satcube/utils.py
+++ satcube/utils.py
@@ -1,11 +1,10 @@
  from __future__ import annotations

  import gc
+ import itertools
  from typing import Any, Optional
-
  import torch

-
  def _reset_gpu() -> None:
      """Release CUDA memory and reset allocation statistics.
@@ -15,6 +14,65 @@ def _reset_gpu() -> None:
      torch.cuda.reset_peak_memory_stats()


+ def define_iteration(dimension: tuple, chunk_size: int, overlap: int = 0):
+     """
+     Define the iteration strategy to walk through the image with an overlap.
+
+     Args:
+         dimension (tuple): Dimension of the S2 image.
+         chunk_size (int): Size of the chunks.
+         overlap (int): Size of the overlap between chunks.
+
+     Returns:
+         list: List of chunk coordinates.
+     """
+     dimy, dimx = dimension
+
+     if chunk_size > max(dimx, dimy):
+         return [(0, 0)]
+
+     # Adjust the step to create the overlap.
+     y_step = chunk_size - overlap
+     x_step = chunk_size - overlap
+
+     # Generate the initial chunk positions.
+     iterchunks = list(itertools.product(range(0, dimy, y_step), range(0, dimx, x_step)))
+
+     # Fix chunks at the edges to stay within bounds.
+     iterchunks_fixed = fix_lastchunk(
+         iterchunks=iterchunks, s2dim=dimension, chunk_size=chunk_size
+     )
+
+     return iterchunks_fixed
+
+
+ def fix_lastchunk(iterchunks, s2dim, chunk_size):
+     """
+     Fix the last chunks of the iteration so they align with the image boundaries.
+
+     Args:
+         iterchunks (list): List of chunks created by itertools.product.
+         s2dim (tuple): Dimension of the S2 images.
+         chunk_size (int): Size of the chunks.
+
+     Returns:
+         list: List of adjusted chunk coordinates.
+     """
+     itercontainer = []
+
+     for index_i, index_j in iterchunks:
+         # Pull the chunk back if it extends beyond the image bounds.
+         if index_i + chunk_size > s2dim[0]:
+             index_i = max(s2dim[0] - chunk_size, 0)
+         if index_j + chunk_size > s2dim[1]:
+             index_j = max(s2dim[1] - chunk_size, 0)
+
+         itercontainer.append((index_i, index_j))
+
+     return itercontainer
+
+
  class DeviceManager:
      """Hold a compiled mlstac model and move it between devices on demand."""

@@ -68,3 +126,4 @@ class DeviceManager:
          self.model = self._experiment.compiled_model(device=new_device, mode="max")
          self.device = new_device
          return self.model
+
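
For intuition about the tiling helpers above, a hand-worked call (numbers derived from the code shown here, not from package docs) for a 1000 x 1000 image with chunk_size=512 and overlap=32:

from satcube.utils import define_iteration

# Steps of 512 - 32 = 480 give candidate offsets 0, 480, 960 per axis;
# fix_lastchunk pulls 960 back to 1000 - 512 = 488 so every window fits.
coords = define_iteration((1000, 1000), chunk_size=512, overlap=32)
print(coords)
# [(0, 0), (0, 480), (0, 488),
#  (480, 0), (480, 480), (480, 488),
#  (488, 0), (488, 480), (488, 488)]
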
--- satcube/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
- from satcube.cloud_detection import cloud_masking
- from satcube.download import download_data
-
-
-
- __all__ = ["cloud_masking", "download_data"]
-
- import importlib.metadata
- __version__ = importlib.metadata.version("satcube")
-