satcube 0.1.13__py3-none-any.whl → 0.1.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This release has been flagged by the registry as potentially problematic.
- satcube/__init__.py +6 -7
- satcube/align.py +98 -0
- satcube/cloud_detection.py +170 -110
- satcube/download.py +53 -42
- satcube/objects.py +71 -0
- satcube/utils.py +61 -2
- {satcube-0.1.13.dist-info → satcube-0.1.15.dist-info}/METADATA +6 -10
- satcube-0.1.15.dist-info/RECORD +10 -0
- satcube/cloud_detection_old.py +0 -24
- satcube/dataclass.py +0 -39
- satcube/download_old.py +0 -82
- satcube/main.py +0 -453
- satcube/utils_old.py +0 -1087
- satcube-0.1.13.dist-info/RECORD +0 -13
- {satcube-0.1.13.dist-info → satcube-0.1.15.dist-info}/LICENSE +0 -0
- {satcube-0.1.13.dist-info → satcube-0.1.15.dist-info}/WHEEL +0 -0
satcube/utils.py
CHANGED
@@ -1,11 +1,10 @@
 from __future__ import annotations
 
 import gc
+import itertools
 from typing import Any, Optional
-
 import torch
 
-
 def _reset_gpu() -> None:
     """Release CUDA memory and reset allocation statistics.
 

@@ -15,6 +14,65 @@ def _reset_gpu() -> None:
     torch.cuda.reset_peak_memory_stats()
 
 
+def define_iteration(dimension: tuple, chunk_size: int, overlap: int = 0):
+    """
+    Define the iteration strategy to walk through the image with an overlap.
+
+    Args:
+        dimension (tuple): Dimension of the S2 image.
+        chunk_size (int): Size of the chunks.
+        overlap (int): Size of the overlap between chunks.
+
+    Returns:
+        list: List of chunk coordinates.
+    """
+    dimy, dimx = dimension
+
+    if chunk_size > max(dimx, dimy):
+        return [(0, 0)]
+
+    # Adjust step to create overlap
+    y_step = chunk_size - overlap
+    x_step = chunk_size - overlap
+
+    # Generate initial chunk positions
+    iterchunks = list(itertools.product(range(0, dimy, y_step), range(0, dimx, x_step)))
+
+    # Fix chunks at the edges to stay within bounds
+    iterchunks_fixed = fix_lastchunk(
+        iterchunks=iterchunks, s2dim=dimension, chunk_size=chunk_size
+    )
+
+    return iterchunks_fixed
+
+
+def fix_lastchunk(iterchunks, s2dim, chunk_size):
+    """
+    Fix the last chunk of the overlay to ensure it aligns with image boundaries.
+
+    Args:
+        iterchunks (list): List of chunks created by itertools.product.
+        s2dim (tuple): Dimension of the S2 images.
+        chunk_size (int): Size of the chunks.
+
+    Returns:
+        list: List of adjusted chunk coordinates.
+    """
+    itercontainer = []
+
+    for index_i, index_j in iterchunks:
+        # Adjust if the chunk extends beyond bounds
+        if index_i + chunk_size > s2dim[0]:
+            index_i = max(s2dim[0] - chunk_size, 0)
+        if index_j + chunk_size > s2dim[1]:
+            index_j = max(s2dim[1] - chunk_size, 0)
+
+        itercontainer.append((index_i, index_j))
+
+    return itercontainer
+
+
+
 class DeviceManager:
     """Hold a compiled mlstac model and move it between devices on demand."""
 

@@ -68,3 +126,4 @@ class DeviceManager:
         self.model = self._experiment.compiled_model(device=new_device, mode="max")
         self.device = new_device
         return self.model
+
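A minimal usage sketch of the new chunking helpers (it assumes they are importable from satcube.utils as added in this diff; the image size, chunk size, and overlap are illustrative):

    from satcube.utils import define_iteration

    # Tile a 1000 x 1000 raster into 512-pixel windows with a 32-pixel overlap.
    chunks = define_iteration(dimension=(1000, 1000), chunk_size=512, overlap=32)

    # Steps are chunk_size - overlap = 480 px; edge positions are clamped to 488
    # so every window stays inside the image:
    # [(0, 0), (0, 480), (0, 488), (480, 0), (480, 480), ...]
    for y, x in chunks:
        window = (slice(y, y + 512), slice(x, x + 512))  # read this window from the array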
{satcube-0.1.13.dist-info → satcube-0.1.15.dist-info}/METADATA
CHANGED

@@ -1,29 +1,25 @@
 Metadata-Version: 2.1
 Name: satcube
-Version: 0.1.13
+Version: 0.1.15
 Summary: A Python package to create cloud-free monthly composites by fusing Landsat and Sentinel-2 data.
 Home-page: https://github.com/IPL-UV/satcube
 Author: Cesar Aybar
 Author-email: fcesar.aybar@uv.es
-Requires-Python: >=3.
+Requires-Python: >=3.9
 Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Provides-Extra: full
-Requires-Dist: cubexpress (>=0.1.
-Requires-Dist: earthengine-api (>=1.5.12)
+Requires-Dist: cubexpress (>=0.1.10)
 Requires-Dist: mlstac (>=0.4.0)
-Requires-Dist: numpy (>=1.25.0)
-Requires-Dist: pandas (>=2.0.0)
 Requires-Dist: phicloudmask (>=0.0.2)
-Requires-Dist: pydantic (>=2.8.0)
-Requires-Dist: rasterio (>=1.3.9)
 Requires-Dist: requests (>=2.26.0)
-Requires-Dist: satalign (>=0.1.
+Requires-Dist: satalign (>=0.1.9)
 Requires-Dist: scikit-learn (>=1.2.0)
 Requires-Dist: segmentation-models-pytorch (>=0.3.0)
-Requires-Dist:
+Requires-Dist: tqdm (>=4.67.1)
 Requires-Dist: xarray (>=2023.7.0)
 Project-URL: Documentation, https://ipl-uv.github.io/satcube/
 Project-URL: Repository, https://github.com/IPL-UV/satcube
satcube-0.1.15.dist-info/RECORD
ADDED

@@ -0,0 +1,10 @@
+satcube/__init__.py,sha256=1q5c8Kx1dePuPBo29O7-1MA0L_GdBhi56gbxjQAa1_o,317
+satcube/align.py,sha256=0QOzX9f-r05F8niI91xlvifmyHaAQjWgBit_4Jc6OeM,2979
+satcube/cloud_detection.py,sha256=05OifUtWjE5jco8Q1jf_7qoQ45qrnJQgRc6LrPlMkLI,7898
+satcube/download.py,sha256=o8kwPzipU1f_pV0KNPLmLmjUEsoWb6yhkCt7kerDViU,1545
+satcube/objects.py,sha256=zhiYu-rUTm65TuEl2qIo7kLfpdam17_6CZl2KBCT9Jw,1988
+satcube/utils.py,sha256=QBdmSg6_4Vy-4mXH1Z3Z2AvbIKAXBYi5g3-IgWSE1MY,3660
+satcube-0.1.15.dist-info/LICENSE,sha256=YdB4BQMkMzWuKvXRIpQR4g91IQ_pwA5PSH2lNM97zFI,1070
+satcube-0.1.15.dist-info/METADATA,sha256=iyw3WijWmTfyg0-c-1qdxZ8UkgTd7XbnOnzZAs2QAV4,6558
+satcube-0.1.15.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+satcube-0.1.15.dist-info/RECORD,,
satcube/cloud_detection_old.py
DELETED
@@ -1,24 +0,0 @@
-import torch
-
-class LandsatCloudDetector(torch.nn.Module):
-    def __init__(self):
-        super().__init__()
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        # Define bit flags for clouds based on the
-        # Landsat QA band documentation
-        cloud_flags = (1 << 3) | (1 << 4) | (1 << 1)
-
-        ## Get the QA band
-        qa_band = x[6]
-        mask_band = x[:6].mean(axis=0)
-        mask_band[~torch.isnan(mask_band)] = 1
-
-        ## Create a cloud mask
-        cloud_mask = torch.bitwise_and(qa_band.int(), cloud_flags) == 0
-        cloud_mask = cloud_mask.float()
-        cloud_mask[cloud_mask == 0] = torch.nan
-        cloud_mask[cloud_mask == 0] = 1
-        final_mask = cloud_mask * mask_band
-        return final_mask
-
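The removed detector keys entirely on QA-band bit flags; a small sketch of the same bit test on toy values (the flag bits follow the deleted code above; reading them as dilated cloud, cloud, and cloud shadow is the usual Landsat Collection 2 QA_PIXEL interpretation, not something stated in this diff):

    import torch

    # Bits 1, 3 and 4 combined into one test mask: 0b11010 == 26.
    cloud_flags = (1 << 3) | (1 << 4) | (1 << 1)

    qa = torch.tensor([0, 8, 2, 32])  # clear, bit 3 set, bit 1 set, unrelated bit 5
    clear = torch.bitwise_and(qa, cloud_flags) == 0
    print(clear)  # tensor([ True, False, False,  True])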
satcube/dataclass.py
DELETED
@@ -1,39 +0,0 @@
-import pathlib
-from datetime import datetime
-from typing import List, Optional
-
-import pydantic
-
-
-class Sensor(pydantic.BaseModel):
-    start_date: str
-    end_date: str
-    edge_size: int
-    bands: List[str]
-
-
-class Sentinel2(Sensor):
-    weight_path: pathlib.Path
-    start_date: Optional[str] = "2015-06-27"
-    end_date: Optional[str] = datetime.now().strftime("%Y-%m-%d")
-    resolution: Optional[int] = 10
-    edge_size: Optional[int] = 384
-    embedding_universal: Optional[str] = "s2_embedding_model_universal.pt"
-    cloud_model_universal: str = "s2_cloud_model_universal.pt"
-    cloud_model_specific: str = "s2_cloud_model_specific.pt"
-    super_model_specific: str = "s2_super_model_specific.pt"
-    bands: List[str] = [
-        "B01",
-        "B02",
-        "B03",
-        "B04",
-        "B05",
-        "B06",
-        "B07",
-        "B08",
-        "B8A",
-        "B09",
-        "B10",
-        "B11",
-        "B12",
-    ]
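For reference, the removed Sentinel2 model only required a weight path, with every other field falling back to the defaults shown above (a sketch against the deleted definition, not the 0.1.15 API; the weight path is a placeholder):

    import pathlib

    # Assumes the Sensor/Sentinel2 classes from the deleted dataclass.py are in scope.
    s2 = Sentinel2(weight_path=pathlib.Path("weights/s2"))
    print(s2.resolution, s2.edge_size)  # 10 384
    print(s2.bands[:3])                 # ['B01', 'B02', 'B03']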
satcube/download_old.py
DELETED
@@ -1,82 +0,0 @@
-import ee
-import cubexpress
-import pathlib
-from typing import Optional
-from datetime import datetime
-
-def download_data(
-    lon: float,
-    lat: float,
-    cs_cdf: Optional[float] = 0.6,
-    buffer_size: Optional[int] = 1280,
-    start_date: Optional[str] = "2015-01-01",
-    end_date: Optional[str] = datetime.today().strftime('%Y-%m-%d'),
-    outfolder: Optional[str] = "raw/"
-) -> pathlib.Path:
-    """
-    Download Sentinel-2 imagery data using cubexpress and Earth Engine API.
-
-    Args:
-        lon (float): Longitude of the point of interest.
-        lat (float): Latitude of the point of interest.
-        cs_cdf (Optional[float]): Cloud mask threshold (default 0.6).
-        buffer_size (Optional[int]): Buffer size for image extraction (default 1280).
-        start_date (Optional[str]): Start date for image filtering (default "2015-01-01").
-        end_date (Optional[str]): End date for image filtering (default today’s date).
-        outfolder (Optional[str]): Output folder to save images (default "raw/").
-
-    Returns:
-        pathlib.Path: Path to the folder where the data is stored.
-    """
-
-    # Initialize Earth Engine
-    ee.Initialize(project="ee-julius013199")
-
-    # Define point of interest
-    point = ee.Geometry.Point([lon, lat])
-
-    # Filter image collection by location and date
-    collection = ee.ImageCollection("COPERNICUS/S2_SR_HARMONIZED") \
-        .filterBounds(point) \
-        .filterDate(start_date, end_date)
-
-    # Get image IDs
-    image_ids = collection.aggregate_array('system:id').getInfo()
-
-    # Cloud mask function
-    def cloud_mask(image) -> ee.Image:
-        """Apply cloud mask to the image."""
-        return image.select('MSK_CLDPRB').lt(20)
-
-    # Apply cloud mask
-    collection = collection.map(cloud_mask)
-
-    # Generate geotransform for cubexpress
-    geotransform = cubexpress.lonlat2rt(lon=lon, lat=lat, edge_size=buffer_size, scale=10)
-
-    # Prepare requests for cubexpress
-    requests = [
-        cubexpress.Request(
-            id=f"s2test_{i}",
-            raster_transform=geotransform,
-            bands=["B4", "B3", "B2"],  # RGB bands
-            image=ee.Image(image_id).divide(10000)  # Adjust image scaling
-        )
-        for i, image_id in enumerate(image_ids)
-    ]
-
-    # Create request set
-    cube_requests = cubexpress.RequestSet(requestset=requests)
-
-    # Set output folder
-    output_path = pathlib.Path(outfolder)
-
-    # Download the data
-    cubexpress.getcube(
-        request=cube_requests,
-        output_path=output_path,
-        nworkers=4,
-        max_deep_level=5
-    )
-
-    return output_path
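As defined above, the removed helper amounted to a single call per point of interest (a sketch against the deleted signature; the coordinates, dates, and folder are placeholders, and the function initialized Earth Engine with a project ID hard-coded in its body):

    # Assumes the deleted download_old.py is in scope and Earth Engine credentials exist.
    out = download_data(
        lon=-0.37,
        lat=39.47,
        buffer_size=1280,
        start_date="2020-01-01",
        end_date="2020-12-31",
        outfolder="raw/",
    )
    print(out)  # PosixPath('raw')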