radgeetoolbox-1.7.3-py3-none-any.whl → radgeetoolbox-1.7.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- RadGEEToolbox/CollectionStitch.py +16 -3
- RadGEEToolbox/Export.py +16 -0
- RadGEEToolbox/GenericCollection.py +698 -202
- RadGEEToolbox/LandsatCollection.py +818 -218
- RadGEEToolbox/Sentinel1Collection.py +734 -204
- RadGEEToolbox/Sentinel2Collection.py +771 -219
- RadGEEToolbox/__init__.py +4 -4
- {radgeetoolbox-1.7.3.dist-info → radgeetoolbox-1.7.5.dist-info}/METADATA +6 -6
- radgeetoolbox-1.7.5.dist-info/RECORD +14 -0
- {radgeetoolbox-1.7.3.dist-info → radgeetoolbox-1.7.5.dist-info}/WHEEL +1 -1
- radgeetoolbox-1.7.3.dist-info/RECORD +0 -14
- {radgeetoolbox-1.7.3.dist-info → radgeetoolbox-1.7.5.dist-info}/licenses/LICENSE.txt +0 -0
- {radgeetoolbox-1.7.3.dist-info → radgeetoolbox-1.7.5.dist-info}/top_level.txt +0 -0
@@ -2,6 +2,7 @@ import ee
 import math
 import pandas as pd
 import numpy as np
+import warnings
 
 
 class Sentinel1Collection:
@@ -230,6 +231,14 @@ class Sentinel1Collection:
         self._DbFromSigma0 = None
         self._multilook = None
 
+    def __call__(self):
+        """
+        Allows the object to be called as a function, returning itself.
+        This enables property-like methods to be accessed with or without parentheses
+        (e.g., .mosaicByDate or .mosaicByDate()).
+        """
+        return self
+
     @staticmethod
     def image_dater(image):
         """
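The new `__call__` hook lets property-style members be chained with or without trailing parentheses. A minimal sketch of the intended behavior; the constructor arguments are hypothetical and not taken from this diff:

```python
import ee
from RadGEEToolbox import Sentinel1Collection

ee.Initialize()

# Hypothetical filter arguments; any valid Sentinel1Collection constructor call works here.
col = Sentinel1Collection(start_date='2023-01-01', end_date='2023-02-01')

# Both forms now resolve to the same object: the property returns a Sentinel1Collection,
# and calling it simply invokes __call__, which returns the object itself.
mosaics_a = col.mosaicByDate
mosaics_b = col.mosaicByDate()
```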
@@ -245,7 +254,7 @@ class Sentinel1Collection:
         return image.set({"Date_Filter": date})
 
     @staticmethod
-    def PixelAreaSum(
+    def pixelAreaSum(
         image, band_name, geometry, threshold=-1, scale=10, maxPixels=1e12
     ):
         """
@@ -304,8 +313,26 @@ class Sentinel1Collection:
         # Call to iterate the calculate_and_set_area function over the list of bands, starting with the original image
         final_image = ee.Image(bands.iterate(calculate_and_set_area, image))
         return final_image
+
+    @staticmethod
+    def PixelAreaSum(
+        image, band_name, geometry, threshold=-1, scale=10, maxPixels=1e12
+    ):
+        warnings.warn(
+            "The 'PixelAreaSum' method is deprecated. Please use 'pixelAreaSum' instead.",
+            DeprecationWarning,
+            stacklevel=2
+        )
+        return Sentinel1Collection.pixelAreaSum(
+            image=image,
+            band_name=band_name,
+            geometry=geometry,
+            threshold=threshold,
+            scale=scale,
+            maxPixels=maxPixels,
+        )
 
-    def PixelAreaSumCollection(
+    def pixelAreaSumCollection(
         self, band_name, geometry, threshold=-1, scale=10, maxPixels=1e12, output_type='ImageCollection', area_data_export_path=None
     ):
         """
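The renamed statics keep the old CamelCase entry points as thin wrappers that emit `DeprecationWarning`. A sketch of how a caller can surface the warning while migrating; `img` and `aoi` are placeholders for an existing `ee.Image` and `ee.Geometry`, not objects defined in this diff:

```python
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always", DeprecationWarning)
    # Legacy spelling still works, but routes through the new implementation.
    area_img = Sentinel1Collection.PixelAreaSum(img, band_name='VV', geometry=aoi, threshold=-15)
    print([str(w.message) for w in caught])

# Preferred going forward:
area_img = Sentinel1Collection.pixelAreaSum(img, band_name='VV', geometry=aoi, threshold=-15)
```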
@@ -332,7 +359,7 @@ class Sentinel1Collection:
         collection = self.collection
         # Area calculation for each image in the collection, using the PixelAreaSum function
         AreaCollection = collection.map(
-            lambda image: Sentinel1Collection.PixelAreaSum(
+            lambda image: Sentinel1Collection.pixelAreaSum(
                 image,
                 band_name=band_name,
                 geometry=geometry,
@@ -348,17 +375,35 @@ class Sentinel1Collection:
 
         # If an export path is provided, the area data will be exported to a CSV file
         if area_data_export_path:
-            Sentinel1Collection(collection=self._PixelAreaSumCollection).ExportProperties(property_names=prop_names, file_path=area_data_export_path+'.csv')
+            Sentinel1Collection(collection=self._PixelAreaSumCollection).exportProperties(property_names=prop_names, file_path=area_data_export_path+'.csv')
         # Returning the result in the desired format based on output_type argument or raising an error for invalid input
         if output_type == 'ImageCollection' or output_type == 'ee.ImageCollection':
             return self._PixelAreaSumCollection
         elif output_type == 'Sentinel1Collection':
             return Sentinel1Collection(collection=self._PixelAreaSumCollection)
         elif output_type == 'DataFrame' or output_type == 'Pandas' or output_type == 'pd' or output_type == 'dataframe' or output_type == 'df':
-            return Sentinel1Collection(collection=self._PixelAreaSumCollection).ExportProperties(property_names=prop_names)
+            return Sentinel1Collection(collection=self._PixelAreaSumCollection).exportProperties(property_names=prop_names)
         else:
             raise ValueError("Incorrect `output_type`. The `output_type` argument must be one of the following: 'ImageCollection', 'ee.ImageCollection', 'Sentinel1Collection', 'DataFrame', 'Pandas', 'pd', 'dataframe', or 'df'.")
 
+    def PixelAreaSumCollection(
+        self, band_name, geometry, threshold=-1, scale=10, maxPixels=1e12, output_type='ImageCollection', area_data_export_path=None
+    ):
+        warnings.warn(
+            "The 'PixelAreaSumCollection' method is deprecated. Please use 'pixelAreaSumCollection' instead.",
+            DeprecationWarning,
+            stacklevel=2
+        )
+        return self.pixelAreaSumCollection(
+            band_name=band_name,
+            geometry=geometry,
+            threshold=threshold,
+            scale=scale,
+            maxPixels=maxPixels,
+            output_type=output_type,
+            area_data_export_path=area_data_export_path
+        )
+
     @staticmethod
     def add_month_property_fn(image):
         """
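A hedged usage sketch for the renamed collection-level method; `col` and `aoi` are placeholders for an existing Sentinel1Collection and ee.Geometry:

```python
df = col.pixelAreaSumCollection(
    band_name='VV',
    geometry=aoi,
    threshold=-15,           # pixels with band values above this threshold are counted
    scale=10,
    output_type='DataFrame'  # or 'ImageCollection', 'ee.ImageCollection', 'Sentinel1Collection'
)
```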
@@ -707,7 +752,7 @@ class Sentinel1Collection:
         return Sentinel1Collection(collection=self._speckle_filter)
 
     @property
-    def Sigma0FromDb(self):
+    def sigma0FromDb(self):
         """
         Property attribute function to convert image collection from decibels to sigma0. Results are calculated once per class object then cached for future use.
 
@@ -732,9 +777,18 @@ class Sentinel1Collection:
         sigma0_collection = collection.map(conversion)
         self._Sigma0FromDb = sigma0_collection
         return Sentinel1Collection(collection=self._Sigma0FromDb)
+
+    @property
+    def Sigma0FromDb(self):
+        warnings.warn(
+            "The 'Sigma0FromDb' property is deprecated. Please use 'sigma0FromDb' instead.",
+            DeprecationWarning,
+            stacklevel=2
+        )
+        return self.sigma0FromDb
 
     @property
-    def DbFromSigma0(self):
+    def dbFromSigma0(self):
         """
         Property attribute function to convert image collection from decibels to sigma0. Results are calculated once per class object then cached for future use.
 
@@ -760,6 +814,15 @@ class Sentinel1Collection:
         self._DbFromSigma0 = dB_collection
         return Sentinel1Collection(collection=self._DbFromSigma0)
 
+    @property
+    def DbFromSigma0(self):
+        warnings.warn(
+            "The 'DbFromSigma0' property is deprecated. Please use 'dbFromSigma0' instead.",
+            DeprecationWarning,
+            stacklevel=2
+        )
+        return self.dbFromSigma0
+
     @staticmethod
     def anomaly_fn(image, geometry, band_name=None, anomaly_band_name=None, replace=True, scale=10):
         """
@@ -847,7 +910,7 @@ class Sentinel1Collection:
         self._dates = dates
         return self._dates
 
-    def ExportProperties(self, property_names, file_path=None):
+    def exportProperties(self, property_names, file_path=None):
         """
         Fetches and returns specified properties from each image in the collection as a list, and returns a pandas DataFrame and optionally saves the results to a csv file.
 
@@ -902,6 +965,14 @@ class Sentinel1Collection:
             print(f"Properties saved to {file_path}")
 
         return df
+
+    def ExportProperties(self, property_names, file_path=None):
+        warnings.warn(
+            "The 'ExportProperties' method is deprecated. Please use 'exportProperties' instead.",
+            DeprecationWarning,
+            stacklevel=2
+        )
+        return self.exportProperties(property_names=property_names, file_path=file_path)
 
     def get_filtered_collection(self):
         """
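A short usage sketch for the renamed property export; `col` is a placeholder Sentinel1Collection and the property names are typical Sentinel-1 metadata keys, used here only for illustration:

```python
# Return a DataFrame of per-image properties.
props_df = col.exportProperties(property_names=['orbitProperties_pass', 'relativeOrbitNumber_start'])

# Or write the same table to CSV as a side effect.
col.exportProperties(property_names=['orbitProperties_pass'], file_path='s1_properties.csv')
```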
@@ -1966,6 +2037,9 @@ class Sentinel1Collection:
 
         if geometry is not None and not isinstance(geometry, ee.Geometry):
             raise ValueError(f'The chosen `geometry`: {geometry} is not a valid ee.Geometry object.')
+
+        native_projection = image_collection.first().select(target_band).projection()
+
         # define the join, which will join all images newer than the current image
         # use system:time_start if the image does not have a Date_Filter property
         if join_method == 'system:time_start':
@@ -2021,7 +2095,7 @@ class Sentinel1Collection:
         # convert the image collection to an image of s_statistic values per pixel
         # where the s_statistic is the sum of partial s values
         # renaming the band as 's_statistic' for later usage
-        final_s_image = partial_s_col.sum().rename('s_statistic')
+        final_s_image = partial_s_col.sum().rename('s_statistic').setDefaultProjection(native_projection)
 
 
         ########## PART 2 - VARIANCE and Z-SCORE ##########
@@ -2084,7 +2158,7 @@ class Sentinel1Collection:
             mask = ee.Image(1).clip(geometry)
             final_image = final_image.updateMask(mask)
 
-        return final_image
+        return final_image.setDefaultProjection(native_projection)
 
     def sens_slope_trend(self, target_band=None, join_method='system:time_start', geometry=None):
         """
@@ -2120,6 +2194,8 @@ class Sentinel1Collection:
         if geometry is not None and not isinstance(geometry, ee.Geometry):
             raise ValueError(f'The chosen `geometry`: {geometry} is not a valid ee.Geometry object.')
 
+        native_projection = image_collection.first().select(target_band).projection()
+
         # Add Year Band (Time X-Axis)
         def add_year_band(image):
             # Handle user-defined date strings vs system time
@@ -2147,7 +2223,7 @@ class Sentinel1Collection:
             mask = ee.Image(1).clip(geometry)
             slope_band = slope_band.updateMask(mask)
 
-        return slope_band
+        return slope_band.setDefaultProjection(native_projection)
 
 
     def mask_to_polygon(self, polygon):
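Both trend methods now stamp their composite output with the native projection of the first image's target band, which matters when the result is later exported or reduced at a fixed scale. A hedged sketch of the downstream effect, assuming `col` is an existing Sentinel1Collection with a 'VV' band:

```python
slope = col.sens_slope_trend(target_band='VV')

# With setDefaultProjection applied, the composite keeps the sensor's native grid
# instead of falling back to the default WGS84 / 1-degree projection of composites.
print(slope.projection().nominalScale().getInfo())
```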
@@ -2161,20 +2237,15 @@ class Sentinel1Collection:
             Sentinel1Collection: masked Sentinel1Collection image collection
 
         """
-
-
-        mask = ee.Image.constant(1).clip(polygon)
+        # Convert the polygon to a mask
+        mask = ee.Image.constant(1).clip(polygon)
 
-
-
-
-        # Update the internal collection state
-        self._geometry_masked_collection = Sentinel1Collection(
-            collection=masked_collection
-        )
+        # Update the mask of each image in the collection
+        masked_collection = self.collection.map(lambda img: img.updateMask(mask)\
+            .copyProperties(img).set('system:time_start', img.get('system:time_start')))
 
         # Return the updated object
-        return self._geometry_masked_collection
+        return Sentinel1Collection(collection=masked_collection)
 
     def mask_out_polygon(self, polygon):
         """
@@ -2187,23 +2258,18 @@ class Sentinel1Collection:
             Sentinel1Collection: masked Sentinel1Collection image collection
 
         """
-
-
-        full_mask = ee.Image.constant(1)
+        # Convert the polygon to a mask
+        full_mask = ee.Image.constant(1)
 
-
-
+        # Use paint to set pixels inside polygon as 0
+        area = full_mask.paint(polygon, 0)
 
-
-
-
-        # Update the internal collection state
-        self._geometry_masked_out_collection = Sentinel1Collection(
-            collection=masked_collection
-        )
+        # Update the mask of each image in the collection
+        masked_collection = self.collection.map(lambda img: img.updateMask(area)\
+            .copyProperties(img).set('system:time_start', img.get('system:time_start')))
 
         # Return the updated object
-        return self._geometry_masked_out_collection
+        return Sentinel1Collection(collection=masked_collection)
 
     def image_grab(self, img_selector):
         """
@@ -2255,7 +2321,7 @@ class Sentinel1Collection:
         new_col = self.collection.filter(ee.Filter.eq("Date_Filter", img_date))
         return new_col.first()
 
-    def CollectionStitch(self, img_col2):
+    def collectionStitch(self, img_col2):
         """
         Function to mosaic two Sentinel1Collection objects which share image dates.
         Mosaics are only formed for dates where both image collections have images.
@@ -2307,9 +2373,17 @@ class Sentinel1Collection:
 
         # Return a Sentinel1Collection instance
         return Sentinel1Collection(collection=new_col)
+
+    def CollectionStitch(self, img_col2):
+        warnings.warn(
+            "The 'CollectionStitch' method is deprecated. Please use 'collectionStitch' instead.",
+            DeprecationWarning,
+            stacklevel=2
+        )
+        return self.collectionStitch(img_col2)
 
     @property
-    def MosaicByDate(self):
+    def mosaicByDateDepr(self):
         """
         Property attribute function to mosaic collection images that share the same date.
         The property CLOUD_COVER for each image is used to calculate an overall mean,
|
@@ -2370,6 +2444,75 @@ class Sentinel1Collection:
|
|
|
2370
2444
|
|
|
2371
2445
|
# Convert the list of mosaics to an ImageCollection
|
|
2372
2446
|
return self._MosaicByDate
|
|
2447
|
+
|
|
2448
|
+
@property
|
|
2449
|
+
def mosaicByDate(self):
|
|
2450
|
+
"""
|
|
2451
|
+
Property attribute function to mosaic collection images that share the same date.
|
|
2452
|
+
|
|
2453
|
+
The property CLOUD_COVER for each image is used to calculate an overall mean,
|
|
2454
|
+
which replaces the CLOUD_COVER property for each mosaiced image.
|
|
2455
|
+
Server-side friendly.
|
|
2456
|
+
|
|
2457
|
+
NOTE: if images are removed from the collection from cloud filtering, you may have mosaics composed of only one image.
|
|
2458
|
+
|
|
2459
|
+
Returns:
|
|
2460
|
+
LandsatCollection: LandsatCollection image collection with mosaiced imagery and mean CLOUD_COVER as a property
|
|
2461
|
+
"""
|
|
2462
|
+
if self._MosaicByDate is None:
|
|
2463
|
+
distinct_dates = self.collection.distinct("Date_Filter")
|
|
2464
|
+
|
|
2465
|
+
# Define a join to link images by Date_Filter
|
|
2466
|
+
filter_date = ee.Filter.equals(leftField="Date_Filter", rightField="Date_Filter")
|
|
2467
|
+
join = ee.Join.saveAll(matchesKey="date_matches")
|
|
2468
|
+
|
|
2469
|
+
# Apply the join
|
|
2470
|
+
# Primary: Distinct dates collection
|
|
2471
|
+
# Secondary: The full original collection
|
|
2472
|
+
joined_col = ee.ImageCollection(join.apply(distinct_dates, self.collection, filter_date))
|
|
2473
|
+
|
|
2474
|
+
# Define the mosaicking function
|
|
2475
|
+
def _mosaic_day(img):
|
|
2476
|
+
# Recover the list of images for this day
|
|
2477
|
+
daily_list = ee.List(img.get("date_matches"))
|
|
2478
|
+
daily_col = ee.ImageCollection.fromImages(daily_list)
|
|
2479
|
+
|
|
2480
|
+
# Create the mosaic
|
|
2481
|
+
mosaic = daily_col.mosaic().setDefaultProjection(img.projection())
|
|
2482
|
+
|
|
2483
|
+
# Properties to preserve from the representative image
|
|
2484
|
+
props_of_interest = [
|
|
2485
|
+
"platform_number",
|
|
2486
|
+
"instrument",
|
|
2487
|
+
"instrumentMode",
|
|
2488
|
+
"orbitNumber_start",
|
|
2489
|
+
"orbitNumber_stop",
|
|
2490
|
+
"orbitProperties_pass",
|
|
2491
|
+
"resolution_meters",
|
|
2492
|
+
"transmitterReceiverPolarisation",
|
|
2493
|
+
"system:time_start",
|
|
2494
|
+
"crs",
|
|
2495
|
+
"Date_Filter"
|
|
2496
|
+
]
|
|
2497
|
+
|
|
2498
|
+
# Return mosaic with properties set
|
|
2499
|
+
return mosaic.copyProperties(img, props_of_interest)
|
|
2500
|
+
|
|
2501
|
+
# 5. Map the function and wrap the result
|
|
2502
|
+
mosaiced_col = joined_col.map(_mosaic_day)
|
|
2503
|
+
self._MosaicByDate = Sentinel1Collection(collection=mosaiced_col)
|
|
2504
|
+
|
|
2505
|
+
# Convert the list of mosaics to an ImageCollection
|
|
2506
|
+
return self._MosaicByDate
|
|
2507
|
+
|
|
2508
|
+
@property
|
|
2509
|
+
def MosaicByDate(self):
|
|
2510
|
+
warnings.warn(
|
|
2511
|
+
"The 'MosaicByDate' property is deprecated. Please use 'mosaicByDate' instead.",
|
|
2512
|
+
DeprecationWarning,
|
|
2513
|
+
stacklevel=2
|
|
2514
|
+
)
|
|
2515
|
+
return self.mosaicByDate
|
|
2373
2516
|
|
|
2374
2517
|
@staticmethod
|
|
2375
2518
|
def ee_to_df(
|
|
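A brief usage sketch for the new server-side daily mosaic property; `col` is a placeholder Sentinel1Collection spanning several acquisition dates:

```python
daily = col.mosaicByDate        # join + mosaic built server-side, cached after the first access
print(daily.dates)              # one entry per distinct Date_Filter value

# The CamelCase spelling still resolves, but emits a DeprecationWarning.
daily_legacy = col.MosaicByDate
```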
@@ -2589,200 +2732,197 @@ class Sentinel1Collection:
         lines,
         line_names,
         reducer="mean",
-        dist_interval=None,
+        dist_interval=30,
         n_segments=None,
         scale=10,
         processing_mode='aggregated',
         save_folder_path=None,
         sampling_method='line',
-        point_buffer_radius=5,
+        point_buffer_radius=15,
+        batch_size=10
     ):
         """
-        Computes and returns pixel values along transects
-
-
-
-        for maximum flexibility and performance.
-
-        There are two processing modes available, aggregated and iterative:
-        - 'aggregated' (default; suggested): Fast, server-side processing. Fetches all results
-          in a single request. Highly recommended. Returns a dictionary of pandas DataFrames.
-        - 'iterative': Slower, client-side loop that processes one image at a time.
-          Kept for backward compatibility (effectively depreciated). Returns None and saves individual CSVs.
-          This method is not recommended unless absolutely necessary, as it is less efficient and may be subject to client-side timeouts.
-
+        Computes and returns pixel values along transects. Provide a list of ee.Geometry.LineString objects and corresponding names, and the function will compute the specified reducer value
+        at regular intervals along each line for all images in the collection. Use `dist_interval` or `n_segments` to control sampling resolution. The user can choose between 'aggregated' mode (returns a dictionary of DataFrames) or 'iterative' mode (saves individual CSVs for each transect).
+        Alter `sampling_method` to sample directly along the line or via buffered points along the line. Buffered points can help capture more representative pixel values in heterogeneous landscapes, and the buffer radius can be adjusted via `point_buffer_radius`.
+
         Args:
-            lines (list):
-
-
-
-
-
-
-
-
-
-
-                each transect line into for sampling. This parameter overrides `dist_interval`.
-                Defaults to None.
-            scale (int, optional): The nominal scale in meters for the reduction,
-                which should typically match the pixel resolution of the imagery.
-                Defaults to 10.
-            processing_mode (str, optional): The method for processing the collection.
-                - 'aggregated' (default): Fast, server-side processing. Fetches all
-                  results in a single request. Highly recommended. Returns a dictionary
-                  of pandas DataFrames.
-                - 'iterative': Slower, client-side loop that processes one image at a
-                  time. Kept for backward compatibility. Returns None and saves
-                  individual CSVs.
-            save_folder_path (str, optional): If provided, the function will save the
-                resulting transect data to CSV files. The behavior depends on the
-                `processing_mode`:
-                - In 'aggregated' mode, one CSV is saved for each transect,
-                  containing all dates. (e.g., 'MyTransect_transects.csv').
-                - In 'iterative' mode, one CSV is saved for each date,
-                  containing all transects. (e.g., '2022-06-15_transects.csv').
-            sampling_method (str, optional): The geometric method used for sampling.
-                - 'line' (default): Reduces all pixels intersecting each small line
-                  segment. This can be unreliable and produce blank rows if
-                  `dist_interval` is too small relative to the `scale`.
-                - 'buffered_point': Reduces all pixels within a buffer around the
-                  midpoint of each line segment. This method is more robust and
-                  reliably avoids blank rows, but may not reduce all pixels along a line segment.
-            point_buffer_radius (int, optional): The radius in meters for the buffer
-                when `sampling_method` is 'buffered_point'. Defaults to 5.
+            lines (list): List of ee.Geometry.LineString objects.
+            line_names (list): List of string names for each transect.
+            reducer (str, optional): Reducer name. Defaults to 'mean'.
+            dist_interval (float, optional): Distance interval in meters. Defaults to 30.
+            n_segments (int, optional): Number of segments (overrides dist_interval).
+            scale (int, optional): Scale in meters. Defaults to 10.
+            processing_mode (str, optional): 'aggregated' or 'iterative'.
+            save_folder_path (str, optional): Path to save CSVs.
+            sampling_method (str, optional): 'line' or 'buffered_point'.
+            point_buffer_radius (int, optional): Buffer radius if using 'buffered_point'.
+            batch_size (int, optional): Images per request in 'aggregated' mode. Defaults to 10. Lower the value if you encounter a 'Too many aggregations' error.
 
         Returns:
-            dict or None:
-                - If `processing_mode` is 'aggregated', returns a dictionary where each
-                  key is a transect name and each value is a pandas DataFrame. In the
-                  DataFrame, the index is the distance along the transect and each
-                  column represents an image date. Optionally saves CSV files if
-                  `save_folder_path` is provided.
-                - If `processing_mode` is 'iterative', returns None as it saves
-                  files directly.
-
-        Raises:
-            ValueError: If `lines` and `line_names` have different lengths, or if
-                an unknown reducer or processing mode is specified.
+            dict or None: Dictionary of DataFrames (aggregated) or None (iterative).
         """
-        # Validating inputs
         if len(lines) != len(line_names):
             raise ValueError("'lines' and 'line_names' must have the same number of elements.")
-
+
+        first_img = self.collection.first()
+        bands = first_img.bandNames().getInfo()
+        is_multiband = len(bands) > 1
+
+        # Setup robust dictionary for handling masked/zero values
+        default_val = -9999
+        dummy_dict = ee.Dictionary.fromLists(bands, ee.List.repeat(default_val, len(bands)))
+
+        if is_multiband:
+            reducer_cols = [f"{b}_{reducer}" for b in bands]
+            clean_names = bands
+            rename_keys = bands
+            rename_vals = reducer_cols
+        else:
+            reducer_cols = [reducer]
+            clean_names = [bands[0]]
+            rename_keys = bands
+            rename_vals = reducer_cols
+
+        print("Pre-computing transect geometries from input LineString(s)...")
+
+        master_transect_fc = ee.FeatureCollection([])
+        geom_error = 1.0
+
+        for i, line in enumerate(lines):
+            line_name = line_names[i]
+            length = line.length(geom_error)
+
+            eff_interval = length.divide(n_segments) if n_segments else dist_interval
+
+            distances = ee.List.sequence(0, length, eff_interval)
+            cut_lines = line.cutLines(distances, geom_error).geometries()
+
+            def create_feature(l):
+                geom = ee.Geometry(ee.List(l).get(0))
+                dist = ee.Number(ee.List(l).get(1))
+
+                final_geom = ee.Algorithms.If(
+                    ee.String(sampling_method).equals('buffered_point'),
+                    geom.centroid(geom_error).buffer(point_buffer_radius),
+                    geom
+                )
+
+                return ee.Feature(ee.Geometry(final_geom), {
+                    'transect_name': line_name,
+                    'distance': dist
+                })
+
+            line_fc = ee.FeatureCollection(cut_lines.zip(distances).map(create_feature))
+            master_transect_fc = master_transect_fc.merge(line_fc)
+
+        try:
+            ee_reducer = getattr(ee.Reducer, reducer)()
+        except AttributeError:
+            raise ValueError(f"Unknown reducer: '{reducer}'.")
+
+        def process_image(image):
+            date_val = image.get('Date_Filter')
+
+            # Map over points (Slower but Robust)
+            def reduce_point(f):
+                stats = image.reduceRegion(
+                    reducer=ee_reducer,
+                    geometry=f.geometry(),
+                    scale=scale,
+                    maxPixels=1e13
+                )
+                # Combine with defaults (preserves 0, handles masked)
+                safe_stats = dummy_dict.combine(stats, overwrite=True)
+                # Rename keys to match expected outputs (e.g. 'ndvi' -> 'ndvi_mean')
+                final_stats = safe_stats.rename(rename_keys, rename_vals)
+
+                return f.set(final_stats).set({'image_date': date_val})
+
+            return master_transect_fc.map(reduce_point)
+
+        export_cols = ['transect_name', 'distance', 'image_date'] + reducer_cols
+
         if processing_mode == 'aggregated':
-
-
-
-
-
-
-
-
-
-
-            #
-
-
-
-
-
-
-
-                # Determine effective distance interval based on n_segments or dist_interval
-                effective_dist_interval = ee.Algorithms.If(
-                    n_segments,
-                    length.divide(n_segments),
-                    dist_interval or 30 # Defaults to 30 if both are None
-                )
-                # Generate distances along the line(s) for segmentation
-                distances = ee.List.sequence(0, length, effective_dist_interval)
-                # Segmenting the line into smaller lines at the specified distances
-                cut_lines_geoms = line.cutLines(distances, maxError).geometries()
-                # Function to create features with distance attributes
-                # Adjusted to ensure consistent return types
-                def set_dist_attr(l):
-                    # l is a list: [geometry, distance]
-                    # Extracting geometry portion of line
-                    geom_segment = ee.Geometry(ee.List(l).get(0))
-                    # Extracting distance value for attribute
-                    distance = ee.Number(ee.List(l).get(1))
-                    ### Determine final geometry based on sampling method
-                    # If the sampling method is 'buffered_point',
-                    # create a buffered point feature at the centroid of each segment,
-                    # otherwise create a line feature
-                    final_feature = ee.Algorithms.If(
-                        ee.String(sampling_method).equals('buffered_point'),
-                        # True Case: Create the buffered point feature
-                        ee.Feature(
-                            geom_segment.centroid(maxError).buffer(point_buffer_radius),
-                            {'distance': distance}
-                        ),
-                        # False Case: Create the line segment feature
-                        ee.Feature(geom_segment, {'distance': distance})
-                    )
-                    # Return either the line segment feature or the buffered point feature
-                    return final_feature
-                # Creating a FeatureCollection of the cut lines with distance attributes
-                # Using map to apply the set_dist_attr function to each cut line geometry
-                line_features = ee.FeatureCollection(cut_lines_geoms.zip(distances).map(set_dist_attr))
-                # Reducing the image over the line features to get transect values
-                transect_fc = image.reduceRegions(
-                    collection=line_features, reducer=ee_reducer, scale=scale
-                )
-                # Adding image date and line name properties to each feature
-                def set_props(feature):
-                    return feature.set({'image_date': image_date, 'transect_name': line_name})
-                # Append to the list of all transects for this image
-                all_transects_for_image = all_transects_for_image.add(transect_fc.map(set_props))
-            # Combine all transect FeatureCollections into a single FeatureCollection and flatten
-            # Flatten is used to merge the list of FeatureCollections into one
-            return ee.FeatureCollection(all_transects_for_image).flatten()
-            # Map the function over the entire image collection and flatten the results
-            results_fc = ee.FeatureCollection(self.collection.map(get_transects_for_image)).flatten()
-            # Convert the results to a pandas DataFrame
-            df = Sentinel1Collection.ee_to_df(results_fc, remove_geom=True)
-            # Check if the DataFrame is empty
-            if df.empty:
-                print("Warning: No transect data was generated.")
+            collection_size = self.collection.size().getInfo()
+            print(f"Starting batch process of {collection_size} images...")
+
+            dfs = []
+            for i in range(0, collection_size, batch_size):
+                print(f" Processing image {i} to {min(i + batch_size, collection_size)}...")
+
+                batch_col = ee.ImageCollection(self.collection.toList(batch_size, i))
+                results_fc = batch_col.map(process_image).flatten()
+
+                # Dynamic Class Call for ee_to_df
+                df_batch = self.__class__.ee_to_df(results_fc, columns=export_cols, remove_geom=True)
+
+                if not df_batch.empty:
+                    dfs.append(df_batch)
+
+            if not dfs:
+                print("Warning: No transect data generated.")
                 return {}
-
+
+            df = pd.concat(dfs, ignore_index=True)
+
+            # Post-Process & Split
             output_dfs = {}
-
+            for col in reducer_cols:
+                df[col] = pd.to_numeric(df[col], errors='coerce')
+                df[col] = df[col].replace(-9999, np.nan)
+
             for name in sorted(df['transect_name'].unique()):
-
-
-
-
-
-
-
-
-
-
-
-
+                line_df = df[df['transect_name'] == name]
+
+                for raw_col, band_name in zip(reducer_cols, clean_names):
+                    try:
+                        # Safety drop for duplicates
+                        line_df_clean = line_df.drop_duplicates(subset=['distance', 'image_date'])
+
+                        pivot = line_df_clean.pivot(index='distance', columns='image_date', values=raw_col)
+                        pivot.columns.name = 'Date'
+                        key = f"{name}_{band_name}"
+                        output_dfs[key] = pivot
+
+                        if save_folder_path:
+                            safe_key = "".join(x for x in key if x.isalnum() or x in "._-")
+                            fname = f"{save_folder_path}{safe_key}_transects.csv"
+                            pivot.to_csv(fname)
+                            print(f"Saved: {fname}")
+                    except Exception as e:
+                        print(f"Skipping pivot for {name}/{band_name}: {e}")
+
             return output_dfs
 
-        ### old, depreciated iterative client-side processing method ###
         elif processing_mode == 'iterative':
             if not save_folder_path:
-                raise ValueError("
+                raise ValueError("save_folder_path is required for iterative mode.")
 
             image_collection_dates = self.dates
             for i, date in enumerate(image_collection_dates):
                 try:
                     print(f"Processing image {i+1}/{len(image_collection_dates)}: {date}")
-
-
-
-                    )
-
-
+                    image_list = self.collection.toList(self.collection.size())
+                    image = ee.Image(image_list.get(i))
+
+                    fc_result = process_image(image)
+                    df = self.__class__.ee_to_df(fc_result, columns=export_cols, remove_geom=True)
+
+                    if not df.empty:
+                        for col in reducer_cols:
+                            df[col] = pd.to_numeric(df[col], errors='coerce')
+                            df[col] = df[col].replace(-9999, np.nan)
+
+                        fname = f"{save_folder_path}{date}_transects.csv"
+                        df.to_csv(fname, index=False)
+                        print(f"Saved: {fname}")
+                    else:
+                        print(f"Skipping {date}: No data.")
                 except Exception as e:
-                    print(f"
+                    print(f"Error processing {date}: {e}")
         else:
-            raise ValueError("
+            raise ValueError("processing_mode must be 'iterative' or 'aggregated'.")
 
     @staticmethod
     def extract_zonal_stats_from_buffer(
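A hedged usage sketch for the rewritten transect extraction (the enclosing method is the collection's transect routine, `transect_iterator` in RadGEEToolbox); `col` is a placeholder Sentinel1Collection and the coordinates are arbitrary:

```python
line = ee.Geometry.LineString([[-112.50, 40.00], [-112.40, 40.05]])

transects = col.transect_iterator(
    lines=[line],
    line_names=['shoreline'],
    reducer='mean',
    dist_interval=30,
    sampling_method='buffered_point',
    point_buffer_radius=15,
    batch_size=5,   # lower this if Earth Engine raises a 'Too many aggregations' error
)

# Aggregated mode returns a dict keyed '<transect_name>_<band>'; each value is a
# DataFrame indexed by distance with one column per image date.
for key, df in transects.items():
    print(key, df.shape)
```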
@@ -3070,6 +3210,386 @@ class Sentinel1Collection:
             print(f"Zonal stats saved to {file_path}.csv")
             return
         return pivot_df
+
+    def multiband_zonal_stats(
+        self,
+        geometry,
+        bands,
+        reducer_types,
+        scale=30,
+        geometry_name='geom',
+        dates=None,
+        include_area=False,
+        file_path=None
+    ):
+        """
+        Calculates zonal statistics for multiple bands over a single geometry for each image in the collection.
+        Allows for specifying different reducers for different bands. Optionally includes the geometry area.
+
+        Args:
+            geometry (ee.Geometry or ee.Feature): The single geometry to calculate statistics for.
+            bands (list of str): A list of band names to include in the analysis.
+            reducer_types (str or list of str): A single reducer name (e.g., 'mean') to apply to all bands,
+                or a list of reducer names matching the length of the 'bands' list to apply specific reducers
+                to specific bands.
+            scale (int, optional): The scale in meters for the reduction. Defaults to 30.
+            geometry_name (str, optional): A name for the geometry, used in column naming. Defaults to 'geom'.
+            dates (list of str, optional): A list of date strings ('YYYY-MM-DD') to filter the collection.
+                Defaults to None (processes all images).
+            include_area (bool, optional): If True, adds a column with the area of the geometry in square meters.
+                Defaults to False.
+            file_path (str, optional): If provided, saves the resulting DataFrame to a CSV file at this path.
+
+        Returns:
+            pd.DataFrame: A pandas DataFrame indexed by Date, with columns named as '{band}_{geometry_name}_{reducer}'.
+        """
+        # 1. Input Validation and Setup
+        if not isinstance(geometry, (ee.Geometry, ee.Feature)):
+            raise ValueError("The `geometry` argument must be an ee.Geometry or ee.Feature.")
+
+        region = geometry.geometry() if isinstance(geometry, ee.Feature) else geometry
+
+        if isinstance(bands, str):
+            bands = [bands]
+        if not isinstance(bands, list):
+            raise ValueError("The `bands` argument must be a string or a list of strings.")
+
+        # Handle reducer_types (str vs list)
+        if isinstance(reducer_types, str):
+            reducers_list = [reducer_types] * len(bands)
+        elif isinstance(reducer_types, list):
+            if len(reducer_types) != len(bands):
+                raise ValueError("If `reducer_types` is a list, it must have the same length as `bands`.")
+            reducers_list = reducer_types
+        else:
+            raise ValueError("`reducer_types` must be a string or a list of strings.")
+
+        # 2. Filter Collection
+        processing_col = self.collection
+
+        if dates:
+            processing_col = processing_col.filter(ee.Filter.inList('Date_Filter', dates))
+
+        processing_col = processing_col.select(bands)
+
+        # 3. Pre-calculate Area (if requested)
+        area_val = None
+        area_col_name = f"{geometry_name}_area_m2"
+        if include_area:
+            # Calculate geodesic area in square meters with maxError of 1m
+            area_val = region.area(1)
+
+        # 4. Define the Reduction Logic
+        def calculate_multiband_stats(image):
+            # Base feature with date property
+            date_val = image.get('Date_Filter')
+            feature = ee.Feature(None, {'Date': date_val})
+
+            # If requested, add the static area value to every feature
+            if include_area:
+                feature = feature.set(area_col_name, area_val)
+
+            unique_reducers = list(set(reducers_list))
+
+            # OPTIMIZED PATH: Single reducer type for all bands
+            if len(unique_reducers) == 1:
+                r_type = unique_reducers[0]
+                try:
+                    reducer = getattr(ee.Reducer, r_type)()
+                except AttributeError:
+                    reducer = ee.Reducer.mean()
+
+                stats = image.reduceRegion(
+                    reducer=reducer,
+                    geometry=region,
+                    scale=scale,
+                    maxPixels=1e13
+                )
+
+                for band in bands:
+                    col_name = f"{band}_{geometry_name}_{r_type}"
+                    val = stats.get(band)
+                    feature = feature.set(col_name, val)
+
+            # ITERATIVE PATH: Different reducers for different bands
+            else:
+                for band, r_type in zip(bands, reducers_list):
+                    try:
+                        reducer = getattr(ee.Reducer, r_type)()
+                    except AttributeError:
+                        reducer = ee.Reducer.mean()
+
+                    stats = image.select(band).reduceRegion(
+                        reducer=reducer,
+                        geometry=region,
+                        scale=scale,
+                        maxPixels=1e13
+                    )
+
+                    val = stats.get(band)
+                    col_name = f"{band}_{geometry_name}_{r_type}"
+                    feature = feature.set(col_name, val)
+
+            return feature
+
+        # 5. Execute Server-Side Mapping (with explicit Cast)
+        results_fc = ee.FeatureCollection(processing_col.map(calculate_multiband_stats))
+
+        # 6. Client-Side Conversion
+        try:
+            df = Sentinel1Collection.ee_to_df(results_fc, remove_geom=True)
+        except Exception as e:
+            raise RuntimeError(f"Failed to convert Earth Engine results to DataFrame. Error: {e}")
+
+        if df.empty:
+            print("Warning: No results returned. Check if the geometry intersects the imagery or if dates are valid.")
+            return pd.DataFrame()
+
+        # 7. Formatting & Reordering
+        if 'Date' in df.columns:
+            df['Date'] = pd.to_datetime(df['Date'])
+            df = df.sort_values('Date').set_index('Date')
+
+        # Construct the expected column names in the exact order of the input lists
+        expected_order = [f"{band}_{geometry_name}_{r_type}" for band, r_type in zip(bands, reducers_list)]
+
+        # If area was included, append it to the END of the list
+        if include_area:
+            expected_order.append(area_col_name)
+
+        # Reindex the DataFrame to match this order.
+        existing_cols = [c for c in expected_order if c in df.columns]
+        df = df[existing_cols]
+
+        # 8. Export (Optional)
+        if file_path:
+            if not file_path.lower().endswith('.csv'):
+                file_path += '.csv'
+            try:
+                df.to_csv(file_path)
+                print(f"Multiband zonal stats saved to {file_path}")
+            except Exception as e:
+                print(f"Error saving file to {file_path}: {e}")
+
+        return df
+
+    def sample(
+        self,
+        locations,
+        band=None,
+        scale=None,
+        location_names=None,
+        dates=None,
+        file_path=None,
+        tileScale=1
+    ):
+        """
+        Extracts time-series pixel values for a list of locations.
+
+
+        Args:
+            locations (list, tuple, ee.Geometry, or ee.FeatureCollection): Input points.
+            band (str, optional): The name of the band to sample. Defaults to the first band.
+            scale (int, optional): Scale in meters. Defaults to 30 if None.
+            location_names (list of str, optional): Custom names for locations.
+            dates (list, optional): Date filter ['YYYY-MM-DD'].
+            file_path (str, optional): CSV export path.
+            tileScale (int, optional): Aggregation tile scale. Defaults to 1.
+
+        Returns:
+            pd.DataFrame (or CSV if file_path is provided): DataFrame indexed by Date, columns by Location.
+        """
+        col = self.collection
+        if dates:
+            col = col.filter(ee.Filter.inList('Date_Filter', dates))
+
+        first_img = col.first()
+        available_bands = first_img.bandNames().getInfo()
+
+        if band:
+            if band not in available_bands:
+                raise ValueError(f"Band '{band}' not found. Available: {available_bands}")
+            target_band = band
+        else:
+            target_band = available_bands[0]
+
+        processing_col = col.select([target_band])
+
+        def set_name(f):
+            name = ee.Algorithms.If(
+                f.get('geo_name'), f.get('geo_name'),
+                ee.Algorithms.If(f.get('name'), f.get('name'),
+                ee.Algorithms.If(f.get('system:index'), f.get('system:index'), 'unnamed'))
+            )
+            return f.set('geo_name', name)
+
+        if isinstance(locations, (ee.FeatureCollection, ee.Feature)):
+            features = ee.FeatureCollection(locations)
+        elif isinstance(locations, ee.Geometry):
+            lbl = location_names[0] if (location_names and location_names[0]) else 'Point_1'
+            features = ee.FeatureCollection([ee.Feature(locations).set('geo_name', lbl)])
+        elif isinstance(locations, tuple) and len(locations) == 2:
+            lbl = location_names[0] if location_names else 'Location_1'
+            features = ee.FeatureCollection([ee.Feature(ee.Geometry.Point(locations), {'geo_name': lbl})])
+        elif isinstance(locations, list):
+            if all(isinstance(i, tuple) for i in locations):
+                names = location_names if location_names else [f"Loc_{i+1}" for i in range(len(locations))]
+                features = ee.FeatureCollection([
+                    ee.Feature(ee.Geometry.Point(p), {'geo_name': str(n)}) for p, n in zip(locations, names)
+                ])
+            elif all(isinstance(i, ee.Geometry) for i in locations):
+                names = location_names if location_names else [f"Geom_{i+1}" for i in range(len(locations))]
+                features = ee.FeatureCollection([
+                    ee.Feature(g, {'geo_name': str(n)}) for g, n in zip(locations, names)
+                ])
+            else:
+                raise ValueError("List must contain (lon, lat) tuples or ee.Geometry objects.")
+        else:
+            raise TypeError("Invalid locations input.")
+
+        features = features.map(set_name)
+
+
+        def sample_image(img):
+            date = img.get('Date_Filter')
+            use_scale = scale if scale is not None else 30
+
+
+            default_dict = ee.Dictionary({target_band: -9999})
+
+            def extract_point(f):
+                stats = img.reduceRegion(
+                    reducer=ee.Reducer.first(),
+                    geometry=f.geometry(),
+                    scale=use_scale,
+                    tileScale=tileScale
+                )
+
+                # Combine dictionaries.
+                # If stats has 'target_band' (even if 0), it overwrites -9999.
+                # If stats is empty (masked), -9999 remains.
+                safe_stats = default_dict.combine(stats, overwrite=True)
+                val = safe_stats.get(target_band)
+
+                return f.set({
+                    target_band: val,
+                    'image_date': date
+                })
+
+            return features.map(extract_point)
+
+        # Flatten the results
+        flat_results = processing_col.map(sample_image).flatten()
+
+        df = Sentinel1Collection.ee_to_df(
+            flat_results,
+            columns=['image_date', 'geo_name', target_band],
+            remove_geom=True
+        )
+
+        if df.empty:
+            print("Warning: No data returned.")
+            return pd.DataFrame()
+
+        # 6. Clean and Pivot
+        df[target_band] = pd.to_numeric(df[target_band], errors='coerce')
+
+        # Filter out ONLY the sentinel value (-9999), preserving 0.
+        df = df[df[target_band] != -9999]
+
+        if df.empty:
+            print(f"Warning: All data points were masked (NoData) for band '{target_band}'.")
+            return pd.DataFrame()
+
+        pivot_df = df.pivot(index='image_date', columns='geo_name', values=target_band)
+        pivot_df.index.name = 'Date'
+        pivot_df.columns.name = None
+        pivot_df = pivot_df.reset_index()
+
+        if file_path:
+            if not file_path.lower().endswith('.csv'):
+                file_path += '.csv'
+            pivot_df.to_csv(file_path, index=False)
+            print(f"Sampled data saved to {file_path}")
+            return None
+
+        return pivot_df
+
+    def multiband_sample(
+        self,
+        location,
+        scale=30,
+        file_path=None
+    ):
+        """
+        Extracts ALL band values for a SINGLE location across the entire collection.
+
+        Args:
+            location (tuple or ee.Geometry): A single (lon, lat) tuple OR ee.Geometry.
+            scale (int, optional): Scale in meters. Defaults to 30.
+            file_path (str, optional): Path to save CSV.
+
+        Returns:
+            pd.DataFrame: DataFrame indexed by Date, with columns for each Band.
+        """
+        if isinstance(location, tuple) and len(location) == 2:
+            geom = ee.Geometry.Point(location)
+        elif isinstance(location, ee.Geometry):
+            geom = location
+        else:
+            raise ValueError("Location must be a single (lon, lat) tuple or ee.Geometry.")
+
+        first_img = self.collection.first()
+        band_names = first_img.bandNames()
+
+        # Create a dictionary of {band_name: -9999}
+        # fill missing values so the Feature structure is consistent
+        dummy_values = ee.List.repeat(-9999, band_names.length())
+        default_dict = ee.Dictionary.fromLists(band_names, dummy_values)
+
+        def get_all_bands(img):
+            date = img.get('Date_Filter')
+
+            # reduceRegion returns a Dictionary.
+            # If a pixel is masked, that band key is missing from 'stats'.
+            stats = img.reduceRegion(
+                reducer=ee.Reducer.first(),
+                geometry=geom,
+                scale=scale,
+                maxPixels=1e13
+            )
+
+            # Combine stats with defaults.
+            # overwrite=True means real data (stats) overwrites the -9999 defaults.
+            complete_stats = default_dict.combine(stats, overwrite=True)
+
+            return ee.Feature(None, complete_stats).set('Date', date)
+
+        fc = ee.FeatureCollection(self.collection.map(get_all_bands))
+
+        df = Sentinel1Collection.ee_to_df(fc, remove_geom=True)
+
+        if df.empty:
+            print("Warning: No data found.")
+            return pd.DataFrame()
+
+        # 6. Cleanup
+        if 'Date' in df.columns:
+            df['Date'] = pd.to_datetime(df['Date'])
+            df = df.set_index('Date').sort_index()
+
+        # Replace our sentinel -9999 with proper NaNs
+        df = df.replace(-9999, np.nan)
+
+        # 7. Export
+        if file_path:
+            if not file_path.lower().endswith('.csv'):
+                file_path += '.csv'
+            df.to_csv(file_path)
+            print(f"Multiband sample saved to {file_path}")
+            return None
+
+        return df
 
     def export_to_asset_collection(
         self,
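A hedged usage sketch for the three new sampling helpers; `col` is a placeholder Sentinel1Collection and the coordinates are arbitrary:

```python
pts = [(-112.45, 40.02), (-112.40, 40.05)]

# Time series of one band at several points (rows = dates, columns = location names).
ts = col.sample(locations=pts, band='VV', scale=10, location_names=['west', 'east'])

# Per-band statistics over one polygon, with a different reducer per band.
stats = col.multiband_zonal_stats(
    geometry=ee.Geometry.Point(pts[0]).buffer(500),
    bands=['VV', 'VH'],
    reducer_types=['mean', 'stdDev'],
    scale=10,
    include_area=True,
)

# All band values at a single point across the collection.
all_bands = col.multiband_sample(location=pts[0], scale=10)
```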
@@ -3080,7 +3600,8 @@ class Sentinel1Collection:
         filename_prefix="",
         crs=None,
         max_pixels=int(1e13),
-        description_prefix="export"
+        description_prefix="export",
+        overwrite=False
     ):
         """
         Exports an image collection to a Google Earth Engine asset collection. The asset collection will be created if it does not already exist,
@@ -3095,6 +3616,7 @@ class Sentinel1Collection:
             crs (str, optional): The coordinate reference system. Defaults to None, which will use the image's CRS.
             max_pixels (int, optional): The maximum number of pixels. Defaults to int(1e13).
             description_prefix (str, optional): The description prefix. Defaults to "export".
+            overwrite (bool, optional): Whether to overwrite existing assets. Defaults to False.
 
         Returns:
             None: (queues export tasks)
@@ -3112,6 +3634,14 @@ class Sentinel1Collection:
             asset_id = asset_collection_path + "/" + filename_prefix + date_str
             desc = description_prefix + "_" + filename_prefix + date_str
 
+            if overwrite:
+                try:
+                    ee.data.deleteAsset(asset_id)
+                    print(f"Overwriting: Deleted existing asset {asset_id}")
+                except ee.EEException:
+                    # Asset does not exist, so nothing to delete. Proceed safely.
+                    pass
+
             params = {
                 'image': img,
                 'description': desc,