openforis-whisp 2.0.0a4__py3-none-any.whl → 2.0.0a6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openforis_whisp/__init__.py +75 -75
- openforis_whisp/data_conversion.py +493 -371
- openforis_whisp/datasets.py +1384 -1381
- openforis_whisp/logger.py +75 -75
- openforis_whisp/parameters/__init__.py +15 -15
- openforis_whisp/parameters/config_runtime.py +44 -44
- openforis_whisp/parameters/lookup_context_and_metadata.csv +13 -13
- openforis_whisp/parameters/lookup_gee_datasets.csv +1 -1
- openforis_whisp/pd_schemas.py +77 -77
- openforis_whisp/reformat.py +495 -495
- openforis_whisp/risk.py +771 -777
- openforis_whisp/stats.py +1134 -953
- openforis_whisp/utils.py +154 -154
- {openforis_whisp-2.0.0a4.dist-info → openforis_whisp-2.0.0a6.dist-info}/LICENSE +21 -21
- {openforis_whisp-2.0.0a4.dist-info → openforis_whisp-2.0.0a6.dist-info}/METADATA +37 -46
- openforis_whisp-2.0.0a6.dist-info/RECORD +17 -0
- {openforis_whisp-2.0.0a4.dist-info → openforis_whisp-2.0.0a6.dist-info}/WHEEL +1 -1
- openforis_whisp-2.0.0a4.dist-info/RECORD +0 -17
openforis_whisp/stats.py
CHANGED
@@ -1,953 +1,1134 @@

Removed (previous stats.py, 953 lines). Only the opening lines are legible in this rendering:

-import ee
-import pandas as pd
-from pathlib import Path
-from .datasets import combine_datasets
-import json
-import country_converter as coco
-from openforis_whisp.parameters.config_runtime import (
-    plot_id_column,
-
-    geometry_type_column,
-    geometry_area_column,
-    geometry_area_column_formatting,
-    centroid_x_coord_column,
-    centroid_y_coord_column,
-    iso3_country_column,
-    iso2_country_column,
-    admin_1_column,
-    stats_unit_type_column,
-    stats_area_columns_formatting,
-    stats_percent_columns_formatting,
-    water_flag,
-)
-from .data_conversion import (
-    convert_ee_to_df,
-    convert_geojson_to_ee,
-    convert_ee_to_geojson,
-    # convert_csv_to_geojson,
-    convert_df_to_geojson,
-)  # copied functions from whisp-api and geemap (accessed 2024) to avoid dependency
-from .reformat import validate_dataframe_using_lookups
-
-# NB functions that included "formatted" in the name apply a schema for validation and reformatting of the output dataframe. The schema is created from lookup tables.
-
-
-def whisp_formatted_stats_geojson_to_df(
-    input_geojson_filepath: Path | str,
-    external_id_column=None,
-    remove_geom=False,
-    national_codes=None,
-    unit_type="ha",
-) -> pd.DataFrame:
-    """
-    Main function for most users.
-    Converts a GeoJSON file to a pandas DataFrame containing Whisp stats for the input ROI.
-    Output df is validated against a panderas schema (created on the fly from the two lookup CSVs).
-
-    This function first converts the provided GeoJSON file into an Earth Engine FeatureCollection.
-    It then processes the FeatureCollection to extract relevant Whisp statistics,
-    returning a structured DataFrame that aligns with the expected schema.
-
-    If `external_id_column` is provided, it will be used to link external identifiers
-    from the input GeoJSON to the output DataFrame.
-
-    Parameters
-    ----------
-    input_geojson_filepath : Path | str
-        The filepath to the GeoJSON of the ROI to analyze.
-    external_id_column : str, optional
-        The column in the GeoJSON containing external IDs to be preserved in the output DataFrame.
- … [the remaining removed lines are not legible in this rendering]

Added (new stats.py, lines 1-1134):
import ee
import pandas as pd
from pathlib import Path
from .datasets import combine_datasets
import json
import country_converter as coco
from openforis_whisp.parameters.config_runtime import (
    plot_id_column,
    external_id_column,
    geometry_type_column,
    geometry_area_column,
    geometry_area_column_formatting,
    centroid_x_coord_column,
    centroid_y_coord_column,
    iso3_country_column,
    iso2_country_column,
    admin_1_column,
    stats_unit_type_column,
    stats_area_columns_formatting,
    stats_percent_columns_formatting,
    water_flag,
)
from .data_conversion import (
    convert_ee_to_df,
    convert_geojson_to_ee,
    convert_ee_to_geojson,
    # convert_csv_to_geojson,
    convert_df_to_geojson,
)  # copied functions from whisp-api and geemap (accessed 2024) to avoid dependency
from .reformat import validate_dataframe_using_lookups

# NB functions that included "formatted" in the name apply a schema for validation and reformatting of the output dataframe. The schema is created from lookup tables.


def whisp_formatted_stats_geojson_to_df(
    input_geojson_filepath: Path | str,
    external_id_column=None,
    remove_geom=False,
    national_codes=None,
    unit_type="ha",
) -> pd.DataFrame:
    """
    Main function for most users.
    Converts a GeoJSON file to a pandas DataFrame containing Whisp stats for the input ROI.
    Output df is validated against a panderas schema (created on the fly from the two lookup CSVs).

    This function first converts the provided GeoJSON file into an Earth Engine FeatureCollection.
    It then processes the FeatureCollection to extract relevant Whisp statistics,
    returning a structured DataFrame that aligns with the expected schema.

    If `external_id_column` is provided, it will be used to link external identifiers
    from the input GeoJSON to the output DataFrame.

    Parameters
    ----------
    input_geojson_filepath : Path | str
        The filepath to the GeoJSON of the ROI to analyze.
    external_id_column : str, optional
        The column in the GeoJSON containing external IDs to be preserved in the output DataFrame.
        This column must exist as a property in ALL features of the GeoJSON file.
        Use debug_feature_collection_properties() to inspect available properties if you encounter errors.
    remove_geom : bool, default=False
        If True, the geometry of the GeoJSON is removed from the output DataFrame.
    national_codes : list, optional
        List of ISO2 country codes to include national datasets.
    unit_type: str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".

    Returns
    -------
    df_stats : pd.DataFrame
        The DataFrame containing the Whisp stats for the input ROI.
    """
    feature_collection = convert_geojson_to_ee(str(input_geojson_filepath))

    return whisp_formatted_stats_ee_to_df(
        feature_collection,
        external_id_column,
        remove_geom,
        national_codes=national_codes,
        unit_type=unit_type,  # Fixed: now it's a keyword argument
    )
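Not part of the package diff: a minimal usage sketch for the main entry point above, assuming an authenticated Earth Engine session; the file name `plots.geojson`, the property `farm_id`, and the country codes are placeholders.

```python
import ee
from openforis_whisp.stats import whisp_formatted_stats_geojson_to_df

ee.Initialize()  # assumes an authenticated Earth Engine session

# Hypothetical inputs: "plots.geojson" and its "farm_id" property are placeholders.
df = whisp_formatted_stats_geojson_to_df(
    input_geojson_filepath="plots.geojson",
    external_id_column="farm_id",   # must exist on every feature
    national_codes=["CI", "GH"],    # include national datasets for these ISO2 codes
    unit_type="percent",            # or "ha" (the default)
)
df.to_csv("whisp_stats.csv", index=False)
```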
def whisp_formatted_stats_geojson_to_geojson(
    input_geojson_filepath,
    output_geojson_filepath,
    external_id_column=None,
    geo_column: str = "geo",
    national_codes=None,
    unit_type="ha",
):
    """
    Convert a formatted GeoJSON file with a geo column into a GeoJSON file containing Whisp stats.

    Parameters
    ----------
    input_geojson_filepath : str
        The filepath to the input GeoJSON file.
    output_geojson_filepath : str
        The filepath to save the output GeoJSON file.
    external_id_column : str, optional
        The name of the column containing external IDs, by default None.
    geo_column : str, optional
        The name of the column containing GeoJSON geometries, by default "geo".
    national_codes : list, optional
        List of ISO2 country codes to include national datasets.
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".

    Returns
    -------
    None
    """
    df = whisp_formatted_stats_geojson_to_df(
        input_geojson_filepath=input_geojson_filepath,
        external_id_column=external_id_column,
        national_codes=national_codes,
        unit_type=unit_type,
    )
    # Convert the df to GeoJSON
    convert_df_to_geojson(df, output_geojson_filepath, geo_column)

    print(f"GeoJSON with Whisp stats saved to {output_geojson_filepath}")


def whisp_formatted_stats_ee_to_geojson(
    feature_collection: ee.FeatureCollection,
    output_geojson_filepath: str,
    external_id_column=None,
    geo_column: str = "geo",
    national_codes=None,
    unit_type="ha",
):
    """
    Convert an Earth Engine FeatureCollection to a GeoJSON file containing Whisp stats.

    Parameters
    ----------
    feature_collection : ee.FeatureCollection
        The feature collection of the ROI to analyze.
    output_geojson_filepath : str
        The filepath to save the output GeoJSON file.
    external_id_column : str, optional
        The name of the column containing external IDs, by default None.
    geo_column : str, optional
        The name of the column containing GeoJSON geometries, by default "geo".
    national_codes : list, optional
        List of ISO2 country codes to include national datasets.
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".
    Returns
    -------
    None
    """
    # Convert ee feature collection to a pandas dataframe
    df_stats = whisp_formatted_stats_ee_to_df(
        feature_collection,
        external_id_column,
        national_codes=national_codes,
        unit_type=unit_type,
    )

    # Convert the df to GeoJSON
    convert_df_to_geojson(df_stats, output_geojson_filepath, geo_column)

    print(f"GeoJSON with Whisp stats saved to {output_geojson_filepath}")
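An illustrative sketch (not from the package) of the FeatureCollection variant above, assuming plot polygons are already stored as an Earth Engine table asset; the asset path is a placeholder.

```python
import ee
from openforis_whisp.stats import whisp_formatted_stats_ee_to_geojson

ee.Initialize()  # assumes an authenticated Earth Engine session

# Hypothetical asset path; replace with a real table of plot polygons.
fc = ee.FeatureCollection("projects/my-project/assets/plots")

whisp_formatted_stats_ee_to_geojson(
    feature_collection=fc,
    output_geojson_filepath="plots_with_whisp_stats.geojson",
    geo_column="geo",   # default column that will carry the geometry
    unit_type="ha",
)
```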
def whisp_formatted_stats_ee_to_df(
    feature_collection: ee.FeatureCollection,
    external_id_column=None,
    remove_geom=False,
    national_codes=None,
    unit_type="ha",
) -> pd.DataFrame:
    """
    Convert a feature collection to a validated DataFrame with Whisp statistics.

    Parameters
    ----------
    feature_collection : ee.FeatureCollection
        The feature collection of the ROI to analyze.
    external_id_column : str, optional
        The name of the external ID column, by default None.
    remove_geom : bool, optional
        Whether to remove the geometry column, by default False.
    national_codes : list, optional
        List of ISO2 country codes to include national datasets.
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".

    Returns
    -------
    validated_df : pd.DataFrame
        The validated dataframe containing the Whisp stats for the input ROI.
    """
    # Convert ee feature collection to a pandas dataframe
    df_stats = whisp_stats_ee_to_df(
        feature_collection,
        external_id_column,
        remove_geom,
        national_codes=national_codes,
        unit_type=unit_type,
    )

    # Pass national_codes to validation function to filter schema
    validated_df = validate_dataframe_using_lookups(
        df_stats, national_codes=national_codes
    )
    return validated_df
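A short sketch (not from the package) contrasting the schema-validated output above with the raw variants that follow; the asset path and country code are placeholders.

```python
import ee
from openforis_whisp.stats import (
    whisp_stats_ee_to_df,
    whisp_formatted_stats_ee_to_df,
)

ee.Initialize()
fc = ee.FeatureCollection("projects/my-project/assets/plots")  # hypothetical asset

# Raw: exactly what the GEE reduction returned, no schema applied.
raw_df = whisp_stats_ee_to_df(fc, national_codes=["BR"])

# Formatted: the same stats passed through validate_dataframe_using_lookups,
# so columns are typed and ordered per the lookup CSVs (filtered to Brazil + global).
tidy_df = whisp_formatted_stats_ee_to_df(fc, national_codes=["BR"])
```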
### functions without additional formatting below (i.e., raw output from GEE processing without schema validation step)


def whisp_stats_geojson_to_df(
    input_geojson_filepath: Path | str,
    external_id_column=None,
    remove_geom=False,
    national_codes=None,
    unit_type="ha",
) -> pd.DataFrame:
    """
    Convert a GeoJSON file to a pandas DataFrame with Whisp statistics.

    Parameters
    ----------
    input_geojson_filepath : Path | str
        The filepath to the GeoJSON of the ROI to analyze.
    external_id_column : str, optional
        The name of the external ID column, by default None.
    remove_geom : bool, optional
        Whether to remove the geometry column, by default False.
    national_codes : list, optional
        List of ISO2 country codes to include national datasets.
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".

    Returns
    -------
    df_stats : pd.DataFrame
        The dataframe containing the Whisp stats for the input ROI.
    """
    feature_collection = convert_geojson_to_ee(str(input_geojson_filepath))

    return whisp_stats_ee_to_df(
        feature_collection,
        external_id_column,
        remove_geom,
        national_codes=national_codes,
        unit_type=unit_type,
    )


def whisp_stats_geojson_to_ee(
    input_geojson_filepath: Path | str,
    external_id_column=None,
    national_codes=None,
) -> ee.FeatureCollection:
    """
    Convert a GeoJSON file to an Earth Engine FeatureCollection with Whisp statistics.

    Parameters
    ----------
    input_geojson_filepath : Path | str
        The filepath to the GeoJSON of the ROI to analyze.
    external_id_column : str, optional
        The name of the external ID column, by default None.
    national_codes : list, optional
        List of ISO2 country codes to include national datasets.

    Returns
    -------
    ee.FeatureCollection
        The feature collection containing the Whisp stats for the input ROI.
    """
    feature_collection = convert_geojson_to_ee(str(input_geojson_filepath))

    return whisp_stats_ee_to_ee(
        feature_collection, external_id_column, national_codes=national_codes
    )


def whisp_stats_geojson_to_geojson(
    input_geojson_filepath,
    output_geojson_filepath,
    external_id_column=None,
    national_codes=None,
    unit_type="ha",
):
    """
    Convert a GeoJSON file to a GeoJSON object containing Whisp stats for the input ROI.

    Parameters
    ----------
    input_geojson_filepath : str
        The filepath to the input GeoJSON file.
    output_geojson_filepath : str
        The filepath to save the output GeoJSON file.
    external_id_column : str, optional
        The name of the column containing external IDs, by default None.
    national_codes : list, optional
        List of ISO2 country codes to include national datasets.
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".

    Returns
    -------
    None
    """
    # Convert GeoJSON to Earth Engine FeatureCollection
    feature_collection = convert_geojson_to_ee(input_geojson_filepath)

    # Get stats as a FeatureCollection
    stats_feature_collection = whisp_stats_ee_to_ee(
        feature_collection,
        external_id_column,
        national_codes=national_codes,
        unit_type=unit_type,
    )

    # Convert the stats FeatureCollection to GeoJSON
    stats_geojson = convert_ee_to_geojson(stats_feature_collection)

    # Save the GeoJSON to a file
    with open(output_geojson_filepath, "w") as f:
        json.dump(stats_geojson, f, indent=2)
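A minimal end-to-end sketch for the raw GeoJSON-to-GeoJSON path above (illustrative only); the file paths and country code are placeholders.

```python
import ee
from openforis_whisp.stats import whisp_stats_geojson_to_geojson

ee.Initialize()

# Hypothetical paths; the output file holds the input geometries with the
# raw (non-validated) Whisp properties attached to each feature.
whisp_stats_geojson_to_geojson(
    "plots.geojson",
    "plots_whisp_raw.geojson",
    national_codes=["ID"],
    unit_type="ha",
)
```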
def whisp_stats_geojson_to_drive(
    input_geojson_filepath: Path | str,
    external_id_column=None,
    national_codes=None,
    unit_type="ha",
):
    """
    Export Whisp statistics for a GeoJSON file to Google Drive.

    Parameters
    ----------
    input_geojson_filepath : Path | str
        The filepath to the GeoJSON of the ROI to analyze.
    external_id_column : str, optional
        The name of the external ID column, by default None.
    national_codes : list, optional
        List of ISO2 country codes to include national datasets.
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".

    Returns
    -------
    Message showing location of file in Google Drive
    """
    try:
        input_geojson_filepath = Path(input_geojson_filepath)
        if not input_geojson_filepath.exists():
            raise FileNotFoundError(f"File {input_geojson_filepath} does not exist.")

        feature_collection = convert_geojson_to_ee(str(input_geojson_filepath))

        return whisp_stats_ee_to_drive(
            feature_collection,
            external_id_column,
            national_codes=national_codes,
            unit_type=unit_type,
        )

    except Exception as e:
        print(f"An error occurred: {e}")
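An illustrative sketch (not from the package) of the Drive export above and one way to watch the resulting batch task; the file path and ID column are placeholders.

```python
import ee
from openforis_whisp.stats import whisp_stats_geojson_to_drive

ee.Initialize()

# Hypothetical inputs; the export is created as the EE task "whisp_output_table"
# and lands in Google Drive as a CSV.
whisp_stats_geojson_to_drive("plots.geojson", external_id_column="farm_id")

# The export runs as an Earth Engine batch task; track it at
# https://code.earthengine.google.com/tasks or poll from Python:
latest_task = ee.batch.Task.list()[0]
print(latest_task.status())
```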
def whisp_stats_ee_to_ee(
    feature_collection,
    external_id_column,
    national_codes=None,
    unit_type="ha",
    keep_properties=None,
):
    """
    Process a feature collection to get statistics for each feature.

    Parameters:
        feature_collection (ee.FeatureCollection): The input feature collection.
        external_id_column (str): The name of the external ID column to check.
        national_codes (list, optional): List of ISO2 country codes to include national datasets.
        unit_type (str): Whether to use hectares ("ha") or percentage ("percent"), default "ha".
        keep_properties (None, bool, or list, optional): Properties to keep from the input features.
            - None: Remove all properties (default behavior)
            - True: Keep all properties
            - list: Keep only the specified properties

    Returns:
        ee.FeatureCollection: The output feature collection with statistics.
    """
    if external_id_column is not None:
        try:
            # Validate that the external_id_column exists in all features
            validation_result = validate_external_id_column(
                feature_collection, external_id_column
            )

            if not validation_result["is_valid"]:
                raise ValueError(validation_result["error_message"])

            # First handle property selection, but preserve the external_id_column
            if keep_properties is not None:
                if keep_properties == True:
                    # Keep all properties including external_id_column
                    pass  # No need to modify feature_collection
                elif isinstance(keep_properties, list):
                    # Ensure external_id_column is included in the list
                    if external_id_column not in keep_properties:
                        keep_properties = keep_properties + [external_id_column]
                    feature_collection = feature_collection.select(keep_properties)
                else:
                    raise ValueError(
                        "keep_properties must be None, True, or a list of property names."
                    )

            # Set the external_id with robust null handling
            def set_external_id_safely_and_clean(feature):
                external_id_value = feature.get(external_id_column)
                # Use server-side null checking and string conversion
                external_id_value = ee.Algorithms.If(
                    ee.Algorithms.IsEqual(external_id_value, None),
                    "unknown",
                    ee.String(external_id_value),
                )
                # Create a new feature with the standardized external_id column
                # Note: we use "external_id" as the standardized column name, not the original external_id_column name
                return ee.Feature(feature.set("external_id", external_id_value))

            feature_collection = feature_collection.map(
                set_external_id_safely_and_clean
            )

            # Finally, clean up to keep only geometry and external_id if keep_properties is None
            if keep_properties is None:
                feature_collection = feature_collection.select(["external_id"])

        except Exception as e:
            # Handle the exception and provide a helpful error message
            print(
                f"An error occurred when trying to set the external_id_column: {external_id_column}. Error: {e}"
            )
            raise e  # Re-raise the exception to stop execution
    else:
        feature_collection = _keep_fc_properties(feature_collection, keep_properties)

    fc = get_stats(
        feature_collection, national_codes=national_codes, unit_type=unit_type
    )

    return add_id_to_feature_collection(dataset=fc, id_name=plot_id_column)
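A sketch (not from the package) of how the three `keep_properties` modes documented above behave; the asset path and property names are placeholders.

```python
import ee
from openforis_whisp.stats import whisp_stats_ee_to_ee

ee.Initialize()
# Hypothetical asset whose features carry "farm_id" and "crop" properties.
fc = ee.FeatureCollection("projects/my-project/assets/plots")

# Default: input properties are dropped; only the standardized "external_id"
# (copied from "farm_id") plus the computed stats remain.
out_default = whisp_stats_ee_to_ee(fc, "farm_id")

# Keep every input property alongside the stats.
out_all = whisp_stats_ee_to_ee(fc, "farm_id", keep_properties=True)

# Keep a subset; "farm_id" is appended automatically if omitted from the list.
out_subset = whisp_stats_ee_to_ee(fc, "farm_id", keep_properties=["crop"])
```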
def _keep_fc_properties(feature_collection, keep_properties):
    # If keep_properties is specified, select only those properties
    if keep_properties is None:
        feature_collection = feature_collection.select([])
    elif keep_properties == True:
        # If keep_properties is true, select all properties
        first_feature_props = feature_collection.first().propertyNames().getInfo()
        feature_collection = feature_collection.select(first_feature_props)
    elif isinstance(keep_properties, list):
        feature_collection = feature_collection.select(keep_properties)
    else:
        raise ValueError(
            "keep_properties must be None, True, or a list of property names."
        )
    return feature_collection


def whisp_stats_ee_to_df(
    feature_collection: ee.FeatureCollection,
    external_id_column=None,
    remove_geom=False,
    national_codes=None,
    unit_type="ha",
) -> pd.DataFrame:
    """
    Convert a Google Earth Engine FeatureCollection to a pandas DataFrame and convert ISO3 to ISO2 country codes.

    Parameters
    ----------
    feature_collection : ee.FeatureCollection
        The input FeatureCollection to analyze.
    external_id_column : str, optional
        The name of the external ID column, by default None.
    remove_geom : bool, optional
        Whether to remove the geometry column, by default True.
    national_codes : list, optional
        List of ISO2 country codes to include national datasets.
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".

    Returns
    -------
    df_stats : pd.DataFrame
        The dataframe containing the Whisp stats for the input ROI.
    """
    try:
        df_stats = convert_ee_to_df(
            ee_object=whisp_stats_ee_to_ee(
                feature_collection,
                external_id_column,
                national_codes=national_codes,
                unit_type=unit_type,
            ),
            remove_geom=remove_geom,
        )
    except Exception as e:
        print(f"An error occurred during the conversion from EE to DataFrame: {e}")
        return pd.DataFrame()  # Return an empty DataFrame in case of error

    try:
        df_stats = convert_iso3_to_iso2(
            df=df_stats,
            iso3_column=iso3_country_column,
            iso2_column=iso2_country_column,
        )
    except Exception as e:
        print(f"An error occurred during the ISO3 to ISO2 conversion: {e}")
        return pd.DataFrame()  # Return an empty DataFrame in case of error

    return df_stats


def whisp_stats_ee_to_drive(
    feature_collection: ee.FeatureCollection,
    external_id_column=None,
    national_codes=None,
    unit_type="ha",
):
    """
    Export Whisp statistics for a feature collection to Google Drive.

    Parameters
    ----------
    feature_collection : ee.FeatureCollection
        The feature collection to analyze.
    external_id_column : str, optional
        The name of the external ID column, by default None.
    national_codes : list, optional
        List of ISO2 country codes to include national datasets.
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".
    Returns
    -------
    None
    """
    try:
        task = ee.batch.Export.table.toDrive(
            collection=whisp_stats_ee_to_ee(
                feature_collection,
                external_id_column,
                national_codes=national_codes,
                unit_type=unit_type,
            ),
            description="whisp_output_table",
            # folder="whisp_results",
            fileFormat="CSV",
        )
        task.start()
        print(
            "Exporting to Google Drive: 'whisp_results/whisp_output_table.csv'. To track progress: https://code.earthengine.google.com/tasks"
        )
    except Exception as e:
        print(f"An error occurred during the export: {e}")


#### main stats functions


# Get stats for a feature or feature collection
def get_stats(feature_or_feature_col, national_codes=None, unit_type="ha"):
    """
    Get stats for a feature or feature collection with optional filtering by national codes.

    Parameters
    ----------
    feature_or_feature_col : ee.Feature or ee.FeatureCollection
        The input feature or feature collection to analyze
    national_codes : list, optional
        List of ISO2 country codes to include national datasets
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".
    Returns
    -------
    ee.FeatureCollection
        Feature collection with calculated statistics
    """
    # Check if the input is a Feature or a FeatureCollection
    if isinstance(feature_or_feature_col, ee.Feature):
        # If the input is a Feature, call the server-side function for processing
        print("feature")
        # For a single feature, we need to combine datasets with the national_codes filter
        img_combined = combine_datasets(national_codes=national_codes)
        output = ee.FeatureCollection(
            [
                get_stats_feature(
                    feature_or_feature_col, img_combined, unit_type=unit_type
                )
            ]
        )
    elif isinstance(feature_or_feature_col, ee.FeatureCollection):
        # If the input is a FeatureCollection, call the server-side function for processing
        output = get_stats_fc(
            feature_or_feature_col, national_codes=national_codes, unit_type=unit_type
        )
    else:
        output = "Check inputs: not an ee.Feature or ee.FeatureCollection"
    return output
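An illustrative sketch (not from the package) of how `get_stats` dispatches on the input type; the polygon coordinates and country code are placeholders.

```python
import ee
from openforis_whisp.stats import get_stats

ee.Initialize()

# A single client-side ee.Feature takes the wrap-into-a-one-element-collection branch...
poly = ee.Feature(
    ee.Geometry.Polygon(
        [[[-1.55, 6.60], [-1.55, 6.61], [-1.54, 6.61], [-1.54, 6.60]]]
    )
)
single_result = get_stats(poly)

# ...while an ee.FeatureCollection is mapped over server-side via get_stats_fc.
fc = ee.FeatureCollection([poly])
fc_result = get_stats(fc, national_codes=["GH"], unit_type="percent")
print(fc_result.first().getInfo())
```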
# Get statistics for a feature collection
def get_stats_fc(feature_col, national_codes=None, unit_type="ha"):
    """
    Calculate statistics for a feature collection using Whisp datasets.

    Parameters
    ----------
    feature_col : ee.FeatureCollection
        The input feature collection to analyze
    national_codes : list, optional
        List of ISO2 country codes (e.g., ["BR", "US"]) to include national datasets.
        If provided, only national datasets for these countries and global datasets will be used.
        If None (default), only global datasets will be used.
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".
    Returns
    -------
    ee.FeatureCollection
        Feature collection with calculated statistics
    """
    img_combined = combine_datasets(
        national_codes=national_codes
    )  # Pass national_codes to combine_datasets

    out_feature_col = ee.FeatureCollection(
        feature_col.map(
            lambda feature: get_stats_feature(
                feature, img_combined, unit_type=unit_type
            )
        )
    )
    # print(out_feature_col.first().getInfo()) # for testing

    return out_feature_col


# Get statistics for a single feature


def get_stats_feature(feature, img_combined, unit_type="ha"):
    """
    Get statistics for a single feature using a pre-combined image.

    Parameters
    ----------
    feature : ee.Feature
        The feature to analyze
    img_combined : ee.Image
        Pre-combined image with all the datasets
    unit_type : str, optional
        Whether to use hectares ("ha") or percentage ("percent"), by default "ha".

    Returns
    -------
    ee.Feature
        Feature with calculated statistics
    """
    reduce = img_combined.reduceRegion(
        reducer=ee.Reducer.sum(),
        geometry=feature.geometry(),
        scale=10,
        maxPixels=1e10,
        tileScale=8,
    )

    # Get basic feature information
    feature_info = get_type_and_location(feature)

    # add statistics unit type (e.g., percentage or hectares) to dictionary
    stats_unit_type = ee.Dictionary({stats_unit_type_column: unit_type})

    # Now, modified_dict contains all keys with the prefix added
    reduce_ha = reduce.map(
        lambda key, val: divide_and_format(ee.Number(val), ee.Number(10000))
    )

    # Get value for hectares
    area_ha = ee.Number(ee.Dictionary(reduce_ha).get(geometry_area_column))

    # Apply the function to each value in the dictionary using map()
    reduce_percent = reduce_ha.map(
        lambda key, val: percent_and_format(ee.Number(val), area_ha)
    )

    # Reformat the hectare statistics
    reducer_stats_ha = reduce_ha.set(
        geometry_area_column, area_ha.format(geometry_area_column_formatting)
    )  # area ha (formatted)

    # Reformat the percentage statistics
    reducer_stats_percent = reduce_percent.set(
        geometry_area_column, area_ha.format(geometry_area_column_formatting)
    )  # area ha (formatted)

    # Add country info onto hectare analysis results
    properties_ha = feature_info.combine(ee.Dictionary(reducer_stats_ha)).combine(
        stats_unit_type
    )

    # Add country info onto percentage analysis results
    properties_percent = feature_info.combine(
        ee.Dictionary(reducer_stats_percent)
    ).combine(stats_unit_type)

    # Choose whether to use hectares or percentage based on the parameter instead of global variable
    out_feature = ee.Algorithms.If(
        unit_type == "ha",
        feature.set(properties_ha),  # .setGeometry(None),
        feature.set(properties_percent),  # .setGeometry(None),
    )

    return out_feature
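A pure-Python illustration (not from the package) of the arithmetic used by `get_stats_feature` above, assuming the combined image's bands are pixel-area based so that `reduceRegion` sums are in square metres; the numbers are made up, and the exact rounding depends on the configured format strings.

```python
# reduceRegion() sums per-band area in m2 for one plot.
pixel_sum_m2 = 123_456.0   # area of one dataset intersecting the plot
plot_area_m2 = 200_000.0   # the geometry-area band for the same plot

dataset_ha = pixel_sum_m2 / 10_000   # divide_and_format: m2 -> ha
plot_ha = plot_area_m2 / 10_000

dataset_percent = dataset_ha / plot_ha * 100   # percent_and_format: share of the plot
print(round(dataset_ha, 1), round(dataset_percent, 1))   # 12.3 61.7
```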
# Get basic feature information - uses admin and water datasets in gee.
def get_type_and_location(feature):
    """Extracts basic feature information including country, admin area, geometry type, coordinates, and water flags."""

    # Get centroid of the feature's geometry
    centroid = feature.geometry().centroid(1)

    # Fetch location info from geoboundaries (country, admin)
    location = ee.Dictionary(get_geoboundaries_info(centroid))
    country = ee.Dictionary({iso3_country_column: location.get("shapeGroup")})

    admin_1 = ee.Dictionary(
        {admin_1_column: location.get("shapeName")}
    )  # Administrative level 1 (if available)

    # Prepare the water flag information
    water_all = water_flag_all_prep()
    water_flag_dict = value_at_point_flag(
        point=centroid, image=water_all, band_name=water_flag, output_name=water_flag
    )

    # Get the geometry type of the feature
    geom_type = ee.Dictionary({geometry_type_column: feature.geometry().type()})

    # Get the coordinates (latitude, longitude) of the centroid
    coords_list = centroid.coordinates()
    coords_dict = ee.Dictionary(
        {
            centroid_x_coord_column: coords_list.get(0),  # Longitude
            centroid_y_coord_column: coords_list.get(1),  # Latitude
        }
    )

    # Combine all the extracted info into a single dictionary
    feature_info = (
        country.combine(admin_1)
        .combine(geom_type)
        .combine(coords_dict)
        .combine(water_flag_dict)
    )

    return feature_info


# Define a function to divide each value by 10,000 and format it with one decimal place
def divide_and_format(val, unit):
    # Convert the image to an ee.Number, divide by 10,000, and format with one decimal place
    formatted_value = ee.Number.parse(
        ee.Number(ee.Number(val).divide(ee.Number(unit))).format(
            stats_area_columns_formatting
        )
    )
    # Return the formatted value
    return ee.Number(formatted_value)


# Define a function to divide by total area of geometry and multiply by 100
def percent_and_format(val, area_ha):
    formatted_value = ee.Number.parse(
        ee.Number(ee.Number(val).divide(area_ha).multiply(ee.Number(100))).format(
            stats_percent_columns_formatting
        )
    )
    # Return the formatted value
    return ee.Number(formatted_value)


# geoboundaries - admin units from a freqently updated database, allows commercial use (CC BY 4.0 DEED) (disputed territories may need checking)
def get_geoboundaries_info(geometry):
    gbounds_ADM0 = ee.FeatureCollection("WM/geoLab/geoBoundaries/600/ADM1")
    polygonsIntersectPoint = gbounds_ADM0.filterBounds(geometry)
    backup_dict = ee.Dictionary({"shapeGroup": "Unknown", "shapeName": "Unknown"})
    return ee.Algorithms.If(
        polygonsIntersectPoint.size().gt(0),
        polygonsIntersectPoint.first()
        .toDictionary()
        .select(["shapeGroup", "shapeName"]),
        backup_dict,
    )


#####
# water flag - to flag plots that may be erroneous (i.e., where errors may have occured in their creation / translation and so fall in either the ocean or inland water -
def usgs_gsv_ocean_prep():  # TO DO: for speed export image as an asset at samne res as JRC
    # Initialize the Earth Engine API
    # ee.Initialize()

    # Load the datasets
    mainlands = ee.FeatureCollection(
        "projects/sat-io/open-datasets/shoreline/mainlands"
    )
    big_islands = ee.FeatureCollection(
        "projects/sat-io/open-datasets/shoreline/big_islands"
    )
    small_islands = ee.FeatureCollection(
        "projects/sat-io/open-datasets/shoreline/small_islands"
    )

    # Combine the datasets into one FeatureCollection
    gsv = ee.FeatureCollection([mainlands, big_islands, small_islands]).flatten()

    # Rasterize the combined FeatureCollection and make areas outside coast (i.e. ocean) as value 1
    # and then rename the band
    return ee.Image(1).paint(gsv).selfMask().rename("ocean")


def jrc_water_surface_prep():
    jrc_surface_water = ee.Image("JRC/GSW1_4/GlobalSurfaceWater")

    # use transition band
    jrc_transition = jrc_surface_water.select("transition")

    # select permanent water bodies:
    # remap the following classes to have a value of 1:
    # "Permanent", "New Permanent", and "Seasonal to Permanent" (i.e., classes 1,2 and 7).
    # All other classes as value 0.
    permanent_inland_water = jrc_transition.remap([1, 2, 7], [1, 1, 1], 0).unmask()

    # optional - clip to within coast line (not needed currently and extra processing)
    # permanent_inland_water = permanent_inland_water.where(usgs_gsv_ocean_prep(),0)

    return permanent_inland_water.rename("water_inland")


def water_flag_all_prep():
    # combine both where water surface is 1, then 1, else use non_land_gsv
    return (
        usgs_gsv_ocean_prep()
        .unmask()
        .where(jrc_water_surface_prep(), 1)
        .rename(water_flag)
    )


def value_at_point_flag(point, image, band_name, output_name):
    """Sample an image at the given point and make a dictionary output where the name is defined by output_name parameter"""
    sample = image.sample(region=point, scale=30, numPixels=1).first()

    # Get the value from the sampled point
    value = sample.get(band_name)  # assuming the band name is 'b1', change if necessary

    # Use a conditional statement to check if the value is 1
    result = value  # ee.Algorithms.If(ee.Number(value).eq(1), "True", "False")

    # Return the output dictionary
    return ee.Dictionary({output_name: result})  # .getInfo()


def add_id_to_feature_collection(dataset, id_name):
    """
    Adds an incremental (1,2,3 etc) 'id' property to each feature in the given FeatureCollection.

    Args:
    - dataset: ee.FeatureCollection, the FeatureCollection to operate on.

    Returns:
    - dataset_with_id: ee.FeatureCollection, the FeatureCollection with 'id' property added to each feature.
    """
    # Get the list of system:index values
    indexes = dataset.aggregate_array("system:index")

    # Create a sequence of numbers starting from 1 to the size of indexes
    ids = ee.List.sequence(1, indexes.size())

    # Create a dictionary mapping system:index to id
    id_by_index = ee.Dictionary.fromLists(indexes, ids)

    # Function to add 'id' property to each feature
    def add_id(feature):
        # Get the system:index of the feature
        system_index = feature.get("system:index")

        # Get the id corresponding to the system:index
        feature_id = id_by_index.get(system_index)

        # Set the 'id' property of the feature
        return feature.set(id_name, feature_id)

    # Map the add_id function over the dataset
    dataset_with_id = dataset.map(add_id)

    return dataset_with_id


# Function to add ID to features
def add_id_to_feature(feature, id_name):
    index = feature.get("system:index")
    return feature.set(id_name, index)


# Function to flag positive values
def flag_positive_values(feature, flag_positive):
    for prop_name in flag_positive:
        flag_value = ee.Algorithms.If(
            ee.Number(feature.get(prop_name)).gt(0), "True", "-"
        )
        feature = feature.set(prop_name, flag_value)
    return feature


# Function to exclude properties
def copy_properties_and_exclude(feature, exclude_properties_from_output):
    return ee.Feature(feature.geometry()).copyProperties(
        source=feature, exclude=exclude_properties_from_output
    )


def ee_image_checker(image):
    """
    Tests if the input is a valid ee.Image.

    Args:
        image: An ee.Image object.

    Returns:
        bool: True if the input is a valid ee.Image, False otherwise.
    """
    try:
        if ee.Algorithms.ObjectType(image).getInfo() == "Image":
            # Trigger some action on the image to ensure it's a valid image
            image.getInfo()  # This will raise an exception if the image is invalid
            return True
    except ee.EEException as e:
        print(f"Image validation failed with EEException: {e}")
    except Exception as e:
        print(f"Image validation failed with exception: {e}")
    return False


def keep_valid_images(image_list):
    """
    Filters a list to return only valid ee.Images.

    Args:
        image_list: List of ee.Image objects.

    Returns:
        list: List of valid ee.Image objects.
    """
    valid_imgs = []
    for image in image_list:
        if ee_image_checker(image):
            valid_imgs.append(image)
    return valid_imgs


def convert_iso3_to_iso2(df, iso3_column, iso2_column):
    """
    Converts ISO3 country codes to ISO2 codes and adds a new column to the DataFrame.

    Args:
        df (pd.DataFrame): Input DataFrame containing ISO3 country codes.
        iso3_column (str): The column name in the DataFrame with ISO3 country codes.
        iso2_column (str): The new column name to store ISO2 country codes.

    Returns:
        pd.DataFrame: Updated DataFrame with the new ISO2 column.
    """
    import country_converter as coco

    # Apply conversion from ISO3 to ISO2
    df[iso2_column] = df[iso3_column].apply(
        lambda x: (
            coco.convert(names=x, to="ISO2") if x else "not found (disputed territory)"
        )
    )

    return df
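A small standalone sketch (not from the package) of `convert_iso3_to_iso2`; the column names are placeholders, whereas the package normally supplies them via `iso3_country_column` and `iso2_country_column` from config_runtime.

```python
import pandas as pd
from openforis_whisp.stats import convert_iso3_to_iso2

df = pd.DataFrame({"Country": ["CIV", "GHA", ""]})
df = convert_iso3_to_iso2(df, iso3_column="Country", iso2_column="Country_iso2")
print(df)
# "CIV" -> "CI", "GHA" -> "GH"; empty codes fall back to
# "not found (disputed territory)".
```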
def validate_external_id_column(feature_collection, external_id_column):
    """
    Validates that the external_id_column exists in all features of the collection.

    Parameters
    ----------
    feature_collection : ee.FeatureCollection
        The feature collection to validate
    external_id_column : str
        The name of the external ID column to check

    Returns
    -------
    dict
        Dictionary with validation results including:
        - 'is_valid': bool indicating if column exists in all features
        - 'total_features': int total number of features
        - 'features_with_column': int number of features that have the column
        - 'available_properties': list of properties available in first feature
        - 'error_message': str error message if validation fails
    """
    try:
        # Get total number of features
        total_features = feature_collection.size().getInfo()

        if total_features == 0:
            return {
                "is_valid": False,
                "total_features": 0,
                "features_with_column": 0,
                "available_properties": [],
                "error_message": "Feature collection is empty",
            }

        # Get available properties from first feature
        first_feature_props = feature_collection.first().propertyNames().getInfo()

        # Check if external_id_column exists in all features
        def check_column_exists(feature):
            has_column = feature.propertyNames().contains(external_id_column)
            return feature.set("_has_external_id", has_column)

        features_with_check = feature_collection.map(check_column_exists)
        features_with_column = (
            features_with_check.filter(ee.Filter.eq("_has_external_id", True))
            .size()
            .getInfo()
        )

        is_valid = features_with_column == total_features

        error_message = None
        if not is_valid:
            missing_count = total_features - features_with_column
            error_message = (
                f"The column '{external_id_column}' is missing from {missing_count} "
                f"out of {total_features} features in the collection. "
                f"Available properties in first feature: {first_feature_props}"
            )

        return {
            "is_valid": is_valid,
            "total_features": total_features,
            "features_with_column": features_with_column,
            "available_properties": first_feature_props,
            "error_message": error_message,
        }

    except Exception as e:
        return {
            "is_valid": False,
            "total_features": 0,
            "features_with_column": 0,
            "available_properties": [],
            "error_message": f"Error during validation: {str(e)}",
        }


def debug_feature_collection_properties(feature_collection, max_features=5):
    """
    Debug helper function to inspect the properties of features in a collection.

    Parameters
    ----------
    feature_collection : ee.FeatureCollection
        The feature collection to inspect
    max_features : int, optional
        Maximum number of features to inspect, by default 5

    Returns
    -------
    dict
        Dictionary with debugging information about the feature collection
    """
    try:
        total_features = feature_collection.size().getInfo()

        if total_features == 0:
            return {"total_features": 0, "error": "Feature collection is empty"}

        # Limit the number of features to inspect
        features_to_check = min(max_features, total_features)
        limited_fc = feature_collection.limit(features_to_check)

        # Get properties for each feature
        def get_feature_properties(feature):
            return ee.Dictionary(
                {
                    "properties": feature.propertyNames(),
                    "geometry_type": feature.geometry().type(),
                }
            )

        feature_info = limited_fc.map(get_feature_properties).getInfo()

        return {
            "total_features": total_features,
            "inspected_features": features_to_check,
            "feature_details": [
                {
                    "feature_index": i,
                    "properties": feature_info["features"][i]["properties"][
                        "properties"
                    ],
                    "geometry_type": feature_info["features"][i]["properties"][
                        "geometry_type"
                    ],
                }
                for i in range(len(feature_info["features"]))
            ],
        }

    except Exception as e:
        return {"error": f"Error during debugging: {str(e)}"}
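A closing sketch (not from the package) showing how the two helpers above can be used before running the stats; the asset path and ID column are placeholders.

```python
import ee
from openforis_whisp.stats import (
    validate_external_id_column,
    debug_feature_collection_properties,
)

ee.Initialize()
fc = ee.FeatureCollection("projects/my-project/assets/plots")  # hypothetical asset

# Inspect what properties the first few features actually carry...
print(debug_feature_collection_properties(fc, max_features=3))

# ...and confirm an ID column exists on every feature before computing stats.
check = validate_external_id_column(fc, "farm_id")
if not check["is_valid"]:
    print(check["error_message"])
```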