wolfhece 2.1.98__py3-none-any.whl → 2.1.100__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wolfhece/PyDraw.py +506 -35
- wolfhece/PyParams.py +18 -8
- wolfhece/PyVertex.py +12 -5
- wolfhece/PyVertexvectors.py +27 -16
- wolfhece/Results2DGPU.py +23 -7
- wolfhece/acceptability/Parallels.py +2 -2
- wolfhece/acceptability/_add_path.py +23 -0
- wolfhece/acceptability/acceptability.py +594 -563
- wolfhece/acceptability/acceptability_gui.py +564 -331
- wolfhece/acceptability/cli.py +307 -120
- wolfhece/acceptability/func.py +1743 -1597
- wolfhece/analyze_vect.py +177 -0
- wolfhece/apps/version.py +1 -1
- wolfhece/bernoulli/losses.py +75 -22
- wolfhece/bernoulli/losses_jax.py +143 -0
- wolfhece/bernoulli/pipe.py +7 -2
- wolfhece/math_parser/__init__.py +4 -4
- wolfhece/math_parser/calculator.py +50 -9
- wolfhece/matplotlib_fig.py +1980 -0
- wolfhece/mesh2d/simple_2d.py +2399 -0
- wolfhece/mesh2d/wolf2dprev.py +1 -1
- wolfhece/pidcontroller.py +131 -0
- wolfhece/pywalous.py +7 -7
- wolfhece/scenario/config_manager.py +191 -83
- wolfhece/wolf_array.py +162 -109
- wolfhece/wolf_vrt.py +108 -7
- wolfhece/wolfresults_2D.py +109 -4
- wolfhece/xyz_file.py +91 -51
- {wolfhece-2.1.98.dist-info → wolfhece-2.1.100.dist-info}/METADATA +1 -1
- {wolfhece-2.1.98.dist-info → wolfhece-2.1.100.dist-info}/RECORD +33 -27
- {wolfhece-2.1.98.dist-info → wolfhece-2.1.100.dist-info}/WHEEL +1 -1
- {wolfhece-2.1.98.dist-info → wolfhece-2.1.100.dist-info}/entry_points.txt +0 -0
- {wolfhece-2.1.98.dist-info → wolfhece-2.1.100.dist-info}/top_level.txt +0 -0
wolfhece/acceptability/func.py
CHANGED
@@ -1,1597 +1,1743 @@
|
|
1
|
-
"""
|
2
|
-
Author: University of Liege, HECE, LEMA
|
3
|
-
Date: 2024
|
4
|
-
|
5
|
-
Copyright (c) 2024 University of Liege. All rights reserved.
|
6
|
-
|
7
|
-
This script and its content are protected by copyright law. Unauthorized
|
8
|
-
copying or distribution of this file, via any medium, is strictly prohibited.
|
9
|
-
"""
|
10
|
-
|
11
|
-
import
|
12
|
-
|
13
|
-
import
|
14
|
-
from
|
15
|
-
import
|
16
|
-
import
|
17
|
-
|
18
|
-
import
|
19
|
-
|
20
|
-
|
21
|
-
from
|
22
|
-
import
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
|
37
|
-
|
38
|
-
|
39
|
-
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
|
51
|
-
|
52
|
-
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
62
|
-
|
63
|
-
|
64
|
-
|
65
|
-
|
66
|
-
|
67
|
-
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
72
|
-
|
73
|
-
|
74
|
-
|
75
|
-
|
76
|
-
|
77
|
-
|
78
|
-
|
79
|
-
|
80
|
-
|
81
|
-
|
82
|
-
|
83
|
-
|
84
|
-
|
85
|
-
|
86
|
-
|
87
|
-
|
88
|
-
|
89
|
-
|
90
|
-
|
91
|
-
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
102
|
-
-
|
103
|
-
|
104
|
-
|
105
|
-
|
106
|
-
|
107
|
-
|
108
|
-
-
|
109
|
-
|
110
|
-
|
111
|
-
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
-
|
125
|
-
|
126
|
-
-
|
127
|
-
-
|
128
|
-
|
129
|
-
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
|
134
|
-
|
135
|
-
|
136
|
-
|
137
|
-
|
138
|
-
|
139
|
-
|
140
|
-
|
141
|
-
|
142
|
-
|
143
|
-
|
144
|
-
|
145
|
-
|
146
|
-
|
147
|
-
|
148
|
-
|
149
|
-
|
150
|
-
|
151
|
-
|
152
|
-
|
153
|
-
|
154
|
-
|
155
|
-
|
156
|
-
|
157
|
-
|
158
|
-
|
159
|
-
|
160
|
-
|
161
|
-
|
162
|
-
|
163
|
-
|
164
|
-
|
165
|
-
|
166
|
-
|
167
|
-
self.
|
168
|
-
|
169
|
-
|
170
|
-
|
171
|
-
|
172
|
-
|
173
|
-
|
174
|
-
|
175
|
-
self.
|
176
|
-
|
177
|
-
|
178
|
-
|
179
|
-
|
180
|
-
|
181
|
-
self.
|
182
|
-
|
183
|
-
self.
|
184
|
-
self.
|
185
|
-
self.
|
186
|
-
|
187
|
-
self.
|
188
|
-
self.
|
189
|
-
self.
|
190
|
-
self.
|
191
|
-
self.
|
192
|
-
|
193
|
-
self.
|
194
|
-
|
195
|
-
|
196
|
-
|
197
|
-
self.
|
198
|
-
self.
|
199
|
-
|
200
|
-
self.
|
201
|
-
|
202
|
-
|
203
|
-
|
204
|
-
|
205
|
-
|
206
|
-
self.
|
207
|
-
self.
|
208
|
-
|
209
|
-
|
210
|
-
|
211
|
-
|
212
|
-
|
213
|
-
|
214
|
-
|
215
|
-
|
216
|
-
|
217
|
-
|
218
|
-
|
219
|
-
|
220
|
-
|
221
|
-
|
222
|
-
|
223
|
-
|
224
|
-
|
225
|
-
|
226
|
-
|
227
|
-
|
228
|
-
|
229
|
-
|
230
|
-
self.
|
231
|
-
|
232
|
-
self.
|
233
|
-
self.
|
234
|
-
|
235
|
-
|
236
|
-
self.
|
237
|
-
self.
|
238
|
-
|
239
|
-
self.
|
240
|
-
self.
|
241
|
-
|
242
|
-
self.
|
243
|
-
self.
|
244
|
-
|
245
|
-
|
246
|
-
self.
|
247
|
-
self.
|
248
|
-
self.
|
249
|
-
|
250
|
-
|
251
|
-
|
252
|
-
|
253
|
-
self.
|
254
|
-
self.
|
255
|
-
|
256
|
-
|
257
|
-
|
258
|
-
|
259
|
-
|
260
|
-
|
261
|
-
|
262
|
-
|
263
|
-
|
264
|
-
|
265
|
-
|
266
|
-
|
267
|
-
|
268
|
-
|
269
|
-
self.
|
270
|
-
|
271
|
-
self.
|
272
|
-
|
273
|
-
|
274
|
-
|
275
|
-
self.
|
276
|
-
self.
|
277
|
-
|
278
|
-
self.
|
279
|
-
self.
|
280
|
-
|
281
|
-
|
282
|
-
|
283
|
-
|
284
|
-
|
285
|
-
|
286
|
-
|
287
|
-
|
288
|
-
|
289
|
-
|
290
|
-
|
291
|
-
self.
|
292
|
-
|
293
|
-
|
294
|
-
self.
|
295
|
-
self.
|
296
|
-
self.
|
297
|
-
|
298
|
-
|
299
|
-
self.
|
300
|
-
|
301
|
-
self.
|
302
|
-
self.
|
303
|
-
self.
|
304
|
-
|
305
|
-
self.
|
306
|
-
self.
|
307
|
-
|
308
|
-
|
309
|
-
|
310
|
-
|
311
|
-
|
312
|
-
|
313
|
-
|
314
|
-
|
315
|
-
|
316
|
-
|
317
|
-
|
318
|
-
|
319
|
-
|
320
|
-
|
321
|
-
|
322
|
-
|
323
|
-
|
324
|
-
|
325
|
-
|
326
|
-
|
327
|
-
|
328
|
-
|
329
|
-
|
330
|
-
|
331
|
-
|
332
|
-
|
333
|
-
|
334
|
-
|
335
|
-
|
336
|
-
|
337
|
-
|
338
|
-
|
339
|
-
|
340
|
-
|
341
|
-
|
342
|
-
|
343
|
-
|
344
|
-
|
345
|
-
|
346
|
-
|
347
|
-
|
348
|
-
|
349
|
-
|
350
|
-
|
351
|
-
|
352
|
-
|
353
|
-
|
354
|
-
|
355
|
-
|
356
|
-
|
357
|
-
|
358
|
-
|
359
|
-
|
360
|
-
|
361
|
-
|
362
|
-
|
363
|
-
|
364
|
-
|
365
|
-
|
366
|
-
|
367
|
-
|
368
|
-
return
|
369
|
-
|
370
|
-
def
|
371
|
-
|
372
|
-
|
373
|
-
|
374
|
-
|
375
|
-
|
376
|
-
|
377
|
-
|
378
|
-
|
379
|
-
|
380
|
-
|
381
|
-
|
382
|
-
|
383
|
-
|
384
|
-
|
385
|
-
|
386
|
-
|
387
|
-
|
388
|
-
|
389
|
-
|
390
|
-
|
391
|
-
|
392
|
-
|
393
|
-
|
394
|
-
|
395
|
-
|
396
|
-
|
397
|
-
|
398
|
-
|
399
|
-
|
400
|
-
return [Path(a) for a in glob.glob(str(self.
|
401
|
-
|
402
|
-
def
|
403
|
-
|
404
|
-
|
405
|
-
|
406
|
-
|
407
|
-
|
408
|
-
|
409
|
-
|
410
|
-
|
411
|
-
|
412
|
-
|
413
|
-
|
414
|
-
|
415
|
-
|
416
|
-
|
417
|
-
|
418
|
-
|
419
|
-
|
420
|
-
|
421
|
-
|
422
|
-
|
423
|
-
|
424
|
-
|
425
|
-
|
426
|
-
|
427
|
-
|
428
|
-
|
429
|
-
|
430
|
-
|
431
|
-
|
432
|
-
|
433
|
-
|
434
|
-
|
435
|
-
|
436
|
-
|
437
|
-
if
|
438
|
-
for
|
439
|
-
|
440
|
-
|
441
|
-
|
442
|
-
|
443
|
-
|
444
|
-
|
445
|
-
|
446
|
-
|
447
|
-
|
448
|
-
|
449
|
-
|
450
|
-
|
451
|
-
|
452
|
-
|
453
|
-
|
454
|
-
|
455
|
-
|
456
|
-
|
457
|
-
|
458
|
-
|
459
|
-
|
460
|
-
|
461
|
-
|
462
|
-
|
463
|
-
|
464
|
-
|
465
|
-
|
466
|
-
|
467
|
-
|
468
|
-
|
469
|
-
|
470
|
-
|
471
|
-
|
472
|
-
|
473
|
-
|
474
|
-
|
475
|
-
|
476
|
-
|
477
|
-
|
478
|
-
|
479
|
-
|
480
|
-
|
481
|
-
|
482
|
-
|
483
|
-
|
484
|
-
def
|
485
|
-
"""
|
486
|
-
|
487
|
-
|
488
|
-
|
489
|
-
|
490
|
-
|
491
|
-
|
492
|
-
|
493
|
-
|
494
|
-
|
495
|
-
|
496
|
-
|
497
|
-
|
498
|
-
|
499
|
-
|
500
|
-
|
501
|
-
|
502
|
-
|
503
|
-
|
504
|
-
|
505
|
-
|
506
|
-
|
507
|
-
|
508
|
-
|
509
|
-
|
510
|
-
|
511
|
-
|
512
|
-
|
513
|
-
|
514
|
-
|
515
|
-
|
516
|
-
|
517
|
-
|
518
|
-
|
519
|
-
|
520
|
-
|
521
|
-
|
522
|
-
|
523
|
-
|
524
|
-
|
525
|
-
|
526
|
-
|
527
|
-
|
528
|
-
|
529
|
-
|
530
|
-
|
531
|
-
|
532
|
-
|
533
|
-
|
534
|
-
|
535
|
-
|
536
|
-
|
537
|
-
|
538
|
-
|
539
|
-
|
540
|
-
|
541
|
-
|
542
|
-
|
543
|
-
|
544
|
-
|
545
|
-
|
546
|
-
|
547
|
-
#
|
548
|
-
|
549
|
-
|
550
|
-
|
551
|
-
|
552
|
-
|
553
|
-
|
554
|
-
|
555
|
-
|
556
|
-
|
557
|
-
|
558
|
-
|
559
|
-
|
560
|
-
|
561
|
-
|
562
|
-
|
563
|
-
|
564
|
-
|
565
|
-
|
566
|
-
|
567
|
-
|
568
|
-
|
569
|
-
|
570
|
-
|
571
|
-
|
572
|
-
|
573
|
-
|
574
|
-
|
575
|
-
|
576
|
-
|
577
|
-
|
578
|
-
|
579
|
-
|
580
|
-
|
581
|
-
|
582
|
-
|
583
|
-
|
584
|
-
|
585
|
-
|
586
|
-
|
587
|
-
|
588
|
-
|
589
|
-
|
590
|
-
|
591
|
-
|
592
|
-
|
593
|
-
|
594
|
-
|
595
|
-
|
596
|
-
|
597
|
-
|
598
|
-
|
599
|
-
|
600
|
-
|
601
|
-
|
602
|
-
|
603
|
-
|
604
|
-
|
605
|
-
|
606
|
-
|
607
|
-
|
608
|
-
|
609
|
-
|
610
|
-
|
611
|
-
|
612
|
-
|
613
|
-
|
614
|
-
|
615
|
-
|
616
|
-
|
617
|
-
|
618
|
-
|
619
|
-
|
620
|
-
|
621
|
-
|
622
|
-
|
623
|
-
|
624
|
-
|
625
|
-
|
626
|
-
|
627
|
-
|
628
|
-
|
629
|
-
|
630
|
-
|
631
|
-
|
632
|
-
self.
|
633
|
-
|
634
|
-
|
635
|
-
|
636
|
-
|
637
|
-
|
638
|
-
|
639
|
-
|
640
|
-
|
641
|
-
|
642
|
-
|
643
|
-
|
644
|
-
|
645
|
-
|
646
|
-
|
647
|
-
|
648
|
-
if self.
|
649
|
-
|
650
|
-
|
651
|
-
|
652
|
-
|
653
|
-
|
654
|
-
|
655
|
-
|
656
|
-
|
657
|
-
|
658
|
-
|
659
|
-
|
660
|
-
|
661
|
-
|
662
|
-
|
663
|
-
|
664
|
-
|
665
|
-
|
666
|
-
|
667
|
-
|
668
|
-
|
669
|
-
|
670
|
-
|
671
|
-
|
672
|
-
|
673
|
-
|
674
|
-
|
675
|
-
|
676
|
-
|
677
|
-
|
678
|
-
|
679
|
-
|
680
|
-
|
681
|
-
|
682
|
-
|
683
|
-
|
684
|
-
|
685
|
-
|
686
|
-
|
687
|
-
|
688
|
-
|
689
|
-
|
690
|
-
|
691
|
-
if not
|
692
|
-
|
693
|
-
|
694
|
-
|
695
|
-
|
696
|
-
|
697
|
-
|
698
|
-
|
699
|
-
|
700
|
-
|
701
|
-
|
702
|
-
|
703
|
-
|
704
|
-
|
705
|
-
|
706
|
-
|
707
|
-
|
708
|
-
|
709
|
-
|
710
|
-
|
711
|
-
|
712
|
-
|
713
|
-
|
714
|
-
|
715
|
-
|
716
|
-
|
717
|
-
|
718
|
-
|
719
|
-
|
720
|
-
|
721
|
-
|
722
|
-
|
723
|
-
|
724
|
-
|
725
|
-
|
726
|
-
|
727
|
-
|
728
|
-
|
729
|
-
|
730
|
-
|
731
|
-
|
732
|
-
|
733
|
-
|
734
|
-
|
735
|
-
|
736
|
-
|
737
|
-
|
738
|
-
|
739
|
-
|
740
|
-
|
741
|
-
|
742
|
-
|
743
|
-
|
744
|
-
|
745
|
-
|
746
|
-
|
747
|
-
|
748
|
-
|
749
|
-
|
750
|
-
|
751
|
-
|
752
|
-
|
753
|
-
|
754
|
-
|
755
|
-
|
756
|
-
|
757
|
-
|
758
|
-
|
759
|
-
|
760
|
-
|
761
|
-
|
762
|
-
|
763
|
-
|
764
|
-
|
765
|
-
|
766
|
-
|
767
|
-
|
768
|
-
|
769
|
-
|
770
|
-
|
771
|
-
|
772
|
-
|
773
|
-
|
774
|
-
|
775
|
-
|
776
|
-
|
777
|
-
|
778
|
-
|
779
|
-
|
780
|
-
|
781
|
-
|
782
|
-
|
783
|
-
|
784
|
-
|
785
|
-
|
786
|
-
|
787
|
-
|
788
|
-
|
789
|
-
|
790
|
-
|
791
|
-
|
792
|
-
|
793
|
-
|
794
|
-
|
795
|
-
|
796
|
-
|
797
|
-
|
798
|
-
|
799
|
-
|
800
|
-
|
801
|
-
|
802
|
-
|
803
|
-
|
804
|
-
|
805
|
-
|
806
|
-
|
807
|
-
|
808
|
-
|
809
|
-
|
810
|
-
|
811
|
-
|
812
|
-
|
813
|
-
|
814
|
-
|
815
|
-
|
816
|
-
|
817
|
-
|
818
|
-
|
819
|
-
|
820
|
-
|
821
|
-
|
822
|
-
|
823
|
-
|
824
|
-
|
825
|
-
|
826
|
-
|
827
|
-
|
828
|
-
|
829
|
-
|
830
|
-
|
831
|
-
|
832
|
-
|
833
|
-
|
834
|
-
|
835
|
-
|
836
|
-
|
837
|
-
|
838
|
-
|
839
|
-
|
840
|
-
|
841
|
-
|
842
|
-
|
843
|
-
|
844
|
-
|
845
|
-
|
846
|
-
|
847
|
-
|
848
|
-
|
849
|
-
|
850
|
-
|
851
|
-
|
852
|
-
|
853
|
-
|
854
|
-
|
855
|
-
|
856
|
-
|
857
|
-
|
858
|
-
|
859
|
-
|
860
|
-
|
861
|
-
|
862
|
-
|
863
|
-
|
864
|
-
|
865
|
-
|
866
|
-
|
867
|
-
|
868
|
-
|
869
|
-
|
870
|
-
|
871
|
-
|
872
|
-
|
873
|
-
|
874
|
-
|
875
|
-
|
876
|
-
|
877
|
-
|
878
|
-
|
879
|
-
|
880
|
-
|
881
|
-
|
882
|
-
|
883
|
-
|
884
|
-
|
885
|
-
|
886
|
-
|
887
|
-
|
888
|
-
|
889
|
-
|
890
|
-
|
891
|
-
|
892
|
-
|
893
|
-
|
894
|
-
|
895
|
-
|
896
|
-
|
897
|
-
|
898
|
-
|
899
|
-
|
900
|
-
|
901
|
-
|
902
|
-
|
903
|
-
|
904
|
-
|
905
|
-
|
906
|
-
|
907
|
-
|
908
|
-
|
909
|
-
|
910
|
-
|
911
|
-
|
912
|
-
|
913
|
-
|
914
|
-
|
915
|
-
|
916
|
-
|
917
|
-
|
918
|
-
|
919
|
-
|
920
|
-
|
921
|
-
|
922
|
-
|
923
|
-
|
924
|
-
|
925
|
-
|
926
|
-
|
927
|
-
|
928
|
-
|
929
|
-
|
930
|
-
|
931
|
-
|
932
|
-
|
933
|
-
|
934
|
-
|
935
|
-
|
936
|
-
|
937
|
-
|
938
|
-
|
939
|
-
|
940
|
-
|
941
|
-
|
942
|
-
|
943
|
-
|
944
|
-
|
945
|
-
|
946
|
-
|
947
|
-
|
948
|
-
|
949
|
-
|
950
|
-
|
951
|
-
|
952
|
-
|
953
|
-
|
954
|
-
|
955
|
-
|
956
|
-
|
957
|
-
|
958
|
-
|
959
|
-
|
960
|
-
|
961
|
-
|
962
|
-
|
963
|
-
|
964
|
-
|
965
|
-
|
966
|
-
|
967
|
-
|
968
|
-
|
969
|
-
|
970
|
-
|
971
|
-
|
972
|
-
|
973
|
-
|
974
|
-
|
975
|
-
|
976
|
-
|
977
|
-
|
978
|
-
|
979
|
-
|
980
|
-
|
981
|
-
|
982
|
-
|
983
|
-
|
984
|
-
|
985
|
-
|
986
|
-
|
987
|
-
|
988
|
-
|
989
|
-
|
990
|
-
|
991
|
-
|
992
|
-
|
993
|
-
|
994
|
-
|
995
|
-
|
996
|
-
|
997
|
-
|
998
|
-
|
999
|
-
|
1000
|
-
|
1001
|
-
|
1002
|
-
|
1003
|
-
|
1004
|
-
|
1005
|
-
|
1006
|
-
|
1007
|
-
|
1008
|
-
|
1009
|
-
|
1010
|
-
|
1011
|
-
|
1012
|
-
|
1013
|
-
|
1014
|
-
|
1015
|
-
|
1016
|
-
|
1017
|
-
|
1018
|
-
|
1019
|
-
|
1020
|
-
|
1021
|
-
|
1022
|
-
|
1023
|
-
|
1024
|
-
|
1025
|
-
|
1026
|
-
|
1027
|
-
|
1028
|
-
|
1029
|
-
|
1030
|
-
|
1031
|
-
|
1032
|
-
|
1033
|
-
|
1034
|
-
|
1035
|
-
|
1036
|
-
|
1037
|
-
|
1038
|
-
|
1039
|
-
|
1040
|
-
|
1041
|
-
|
1042
|
-
|
1043
|
-
|
1044
|
-
|
1045
|
-
|
1046
|
-
|
1047
|
-
|
1048
|
-
|
1049
|
-
|
1050
|
-
|
1051
|
-
|
1052
|
-
|
1053
|
-
|
1054
|
-
|
1055
|
-
|
1056
|
-
|
1057
|
-
|
1058
|
-
|
1059
|
-
|
1060
|
-
|
1061
|
-
|
1062
|
-
|
1063
|
-
|
1064
|
-
|
1065
|
-
|
1066
|
-
|
1067
|
-
|
1068
|
-
|
1069
|
-
|
1070
|
-
|
1071
|
-
|
1072
|
-
|
1073
|
-
|
1074
|
-
|
1075
|
-
|
1076
|
-
|
1077
|
-
|
1078
|
-
|
1079
|
-
|
1080
|
-
|
1081
|
-
|
1082
|
-
|
1083
|
-
|
1084
|
-
|
1085
|
-
|
1086
|
-
|
1087
|
-
|
1088
|
-
|
1089
|
-
|
1090
|
-
|
1091
|
-
|
1092
|
-
|
1093
|
-
|
1094
|
-
|
1095
|
-
|
1096
|
-
|
1097
|
-
|
1098
|
-
|
1099
|
-
|
1100
|
-
|
1101
|
-
|
1102
|
-
|
1103
|
-
|
1104
|
-
|
1105
|
-
|
1106
|
-
|
1107
|
-
|
1108
|
-
|
1109
|
-
|
1110
|
-
|
1111
|
-
|
1112
|
-
|
1113
|
-
|
1114
|
-
|
1115
|
-
|
1116
|
-
|
1117
|
-
|
1118
|
-
|
1119
|
-
|
1120
|
-
|
1121
|
-
|
1122
|
-
|
1123
|
-
|
1124
|
-
|
1125
|
-
|
1126
|
-
|
1127
|
-
|
1128
|
-
|
1129
|
-
|
1130
|
-
|
1131
|
-
|
1132
|
-
|
1133
|
-
|
1134
|
-
|
1135
|
-
|
1136
|
-
|
1137
|
-
|
1138
|
-
|
1139
|
-
|
1140
|
-
|
1141
|
-
|
1142
|
-
|
1143
|
-
|
1144
|
-
|
1145
|
-
|
1146
|
-
|
1147
|
-
|
1148
|
-
|
1149
|
-
|
1150
|
-
|
1151
|
-
|
1152
|
-
|
1153
|
-
|
1154
|
-
|
1155
|
-
|
1156
|
-
|
1157
|
-
|
1158
|
-
|
1159
|
-
|
1160
|
-
|
1161
|
-
|
1162
|
-
|
1163
|
-
|
1164
|
-
|
1165
|
-
|
1166
|
-
|
1167
|
-
|
1168
|
-
|
1169
|
-
|
1170
|
-
|
1171
|
-
|
1172
|
-
|
1173
|
-
|
1174
|
-
|
1175
|
-
|
1176
|
-
|
1177
|
-
|
1178
|
-
|
1179
|
-
|
1180
|
-
|
1181
|
-
|
1182
|
-
|
1183
|
-
|
1184
|
-
|
1185
|
-
|
1186
|
-
|
1187
|
-
|
1188
|
-
|
1189
|
-
|
1190
|
-
|
1191
|
-
|
1192
|
-
|
1193
|
-
|
1194
|
-
|
1195
|
-
|
1196
|
-
|
1197
|
-
|
1198
|
-
|
1199
|
-
|
1200
|
-
|
1201
|
-
|
1202
|
-
|
1203
|
-
|
1204
|
-
|
1205
|
-
|
1206
|
-
|
1207
|
-
|
1208
|
-
|
1209
|
-
|
1210
|
-
|
1211
|
-
|
1212
|
-
|
1213
|
-
|
1214
|
-
|
1215
|
-
|
1216
|
-
|
1217
|
-
|
1218
|
-
|
1219
|
-
|
1220
|
-
|
1221
|
-
|
1222
|
-
|
1223
|
-
|
1224
|
-
|
1225
|
-
|
1226
|
-
|
1227
|
-
|
1228
|
-
|
1229
|
-
|
1230
|
-
|
1231
|
-
|
1232
|
-
|
1233
|
-
|
1234
|
-
return
|
1235
|
-
|
1236
|
-
|
1237
|
-
|
1238
|
-
|
1239
|
-
|
1240
|
-
|
1241
|
-
|
1242
|
-
|
1243
|
-
|
1244
|
-
|
1245
|
-
|
1246
|
-
|
1247
|
-
|
1248
|
-
|
1249
|
-
|
1250
|
-
|
1251
|
-
|
1252
|
-
|
1253
|
-
|
1254
|
-
logging.info("
|
1255
|
-
|
1256
|
-
|
1257
|
-
|
1258
|
-
|
1259
|
-
|
1260
|
-
|
1261
|
-
|
1262
|
-
|
1263
|
-
|
1264
|
-
|
1265
|
-
|
1266
|
-
|
1267
|
-
|
1268
|
-
|
1269
|
-
|
1270
|
-
|
1271
|
-
|
1272
|
-
|
1273
|
-
|
1274
|
-
|
1275
|
-
|
1276
|
-
|
1277
|
-
|
1278
|
-
|
1279
|
-
|
1280
|
-
|
1281
|
-
|
1282
|
-
|
1283
|
-
|
1284
|
-
|
1285
|
-
|
1286
|
-
|
1287
|
-
|
1288
|
-
|
1289
|
-
|
1290
|
-
|
1291
|
-
|
1292
|
-
|
1293
|
-
|
1294
|
-
|
1295
|
-
|
1296
|
-
|
1297
|
-
|
1298
|
-
|
1299
|
-
|
1300
|
-
|
1301
|
-
|
1302
|
-
|
1303
|
-
|
1304
|
-
|
1305
|
-
|
1306
|
-
|
1307
|
-
|
1308
|
-
|
1309
|
-
|
1310
|
-
|
1311
|
-
|
1312
|
-
|
1313
|
-
|
1314
|
-
|
1315
|
-
dataset =
|
1316
|
-
|
1317
|
-
|
1318
|
-
|
1319
|
-
|
1320
|
-
|
1321
|
-
dataset
|
1322
|
-
|
1323
|
-
|
1324
|
-
|
1325
|
-
dataset.
|
1326
|
-
dataset.
|
1327
|
-
dataset
|
1328
|
-
|
1329
|
-
|
1330
|
-
|
1331
|
-
|
1332
|
-
|
1333
|
-
|
1334
|
-
|
1335
|
-
|
1336
|
-
|
1337
|
-
|
1338
|
-
|
1339
|
-
"""
|
1340
|
-
|
1341
|
-
|
1342
|
-
|
1343
|
-
|
1344
|
-
|
1345
|
-
|
1346
|
-
|
1347
|
-
|
1348
|
-
|
1349
|
-
|
1350
|
-
|
1351
|
-
|
1352
|
-
|
1353
|
-
|
1354
|
-
|
1355
|
-
|
1356
|
-
|
1357
|
-
|
1358
|
-
|
1359
|
-
|
1360
|
-
|
1361
|
-
|
1362
|
-
|
1363
|
-
|
1364
|
-
|
1365
|
-
|
1366
|
-
|
1367
|
-
|
1368
|
-
|
1369
|
-
|
1370
|
-
|
1371
|
-
|
1372
|
-
|
1373
|
-
|
1374
|
-
|
1375
|
-
|
1376
|
-
|
1377
|
-
|
1378
|
-
|
1379
|
-
|
1380
|
-
|
1381
|
-
|
1382
|
-
|
1383
|
-
|
1384
|
-
|
1385
|
-
|
1386
|
-
|
1387
|
-
|
1388
|
-
|
1389
|
-
|
1390
|
-
|
1391
|
-
|
1392
|
-
|
1393
|
-
|
1394
|
-
|
1395
|
-
|
1396
|
-
|
1397
|
-
|
1398
|
-
|
1399
|
-
|
1400
|
-
|
1401
|
-
|
1402
|
-
|
1403
|
-
|
1404
|
-
|
1405
|
-
|
1406
|
-
|
1407
|
-
|
1408
|
-
|
1409
|
-
|
1410
|
-
|
1411
|
-
|
1412
|
-
|
1413
|
-
|
1414
|
-
|
1415
|
-
|
1416
|
-
|
1417
|
-
|
1418
|
-
|
1419
|
-
|
1420
|
-
|
1421
|
-
|
1422
|
-
|
1423
|
-
|
1424
|
-
|
1425
|
-
|
1426
|
-
|
1427
|
-
|
1428
|
-
|
1429
|
-
|
1430
|
-
|
1431
|
-
|
1432
|
-
|
1433
|
-
|
1434
|
-
|
1435
|
-
|
1436
|
-
|
1437
|
-
|
1438
|
-
|
1439
|
-
|
1440
|
-
|
1441
|
-
|
1442
|
-
|
1443
|
-
|
1444
|
-
|
1445
|
-
|
1446
|
-
|
1447
|
-
|
1448
|
-
|
1449
|
-
|
1450
|
-
|
1451
|
-
|
1452
|
-
|
1453
|
-
|
1454
|
-
|
1455
|
-
|
1456
|
-
|
1457
|
-
|
1458
|
-
|
1459
|
-
|
1460
|
-
|
1461
|
-
|
1462
|
-
|
1463
|
-
|
1464
|
-
|
1465
|
-
|
1466
|
-
|
1467
|
-
|
1468
|
-
|
1469
|
-
|
1470
|
-
|
1471
|
-
|
1472
|
-
|
1473
|
-
|
1474
|
-
|
1475
|
-
|
1476
|
-
|
1477
|
-
|
1478
|
-
|
1479
|
-
|
1480
|
-
|
1481
|
-
|
1482
|
-
|
1483
|
-
|
1484
|
-
|
1485
|
-
|
1486
|
-
|
1487
|
-
|
1488
|
-
|
1489
|
-
|
1490
|
-
|
1491
|
-
|
1492
|
-
|
1493
|
-
|
1494
|
-
|
1495
|
-
|
1496
|
-
|
1497
|
-
|
1498
|
-
|
1499
|
-
|
1500
|
-
|
1501
|
-
|
1502
|
-
|
1503
|
-
|
1504
|
-
|
1505
|
-
|
1506
|
-
|
1507
|
-
|
1508
|
-
|
1509
|
-
|
1510
|
-
|
1511
|
-
|
1512
|
-
|
1513
|
-
|
1514
|
-
|
1515
|
-
|
1516
|
-
|
1517
|
-
|
1518
|
-
|
1519
|
-
|
1520
|
-
|
1521
|
-
|
1522
|
-
|
1523
|
-
|
1524
|
-
|
1525
|
-
|
1526
|
-
|
1527
|
-
|
1528
|
-
|
1529
|
-
|
1530
|
-
|
1531
|
-
|
1532
|
-
|
1533
|
-
|
1534
|
-
|
1535
|
-
|
1536
|
-
|
1537
|
-
|
1538
|
-
|
1539
|
-
|
1540
|
-
|
1541
|
-
|
1542
|
-
|
1543
|
-
|
1544
|
-
|
1545
|
-
|
1546
|
-
|
1547
|
-
|
1548
|
-
|
1549
|
-
|
1550
|
-
|
1551
|
-
|
1552
|
-
|
1553
|
-
|
1554
|
-
|
1555
|
-
|
1556
|
-
|
1557
|
-
|
1558
|
-
|
1559
|
-
|
1560
|
-
|
1561
|
-
|
1562
|
-
|
1563
|
-
|
1564
|
-
|
1565
|
-
|
1566
|
-
|
1567
|
-
|
1568
|
-
|
1569
|
-
|
1570
|
-
|
1571
|
-
|
1572
|
-
|
1573
|
-
|
1574
|
-
|
1575
|
-
|
1576
|
-
|
1577
|
-
|
1578
|
-
|
1579
|
-
|
1580
|
-
|
1581
|
-
|
1582
|
-
|
1583
|
-
|
1584
|
-
|
1585
|
-
|
1586
|
-
|
1587
|
-
|
1588
|
-
|
1589
|
-
|
1590
|
-
|
1591
|
-
|
1592
|
-
|
1593
|
-
|
1594
|
-
|
1595
|
-
|
1596
|
-
|
1597
|
-
|
1
|
+
"""
|
2
|
+
Author: University of Liege, HECE, LEMA
|
3
|
+
Date: 2024
|
4
|
+
|
5
|
+
Copyright (c) 2024 University of Liege. All rights reserved.
|
6
|
+
|
7
|
+
This script and its content are protected by copyright law. Unauthorized
|
8
|
+
copying or distribution of this file, via any medium, is strictly prohibited.
|
9
|
+
"""
|
10
|
+
|
11
|
+
from ..wolf_vrt import create_vrt_from_diverged_files_first_based, translate_vrt2tif
|
12
|
+
from ..wolf_array import WolfArray
|
13
|
+
from typing import Union, Literal
|
14
|
+
from ..PyVertexvectors import Zones, zone, vector, wolfvertex, getIfromRGB
|
15
|
+
from ..PyTranslate import _
|
16
|
+
from ..scenario. config_manager import Config_Manager_2D_GPU
|
17
|
+
|
18
|
+
import geopandas as gpd
|
19
|
+
import pandas as pd
|
20
|
+
import numpy as np
|
21
|
+
from osgeo import gdal, ogr, osr, gdalconst
|
22
|
+
import os
|
23
|
+
import glob
|
24
|
+
from pathlib import Path
|
25
|
+
import logging
|
26
|
+
from tqdm import tqdm
|
27
|
+
from pyogrio import list_layers, read_dataframe
|
28
|
+
from enum import Enum
|
29
|
+
import numba as nb
|
30
|
+
|
31
|
+
ENGINE = 'pyogrio' # or 'Fiona -- Pyogrio is faster
|
32
|
+
EXTENT = '.gpkg'
|
33
|
+
class Modif_Type(Enum):
|
34
|
+
"""
|
35
|
+
Enum class for the type of modification
|
36
|
+
"""
|
37
|
+
|
38
|
+
WALOUS = 'Walous layers changed to PICC buidings'
|
39
|
+
POINT2POLY_EPURATION = 'Change BDREF based on AJOUT_PDET sent by Perrine (SPI)'
|
40
|
+
POINT2POLY_PICC = 'Convert the points to polygons based on PICC'
|
41
|
+
POINT2POLY_CAPAPICC = 'Convert the points to polygons based on PICC and CaPa'
|
42
|
+
INHABITED = 'Select only inhabited buildings'
|
43
|
+
ROAD = 'Create a buffer around the roads'
|
44
|
+
COPY = 'Copy the data'
|
45
|
+
|
46
|
+
class GPU_2D_file_extensions(Enum):
|
47
|
+
TIF = '.tif' # raster
|
48
|
+
TIFF = '.tiff' # raster
|
49
|
+
PY = '.py' # python script
|
50
|
+
NPY = '.npy' # numpy array
|
51
|
+
BIN = '.bin' # WOLF binary file
|
52
|
+
JSON = '.json' # json file
|
53
|
+
TXT = '.txt' # hydrographs
|
54
|
+
|
55
|
+
|
56
|
+
class Vulnerability_csv():
|
57
|
+
|
58
|
+
def __init__(self, file:Path) -> None:
|
59
|
+
self.file = file
|
60
|
+
self.data = pd.read_csv(file, sep=",", encoding='latin-1')
|
61
|
+
|
62
|
+
def get_layers(self) -> list:
|
63
|
+
return [a[1] for a in self.data["Path"].str.split('/')]
|
64
|
+
|
65
|
+
def get_vulnerability_level(self, layer:str) -> str:
|
66
|
+
idx = self.get_layers().index(layer)
|
67
|
+
return self.data.iloc[idx]["Vulne"]
|
68
|
+
|
69
|
+
def get_vulnerability_code(self, layer:str) -> str:
|
70
|
+
idx = self.get_layers().index(layer)
|
71
|
+
return self.data.iloc[idx]["Code"]
|
72
|
+
|
73
|
+
|
74
|
+
def get_data_type(fname:Path):
|
75
|
+
|
76
|
+
fname = Path(fname)
|
77
|
+
""" Get the data type of the input file from extension """
|
78
|
+
if fname.name.endswith('.gpkg'):
|
79
|
+
return 'GPKG'
|
80
|
+
elif fname.name.endswith('.shp'):
|
81
|
+
return 'ESRI Shapefile'
|
82
|
+
elif fname.name.endswith('.gdb'):
|
83
|
+
return 'OpenfileGDB'
|
84
|
+
else:
|
85
|
+
return None
|
86
|
+
|
87
|
+
def cleaning_directory(dir:Path):
|
88
|
+
""" Cleaning the directory """
|
89
|
+
|
90
|
+
logging.info("Cleaning the directory {}".format(dir))
|
91
|
+
|
92
|
+
files_in_output = list(dir.iterdir())
|
93
|
+
for item in files_in_output:
|
94
|
+
if item.is_file():
|
95
|
+
os.remove(item)
|
96
|
+
|
97
|
+
class Accept_Manager():
|
98
|
+
"""
|
99
|
+
Structure to store the directories and names of the files.
|
100
|
+
|
101
|
+
In the main directory, the following directories are mandatory/created:
|
102
|
+
- INPUT : filled by the user - contains the input data
|
103
|
+
- TEMP : created by the script - contains the temporary data for the study area
|
104
|
+
- OUTPUT: created by the script - contains the output data for each scenario of the study area
|
105
|
+
|
106
|
+
The INPUT directory contains the following subdirectories:
|
107
|
+
- DATABASE: contains the data for the **entire Walloon region**
|
108
|
+
- Cadastre_Walloon.gpkg: the Cadastre Walloon file
|
109
|
+
- GT_Resilence_dataRisques202010.gdb: the original gdb file from SPW - GT Resilience
|
110
|
+
- PICC-vDIFF.gdb: the PICC Walloon file
|
111
|
+
- CE_IGN_TOP10V: the IGN top10v shapefile
|
112
|
+
- EPU_STATIONS_NEW:
|
113
|
+
- AJOUT_PDET_EPU_DG03_STATIONS.shp: the EPU stations shapefile
|
114
|
+
- STUDY_AREA: contains the study area shapefiles - one for each study area - e.g. Bassin_Vesdre.shp
|
115
|
+
- CSVs: contains the CSV files
|
116
|
+
- Intermediate.csv: contains the matrices data for the acceptability computation
|
117
|
+
# - Ponderation.csv: contains the ponderation data for the acceptability computation
|
118
|
+
- Vulnerability.csv: contains the mapping between layers and vulnerability levels - a code value is also provided
|
119
|
+
- WATER_DEPTH: contains the water depth data for each scenario
|
120
|
+
- Study_area1:
|
121
|
+
- Scenario1
|
122
|
+
- Scenario2
|
123
|
+
-...
|
124
|
+
- ScenarioN
|
125
|
+
- Study_area2:
|
126
|
+
- Scenario1
|
127
|
+
- Scenario2
|
128
|
+
-...
|
129
|
+
- ScenarioN
|
130
|
+
-...
|
131
|
+
- Study_areaN:
|
132
|
+
- Scenario1
|
133
|
+
- Scenario2
|
134
|
+
-...
|
135
|
+
- ScenarioN
|
136
|
+
|
137
|
+
The TEMP directory contains the following subdirectories:
|
138
|
+
- DATABASES: contains the temporary data each study area
|
139
|
+
- Study_area1:
|
140
|
+
- database.gpkg: the clipped database
|
141
|
+
- CaPa.gpkg: the clipped Cadastre Walloon file
|
142
|
+
- PICC.gpkg: the clipped PICC Walloon file
|
143
|
+
- CE_IGN_TOP10V.tiff: the IGN top10v raster file
|
144
|
+
- Maske_River_extent.tiff: the river extent raster file from IGN
|
145
|
+
- VULNERABILITY: the vulnerability data
|
146
|
+
- RASTERS:
|
147
|
+
- Code : one file for each layer
|
148
|
+
- Vulne : one file for each layer
|
149
|
+
- Scenario1:
|
150
|
+
|
151
|
+
"""
|
152
|
+
|
153
|
+
def __init__(self,
|
154
|
+
main_dir:str = 'Data',
|
155
|
+
Study_area:str = 'Bassin_Vesdre.shp',
|
156
|
+
scenario = None,
|
157
|
+
Original_gdb:str = 'GT_Resilence_dataRisques202010.gdb',
|
158
|
+
CaPa_Walloon:str = 'Cadastre_Walloon.gpkg',
|
159
|
+
PICC_Walloon:str = 'PICC_vDIFF.gdb',
|
160
|
+
CE_IGN_top10v:str = 'CE_IGN_TOP10V/CE_IGN_TOP10V.shp',
|
161
|
+
EPU_Stations:str = 'AJOUT_PDET_EPU_DG03_STATIONS.shp',
|
162
|
+
Ponderation_csv:str = 'Ponderation.csv',
|
163
|
+
Vuln_csv:str = 'Vulnerability.csv',
|
164
|
+
Intermediate_csv:str = 'Intermediate.csv'
|
165
|
+
) -> None:
|
166
|
+
|
167
|
+
self.old_dir:Path = Path(os.getcwd())
|
168
|
+
|
169
|
+
self.main_dir:Path = Path(main_dir)
|
170
|
+
|
171
|
+
# If it is a string, concatenate it with the current directory
|
172
|
+
if not self.main_dir.is_absolute():
|
173
|
+
self.main_dir = Path(os.getcwd()) / self.main_dir
|
174
|
+
|
175
|
+
self._study_area = str(Study_area)
|
176
|
+
|
177
|
+
if Study_area is not None:
|
178
|
+
if not str(self._study_area).endswith('.shp'):
|
179
|
+
self._study_area += '.shp'
|
180
|
+
|
181
|
+
self._scenario = scenario
|
182
|
+
self._original_gdb = Original_gdb
|
183
|
+
self._capa_walloon = CaPa_Walloon
|
184
|
+
self._picc_walloon = PICC_Walloon
|
185
|
+
self._ce_ign_top10v = CE_IGN_top10v
|
186
|
+
|
187
|
+
self.IN_DIR = self.main_dir / "INPUT"
|
188
|
+
self.IN_CH_VULN = self.IN_DIR / "CHANGE_VULNE"
|
189
|
+
self.IN_CH_SA_SC = self.IN_CH_VULN / str(Study_area).rsplit('.', 1)[0] / str(scenario)
|
190
|
+
self.IN_DATABASE = self.IN_DIR / "DATABASE"
|
191
|
+
self.IN_STUDY_AREA = self.IN_DIR / "STUDY_AREA"
|
192
|
+
self.IN_CSV = self.IN_DIR / "CSVs"
|
193
|
+
self.IN_WATER_DEPTH = self.IN_DIR / "WATER_DEPTH"
|
194
|
+
self.IN_EPU_STATIONS= self.IN_DIR / "EPU_STATIONS_NEW"
|
195
|
+
|
196
|
+
self.ORIGINAL_GDB = self.IN_DATABASE / self._original_gdb
|
197
|
+
self.CAPA_WALLOON = self.IN_DATABASE / self._capa_walloon
|
198
|
+
self.PICC_WALLOON = self.IN_DATABASE / self._picc_walloon
|
199
|
+
self.CE_IGN_TOP10V = self.IN_DATABASE / self._ce_ign_top10v
|
200
|
+
self.EPU_STATIONS = self.IN_EPU_STATIONS / EPU_Stations
|
201
|
+
|
202
|
+
self.VULNERABILITY_CSV = self.IN_CSV / Vuln_csv
|
203
|
+
self.POINTS_CSV = self.IN_CSV / Intermediate_csv
|
204
|
+
self.PONDERATION_CSV = self.IN_CSV / Ponderation_csv
|
205
|
+
|
206
|
+
self._CSVs = [self.VULNERABILITY_CSV, self.POINTS_CSV]
|
207
|
+
self._GPKGs= [self.CAPA_WALLOON, self.PICC_WALLOON]
|
208
|
+
self._GDBs = [self.ORIGINAL_GDB]
|
209
|
+
self._SHPs = [self.CE_IGN_TOP10V, self.EPU_STATIONS]
|
210
|
+
self._ALLS = self._CSVs + self._GPKGs + self._GDBs + self._SHPs
|
211
|
+
|
212
|
+
self.TMP_DIR = self.main_dir / "TEMP"
|
213
|
+
|
214
|
+
self.OUT_DIR = self.main_dir / "OUTPUT"
|
215
|
+
|
216
|
+
self.points2polys = []
|
217
|
+
self.lines2polys = []
|
218
|
+
|
219
|
+
self.create_paths()
|
220
|
+
self.create_paths_scenario()
|
221
|
+
|
222
|
+
def create_paths(self):
|
223
|
+
""" Create the paths for the directories and files """
|
224
|
+
|
225
|
+
self.points2polys = []
|
226
|
+
self.lines2polys = []
|
227
|
+
|
228
|
+
if self._study_area is not None:
|
229
|
+
|
230
|
+
self.Study_area:Path = Path(self._study_area)
|
231
|
+
|
232
|
+
self.TMP_STUDYAREA = self.TMP_DIR / self.Study_area.stem
|
233
|
+
self.TMP_DATABASE = self.TMP_STUDYAREA / "DATABASES"
|
234
|
+
|
235
|
+
self.TMP_CLIPGDB = self.TMP_DATABASE / "CLIP_GDB"
|
236
|
+
self.TMP_CADASTER = self.TMP_DATABASE / "CLIP_CADASTER"
|
237
|
+
self.TMP_PICC = self.TMP_DATABASE / "CLIP_PICC"
|
238
|
+
self.TMP_IGNCE = self.TMP_DATABASE / "CLIP_IGN_CE"
|
239
|
+
self.TMP_WMODIF = self.TMP_DATABASE / "WITH_MODIF"
|
240
|
+
self.TMP_CODEVULNE = self.TMP_DATABASE / "CODE_VULNE"
|
241
|
+
|
242
|
+
self.TMP_VULN_DIR = self.TMP_STUDYAREA / "VULNERABILITY"
|
243
|
+
self.TMP_SA_SC = self.TMP_VULN_DIR / str(self._scenario)
|
244
|
+
|
245
|
+
|
246
|
+
self.TMP_RASTERS = self.TMP_VULN_DIR / "RASTERS"
|
247
|
+
self.TMP_RASTERS_CODE = self.TMP_RASTERS / "Code"
|
248
|
+
self.TMP_RASTERS_VULNE = self.TMP_RASTERS / "Vulne"
|
249
|
+
|
250
|
+
self.OUT_STUDY_AREA = self.OUT_DIR / self.Study_area.stem
|
251
|
+
|
252
|
+
self.SA = self.IN_STUDY_AREA / self.Study_area
|
253
|
+
self.SA_MASKED_RIVER = self.TMP_IGNCE / "CE_IGN_TOP10V.tiff"
|
254
|
+
self.SA_VULN = self.TMP_VULN_DIR / "Vulnerability.tiff"
|
255
|
+
self.SA_CODE = self.TMP_VULN_DIR / "Vulnerability_Code.tiff"
|
256
|
+
|
257
|
+
else:
|
258
|
+
self.Study_area = None
|
259
|
+
self._scenario = None
|
260
|
+
|
261
|
+
self.TMP_STUDYAREA = None
|
262
|
+
self.TMP_DATABASE = None
|
263
|
+
self.TMP_CADASTER = None
|
264
|
+
self.TMP_PICC = None
|
265
|
+
self.TMP_IGNCE = None
|
266
|
+
self.TMP_WMODIF = None
|
267
|
+
self.TMP_CODEVULNE = None
|
268
|
+
self.TMP_VULN_DIR = None
|
269
|
+
self.TMP_RASTERS = None
|
270
|
+
self.TMP_RASTERS_CODE = None
|
271
|
+
self.TMP_RASTERS_VULNE = None
|
272
|
+
|
273
|
+
self.OUT_STUDY_AREA = None
|
274
|
+
|
275
|
+
self.SA = None
|
276
|
+
self.SA_MASKED_RIVER = None
|
277
|
+
|
278
|
+
self.SA_VULN = None
|
279
|
+
self.SA_CODE = None
|
280
|
+
|
281
|
+
self.create_paths_scenario()
|
282
|
+
|
283
|
+
self.check_inputs()
|
284
|
+
self.check_temporary()
|
285
|
+
self.check_outputs()
|
286
|
+
|
287
|
+
def create_paths_scenario(self):
|
288
|
+
|
289
|
+
if self._scenario is not None:
|
290
|
+
|
291
|
+
self.scenario:str = str(self._scenario)
|
292
|
+
|
293
|
+
self.IN_SCEN_DIR = self.IN_WATER_DEPTH / self.SA.stem / self.scenario
|
294
|
+
self.IN_SA_Base = self.IN_WATER_DEPTH / self.SA.stem / "Scenario_baseline"
|
295
|
+
self.IN_SA_INTERP = self.IN_SCEN_DIR / "INTERP_WD"
|
296
|
+
self.IN_SA_EXTRACTED = self.IN_SCEN_DIR / "EXTRACTED_LAST_STEP_WD"
|
297
|
+
self.IN_SA_DEM = self.IN_SCEN_DIR / "DEM_FILES"
|
298
|
+
|
299
|
+
self.IN_RM_BUILD_DIR = self.IN_SCEN_DIR / "REMOVED_BUILDINGS"
|
300
|
+
|
301
|
+
self.TMP_SCEN_DIR = self.TMP_VULN_DIR / self.scenario
|
302
|
+
self.TMP_RM_BUILD_DIR = self.TMP_SCEN_DIR / "REMOVED_BUILDINGS"
|
303
|
+
self.TMP_QFILES = self.TMP_SCEN_DIR / "Q_FILES"
|
304
|
+
|
305
|
+
self.TMP_VULN = self.TMP_SCEN_DIR / "Vulnerability.tiff"
|
306
|
+
self.TMP_CODE = self.TMP_SCEN_DIR / "Vulnerability_Code.tiff"
|
307
|
+
|
308
|
+
self.OUT_SCEN_DIR = self.OUT_STUDY_AREA / self.scenario
|
309
|
+
|
310
|
+
self.OUT_VULN = self.OUT_SCEN_DIR / "Vulnerability.tiff"
|
311
|
+
self.OUT_VULN_VRT = self.OUT_SCEN_DIR / "__vuln_assembly.vrt"
|
312
|
+
self.OUT_VULN_S = self.OUT_SCEN_DIR / "Vulnerability_scenarios" #no .tif because wolf_vrt add it itself (see create_vrtIfExists below)
|
313
|
+
self.OUT_VULN_Stif = self.OUT_SCEN_DIR / "Vulnerability_scenarios.tif"
|
314
|
+
self.OUT_CODE = self.OUT_SCEN_DIR / "Vulnerability_Code.tiff"
|
315
|
+
self.OUT_MASKED_RIVER = self.OUT_SCEN_DIR / "Masked_River_extent.tiff"
|
316
|
+
self.OUT_ACCEPT = self.OUT_SCEN_DIR / "Acceptability.tiff"
|
317
|
+
self.OUT_ACCEPT_100M = self.OUT_SCEN_DIR / "Acceptability_100m.tiff"
|
318
|
+
self.OUT_ACCEPT_Stif = self.OUT_SCEN_DIR / "Acceptability_scenarios.tiff"
|
319
|
+
self.OUT_ACCEPT_100M_Stif = self.OUT_SCEN_DIR / "Acceptability_scenarios_100m.tiff"
|
320
|
+
else:
|
321
|
+
self.scenario = None
|
322
|
+
|
323
|
+
self.IN_SCEN_DIR = None
|
324
|
+
self.IN_RM_BUILD_DIR = None
|
325
|
+
|
326
|
+
self.TMP_SCEN_DIR = None
|
327
|
+
self.TMP_RM_BUILD_DIR = None
|
328
|
+
self.TMP_QFILES = None
|
329
|
+
|
330
|
+
self.TMP_VULN = None
|
331
|
+
self.TMP_CODE = None
|
332
|
+
|
333
|
+
self.OUT_SCEN_DIR = None
|
334
|
+
self.OUT_VULN = None
|
335
|
+
self.OUT_CODE = None
|
336
|
+
self.OUT_MASKED_RIVER = None
|
337
|
+
self.OUT_ACCEPT = None
|
338
|
+
self.OUT_ACCEPT_100M = None
|
339
|
+
|
340
|
+
@property
|
341
|
+
def is_valid_inputs(self) -> bool:
|
342
|
+
return self.check_inputs()
|
343
|
+
|
344
|
+
@property
|
345
|
+
def is_valid_study_area(self) -> bool:
|
346
|
+
return self.SA.exists()
|
347
|
+
|
348
|
+
@property
|
349
|
+
def is_valid_vulnerability_csv(self) -> bool:
|
350
|
+
return self.VULNERABILITY_CSV.exists()
|
351
|
+
|
352
|
+
@property
|
353
|
+
def is_valid_points_csv(self) -> bool:
|
354
|
+
return self.POINTS_CSV.exists()
|
355
|
+
|
356
|
+
@property
|
357
|
+
def is_valid_ponderation_csv(self) -> bool:
|
358
|
+
return self.PONDERATION_CSV.exists()
|
359
|
+
|
360
|
+
def check_files(self) -> str:
|
361
|
+
""" Check the files in the directories """
|
362
|
+
|
363
|
+
files = ""
|
364
|
+
for a in self._ALLS:
|
365
|
+
if not a.exists():
|
366
|
+
files += str(a) + "\n"
|
367
|
+
|
368
|
+
return files
|
369
|
+
|
370
|
+
def change_studyarea(self, Study_area:str = None) -> None:
|
371
|
+
|
372
|
+
if Study_area is None:
|
373
|
+
self._study_area = None
|
374
|
+
self._scenario = None
|
375
|
+
else:
|
376
|
+
if Study_area in self.get_list_studyareas(with_suffix=True):
|
377
|
+
self._study_area = Path(Study_area)
|
378
|
+
else:
|
379
|
+
logging.error("The study area does not exist in the study area directory")
|
380
|
+
|
381
|
+
self.create_paths()
|
382
|
+
|
383
|
+
def change_scenario(self, scenario:str) -> None:
    """Switch the manager to *scenario* and refresh the derived folders.

    When the name is unknown an error is logged and the current scenario
    is left untouched.

    :param scenario: scenario name, as returned by get_list_scenarios()
    """

    # Guard clause: unknown scenario -> log and bail out.
    if scenario not in self.get_list_scenarios():
        logging.error("The scenario does not exist in the water depth directory")
        return

    self._scenario = scenario
    # Rebuild scenario-dependent paths, then make sure the temporary
    # and output directory trees exist.
    self.create_paths_scenario()
    self.check_temporary()
    self.check_outputs()
|
392
|
+
|
393
|
+
def get_files_in_rm_buildings(self) -> list[Path]:
|
394
|
+
return [Path(a) for a in glob.glob(str(self.IN_RM_BUILD_DIR / ("*"+ EXTENT)))]
|
395
|
+
|
396
|
+
def get_files_in_CHANGE_VULNE(self) -> list[Path]:
|
397
|
+
return [Path(a) for a in glob.glob(str(self.IN_CH_VULN / "*.tiff"))]
|
398
|
+
|
399
|
+
def get_files_in_rasters_vulne(self) -> list[Path]:
|
400
|
+
return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_VULNE / "*.tiff"))]
|
401
|
+
|
402
|
+
def get_layers_in_gdb(self) -> list[str]:
|
403
|
+
return [a[0] for a in list_layers(str(self.ORIGINAL_GDB))]
|
404
|
+
|
405
|
+
def get_layer_types_in_gdb(self) -> list[str]:
|
406
|
+
return [a[1] for a in list_layers(str(self.ORIGINAL_GDB))]
|
407
|
+
|
408
|
+
def get_layers_in_clipgdb(self) -> list[str]:
|
409
|
+
return [Path(a).stem for a in glob.glob(str(self.TMP_CLIPGDB / ("*"+ EXTENT)))]
|
410
|
+
|
411
|
+
def get_layers_in_wmodif(self) -> list[str]:
|
412
|
+
return [Path(a).stem for a in glob.glob(str(self.TMP_WMODIF / ("*"+ EXTENT)))]
|
413
|
+
|
414
|
+
def get_layers_in_codevulne(self) -> list[str]:
|
415
|
+
return [Path(a).stem for a in glob.glob(str(self.TMP_CODEVULNE / ("*"+ EXTENT)))]
|
416
|
+
|
417
|
+
def get_files_in_rasters_code(self) -> list[Path]:
|
418
|
+
return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_CODE / "*.tiff"))]
|
419
|
+
|
420
|
+
def get_q_files(self) -> list[Path]:
|
421
|
+
return [Path(a) for a in glob.glob(str(self.TMP_QFILES / "*.tif"))]
|
422
|
+
|
423
|
+
def get_list_scenarios(self) -> list[str]:
|
424
|
+
|
425
|
+
list_sc = [Path(a).stem for a in glob.glob(str(self.IN_WATER_DEPTH / self.SA.stem / "Scenario*"))]
|
426
|
+
return list_sc
|
427
|
+
|
428
|
+
def get_list_studyareas(self, with_suffix:bool = False) -> list[str]:
    """List the study-area shapefiles found in IN_STUDY_AREA.

    :param with_suffix: when True return file names ('name.shp'),
                        otherwise bare stems ('name')
    """

    shapefiles = glob.glob(str(self.IN_STUDY_AREA / "*.shp"))
    # 'name' keeps the .shp extension, 'stem' drops it.
    attr = 'name' if with_suffix else 'stem'
    return [getattr(Path(cursha), attr) for cursha in shapefiles]
|
434
|
+
|
435
|
+
def get_sims_files_for_scenario(self) -> list[Path]:
|
436
|
+
files = [] #to avoid NoneType
|
437
|
+
if self.IN_SA_INTERP.exists() :
|
438
|
+
files = [Path(a) for a in glob.glob(str(self.IN_SA_INTERP / "*.tif"))]
|
439
|
+
else :
|
440
|
+
logging.error("No _baseline")
|
441
|
+
return files
|
442
|
+
|
443
|
+
def get_sims_files_for_baseline(self) -> list[Path]:
|
444
|
+
files = [] #to avoid NoneType
|
445
|
+
if self.IN_SA_INTERP.exists() :
|
446
|
+
logging.info("Getting the _baseline WATER_DEPTHS files.")
|
447
|
+
track = Path(str(self.IN_SA_INTERP / "*.tif"))
|
448
|
+
files = [Path(a) for a in glob.glob(str(track))]
|
449
|
+
else :
|
450
|
+
logging.error("No _baseline WATER_DEPTHS files")
|
451
|
+
|
452
|
+
return files
|
453
|
+
|
454
|
+
def get_sim_file_for_return_period(self, return_period:int) -> Path:
    """Return the simulation file matching *return_period*, or None.

    Two naming conventions are supported (see get_return_periods):
      - '*_T{rp}_h.tif'  : the '_T{rp}_' token is fully delimited;
      - 'T{rp}.tif'-like : the 'T{rp}' token must NOT be followed by a
        digit, otherwise searching 'T2' would wrongly match a 'T25' file.

    :param return_period: the return period to look for
    :return: the matching file, or None when nothing matches
    """

    sims = self.get_sims_files_for_scenario()

    if len(sims)==0:
        logging.info("No simulations found") #no error, need to act if so. See accept manager flow chart
        return None

    if "_h.tif" in sims[0].name:
        # '_T{}_' is delimited on both sides -- plain substring search is safe.
        for cursim in sims:
            if cursim.stem.find("_T{}_".format(return_period)) != -1:
                return cursim
    else:
        token = "T{}".format(return_period)
        for cursim in sims:
            idx = cursim.stem.find(token)
            if idx != -1:
                # Bug fix: reject partial matches (e.g. 'T2' inside 'T25')
                # by requiring that the character right after the token,
                # if any, is not a digit.
                follower = cursim.stem[idx + len(token): idx + len(token) + 1]
                if not follower.isdigit():
                    return cursim

    return None
|
472
|
+
|
473
|
+
def get_types_in_file(self, file:str) -> list[str]:
|
474
|
+
""" Get the types of the geometries in the Shape file """
|
475
|
+
|
476
|
+
return [a[1] for a in list_layers(str(file))]
|
477
|
+
|
478
|
+
def is_type_unique(self, file:str) -> bool:
|
479
|
+
""" Check if the file contains only one type of geometry """
|
480
|
+
|
481
|
+
types = self.get_types_in_file(file)
|
482
|
+
return len(types) == 1
|
483
|
+
|
484
|
+
def is_polygons(self, set2test:set) -> bool:
    """Return True when every geometry-type name in *set2test* is polygonal.

    A name counts as polygonal when it contains the substring 'Polygon'
    (covers 'Polygon', 'MultiPolygon', '3D Polygon', ...).

    :param set2test: set of geometry-type names (must be non-empty)
    """

    geom_types = list(set2test)
    # Keep the historical behaviour: an empty input raises IndexError.
    first = geom_types[0]
    if 'Polygon' not in first:
        return False
    # Every entry (including the first) must be polygonal.
    return all('Polygon' in geom_type for geom_type in geom_types)
|
496
|
+
|
497
|
+
def is_same_types(self, file:str) -> tuple[bool, str]:
    """Check that *file* holds a single family of geometry types.

    :param file: path forwarded to get_types_in_file
    :return: (True, 'Point'|'Polygon'|'LineString') when homogeneous,
             (False, None) otherwise
    :raises ValueError: when the (first) geometry type is not recognized
    """

    types = self.get_types_in_file(file)

    # NOTE: the probe order matters -- e.g. 'MultiLineString' contains
    # 'LineString' but neither 'Point' nor 'Polygon'.
    families = ('Point', 'Polygon', 'LineString')

    if len(types) == 1:
        only = types[0]
        for family in families:
            if family in only:
                return True, family
        raise ValueError(f"The type of geometry {only} is not recognized")

    first = types[0]
    for family in families:
        if family in first:
            # Every other layer must belong to the same family.
            if all(family in curtype for curtype in types):
                return True, family
            return False, None

    raise ValueError(f"The type of geometry {first} is not recognized")
|
534
|
+
|
535
|
+
|
536
|
+
def get_return_periods(self) -> list[int]:
    """
    Get the return periods from the simulations

    :return list[int]: the **sorted list** of return periods
    """

    # List files in directory
    sims = self.get_sims_files_for_scenario()

    if len(sims)==0:
        logging.info("No simulations found")#no error, need to act if so. See accept manager flow chart
        return []

    # Two cases:
    # - Return periods are named as T2.tif, T5.tif, T10.tif, ...
    # - Return periods are named as *_T2_h.tif, *_T5_h.tif, *_T10_h.tif, ...

    if "_h.tif" in sims[0].name:

        # Searching for the position of the return period in the name
        # idx_T points at "_T", idx_h at "_h.tif"; the digits sit in between.
        idx_T = [cursim.name.find("_T") for cursim in sims]
        idx_h = [cursim.name.find("_h.tif") for cursim in sims]

        # Sanity checks: both markers must exist and be ordered "_T...._h.tif".
        assert len(idx_T) == len(idx_h), "The number of T and h are not the same"
        for curT, curh in zip(idx_T, idx_h):
            assert curT != -1, "The T is not found"
            assert curh != -1, "The h is not found"
            assert curh > curT, "The h is before the T"

        # Create the list of return periods -- only the numeric part
        # (+2 skips the "_T" prefix).
        sims = [int(cursim.name[idx_T[i]+2:idx_h[i]]) for i, cursim in enumerate(sims)]
    else:
        # searching for the position of the return period in the name
        # (+1 below skips the single "T" prefix; the suffix is ".tif").
        idx_T = [cursim.name.find("T") for cursim in sims]
        idx_h = [cursim.name.find(".tif") for cursim in sims]

        assert len(idx_T) == len(idx_h), "The number of T and h are not the same"
        for curT, curh in zip(idx_T, idx_h):
            assert curT != -1, "The T is not found"
            assert curh != -1, "The h is not found"
            assert curh > curT, "The h is before the T"

        # create the list of return periods -- only the numeric part
        sims = [int(cursim.name[idx_T[i]+1:idx_h[i]]) for i, cursim in enumerate(sims)]

    return sorted(sims)
|
583
|
+
|
584
|
+
def get_ponderations(self) -> pd.DataFrame:
    """ Get the ponderation data from available simulations """
    # Each return period T receives a weight built from the annual
    # exceedance probabilities 1/T: the weight of T_i is half the width of
    # the probability interval between its neighbours (trapezoid-style),
    # with one-sided extensions at both ends of the sorted list.

    rt = self.get_return_periods()

    if len(rt)==0:
        logging.info("No simulations found")
        return None

    if len(rt)<2:
        # At least two return periods are needed to define an interval.
        logging.info("Need for more simulations")
        return None

    else :
        pond = []

        # First (smallest) period: its own probability plus half the gap
        # towards the second period.
        pond.append(1./float(rt[0]) + (1./float(rt[0]) - 1./float(rt[1]))/2.)
        for i in range(1, len(rt)-1):
            # Full formula
            # pond.append((1./float(rt[i-1]) - 1./float(rt[i]))/2. + (1./float(rt[i]) - 1./float(rt[i+1]))/2.)

            # More compact formula
            pond.append((1./float(rt[i-1]) - 1./float(rt[i+1]))/2.)

        # Last (largest) period: its own probability plus half the gap
        # from the second-to-last period.
        pond.append(1./float(rt[-1]) + (1./float(rt[-2]) - 1./float(rt[-1]))/2.)

        # Indexed by return period so callers can look weights up by T.
        return pd.DataFrame(pond, columns=["Ponderation"], index=rt)
|
611
|
+
|
612
|
+
def get_filepath_for_return_period(self, return_period:int) -> Path:
|
613
|
+
|
614
|
+
return self.get_sim_file_for_return_period(return_period)
|
615
|
+
|
616
|
+
def change_dir(self) -> None:
|
617
|
+
os.chdir(self.main_dir)
|
618
|
+
logging.info("Current directory: %s", os.getcwd())
|
619
|
+
|
620
|
+
def restore_dir(self) -> None:
|
621
|
+
os.chdir(self.old_dir)
|
622
|
+
logging.info("Current directory: %s", os.getcwd())
|
623
|
+
|
624
|
+
def check_inputs(self) -> bool:
|
625
|
+
"""
|
626
|
+
Check if the input directories exist.
|
627
|
+
|
628
|
+
Inputs can not be created automatically. The user must provide them.
|
629
|
+
"""
|
630
|
+
|
631
|
+
err = False
|
632
|
+
if not self.IN_DATABASE.exists():
|
633
|
+
logging.error("INPUT : The database directory does not exist")
|
634
|
+
err = True
|
635
|
+
|
636
|
+
if not self.IN_STUDY_AREA.exists():
|
637
|
+
logging.error("INPUT : The study area directory does not exist")
|
638
|
+
err = True
|
639
|
+
|
640
|
+
if not self.IN_CSV.exists():
|
641
|
+
logging.error("INPUT : The CSV directory does not exist")
|
642
|
+
err = True
|
643
|
+
|
644
|
+
if not self.IN_WATER_DEPTH.exists():
|
645
|
+
logging.error("INPUT : The water depth directory does not exist")
|
646
|
+
err = True
|
647
|
+
|
648
|
+
if not self.IN_EPU_STATIONS.exists():
|
649
|
+
logging.error("INPUT : The EPU stations directory does not exist")
|
650
|
+
err = True
|
651
|
+
|
652
|
+
if self.Study_area is not None:
|
653
|
+
if not self.SA.exists():
|
654
|
+
logging.error("INPUT : The study area file does not exist")
|
655
|
+
err = True
|
656
|
+
|
657
|
+
if not self.ORIGINAL_GDB.exists():
|
658
|
+
logging.error("INPUT : The original gdb file does not exist - Please pull it from the SPW-ARNE")
|
659
|
+
err = True
|
660
|
+
|
661
|
+
if not self.CAPA_WALLOON.exists():
|
662
|
+
logging.error("INPUT : The Cadastre Walloon file does not exist - Please pull it from the SPW")
|
663
|
+
err = True
|
664
|
+
|
665
|
+
if not self.PICC_WALLOON.exists():
|
666
|
+
logging.error("INPUT : The PICC Walloon file does not exist - Please pull it from the SPW website")
|
667
|
+
err = True
|
668
|
+
|
669
|
+
if not self.CE_IGN_TOP10V.exists():
|
670
|
+
logging.error("INPUT : The CE IGN top10v file does not exist - Please pull it from the IGN")
|
671
|
+
err = True
|
672
|
+
|
673
|
+
if self.scenario is None:
|
674
|
+
logging.debug("The scenario has not been defined")
|
675
|
+
else:
|
676
|
+
if not self.IN_SCEN_DIR.exists():
|
677
|
+
logging.error("The wd scenario directory does not exist")
|
678
|
+
err = True
|
679
|
+
|
680
|
+
return not err
|
681
|
+
|
682
|
+
def check_temporary(self) -> bool:
|
683
|
+
"""
|
684
|
+
Check if the temporary directories exist.
|
685
|
+
|
686
|
+
If not, create them.
|
687
|
+
"""
|
688
|
+
|
689
|
+
self.TMP_DIR.mkdir(parents=True, exist_ok=True)
|
690
|
+
|
691
|
+
if self.Study_area is not None:
|
692
|
+
self.TMP_STUDYAREA.mkdir(parents=True, exist_ok=True)
|
693
|
+
self.TMP_DATABASE.mkdir(parents=True, exist_ok=True)
|
694
|
+
self.TMP_CLIPGDB.mkdir(parents=True, exist_ok=True)
|
695
|
+
self.TMP_CADASTER.mkdir(parents=True, exist_ok=True)
|
696
|
+
self.TMP_WMODIF.mkdir(parents=True, exist_ok=True)
|
697
|
+
self.TMP_CODEVULNE.mkdir(parents=True, exist_ok=True)
|
698
|
+
self.TMP_PICC.mkdir(parents=True, exist_ok=True)
|
699
|
+
self.TMP_IGNCE.mkdir(parents=True, exist_ok=True)
|
700
|
+
self.TMP_VULN_DIR.mkdir(parents=True, exist_ok=True)
|
701
|
+
self.TMP_RASTERS.mkdir(parents=True, exist_ok=True)
|
702
|
+
self.TMP_RASTERS_CODE.mkdir(parents=True, exist_ok=True)
|
703
|
+
self.TMP_RASTERS_VULNE.mkdir(parents=True, exist_ok=True)
|
704
|
+
|
705
|
+
if self.scenario is not None:
|
706
|
+
self.TMP_SCEN_DIR.mkdir(parents=True, exist_ok=True)
|
707
|
+
self.TMP_RM_BUILD_DIR.mkdir(parents=True, exist_ok=True)
|
708
|
+
self.TMP_QFILES.mkdir(parents=True, exist_ok=True)
|
709
|
+
|
710
|
+
return True
|
711
|
+
|
712
|
+
def check_outputs(self) -> bool:
|
713
|
+
"""
|
714
|
+
Check if the output directories exist.
|
715
|
+
|
716
|
+
If not, create them.
|
717
|
+
"""
|
718
|
+
|
719
|
+
self.OUT_DIR.mkdir(parents=True, exist_ok=True)
|
720
|
+
|
721
|
+
if self.Study_area is not None:
|
722
|
+
self.OUT_STUDY_AREA.mkdir(parents=True, exist_ok=True)
|
723
|
+
|
724
|
+
if self.scenario is not None:
|
725
|
+
self.OUT_SCEN_DIR.mkdir(parents=True, exist_ok=True)
|
726
|
+
|
727
|
+
return True
|
728
|
+
|
729
|
+
def check_before_database_creation(self) -> bool:
    """Check that the mandatory inputs exist before creating the database.

    :return: True when all prerequisites are present, False otherwise
             (details are logged).
    """

    if not self.is_valid_inputs:
        # Typo fixed in the log message ("Theere" -> "There").
        logging.error("There are missing input directories - Please check carefully the input directories and the logs")
        return False

    if not self.is_valid_study_area:
        logging.error("The study area file does not exist - Please create it")
        return False

    if not self.is_valid_vulnerability_csv:
        logging.error("The vulnerability CSV file does not exist - Please create it")
        return False

    return True
|
745
|
+
|
746
|
+
def check_before_rasterize(self) -> bool:
|
747
|
+
|
748
|
+
if not self.TMP_CODEVULNE.exists():
|
749
|
+
logging.error("The final database with vulnerability levels does not exist")
|
750
|
+
return False
|
751
|
+
|
752
|
+
if not self.TMP_WMODIF.exists():
|
753
|
+
logging.error("The vector data with modifications does not exist")
|
754
|
+
return False
|
755
|
+
|
756
|
+
return True
|
757
|
+
|
758
|
+
def check_before_vulnerability(self) -> bool:
|
759
|
+
|
760
|
+
if self.SA is None:
|
761
|
+
logging.error("The area of interest does not exist")
|
762
|
+
return False
|
763
|
+
|
764
|
+
if self.IN_WATER_DEPTH is None:
|
765
|
+
logging.error("The water depth directory does not exist")
|
766
|
+
return False
|
767
|
+
|
768
|
+
if self.IN_SCEN_DIR is None:
|
769
|
+
logging.error("The wd scenario directory does not exist in the water depth directory")
|
770
|
+
return False
|
771
|
+
|
772
|
+
if self.SA_MASKED_RIVER is None:
|
773
|
+
logging.error("The IGN raster does not exist")
|
774
|
+
return False
|
775
|
+
|
776
|
+
return True
|
777
|
+
|
778
|
+
def check_vuln_code_sa(self) -> bool:
|
779
|
+
|
780
|
+
if not self.SA_VULN.exists():#SA_VULN
|
781
|
+
logging.error("The vulnerability raster file does not exist")
|
782
|
+
return False
|
783
|
+
|
784
|
+
if not self.SA_CODE.exists():
|
785
|
+
logging.error("The vulnerability code raster file does not exist")
|
786
|
+
return False
|
787
|
+
|
788
|
+
return True
|
789
|
+
|
790
|
+
def check_vuln_code_scenario(self) -> bool:
|
791
|
+
|
792
|
+
if not self.TMP_VULN.exists():
|
793
|
+
logging.error("The vulnerability raster file does not exist")
|
794
|
+
return False
|
795
|
+
|
796
|
+
if not self.TMP_CODE.exists():
|
797
|
+
logging.error("The vulnerability code raster file does not exist")
|
798
|
+
return False
|
799
|
+
|
800
|
+
return True
|
801
|
+
|
802
|
+
def compare_original_clipped_layers(self) -> str:
|
803
|
+
""" Compare the original layers with the clipped ones """
|
804
|
+
|
805
|
+
layers = self.get_layers_in_gdb()
|
806
|
+
layers_clip = self.get_layers_in_clipgdb()
|
807
|
+
|
808
|
+
ret = 'These layers have not been clipped:\n'
|
809
|
+
for layer in layers:
|
810
|
+
if layer not in layers_clip:
|
811
|
+
ret += " - {}\n".format(layer)
|
812
|
+
|
813
|
+
ret += '\nThese layers have been clipped but are not present in the GDB:\n'
|
814
|
+
for layer in layers_clip:
|
815
|
+
if layer not in layers:
|
816
|
+
ret += " - {}\n".format(layer)
|
817
|
+
|
818
|
+
ret+='\n'
|
819
|
+
|
820
|
+
return ret
|
821
|
+
|
822
|
+
def compare_clipped_raster_layers(self) -> str:
    """ Compare the clipped layers with the rasterized ones.

    :return: human-readable report listing layers present on one side only
    """

    layers = self.get_layers_in_clipgdb()
    layers_rast = self.get_layers_in_codevulne()

    # Bug fix: the message contained a stray '{}' placeholder that was
    # never filled by .format().
    ret = 'These layers have not been rasterized:\n'
    for layer in layers:
        if layer not in layers_rast:
            ret += " - {}\n".format(layer)

    # Typo fixed: "orginal" -> "original".
    ret += '\nThese layers have been rasterized but are not in the original GDB:\n'
    for layer in layers_rast:
        if layer not in layers:
            ret += " - {}\n".format(layer)

    ret+='\n'

    return ret
|
841
|
+
|
842
|
+
def get_operand(self, file:str) -> Modif_Type:
|
843
|
+
""" Get the operand based on the layer name """
|
844
|
+
LAYERS_WALOUS = ["WALOUS_2018_LB72_112",
|
845
|
+
"WALOUS_2018_LB72_31",
|
846
|
+
"WALOUS_2018_LB72_32",
|
847
|
+
"WALOUS_2018_LB72_331",
|
848
|
+
"WALOUS_2018_LB72_332",
|
849
|
+
"WALOUS_2018_LB72_333",
|
850
|
+
"WALOUS_2018_LB72_34"]
|
851
|
+
|
852
|
+
ret, curtype = self.is_same_types(file)
|
853
|
+
layer = Path(file).stem
|
854
|
+
|
855
|
+
if not ret:
|
856
|
+
raise ValueError("The layer contains different types of geometries")
|
857
|
+
|
858
|
+
if layer in LAYERS_WALOUS:
|
859
|
+
return Modif_Type.WALOUS
|
860
|
+
|
861
|
+
elif curtype=="Point":
|
862
|
+
|
863
|
+
self.points2polys.append(layer)
|
864
|
+
|
865
|
+
if layer =="BDREF_DGO3_PASH__SCHEMA_STATIONS_EPU":
|
866
|
+
return Modif_Type.POINT2POLY_EPURATION
|
867
|
+
elif layer =="INFRASIG_SOINS_SANTE__ETAB_AINES":
|
868
|
+
return Modif_Type.POINT2POLY_PICC
|
869
|
+
else:
|
870
|
+
return Modif_Type.POINT2POLY_CAPAPICC
|
871
|
+
|
872
|
+
elif layer =="Hab_2018_CABU":
|
873
|
+
return Modif_Type.INHABITED
|
874
|
+
|
875
|
+
elif layer =="INFRASIG_ROUTE_RES_ROUTIER_TE_AXES":
|
876
|
+
|
877
|
+
self.lines2polys.append(layer)
|
878
|
+
|
879
|
+
return Modif_Type.ROAD
|
880
|
+
|
881
|
+
else:
|
882
|
+
return Modif_Type.COPY
|
883
|
+
|
884
|
+
def check_origin_shape(self) -> list[str]:
    """Verify that all code/vulnerability rasters share the same grid.

    The first code raster is taken as reference; every raster (plus the
    masked-river raster) is compared against it on geotransform,
    projection and pixel dimensions.

    :return: list of files differing from the reference (empty = all OK).
             NOTE(review): on missing or count-mismatched inputs the
             historical code returns False instead of a list -- kept as-is
             for backward compatibility with existing callers.
    """

    code = self.get_files_in_rasters_code()
    vuln = self.get_files_in_rasters_vulne()

    if len(code) == 0:
        logging.error("The code rasters do not exist")
        return False

    if len(vuln) == 0:
        logging.error("The vulnerability rasters do not exist")
        return False

    if len(code) != len(vuln):
        logging.error("The number of code and vulnerability rasters do not match")
        return False

    # we take a reference raster
    ref = gdal.Open(str(code[0]))
    band_ref = ref.GetRasterBand(1)
    proj_ref = ref.GetProjection()
    geo_ref = ref.GetGeoTransform()
    col_ref, row_ref = band_ref.XSize, band_ref.YSize

    # we compare the reference raster with the others
    diff = []
    for cur in code + vuln + [self.SA_MASKED_RIVER]:
        cur_ = gdal.Open(str(cur))
        band_cur = cur_.GetRasterBand(1)
        proj_cur = cur_.GetProjection()
        geo_cur = cur_.GetGeoTransform()
        col_cur, row_cur = band_cur.XSize, band_cur.YSize

        # A file failing several checks is appended several times -- the
        # report is informative only, so duplicates are harmless.
        if geo_ref != geo_cur:
            logging.error("The geotransforms do not match {}".format(cur))
            diff.append(cur)

        if proj_ref != proj_cur:
            logging.error("The projections do not match {}".format(cur))
            diff.append(cur)

        if col_ref != col_cur or row_ref != row_cur:
            logging.error("The dimensions do not match {}".format(cur))
            diff.append(cur)

        # Fix: dereference the GDAL dataset so its file handle is released
        # (GDAL closes the file when the Python reference is dropped).
        band_cur = None
        cur_ = None

    band_ref = None
    ref = None

    return diff
|
930
|
+
|
931
|
+
|
932
|
+
|
933
|
+
|
934
|
+
# Assembly (FR : agglomération)
|
935
|
+
# -----------------------------
|
936
|
+
"""Basically the same operations as in the config manager to agglomerate several rasters
|
937
|
+
The class Config_Manager_2D_GPU is called, however some functions were rewritten to allow
|
938
|
+
the search of a more specific word ('vuln', and not 'bath', 'mann', or 'inf').
|
939
|
+
"""
|
940
|
+
|
941
|
+
def tree_vuln_tif(folder_path):
    """Find all .tif/.tiff files whose name starts with 'vuln' under *folder_path*.

    :param folder_path: root directory to search recursively
    :return: list of "trees"; each tree is the chain of sub-directories
             from (but excluding) *folder_path* down to the matching file,
             the file itself being the last element.
    """
    root = Path(folder_path)

    # Collect both extensions; the rglob patterns are mutually exclusive
    # so the resulting set union is disjoint.
    matches = {hit
               for pattern in ("*.tiff", "*.tif")
               for hit in root.rglob(pattern)
               if hit.name.startswith("vuln")}

    trees = []
    for hit in matches:
        # Walk upwards until the search root, prepending each directory.
        chain = [hit]
        node = hit
        while node.parent != root:
            node = node.parent
            chain.insert(0, node)
        trees.append(chain)
    return trees
|
958
|
+
|
959
|
+
def select_vuln_tif(self, path_baseline: Path, folder_path: Path) -> list[Path]:
|
960
|
+
"""
|
961
|
+
Collects and appends all .tiff files starting with 'vuln' from folder_path into a list.
|
962
|
+
"""
|
963
|
+
files = []
|
964
|
+
#first element must be vulnerability_baseline
|
965
|
+
files.append(path_baseline.as_posix())
|
966
|
+
tiff_trees = Accept_Manager.tree_vuln_tif(folder_path)
|
967
|
+
|
968
|
+
#only the 'final' path of the tree
|
969
|
+
for tree in tiff_trees:
|
970
|
+
files.append(tree[-1].as_posix())
|
971
|
+
return files
|
972
|
+
|
973
|
+
def check_nodata(self):
    """ Check nodata in a path """
    # Harmonize the nodata value of every candidate 'vuln*' raster (the
    # baseline Vulnerability.tiff is excluded) to 99999. so the later VRT
    # assembly treats all layers consistently.

    list_tif = Accept_Manager.select_vuln_tif(self, self.OUT_VULN, self.IN_CH_SA_SC)
    for cur_lst in list_tif:
        if "Vulnerability.tiff" not in cur_lst:
            curarray:WolfArray = WolfArray(cur_lst)
            if curarray.nullvalue != 99999.:
                # Rewrite the raster on disk with the harmonized nodata value.
                curarray.nullvalue = 99999.
                curarray.set_nullvalue_in_mask()
                curarray.write_all()
                logging.warning(_('nodata changed in favor of 99999. value for file {} !'.format(cur_lst)))
|
985
|
+
|
986
|
+
def create_vrtIfExists(self):
|
987
|
+
""" Create a vrt file from a path """
|
988
|
+
logging.info(_('Checking nodata values...'))
|
989
|
+
self.check_nodata()
|
990
|
+
list_tif = Accept_Manager.select_vuln_tif(self, self.OUT_VULN, self.IN_CH_SA_SC)
|
991
|
+
#création du fichier vrt - assembly/agglomération
|
992
|
+
if len(list_tif)>1:
|
993
|
+
logging.info(_('Creating .vrt from files (first based)...'))
|
994
|
+
create_vrt_from_diverged_files_first_based(list_tif, self.OUT_VULN_VRT)
|
995
|
+
return True
|
996
|
+
else:
|
997
|
+
return False
|
998
|
+
|
999
|
+
|
1000
|
+
def translate_vrt2tif(self):
|
1001
|
+
""" Translate vrt from OUTPUT > ... > Scenario to tif saved in the same folder """
|
1002
|
+
if (self.OUT_VULN_VRT).exists():
|
1003
|
+
translate_vrt2tif(self.OUT_VULN_VRT, self.OUT_VULN_S)
|
1004
|
+
|
1005
|
+
def copy_tif_files(self, files: list[Path], destination_dir: Path) -> None:
    """Copy each GeoTIFF in *files* into *destination_dir* (created if needed).

    Files that GDAL cannot open are skipped with a warning.

    :param files: source raster files
    :param destination_dir: target directory (created when missing)
    """
    destination_dir.mkdir(parents=True, exist_ok=True)

    # Hoisted out of the loop -- the driver never changes between files.
    gdal_driver = gdal.GetDriverByName('GTiff')

    for file in files:
        destination_file = destination_dir / file.name
        dataset = gdal.Open(str(file))
        if dataset is None:
            logging.warning(f"Could not open {file} with GDAL.")
            continue
        # Fix: keep and then dereference the copy as well, so GDAL flushes
        # and closes the destination file promptly.
        copy_ds = gdal_driver.CreateCopy(str(destination_file), dataset, strict=0)
        copy_ds = None
        dataset = None

    logging.info("All .tif files have been copied to the destination directory.")
|
1020
|
+
|
1021
|
+
|
1022
|
+
def clip_layer(layer:str,
           file_path:str,
           Study_Area:str,
           output_dir:str):
    """
    Clip the input data based on the selected bassin and saves it
    in separate shape files.

    As shape file does not support DateTime, the columns with DateTime
    are converted to string.

    :param layer: the layer name in the GDB file
    :param file_path: the path to the GDB file
    :param Study_Area: the path to the study area shapefile
    :param output_dir: the path to the output directory
    :return: a short status message (also logged)
    """

    layer = str(layer)
    file_path = str(file_path)
    Study_Area = str(Study_Area)
    output_dir = Path(output_dir)

    St_Area = gpd.read_file(Study_Area, engine=ENGINE)

    logging.info(layer)

    # The data is clipped during the reading
    # **It is more efficient than reading the entire data and then clipping it**
    #
    # FIXME: "read_dataframe" is used directly rather than "gpd.read_file" cause
    # the "layer" parameter is well transmitted to the "read_dataframe" function...
    df:gpd.GeoDataFrame = read_dataframe(file_path, layer=layer, mask=St_Area['geometry'][0])

    if len(df) == 0:
        logging.warning("No data found for layer " + str(layer))
        return "No data found for layer " + str(layer)

    # Force Lambert72 -> EPSG:31370
    df.to_crs("EPSG:31370", inplace=True)
    try:
        # Shapefiles cannot store timezone-aware datetimes -> stringify them.
        date_columns = df.select_dtypes(include=['datetimetz']).columns.tolist()
        if len(date_columns)>0:
            df[date_columns] = df[date_columns].astype(str)

        df.to_file(str(output_dir / (layer+EXTENT)), mode='w', engine=ENGINE)
    except Exception as e:
        logging.error("Error while saving the clipped " + str(layer) + " to file")
        logging.error(e)
        # Bug fix: the historical code fell through after the except and
        # logged/returned "Saved ... to file" even when the save failed.
        # Report the failure to the caller instead.
        return "Error while saving the clipped " + str(layer) + " to file"

    logging.info("Saved the clipped " + str(layer) + " to file")
    return "Saved the clipped " +str(layer)+ " to file"
|
1074
|
+
|
1075
|
+
|
1076
|
+
def data_modification(layer:str,
|
1077
|
+
manager:Accept_Manager,
|
1078
|
+
picc:gpd.GeoDataFrame,
|
1079
|
+
capa:gpd.GeoDataFrame ):
|
1080
|
+
"""
|
1081
|
+
Apply the data modifications as described in the LEMA report
|
1082
|
+
|
1083
|
+
FIXME : Add more doc in this docstring
|
1084
|
+
|
1085
|
+
:param input_database: the path to the input database
|
1086
|
+
:param layer: the layer name in the database
|
1087
|
+
:param output_database: the path to the output database
|
1088
|
+
:param picc: the PICC Walloon file -- Preloaded
|
1089
|
+
:param capa: the Cadastre Walloon file -- Preloaded
|
1090
|
+
"""
|
1091
|
+
|
1092
|
+
df1:gpd.GeoDataFrame
|
1093
|
+
df2:gpd.GeoDataFrame
|
1094
|
+
|
1095
|
+
layer = str(layer)
|
1096
|
+
|
1097
|
+
dir_input = manager.TMP_CLIPGDB
|
1098
|
+
dir_output = manager.TMP_WMODIF
|
1099
|
+
|
1100
|
+
input_file = str(dir_input / (layer + EXTENT))
|
1101
|
+
output_file = str(dir_output / (layer + EXTENT))
|
1102
|
+
|
1103
|
+
# Read the data
|
1104
|
+
df:gpd.GeoDataFrame = gpd.read_file(input_file, engine=ENGINE)
|
1105
|
+
nblines, _ = df.shape
|
1106
|
+
|
1107
|
+
if nblines>0:
|
1108
|
+
op = manager.get_operand(input_file)
|
1109
|
+
|
1110
|
+
if op == Modif_Type.WALOUS:
|
1111
|
+
# Walous layers changed to PICC buidings
|
1112
|
+
|
1113
|
+
assert picc.crs == df.crs, "CRS of PICC and input data do not match"
|
1114
|
+
|
1115
|
+
assert "GEOREF_ID" in picc.columns, "The PICC file does not contain the GEOREF_ID column"
|
1116
|
+
assert "NATUR_CODE" in picc.columns, "The PICC file does not contain the NATUR_CODE column"
|
1117
|
+
|
1118
|
+
df1 = gpd.sjoin(picc, df, how="inner", predicate="intersects" )
|
1119
|
+
cols = df.columns
|
1120
|
+
|
1121
|
+
cols = np.append(cols, "GEOREF_ID")
|
1122
|
+
cols = np.append(cols, "NATUR_CODE")
|
1123
|
+
|
1124
|
+
df1 = df1[cols]
|
1125
|
+
|
1126
|
+
if df1.shape[0] > 0:
|
1127
|
+
assert manager.is_polygons(set(df1.geom_type)), f"The layer does not contains polygons - {op}"
|
1128
|
+
df1.to_file(output_file, engine=ENGINE)
|
1129
|
+
else:
|
1130
|
+
logging.warning("No data found for layer " + str(layer))
|
1131
|
+
|
1132
|
+
elif op == Modif_Type.POINT2POLY_EPURATION:
|
1133
|
+
# Change BDREF based on AJOUT_PDET sent by Perrine (SPI)
|
1134
|
+
|
1135
|
+
# The original layer is a point layer.
|
1136
|
+
# The EPU_STATIONS shape file (from SPI) is a polygon layer.
|
1137
|
+
|
1138
|
+
df1 = gpd.read_file(str(manager.EPU_STATIONS), engine=ENGINE)
|
1139
|
+
|
1140
|
+
assert df1.crs == df.crs, "CRS of AJOUT_PDET and input data do not match"
|
1141
|
+
|
1142
|
+
df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects" )
|
1143
|
+
|
1144
|
+
if df2.shape[0] > 0:
|
1145
|
+
assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contains polygons - {op}"
|
1146
|
+
df2.to_file(output_file, engine=ENGINE)
|
1147
|
+
else:
|
1148
|
+
logging.warning("No data found for layer " + str(layer))
|
1149
|
+
|
1150
|
+
elif op == Modif_Type.POINT2POLY_PICC:
|
1151
|
+
# Select the polygons that contains the points
|
1152
|
+
# in theCadaster and PICC files
|
1153
|
+
|
1154
|
+
assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
|
1155
|
+
assert "CaPaKey" in capa.columns, "The CaPa file does not contain the CaPaKey column"
|
1156
|
+
|
1157
|
+
df1= gpd.sjoin(capa, df, how="inner", predicate="intersects" )
|
1158
|
+
cols=df.columns
|
1159
|
+
|
1160
|
+
cols = np.append(cols, "CaPaKey")
|
1161
|
+
df1=df1[cols]
|
1162
|
+
df2=gpd.sjoin(picc, df1, how="inner", predicate="intersects" )
|
1163
|
+
|
1164
|
+
if df2.shape[0] > 0:
|
1165
|
+
assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contains polygons - {op}"
|
1166
|
+
df2.to_file(output_file, engine=ENGINE)
|
1167
|
+
else:
|
1168
|
+
logging.warning("No data found for layer " + str(layer))
|
1169
|
+
|
1170
|
+
elif op == Modif_Type.POINT2POLY_CAPAPICC:
|
1171
|
+
|
1172
|
+
# Select the polygons that contains the points
|
1173
|
+
# in theCadaster and PICC files
|
1174
|
+
|
1175
|
+
assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
|
1176
|
+
assert picc.crs == df.crs, "CRS of PICC and input data do not match"
|
1177
|
+
|
1178
|
+
# Join the Layer and CaPa DataFrames : https://geopandas.org/en/stable/docs/reference/api/geopandas.sjoin.html
|
1179
|
+
# ‘inner’: use intersection of keys from both dfs; retain only left_df geometry column
|
1180
|
+
# "intersects" : Binary predicate. Valid values are determined by the spatial index used.
|
1181
|
+
df1= gpd.sjoin(capa, df, how="inner", predicate="intersects" )
|
1182
|
+
|
1183
|
+
# Retain only the columns of the input data
|
1184
|
+
cols = df.columns
|
1185
|
+
# but add the CaPaKey
|
1186
|
+
cols = np.append(cols, "CaPaKey")
|
1187
|
+
|
1188
|
+
df1 = df1[cols]
|
1189
|
+
|
1190
|
+
# Join the df1 and PICC DataFrames : https://geopandas.org/en/stable/docs/reference/api/geopandas.sjoin.html
|
1191
|
+
df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects" )
|
1192
|
+
|
1193
|
+
# Add only the GEOREF_ID and NATUR_CODE columns from PICC
|
1194
|
+
cols = np.append(cols, "GEOREF_ID")
|
1195
|
+
cols = np.append(cols, "NATUR_CODE")
|
1196
|
+
|
1197
|
+
df2 = df2[cols]
|
1198
|
+
|
1199
|
+
if df2.shape[0] > 0:
|
1200
|
+
assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contains polygons - {op}"
|
1201
|
+
df2.to_file(output_file, engine=ENGINE)
|
1202
|
+
else:
|
1203
|
+
logging.warning("No data found for layer " + str(layer))
|
1204
|
+
|
1205
|
+
elif op == Modif_Type.INHABITED:
|
1206
|
+
# Select only the buildings with a number of inhabitants > 0
|
1207
|
+
df1=df[df["NbsHabTOT"]>0]
|
1208
|
+
|
1209
|
+
if df1.shape[0] > 0:
|
1210
|
+
assert manager.is_polygons(set(df1.geom_type)), f"The layer does not contains polygons - {op}"
|
1211
|
+
df1.to_file(output_file, engine=ENGINE)
|
1212
|
+
else:
|
1213
|
+
logging.warning("No data found for layer " + str(layer))
|
1214
|
+
|
1215
|
+
elif op == Modif_Type.ROAD:
|
1216
|
+
# Create a buffer around the roads
|
1217
|
+
df1=df.buffer(distance=6, cap_style=2)
|
1218
|
+
|
1219
|
+
if df1.shape[0] > 0:
|
1220
|
+
assert set(df1.geom_type) == {'Polygon'}, f"The layer does not contains polygons - {op}"
|
1221
|
+
df1.to_file(output_file, engine=ENGINE)
|
1222
|
+
else:
|
1223
|
+
logging.warning("No data found for layer " + str(layer))
|
1224
|
+
|
1225
|
+
elif op == Modif_Type.COPY:
|
1226
|
+
# just copy the data if it is polygons
|
1227
|
+
if manager.is_polygons(set(df.geom_type)):
|
1228
|
+
df.to_file(output_file, engine=ENGINE)
|
1229
|
+
else:
|
1230
|
+
logging.error("The layer does not contains polygons - " + str(layer))
|
1231
|
+
else:
|
1232
|
+
raise ValueError(f"The operand {op} is not recognized")
|
1233
|
+
|
1234
|
+
return "Data modification done for " + str(layer)
|
1235
|
+
else:
|
1236
|
+
# Normally, phase 1 does not create empty files
|
1237
|
+
# But it is better to check... ;-)
|
1238
|
+
logging.error("skipped" + str(layer) + "due to no polygon in the study area")
|
1239
|
+
return "skipped" + str(layer) + "due to no polygon in the study area"
|
1240
|
+
|
1241
|
+
def compute_vulnerability(manager:Accept_Manager):
    """
    Compute the vulnerability raster for the Study Area.

    Every layer raster found in the "vulnerability" temporary directory is
    merged into a single matrix by keeping, for each pixel, the highest
    vulnerability level found across layers (levels come from the
    vulnerability CSV, 1 being the lowest).

    This function **will not modify** the data by the removed buildings/scenarios.

    :param manager: the Accept_Manager object from the calling function
    """

    vuln_csv = Vulnerability_csv(manager.VULNERABILITY_CSV)

    rasters_vuln = manager.get_files_in_rasters_vulne()

    logging.info("Number of files: {}".format(len(rasters_vuln)))

    ds:gdal.Dataset = gdal.OpenEx(str(rasters_vuln[0]), gdal.GA_ReadOnly, open_options=["SPARSE_OK=TRUE"])

    tmp_vuln = ds.GetRasterBand(1)

    # REMARK: The XSize and YSize are the number of columns and rows
    col, row = tmp_vuln.XSize, tmp_vuln.YSize

    logging.info("Computing Vulnerability")

    # Start from 1 everywhere == lowest vulnerability level
    array_vuln = np.ones((row, col), dtype=np.int8)

    # Create a JIT function to update the arrays
    # Faster than the classical Python loop or Numpy
    @nb.jit(nopython=True, boundscheck=False, inline='always')
    def update_arrays_jit(tmp_vuln, array_vuln):
        # Dense update: keep the maximum of the current array and the layer
        for i in range(tmp_vuln.shape[0]):
            for j in range(tmp_vuln.shape[1]):
                if tmp_vuln[i, j] >= array_vuln[i, j]:
                    array_vuln[i, j] = tmp_vuln[i, j]

        return array_vuln

    @nb.jit(nopython=True, boundscheck=False, inline='always')
    def update_arrays_jit_csr(row, col, locvuln, array_vuln):
        # Sparse (CSR) update: only the stored cells receive the layer level
        for k in range(len(row)-1):
            i = k
            j1 = row[k]
            j2 = row[k+1]
            for j in col[j1:j2]:
                if locvuln >= array_vuln[i, j]:
                    array_vuln[i, j] = locvuln

        return array_vuln

    for i in tqdm(range(len(rasters_vuln)), 'Computing Vulnerability : '):
        logging.info("Computing layer {} / {}".format(i, len(rasters_vuln)))

        locvuln = vuln_csv.get_vulnerability_level(rasters_vuln[i].stem)

        if locvuln == 1:
            logging.info("No need to apply the matrice, the vulnerability is 1 which is the lower value")
            continue

        if rasters_vuln[i].with_suffix('.npz').exists():
            # Sparse layer stored as CSR indices (see vector_to_raster)
            ij_npz = np.load(rasters_vuln[i].with_suffix('.npz'))
            ii = ij_npz['row']
            jj = ij_npz['col']
            # We use the jit
            update_arrays_jit_csr(ii, jj, locvuln, array_vuln)

        else:
            ds = gdal.OpenEx(str(rasters_vuln[i]), open_options=["SPARSE_OK=TRUE"])
            tmp_vuln = ds.GetRasterBand(1).ReadAsArray()
            # We use the jit
            update_arrays_jit(tmp_vuln, array_vuln)

    logging.info("Saving the computed vulnerability")
    dst_filename= str(manager.SA_VULN)
    y_pixels, x_pixels = array_vuln.shape # number of pixels in x

    driver = gdal.GetDriverByName('GTiff')
    # FIX: the band count and the data type were swapped w.r.t. the GDAL
    # Driver.Create signature (name, xsize, ysize, bands, eType).
    # It only worked by coincidence because gdal.GDT_Byte == 1.
    dataset = driver.Create(dst_filename,
                            x_pixels, y_pixels,
                            1,
                            gdal.GDT_Byte,
                            options=["COMPRESS=LZW"])

    dataset.GetRasterBand(1).WriteArray(array_vuln.astype(np.int8))
    # follow code is adding GeoTranform and Projection
    # NOTE(review): 'ds' is the last dense raster opened in the loop (or the
    # first file if every layer took the sparse path) -- assumes all layers
    # share the same georeferencing; confirm upstream.
    geotrans = ds.GetGeoTransform() # get GeoTranform from existed 'data0'
    proj = ds.GetProjection() # you can get from a exsited tif or import
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None

    logging.info("Computed Vulnerability for the Study Area - Done")
|
1331
|
+
|
1332
|
+
def compute_code(manager:Accept_Manager):
    """
    Compute the code raster for the Study Area.

    Every "*_CODE" layer raster is merged into a single matrix; the winning
    layer code (taken from the vulnerability CSV) is written on each pixel
    selected by the JIT kernels below.

    This function **will not modify** the data by the removed buildings/scenarios.

    :param manager: the Accept_Manager object from the calling function
    """

    vuln_csv = Vulnerability_csv(manager.VULNERABILITY_CSV)
    rasters_code = manager.get_files_in_rasters_code()

    logging.info("Number of files: {}".format(len(rasters_code)))

    ds:gdal.Dataset = gdal.OpenEx(str(rasters_code[0]), gdal.GA_ReadOnly, open_options=["SPARSE_OK=TRUE"])

    tmp_code = ds.GetRasterBand(1)

    # REMARK: The XSize and YSize are the number of columns and rows
    col, row = tmp_code.XSize, tmp_code.YSize

    logging.info("Computing Code")

    array_code = np.ones((row, col), dtype=np.int8)

    # Create a JIT function to update the arrays
    # Faster than the classical Python loop or Numpy
    @nb.jit(nopython=True, boundscheck=False, inline='always')
    def update_arrays_jit(tmp_code, loccode, array_code):
        # NOTE(review): the test compares the rasterized value to the current
        # array but writes 'loccode' (unlike compute_vulnerability which
        # writes the raster value) -- confirm this asymmetry is intended.
        for i in range(tmp_code.shape[0]):
            for j in range(tmp_code.shape[1]):
                if tmp_code[i, j] >= array_code[i, j]:
                    array_code[i, j] = loccode

        return array_code

    @nb.jit(nopython=True, boundscheck=False, inline='always')
    def update_arrays_jit_csr(row, col, loccode, array_code):
        # Sparse (CSR) update: only the stored cells receive the layer code
        for k in range(len(row)-1):
            i = k
            j1 = row[k]
            j2 = row[k+1]
            for j in col[j1:j2]:
                if loccode >= array_code[i, j]:
                    array_code[i, j] = loccode

        return array_code

    for i in tqdm(range(len(rasters_code)), 'Computing Code : '):
        logging.info("Computing layer {} / {}".format(i, len(rasters_code)))

        loccode = vuln_csv.get_vulnerability_code(rasters_code[i].stem.removesuffix("_CODE"))

        if rasters_code[i].with_suffix('.npz').exists():
            # Sparse layer stored as CSR indices (see vector_to_raster)
            ij_npz = np.load(rasters_code[i].with_suffix('.npz'))
            ii = ij_npz['row']
            jj = ij_npz['col']
            # We use the jit
            update_arrays_jit_csr(ii, jj, loccode, array_code)

        else:
            ds = gdal.OpenEx(str(rasters_code[i]), open_options=["SPARSE_OK=TRUE"])
            tmp_code = ds.GetRasterBand(1).ReadAsArray()
            # We use the jit
            update_arrays_jit(tmp_code, loccode, array_code)

    logging.info("Saving the computed codes")
    dst_filename= str(manager.SA_CODE)
    y_pixels, x_pixels = array_code.shape # number of pixels in x
    driver = gdal.GetDriverByName('GTiff')
    # FIX: band count and data type were swapped w.r.t. the GDAL
    # Driver.Create signature (name, xsize, ysize, bands, eType);
    # it only worked because gdal.GDT_Byte == 1.
    dataset = driver.Create(dst_filename,
                            x_pixels, y_pixels,
                            1,
                            gdal.GDT_Byte,
                            options=["COMPRESS=LZW"])

    dataset.GetRasterBand(1).WriteArray(array_code.astype(np.int8))
    # follow code is adding GeoTranform and Projection
    # NOTE(review): georeferencing is copied from the last raster opened in
    # the loop (or the first file if all layers were sparse).
    geotrans = ds.GetGeoTransform() # get GeoTranform from existed 'data0'
    proj = ds.GetProjection() # you can get from a exsited tif or import
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None

    logging.info("Computed Code for the Study Area - Done")
|
1418
|
+
|
1419
|
+
def compute_vulnerability4scenario(manager:Accept_Manager):
    """ Compute the vulnerability for the scenario

    This function **will modify** the data by the removed buildings/scenarios.

    FIXME: It could be interseting to permit the user to provide tiff files for the removed buildings and other scenarios.

    :param manager: the Accept_Manager object from the calling function
    """

    # Baseline vulnerability of the Study Area + its georeferencing
    array_vuln = gdal.Open(str(manager.SA_VULN))
    geotrans = array_vuln.GetGeoTransform() # get GeoTranform from existed 'data0'
    proj = array_vuln.GetProjection() # you can get from a exsited tif or import

    array_vuln = np.array(array_vuln.GetRasterBand(1).ReadAsArray())

    array_code = gdal.Open(str(manager.SA_CODE))
    array_code = np.array(array_code.GetRasterBand(1).ReadAsArray())

    Rbu = manager.get_files_in_rm_buildings()

    if len(Rbu)>0:
        for curfile in Rbu:
            array_mod = gdal.Open(str(curfile))
            array_mod = np.array(array_mod.GetRasterBand(1).ReadAsArray())

            # Cells flagged 1 in the "removed buildings" raster are reset to
            # the lowest vulnerability/code value
            ij = np.argwhere(array_mod == 1)
            array_vuln[ij[:,0], ij[:,1]] = 1
            array_code[ij[:,0], ij[:,1]] = 1

    dst_filename= str(manager.TMP_VULN)
    y_pixels, x_pixels = array_vuln.shape # number of pixels in x

    driver = gdal.GetDriverByName('GTiff')
    # FIX: band count and data type were swapped w.r.t. the GDAL Driver.Create
    # signature (name, xsize, ysize, bands, eType); it only worked because
    # gdal.GDT_Byte == 1.
    dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
    dataset.GetRasterBand(1).WriteArray(array_vuln.astype(np.int8))
    # follow code is adding GeoTranform and Projection
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None


    dst_filename= str(manager.TMP_CODE)
    y_pixels, x_pixels = array_code.shape # number of pixels in x
    driver = gdal.GetDriverByName('GTiff')
    # FIX: same band/type swap corrected as above.
    dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
    dataset.GetRasterBand(1).WriteArray(array_code.astype(np.int8))
    # follow code is adding GeoTranform and Projection
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None

    logging.info("Computed Vulnerability and code for the scenario")
|
1474
|
+
|
1475
|
+
def match_vulnerability2sim(inRas:Path, outRas:Path, MODREC:Path):
    """
    Clip the raster to the MODREC/simulation extent

    :param inRas: the input raster file
    :param outRas: the output raster file
    :param MODREC: the MODREC/simulation extent file

    """

    inRas = str(inRas)
    outRas = str(outRas)
    MODREC = str(MODREC)

    # Derive the bounding box of the MODREC raster from its geotransform
    modrec_ds = gdal.Open(MODREC, gdalconst.GA_ReadOnly)
    ulx, pix_w, _, uly, _, pix_h = modrec_ds.GetGeoTransform()
    lrx = ulx + pix_w * modrec_ds.RasterXSize
    lry = uly + pix_h * modrec_ds.RasterYSize

    # Clip the input raster onto that window (ulx, uly, lrx, lry)
    src_ds = gdal.Open(inRas)
    src_ds = gdal.Translate(outRas, src_ds, projWin = [ulx, uly, lrx, lry])
    src_ds = None
|
1498
|
+
|
1499
|
+
|
1500
|
+
@nb.jit(nopython=True, boundscheck=False, inline='always')
def update_accept(accept, model_h, ij, bounds, loc_accept):
    # For each water-depth interval, stamp its acceptability score on every
    # listed cell whose modelled depth falls inside that interval
    # (lower bound excluded, upper bound included).
    for k in range(len(bounds)):
        low = bounds[k, 0]
        high = bounds[k, 1]
        for i, j in ij:
            h = model_h[i, j]
            if h > low and h <= high:
                accept[i, j] = loc_accept[k]
|
1506
|
+
|
1507
|
+
def compute_acceptability(manager:Accept_Manager,
                          model_h:np.ndarray,
                          vulnerability:np.ndarray,
                          interval:int,
                          geo_projection:tuple,
                          save_to_file:bool=True) -> np.ndarray:

    """
    Compute the local acceptability based on :
    - the vulnerability
    - the water depth
    - the matrices

    :param manager: the Accept_Manager object from the calling function
    :param model_h: the water depth matrix
    :param vulnerability: the vulnerability matrix
    :param interval: the return period
    :param geo_projection: the geotransform and the projection - tuple extracted from another raster file

    """

    logging.info(interval)

    # Acceptability scores per (vulnerability level, water-depth class)
    points_accept = pd.read_csv(manager.POINTS_CSV)

    points_accept = points_accept[points_accept["Interval"]==interval] #les wd vs Ti matrices
    points_accept = points_accept.reset_index()

    accept = np.zeros(vulnerability.shape, dtype=np.float32)

    # Water-depth class boundaries [m] (lower bound excluded)
    bounds = np.asarray([[0., 0.02], [0.02, 0.3], [0.3, 1], [1, 2.5], [2.5, 1000]], dtype=np.float32)

    depth_columns = ["h-0", "h-0.02", "h-0.3", "h-1", "h-2.5"]

    for vuln_level in range(1, 6):
        cells = np.argwhere(vulnerability == vuln_level)

        # Row 5 - level : assumes CSV rows are ordered from the highest
        # vulnerability level down (matches the original indexing).
        csv_row = 5 - vuln_level
        scores = [points_accept[col][csv_row] for col in depth_columns]

        update_accept(accept, model_h, cells, bounds, scores)

    if save_to_file:
        #save raster
        dst_filename = str(manager.TMP_QFILES / "Q{}.tif".format(interval)) #les Qi

        y_pixels, x_pixels = accept.shape # number of pixels in x
        driver = gdal.GetDriverByName('GTiff')
        dataset = driver.Create(dst_filename,
                                x_pixels, y_pixels,
                                1,
                                gdal.GDT_Float32,
                                options=["COMPRESS=LZW"])

        dataset.GetRasterBand(1).WriteArray(accept.astype(np.float32))

        geotrans, proj = geo_projection
        dataset.SetGeoTransform(geotrans)
        dataset.SetProjection(proj)
        dataset.FlushCache()
        dataset = None

    return accept
|
1572
|
+
|
1573
|
+
def shp_to_raster(vector_fn:str, raster_fn:str, pixel_size:float = 1., manager:Accept_Manager = None):
    """
    Convert a vector layer to a raster tiff file.

    The raster will contain only 2 values : 0 and 1

    - 1 : the inside of the vector layer
    - 0 : the rest == NoData/NullValue

    :param vector_fn: the path to the vector file
    :param raster_fn: the path to the raster file
    :param pixel_size: the pixel size of the raster
    :param manager: if provided, its study-area file (manager.SA) defines the raster extent
    """

    # Force the input to be a string
    vector_fn = str(vector_fn)
    raster_fn = str(raster_fn)

    if manager is None:
        extent_fn = vector_fn
        logging.warning("The extent file is not provided, the extent will be the same as the vector file")
    else:
        extent_fn = str(manager.SA)
        logging.info("The extent file is provided")

    NoData_value = 0 # np.nan is not necessary a good idea

    # Open the data sources and read the extents
    source_ds:ogr.DataSource = ogr.Open(vector_fn)
    source_layer = source_ds.GetLayer()

    extent_ds:ogr.DataSource = ogr.Open(extent_fn)
    extent_layer = extent_ds.GetLayer()
    x_min, x_max, y_min, y_max = extent_layer.GetExtent()

    # Snap the bounds outward to integer coordinates
    x_min = float(int(x_min))
    x_max = float(np.ceil(x_max))
    y_min = float(int(y_min))
    y_max = float(np.ceil(y_max))

    # Create the destination data source
    x_res = int((x_max - x_min) / pixel_size)
    y_res = int((y_max - y_min) / pixel_size)

    target_ds = gdal.GetDriverByName('GTiff').Create(raster_fn,
                                                     x_res, y_res,
                                                     1,
                                                     gdal.GDT_Byte,
                                                     options=["COMPRESS=LZW",
                                                              'SPARSE_OK=TRUE'])

    target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(31370)   # Belgian Lambert 72
    target_ds.SetProjection(srs.ExportToWkt())
    band = target_ds.GetRasterBand(1)
    band.SetNoDataValue(NoData_value)
    # Rasterize the areas
    gdal.RasterizeLayer(target_ds,
                        bands = [1],
                        layer = source_layer,
                        burn_values = [1],
                        options=["ALL_TOUCHED=TRUE"])

    # Release the datasets (flushes the target to disk).
    # FIX: the previous trailing "vector_fn = raster_fn = None" only rebound
    # local strings (a no-op); the OGR sources are now released instead.
    target_ds = None
    source_ds = None
    extent_ds = None
|
1638
|
+
|
1639
|
+
def vector_to_raster(layer:str,
                     manager:Accept_Manager,
                     attribute:str,
                     pixel_size:float,
                     convert_to_sparse:bool = True):
    """
    Convert a vector layer to a raster tiff file

    FIXME: Test de vulerability value and return immedialty if it is 1 if attribute == "Vulne"

    :param layer: the layer name in the GDB file
    :param manager: the Accept_Manager giving access to the input/output locations
    :param attribute: the attribute to rasterize
    :param pixel_size: the pixel size of the raster
    :param convert_to_sparse: if True and the raster is sparse enough, also store the non-null cells as a compressed CSR .npz file

    """

    layer = str(layer)

    vector_input = str(manager.TMP_CODEVULNE / (layer + EXTENT))
    extent = str(manager.SA)
    attribute = str(attribute)
    pixel_size = float(pixel_size)

    # "Code" rasters get a distinct suffix so they do not clash with the
    # vulnerability rasters of the same layer
    if attribute == "Code":
        out_file = manager.TMP_RASTERS / attribute / (layer + "_CODE.tiff")
    else :
        out_file = manager.TMP_RASTERS / attribute / (layer + ".tiff")
    if out_file.exists():
        os.remove(out_file)

    out_file = str(out_file)

    NoData_value = 0

    extent_ds:ogr.DataSource = ogr.Open(extent)
    extent_layer = extent_ds.GetLayer()

    x_min, x_max, y_min, y_max = extent_layer.GetExtent()

    # Snap the bounds outward to integer coordinates
    x_min = float(int(x_min))
    x_max = float(np.ceil(x_max))
    y_min = float(int(y_min))
    y_max = float(np.ceil(y_max))

    # Open the data sources and read the extents
    source_ds:ogr.DataSource = ogr.Open(vector_input)
    if source_ds is None:
        logging.error(f"Could not open the data source {layer}")
        return
    source_layer = source_ds.GetLayer()

    # Create the destination data source
    x_res = int((x_max - x_min) / pixel_size)
    y_res = int((y_max - y_min) / pixel_size)
    # FIX: annotation corrected -- Create returns a gdal.Dataset, not a gdal.Driver
    target_ds:gdal.Dataset = gdal.GetDriverByName('GTiff').Create(out_file,
                                                                  x_res, y_res, 1,
                                                                  gdal.GDT_Byte,
                                                                  options=["COMPRESS=DEFLATE",
                                                                           'SPARSE_OK=TRUE',])

    target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(31370)   # Belgian Lambert 72
    target_ds.SetProjection(srs.ExportToWkt())

    band = target_ds.GetRasterBand(1)
    band.SetNoDataValue(NoData_value)

    # Rasterize the areas
    gdal.RasterizeLayer(target_ds, [1],
                        source_layer,
                        options=["ATTRIBUTE="+attribute,
                                 "ALL_TOUCHED=TRUE"])

    if convert_to_sparse:
        # Only keep a CSR companion file if less than 2% of cells are non-null
        SPARSITY_THRESHOLD = 0.02
        # Convert the raster to a npz containing the row and col of the non-null values
        array = band.ReadAsArray()
        ij = np.nonzero(array)

        if len(ij[0]) < int(x_res * y_res * SPARSITY_THRESHOLD):
            i,j = convert_to_csr(ij[0], ij[1], y_res)
            np.savez_compressed(Path(out_file).with_suffix('.npz'), row=np.asarray(i, dtype=np.int32), col=np.asarray(j, dtype=np.int32))
        else:
            # FIX: typo in the log message ("forma" -> "format")
            logging.info("The raster is not sparse enough to be converted to a CSR format {}".format(layer))

    target_ds = None

    return 0
|
1730
|
+
|
1731
|
+
@nb.jit(nopython=True, boundscheck=False, inline='always')
def convert_to_csr(i_indices, j_indices, num_rows):
    # Convert COO indices to CSR-style (row pointer, column index) lists.
    # NOTE(review): assumes i_indices is sorted in ascending row order,
    # which holds for np.nonzero output -- confirm for any other caller.
    pointers = [0] * (num_rows + 1)
    columns = []

    # Count the entries of each row (shifted by one slot) while copying the
    # column indices in their original order.
    for k in range(len(i_indices)):
        pointers[i_indices[k] + 1] += 1
        columns.append(j_indices[k])

    # Running sum turns per-row counts into row start offsets.
    for r in range(1, num_rows + 1):
        pointers[r] += pointers[r - 1]

    return pointers, columns
|