wolfhece 2.1.99__py3-none-any.whl → 2.1.101__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. wolfhece/PyDraw.py +220 -29
  2. wolfhece/PyGui.py +1039 -53
  3. wolfhece/PyVertexvectors.py +2 -2
  4. wolfhece/Results2DGPU.py +37 -13
  5. wolfhece/acceptability/Parallels.py +2 -2
  6. wolfhece/acceptability/_add_path.py +23 -0
  7. wolfhece/acceptability/acceptability.py +594 -563
  8. wolfhece/acceptability/acceptability_gui.py +564 -331
  9. wolfhece/acceptability/cli.py +307 -120
  10. wolfhece/acceptability/func.py +1754 -1597
  11. wolfhece/apps/version.py +1 -1
  12. wolfhece/bernoulli/losses.py +76 -23
  13. wolfhece/bernoulli/losses_jax.py +143 -0
  14. wolfhece/bernoulli/pipe.py +7 -2
  15. wolfhece/gpuview.py +4 -1
  16. wolfhece/libs/__init__.py +11 -10
  17. wolfhece/libs/wolfogl.cp310-win_amd64.pyd +0 -0
  18. wolfhece/math_parser/__init__.py +4 -4
  19. wolfhece/math_parser/calculator.py +51 -9
  20. wolfhece/mesh2d/bc_manager.py +25 -2
  21. wolfhece/mesh2d/gpu_2d.py +644 -0
  22. wolfhece/mesh2d/simple_2d.py +2817 -0
  23. wolfhece/mesh2d/wolf2dprev.py +5 -2
  24. wolfhece/pidcontroller.py +131 -0
  25. wolfhece/pywalous.py +7 -7
  26. wolfhece/scenario/config_manager.py +98 -21
  27. wolfhece/wolf_array.py +391 -176
  28. wolfhece/wolf_vrt.py +108 -7
  29. wolfhece/wolfresults_2D.py +113 -6
  30. wolfhece/xyz_file.py +91 -51
  31. {wolfhece-2.1.99.dist-info → wolfhece-2.1.101.dist-info}/METADATA +3 -1
  32. {wolfhece-2.1.99.dist-info → wolfhece-2.1.101.dist-info}/RECORD +35 -30
  33. {wolfhece-2.1.99.dist-info → wolfhece-2.1.101.dist-info}/WHEEL +1 -1
  34. {wolfhece-2.1.99.dist-info → wolfhece-2.1.101.dist-info}/entry_points.txt +0 -0
  35. {wolfhece-2.1.99.dist-info → wolfhece-2.1.101.dist-info}/top_level.txt +0 -0
@@ -1,1597 +1,1754 @@
1
- """
2
- Author: University of Liege, HECE, LEMA
3
- Date: 2024
4
-
5
- Copyright (c) 2024 University of Liege. All rights reserved.
6
-
7
- This script and its content are protected by copyright law. Unauthorized
8
- copying or distribution of this file, via any medium, is strictly prohibited.
9
- """
10
-
11
- import geopandas as gpd
12
- import pandas as pd
13
- import numpy as np
14
- from osgeo import gdal, ogr, osr, gdalconst
15
- import os
16
- import glob
17
- from pathlib import Path
18
- import logging
19
- from tqdm import tqdm
20
- from pyogrio import list_layers, read_dataframe
21
- from enum import Enum
22
- import numba as nb
23
-
24
- ENGINE = 'pyogrio' # or 'Fiona -- Pyogrio is faster
25
- EXTENT = '.gpkg'
26
- class Modif_Type(Enum):
27
- """
28
- Enum class for the type of modification
29
- """
30
-
31
- WALOUS = 'Walous layers changed to PICC buidings'
32
- POINT2POLY_EPURATION = 'Change BDREF based on AJOUT_PDET sent by Perrine (SPI)'
33
- POINT2POLY_PICC = 'Convert the points to polygons based on PICC'
34
- POINT2POLY_CAPAPICC = 'Convert the points to polygons based on PICC and CaPa'
35
- INHABITED = 'Select only inhabited buildings'
36
- ROAD = 'Create a buffer around the roads'
37
- COPY = 'Copy the data'
38
-
39
- class Vulnerability_csv():
40
-
41
- def __init__(self, file:Path) -> None:
42
- self.file = file
43
- self.data = pd.read_csv(file, sep=",", encoding='latin-1')
44
-
45
- def get_layers(self) -> list:
46
- return [a[1] for a in self.data["Path"].str.split('/')]
47
-
48
- def get_vulnerability_level(self, layer:str) -> str:
49
- idx = self.get_layers().index(layer)
50
- return self.data.iloc[idx]["Vulne"]
51
-
52
- def get_vulnerability_code(self, layer:str) -> str:
53
- idx = self.get_layers().index(layer)
54
- return self.data.iloc[idx]["Code"]
55
-
56
-
57
def get_data_type(fname:Path):
    """ Get the data type (GDAL/OGR driver name) of the input file from its extension.

    :param fname: path to the data file (str or Path)
    :return: 'GPKG', 'ESRI Shapefile', 'OpenfileGDB', or None for an unknown extension
    """
    # NOTE: in the original code this docstring was placed *after* the first
    # statement, making it a dead string literal instead of a docstring.

    fname = Path(fname)

    # Ordered mapping: extension suffix -> GDAL/OGR driver name
    drivers = (('.gpkg', 'GPKG'),
               ('.shp',  'ESRI Shapefile'),
               ('.gdb',  'OpenfileGDB'))

    for ext, driver in drivers:
        if fname.name.endswith(ext):
            return driver

    # Unknown extension
    return None
-
70
def cleaning_directory(dir:Path):
    """ Remove every *file* directly inside `dir`.

    Subdirectories and their contents are left untouched.

    :param dir: directory to clean (must exist)
    """

    logging.info("Cleaning the directory %s", dir)

    # pathlib idiom: unlink() instead of os.remove(); no need to
    # materialize the iterator since we only delete files, not dirs
    for item in dir.iterdir():
        if item.is_file():
            item.unlink()
-
80
class Accept_Manager():
    """
    Structure to store the directories and names of the files.

    In the main directory, the following directories are mandatory/created:
        - INPUT : filled by the user - contains the input data
        - TEMP : created by the script - contains the temporary data for the study area
        - OUTPUT: created by the script - contains the output data for each scenario of the study area

    The INPUT directory contains the following subdirectories:
        - DATABASE: contains the data for the **entire Walloon region**
            - Cadastre_Walloon.gpkg: the Cadastre Walloon file
            - GT_Resilence_dataRisques202010.gdb: the original gdb file from SPW - GT Resilience
            - PICC-vDIFF.gdb: the PICC Walloon file
            - CE_IGN_TOP10V: the IGN top10v shapefile
            - EPU_STATIONS_NEW:
                - AJOUT_PDET_EPU_DG03_STATIONS.shp: the EPU stations shapefile
        - STUDY_AREA: contains the study area shapefiles - one for each study area - e.g. Bassin_Vesdre.shp
        - CSVs: contains the CSV files
            - Intermediate.csv: contains the matrices data for the acceptability computation
            # - Ponderation.csv: contains the ponderation data for the acceptability computation
            - Vulnerability.csv: contains the mapping between layers and vulnerability levels - a code value is also provided
        - WATER_DEPTH: contains the water depth data for each scenario
            - Study_area1:
                - Scenario1
                - Scenario2
                -...
                - ScenarioN
            - Study_area2:
                - Scenario1
                - Scenario2
                -...
                - ScenarioN
            -...
            - Study_areaN:
                - Scenario1
                - Scenario2
                -...
                - ScenarioN

    The TEMP directory contains the following subdirectories:
        - DATABASES: contains the temporary data each study area
            - Study_area1:
                - database.gpkg: the clipped database
                - CaPa.gpkg: the clipped Cadastre Walloon file
                - PICC.gpkg: the clipped PICC Walloon file
                - CE_IGN_TOP10V.tiff: the IGN top10v raster file
                - Maske_River_extent.tiff: the river extent raster file from IGN
                - VULNERABILITY: the vulnerability data
                    - RASTERS:
                        - Code : one file for each layer
                        - Vulne : one file for each layer
                    - Scenario1:

    """

    def __init__(self,
                 main_dir:str = 'Data',
                 Study_area:str = 'Bassin_Vesdre.shp',
                 scenario = None,
                 Original_gdb:str = 'GT_Resilence_dataRisques202010.gdb',
                 CaPa_Walloon:str = 'Cadastre_Walloon.gpkg',
                 PICC_Walloon:str = 'PICC_vDIFF.gdb',
                 CE_IGN_top10v:str = 'CE_IGN_TOP10V/CE_IGN_TOP10V.shp',
                 EPU_Stations:str = 'AJOUT_PDET_EPU_DG03_STATIONS.shp',
                 Ponderation_csv:str = 'Ponderation.csv',
                 Vuln_csv:str = 'Vulnerability.csv',
                 Intermediate_csv:str = 'Intermediate.csv'
                 ) -> None:

        # Directory at creation time -- restored by "restore_dir"
        self.old_dir:Path = Path(os.getcwd())

        self.main_dir:Path = Path(main_dir)

        # If it is a string, concatenate it with the current directory
        if not self.main_dir.is_absolute():
            self.main_dir = Path(os.getcwd()) / self.main_dir

        # NOTE(review): str(None) == 'None', so "_study_area" is never None here
        # even when Study_area is None, although "create_paths" tests it
        # against None -- TODO confirm intended behavior
        self._study_area = str(Study_area)

        if Study_area is not None:
            if not self._study_area.endswith('.shp'):
                self._study_area += '.shp'

        self._scenario = scenario
        self._original_gdb = Original_gdb
        self._capa_walloon = CaPa_Walloon
        self._picc_walloon = PICC_Walloon
        self._ce_ign_top10v = CE_IGN_top10v

        # INPUT tree -- provided by the user, never created by the script
        self.IN_DIR = self.main_dir / "INPUT"
        self.IN_DATABASE = self.IN_DIR / "DATABASE"
        self.IN_STUDY_AREA = self.IN_DIR / "STUDY_AREA"
        self.IN_CSV = self.IN_DIR / "CSVs"
        self.IN_WATER_DEPTH = self.IN_DIR / "WATER_DEPTH"
        self.IN_EPU_STATIONS= self.IN_DIR / "EPU_STATIONS_NEW"

        self.ORIGINAL_GDB = self.IN_DATABASE / self._original_gdb
        self.CAPA_WALLOON = self.IN_DATABASE / self._capa_walloon
        self.PICC_WALLOON = self.IN_DATABASE / self._picc_walloon
        self.CE_IGN_TOP10V = self.IN_DATABASE / self._ce_ign_top10v
        self.EPU_STATIONS = self.IN_EPU_STATIONS / EPU_Stations

        self.VULNERABILITY_CSV = self.IN_CSV / Vuln_csv
        self.POINTS_CSV = self.IN_CSV / Intermediate_csv
        self.PONDERATION_CSV = self.IN_CSV / Ponderation_csv

        # Groups of mandatory input files -- scanned by "check_files"
        self._CSVs = [self.VULNERABILITY_CSV, self.POINTS_CSV]
        self._GPKGs= [self.CAPA_WALLOON, self.PICC_WALLOON]
        self._GDBs = [self.ORIGINAL_GDB]
        self._SHPs = [self.CE_IGN_TOP10V, self.EPU_STATIONS]
        self._ALLS = self._CSVs + self._GPKGs + self._GDBs + self._SHPs

        self.TMP_DIR = self.main_dir / "TEMP"

        self.OUT_DIR = self.main_dir / "OUTPUT"

        # Layers converted from points/lines to polygons -- filled by "get_operand"
        self.points2polys = []
        self.lines2polys = []

        self.create_paths()
        self.create_paths_scenario()

    def create_paths(self):
        """ Create the paths for the directories and files """

        self.points2polys = []
        self.lines2polys = []

        if self._study_area is not None:

            self.Study_area:Path = Path(self._study_area)

            # TEMP tree for this study area
            self.TMP_STUDYAREA = self.TMP_DIR / self.Study_area.stem
            self.TMP_DATABASE = self.TMP_STUDYAREA / "DATABASES"

            self.TMP_CLIPGDB = self.TMP_DATABASE / "CLIP_GDB"
            self.TMP_CADASTER = self.TMP_DATABASE / "CLIP_CADASTER"
            self.TMP_PICC = self.TMP_DATABASE / "CLIP_PICC"
            self.TMP_IGNCE = self.TMP_DATABASE / "CLIP_IGN_CE"
            self.TMP_WMODIF = self.TMP_DATABASE / "WITH_MODIF"
            self.TMP_CODEVULNE = self.TMP_DATABASE / "CODE_VULNE"

            self.TMP_VULN_DIR = self.TMP_STUDYAREA / "VULNERABILITY"
            self.TMP_RASTERS = self.TMP_VULN_DIR / "RASTERS"
            self.TMP_RASTERS_CODE = self.TMP_RASTERS / "Code"
            self.TMP_RASTERS_VULNE = self.TMP_RASTERS / "Vulne"

            self.OUT_STUDY_AREA = self.OUT_DIR / self.Study_area.stem

            # Study-area-level files
            self.SA = self.IN_STUDY_AREA / self.Study_area
            self.SA_MASKED_RIVER = self.TMP_IGNCE / "CE_IGN_TOP10V.tiff"
            self.SA_VULN = self.TMP_VULN_DIR / "Vulnerability.tiff"
            self.SA_CODE = self.TMP_VULN_DIR / "Vulnerability_Code.tiff"

        else:
            # No study area -> reset everything (a scenario needs a study area)
            self.Study_area = None
            self._scenario = None

            self.TMP_STUDYAREA = None
            self.TMP_DATABASE = None
            self.TMP_CADASTER = None
            self.TMP_PICC = None
            self.TMP_IGNCE = None
            self.TMP_WMODIF = None
            self.TMP_CODEVULNE = None
            self.TMP_VULN_DIR = None
            self.TMP_RASTERS = None
            self.TMP_RASTERS_CODE = None
            self.TMP_RASTERS_VULNE = None

            self.OUT_STUDY_AREA = None

            self.SA = None
            self.SA_MASKED_RIVER = None

            self.SA_VULN = None
            self.SA_CODE = None

        self.create_paths_scenario()

        self.check_inputs()
        self.check_temporary()
        self.check_outputs()

    def create_paths_scenario(self):
        """ Create (or reset) the scenario-dependent paths. """

        if self._scenario is not None:

            self.scenario:str = str(self._scenario)

            self.IN_SCEN_DIR = self.IN_WATER_DEPTH / self.SA.stem / self.scenario
            self.IN_RM_BUILD_DIR = self.IN_SCEN_DIR / "REMOVED_BUILDINGS"

            self.TMP_SCEN_DIR = self.TMP_VULN_DIR / self.scenario
            self.TMP_RM_BUILD_DIR = self.TMP_SCEN_DIR / "REMOVED_BUILDINGS"
            self.TMP_QFILES = self.TMP_SCEN_DIR / "Q_FILES"

            self.TMP_VULN = self.TMP_SCEN_DIR / "Vulnerability.tiff"
            self.TMP_CODE = self.TMP_SCEN_DIR / "Vulnerability_Code.tiff"

            self.OUT_SCEN_DIR = self.OUT_STUDY_AREA / self.scenario
            self.OUT_VULN = self.OUT_SCEN_DIR / "Vulnerability.tiff"
            self.OUT_CODE = self.OUT_SCEN_DIR / "Vulnerability_Code.tiff"
            self.OUT_MASKED_RIVER = self.OUT_SCEN_DIR / "Masked_River_extent.tiff"
            self.OUT_ACCEPT = self.OUT_SCEN_DIR / "Acceptability.tiff"
            self.OUT_ACCEPT_100M = self.OUT_SCEN_DIR / "Acceptability_100m.tiff"

        else:
            # No scenario selected -> reset all scenario-dependent paths
            self.scenario = None

            self.IN_SCEN_DIR = None
            self.IN_RM_BUILD_DIR = None

            self.TMP_SCEN_DIR = None
            self.TMP_RM_BUILD_DIR = None
            self.TMP_QFILES = None

            self.TMP_VULN = None
            self.TMP_CODE = None

            self.OUT_SCEN_DIR = None
            self.OUT_VULN = None
            self.OUT_CODE = None
            self.OUT_MASKED_RIVER = None
            self.OUT_ACCEPT = None
            self.OUT_ACCEPT_100M = None

    @property
    def is_valid_inputs(self) -> bool:
        # Delegates to "check_inputs" (also logs what is missing)
        return self.check_inputs()

    @property
    def is_valid_study_area(self) -> bool:
        # True if the study area shapefile exists on disk
        return self.SA.exists()

    @property
    def is_valid_vulnerability_csv(self) -> bool:
        return self.VULNERABILITY_CSV.exists()

    @property
    def is_valid_points_csv(self) -> bool:
        return self.POINTS_CSV.exists()

    @property
    def is_valid_ponderation_csv(self) -> bool:
        return self.PONDERATION_CSV.exists()

    def check_files(self) -> str:
        """ Check the files in the directories

        :return: newline-separated string of the missing mandatory files ('' if none)
        """

        files = ""
        for a in self._ALLS:
            if not a.exists():
                files += str(a) + "\n"

        return files

    def change_studyarea(self, Study_area:str = None) -> None:
        """ Switch to another study area (or reset with None) and rebuild all paths. """

        if Study_area is None:
            self._study_area = None
            self._scenario = None
        else:
            if Study_area in self.get_list_studyareas(with_suffix=True):
                self._study_area = Path(Study_area)
            else:
                logging.error("The study area does not exist in the study area directory")

        self.create_paths()

    def change_scenario(self, scenario:str) -> None:
        """ Switch to another scenario of the current study area and rebuild its paths. """

        if scenario in self.get_list_scenarios():
            self._scenario = scenario
            self.create_paths_scenario()
            self.check_temporary()
            self.check_outputs()
        else:
            logging.error("The scenario does not exist in the water depth directory")

    def get_files_in_rm_buildings(self) -> list[Path]:
        """ Vector files (EXTENT) in the scenario's REMOVED_BUILDINGS input directory. """
        return [Path(a) for a in glob.glob(str(self.IN_RM_BUILD_DIR / ("*"+ EXTENT)))]

    def get_files_in_rasters_vulne(self) -> list[Path]:
        """ Per-layer vulnerability rasters (.tiff) in TEMP. """
        return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_VULNE / "*.tiff"))]

    def get_layers_in_gdb(self) -> list[str]:
        """ Layer names of the original GDB database. """
        return [a[0] for a in list_layers(str(self.ORIGINAL_GDB))]

    def get_layer_types_in_gdb(self) -> list[str]:
        """ Geometry types of the layers in the original GDB database. """
        return [a[1] for a in list_layers(str(self.ORIGINAL_GDB))]

    def get_layers_in_clipgdb(self) -> list[str]:
        """ Names (stems) of the clipped layers in TEMP/CLIP_GDB. """
        return [Path(a).stem for a in glob.glob(str(self.TMP_CLIPGDB / ("*"+ EXTENT)))]

    def get_layers_in_wmodif(self) -> list[str]:
        """ Names (stems) of the modified layers in TEMP/WITH_MODIF. """
        return [Path(a).stem for a in glob.glob(str(self.TMP_WMODIF / ("*"+ EXTENT)))]

    def get_layers_in_codevulne(self) -> list[str]:
        """ Names (stems) of the layers with code/vulnerability in TEMP/CODE_VULNE. """
        return [Path(a).stem for a in glob.glob(str(self.TMP_CODEVULNE / ("*"+ EXTENT)))]

    def get_files_in_rasters_code(self) -> list[Path]:
        """ Per-layer code rasters (.tiff) in TEMP. """
        return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_CODE / "*.tiff"))]

    def get_q_files(self) -> list[Path]:
        """ Discharge (.tif) files in the scenario's Q_FILES temporary directory. """
        return [Path(a) for a in glob.glob(str(self.TMP_QFILES / "*.tif"))]

    def get_list_scenarios(self) -> list[str]:
        """ Scenario directory names ("Scenario*") available for the current study area. """
        return [Path(a).stem for a in glob.glob(str(self.IN_WATER_DEPTH / self.SA.stem / "Scenario*"))]

    def get_list_studyareas(self, with_suffix:bool = False) -> list[str]:
        """ Study area shapefile names, with or without the '.shp' suffix. """

        if with_suffix:
            return [Path(a).name for a in glob.glob(str(self.IN_STUDY_AREA / "*.shp"))]
        else:
            return [Path(a).stem for a in glob.glob(str(self.IN_STUDY_AREA / "*.shp"))]

    def get_sims_files_for_scenario(self) -> list[Path]:
        """ Simulation rasters (.tif) of the current scenario. """

        return [Path(a) for a in glob.glob(str(self.IN_SCEN_DIR / "*.tif"))]

    def get_sim_file_for_return_period(self, return_period:int) -> Path:
        """ Simulation raster matching the given return period, or None.

        Two naming conventions are supported: "*_T{rp}_*_h.tif" and "*T{rp}*.tif".
        """

        sims = self.get_sims_files_for_scenario()

        if len(sims)==0:
            logging.error("No simulations found")
            return None

        if "_h.tif" in sims[0].name:
            for cursim in sims:
                if cursim.stem.find("_T{}_".format(return_period)) != -1:
                    return cursim
        else:
            for cursim in sims:
                if cursim.stem.find("T{}".format(return_period)) != -1:
                    return cursim

        return None

    def get_types_in_file(self, file:str) -> list[str]:
        """ Get the types of the geometries in the Shape file """

        return [a[1] for a in list_layers(str(file))]

    def is_type_unique(self, file:str) -> bool:
        """ Check if the file contains only one type of geometry """

        types = self.get_types_in_file(file)
        return len(types) == 1

    def is_polygons(self, set2test:set) -> bool:
        """ Check if the set contains only polygons """

        set2test = list(set2test)
        firstone = set2test[0]
        if 'Polygon' in firstone:
            for curtype in set2test:
                if 'Polygon' not in curtype:
                    return False
            return True
        else:
            return False

    def is_same_types(self, file:str) -> tuple[bool, str]:
        """ Check if the file contains only the same type of geometry

        :return: (True, 'Point'|'Polygon'|'LineString') if homogeneous, (False, None) otherwise
        :raises ValueError: if the geometry type is not recognized
        """

        types = self.get_types_in_file(file)

        if len(types) == 1:
            if 'Point' in types[0]:
                return True, 'Point'
            elif 'Polygon' in types[0]:
                return True, 'Polygon'
            elif 'LineString' in types[0]:
                return True, 'LineString'
            else:
                raise ValueError(f"The type of geometry {types[0]} is not recognized")
        else:
            # Several layers: all must share the same base geometry type
            firstone = types[0]
            if 'Point' in firstone:
                for curtype in types:
                    if 'Point' not in curtype:
                        return False, None
                return True, 'Point'

            elif 'Polygon' in firstone:
                for curtype in types:
                    if 'Polygon' not in curtype:
                        return False, None

                return True, 'Polygon'

            elif 'LineString' in firstone:
                for curtype in types:
                    if 'LineString' not in curtype:
                        return False, None

                return True, 'LineString'
            else:
                raise ValueError(f"The type of geometry {firstone} is not recognized")


    def get_return_periods(self) -> list[int]:
        """
        Get the return periods from the simulations

        :return list[int]: the **sorted list** of return periods
        """

        # List files in directory
        sims = self.get_sims_files_for_scenario()

        if len(sims)==0:
            logging.error("No simulations found")
            return None

        # Two cases:
        # - Return periods are named as T2.tif, T5.tif, T10.tif, ...
        # - Return periods are named as *_T2_h.tif, *_T5_h.tif, *_T10_h.tif, ...
        if "_h.tif" in sims[0].name:

            # Searching for the position of the return period in the name
            idx_T = [cursim.name.find("_T") for cursim in sims]
            idx_h = [cursim.name.find("_h.tif") for cursim in sims]

            assert len(idx_T) == len(idx_h), "The number of T and h are not the same"
            for curT, curh in zip(idx_T, idx_h):
                assert curT != -1, "The T is not found"
                assert curh != -1, "The h is not found"
                assert curh > curT, "The h is before the T"

            # Create the list of return periods -- only the numeric part
            sims = [int(cursim.name[idx_T[i]+2:idx_h[i]]) for i, cursim in enumerate(sims)]
        else:
            # searching for the position of the return period in the name
            idx_T = [cursim.name.find("T") for cursim in sims]
            idx_h = [cursim.name.find(".tif") for cursim in sims]

            assert len(idx_T) == len(idx_h), "The number of T and h are not the same"
            for curT, curh in zip(idx_T, idx_h):
                assert curT != -1, "The T is not found"
                assert curh != -1, "The h is not found"
                assert curh > curT, "The h is before the T"

            # create the list of return periods -- only the numeric part
            sims = [int(cursim.name[idx_T[i]+1:idx_h[i]]) for i, cursim in enumerate(sims)]

        return sorted(sims)

    def get_ponderations(self) -> pd.DataFrame:
        """ Get the ponderation data from available simulations

        Weights are based on differences of inverse return periods
        (trapezoidal-like weighting over 1/T).
        """

        rt = self.get_return_periods()

        if len(rt)==0:
            logging.error("No simulations found")
            return None

        pond = []

        # First period: own inverse + half the gap to the next one
        pond.append(1./float(rt[0]) + (1./float(rt[0]) - 1./float(rt[1]))/2.)
        for i in range(1, len(rt)-1):
            # Full formula
            # pond.append((1./float(rt[i-1]) - 1./float(rt[i]))/2. + (1./float(rt[i]) - 1./float(rt[i+1]))/2.)

            # More compact formula
            pond.append((1./float(rt[i-1]) - 1./float(rt[i+1]))/2.)

        # Last period: own inverse + half the gap to the previous one
        pond.append(1./float(rt[-1]) + (1./float(rt[-2]) - 1./float(rt[-1]))/2.)

        return pd.DataFrame(pond, columns=["Ponderation"], index=rt)

    def get_filepath_for_return_period(self, return_period:int) -> Path:
        """ Alias of "get_sim_file_for_return_period". """

        return self.get_sim_file_for_return_period(return_period)

    def change_dir(self) -> None:
        """ chdir into the main directory. """
        os.chdir(self.main_dir)
        logging.info("Current directory: %s", os.getcwd())

    def restore_dir(self) -> None:
        """ chdir back to the directory in use when the manager was created. """
        os.chdir(self.old_dir)
        logging.info("Current directory: %s", os.getcwd())

    def check_inputs(self) -> bool:
        """
        Check if the input directories exist.

        Inputs can not be created automatically. The user must provide them.

        :return: True if everything needed is present, False otherwise (details are logged)
        """

        err = False
        if not self.IN_DATABASE.exists():
            logging.error("INPUT : The database directory does not exist")
            err = True

        if not self.IN_STUDY_AREA.exists():
            logging.error("INPUT : The study area directory does not exist")
            err = True

        if not self.IN_CSV.exists():
            logging.error("INPUT : The CSV directory does not exist")
            err = True

        if not self.IN_WATER_DEPTH.exists():
            logging.error("INPUT : The water depth directory does not exist")
            err = True

        if not self.IN_EPU_STATIONS.exists():
            logging.error("INPUT : The EPU stations directory does not exist")
            err = True

        if self.Study_area is not None:
            if not self.SA.exists():
                logging.error("INPUT : The study area file does not exist")
                err = True

        if not self.ORIGINAL_GDB.exists():
            logging.error("INPUT : The original gdb file does not exist - Please pull it from the SPW-ARNE")
            err = True

        if not self.CAPA_WALLOON.exists():
            logging.error("INPUT : The Cadastre Walloon file does not exist - Please pull it from the SPW")
            err = True

        if not self.PICC_WALLOON.exists():
            logging.error("INPUT : The PICC Walloon file does not exist - Please pull it from the SPW website")
            err = True

        if not self.CE_IGN_TOP10V.exists():
            logging.error("INPUT : The CE IGN top10v file does not exist - Please pull it from the IGN")
            err = True

        if self.scenario is None:
            logging.debug("The scenario has not been defined")
        else:
            if not self.IN_SCEN_DIR.exists():
                logging.error("The scenario directory does not exist")
                err = True

        return not err

    def check_temporary(self) -> bool:
        """
        Check if the temporary directories exist.

        If not, create them.
        """

        self.TMP_DIR.mkdir(parents=True, exist_ok=True)

        if self.Study_area is not None:
            self.TMP_STUDYAREA.mkdir(parents=True, exist_ok=True)
            self.TMP_DATABASE.mkdir(parents=True, exist_ok=True)
            self.TMP_CLIPGDB.mkdir(parents=True, exist_ok=True)
            self.TMP_CADASTER.mkdir(parents=True, exist_ok=True)
            self.TMP_WMODIF.mkdir(parents=True, exist_ok=True)
            self.TMP_CODEVULNE.mkdir(parents=True, exist_ok=True)
            self.TMP_PICC.mkdir(parents=True, exist_ok=True)
            self.TMP_IGNCE.mkdir(parents=True, exist_ok=True)
            self.TMP_VULN_DIR.mkdir(parents=True, exist_ok=True)
            self.TMP_RASTERS.mkdir(parents=True, exist_ok=True)
            self.TMP_RASTERS_CODE.mkdir(parents=True, exist_ok=True)
            self.TMP_RASTERS_VULNE.mkdir(parents=True, exist_ok=True)

            # Scenario dirs live below the study-area tree
            if self.scenario is not None:
                self.TMP_SCEN_DIR.mkdir(parents=True, exist_ok=True)
                self.TMP_RM_BUILD_DIR.mkdir(parents=True, exist_ok=True)
                self.TMP_QFILES.mkdir(parents=True, exist_ok=True)

        return True

    def check_outputs(self) -> bool:
        """
        Check if the output directories exist.

        If not, create them.
        """

        self.OUT_DIR.mkdir(parents=True, exist_ok=True)

        if self.Study_area is not None:
            self.OUT_STUDY_AREA.mkdir(parents=True, exist_ok=True)

            if self.scenario is not None:
                self.OUT_SCEN_DIR.mkdir(parents=True, exist_ok=True)

        return True

    def check_before_database_creation(self) -> bool:
        """ Check if the necessary files are present before the database creation"""

        if not self.is_valid_inputs:
            # NOTE(review): typo "Theere" kept -- it is a runtime log message
            logging.error("Theere are missing input directories - Please check carefully the input directories and the logs")
            return False

        if not self.is_valid_study_area:
            logging.error("The study area file does not exist - Please create it")
            return False

        if not self.is_valid_vulnerability_csv:
            logging.error("The vulnerability CSV file does not exist - Please create it")
            return False

        return True

    def check_before_rasterize(self) -> bool:
        """ Check that the vector outputs required for rasterization exist. """

        if not self.TMP_CODEVULNE.exists():
            logging.error("The final database with vulnerability levels does not exist")
            return False

        if not self.TMP_WMODIF.exists():
            logging.error("The vector data with modifications does not exist")
            return False

        return True

    def check_before_vulnerability(self) -> bool:
        """ Check that the paths required for the vulnerability computation are defined. """

        if self.SA is None:
            logging.error("The area of interest does not exist")
            return False

        if self.IN_WATER_DEPTH is None:
            logging.error("The water depth directory does not exist")
            return False

        if self.IN_SCEN_DIR is None:
            logging.error("The scenario directory does not exist in the water depth directory")
            return False

        if self.SA_MASKED_RIVER is None:
            logging.error("The IGN raster does not exist")
            return False

        return True

    def check_vuln_code_sa(self) -> bool:
        """ Check that the study-area-level vulnerability/code rasters exist. """

        if not self.SA_VULN.exists():
            logging.error("The vulnerability raster file does not exist")
            return False

        if not self.SA_CODE.exists():
            logging.error("The vulnerability code raster file does not exist")
            return False

        return True

    def check_vuln_code_scenario(self) -> bool:
        """ Check that the scenario-level vulnerability/code rasters exist. """

        if not self.TMP_VULN.exists():
            logging.error("The vulnerability raster file does not exist")
            return False

        if not self.TMP_CODE.exists():
            logging.error("The vulnerability code raster file does not exist")
            return False

        return True

    def compare_original_clipped_layers(self) -> str:
        """ Compare the original layers with the clipped ones

        :return: human-readable report of the differences
        """

        layers = self.get_layers_in_gdb()
        layers_clip = self.get_layers_in_clipgdb()

        ret = 'These layers have not been clipped:\n'
        for layer in layers:
            if layer not in layers_clip:
                ret += " - {}\n".format(layer)

        ret += '\nThese layers have been clipped but are not present in the GDB:\n'
        for layer in layers_clip:
            if layer not in layers:
                ret += " - {}\n".format(layer)

        ret+='\n'

        return ret

    def compare_clipped_raster_layers(self) -> str:
        """ Compare the clipped layers with the rasterized ones

        :return: human-readable report of the differences
        """

        layers = self.get_layers_in_clipgdb()
        layers_rast = self.get_layers_in_codevulne()

        # NOTE(review): stray "{}" in this message (never filled) -- kept as-is
        ret = 'These layers {} have not been rasterized:\n'
        for layer in layers:
            if layer not in layers_rast:
                ret += " - {}\n".format(layer)

        ret += '\nThese layers have been rasterized but are not in the orginal GDB:\n'
        for layer in layers_rast:
            if layer not in layers:
                ret += " - {}\n".format(layer)

        ret+='\n'

        return ret

    def get_operand(self, file:str) -> Modif_Type:
        """ Get the operand based on the layer name

        Side effect: appends the layer to "points2polys" or "lines2polys"
        when a geometry conversion will be needed.

        :raises ValueError: if the file mixes several geometry types
        """
        LAYERS_WALOUS = ["WALOUS_2018_LB72_112",
                         "WALOUS_2018_LB72_31",
                         "WALOUS_2018_LB72_32",
                         "WALOUS_2018_LB72_331",
                         "WALOUS_2018_LB72_332",
                         "WALOUS_2018_LB72_333",
                         "WALOUS_2018_LB72_34"]

        ret, curtype = self.is_same_types(file)
        layer = Path(file).stem

        if not ret:
            raise ValueError("The layer contains different types of geometries")

        if layer in LAYERS_WALOUS:
            return Modif_Type.WALOUS

        elif curtype=="Point":

            self.points2polys.append(layer)

            if layer =="BDREF_DGO3_PASH__SCHEMA_STATIONS_EPU":
                return Modif_Type.POINT2POLY_EPURATION
            elif layer =="INFRASIG_SOINS_SANTE__ETAB_AINES":
                return Modif_Type.POINT2POLY_PICC
            else:
                return Modif_Type.POINT2POLY_CAPAPICC

        elif layer =="Hab_2018_CABU":
            return Modif_Type.INHABITED

        elif layer =="INFRASIG_ROUTE_RES_ROUTIER_TE_AXES":

            self.lines2polys.append(layer)

            return Modif_Type.ROAD

        else:
            return Modif_Type.COPY

    def check_origin_shape(self) -> list[str]:
        """ Check that all code/vulnerability rasters share the grid of a reference raster.

        :return: list of rasters whose geotransform/projection/size differs
        # NOTE(review): early exits return False although the annotation says
        # list[str] -- callers must handle both; TODO confirm
        """

        code = self.get_files_in_rasters_code()
        vuln = self.get_files_in_rasters_vulne()

        if len(code) == 0:
            logging.error("The code rasters do not exist")
            return False

        if len(vuln) == 0:
            logging.error("The vulnerability rasters do not exist")
            return False

        if len(code) != len(vuln):
            logging.error("The number of code and vulnerability rasters do not match")
            return False

        # we take a reference raster
        ref = gdal.Open(str(code[0]))
        band_ref = ref.GetRasterBand(1)
        proj_ref = ref.GetProjection()
        geo_ref = ref.GetGeoTransform()
        col_ref, row_ref = band_ref.XSize, band_ref.YSize

        # we compare the reference raster with the others
        diff = []
        for cur in code + vuln + [self.SA_MASKED_RIVER]:
            cur_ = gdal.Open(str(cur))
            band_cur = cur_.GetRasterBand(1)
            proj_cur = cur_.GetProjection()
            geo_cur = cur_.GetGeoTransform()
            col_cur, row_cur = band_cur.XSize, band_cur.YSize

            if geo_ref != geo_cur:
                logging.error("The geotransforms do not match {}".format(cur))
                diff.append(cur)

            if proj_ref != proj_cur:
                logging.error("The projections do not match {}".format(cur))
                diff.append(cur)

            if col_ref != col_cur or row_ref != row_cur:
                logging.error("The dimensions do not match {}".format(cur))
                diff.append(cur)

        return diff
-
875
def clip_layer(layer:str,
               file_path:str,
               Study_Area:str,
               output_dir:str):
    """
    Clip the input data based on the selected bassin and saves it
    in separate shape files.

    As shape file does not support DateTime, the columns with DateTime
    are converted to string.

    :param layer: the layer name in the GDB file
    :param file_path: the path to the GDB file
    :param Study_Area: the path to the study area shapefile
    :param output_dir: the path to the output directory
    :return: status message (also logged)
    """

    # Normalize arguments -- they may be passed as Path objects
    layer = str(layer)
    file_path = str(file_path)
    Study_Area = str(Study_Area)
    output_dir = Path(output_dir)

    St_Area = gpd.read_file(Study_Area, engine=ENGINE)

    logging.info(layer)

    # The data is clipped during the reading
    # **It is more efficient than reading the entire data and then clipping it**
    #
    # FIXME: "read_dataframe" is used directly rather than "gpd.read_file" cause
    # the "layer" parameter is well transmitted to the "read_dataframe" function...
    df:gpd.GeoDataFrame = read_dataframe(file_path, layer=layer, mask=St_Area['geometry'][0])

    if len(df) == 0:
        logging.warning("No data found for layer " + str(layer))
        return "No data found for layer " + str(layer)

    # Force Lambert72 -> EPSG:31370
    df.to_crs("EPSG:31370", inplace=True)
    try:
        # Output drivers cannot store timezone-aware datetimes -> stringify them
        date_columns = df.select_dtypes(include=['datetimetz']).columns.tolist()
        if len(date_columns)>0:
            df[date_columns] = df[date_columns].astype(str)

        df.to_file(str(output_dir / (layer+EXTENT)), mode='w', engine=ENGINE)
    except Exception as e:
        # Best-effort: log the failure and keep processing the other layers
        logging.error("Error while saving the clipped " + str(layer) + " to file")
        logging.error(e)
        pass

    # NOTE(review): logged even when the save above failed -- TODO confirm intent
    logging.info("Saved the clipped " + str(layer) + " to file")
    return "Saved the clipped " +str(layer)+ " to file"
-
928
-
929
- def data_modification(layer:str,
930
- manager:Accept_Manager,
931
- picc:gpd.GeoDataFrame,
932
- capa:gpd.GeoDataFrame ):
933
- """
934
- Apply the data modifications as described in the LEMA report
935
-
936
- FIXME : Add more doc in this docstring
937
-
938
- :param input_database: the path to the input database
939
- :param layer: the layer name in the database
940
- :param output_database: the path to the output database
941
- :param picc: the PICC Walloon file -- Preloaded
942
- :param capa: the Cadastre Walloon file -- Preloaded
943
- """
944
-
945
- df1:gpd.GeoDataFrame
946
- df2:gpd.GeoDataFrame
947
-
948
- layer = str(layer)
949
-
950
- dir_input = manager.TMP_CLIPGDB
951
- dir_output = manager.TMP_WMODIF
952
-
953
- input_file = str(dir_input / (layer + EXTENT))
954
- output_file = str(dir_output / (layer + EXTENT))
955
-
956
- # Read the data
957
- df:gpd.GeoDataFrame = gpd.read_file(input_file, engine=ENGINE)
958
- nblines, _ = df.shape
959
-
960
- if nblines>0:
961
- op = manager.get_operand(input_file)
962
-
963
- if op == Modif_Type.WALOUS:
964
- # Walous layers changed to PICC buidings
965
-
966
- assert picc.crs == df.crs, "CRS of PICC and input data do not match"
967
-
968
- assert "GEOREF_ID" in picc.columns, "The PICC file does not contain the GEOREF_ID column"
969
- assert "NATUR_CODE" in picc.columns, "The PICC file does not contain the NATUR_CODE column"
970
-
971
- df1 = gpd.sjoin(picc, df, how="inner", predicate="intersects" )
972
- cols = df.columns
973
-
974
- cols = np.append(cols, "GEOREF_ID")
975
- cols = np.append(cols, "NATUR_CODE")
976
-
977
- df1 = df1[cols]
978
-
979
- if df1.shape[0] > 0:
980
- assert manager.is_polygons(set(df1.geom_type)), f"The layer does not contains polygons - {op}"
981
- df1.to_file(output_file, engine=ENGINE)
982
- else:
983
- logging.warning("No data found for layer " + str(layer))
984
-
985
- elif op == Modif_Type.POINT2POLY_EPURATION:
986
- # Change BDREF based on AJOUT_PDET sent by Perrine (SPI)
987
-
988
- # The original layer is a point layer.
989
- # The EPU_STATIONS shape file (from SPI) is a polygon layer.
990
-
991
- df1 = gpd.read_file(str(manager.EPU_STATIONS), engine=ENGINE)
992
-
993
- assert df1.crs == df.crs, "CRS of AJOUT_PDET and input data do not match"
994
-
995
- df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects" )
996
-
997
- if df2.shape[0] > 0:
998
- assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contains polygons - {op}"
999
- df2.to_file(output_file, engine=ENGINE)
1000
- else:
1001
- logging.warning("No data found for layer " + str(layer))
1002
-
1003
- elif op == Modif_Type.POINT2POLY_PICC:
1004
- # Select the polygons that contains the points
1005
- # in theCadaster and PICC files
1006
-
1007
- assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
1008
- assert "CaPaKey" in capa.columns, "The CaPa file does not contain the CaPaKey column"
1009
-
1010
- df1= gpd.sjoin(capa, df, how="inner", predicate="intersects" )
1011
- cols=df.columns
1012
-
1013
- cols = np.append(cols, "CaPaKey")
1014
- df1=df1[cols]
1015
- df2=gpd.sjoin(picc, df1, how="inner", predicate="intersects" )
1016
-
1017
- if df2.shape[0] > 0:
1018
- assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contains polygons - {op}"
1019
- df2.to_file(output_file, engine=ENGINE)
1020
- else:
1021
- logging.warning("No data found for layer " + str(layer))
1022
-
1023
- elif op == Modif_Type.POINT2POLY_CAPAPICC:
1024
-
1025
- # Select the polygons that contains the points
1026
- # in theCadaster and PICC files
1027
-
1028
- assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
1029
- assert picc.crs == df.crs, "CRS of PICC and input data do not match"
1030
-
1031
- # Join the Layer and CaPa DataFrames : https://geopandas.org/en/stable/docs/reference/api/geopandas.sjoin.html
1032
- # ‘inner’: use intersection of keys from both dfs; retain only left_df geometry column
1033
- # "intersects" : Binary predicate. Valid values are determined by the spatial index used.
1034
- df1= gpd.sjoin(capa, df, how="inner", predicate="intersects" )
1035
-
1036
- # Retain only the columns of the input data
1037
- cols = df.columns
1038
- # but add the CaPaKey
1039
- cols = np.append(cols, "CaPaKey")
1040
-
1041
- df1 = df1[cols]
1042
-
1043
- # Join the df1 and PICC DataFrames : https://geopandas.org/en/stable/docs/reference/api/geopandas.sjoin.html
1044
- df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects" )
1045
-
1046
- # Add only the GEOREF_ID and NATUR_CODE columns from PICC
1047
- cols = np.append(cols, "GEOREF_ID")
1048
- cols = np.append(cols, "NATUR_CODE")
1049
-
1050
- df2 = df2[cols]
1051
-
1052
- if df2.shape[0] > 0:
1053
- assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contains polygons - {op}"
1054
- df2.to_file(output_file, engine=ENGINE)
1055
- else:
1056
- logging.warning("No data found for layer " + str(layer))
1057
-
1058
- elif op == Modif_Type.INHABITED:
1059
- # Select only the buildings with a number of inhabitants > 0
1060
- df1=df[df["NbsHabTOT"]>0]
1061
-
1062
- if df1.shape[0] > 0:
1063
- assert manager.is_polygons(set(df1.geom_type)), f"The layer does not contains polygons - {op}"
1064
- df1.to_file(output_file, engine=ENGINE)
1065
- else:
1066
- logging.warning("No data found for layer " + str(layer))
1067
-
1068
- elif op == Modif_Type.ROAD:
1069
- # Create a buffer around the roads
1070
- df1=df.buffer(distance=6, cap_style=2)
1071
-
1072
- if df1.shape[0] > 0:
1073
- assert set(df1.geom_type) == {'Polygon'}, f"The layer does not contains polygons - {op}"
1074
- df1.to_file(output_file, engine=ENGINE)
1075
- else:
1076
- logging.warning("No data found for layer " + str(layer))
1077
-
1078
- elif op == Modif_Type.COPY:
1079
- # just copy the data if it is polygons
1080
- if manager.is_polygons(set(df.geom_type)):
1081
- df.to_file(output_file, engine=ENGINE)
1082
- else:
1083
- logging.error("The layer does not contains polygons - " + str(layer))
1084
- else:
1085
- raise ValueError(f"The operand {op} is not recognized")
1086
-
1087
- return "Data modification done for " + str(layer)
1088
- else:
1089
- # Normally, phase 1 does not create empty files
1090
- # But it is better to check... ;-)
1091
- logging.error("skipped" + str(layer) + "due to no polygon in the study area")
1092
- return "skipped" + str(layer) + "due to no polygon in the study area"
1093
-
1094
- def compute_vulnerability(manager:Accept_Manager):
1095
- """
1096
- Compute the vulnerability for the Study Area
1097
-
1098
- This function **will not modify** the data by the removed buildings/scenarios.
1099
-
1100
- :param dirsnames: the Dirs_Names object from the calling function
1101
- """
1102
-
1103
- vuln_csv = Vulnerability_csv(manager.VULNERABILITY_CSV)
1104
-
1105
- rasters_vuln = manager.get_files_in_rasters_vulne()
1106
-
1107
- logging.info("Number of files",len(rasters_vuln))
1108
-
1109
- ds:gdal.Dataset = gdal.OpenEx(str(rasters_vuln[0]), gdal.GA_ReadOnly, open_options=["SPARSE_OK=TRUE"])
1110
-
1111
- tmp_vuln = ds.GetRasterBand(1)
1112
-
1113
- # REMARK: The XSize and YSize are the number of columns and rows
1114
- col, row = tmp_vuln.XSize, tmp_vuln.YSize
1115
-
1116
- logging.info("Computing Vulnerability")
1117
-
1118
- array_vuln = np.ones((row, col), dtype=np.int8)
1119
-
1120
- # Create a JIT function to update the arrays
1121
- # Faster than the classical Python loop or Numpy
1122
- @nb.jit(nopython=True, boundscheck=False, inline='always')
1123
- def update_arrays_jit(tmp_vuln, array_vuln):
1124
- for i in range(tmp_vuln.shape[0]):
1125
- for j in range(tmp_vuln.shape[1]):
1126
- if tmp_vuln[i, j] >= array_vuln[i, j]:
1127
- array_vuln[i, j] = tmp_vuln[i, j]
1128
-
1129
- return array_vuln
1130
-
1131
- @nb.jit(nopython=True, boundscheck=False, inline='always')
1132
- def update_arrays_jit_csr(row, col, locvuln, array_vuln):
1133
- for k in range(len(row)-1):
1134
- i = k
1135
- j1 = row[k]
1136
- j2 = row[k+1]
1137
- for j in col[j1:j2]:
1138
- if locvuln >= array_vuln[i, j]:
1139
- array_vuln[i, j] = locvuln
1140
-
1141
- return array_vuln
1142
-
1143
- for i in tqdm(range(len(rasters_vuln)), 'Computing Vulnerability : '):
1144
- logging.info("Computing layer {} / {}".format(i, len(rasters_vuln)))
1145
-
1146
- locvuln = vuln_csv.get_vulnerability_level(rasters_vuln[i].stem)
1147
-
1148
- if locvuln == 1:
1149
- logging.info("No need to apply the matrice, the vulnerability is 1 which is the lower value")
1150
- continue
1151
-
1152
- if rasters_vuln[i].with_suffix('.npz').exists():
1153
- ij_npz = np.load(rasters_vuln[i].with_suffix('.npz'))
1154
- ii = ij_npz['row']
1155
- jj = ij_npz['col']
1156
- # We use the jit
1157
- update_arrays_jit_csr(ii, jj, locvuln, array_vuln)
1158
-
1159
- else:
1160
- ds = gdal.OpenEx(str(rasters_vuln[i]), open_options=["SPARSE_OK=TRUE"])
1161
- tmp_vuln = ds.GetRasterBand(1).ReadAsArray()
1162
- # We use the jit
1163
- update_arrays_jit(tmp_vuln, array_vuln)
1164
-
1165
- logging.info("Saving the computed vulnerability")
1166
- dst_filename= str(manager.SA_VULN)
1167
- y_pixels, x_pixels = array_vuln.shape # number of pixels in x
1168
-
1169
- driver = gdal.GetDriverByName('GTiff')
1170
- dataset = driver.Create(dst_filename,
1171
- x_pixels, y_pixels,
1172
- gdal.GDT_Byte,
1173
- 1,
1174
- options=["COMPRESS=LZW"])
1175
-
1176
- dataset.GetRasterBand(1).WriteArray(array_vuln.astype(np.int8))
1177
- # follow code is adding GeoTranform and Projection
1178
- geotrans = ds.GetGeoTransform() # get GeoTranform from existed 'data0'
1179
- proj = ds.GetProjection() # you can get from a exsited tif or import
1180
- dataset.SetGeoTransform(geotrans)
1181
- dataset.SetProjection(proj)
1182
- dataset.FlushCache()
1183
- dataset = None
1184
-
1185
- logging.info("Computed Vulnerability for the Study Area - Done")
1186
-
1187
- def compute_code(manager:Accept_Manager):
1188
- """
1189
- Compute the code for the Study Area
1190
-
1191
- This function **will not modify** the data by the removed buildings/scenarios.
1192
-
1193
- :param dirsnames: the Dirs_Names object from the calling function
1194
- """
1195
-
1196
- vuln_csv = Vulnerability_csv(manager.VULNERABILITY_CSV)
1197
-
1198
- rasters_code = manager.get_files_in_rasters_code()
1199
-
1200
- logging.info("Number of files",len(rasters_code))
1201
-
1202
- ds:gdal.Dataset = gdal.OpenEx(str(rasters_code[0]), gdal.GA_ReadOnly, open_options=["SPARSE_OK=TRUE"])
1203
-
1204
- tmp_code = ds.GetRasterBand(1)
1205
-
1206
- # REMARK: The XSize and YSize are the number of columns and rows
1207
- col, row = tmp_code.XSize, tmp_code.YSize
1208
-
1209
- logging.info("Computing Code")
1210
-
1211
- array_code = np.ones((row, col), dtype=np.int8)
1212
-
1213
- # Create a JIT function to update the arrays
1214
- # Faster than the classical Python loop or Numpy
1215
- @nb.jit(nopython=True, boundscheck=False, inline='always')
1216
- def update_arrays_jit(tmp_code, loccode, array_code):
1217
- for i in range(tmp_code.shape[0]):
1218
- for j in range(tmp_code.shape[1]):
1219
- if tmp_code[i, j] >= array_code[i, j]:
1220
- array_code[i, j] = loccode
1221
-
1222
- return array_code
1223
-
1224
- @nb.jit(nopython=True, boundscheck=False, inline='always')
1225
- def update_arrays_jit_csr(row, col, loccode, array_code):
1226
- for k in range(len(row)-1):
1227
- i = k
1228
- j1 = row[k]
1229
- j2 = row[k+1]
1230
- for j in col[j1:j2]:
1231
- if loccode >= array_code[i, j]:
1232
- array_code[i, j] = loccode
1233
-
1234
- return array_code
1235
-
1236
- for i in tqdm(range(len(rasters_code)), 'Computing Code : '):
1237
- logging.info("Computing layer {} / {}".format(i, len(rasters_code)))
1238
-
1239
- loccode = vuln_csv.get_vulnerability_code(rasters_code[i].stem)
1240
-
1241
- if rasters_code[i].with_suffix('.npz').exists():
1242
- ij_npz = np.load(rasters_code[i].with_suffix('.npz'))
1243
- ii = ij_npz['row']
1244
- jj = ij_npz['col']
1245
- # We use the jit
1246
- update_arrays_jit_csr(ii, jj, loccode, array_code)
1247
-
1248
- else:
1249
- ds = gdal.OpenEx(str(rasters_code[i]), open_options=["SPARSE_OK=TRUE"])
1250
- tmp_code = ds.GetRasterBand(1).ReadAsArray()
1251
- # We use the jit
1252
- update_arrays_jit(tmp_code, loccode, array_code)
1253
-
1254
- logging.info("Saving the computed codes")
1255
- dst_filename= str(manager.SA_CODE)
1256
- y_pixels, x_pixels = array_code.shape # number of pixels in x
1257
- driver = gdal.GetDriverByName('GTiff')
1258
- dataset = driver.Create(dst_filename,
1259
- x_pixels, y_pixels,
1260
- gdal.GDT_Byte,
1261
- 1,
1262
- options=["COMPRESS=LZW"])
1263
-
1264
- dataset.GetRasterBand(1).WriteArray(array_code.astype(np.int8))
1265
- # follow code is adding GeoTranform and Projection
1266
- geotrans = ds.GetGeoTransform() # get GeoTranform from existed 'data0'
1267
- proj = ds.GetProjection() # you can get from a exsited tif or import
1268
- dataset.SetGeoTransform(geotrans)
1269
- dataset.SetProjection(proj)
1270
- dataset.FlushCache()
1271
- dataset = None
1272
-
1273
- logging.info("Computed Code for the Study Area - Done")
1274
-
1275
- def compute_vulnerability4scenario(manager:Accept_Manager):
1276
- """ Compute the vulnerability for the scenario
1277
-
1278
- This function **will modify** the data by the removed buildings/scenarios.
1279
-
1280
- FIXME: It could be interseting to permit the user to provide tiff files for the removed buildings and other scenarios.
1281
-
1282
- :param dirsnames: the Dirs_Names object from the calling function
1283
- """
1284
-
1285
- array_vuln = gdal.Open(str(manager.SA_VULN))
1286
- geotrans = array_vuln.GetGeoTransform() # get GeoTranform from existed 'data0'
1287
- proj = array_vuln.GetProjection() # you can get from a exsited tif or import
1288
-
1289
- array_vuln = np.array(array_vuln.GetRasterBand(1).ReadAsArray())
1290
-
1291
- array_code = gdal.Open(str(manager.SA_CODE))
1292
- array_code = np.array(array_code.GetRasterBand(1).ReadAsArray())
1293
-
1294
- Rbu = manager.get_files_in_rm_buildings()
1295
-
1296
- if len(Rbu)>0:
1297
- for curfile in Rbu:
1298
- array_mod = gdal.Open(str(curfile))
1299
- array_mod = np.array(array_mod.GetRasterBand(1).ReadAsArray())
1300
-
1301
- ij = np.argwhere(array_mod == 1)
1302
- array_vuln[ij[:,0], ij[:,1]] = 1
1303
- array_code[ij[:,0], ij[:,1]] = 1
1304
-
1305
- dst_filename= str(manager.TMP_VULN)
1306
- y_pixels, x_pixels = array_vuln.shape # number of pixels in x
1307
-
1308
- driver = gdal.GetDriverByName('GTiff')
1309
- dataset = driver.Create(dst_filename, x_pixels, y_pixels, gdal.GDT_Byte, 1, options=["COMPRESS=LZW"])
1310
- dataset.GetRasterBand(1).WriteArray(array_vuln.astype(np.int8))
1311
- # follow code is adding GeoTranform and Projection
1312
- dataset.SetGeoTransform(geotrans)
1313
- dataset.SetProjection(proj)
1314
- dataset.FlushCache()
1315
- dataset = None
1316
-
1317
-
1318
- dst_filename= str(manager.TMP_CODE)
1319
- y_pixels, x_pixels = array_code.shape # number of pixels in x
1320
- driver = gdal.GetDriverByName('GTiff')
1321
- dataset = driver.Create(dst_filename, x_pixels, y_pixels, gdal.GDT_Byte, 1, options=["COMPRESS=LZW"])
1322
- dataset.GetRasterBand(1).WriteArray(array_code.astype(np.int8))
1323
- # follow code is adding GeoTranform and Projection
1324
- dataset.SetGeoTransform(geotrans)
1325
- dataset.SetProjection(proj)
1326
- dataset.FlushCache()
1327
- dataset = None
1328
-
1329
- logging.info("Computed Vulnerability and code for the scenario")
1330
-
1331
- def match_vulnerability2sim(inRas:Path, outRas:Path, MODREC:Path):
1332
- """
1333
- Clip the raster to the MODREC/simulation extent
1334
-
1335
- :param inRas: the input raster file
1336
- :param outRas: the output raster file
1337
- :param MODREC: the MODREC/simulation extent file
1338
-
1339
- """
1340
-
1341
- inRas = str(inRas)
1342
- outRas = str(outRas)
1343
- MODREC = str(MODREC)
1344
-
1345
- data = gdal.Open(MODREC, gdalconst.GA_ReadOnly)
1346
- geoTransform = data.GetGeoTransform()
1347
- minx = geoTransform[0]
1348
- maxy = geoTransform[3]
1349
- maxx = minx + geoTransform[1] * data.RasterXSize
1350
- miny = maxy + geoTransform[5] * data.RasterYSize
1351
- ds = gdal.Open(inRas)
1352
- ds = gdal.Translate(outRas, ds, projWin = [minx, maxy, maxx, miny])
1353
- ds = None
1354
-
1355
-
1356
- @nb.jit(nopython=True, boundscheck=False, inline='always')
1357
- def update_accept(accept, model_h, ij, bounds, loc_accept):
1358
- for idx in range(len(bounds)):
1359
- for i,j in ij:
1360
- if bounds[idx,0] < model_h[i,j] <= bounds[idx,1]: #lit dans wd vs Ti où on est et associe son score d'accept
1361
- accept[i,j] = loc_accept[idx]
1362
-
1363
- def compute_acceptability(manager:Accept_Manager,
1364
- model_h:np.ndarray,
1365
- vulnerability:np.ndarray,
1366
- interval:int,
1367
- geo_projection:tuple,
1368
- save_to_file:bool=True) -> np.ndarray:
1369
-
1370
- """
1371
- Compute the local acceptability based on :
1372
- - the vulnerability
1373
- - the water depth
1374
- - the matrices
1375
-
1376
- :param manager: the Accept_Manager object from the calling function
1377
- :param model_h: the water depth matrix
1378
- :param vulnerability: the vulnerability matrix
1379
- :param interval: the return period
1380
- :param geo_projection: the geotransform and the projection - tuple extracted from another raster file
1381
-
1382
- """
1383
-
1384
- logging.info(interval)
1385
-
1386
- points_accept = pd.read_csv(manager.POINTS_CSV)
1387
-
1388
- points_accept = points_accept[points_accept["Interval"]==interval] #les wd vs Ti matrices
1389
- points_accept = points_accept.reset_index()
1390
-
1391
- accept = np.zeros(vulnerability.shape, dtype=np.float32)
1392
-
1393
- bounds = np.asarray([[0., 0.02], [0.02, 0.3], [0.3, 1], [1, 2.5], [2.5, 1000]], dtype=np.float32)
1394
-
1395
- for i in range(1,6):
1396
- ij = np.argwhere(vulnerability == i)
1397
-
1398
- idx_pts = 5-i
1399
- accept_pts = [points_accept["h-0"][idx_pts],
1400
- points_accept["h-0.02"][idx_pts],
1401
- points_accept["h-0.3"][idx_pts],
1402
- points_accept["h-1"][idx_pts],
1403
- points_accept["h-2.5"][idx_pts]]
1404
-
1405
- update_accept(accept, model_h, ij, bounds, accept_pts)
1406
-
1407
- if save_to_file:
1408
- #save raster
1409
- dst_filename = str(manager.TMP_QFILES / "Q{}.tif".format(interval)) #les Qi
1410
-
1411
- y_pixels, x_pixels = accept.shape # number of pixels in x
1412
- driver = gdal.GetDriverByName('GTiff')
1413
- dataset = driver.Create(dst_filename,
1414
- x_pixels, y_pixels,
1415
- 1,
1416
- gdal.GDT_Float32,
1417
- options=["COMPRESS=LZW"])
1418
-
1419
- dataset.GetRasterBand(1).WriteArray(accept.astype(np.float32))
1420
-
1421
- geotrans, proj = geo_projection
1422
- dataset.SetGeoTransform(geotrans)
1423
- dataset.SetProjection(proj)
1424
- dataset.FlushCache()
1425
- dataset = None
1426
-
1427
- return accept
1428
-
1429
- def shp_to_raster(vector_fn:str, raster_fn:str, pixel_size:float = 1., manager:Accept_Manager = None):
1430
- """
1431
- Convert a vector layer to a raster tiff file.
1432
-
1433
- The raster will contain only 2 values : 0 and 1
1434
-
1435
- - 1 : the inside of the vector layer
1436
- - 0 : the rest == NoData/NullValue
1437
-
1438
- :param vector_fn: the path to the vector file
1439
- :param raster_fn: the path to the raster file
1440
- :param pixel_size: the pixel size of the raster
1441
- """
1442
-
1443
- # Force the input to be a string
1444
- vector_fn = str(vector_fn)
1445
- raster_fn = str(raster_fn)
1446
-
1447
- if manager is None:
1448
- extent_fn = vector_fn
1449
- logging.warning("The extent file is not provided, the extent will be the same as the vector file")
1450
- else:
1451
- extent_fn = str(manager.SA)
1452
- logging.info("The extent file is provided")
1453
-
1454
- NoData_value = 0 # np.nan is not necessary a good idea
1455
-
1456
- # Open the data sources and read the extents
1457
- source_ds:ogr.DataSource = ogr.Open(vector_fn)
1458
- source_layer = source_ds.GetLayer()
1459
-
1460
- extent_ds:ogr.DataSource = ogr.Open(extent_fn)
1461
- extent_layer = extent_ds.GetLayer()
1462
- x_min, x_max, y_min, y_max = extent_layer.GetExtent()
1463
-
1464
- x_min = float(int(x_min))
1465
- x_max = float(np.ceil(x_max))
1466
- y_min = float(int(y_min))
1467
- y_max = float(np.ceil(y_max))
1468
-
1469
- # Create the destination data source
1470
- x_res = int((x_max - x_min) / pixel_size)
1471
- y_res = int((y_max - y_min) / pixel_size)
1472
-
1473
- target_ds = gdal.GetDriverByName('GTiff').Create(raster_fn,
1474
- x_res, y_res,
1475
- 1,
1476
- gdal.GDT_Byte,
1477
- options=["COMPRESS=LZW",
1478
- 'SPARSE_OK=TRUE'])
1479
-
1480
- target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
1481
- srs = osr.SpatialReference()
1482
- srs.ImportFromEPSG(31370)
1483
- target_ds.SetProjection(srs.ExportToWkt())
1484
- band = target_ds.GetRasterBand(1)
1485
- band.SetNoDataValue(NoData_value)
1486
- # Rasterize the areas
1487
- gdal.RasterizeLayer(target_ds,
1488
- bands = [1],
1489
- layer = source_layer,
1490
- burn_values = [1],
1491
- options=["ALL_TOUCHED=TRUE"])
1492
- target_ds = None
1493
- vector_fn = raster_fn = None
1494
-
1495
- def vector_to_raster(layer:str,
1496
- manager:Accept_Manager,
1497
- attribute:str,
1498
- pixel_size:float,
1499
- convert_to_sparse:bool = True):
1500
- """
1501
- Convert a vector layer to a raster tiff file
1502
-
1503
- FIXME: Test de vulerability value and return immedialty if it is 1 if attribute == "Vulne"
1504
-
1505
- :param layer: the layer name in the GDB file
1506
- :param vector_input: the path to the vector file
1507
- :param extent: the path to the extent file
1508
- :param attribute: the attribute to rasterize
1509
- :param pixel_size: the pixel size of the raster
1510
-
1511
- """
1512
-
1513
- layer = str(layer)
1514
-
1515
- vector_input = str(manager.TMP_CODEVULNE / (layer + EXTENT))
1516
- extent = str(manager.SA)
1517
- attribute = str(attribute)
1518
- pixel_size = float(pixel_size)
1519
-
1520
- out_file = manager.TMP_RASTERS / attribute / (layer + ".tiff")
1521
-
1522
- if out_file.exists():
1523
- os.remove(out_file)
1524
-
1525
- out_file = str(out_file)
1526
-
1527
- NoData_value = 0
1528
-
1529
- extent_ds:ogr.DataSource = ogr.Open(extent)
1530
- extent_layer = extent_ds.GetLayer()
1531
-
1532
- x_min, x_max, y_min, y_max = extent_layer.GetExtent()
1533
-
1534
- x_min = float(int(x_min))
1535
- x_max = float(np.ceil(x_max))
1536
- y_min = float(int(y_min))
1537
- y_max = float(np.ceil(y_max))
1538
-
1539
- # Open the data sources and read the extents
1540
- source_ds:ogr.DataSource = ogr.Open(vector_input)
1541
- if source_ds is None:
1542
- logging.error(f"Could not open the data source {layer}")
1543
- return
1544
- source_layer = source_ds.GetLayer()
1545
-
1546
- # Create the destination data source
1547
- x_res = int((x_max - x_min) / pixel_size)
1548
- y_res = int((y_max - y_min) / pixel_size)
1549
- target_ds:gdal.Driver = gdal.GetDriverByName('GTiff').Create(out_file,
1550
- x_res, y_res, 1,
1551
- gdal.GDT_Byte,
1552
- options=["COMPRESS=DEFLATE",
1553
- 'SPARSE_OK=TRUE',])
1554
-
1555
- target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
1556
- srs = osr.SpatialReference()
1557
- srs.ImportFromEPSG(31370)
1558
- target_ds.SetProjection(srs.ExportToWkt())
1559
-
1560
- band = target_ds.GetRasterBand(1)
1561
- band.SetNoDataValue(NoData_value)
1562
-
1563
- # Rasterize the areas
1564
- gdal.RasterizeLayer(target_ds, [1],
1565
- source_layer,
1566
- options=["ATTRIBUTE="+attribute,
1567
- "ALL_TOUCHED=TRUE"])
1568
-
1569
- if convert_to_sparse:
1570
- SPARSITY_THRESHOLD = 0.02
1571
- # Convert the raster to a npz containing the row and col of the non-null values
1572
- array = band.ReadAsArray()
1573
- ij = np.nonzero(array)
1574
-
1575
- if len(ij[0]) < int(x_res * y_res * SPARSITY_THRESHOLD):
1576
- i,j = convert_to_csr(ij[0], ij[1], y_res)
1577
- np.savez_compressed(Path(out_file).with_suffix('.npz'), row=np.asarray(i, dtype=np.int32), col=np.asarray(j, dtype=np.int32))
1578
- else:
1579
- logging.info("The raster is not sparse enough to be converted to a CSR forma {}".format(layer))
1580
-
1581
- target_ds = None
1582
-
1583
- return 0
1584
-
1585
- @nb.jit(nopython=True, boundscheck=False, inline='always')
1586
- def convert_to_csr(i_indices, j_indices, num_rows):
1587
- row_ptr = [0] * (num_rows + 1)
1588
- col_idx = []
1589
-
1590
- for i in range(len(i_indices)):
1591
- row_ptr[i_indices[i] + 1] += 1
1592
- col_idx.append(j_indices[i])
1593
-
1594
- for i in range(1, len(row_ptr)):
1595
- row_ptr[i] += row_ptr[i - 1]
1596
-
1597
- return row_ptr, col_idx
1
+ """
2
+ Author: University of Liege, HECE, LEMA
3
+ Date: 2024
4
+
5
+ Copyright (c) 2024 University of Liege. All rights reserved.
6
+
7
+ This script and its content are protected by copyright law. Unauthorized
8
+ copying or distribution of this file, via any medium, is strictly prohibited.
9
+ """
10
+
11
+ from ..wolf_vrt import create_vrt_from_diverged_files_first_based, translate_vrt2tif
12
+ from ..wolf_array import WolfArray
13
+ from typing import Union, Literal
14
+ from ..PyVertexvectors import Zones, zone, vector, wolfvertex, getIfromRGB
15
+ from ..PyTranslate import _
16
+ from ..scenario. config_manager import Config_Manager_2D_GPU
17
+
18
+ import geopandas as gpd
19
+ import pandas as pd
20
+ import numpy as np
21
+ from osgeo import gdal, ogr, osr, gdalconst
22
+ import os
23
+ import glob
24
+ from pathlib import Path
25
+ import logging
26
+ from tqdm import tqdm
27
+ from pyogrio import list_layers, read_dataframe
28
+ from enum import Enum
29
+ import numba as nb
30
+
31
+ ENGINE = 'pyogrio' # or 'Fiona -- Pyogrio is faster
32
+ EXTENT = '.gpkg'
33
+ class Modif_Type(Enum):
34
+ """
35
+ Enum class for the type of modification
36
+ """
37
+
38
+ WALOUS = 'Walous layers changed to PICC buidings'
39
+ POINT2POLY_EPURATION = 'Change BDREF based on AJOUT_PDET sent by Perrine (SPI)'
40
+ POINT2POLY_PICC = 'Convert the points to polygons based on PICC'
41
+ POINT2POLY_CAPAPICC = 'Convert the points to polygons based on PICC and CaPa'
42
+ INHABITED = 'Select only inhabited buildings'
43
+ ROAD = 'Create a buffer around the roads'
44
+ COPY = 'Copy the data'
45
+
46
+ class GPU_2D_file_extensions(Enum):
47
+ TIF = '.tif' # raster
48
+ TIFF = '.tiff' # raster
49
+ PY = '.py' # python script
50
+ NPY = '.npy' # numpy array
51
+ BIN = '.bin' # WOLF binary file
52
+ JSON = '.json' # json file
53
+ TXT = '.txt' # hydrographs
54
+
55
+
56
+ class Vulnerability_csv():
57
+
58
+ def __init__(self, file:Path) -> None:
59
+ self.file = file
60
+ self.data = pd.read_csv(file, sep=",", encoding='latin-1')
61
+
62
+ def get_layers(self) -> list:
63
+ return [a[1] for a in self.data["Path"].str.split('/')]
64
+
65
+ def get_vulnerability_level(self, layer:str) -> str:
66
+ idx = self.get_layers().index(layer)
67
+ return self.data.iloc[idx]["Vulne"]
68
+
69
+ def get_vulnerability_code(self, layer:str) -> str:
70
+ idx = self.get_layers().index(layer)
71
+ return self.data.iloc[idx]["Code"]
72
+
73
+
74
+ def get_data_type(fname:Path):
75
+
76
+ fname = Path(fname)
77
+ """ Get the data type of the input file from extension """
78
+ if fname.name.endswith('.gpkg'):
79
+ return 'GPKG'
80
+ elif fname.name.endswith('.shp'):
81
+ return 'ESRI Shapefile'
82
+ elif fname.name.endswith('.gdb'):
83
+ return 'OpenfileGDB'
84
+ else:
85
+ return None
86
+
87
+ def cleaning_directory(dir:Path):
88
+ """ Cleaning the directory """
89
+
90
+ logging.info("Cleaning the directory {}".format(dir))
91
+
92
+ files_in_output = list(dir.iterdir())
93
+ for item in files_in_output:
94
+ if item.is_file():
95
+ os.remove(item)
96
+
97
+ class Accept_Manager():
98
+ """
99
+ Structure to store the directories and names of the files.
100
+
101
+ In the main directory, the following directories are mandatory/created:
102
+ - INPUT : filled by the user - contains the input data
103
+ - TEMP : created by the script - contains the temporary data for the study area
104
+ - OUTPUT: created by the script - contains the output data for each scenario of the study area
105
+
106
+ The INPUT directory contains the following subdirectories:
107
+ - DATABASE: contains the data for the **entire Walloon region**
108
+ - Cadastre_Walloon.gpkg: the Cadastre Walloon file
109
+ - GT_Resilence_dataRisques202010.gdb: the original gdb file from SPW - GT Resilience
110
+ - PICC-vDIFF.gdb: the PICC Walloon file
111
+ - CE_IGN_TOP10V: the IGN top10v shapefile
112
+ - EPU_STATIONS_NEW:
113
+ - AJOUT_PDET_EPU_DG03_STATIONS.shp: the EPU stations shapefile
114
+ - STUDY_AREA: contains the study area shapefiles - one for each study area - e.g. Bassin_Vesdre.shp
115
+ - CSVs: contains the CSV files
116
+ - Intermediate.csv: contains the matrices data for the acceptability computation
117
+ # - Ponderation.csv: contains the ponderation data for the acceptability computation
118
+ - Vulnerability.csv: contains the mapping between layers and vulnerability levels - a code value is also provided
119
+ - WATER_DEPTH: contains the water depth data for each scenario
120
+ - Study_area1:
121
+ - Scenario1
122
+ - Scenario2
123
+ -...
124
+ - ScenarioN
125
+ - Study_area2:
126
+ - Scenario1
127
+ - Scenario2
128
+ -...
129
+ - ScenarioN
130
+ -...
131
+ - Study_areaN:
132
+ - Scenario1
133
+ - Scenario2
134
+ -...
135
+ - ScenarioN
136
+
137
+ The TEMP directory contains the following subdirectories:
138
+ - DATABASES: contains the temporary data each study area
139
+ - Study_area1:
140
+ - database.gpkg: the clipped database
141
+ - CaPa.gpkg: the clipped Cadastre Walloon file
142
+ - PICC.gpkg: the clipped PICC Walloon file
143
+ - CE_IGN_TOP10V.tiff: the IGN top10v raster file
144
+ - Maske_River_extent.tiff: the river extent raster file from IGN
145
+ - VULNERABILITY: the vulnerability data
146
+ - RASTERS:
147
+ - Code : one file for each layer
148
+ - Vulne : one file for each layer
149
+ - Scenario1:
150
+
151
+ """
152
+
153
    def __init__(self,
                 main_dir:str = 'Data',
                 Study_area:str = 'Bassin_Vesdre.shp',
                 scenario = None,
                 Original_gdb:str = 'GT_Resilence_dataRisques202010.gdb',
                 CaPa_Walloon:str = 'Cadastre_Walloon.gpkg',
                 PICC_Walloon:str = 'PICC_vDIFF.gdb',
                 CE_IGN_top10v:str = 'CE_IGN_TOP10V/CE_IGN_TOP10V.shp',
                 EPU_Stations:str = 'AJOUT_PDET_EPU_DG03_STATIONS.shp',
                 Ponderation_csv:str = 'Ponderation.csv',
                 Vuln_csv:str = 'Vulnerability.csv',
                 Intermediate_csv:str = 'Intermediate.csv'
                 ) -> None:
        """
        Initialize every INPUT/TEMP/OUTPUT path used by the manager.

        :param main_dir: root data directory (made absolute against cwd if relative)
        :param Study_area: study-area shapefile name ('.shp' appended if missing)
        :param scenario: scenario name, or None when no scenario is selected
        :param Original_gdb: name of the original GDB database in INPUT/DATABASE
        :param CaPa_Walloon: name of the Cadastre Walloon GPKG in INPUT/DATABASE
        :param PICC_Walloon: name of the PICC Walloon GDB in INPUT/DATABASE
        :param CE_IGN_top10v: relative path of the IGN top10v shapefile in INPUT/DATABASE
        :param EPU_Stations: EPU stations shapefile name in INPUT/EPU_STATIONS_NEW
        :param Ponderation_csv: ponderation CSV name in INPUT/CSVs
        :param Vuln_csv: vulnerability CSV name in INPUT/CSVs
        :param Intermediate_csv: intermediate-matrix CSV name in INPUT/CSVs
        """

        # Remember the caller's working directory so restore_dir() can go back to it
        self.old_dir:Path = Path(os.getcwd())

        self.main_dir:Path = Path(main_dir)

        # If it is a string, concatenate it with the current directory
        if not self.main_dir.is_absolute():
            self.main_dir = Path(os.getcwd()) / self.main_dir

        self._study_area = str(Study_area)

        if Study_area is not None:
            # Normalize: the stored study-area name always carries the '.shp' suffix
            if not str(self._study_area).endswith('.shp'):
                self._study_area += '.shp'

        self._scenario = scenario
        self._original_gdb = Original_gdb
        self._capa_walloon = CaPa_Walloon
        self._picc_walloon = PICC_Walloon
        self._ce_ign_top10v = CE_IGN_top10v

        # INPUT directory tree
        self.IN_DIR = self.main_dir / "INPUT"
        self.IN_CH_VULN = self.IN_DIR / "CHANGE_VULNE"
        self.IN_CH_SA_SC = self.IN_CH_VULN / str(Study_area).rsplit('.', 1)[0] / str(scenario)
        self.IN_DATABASE = self.IN_DIR / "DATABASE"
        self.IN_STUDY_AREA = self.IN_DIR / "STUDY_AREA"
        self.IN_CSV = self.IN_DIR / "CSVs"
        self.IN_WATER_DEPTH = self.IN_DIR / "WATER_DEPTH"
        self.IN_EPU_STATIONS= self.IN_DIR / "EPU_STATIONS_NEW"

        # Input data files
        self.ORIGINAL_GDB = self.IN_DATABASE / self._original_gdb
        self.CAPA_WALLOON = self.IN_DATABASE / self._capa_walloon
        self.PICC_WALLOON = self.IN_DATABASE / self._picc_walloon
        self.CE_IGN_TOP10V = self.IN_DATABASE / self._ce_ign_top10v
        self.EPU_STATIONS = self.IN_EPU_STATIONS / EPU_Stations

        self.VULNERABILITY_CSV = self.IN_CSV / Vuln_csv
        self.POINTS_CSV = self.IN_CSV / Intermediate_csv
        self.PONDERATION_CSV = self.IN_CSV / Ponderation_csv

        # Grouped by file format; _ALLS is what check_files() verifies
        self._CSVs = [self.VULNERABILITY_CSV, self.POINTS_CSV]
        self._GPKGs= [self.CAPA_WALLOON, self.PICC_WALLOON]
        self._GDBs = [self.ORIGINAL_GDB]
        self._SHPs = [self.CE_IGN_TOP10V, self.EPU_STATIONS]
        self._ALLS = self._CSVs + self._GPKGs + self._GDBs + self._SHPs

        self.TMP_DIR = self.main_dir / "TEMP"

        self.OUT_DIR = self.main_dir / "OUTPUT"

        # Filled by get_operand() while classifying the vector layers
        self.points2polys = []
        self.lines2polys = []

        self.create_paths()
        self.create_paths_scenario()
+
222
    def create_paths(self):
        """ Create the paths for the directories and files """

        # Reset the layer-classification accumulators (filled by get_operand)
        self.points2polys = []
        self.lines2polys = []

        if self._study_area is not None:

            self.Study_area:Path = Path(self._study_area)

            # TEMP tree for this study area
            self.TMP_STUDYAREA = self.TMP_DIR / self.Study_area.stem
            self.TMP_DATABASE = self.TMP_STUDYAREA / "DATABASES"

            self.TMP_CLIPGDB = self.TMP_DATABASE / "CLIP_GDB"
            self.TMP_CADASTER = self.TMP_DATABASE / "CLIP_CADASTER"
            self.TMP_PICC = self.TMP_DATABASE / "CLIP_PICC"
            self.TMP_IGNCE = self.TMP_DATABASE / "CLIP_IGN_CE"
            self.TMP_WMODIF = self.TMP_DATABASE / "WITH_MODIF"
            self.TMP_CODEVULNE = self.TMP_DATABASE / "CODE_VULNE"

            self.TMP_VULN_DIR = self.TMP_STUDYAREA / "VULNERABILITY"
            self.TMP_SA_SC = self.TMP_VULN_DIR / str(self._scenario)


            self.TMP_RASTERS = self.TMP_VULN_DIR / "RASTERS"
            self.TMP_RASTERS_CODE = self.TMP_RASTERS / "Code"
            self.TMP_RASTERS_VULNE = self.TMP_RASTERS / "Vulne"

            self.OUT_STUDY_AREA = self.OUT_DIR / self.Study_area.stem

            # Study-area level files
            self.SA = self.IN_STUDY_AREA / self.Study_area
            self.SA_MASKED_RIVER = self.TMP_IGNCE / "CE_IGN_TOP10V.tiff"
            self.SA_VULN = self.TMP_VULN_DIR / "Vulnerability.tiff"
            self.SA_CODE = self.TMP_VULN_DIR / "Vulnerability_Code.tiff"

        else:
            # No study area: clear everything, including the scenario
            # NOTE(review): TMP_CLIPGDB and TMP_SA_SC are not reset to None
            # here -- confirm whether that is intentional.
            self.Study_area = None
            self._scenario = None

            self.TMP_STUDYAREA = None
            self.TMP_DATABASE = None
            self.TMP_CADASTER = None
            self.TMP_PICC = None
            self.TMP_IGNCE = None
            self.TMP_WMODIF = None
            self.TMP_CODEVULNE = None
            self.TMP_VULN_DIR = None
            self.TMP_RASTERS = None
            self.TMP_RASTERS_CODE = None
            self.TMP_RASTERS_VULNE = None

            self.OUT_STUDY_AREA = None

            self.SA = None
            self.SA_MASKED_RIVER = None

            self.SA_VULN = None
            self.SA_CODE = None

        # Propagate to the scenario-dependent paths, then make sure the
        # directory trees exist (TEMP/OUTPUT are created, INPUT is only checked)
        self.create_paths_scenario()

        self.check_inputs()
        self.check_temporary()
        self.check_outputs()
+
287
+ def create_paths_scenario(self):
288
+
289
+ if self._scenario is not None:
290
+
291
+ self.scenario:str = str(self._scenario)
292
+
293
+ self.IN_SCEN_DIR = self.IN_WATER_DEPTH / self.SA.stem / self.scenario
294
+ self.IN_SA_Base = self.IN_WATER_DEPTH / self.SA.stem / "Scenario_baseline"
295
+ self.IN_SA_INTERP = self.IN_SCEN_DIR / "INTERP_WD"
296
+ self.IN_SA_EXTRACTED = self.IN_SCEN_DIR / "EXTRACTED_LAST_STEP_WD"
297
+ self.IN_SA_DEM = self.IN_SCEN_DIR / "DEM_FILES"
298
+
299
+ self.IN_RM_BUILD_DIR = self.IN_SCEN_DIR / "REMOVED_BUILDINGS"
300
+
301
+ self.TMP_SCEN_DIR = self.TMP_VULN_DIR / self.scenario
302
+ self.TMP_RM_BUILD_DIR = self.TMP_SCEN_DIR / "REMOVED_BUILDINGS"
303
+ self.TMP_QFILES = self.TMP_SCEN_DIR / "Q_FILES"
304
+
305
+ self.TMP_VULN = self.TMP_SCEN_DIR / "Vulnerability.tiff"
306
+ self.TMP_CODE = self.TMP_SCEN_DIR / "Vulnerability_Code.tiff"
307
+
308
+ self.OUT_SCEN_DIR = self.OUT_STUDY_AREA / self.scenario
309
+
310
+ self.OUT_VULN = self.OUT_SCEN_DIR / "Vulnerability.tiff"
311
+ self.OUT_VULN_VRT = self.OUT_SCEN_DIR / "__vuln_assembly.vrt"
312
+ self.OUT_VULN_S = self.OUT_SCEN_DIR / "Vulnerability_scenarios" #no .tif because wolf_vrt add it itself (see create_vrtIfExists below)
313
+ self.OUT_VULN_Stif = self.OUT_SCEN_DIR / "Vulnerability_scenarios.tif"
314
+ self.OUT_CODE = self.OUT_SCEN_DIR / "Vulnerability_Code.tiff"
315
+ self.OUT_MASKED_RIVER = self.OUT_SCEN_DIR / "Masked_River_extent.tiff"
316
+ self.OUT_ACCEPT = self.OUT_SCEN_DIR / "Acceptability.tiff"
317
+ self.OUT_ACCEPT_100M = self.OUT_SCEN_DIR / "Acceptability_100m.tiff"
318
+ self.OUT_ACCEPT_Stif = self.OUT_SCEN_DIR / "Acceptability_scenarios.tiff"
319
+ self.OUT_ACCEPT_100M_Stif = self.OUT_SCEN_DIR / "Acceptability_scenarios_100m.tiff"
320
+ else:
321
+ self.scenario = None
322
+
323
+ self.IN_SCEN_DIR = None
324
+ self.IN_RM_BUILD_DIR = None
325
+
326
+ self.TMP_SCEN_DIR = None
327
+ self.TMP_RM_BUILD_DIR = None
328
+ self.TMP_QFILES = None
329
+
330
+ self.TMP_VULN = None
331
+ self.TMP_CODE = None
332
+
333
+ self.OUT_SCEN_DIR = None
334
+ self.OUT_VULN = None
335
+ self.OUT_CODE = None
336
+ self.OUT_MASKED_RIVER = None
337
+ self.OUT_ACCEPT = None
338
+ self.OUT_ACCEPT_100M = None
339
+
340
    @property
    def is_valid_inputs(self) -> bool:
        """ True if all mandatory INPUT directories/files exist (see check_inputs). """
        return self.check_inputs()

    @property
    def is_valid_study_area(self) -> bool:
        """ True if the study-area shapefile exists. """
        return self.SA.exists()

    @property
    def is_valid_vulnerability_csv(self) -> bool:
        """ True if the vulnerability CSV exists. """
        return self.VULNERABILITY_CSV.exists()

    @property
    def is_valid_points_csv(self) -> bool:
        """ True if the intermediate (points) CSV exists. """
        return self.POINTS_CSV.exists()

    @property
    def is_valid_ponderation_csv(self) -> bool:
        """ True if the ponderation CSV exists. """
        return self.PONDERATION_CSV.exists()
+
360
+ def check_files(self) -> str:
361
+ """ Check the files in the directories """
362
+
363
+ files = ""
364
+ for a in self._ALLS:
365
+ if not a.exists():
366
+ files += str(a) + "\n"
367
+
368
+ return files
369
+
370
    def change_studyarea(self, Study_area:str = None) -> None:
        """
        Select another study area (or clear the selection) and rebuild paths.

        :param Study_area: file name (with suffix) of a shapefile present in
            INPUT/STUDY_AREA, or None to clear both the study area and the scenario.
        """

        if Study_area is None:
            self._study_area = None
            self._scenario = None
        else:
            if Study_area in self.get_list_studyareas(with_suffix=True):
                # NOTE(review): stored as a Path here while __init__ stores a str;
                # create_paths() accepts both -- confirm before unifying.
                self._study_area = Path(Study_area)
            else:
                logging.error("The study area does not exist in the study area directory")

        self.create_paths()
+
383
    def change_scenario(self, scenario:str) -> None:
        """
        Select another scenario for the current study area and rebuild the
        scenario-dependent paths (TEMP/OUTPUT directories are created).

        :param scenario: name of an available scenario (see get_list_scenarios)
        """

        if scenario in self.get_list_scenarios():
            self._scenario = scenario
            self.create_paths_scenario()
            self.check_temporary()
            self.check_outputs()
        else:
            logging.error("The scenario does not exist in the water depth directory")
+
393
    def get_files_in_rm_buildings(self) -> list[Path]:
        """ Vector files (EXTENT suffix) in INPUT .../REMOVED_BUILDINGS. """
        return [Path(a) for a in glob.glob(str(self.IN_RM_BUILD_DIR / ("*"+ EXTENT)))]

    def get_files_in_CHANGE_VULNE(self) -> list[Path]:
        """ .tiff files directly in INPUT/CHANGE_VULNE. """
        return [Path(a) for a in glob.glob(str(self.IN_CH_VULN / "*.tiff"))]

    def get_files_in_rasters_vulne(self) -> list[Path]:
        """ Per-layer vulnerability rasters in TEMP .../RASTERS/Vulne. """
        return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_VULNE / "*.tiff"))]

    def get_layers_in_gdb(self) -> list[str]:
        """ Layer names of the original GDB database (list_layers -> (name, type)). """
        return [a[0] for a in list_layers(str(self.ORIGINAL_GDB))]

    def get_layer_types_in_gdb(self) -> list[str]:
        """ Geometry types of the layers of the original GDB database. """
        return [a[1] for a in list_layers(str(self.ORIGINAL_GDB))]

    def get_layers_in_clipgdb(self) -> list[str]:
        """ Layer names (file stems) clipped into TEMP .../CLIP_GDB. """
        return [Path(a).stem for a in glob.glob(str(self.TMP_CLIPGDB / ("*"+ EXTENT)))]

    def get_layers_in_wmodif(self) -> list[str]:
        """ Layer names (file stems) in TEMP .../WITH_MODIF. """
        return [Path(a).stem for a in glob.glob(str(self.TMP_WMODIF / ("*"+ EXTENT)))]

    def get_layers_in_codevulne(self) -> list[str]:
        """ Layer names (file stems) in TEMP .../CODE_VULNE. """
        return [Path(a).stem for a in glob.glob(str(self.TMP_CODEVULNE / ("*"+ EXTENT)))]

    def get_files_in_rasters_code(self) -> list[Path]:
        """ Per-layer code rasters in TEMP .../RASTERS/Code. """
        return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_CODE / "*.tiff"))]

    def get_q_files(self) -> list[Path]:
        """ Discharge (Q) .tif files in TEMP .../Q_FILES. """
        return [Path(a) for a in glob.glob(str(self.TMP_QFILES / "*.tif"))]
+
423
+ def get_list_scenarios(self) -> list[str]:
424
+
425
+ list_sc = [Path(a).stem for a in glob.glob(str(self.IN_WATER_DEPTH / self.SA.stem / "Scenario*"))]
426
+ return list_sc
427
+
428
+ def get_list_studyareas(self, with_suffix:bool = False) -> list[str]:
429
+
430
+ if with_suffix:
431
+ return [Path(a).name for a in glob.glob(str(self.IN_STUDY_AREA / "*.shp"))]
432
+ else:
433
+ return [Path(a).stem for a in glob.glob(str(self.IN_STUDY_AREA / "*.shp"))]
434
+
435
+ def get_sims_files_for_scenario(self) -> list[Path]:
436
+ files = [] #to avoid NoneType
437
+ if self.IN_SA_INTERP.exists() :
438
+ files = [Path(a) for a in glob.glob(str(self.IN_SA_INTERP / "*.tif"))]
439
+ else :
440
+ logging.error("No _baseline")
441
+ return files
442
+
443
    def get_sims_files_for_baseline(self) -> list[Path]:
        """
        Return the baseline water-depth rasters (*.tif).

        NOTE(review): this reads self.IN_SA_INTERP -- the *current scenario*
        INTERP_WD directory -- not self.IN_SA_Base ("Scenario_baseline");
        confirm whether IN_SA_Base was intended here.
        """
        files = [] #to avoid NoneType
        if self.IN_SA_INTERP.exists() :
            logging.info("Getting the _baseline WATER_DEPTHS files.")
            track = Path(str(self.IN_SA_INTERP / "*.tif"))
            files = [Path(a) for a in glob.glob(str(track))]
        else :
            logging.error("No _baseline WATER_DEPTHS files")

        return files
+
454
+ def get_sim_file_for_return_period(self, return_period:int) -> Path:
455
+
456
+ sims = self.get_sims_files_for_scenario()
457
+
458
+ if len(sims)==0:
459
+ logging.info("No simulations found") #no error, need to act if so. See accept manager flow chart
460
+ return None
461
+
462
+ if "_h.tif" in sims[0].name:
463
+ for cursim in sims:
464
+ if cursim.stem.find("_T{}_".format(return_period)) != -1:
465
+ return cursim
466
+ else:
467
+ for cursim in sims:
468
+ if cursim.stem.find("T{}".format(return_period)) != -1:
469
+ return cursim
470
+
471
+ return None
472
+
473
    def get_types_in_file(self, file:str) -> list[str]:
        """ Get the types of the geometries in the Shape file """

        # list_layers returns (name, geometry_type) tuples; keep the types only
        return [a[1] for a in list_layers(str(file))]
+
478
+ def is_type_unique(self, file:str) -> bool:
479
+ """ Check if the file contains only one type of geometry """
480
+
481
+ types = self.get_types_in_file(file)
482
+ return len(types) == 1
483
+
484
+ def is_polygons(self, set2test:set) -> bool:
485
+ """ Check if the set contains only polygons """
486
+
487
+ set2test = list(set2test)
488
+ firstone = set2test[0]
489
+ if 'Polygon' in firstone:
490
+ for curtype in set2test:
491
+ if 'Polygon' not in curtype:
492
+ return False
493
+ return True
494
+ else:
495
+ return False
496
+
497
+ def is_same_types(self, file:str) -> tuple[bool, str]:
498
+ """ Check if the file contains only the same type of geometry """
499
+
500
+ types = self.get_types_in_file(file)
501
+
502
+ if len(types) == 1:
503
+ if 'Point' in types[0]:
504
+ return True, 'Point'
505
+ elif 'Polygon' in types[0]:
506
+ return True, 'Polygon'
507
+ elif 'LineString' in types[0]:
508
+ return True, 'LineString'
509
+ else:
510
+ raise ValueError(f"The type of geometry {types[0]} is not recognized")
511
+ else:
512
+ firstone = types[0]
513
+ if 'Point' in firstone:
514
+ for curtype in types:
515
+ if 'Point' not in curtype:
516
+ return False, None
517
+ return True, 'Point'
518
+
519
+ elif 'Polygon' in firstone:
520
+ for curtype in types:
521
+ if 'Polygon' not in curtype:
522
+ return False, None
523
+
524
+ return True, 'Polygon'
525
+
526
+ elif 'LineString' in firstone:
527
+ for curtype in types:
528
+ if 'LineString' not in curtype:
529
+ return False, None
530
+
531
+ return True, 'LineString'
532
+ else:
533
+ raise ValueError(f"The type of geometry {firstone} is not recognized")
534
+
535
+
536
+ def get_return_periods(self) -> list[int]:
537
+ """
538
+ Get the return periods from the simulations
539
+
540
+ :return list[int]: the **sorted list** of return periods
541
+ """
542
+
543
+ # List files in directory
544
+ sims = self.get_sims_files_for_scenario()
545
+
546
+ if len(sims)==0:
547
+ logging.info("No simulations found")#no error, need to act if so. See accept manager flow chart
548
+ return []
549
+
550
+ # Two cases:
551
+ # - Return periods are named as T2.tif, T5.tif, T10.tif, ...
552
+ # - Return periods are named as *_T2_h.tif, *_T5_h.tif, *_T10_h.tif, ...
553
+
554
+ if "_h.tif" in sims[0].name:
555
+
556
+ # Searching for the position of the return period in the name
557
+ idx_T = [cursim.name.find("_T") for cursim in sims]
558
+ idx_h = [cursim.name.find("_h.tif") for cursim in sims]
559
+
560
+ assert len(idx_T) == len(idx_h), "The number of T and h are not the same"
561
+ for curT, curh in zip(idx_T, idx_h):
562
+ assert curT != -1, "The T is not found"
563
+ assert curh != -1, "The h is not found"
564
+ assert curh > curT, "The h is before the T"
565
+
566
+ # Create the list of return periods -- only the numeric part
567
+ sims = [int(cursim.name[idx_T[i]+2:idx_h[i]]) for i, cursim in enumerate(sims)]
568
+ else:
569
+ # searching for the position of the return period in the name
570
+ idx_T = [cursim.name.find("T") for cursim in sims]
571
+ idx_h = [cursim.name.find(".tif") for cursim in sims]
572
+
573
+ assert len(idx_T) == len(idx_h), "The number of T and h are not the same"
574
+ for curT, curh in zip(idx_T, idx_h):
575
+ assert curT != -1, "The T is not found"
576
+ assert curh != -1, "The h is not found"
577
+ assert curh > curT, "The h is before the T"
578
+
579
+ # create the list of return periods -- only the numeric part
580
+ sims = [int(cursim.name[idx_T[i]+1:idx_h[i]]) for i, cursim in enumerate(sims)]
581
+
582
+ return sorted(sims)
583
+
584
+ def get_ponderations(self) -> pd.DataFrame:
585
+ """ Get the ponderation data from available simulations """
586
+
587
+ rt = self.get_return_periods()
588
+
589
+ if len(rt)==0:
590
+ logging.info("No simulations found")
591
+ return None
592
+
593
+ if len(rt)<2:
594
+ logging.info("Need for more simulations")
595
+ return None
596
+
597
+ else :
598
+ pond = []
599
+
600
+ pond.append(1./float(rt[0]) + (1./float(rt[0]) - 1./float(rt[1]))/2.)
601
+ for i in range(1, len(rt)-1):
602
+ # Full formula
603
+ # pond.append((1./float(rt[i-1]) - 1./float(rt[i]))/2. + (1./float(rt[i]) - 1./float(rt[i+1]))/2.)
604
+
605
+ # More compact formula
606
+ pond.append((1./float(rt[i-1]) - 1./float(rt[i+1]))/2.)
607
+
608
+ pond.append(1./float(rt[-1]) + (1./float(rt[-2]) - 1./float(rt[-1]))/2.)
609
+
610
+ return pd.DataFrame(pond, columns=["Ponderation"], index=rt)
611
+
612
    def get_filepath_for_return_period(self, return_period:int) -> Path:
        """ Alias of get_sim_file_for_return_period. """

        return self.get_sim_file_for_return_period(return_period)
+
616
    def change_dir(self) -> None:
        """ chdir into the main data directory (restore_dir() goes back). """
        os.chdir(self.main_dir)
        logging.info("Current directory: %s", os.getcwd())

    def restore_dir(self) -> None:
        """ chdir back to the directory recorded at construction time. """
        os.chdir(self.old_dir)
        logging.info("Current directory: %s", os.getcwd())
+
624
    def check_inputs(self) -> bool:
        """
        Check if the input directories exist.

        Inputs can not be created automatically. The user must provide them.

        :return: True when every expected INPUT directory/file is present
        """

        err = False
        if not self.IN_DATABASE.exists():
            logging.error("INPUT : The database directory does not exist")
            err = True

        if not self.IN_STUDY_AREA.exists():
            logging.error("INPUT : The study area directory does not exist")
            err = True

        if not self.IN_CSV.exists():
            logging.error("INPUT : The CSV directory does not exist")
            err = True

        if not self.IN_WATER_DEPTH.exists():
            logging.error("INPUT : The water depth directory does not exist")
            err = True

        if not self.IN_EPU_STATIONS.exists():
            logging.error("INPUT : The EPU stations directory does not exist")
            err = True

        # The study-area shapefile itself is only checked when one is selected
        if self.Study_area is not None:
            if not self.SA.exists():
                logging.error("INPUT : The study area file does not exist")
                err = True

        if not self.ORIGINAL_GDB.exists():
            logging.error("INPUT : The original gdb file does not exist - Please pull it from the SPW-ARNE")
            err = True

        if not self.CAPA_WALLOON.exists():
            logging.error("INPUT : The Cadastre Walloon file does not exist - Please pull it from the SPW")
            err = True

        if not self.PICC_WALLOON.exists():
            logging.error("INPUT : The PICC Walloon file does not exist - Please pull it from the SPW website")
            err = True

        if not self.CE_IGN_TOP10V.exists():
            logging.error("INPUT : The CE IGN top10v file does not exist - Please pull it from the IGN")
            err = True

        if self.scenario is None:
            logging.debug("The scenario has not been defined")
        else:
            if not self.IN_SCEN_DIR.exists():
                logging.error("The wd scenario directory does not exist")
                err = True

        return not err
+
682
    def check_temporary(self) -> bool:
        """
        Check if the temporary directories exist.

        If not, create them.

        :return: always True (directories are created on demand)
        """

        self.TMP_DIR.mkdir(parents=True, exist_ok=True)

        if self.Study_area is not None:
            self.TMP_STUDYAREA.mkdir(parents=True, exist_ok=True)
            self.TMP_DATABASE.mkdir(parents=True, exist_ok=True)
            self.TMP_CLIPGDB.mkdir(parents=True, exist_ok=True)
            self.TMP_CADASTER.mkdir(parents=True, exist_ok=True)
            self.TMP_WMODIF.mkdir(parents=True, exist_ok=True)
            self.TMP_CODEVULNE.mkdir(parents=True, exist_ok=True)
            self.TMP_PICC.mkdir(parents=True, exist_ok=True)
            self.TMP_IGNCE.mkdir(parents=True, exist_ok=True)
            self.TMP_VULN_DIR.mkdir(parents=True, exist_ok=True)
            self.TMP_RASTERS.mkdir(parents=True, exist_ok=True)
            self.TMP_RASTERS_CODE.mkdir(parents=True, exist_ok=True)
            self.TMP_RASTERS_VULNE.mkdir(parents=True, exist_ok=True)

        # NOTE(review): TMP_SA_SC (defined in create_paths) is never created
        # here -- confirm whether that is intentional.
        if self.scenario is not None:
            self.TMP_SCEN_DIR.mkdir(parents=True, exist_ok=True)
            self.TMP_RM_BUILD_DIR.mkdir(parents=True, exist_ok=True)
            self.TMP_QFILES.mkdir(parents=True, exist_ok=True)

        return True
+
712
+ def check_outputs(self) -> bool:
713
+ """
714
+ Check if the output directories exist.
715
+
716
+ If not, create them.
717
+ """
718
+
719
+ self.OUT_DIR.mkdir(parents=True, exist_ok=True)
720
+
721
+ if self.Study_area is not None:
722
+ self.OUT_STUDY_AREA.mkdir(parents=True, exist_ok=True)
723
+
724
+ if self.scenario is not None:
725
+ self.OUT_SCEN_DIR.mkdir(parents=True, exist_ok=True)
726
+
727
+ return True
728
+
729
+ def check_before_database_creation(self) -> bool:
730
+ """ Check if the necessary files are present before the database creation"""
731
+
732
+ if not self.is_valid_inputs:
733
+ logging.error("Theere are missing input directories - Please check carefully the input directories and the logs")
734
+ return False
735
+
736
+ if not self.is_valid_study_area:
737
+ logging.error("The study area file does not exist - Please create it")
738
+ return False
739
+
740
+ if not self.is_valid_vulnerability_csv:
741
+ logging.error("The vulnerability CSV file does not exist - Please create it")
742
+ return False
743
+
744
+ return True
745
+
746
+ def check_before_rasterize(self) -> bool:
747
+
748
+ if not self.TMP_CODEVULNE.exists():
749
+ logging.error("The final database with vulnerability levels does not exist")
750
+ return False
751
+
752
+ if not self.TMP_WMODIF.exists():
753
+ logging.error("The vector data with modifications does not exist")
754
+ return False
755
+
756
+ return True
757
+
758
+ def check_before_vulnerability(self) -> bool:
759
+
760
+ if self.SA is None:
761
+ logging.error("The area of interest does not exist")
762
+ return False
763
+
764
+ if self.IN_WATER_DEPTH is None:
765
+ logging.error("The water depth directory does not exist")
766
+ return False
767
+
768
+ if self.IN_SCEN_DIR is None:
769
+ logging.error("The wd scenario directory does not exist in the water depth directory")
770
+ return False
771
+
772
+ if self.SA_MASKED_RIVER is None:
773
+ logging.error("The IGN raster does not exist")
774
+ return False
775
+
776
+ return True
777
+
778
+ def check_vuln_code_sa(self) -> bool:
779
+
780
+ if not self.SA_VULN.exists():#SA_VULN
781
+ logging.error("The vulnerability raster file does not exist")
782
+ return False
783
+
784
+ if not self.SA_CODE.exists():
785
+ logging.error("The vulnerability code raster file does not exist")
786
+ return False
787
+
788
+ return True
789
+
790
+ def check_vuln_code_scenario(self) -> bool:
791
+
792
+ if not self.TMP_VULN.exists():
793
+ logging.error("The vulnerability raster file does not exist")
794
+ return False
795
+
796
+ if not self.TMP_CODE.exists():
797
+ logging.error("The vulnerability code raster file does not exist")
798
+ return False
799
+
800
+ return True
801
+
802
+ def compare_original_clipped_layers(self) -> str:
803
+ """ Compare the original layers with the clipped ones """
804
+
805
+ layers = self.get_layers_in_gdb()
806
+ layers_clip = self.get_layers_in_clipgdb()
807
+
808
+ ret = 'These layers have not been clipped:\n'
809
+ for layer in layers:
810
+ if layer not in layers_clip:
811
+ ret += " - {}\n".format(layer)
812
+
813
+ ret += '\nThese layers have been clipped but are not present in the GDB:\n'
814
+ for layer in layers_clip:
815
+ if layer not in layers:
816
+ ret += " - {}\n".format(layer)
817
+
818
+ ret+='\n'
819
+
820
+ return ret
821
+
822
+ def compare_clipped_raster_layers(self) -> str:
823
+ """ Compare the clipped layers with the rasterized ones """
824
+
825
+ layers = self.get_layers_in_clipgdb()
826
+ layers_rast = self.get_layers_in_codevulne()
827
+
828
+ ret = 'These layers {} have not been rasterized:\n'
829
+ for layer in layers:
830
+ if layer not in layers_rast:
831
+ ret += " - {}\n".format(layer)
832
+
833
+ ret += '\nThese layers have been rasterized but are not in the orginal GDB:\n'
834
+ for layer in layers_rast:
835
+ if layer not in layers:
836
+ ret += " - {}\n".format(layer)
837
+
838
+ ret+='\n'
839
+
840
+ return ret
841
+
842
    def get_operand(self, file:str) -> Modif_Type:
        """ Get the operand based on the layer name

        Classify the layer into a Modif_Type telling data_modification()
        how to transform it. Side effect: appends the layer name to
        self.points2polys (point layers) or self.lines2polys (road axes).

        :param file: path to the vector file
        :raises ValueError: if the file mixes several geometry types
        """
        # WALOUS land-use layers are replaced by the PICC buildings they intersect
        LAYERS_WALOUS = ["WALOUS_2018_LB72_112",
                         "WALOUS_2018_LB72_31",
                         "WALOUS_2018_LB72_32",
                         "WALOUS_2018_LB72_331",
                         "WALOUS_2018_LB72_332",
                         "WALOUS_2018_LB72_333",
                         "WALOUS_2018_LB72_34"]

        ret, curtype = self.is_same_types(file)
        layer = Path(file).stem

        if not ret:
            raise ValueError("The layer contains different types of geometries")

        if layer in LAYERS_WALOUS:
            return Modif_Type.WALOUS

        elif curtype=="Point":

            self.points2polys.append(layer)

            # Two point layers have dedicated polygonization sources
            if layer =="BDREF_DGO3_PASH__SCHEMA_STATIONS_EPU":
                return Modif_Type.POINT2POLY_EPURATION
            elif layer =="INFRASIG_SOINS_SANTE__ETAB_AINES":
                return Modif_Type.POINT2POLY_PICC
            else:
                return Modif_Type.POINT2POLY_CAPAPICC

        elif layer =="Hab_2018_CABU":
            return Modif_Type.INHABITED

        elif layer =="INFRASIG_ROUTE_RES_ROUTIER_TE_AXES":

            self.lines2polys.append(layer)

            return Modif_Type.ROAD

        else:
            # Default: copied as-is
            return Modif_Type.COPY
+
884
    def check_origin_shape(self) -> "list[Path] | bool":
        """ Check that code/vulnerability rasters exist, match in number, and
        share geotransform/projection/size with a reference raster.

        The first code raster is taken as reference; the masked-river raster
        is included in the comparison.

        :return: False when a prerequisite is missing; otherwise the list of
            rasters that differ from the reference (empty list == all match).
        """

        code = self.get_files_in_rasters_code()
        vuln = self.get_files_in_rasters_vulne()

        if len(code) == 0:
            logging.error("The code rasters do not exist")
            return False

        if len(vuln) == 0:
            logging.error("The vulnerability rasters do not exist")
            return False

        if len(code) != len(vuln):
            logging.error("The number of code and vulnerability rasters do not match")
            return False

        # we take a reference raster
        ref = gdal.Open(str(code[0]))
        band_ref = ref.GetRasterBand(1)
        proj_ref = ref.GetProjection()
        geo_ref = ref.GetGeoTransform()
        col_ref, row_ref = band_ref.XSize, band_ref.YSize

        # we compare the reference raster with the others
        diff = []
        for cur in code + vuln + [self.SA_MASKED_RIVER]:
            cur_ = gdal.Open(str(cur))
            band_cur = cur_.GetRasterBand(1)
            proj_cur = cur_.GetProjection()
            geo_cur = cur_.GetGeoTransform()
            col_cur, row_cur = band_cur.XSize, band_cur.YSize

            if geo_ref != geo_cur:
                logging.error("The geotransforms do not match {}".format(cur))
                diff.append(cur)

            if proj_ref != proj_cur:
                logging.error("The projections do not match {}".format(cur))
                diff.append(cur)

            if col_ref != col_cur or row_ref != row_cur:
                logging.error("The dimensions do not match {}".format(cur))
                diff.append(cur)

        return diff
+
931
+
932
+
933
+
934
+ # Assembly (FR : agglomération)
935
+ # -----------------------------
936
+ """Basically the same operations as in the config manager to agglomerate several rasters
937
+ The class Config_Manager_2D_GPU is called, however some functions were rewritten to allow
938
+ the search of a more specific word ('vuln', and not 'bath', 'mann', or 'inf').
939
+ """
940
+
941
+ def tree_vuln_tif(folder_path):
942
+ """Find all .tiff files starting with 'vuln' in the directory and return paths"""
943
+ folder = Path(folder_path)
944
+ vuln_tiff_files = {file for file in folder.rglob("*.tiff") if file.name.startswith("vuln")}
945
+ vuln_tif_files = {file for file in folder.rglob("*.tif") if file.name.startswith("vuln")}
946
+
947
+ vuln_files = vuln_tiff_files.union(vuln_tif_files)
948
+
949
+ tiff_trees = []
950
+ if len(vuln_files) !=0:
951
+ for tiff in vuln_files:
952
+ curtree = [tiff]
953
+ while tiff.parent != folder:
954
+ tiff = tiff.parent
955
+ curtree.insert(0, tiff)
956
+ tiff_trees.append(curtree)
957
+ return tiff_trees
958
+
959
    def select_vuln_tif(self, path_baseline: Path, folder_path: Path) -> list[Path]:
        """
        Collects and appends all .tiff files starting with 'vuln' from folder_path into a list.

        :param path_baseline: baseline vulnerability raster, always placed first
        :param folder_path: directory searched recursively (see tree_vuln_tif)
        :return: list of POSIX path strings (despite the list[Path] annotation)
        """
        files = []
        # first element must be vulnerability_baseline
        files.append(path_baseline.as_posix())
        tiff_trees = Accept_Manager.tree_vuln_tif(folder_path)

        # only the 'final' element of each tree, i.e. the file itself
        for tree in tiff_trees:
            files.append(tree[-1].as_posix())
        return files
+
973
    def check_nodata(self):
        """ Check nodata in a path

        Force the nodata value of every 'vuln*' raster of the scenario
        (except the baseline "Vulnerability.tiff") to 99999., rewriting the
        file in place when a change is needed.
        """

        list_tif = Accept_Manager.select_vuln_tif(self, self.OUT_VULN, self.IN_CH_SA_SC)
        for cur_lst in list_tif:
            if "Vulnerability.tiff" not in cur_lst:
                curarray:WolfArray = WolfArray(cur_lst)
                if curarray.nullvalue != 99999.:
                    curarray.nullvalue = 99999.
                    curarray.set_nullvalue_in_mask()
                    curarray.write_all()
                    logging.warning(_('nodata changed in favor of 99999. value for file {} !'.format(cur_lst)))
+
986
    def create_vrtIfExists(self):
        """ Create a vrt file from a path

        Assemble the baseline vulnerability raster and every 'vuln*' raster
        of the scenario into OUT_VULN_VRT (nodata values are normalized
        first).

        :return: True when a .vrt was created (more than one raster found)
        """
        logging.info(_('Checking nodata values...'))
        self.check_nodata()
        list_tif = Accept_Manager.select_vuln_tif(self, self.OUT_VULN, self.IN_CH_SA_SC)
        # vrt creation - assembly/agglomeration
        if len(list_tif)>1:
            logging.info(_('Creating .vrt from files (first based)...'))
            create_vrt_from_diverged_files_first_based(list_tif, self.OUT_VULN_VRT)
            return True
        else:
            return False
+
999
+
1000
    def translate_vrt2tif(self):
        """ Translate vrt from OUTPUT > ... > Scenario to tif saved in the same folder

        NOTE: this method shadows the module-level translate_vrt2tif helper
        that it delegates to.
        """
        if (self.OUT_VULN_VRT).exists():
            translate_vrt2tif(self.OUT_VULN_VRT, self.OUT_VULN_S)
+
1005
    def copy_tif_files(self, files: list[Path], destination_dir: Path) -> None:
        """ Copy a list of GeoTIFF files into destination_dir via GDAL CreateCopy.

        :param files: source rasters
        :param destination_dir: target directory, created if needed
        """
        destination_dir.mkdir(parents=True, exist_ok=True)

        for file in files:
            destination_file = destination_dir / file.name
            dataset = gdal.Open(str(file))
            if dataset is None:
                # Unreadable source: warn and keep going with the others
                logging.warning(f"Could not open {file} with GDAL.")
                continue
            gdal_driver = gdal.GetDriverByName('GTiff')
            gdal_driver.CreateCopy(str(destination_file), dataset, strict=0)

            # Dereference so GDAL flushes and closes the dataset
            dataset = None

        logging.info("All .tif files have been copied to the destination directory.")
+
1021
+
1022
    def clip_layer(layer:str,
                   file_path:str,
                   Study_Area:str,
                   output_dir:str):
        """
        Clip the input data based on the selected bassin and saves it
        in separate shape files.

        As shape file doen not support DateTime, the columns with DateTime
        are converted to string.

        NOTE: defined without `self` -- presumably so it can be dispatched to
        worker processes; confirm against the callers.

        :param layer: the layer name in the GDB file
        :param file_path: the path to the GDB file
        :param Study_Area: the path to the study area shapefile
        :param output_dir: the path to the output directory
        :return: a short status message (also logged)
        """

        layer = str(layer)
        file_path = str(file_path)
        Study_Area = str(Study_Area)
        output_dir = Path(output_dir)

        St_Area = gpd.read_file(Study_Area, engine=ENGINE)

        logging.info(layer)

        # The data is clipped during the reading
        # **It is more efficient than reading the entire data and then clipping it**
        #
        # FIXME: "read_dataframe" is used directly rather than "gpd.read_file" cause
        # the "layer" parameter is well transmitted to the "read_dataframe" function...
        df:gpd.GeoDataFrame = read_dataframe(file_path, layer=layer, mask=St_Area['geometry'][0])

        if len(df) == 0:
            logging.warning("No data found for layer " + str(layer))
            return "No data found for layer " + str(layer)

        # Force Lambert72 -> EPSG:31370
        df.to_crs("EPSG:31370", inplace=True)
        try:
            # Shapefiles cannot store timezone-aware datetimes -> stringify them
            date_columns = df.select_dtypes(include=['datetimetz']).columns.tolist()
            if len(date_columns)>0:
                df[date_columns] = df[date_columns].astype(str)

            df.to_file(str(output_dir / (layer+EXTENT)), mode='w', engine=ENGINE)
        except Exception as e:
            # Best-effort: log and fall through (status message is still returned)
            logging.error("Error while saving the clipped " + str(layer) + " to file")
            logging.error(e)
            pass

        logging.info("Saved the clipped " + str(layer) + " to file")
        return "Saved the clipped " +str(layer)+ " to file"
+
1075
+
1076
def _save_polygons(df_out:gpd.GeoDataFrame,
                   output_file:str,
                   manager:Accept_Manager,
                   op,
                   layer:str):
    """
    Save *df_out* to *output_file* if it is non-empty, asserting it holds polygons.

    :param df_out: the (possibly empty) GeoDataFrame to persist
    :param output_file: the destination path
    :param manager: the Accept_Manager providing the polygon-type check
    :param op: the Modif_Type operand (used only in the assertion message)
    :param layer: the layer name (used only for logging)
    """
    if df_out.shape[0] > 0:
        assert manager.is_polygons(set(df_out.geom_type)), f"The layer does not contains polygons - {op}"
        df_out.to_file(output_file, engine=ENGINE)
    else:
        logging.warning("No data found for layer " + str(layer))


def data_modification(layer:str,
                      manager:Accept_Manager,
                      picc:gpd.GeoDataFrame,
                      capa:gpd.GeoDataFrame):
    """
    Apply the data modifications as described in the LEMA report.

    The operand associated with the layer (see Accept_Manager.get_operand)
    selects how the clipped data in TMP_CLIPGDB is transformed before
    being written to TMP_WMODIF.

    FIXME : Add more doc in this docstring

    :param layer: the layer name in the database
    :param manager: the Accept_Manager resolving the input/output paths
    :param picc: the PICC Walloon file -- Preloaded
    :param capa: the Cadastre Walloon file -- Preloaded
    :return: a short status message (also logged)
    """

    layer = str(layer)

    input_file  = str(manager.TMP_CLIPGDB / (layer + EXTENT))
    output_file = str(manager.TMP_WMODIF  / (layer + EXTENT))

    # Read the previously clipped data
    df:gpd.GeoDataFrame = gpd.read_file(input_file, engine=ENGINE)
    nblines, _ = df.shape

    if nblines == 0:
        # Normally, phase 1 does not create empty files
        # But it is better to check... ;-)
        # FIX: the original message read "skipped<layer>due to..." (missing spaces)
        logging.error("Skipped " + str(layer) + " due to no polygon in the study area")
        return "Skipped " + str(layer) + " due to no polygon in the study area"

    op = manager.get_operand(input_file)

    if op == Modif_Type.WALOUS:
        # Walous layers changed to PICC buildings

        assert picc.crs == df.crs, "CRS of PICC and input data do not match"
        assert "GEOREF_ID" in picc.columns, "The PICC file does not contain the GEOREF_ID column"
        assert "NATUR_CODE" in picc.columns, "The PICC file does not contain the NATUR_CODE column"

        df1 = gpd.sjoin(picc, df, how="inner", predicate="intersects")

        # Retain the columns of the input data, plus the PICC identifiers
        cols = np.append(df.columns, ["GEOREF_ID", "NATUR_CODE"])
        df1 = df1[cols]

        _save_polygons(df1, output_file, manager, op, layer)

    elif op == Modif_Type.POINT2POLY_EPURATION:
        # Change BDREF based on AJOUT_PDET sent by Perrine (SPI)

        # The original layer is a point layer.
        # The EPU_STATIONS shape file (from SPI) is a polygon layer.
        df1 = gpd.read_file(str(manager.EPU_STATIONS), engine=ENGINE)

        assert df1.crs == df.crs, "CRS of AJOUT_PDET and input data do not match"

        # NOTE(review): the join is done against PICC, not against the input
        # point layer itself -- confirm this is intended.
        df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects")

        _save_polygons(df2, output_file, manager, op, layer)

    elif op == Modif_Type.POINT2POLY_PICC:
        # Select the polygons that contains the points
        # in the Cadaster and PICC files

        assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
        assert "CaPaKey" in capa.columns, "The CaPa file does not contain the CaPaKey column"

        df1 = gpd.sjoin(capa, df, how="inner", predicate="intersects")
        df1 = df1[np.append(df.columns, "CaPaKey")]
        df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects")

        _save_polygons(df2, output_file, manager, op, layer)

    elif op == Modif_Type.POINT2POLY_CAPAPICC:
        # Select the polygons that contains the points
        # in the Cadaster and PICC files

        assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
        assert picc.crs == df.crs, "CRS of PICC and input data do not match"

        # 'inner' join + 'intersects' predicate -- see
        # https://geopandas.org/en/stable/docs/reference/api/geopandas.sjoin.html
        df1 = gpd.sjoin(capa, df, how="inner", predicate="intersects")

        # Retain only the columns of the input data, but add the CaPaKey
        cols = np.append(df.columns, "CaPaKey")
        df1 = df1[cols]

        # Join df1 and the PICC DataFrame
        df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects")

        # Add only the GEOREF_ID and NATUR_CODE columns from PICC
        cols = np.append(cols, ["GEOREF_ID", "NATUR_CODE"])
        df2 = df2[cols]

        _save_polygons(df2, output_file, manager, op, layer)

    elif op == Modif_Type.INHABITED:
        # Select only the buildings with a number of inhabitants > 0
        _save_polygons(df[df["NbsHabTOT"] > 0], output_file, manager, op, layer)

    elif op == Modif_Type.ROAD:
        # Create a 6 m buffer around the roads (cap_style=2 -> flat caps)
        df1 = df.buffer(distance=6, cap_style=2)

        if df1.shape[0] > 0:
            # Buffers must be strict Polygons (stricter than manager.is_polygons)
            assert set(df1.geom_type) == {'Polygon'}, f"The layer does not contains polygons - {op}"
            df1.to_file(output_file, engine=ENGINE)
        else:
            logging.warning("No data found for layer " + str(layer))

    elif op == Modif_Type.COPY:
        # Just copy the data if it is polygons
        if manager.is_polygons(set(df.geom_type)):
            df.to_file(output_file, engine=ENGINE)
        else:
            logging.error("The layer does not contains polygons - " + str(layer))
    else:
        raise ValueError(f"The operand {op} is not recognized")

    return "Data modification done for " + str(layer)
1240
+
1241
def compute_vulnerability(manager:Accept_Manager):
    """
    Compute the vulnerability raster for the Study Area.

    Each pixel receives the maximum vulnerability level over all layer
    rasters covering it (1 is the lowest level and the initial value).

    This function **will not modify** the data by the removed buildings/scenarios.

    :param manager: the Accept_Manager object from the calling function
    """

    vuln_csv = Vulnerability_csv(manager.VULNERABILITY_CSV)

    rasters_vuln = manager.get_files_in_rasters_vulne()

    logging.info("Number of files: {}".format(len(rasters_vuln)))
    ds:gdal.Dataset = gdal.OpenEx(str(rasters_vuln[0]), gdal.GA_ReadOnly, open_options=["SPARSE_OK=TRUE"])

    tmp_vuln = ds.GetRasterBand(1)

    # REMARK: The XSize and YSize are the number of columns and rows
    col, row = tmp_vuln.XSize, tmp_vuln.YSize

    logging.info("Computing Vulnerability")

    # Start from the lowest vulnerability level (1) everywhere
    array_vuln = np.ones((row, col), dtype=np.int8)

    # JIT helpers to update the arrays
    # Faster than the classical Python loop or Numpy fancy indexing

    @nb.jit(nopython=True, boundscheck=False, inline='always')
    def update_arrays_jit(tmp_vuln, array_vuln):
        # Dense update: keep the element-wise maximum
        for i in range(tmp_vuln.shape[0]):
            for j in range(tmp_vuln.shape[1]):
                if tmp_vuln[i, j] >= array_vuln[i, j]:
                    array_vuln[i, j] = tmp_vuln[i, j]

        return array_vuln

    @nb.jit(nopython=True, boundscheck=False, inline='always')
    def update_arrays_jit_csr(row, col, locvuln, array_vuln):
        # Sparse (CSR) update: 'row' is the row pointer, 'col' the column indices
        for k in range(len(row)-1):
            i = k
            j1 = row[k]
            j2 = row[k+1]
            for j in col[j1:j2]:
                if locvuln >= array_vuln[i, j]:
                    array_vuln[i, j] = locvuln

        return array_vuln

    for i in tqdm(range(len(rasters_vuln)), 'Computing Vulnerability : '):
        logging.info("Computing layer {} / {}".format(i, len(rasters_vuln)))

        locvuln = vuln_csv.get_vulnerability_level(rasters_vuln[i].stem)

        if locvuln == 1:
            logging.info("No need to apply the matrice, the vulnerability is 1 which is the lower value")
            continue

        if rasters_vuln[i].with_suffix('.npz').exists():
            # A sparse (CSR) representation is available -- use it
            ij_npz = np.load(rasters_vuln[i].with_suffix('.npz'))
            ii = ij_npz['row']
            jj = ij_npz['col']
            update_arrays_jit_csr(ii, jj, locvuln, array_vuln)

        else:
            ds = gdal.OpenEx(str(rasters_vuln[i]), open_options=["SPARSE_OK=TRUE"])
            tmp_vuln = ds.GetRasterBand(1).ReadAsArray()
            update_arrays_jit(tmp_vuln, array_vuln)

    logging.info("Saving the computed vulnerability")
    dst_filename = str(manager.SA_VULN)
    y_pixels, x_pixels = array_vuln.shape  # number of pixels in y and x

    driver = gdal.GetDriverByName('GTiff')
    # FIX: the original call passed (bands, eType) swapped; it only worked
    # because gdal.GDT_Byte == 1. Documented order is bands=1, eType=GDT_Byte.
    dataset = driver.Create(dst_filename,
                            x_pixels, y_pixels,
                            1,
                            gdal.GDT_Byte,
                            options=["COMPRESS=LZW"])

    dataset.GetRasterBand(1).WriteArray(array_vuln.astype(np.int8))
    # Georeference the output from an existing raster.
    # NOTE(review): 'ds' is the last raster opened in the loop (or the first
    # one if only .npz files were used) -- all layer rasters are assumed to
    # share the same geotransform/projection.
    geotrans = ds.GetGeoTransform()
    proj = ds.GetProjection()
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None

    logging.info("Computed Vulnerability for the Study Area - Done")
1331
+
1332
def compute_code(manager:Accept_Manager):
    """
    Compute the code raster for the Study Area.

    This function **will not modify** the data by the removed buildings/scenarios.

    :param manager: the Accept_Manager object from the calling function
    """

    vuln_csv = Vulnerability_csv(manager.VULNERABILITY_CSV)
    rasters_code = manager.get_files_in_rasters_code()

    logging.info("Number of files: {}".format(len(rasters_code)))

    ds:gdal.Dataset = gdal.OpenEx(str(rasters_code[0]), gdal.GA_ReadOnly, open_options=["SPARSE_OK=TRUE"])

    tmp_code = ds.GetRasterBand(1)

    # REMARK: The XSize and YSize are the number of columns and rows
    col, row = tmp_code.XSize, tmp_code.YSize

    logging.info("Computing Code")

    array_code = np.ones((row, col), dtype=np.int8)

    # JIT helpers to update the arrays
    # Faster than the classical Python loop or Numpy fancy indexing

    @nb.jit(nopython=True, boundscheck=False, inline='always')
    def update_arrays_jit(tmp_code, loccode, array_code):
        # Dense update.
        # NOTE(review): the comparison uses the raster values (tmp_code) but
        # writes the layer-wide 'loccode' -- this differs from the sparse
        # branch, which compares 'loccode' itself; confirm which is intended.
        for i in range(tmp_code.shape[0]):
            for j in range(tmp_code.shape[1]):
                if tmp_code[i, j] >= array_code[i, j]:
                    array_code[i, j] = loccode

        return array_code

    @nb.jit(nopython=True, boundscheck=False, inline='always')
    def update_arrays_jit_csr(row, col, loccode, array_code):
        # Sparse (CSR) update: 'row' is the row pointer, 'col' the column indices
        for k in range(len(row)-1):
            i = k
            j1 = row[k]
            j2 = row[k+1]
            for j in col[j1:j2]:
                if loccode >= array_code[i, j]:
                    array_code[i, j] = loccode

        return array_code

    for i in tqdm(range(len(rasters_code)), 'Computing Code : '):
        logging.info("Computing layer {} / {}".format(i, len(rasters_code)))

        loccode = vuln_csv.get_vulnerability_code(rasters_code[i].stem.removesuffix("_CODE"))

        if rasters_code[i].with_suffix('.npz').exists():
            # A sparse (CSR) representation is available -- use it
            ij_npz = np.load(rasters_code[i].with_suffix('.npz'))
            ii = ij_npz['row']
            jj = ij_npz['col']
            update_arrays_jit_csr(ii, jj, loccode, array_code)

        else:
            ds = gdal.OpenEx(str(rasters_code[i]), open_options=["SPARSE_OK=TRUE"])
            tmp_code = ds.GetRasterBand(1).ReadAsArray()
            update_arrays_jit(tmp_code, loccode, array_code)

    logging.info("Saving the computed codes")
    dst_filename = str(manager.SA_CODE)
    y_pixels, x_pixels = array_code.shape  # number of pixels in y and x

    driver = gdal.GetDriverByName('GTiff')
    # FIX: the original call passed (bands, eType) swapped; it only worked
    # because gdal.GDT_Byte == 1. Documented order is bands=1, eType=GDT_Byte.
    dataset = driver.Create(dst_filename,
                            x_pixels, y_pixels,
                            1,
                            gdal.GDT_Byte,
                            options=["COMPRESS=LZW"])

    dataset.GetRasterBand(1).WriteArray(array_code.astype(np.int8))
    # Georeference the output from an existing raster.
    # NOTE(review): 'ds' is the last raster opened in the loop (or the first
    # one if only .npz files were used) -- all layer rasters are assumed to
    # share the same geotransform/projection.
    geotrans = ds.GetGeoTransform()
    proj = ds.GetProjection()
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None

    logging.info("Computed Code for the Study Area - Done")
1418
+
1419
def compute_vulnerability4scenario(manager:Accept_Manager):
    """ Compute the vulnerability for the scenario.

    This function **will modify** the data by the removed buildings/scenarios:
    pixels flagged as removed are reset to the lowest value (1) in both the
    vulnerability and the code rasters.

    FIXME: It could be interseting to permit the user to provide tiff files for the removed buildings and other scenarios.

    :param manager: the Accept_Manager object from the calling function
    """

    # Load the Study-Area vulnerability and keep its georeference for the outputs
    array_vuln = gdal.Open(str(manager.SA_VULN))
    geotrans = array_vuln.GetGeoTransform()
    proj = array_vuln.GetProjection()

    array_vuln = np.array(array_vuln.GetRasterBand(1).ReadAsArray())

    array_code = gdal.Open(str(manager.SA_CODE))
    array_code = np.array(array_code.GetRasterBand(1).ReadAsArray())

    Rbu = manager.get_files_in_rm_buildings()

    # Reset vulnerability/code to 1 wherever a "removed building" raster is 1
    for curfile in Rbu:
        array_mod = gdal.Open(str(curfile))
        array_mod = np.array(array_mod.GetRasterBand(1).ReadAsArray())

        ij = np.argwhere(array_mod == 1)
        array_vuln[ij[:, 0], ij[:, 1]] = 1
        array_code[ij[:, 0], ij[:, 1]] = 1

    def _save_byte_raster(dst_filename:str, array:np.ndarray):
        """ Write *array* as a LZW-compressed Byte GTiff with the SA georeference. """
        y_pixels, x_pixels = array.shape
        driver = gdal.GetDriverByName('GTiff')
        # FIX: the original call passed (bands, eType) swapped; it only worked
        # because gdal.GDT_Byte == 1. Documented order is bands=1, eType=GDT_Byte.
        dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
        dataset.GetRasterBand(1).WriteArray(array.astype(np.int8))
        dataset.SetGeoTransform(geotrans)
        dataset.SetProjection(proj)
        dataset.FlushCache()

    _save_byte_raster(str(manager.TMP_VULN), array_vuln)
    _save_byte_raster(str(manager.TMP_CODE), array_code)

    logging.info("Computed Vulnerability and code for the scenario")
1474
+
1475
def match_vulnerability2sim(inRas:Path, outRas:Path, MODREC:Path):
    """
    Clip the raster to the MODREC/simulation extent.

    :param inRas: the input raster file
    :param outRas: the output raster file
    :param MODREC: the MODREC/simulation extent file
    """

    # Derive the bounding box of the MODREC raster from its geotransform
    modrec_ds = gdal.Open(str(MODREC), gdalconst.GA_ReadOnly)
    gt = modrec_ds.GetGeoTransform()

    x_min = gt[0]
    y_max = gt[3]
    x_max = x_min + gt[1] * modrec_ds.RasterXSize
    y_min = y_max + gt[5] * modrec_ds.RasterYSize

    # Clip the input raster to that window (projWin = [ulx, uly, lrx, lry])
    src = gdal.Open(str(inRas))
    src = gdal.Translate(str(outRas), src, projWin=[x_min, y_max, x_max, y_min])
    src = None
1498
+
1499
+
1500
@nb.jit(nopython=True, boundscheck=False, inline='always')
def update_accept(accept, model_h, ij, bounds, loc_accept):
    # For every listed cell (i, j), find the water-depth interval it falls in
    # and write the matching acceptability score into 'accept'.
    for idx in range(len(bounds)):
        lo = bounds[idx, 0]
        hi = bounds[idx, 1]
        for i, j in ij:
            # half-open interval: lo < h <= hi
            if lo < model_h[i, j] <= hi:
                accept[i, j] = loc_accept[idx]
1506
+
1507
def compute_acceptability(manager:Accept_Manager,
                          model_h:np.ndarray,
                          vulnerability:np.ndarray,
                          interval:int,
                          geo_projection:tuple,
                          save_to_file:bool=True) -> np.ndarray:

    """
    Compute the local acceptability based on :
    - the vulnerability
    - the water depth
    - the matrices

    :param manager: the Accept_Manager object from the calling function
    :param model_h: the water depth matrix
    :param vulnerability: the vulnerability matrix
    :param interval: the return period
    :param geo_projection: the geotransform and the projection - tuple extracted from another raster file
    :param save_to_file: if True, also write the result as TMP_QFILES/Q{interval}.tif
    :return: the acceptability matrix (float32, same shape as vulnerability)
    """

    logging.info(interval)

    # Acceptability scores per (vulnerability class, water-depth interval)
    points_accept = pd.read_csv(manager.POINTS_CSV)
    points_accept = points_accept[points_accept["Interval"] == interval]
    points_accept = points_accept.reset_index()

    accept = np.zeros(vulnerability.shape, dtype=np.float32)

    # Water-depth intervals [m], matching the "h-*" columns listed below
    bounds = np.asarray([[0., 0.02], [0.02, 0.3], [0.3, 1], [1, 2.5], [2.5, 1000]], dtype=np.float32)
    depth_cols = ["h-0", "h-0.02", "h-0.3", "h-1", "h-2.5"]

    for vuln_level in range(1, 6):
        # Cells belonging to this vulnerability class
        ij = np.argwhere(vulnerability == vuln_level)

        # CSV rows are ordered from the highest class to the lowest
        idx_pts = 5 - vuln_level
        accept_pts = [points_accept[col][idx_pts] for col in depth_cols]

        update_accept(accept, model_h, ij, bounds, accept_pts)

    if save_to_file:
        # Save the result as a GTiff raster (the "Qi" file for this interval)
        dst_filename = str(manager.TMP_QFILES / "Q{}.tif".format(interval))

        y_pixels, x_pixels = accept.shape
        driver = gdal.GetDriverByName('GTiff')
        dataset = driver.Create(dst_filename,
                                x_pixels, y_pixels,
                                1,
                                gdal.GDT_Float32,
                                options=["COMPRESS=LZW"])

        dataset.GetRasterBand(1).WriteArray(accept.astype(np.float32))

        geotrans, proj = geo_projection
        dataset.SetGeoTransform(geotrans)
        dataset.SetProjection(proj)
        dataset.FlushCache()
        dataset = None

    return accept
1572
+
1573
def shp_to_raster(vector_fn:str | Path, raster_fn:str | Path, pixel_size:float = 1., manager:Accept_Manager = None):
    """
    Convert a vector layer to a raster tiff file.

    The raster will contain only 2 values : 0 and 1

    - 1 : the inside of the vector layer
    - 0 : the rest == NoData/NullValue

    :param vector_fn: the path to the vector file
    :param raster_fn: the path to the raster file
    :param pixel_size: the pixel size of the raster
    :param manager: optional Accept_Manager; if provided, the raster extent is
        the Study Area (manager.SA) instead of the vector file's own extent
    """

    vector_path = Path(vector_fn)
    raster_path = Path(raster_fn)

    if not vector_path.exists():
        logging.error(f"The vector file {vector_path} does not exist")
        return

    # Overwrite any previous result
    if raster_path.exists():
        os.remove(raster_path)

    # Force the input to be a string (GDAL/OGR expect plain strings)
    vector_fn = str(vector_fn)
    raster_fn = str(raster_fn)

    if manager is None:
        extent_fn = vector_fn
        logging.warning("The extent file is not provided, the extent will be the same as the vector file")
    else:
        extent_fn = str(manager.SA)
        logging.info("The extent file is provided")

    NoData_value = 0  # np.nan is not necessary a good idea

    # Open the data sources and read the extents
    source_ds:ogr.DataSource = ogr.Open(vector_fn)
    source_layer = source_ds.GetLayer()

    extent_ds:ogr.DataSource = ogr.Open(extent_fn)
    extent_layer = extent_ds.GetLayer()
    x_min, x_max, y_min, y_max = extent_layer.GetExtent()

    # Snap the extent outwards to whole coordinates
    x_min = float(int(x_min))
    x_max = float(np.ceil(x_max))
    y_min = float(int(y_min))
    y_max = float(np.ceil(y_max))

    # Create the destination data source
    x_res = int((x_max - x_min) / pixel_size)
    y_res = int((y_max - y_min) / pixel_size)

    target_ds = gdal.GetDriverByName('GTiff').Create(raster_fn,
                                                     x_res, y_res,
                                                     1,
                                                     gdal.GDT_Byte,
                                                     options=["COMPRESS=LZW",
                                                              'SPARSE_OK=TRUE'])

    target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(31370)  # Lambert72
    target_ds.SetProjection(srs.ExportToWkt())
    band = target_ds.GetRasterBand(1)
    band.SetNoDataValue(NoData_value)

    # Burn 1 into every pixel touched by the vector geometries
    gdal.RasterizeLayer(target_ds,
                        bands=[1],
                        layer=source_layer,
                        burn_values=[1],
                        options=["ALL_TOUCHED=TRUE"])

    # Explicitly release the datasets so the raster is flushed to disk
    # (FIX: the OGR datasources were previously never released)
    target_ds = None
    source_ds = None
    extent_ds = None
1648
+
1649
def vector_to_raster(layer:str,
                     manager:Accept_Manager,
                     attribute:str,
                     pixel_size:float,
                     convert_to_sparse:bool = True):
    """
    Convert a vector layer to a raster tiff file, burning *attribute* values.

    FIXME: Test de vulerability value and return immedialty if it is 1 if attribute == "Vulne"

    :param layer: the layer name in the GDB file
    :param manager: the Accept_Manager resolving the input/output paths
    :param attribute: the attribute to rasterize
    :param pixel_size: the pixel size of the raster
    :param convert_to_sparse: if True and the raster is sparse enough, also
        save a compressed CSR representation (.npz) next to the tiff
    """

    layer = str(layer)

    vector_input = str(manager.TMP_CODEVULNE / (layer + EXTENT))
    extent = str(manager.SA)
    attribute = str(attribute)
    pixel_size = float(pixel_size)

    # "Code" rasters carry a distinguishing suffix
    if attribute == "Code":
        out_file = manager.TMP_RASTERS / attribute / (layer + "_CODE.tiff")
    else:
        out_file = manager.TMP_RASTERS / attribute / (layer + ".tiff")

    if out_file.exists():
        os.remove(out_file)

    out_file = str(out_file)

    NoData_value = 0

    # Read the Study-Area extent
    extent_ds:ogr.DataSource = ogr.Open(extent)
    extent_layer = extent_ds.GetLayer()

    x_min, x_max, y_min, y_max = extent_layer.GetExtent()

    # Snap the extent outwards to whole coordinates
    x_min = float(int(x_min))
    x_max = float(np.ceil(x_max))
    y_min = float(int(y_min))
    y_max = float(np.ceil(y_max))

    # Open the vector data source
    source_ds:ogr.DataSource = ogr.Open(vector_input)
    if source_ds is None:
        logging.error(f"Could not open the data source {layer}")
        return
    source_layer = source_ds.GetLayer()

    # Create the destination data source
    x_res = int((x_max - x_min) / pixel_size)
    y_res = int((y_max - y_min) / pixel_size)
    target_ds:gdal.Driver = gdal.GetDriverByName('GTiff').Create(out_file,
                                                                 x_res, y_res, 1,
                                                                 gdal.GDT_Byte,
                                                                 options=["COMPRESS=DEFLATE",
                                                                          'SPARSE_OK=TRUE',])

    target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(31370)  # Lambert72
    target_ds.SetProjection(srs.ExportToWkt())

    band = target_ds.GetRasterBand(1)
    band.SetNoDataValue(NoData_value)

    # Burn the attribute values into every touched pixel
    gdal.RasterizeLayer(target_ds, [1],
                        source_layer,
                        options=["ATTRIBUTE="+attribute,
                                 "ALL_TOUCHED=TRUE"])

    if convert_to_sparse:
        SPARSITY_THRESHOLD = 0.02
        # Convert the raster to a npz containing the row and col of the non-null values
        array = band.ReadAsArray()
        ij = np.nonzero(array)

        if len(ij[0]) < int(x_res * y_res * SPARSITY_THRESHOLD):
            i, j = convert_to_csr(ij[0], ij[1], y_res)
            np.savez_compressed(Path(out_file).with_suffix('.npz'), row=np.asarray(i, dtype=np.int32), col=np.asarray(j, dtype=np.int32))
        else:
            # FIX: original message read "CSR forma {}" (typo)
            logging.info("The raster is not sparse enough to be converted to a CSR format {}".format(layer))

    # Explicitly release the datasets so the raster is flushed to disk
    # (FIX: the OGR datasources were previously never released)
    target_ds = None
    source_ds = None
    extent_ds = None

    return 0
1741
+
1742
@nb.jit(nopython=True, boundscheck=False, inline='always')
def convert_to_csr(i_indices, j_indices, num_rows):
    # Build a CSR (row-pointer / column-index) pair from COO-style indices.
    # NOTE(review): assumes i_indices is ordered by row (true for np.nonzero
    # output) so that col_idx stays aligned with row_ptr -- confirm for other callers.
    row_ptr = [0] * (num_rows + 1)
    col_idx = []

    # Count the entries of each row (shifted by one slot)...
    for k in range(len(i_indices)):
        row_ptr[i_indices[k] + 1] += 1
        col_idx.append(j_indices[k])

    # ...then accumulate the counts into start offsets
    for r in range(1, len(row_ptr)):
        row_ptr[r] += row_ptr[r - 1]

    return row_ptr, col_idx